wicked-brain 0.1.2 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/install.mjs +57 -8
  2. package/package.json +1 -1
  3. package/server/bin/wicked-brain-server.mjs +54 -7
  4. package/server/lib/file-watcher.mjs +152 -6
  5. package/server/lib/lsp-client.mjs +278 -0
  6. package/server/lib/lsp-helpers.mjs +133 -0
  7. package/server/lib/lsp-manager.mjs +164 -0
  8. package/server/lib/lsp-protocol.mjs +123 -0
  9. package/server/lib/lsp-servers.mjs +290 -0
  10. package/server/lib/sqlite-search.mjs +216 -10
  11. package/server/lib/wikilinks.mjs +20 -4
  12. package/server/package.json +1 -1
  13. package/skills/wicked-brain-agent/SKILL.md +52 -0
  14. package/skills/wicked-brain-agent/agents/consolidate.md +138 -0
  15. package/skills/wicked-brain-agent/agents/context.md +88 -0
  16. package/skills/wicked-brain-agent/agents/onboard.md +88 -0
  17. package/skills/wicked-brain-agent/agents/session-teardown.md +84 -0
  18. package/skills/wicked-brain-agent/hooks/claude-hooks.json +12 -0
  19. package/skills/wicked-brain-agent/hooks/copilot-hooks.json +10 -0
  20. package/skills/wicked-brain-agent/hooks/gemini-hooks.json +12 -0
  21. package/skills/wicked-brain-agent/platform/antigravity/wicked-brain-consolidate.md +103 -0
  22. package/skills/wicked-brain-agent/platform/antigravity/wicked-brain-context.md +67 -0
  23. package/skills/wicked-brain-agent/platform/antigravity/wicked-brain-onboard.md +74 -0
  24. package/skills/wicked-brain-agent/platform/antigravity/wicked-brain-session-teardown.md +72 -0
  25. package/skills/wicked-brain-agent/platform/claude/wicked-brain-consolidate.md +106 -0
  26. package/skills/wicked-brain-agent/platform/claude/wicked-brain-context.md +70 -0
  27. package/skills/wicked-brain-agent/platform/claude/wicked-brain-onboard.md +77 -0
  28. package/skills/wicked-brain-agent/platform/claude/wicked-brain-session-teardown.md +75 -0
  29. package/skills/wicked-brain-agent/platform/codex/wicked-brain-consolidate.toml +104 -0
  30. package/skills/wicked-brain-agent/platform/codex/wicked-brain-context.toml +68 -0
  31. package/skills/wicked-brain-agent/platform/codex/wicked-brain-onboard.toml +75 -0
  32. package/skills/wicked-brain-agent/platform/codex/wicked-brain-session-teardown.toml +73 -0
  33. package/skills/wicked-brain-agent/platform/copilot/wicked-brain-consolidate.agent.md +105 -0
  34. package/skills/wicked-brain-agent/platform/copilot/wicked-brain-context.agent.md +69 -0
  35. package/skills/wicked-brain-agent/platform/copilot/wicked-brain-onboard.agent.md +76 -0
  36. package/skills/wicked-brain-agent/platform/copilot/wicked-brain-session-teardown.agent.md +74 -0
  37. package/skills/wicked-brain-agent/platform/cursor/wicked-brain-consolidate.md +104 -0
  38. package/skills/wicked-brain-agent/platform/cursor/wicked-brain-context.md +68 -0
  39. package/skills/wicked-brain-agent/platform/cursor/wicked-brain-onboard.md +75 -0
  40. package/skills/wicked-brain-agent/platform/cursor/wicked-brain-session-teardown.md +73 -0
  41. package/skills/wicked-brain-agent/platform/gemini/wicked-brain-consolidate.md +107 -0
  42. package/skills/wicked-brain-agent/platform/gemini/wicked-brain-context.md +71 -0
  43. package/skills/wicked-brain-agent/platform/gemini/wicked-brain-onboard.md +78 -0
  44. package/skills/wicked-brain-agent/platform/gemini/wicked-brain-session-teardown.md +76 -0
  45. package/skills/wicked-brain-agent/platform/kiro/wicked-brain-consolidate.json +17 -0
  46. package/skills/wicked-brain-agent/platform/kiro/wicked-brain-context.json +16 -0
  47. package/skills/wicked-brain-agent/platform/kiro/wicked-brain-onboard.json +17 -0
  48. package/skills/wicked-brain-agent/platform/kiro/wicked-brain-session-teardown.json +17 -0
  49. package/skills/wicked-brain-compile/SKILL.md +8 -0
  50. package/skills/wicked-brain-configure/SKILL.md +99 -0
  51. package/skills/wicked-brain-enhance/SKILL.md +19 -0
  52. package/skills/wicked-brain-ingest/SKILL.md +68 -5
  53. package/skills/wicked-brain-lint/SKILL.md +14 -0
  54. package/skills/wicked-brain-lsp/SKILL.md +172 -0
  55. package/skills/wicked-brain-memory/SKILL.md +144 -0
  56. package/skills/wicked-brain-query/SKILL.md +78 -1
  57. package/skills/wicked-brain-retag/SKILL.md +79 -0
  58. package/skills/wicked-brain-search/SKILL.md +3 -11
  59. package/skills/wicked-brain-status/SKILL.md +7 -0
  60. package/skills/wicked-brain-update/SKILL.md +20 -1
@@ -0,0 +1,290 @@
1
/**
 * Known language servers map — 40+ servers covering 70+ extensions.
 * Extensible via {brainPath}/_meta/lsp.json.
 *
 * Entry shape:
 *   command    - executable to spawn
 *   args       - argv for the LSP transport (most servers speak stdio)
 *   extensions - file extensions (with leading dot) routed to this server;
 *                lookups are case-sensitive (see `.r` / `.R` below)
 *   install    - { method, package } hint for installing a missing server
 *   filenames  - optional extensionless filename patterns (dockerfile only);
 *                NOTE(review): resolveServer() matches extensions only, so
 *                filename matching presumably happens in the caller — confirm
 */

import { readFileSync } from "node:fs";

export const KNOWN_SERVERS = {
  // Web
  typescript: {
    command: "typescript-language-server", args: ["--stdio"],
    extensions: [".ts", ".tsx", ".js", ".jsx", ".mts", ".mjs"],
    // Two space-separated npm packages: the server plus the typescript
    // compiler it requires at runtime.
    // NOTE(review): confirm the installer splits this string on spaces.
    install: { method: "npm", package: "typescript-language-server typescript" }
  },
  html: {
    command: "html-languageserver", args: ["--stdio"],
    extensions: [".html", ".htm"],
    // NOTE(review): vscode-html-languageserver-bin is deprecated on npm;
    // vscode-langservers-extracted is the maintained replacement — verify.
    install: { method: "npm", package: "vscode-html-languageserver-bin" }
  },
  css: {
    command: "css-languageserver", args: ["--stdio"],
    extensions: [".css", ".scss", ".less"],
    // NOTE(review): vscode-css-languageserver-bin is likewise deprecated — verify.
    install: { method: "npm", package: "vscode-css-languageserver-bin" }
  },
  vue: {
    command: "vue-language-server", args: ["--stdio"],
    extensions: [".vue"],
    install: { method: "npm", package: "@vue/language-server" }
  },
  svelte: {
    command: "svelteserver", args: ["--stdio"],
    extensions: [".svelte"],
    install: { method: "npm", package: "svelte-language-server" }
  },
  json: {
    command: "json-languageserver", args: ["--stdio"],
    extensions: [".json", ".jsonc"],
    install: { method: "npm", package: "vscode-json-languageserver" }
  },
  astro: {
    command: "astro-ls", args: ["--stdio"],
    extensions: [".astro"],
    install: { method: "npm", package: "@astrojs/language-server" }
  },
  // Backend
  python: {
    command: "pyright-langserver", args: ["--stdio"],
    extensions: [".py"],
    install: { method: "npm", package: "pyright" }
  },
  go: {
    command: "gopls", args: ["serve"],
    extensions: [".go"],
    install: { method: "go", package: "golang.org/x/tools/gopls" }
  },
  rust: {
    // rust-analyzer speaks stdio LSP by default — no args needed.
    command: "rust-analyzer", args: [],
    extensions: [".rs"],
    install: { method: "rustup", package: "rust-analyzer" }
  },
  java: {
    command: "jdt-language-server", args: [],
    extensions: [".java"],
    install: { method: "manual", package: "Eclipse JDT LS — download from eclipse.org" }
  },
  csharp: {
    command: "omnisharp", args: ["-lsp"],
    extensions: [".cs"],
    install: { method: "dotnet", package: "OmniSharp" }
  },
  cpp: {
    command: "clangd", args: [],
    extensions: [".c", ".cpp", ".cc", ".cxx", ".h", ".hpp"],
    install: { method: "brew", package: "llvm" }
  },
  ruby: {
    command: "ruby-lsp", args: ["--stdio"],
    extensions: [".rb"],
    install: { method: "gem", package: "ruby-lsp" }
  },
  php: {
    command: "intelephense", args: ["--stdio"],
    extensions: [".php"],
    install: { method: "npm", package: "intelephense" }
  },
  kotlin: {
    command: "kotlin-language-server", args: [],
    extensions: [".kt", ".kts"],
    install: { method: "brew", package: "kotlin-language-server" }
  },
  scala: {
    command: "metals", args: [],
    extensions: [".scala", ".sc"],
    install: { method: "coursier", package: "metals" }
  },
  elixir: {
    command: "elixir-ls", args: [],
    extensions: [".ex", ".exs"],
    install: { method: "mix", package: "elixir_ls" }
  },
  erlang: {
    command: "erlang_ls", args: ["--stdio"],
    extensions: [".erl"],
    install: { method: "manual", package: "erlang_ls — build from source" }
  },
  // Systems
  zig: {
    command: "zls", args: [],
    extensions: [".zig"],
    install: { method: "brew", package: "zls" }
  },
  haskell: {
    command: "haskell-language-server", args: ["--stdio"],
    extensions: [".hs", ".lhs"],
    install: { method: "ghcup", package: "hls" }
  },
  ocaml: {
    command: "ocamllsp", args: [],
    extensions: [".ml", ".mli"],
    install: { method: "opam", package: "ocaml-lsp-server" }
  },
  // Scripting
  lua: {
    command: "lua-language-server", args: ["--stdio"],
    extensions: [".lua"],
    install: { method: "brew", package: "lua-language-server" }
  },
  perl: {
    command: "pls", args: ["--stdio"],
    extensions: [".pl", ".pm"],
    install: { method: "cpan", package: "PLS" }
  },
  r: {
    // R has no standalone server binary — the languageserver package is run
    // inside an R session.
    command: "R", args: ["--no-save", "--slave", "-e", "languageserver::run()"],
    // Both cases listed because extension lookup is case-sensitive.
    extensions: [".r", ".R"],
    install: { method: "r", package: "languageserver" }
  },
  julia: {
    command: "julia", args: ["--project=@.", "-e", "using LanguageServer; runserver()"],
    extensions: [".jl"],
    install: { method: "julia", package: "LanguageServer" }
  },
  bash: {
    command: "bash-language-server", args: ["start"],
    extensions: [".sh", ".bash"],
    install: { method: "npm", package: "bash-language-server" }
  },
  // Data & Config
  sql: {
    command: "sql-language-server", args: ["up", "--stdio"],
    extensions: [".sql"],
    install: { method: "npm", package: "sql-language-server" }
  },
  graphql: {
    command: "graphql-lsp", args: ["--stdio"],
    extensions: [".graphql", ".gql"],
    install: { method: "npm", package: "graphql-language-service-cli" }
  },
  terraform: {
    command: "terraform-ls", args: ["serve"],
    extensions: [".tf"],
    install: { method: "brew", package: "terraform-ls" }
  },
  yaml: {
    command: "yaml-language-server", args: ["--stdio"],
    extensions: [".yaml", ".yml"],
    install: { method: "npm", package: "yaml-language-server" }
  },
  toml: {
    command: "taplo", args: ["lsp", "stdio"],
    extensions: [".toml"],
    install: { method: "cargo", package: "taplo-cli" }
  },
  xml: {
    command: "lemminx", args: [],
    extensions: [".xml", ".xsd", ".xsl"],
    install: { method: "manual", package: "Eclipse LemMinX" }
  },
  // Mobile
  swift: {
    command: "sourcekit-lsp", args: [],
    extensions: [".swift"],
    install: { method: "manual", package: "Built into Xcode" }
  },
  dart: {
    command: "dart", args: ["language-server", "--protocol=lsp"],
    extensions: [".dart"],
    install: { method: "brew", package: "dart" }
  },
  // Infra
  dockerfile: {
    command: "docker-langserver", args: ["--stdio"],
    // No extensions — Dockerfiles are matched by filename, not suffix.
    extensions: [],
    filenames: ["Dockerfile", "Dockerfile.*"],
    install: { method: "npm", package: "dockerfile-language-server-nodejs" }
  },
  // Docs
  markdown: {
    command: "marksman", args: ["server"],
    extensions: [".md", ".markdown"],
    install: { method: "brew", package: "marksman" }
  },
  latex: {
    command: "texlab", args: [],
    extensions: [".tex"],
    install: { method: "cargo", package: "texlab" }
  },
  // Other
  clojure: {
    command: "clojure-lsp", args: ["--stdio"],
    extensions: [".clj", ".cljs", ".cljc", ".edn"],
    // Fully-qualified homebrew tap name (tap/repo/formula).
    install: { method: "brew", package: "clojure-lsp/brew/clojure-lsp" }
  },
  fsharp: {
    command: "fsautocomplete", args: ["--lsp"],
    extensions: [".fs", ".fsx", ".fsi"],
    install: { method: "dotnet", package: "fsautocomplete" }
  },
  gleam: {
    command: "gleam", args: ["lsp"],
    extensions: [".gleam"],
    install: { method: "brew", package: "gleam" }
  },
  solidity: {
    command: "solidity-lsp", args: ["--stdio"],
    extensions: [".sol"],
    install: { method: "npm", package: "@nomicfoundation/hardhat-language-server" }
  },
  prisma: {
    command: "prisma-language-server", args: ["--stdio"],
    extensions: [".prisma"],
    install: { method: "npm", package: "@prisma/language-server" }
  },
  protobuf: {
    command: "buf", args: ["lsp"],
    extensions: [".proto"],
    // Fully-qualified homebrew tap name (tap/repo/formula).
    install: { method: "brew", package: "bufbuild/buf/buf" }
  },
};
240
+
241
// Extension → server key lookup; the first server to claim an extension wins,
// preserving KNOWN_SERVERS declaration order as the priority order.
const EXTENSION_MAP = {};
for (const [key, server] of Object.entries(KNOWN_SERVERS)) {
  for (const ext of server.extensions || []) {
    if (!(ext in EXTENSION_MAP)) EXTENSION_MAP[ext] = key;
  }
}
248
+
249
/**
 * Resolve a file extension to its language server config.
 * User overrides (from _meta/lsp.json) take precedence over the built-in map.
 * @param {string} extension - file extension including the leading dot (e.g. ".ts")
 * @param {Object} [overrides={}] - user server map keyed by server name
 * @returns {?Object} server config spread together with its `key`, or null
 *   when no server is known for the extension
 */
export function resolveServer(extension, overrides = {}) {
  // User config wins over the built-in table.
  const match = Object.entries(overrides).find(
    ([, server]) => server.extensions?.includes(extension)
  );
  if (match) {
    const [key, server] = match;
    return { key, ...server };
  }

  // Fall back to the built-in extension lookup.
  const builtinKey = EXTENSION_MAP[extension];
  return builtinKey ? { key: builtinKey, ...KNOWN_SERVERS[builtinKey] } : null;
}
265
+
266
/**
 * Load user server overrides from {brainPath}/_meta/lsp.json.
 * A missing or unparseable config is treated as "no overrides".
 * @param {string} brainPath - root directory of the brain
 * @returns {Object} the config's `servers` map, or {} on any failure
 */
export function loadUserConfig(brainPath) {
  const configPath = `${brainPath}/_meta/lsp.json`;
  try {
    const parsed = JSON.parse(readFileSync(configPath, "utf-8"));
    return parsed.servers || {};
  } catch {
    // Best-effort: an absent or corrupt config simply means no overrides.
    return {};
  }
}
279
+
280
/**
 * Collect every file extension handled by any built-in or user-configured
 * server (used by FileWatcher for event filtering).
 * @param {Object} [overrides={}] - user server map from _meta/lsp.json
 * @returns {Set<string>} union of built-in and override extensions
 */
export function getKnownExtensions(overrides = {}) {
  const collect = (servers) =>
    Object.values(servers).flatMap((server) => server.extensions || []);
  return new Set([...collect(KNOWN_SERVERS), ...collect(overrides)]);
}
@@ -11,6 +11,20 @@ function escapeFtsQuery(query) {
11
11
  .join(" ");
12
12
  }
13
13
 
14
/** Weight factor for backlink count in search ranking (PageRank-lite). */
const BACKLINK_WEIGHT = 0.5;

/** Weight factor for access count in search ranking. */
const SEARCH_ACCESS_WEIGHT = 0.1;

/**
 * Candidate scoring weights for promote mode.
 * The four weights sum to 1.0, so each term contributes its stated share
 * of the promote score.
 */
const PROMOTE_ACCESS_WEIGHT = 0.3;
const PROMOTE_SESSION_WEIGHT = 0.3;
const PROMOTE_BACKLINK_WEIGHT = 0.2;
const PROMOTE_RECENCY_WEIGHT = 0.2;
// Recency window for promote scoring: documents older than this contribute
// zero recency (the decay term clamps at 1.0).
const MAX_AGE_MS = 7776000000; // 90 days
// Archive eligibility: unreferenced, unaccessed docs idle longer than this.
const ARCHIVE_AGE_MS = 2592000000; // 30 days
27
+
14
28
  export class SqliteSearch {
15
29
  #db;
16
30
  #brainId;
@@ -48,12 +62,74 @@ export class SqliteSearch {
48
62
  source_brain TEXT NOT NULL,
49
63
  target_path TEXT NOT NULL,
50
64
  target_brain TEXT,
65
+ rel TEXT,
51
66
  link_text TEXT
52
67
  );
53
68
 
54
69
  CREATE INDEX IF NOT EXISTS idx_links_source ON links(source_id);
55
70
  CREATE INDEX IF NOT EXISTS idx_links_target ON links(target_path);
71
+
72
+ CREATE TABLE IF NOT EXISTS access_log (
73
+ doc_id TEXT NOT NULL,
74
+ session_id TEXT NOT NULL,
75
+ accessed_at INTEGER NOT NULL
76
+ );
77
+ CREATE INDEX IF NOT EXISTS idx_access_doc ON access_log(doc_id);
78
+ CREATE INDEX IF NOT EXISTS idx_access_session ON access_log(session_id);
56
79
  `);
80
+
81
+ this.#migrate();
82
+ }
83
+
84
  /**
   * Versioned schema migration system.
   * Each migration upgrades from version N-1 to N.
   * Migrations are idempotent — safe to re-run.
   */
  #migrate() {
    // Ensure _schema_version table exists
    this.#db.exec(`
      CREATE TABLE IF NOT EXISTS _schema_version (
        version INTEGER NOT NULL
      )
    `);

    // Single-row table; absence of a row means a pre-versioning database.
    const row = this.#db.prepare(`SELECT version FROM _schema_version LIMIT 1`).get();
    let currentVersion = row ? row.version : 0;

    // Migration 1: add rel column to links table + access_log table
    if (currentVersion < 1) {
      // Column probe: prepare() throws when `rel` does not exist yet,
      // which is exactly the case where the ALTER must run.
      try { this.#db.prepare(`SELECT rel FROM links LIMIT 0`).get(); } catch {
        this.#db.exec(`ALTER TABLE links ADD COLUMN rel TEXT`);
      }
      // access_log is created by #initSchema's CREATE TABLE IF NOT EXISTS,
      // but for databases that predate it, ensure it exists
      this.#db.exec(`
        CREATE TABLE IF NOT EXISTS access_log (
          doc_id TEXT NOT NULL, session_id TEXT NOT NULL, accessed_at INTEGER NOT NULL
        );
        CREATE INDEX IF NOT EXISTS idx_access_doc ON access_log(doc_id);
        CREATE INDEX IF NOT EXISTS idx_access_session ON access_log(session_id);
      `);
      currentVersion = 1;
    }

    // Future migrations go here:
    // if (currentVersion < 2) { ... currentVersion = 2; }

    // Persist the current version (delete-then-insert keeps the table
    // single-row even if it somehow accumulated extras).
    this.#db.exec(`DELETE FROM _schema_version`);
    this.#db.prepare(`INSERT INTO _schema_version (version) VALUES (?)`).run(currentVersion);
  }
124
+
125
+ /** Returns the current schema version number. */
126
+ schemaVersion() {
127
+ try {
128
+ const row = this.#db.prepare(`SELECT version FROM _schema_version LIMIT 1`).get();
129
+ return row ? row.version : 0;
130
+ } catch {
131
+ return 0;
132
+ }
57
133
  }
58
134
 
59
135
  index(doc) {
@@ -80,8 +156,8 @@ export class SqliteSearch {
80
156
 
81
157
  const deleteLinks = this.#db.prepare(`DELETE FROM links WHERE source_id = ?`);
82
158
  const insertLink = this.#db.prepare(`
83
- INSERT INTO links (source_id, source_brain, target_path, target_brain, link_text)
84
- VALUES (?, ?, ?, ?, ?)
159
+ INSERT INTO links (source_id, source_brain, target_path, target_brain, rel, link_text)
160
+ VALUES (?, ?, ?, ?, ?, ?)
85
161
  `);
86
162
 
87
163
  const run = this.#db.transaction(() => {
@@ -91,7 +167,7 @@ export class SqliteSearch {
91
167
  deleteLinks.run(id);
92
168
  const wikilinks = parseWikilinks(content);
93
169
  for (const link of wikilinks) {
94
- insertLink.run(id, brainId, link.path, link.brain, link.raw);
170
+ insertLink.run(id, brainId, link.path, link.brain, link.rel || null, link.raw);
95
171
  }
96
172
  });
97
173
 
@@ -119,31 +195,66 @@ export class SqliteSearch {
119
195
  run();
120
196
  }
121
197
 
122
- search({ query, limit = 10, offset = 0 }) {
198
+ search({ query, limit = 10, offset = 0, since = null, session_id = null }) {
123
199
  const escaped = escapeFtsQuery(query);
124
200
  if (!escaped) return { results: [], total_matches: 0, showing: 0 };
125
201
 
202
+ const sinceClause = since ? `AND d.indexed_at >= ?` : "";
203
+ const sinceParams = since ? [new Date(since).getTime()] : [];
204
+
126
205
  const rows = this.#db
127
206
  .prepare(`
128
207
  SELECT
129
208
  d.id,
130
209
  d.path,
131
210
  d.brain_id,
132
- snippet(documents_fts, 2, '<b>', '</b>', '…', 32) AS snippet
211
+ snippet(documents_fts, 2, '<b>', '</b>', '…', 32) AS snippet,
212
+ COALESCE(link_count.cnt, 0) AS backlink_count,
213
+ COALESCE(ac.cnt, 0) AS access_count
133
214
  FROM documents_fts f
134
215
  JOIN documents d ON d.id = f.id
216
+ LEFT JOIN (
217
+ SELECT target_path, COUNT(*) AS cnt
218
+ FROM links
219
+ GROUP BY target_path
220
+ ) link_count ON d.path = link_count.target_path
221
+ LEFT JOIN (
222
+ SELECT doc_id, COUNT(*) AS cnt
223
+ FROM access_log
224
+ GROUP BY doc_id
225
+ ) ac ON d.id = ac.doc_id
135
226
  WHERE documents_fts MATCH ?
136
- ORDER BY rank
227
+ ${sinceClause}
228
+ ORDER BY (f.rank - (COALESCE(link_count.cnt, 0) * ${BACKLINK_WEIGHT}) - (COALESCE(ac.cnt, 0) * ${SEARCH_ACCESS_WEIGHT}))
137
229
  LIMIT ? OFFSET ?
138
230
  `)
139
- .all(escaped, limit, offset);
231
+ .all(escaped, ...sinceParams, limit, offset);
140
232
 
141
233
  const countRow = this.#db
142
- .prepare(`SELECT COUNT(*) as cnt FROM documents_fts WHERE documents_fts MATCH ?`)
143
- .get(escaped);
234
+ .prepare(
235
+ `SELECT COUNT(*) as cnt FROM documents_fts f
236
+ JOIN documents d ON d.id = f.id
237
+ WHERE documents_fts MATCH ?
238
+ ${sinceClause}`
239
+ )
240
+ .get(escaped, ...sinceParams);
144
241
 
145
242
  const total_matches = countRow ? countRow.cnt : 0;
146
243
 
244
+ // Log access for each returned document if session_id provided
245
+ if (session_id && rows.length > 0) {
246
+ const logAccess = this.#db.prepare(
247
+ `INSERT INTO access_log (doc_id, session_id, accessed_at) VALUES (?, ?, ?)`
248
+ );
249
+ const now = Date.now();
250
+ const logAll = this.#db.transaction(() => {
251
+ for (const row of rows) {
252
+ logAccess.run(row.id, session_id, now);
253
+ }
254
+ });
255
+ logAll();
256
+ }
257
+
147
258
  return {
148
259
  results: rows,
149
260
  total_matches,
@@ -230,6 +341,10 @@ export class SqliteSearch {
230
341
  .prepare(`SELECT COUNT(*) as cnt FROM documents WHERE path LIKE 'wiki/%'`)
231
342
  .get().cnt;
232
343
 
344
+ const memory = this.#db
345
+ .prepare(`SELECT COUNT(*) as cnt FROM documents WHERE path LIKE 'memory/%'`)
346
+ .get().cnt;
347
+
233
348
  const lastRow = this.#db
234
349
  .prepare(`SELECT MAX(indexed_at) as last FROM documents`)
235
350
  .get();
@@ -243,7 +358,7 @@ export class SqliteSearch {
243
358
  // in-memory or inaccessible
244
359
  }
245
360
 
246
- return { total, chunks, wiki, last_indexed, db_size };
361
+ return { total, chunks, wiki, memory, last_indexed, db_size };
247
362
  }
248
363
 
249
364
  health() {
@@ -254,6 +369,97 @@ export class SqliteSearch {
254
369
  };
255
370
  }
256
371
 
372
  /**
   * Select lifecycle candidate documents for the consolidate agent.
   *
   * mode "promote": chunk/memory docs ranked by a weighted blend of access
   * count, session diversity, backlink count, and recency; the recency term
   * decays linearly from 1 to 0 over MAX_AGE_MS (two-argument MIN() is
   * SQLite's scalar min, clamping the age ratio at 1.0).
   *
   * mode "archive": chunk/memory docs with zero logged accesses and zero
   * backlinks, indexed more than ARCHIVE_AGE_MS ago, oldest first.
   *
   * @param {{mode: string, limit?: number}} opts - mode is "promote" or "archive"
   * @returns {Array<Object>} candidate rows (id, path, indexed_at, frontmatter
   *   plus the relevant counts)
   * @throws {Error} when mode is neither "promote" nor "archive"
   */
  candidates({ mode, limit = 20 }) {
    const now = Date.now();

    if (mode === "promote") {
      // `now` and the weight constants are interpolated straight into the
      // SQL text; all are numeric module-level constants, so there is no
      // injection surface.
      return this.#db.prepare(`
        SELECT d.id, d.path, d.indexed_at, d.frontmatter,
          COALESCE(ac.access_count, 0) AS access_count,
          COALESCE(ac.session_diversity, 0) AS session_diversity,
          COALESCE(lc.cnt, 0) AS backlink_count
        FROM documents d
        LEFT JOIN (
          SELECT doc_id,
            COUNT(*) AS access_count,
            COUNT(DISTINCT session_id) AS session_diversity
          FROM access_log GROUP BY doc_id
        ) ac ON d.id = ac.doc_id
        LEFT JOIN (
          SELECT target_path, COUNT(*) AS cnt
          FROM links GROUP BY target_path
        ) lc ON d.path = lc.target_path
        WHERE d.path LIKE 'chunks/%' OR d.path LIKE 'memory/%'
        ORDER BY (
          COALESCE(ac.access_count, 0) * ${PROMOTE_ACCESS_WEIGHT}
          + COALESCE(ac.session_diversity, 0) * ${PROMOTE_SESSION_WEIGHT}
          + COALESCE(lc.cnt, 0) * ${PROMOTE_BACKLINK_WEIGHT}
          + (1.0 - MIN(CAST((${now} - d.indexed_at) AS REAL) / ${MAX_AGE_MS}, 1.0)) * ${PROMOTE_RECENCY_WEIGHT}
        ) DESC
        LIMIT ?
      `).all(limit);
    }

    if (mode === "archive") {
      // Only docs indexed before the cutoff AND never accessed AND never
      // linked-to are eligible for archiving.
      const cutoff = now - ARCHIVE_AGE_MS;
      return this.#db.prepare(`
        SELECT d.id, d.path, d.indexed_at, d.frontmatter,
          COALESCE(ac.access_count, 0) AS access_count,
          COALESCE(lc.cnt, 0) AS backlink_count
        FROM documents d
        LEFT JOIN (
          SELECT doc_id, COUNT(*) AS access_count
          FROM access_log GROUP BY doc_id
        ) ac ON d.id = ac.doc_id
        LEFT JOIN (
          SELECT target_path, COUNT(*) AS cnt
          FROM links GROUP BY target_path
        ) lc ON d.path = lc.target_path
        WHERE (d.path LIKE 'chunks/%' OR d.path LIKE 'memory/%')
          AND COALESCE(ac.access_count, 0) = 0
          AND COALESCE(lc.cnt, 0) = 0
          AND d.indexed_at < ?
        ORDER BY d.indexed_at ASC
        LIMIT ?
      `).all(cutoff, limit);
    }

    throw new Error(`Unknown candidates mode: ${mode}`);
  }
429
+
430
+ accessLog(docId) {
431
+ const row = this.#db.prepare(`
432
+ SELECT
433
+ COUNT(*) as access_count,
434
+ COUNT(DISTINCT session_id) as session_diversity
435
+ FROM access_log
436
+ WHERE doc_id = ?
437
+ `).get(docId);
438
+ return row;
439
+ }
440
+
441
+ recentMemories({ days = 7, limit = 10 } = {}) {
442
+ const since = Date.now() - (days * 86400000);
443
+ return this.#db.prepare(`
444
+ SELECT id, path, frontmatter, indexed_at
445
+ FROM documents
446
+ WHERE path LIKE 'memory/%'
447
+ AND indexed_at >= ?
448
+ ORDER BY indexed_at DESC
449
+ LIMIT ?
450
+ `).all(since, limit);
451
+ }
452
+
453
+ contradictions() {
454
+ return this.#db
455
+ .prepare(`
456
+ SELECT source_id, source_brain, target_path, target_brain, link_text
457
+ FROM links
458
+ WHERE rel = 'contradicts'
459
+ `)
460
+ .all();
461
+ }
462
+
257
463
  close() {
258
464
  this.#db.close();
259
465
  }
@@ -1,5 +1,14 @@
1
1
  const WIKILINK_RE = /\[\[([^\]]+)\]\]/g;
2
2
 
3
/**
 * Relationship vocabulary for typed wikilinks: [[rel::target]].
 * When the left side of a `::` link matches one of these, it is parsed as a
 * `rel` (typed relationship); otherwise it is treated as a brain name
 * (cross-brain link).
 */
const KNOWN_RELS = new Set([
  "contradicts",
  "supersedes",
  "supports",
  "caused-by",
  "extends",
  "depends-on",
]);
11
+
3
12
  export function parseWikilinks(text) {
4
13
  const links = [];
5
14
  for (const match of text.matchAll(WIKILINK_RE)) {
@@ -8,11 +17,18 @@ export function parseWikilinks(text) {
8
17
  const raw = match[0];
9
18
  if (inner.includes("::")) {
10
19
  const idx = inner.indexOf("::");
11
- const brain = inner.slice(0, idx).trim();
12
- const path = inner.slice(idx + 2).trim();
13
- if (brain && path) links.push({ brain, path, raw });
20
+ const left = inner.slice(0, idx).trim();
21
+ const right = inner.slice(idx + 2).trim();
22
+ if (!left || !right) continue;
23
+ if (KNOWN_RELS.has(left)) {
24
+ // Typed relationship link
25
+ links.push({ brain: null, path: right, rel: left, raw });
26
+ } else {
27
+ // Cross-brain link
28
+ links.push({ brain: left, path: right, rel: null, raw });
29
+ }
14
30
  } else {
15
- links.push({ brain: null, path: inner, raw });
31
+ links.push({ brain: null, path: inner, rel: null, raw });
16
32
  }
17
33
  }
18
34
  return links;
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "wicked-brain-server",
3
- "version": "0.1.2",
3
+ "version": "0.3.2",
4
4
  "type": "module",
5
5
  "description": "SQLite FTS5 search server for wicked-brain digital knowledge bases",
6
6
  "keywords": [
@@ -0,0 +1,52 @@
1
+ ---
2
+ name: wicked-brain-agent
3
+ description: Factory skill for listing and dispatching wicked-brain agents. Agents enforce multi-step pipelines for consolidation, context assembly, session teardown, and project onboarding.
4
+ ---
5
+
6
+ # wicked-brain:agent
7
+
8
+ Factory skill for wicked-brain agents. Lists available agents and dispatches them.
9
+
10
+ ## Config
11
+
12
+ Read `_meta/config.json` for brain path and server port.
13
+ If it doesn't exist, trigger wicked-brain:init.
14
+
15
+ ## Parameters
16
+
17
+ - **action** (required): `list` or `dispatch`
18
+ - **agent** (dispatch mode): name of agent to dispatch (`consolidate`, `context`, `session-teardown`, `onboard`)
19
+ - **depth** (list mode, optional): 0 (summary), 1 (pipeline steps), 2 (full instructions). Default: 0.
20
+ - **params** (dispatch mode, optional): parameters to pass to the agent
21
+
22
+ ## List Mode
23
+
24
+ Read agent definitions from the `agents/` subdirectory relative to this skill file. Return at requested depth.
25
+
26
+ **Depth 0** — one-line summaries:
27
+ - `consolidate`: Three-pass lifecycle — archive noise, promote patterns, merge duplicates
28
+ - `context`: Tiered knowledge surfacing — hot path for simple prompts, fast path for complex
29
+ - `session-teardown`: Capture session learnings — decisions, patterns, gotchas into memory
30
+ - `onboard`: Full project understanding — scan, trace, ingest, compile, configure
31
+
32
+ **Depth 1** — pipeline steps (read the agent's `## Depth 1` section)
33
+
34
+ **Depth 2** — full subagent instructions (read the agent's `## Depth 2` section)
35
+
36
+ ## Dispatch Mode
37
+
38
+ 1. Read the requested agent's `.md` file from `agents/` at depth 2
39
+ 2. Dispatch as a subagent with those instructions using the host CLI's mechanism:
40
+ - Claude Code: use `Agent` tool
41
+ - Gemini CLI: `@agent_name` dispatch
42
+ - Copilot CLI: `/agent` command
43
+ - Other CLIs: inline execution (run the pipeline steps in current context)
44
+ 3. Pass brain_path, port, and any additional params to the subagent
45
+
46
+ If the host CLI does not support subagent dispatch, fall back to inline execution — run the pipeline steps directly in the current context.
47
+
48
+ ## Cross-Platform Notes
49
+
50
+ - Agent definitions are portable markdown — they work on all platforms
51
+ - Dispatch mechanism varies by CLI but instructions are identical
52
+ - Factory skill uses Read tool to load agent files (not shell commands)