moflo 4.8.21 → 4.8.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. package/.claude/agents/browser/browser-agent.yaml +182 -182
  2. package/.claude/agents/core/coder.md +265 -265
  3. package/.claude/agents/core/planner.md +167 -167
  4. package/.claude/agents/core/researcher.md +189 -189
  5. package/.claude/agents/core/reviewer.md +325 -325
  6. package/.claude/agents/core/tester.md +318 -318
  7. package/.claude/agents/database-specialist.yaml +21 -21
  8. package/.claude/agents/dual-mode/codex-coordinator.md +224 -224
  9. package/.claude/agents/dual-mode/codex-worker.md +211 -211
  10. package/.claude/agents/dual-mode/dual-orchestrator.md +291 -291
  11. package/.claude/agents/github/code-review-swarm.md +537 -537
  12. package/.claude/agents/github/github-modes.md +172 -172
  13. package/.claude/agents/github/issue-tracker.md +318 -318
  14. package/.claude/agents/github/multi-repo-swarm.md +552 -552
  15. package/.claude/agents/github/pr-manager.md +190 -190
  16. package/.claude/agents/github/project-board-sync.md +508 -508
  17. package/.claude/agents/github/release-manager.md +366 -366
  18. package/.claude/agents/github/release-swarm.md +582 -582
  19. package/.claude/agents/github/repo-architect.md +397 -397
  20. package/.claude/agents/github/swarm-issue.md +572 -572
  21. package/.claude/agents/github/swarm-pr.md +427 -427
  22. package/.claude/agents/github/sync-coordinator.md +451 -451
  23. package/.claude/agents/github/workflow-automation.md +634 -634
  24. package/.claude/agents/goal/code-goal-planner.md +445 -445
  25. package/.claude/agents/hive-mind/collective-intelligence-coordinator.md +129 -129
  26. package/.claude/agents/hive-mind/queen-coordinator.md +202 -202
  27. package/.claude/agents/hive-mind/scout-explorer.md +241 -241
  28. package/.claude/agents/hive-mind/swarm-memory-manager.md +192 -192
  29. package/.claude/agents/hive-mind/worker-specialist.md +216 -216
  30. package/.claude/agents/index.yaml +17 -17
  31. package/.claude/agents/neural/safla-neural.md +73 -73
  32. package/.claude/agents/project-coordinator.yaml +15 -15
  33. package/.claude/agents/python-specialist.yaml +21 -21
  34. package/.claude/agents/reasoning/goal-planner.md +72 -72
  35. package/.claude/agents/security-auditor.yaml +20 -20
  36. package/.claude/agents/swarm/adaptive-coordinator.md +395 -395
  37. package/.claude/agents/swarm/hierarchical-coordinator.md +326 -326
  38. package/.claude/agents/swarm/mesh-coordinator.md +391 -391
  39. package/.claude/agents/templates/migration-plan.md +745 -745
  40. package/.claude/agents/typescript-specialist.yaml +21 -21
  41. package/.claude/checkpoints/1767754460.json +8 -8
  42. package/.claude/commands/agents/agent-spawning.md +28 -28
  43. package/.claude/commands/github/github-modes.md +146 -146
  44. package/.claude/commands/github/github-swarm.md +121 -121
  45. package/.claude/commands/github/issue-tracker.md +291 -291
  46. package/.claude/commands/github/pr-manager.md +169 -169
  47. package/.claude/commands/github/release-manager.md +337 -337
  48. package/.claude/commands/github/repo-architect.md +366 -366
  49. package/.claude/commands/github/sync-coordinator.md +300 -300
  50. package/.claude/commands/memory/neural.md +47 -47
  51. package/.claude/commands/sparc/analyzer.md +51 -51
  52. package/.claude/commands/sparc/architect.md +53 -53
  53. package/.claude/commands/sparc/ask.md +97 -97
  54. package/.claude/commands/sparc/batch-executor.md +54 -54
  55. package/.claude/commands/sparc/code.md +89 -89
  56. package/.claude/commands/sparc/coder.md +54 -54
  57. package/.claude/commands/sparc/debug.md +83 -83
  58. package/.claude/commands/sparc/debugger.md +54 -54
  59. package/.claude/commands/sparc/designer.md +53 -53
  60. package/.claude/commands/sparc/devops.md +109 -109
  61. package/.claude/commands/sparc/docs-writer.md +80 -80
  62. package/.claude/commands/sparc/documenter.md +54 -54
  63. package/.claude/commands/sparc/innovator.md +54 -54
  64. package/.claude/commands/sparc/integration.md +83 -83
  65. package/.claude/commands/sparc/mcp.md +117 -117
  66. package/.claude/commands/sparc/memory-manager.md +54 -54
  67. package/.claude/commands/sparc/optimizer.md +54 -54
  68. package/.claude/commands/sparc/orchestrator.md +131 -131
  69. package/.claude/commands/sparc/post-deployment-monitoring-mode.md +83 -83
  70. package/.claude/commands/sparc/refinement-optimization-mode.md +83 -83
  71. package/.claude/commands/sparc/researcher.md +54 -54
  72. package/.claude/commands/sparc/reviewer.md +54 -54
  73. package/.claude/commands/sparc/security-review.md +80 -80
  74. package/.claude/commands/sparc/sparc-modes.md +174 -174
  75. package/.claude/commands/sparc/sparc.md +111 -111
  76. package/.claude/commands/sparc/spec-pseudocode.md +80 -80
  77. package/.claude/commands/sparc/supabase-admin.md +348 -348
  78. package/.claude/commands/sparc/swarm-coordinator.md +54 -54
  79. package/.claude/commands/sparc/tdd.md +54 -54
  80. package/.claude/commands/sparc/tester.md +54 -54
  81. package/.claude/commands/sparc/tutorial.md +79 -79
  82. package/.claude/commands/sparc/workflow-manager.md +54 -54
  83. package/.claude/commands/sparc.md +166 -166
  84. package/.claude/commands/swarm/analysis.md +95 -95
  85. package/.claude/commands/swarm/development.md +96 -96
  86. package/.claude/commands/swarm/examples.md +168 -168
  87. package/.claude/commands/swarm/maintenance.md +102 -102
  88. package/.claude/commands/swarm/optimization.md +117 -117
  89. package/.claude/commands/swarm/research.md +136 -136
  90. package/.claude/commands/swarm/testing.md +131 -131
  91. package/.claude/commands/workflows/development.md +77 -77
  92. package/.claude/commands/workflows/research.md +62 -62
  93. package/.claude/guidance/moflo-bootstrap.md +126 -126
  94. package/.claude/guidance/shipped/agent-bootstrap.md +126 -126
  95. package/.claude/guidance/shipped/guidance-memory-strategy.md +262 -262
  96. package/.claude/guidance/shipped/memory-strategy.md +204 -204
  97. package/.claude/guidance/shipped/moflo.md +668 -653
  98. package/.claude/guidance/shipped/task-swarm-integration.md +441 -441
  99. package/.claude/helpers/intelligence.cjs +207 -207
  100. package/.claude/helpers/statusline.cjs +851 -851
  101. package/.claude/settings.local.json +18 -0
  102. package/.claude/skills/fl/SKILL.md +583 -583
  103. package/.claude/skills/flo/SKILL.md +583 -583
  104. package/.claude/skills/github-code-review/SKILL.md +1140 -1140
  105. package/.claude/skills/github-multi-repo/SKILL.md +874 -874
  106. package/.claude/skills/github-project-management/SKILL.md +1277 -1277
  107. package/.claude/skills/github-release-management/SKILL.md +1081 -1081
  108. package/.claude/skills/github-workflow-automation/SKILL.md +1065 -1065
  109. package/.claude/skills/hive-mind-advanced/SKILL.md +712 -712
  110. package/.claude/skills/hooks-automation/SKILL.md +1201 -1201
  111. package/.claude/skills/performance-analysis/SKILL.md +563 -563
  112. package/.claude/skills/sparc-methodology/SKILL.md +1115 -1115
  113. package/.claude/skills/swarm-advanced/SKILL.md +973 -973
  114. package/.claude/workflow-state.json +4 -4
  115. package/LICENSE +21 -21
  116. package/README.md +698 -685
  117. package/bin/cli.js +0 -0
  118. package/bin/gate-hook.mjs +50 -50
  119. package/bin/gate.cjs +138 -138
  120. package/bin/generate-code-map.mjs +775 -775
  121. package/bin/hook-handler.cjs +83 -83
  122. package/bin/hooks.mjs +656 -656
  123. package/bin/index-guidance.mjs +892 -892
  124. package/bin/index-tests.mjs +709 -709
  125. package/bin/lib/process-manager.mjs +243 -243
  126. package/bin/lib/registry-cleanup.cjs +41 -41
  127. package/bin/prompt-hook.mjs +72 -72
  128. package/bin/semantic-search.mjs +472 -472
  129. package/bin/session-start-launcher.mjs +238 -238
  130. package/bin/setup-project.mjs +250 -250
  131. package/package.json +123 -123
  132. package/src/@claude-flow/cli/README.md +452 -452
  133. package/src/@claude-flow/cli/bin/cli.js +180 -180
  134. package/src/@claude-flow/cli/bin/preinstall.cjs +2 -2
  135. package/src/@claude-flow/cli/dist/src/commands/completions.js +409 -409
  136. package/src/@claude-flow/cli/dist/src/commands/doctor.js +18 -2
  137. package/src/@claude-flow/cli/dist/src/commands/embeddings.js +25 -25
  138. package/src/@claude-flow/cli/dist/src/commands/github.js +61 -61
  139. package/src/@claude-flow/cli/dist/src/commands/hive-mind.js +90 -90
  140. package/src/@claude-flow/cli/dist/src/commands/hooks.js +9 -9
  141. package/src/@claude-flow/cli/dist/src/commands/init.js +3 -8
  142. package/src/@claude-flow/cli/dist/src/commands/ruvector/import.js +14 -14
  143. package/src/@claude-flow/cli/dist/src/commands/ruvector/setup.js +624 -624
  144. package/src/@claude-flow/cli/dist/src/config/moflo-config.d.ts +3 -0
  145. package/src/@claude-flow/cli/dist/src/config/moflo-config.js +101 -91
  146. package/src/@claude-flow/cli/dist/src/index.d.ts +5 -0
  147. package/src/@claude-flow/cli/dist/src/index.js +44 -0
  148. package/src/@claude-flow/cli/dist/src/init/claudemd-generator.d.ts +29 -29
  149. package/src/@claude-flow/cli/dist/src/init/claudemd-generator.js +43 -43
  150. package/src/@claude-flow/cli/dist/src/init/executor.js +453 -453
  151. package/src/@claude-flow/cli/dist/src/init/helpers-generator.js +482 -482
  152. package/src/@claude-flow/cli/dist/src/init/moflo-init.d.ts +30 -30
  153. package/src/@claude-flow/cli/dist/src/init/moflo-init.js +140 -140
  154. package/src/@claude-flow/cli/dist/src/init/statusline-generator.js +876 -876
  155. package/src/@claude-flow/cli/dist/src/memory/memory-initializer.js +371 -371
  156. package/src/@claude-flow/cli/dist/src/runtime/headless.js +28 -28
  157. package/src/@claude-flow/cli/dist/src/services/container-worker-pool.d.ts +197 -0
  158. package/src/@claude-flow/cli/dist/src/services/container-worker-pool.js +584 -0
  159. package/src/@claude-flow/cli/dist/src/services/daemon-lock.d.ts +14 -0
  160. package/src/@claude-flow/cli/dist/src/services/daemon-lock.js +1 -1
  161. package/src/@claude-flow/cli/dist/src/services/headless-worker-executor.js +84 -84
  162. package/src/@claude-flow/cli/package.json +1 -1
  163. package/src/@claude-flow/guidance/README.md +1195 -1195
  164. package/src/@claude-flow/guidance/package.json +198 -198
  165. package/src/@claude-flow/memory/README.md +587 -587
  166. package/src/@claude-flow/memory/dist/agentdb-backend.js +26 -26
  167. package/src/@claude-flow/memory/dist/auto-memory-bridge.test.js +27 -27
  168. package/src/@claude-flow/memory/dist/hybrid-backend.d.ts +245 -0
  169. package/src/@claude-flow/memory/dist/hybrid-backend.js +569 -0
  170. package/src/@claude-flow/memory/dist/hybrid-backend.test.d.ts +8 -0
  171. package/src/@claude-flow/memory/dist/hybrid-backend.test.js +320 -0
  172. package/src/@claude-flow/memory/dist/sqlite-backend.d.ts +121 -0
  173. package/src/@claude-flow/memory/dist/sqlite-backend.js +572 -0
  174. package/src/@claude-flow/memory/dist/sqljs-backend.js +26 -26
  175. package/src/@claude-flow/memory/package.json +44 -44
  176. package/src/@claude-flow/shared/README.md +323 -323
  177. package/src/@claude-flow/shared/dist/events/event-store.js +31 -31
  178. package/src/README.md +493 -493
@@ -1,776 +1,776 @@
1
1
  #!/usr/bin/env node
2
- /**
3
- * Generate structural code map for a monorepo or project.
4
- *
5
- * Produces five chunk types stored in the `code-map` namespace of .swarm/memory.db:
6
- * 1. project: — one per top-level project directory (bird's-eye overview)
7
- * 2. dir: — one per directory with 2+ exported types (drill-down detail)
8
- * 3. iface-map: — batched interface-to-implementation mappings
9
- * 4. type-index: — batched type-name-to-file-path lookups
10
- * 5. file: — ONE PER FILE with exported types (file-level granularity)
11
- *
12
- * The `file:` entries are the key improvement — they enable precise semantic search
13
- * for individual types, entities, and services instead of diluting results across
14
- * large batches.
15
- *
16
- * Design: regex-based extraction (no AST parser), incremental via SHA-256 hash,
17
- * stores in sql.js memory DB, triggers embedding generation in background.
18
- *
19
- * Usage:
20
- * node node_modules/moflo/bin/generate-code-map.mjs # Incremental
21
- * node node_modules/moflo/bin/generate-code-map.mjs --force # Full regenerate
22
- * node node_modules/moflo/bin/generate-code-map.mjs --verbose # Detailed logging
23
- * node node_modules/moflo/bin/generate-code-map.mjs --no-embeddings # Skip embedding generation
24
- * node node_modules/moflo/bin/generate-code-map.mjs --stats # Print stats and exit
25
- * npx flo-codemap # Via npx
26
- */
27
-
28
- import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync } from 'fs';
29
- import { resolve, dirname, relative, basename, extname } from 'path';
30
- import { fileURLToPath } from 'url';
31
- import { createHash } from 'crypto';
32
- import { execSync, spawn } from 'child_process';
33
- import { mofloResolveURL } from './lib/moflo-resolve.mjs';
34
- const initSqlJs = (await import(mofloResolveURL('sql.js'))).default;
35
-
36
-
37
- const __dirname = dirname(fileURLToPath(import.meta.url));
38
-
39
// Detect project root: walk up from cwd to find a package.json.
// Fix: the original loop exited once `dir` reached the filesystem root
// without ever checking the root directory itself; check it before
// falling back to cwd.
function findProjectRoot() {
  let dir = process.cwd();
  const root = resolve(dir, '/');
  while (dir !== root) {
    if (existsSync(resolve(dir, 'package.json'))) return dir;
    dir = dirname(dir);
  }
  // The loop stops when dir === root, so give the root one last look.
  if (existsSync(resolve(root, 'package.json'))) return root;
  // No package.json anywhere up the tree — index from where we were run.
  return process.cwd();
}
49
-
50
// Root of the repository being indexed; every relative path below is
// resolved against it.
const projectRoot = findProjectRoot();
// Memory namespace that holds all code-map chunks.
const NAMESPACE = 'code-map';
// sql.js database file shared with the rest of the moflo tooling.
const DB_PATH = resolve(projectRoot, '.swarm/memory.db');
// Cache of the last indexed file-list hash, used for incremental runs.
const HASH_CACHE_PATH = resolve(projectRoot, '.swarm/code-map-hash.txt');

// Directories to exclude from indexing (applied both to git ls-files
// output and to the filesystem-walk fallback).
const EXCLUDE_DIRS = [
  'node_modules', 'dist', 'build', '.next', 'coverage',
  '.claude', 'template', 'back-office-template',
];
60
-
61
// Heuristic descriptions for well-known directory names. Matched against
// the last path segment only (see getDirDescription); unknown names get
// no description rather than a guess.
const DIR_DESCRIPTIONS = {
  entities: 'MikroORM entity definitions',
  services: 'business logic services',
  routes: 'Fastify route handlers',
  middleware: 'request middleware (auth, validation, tenancy)',
  schemas: 'Zod validation schemas',
  types: 'TypeScript type definitions',
  utils: 'utility helpers',
  config: 'configuration',
  migrations: 'database migrations',
  scripts: 'CLI scripts',
  components: 'React components',
  pages: 'route page components',
  contexts: 'React context providers',
  hooks: 'React custom hooks',
  layout: 'app shell layout',
  themes: 'MUI theme configuration',
  api: 'API client layer',
  locales: 'i18n translation files',
  tests: 'test suites',
  e2e: 'end-to-end tests',
  providers: 'dependency injection providers',
};
85
-
86
// Batch sizes for chunking the aggregate maps. Smaller batches make each
// stored chunk more focused, which improves semantic-search relevance.
const IFACE_MAP_BATCH = 20;
const TYPE_INDEX_BATCH = 30; // Reduced from 80 for better search relevance

// Parse args
const args = process.argv.slice(2);
const force = args.includes('--force'); // ignore hash cache, full regenerate
const verbose = args.includes('--verbose') || args.includes('-v');
const skipEmbeddings = args.includes('--no-embeddings');
const statsOnly = args.includes('--stats'); // print namespace counts and exit

// log() always prints; debug() only prints under --verbose/-v.
function log(msg) { console.log(`[code-map] ${msg}`); }
function debug(msg) { if (verbose) console.log(`[code-map] ${msg}`); }
99
-
100
- // ---------------------------------------------------------------------------
101
- // Database helpers
102
- // ---------------------------------------------------------------------------
103
-
104
/** Create the database's parent directory (.swarm/) if it is missing. */
function ensureDbDir() {
  const parent = dirname(DB_PATH);
  if (existsSync(parent)) return;
  mkdirSync(parent, { recursive: true });
}
108
-
109
/**
 * Open (or create) the sql.js database and ensure the memory_entries
 * schema exists.
 *
 * sql.js keeps the whole database in WASM memory: an existing DB file is
 * read into a buffer and loaded; writes are not persisted until saveDb()
 * exports the image back to disk.
 *
 * @returns {Promise<object>} Initialized sql.js Database instance.
 */
async function getDb() {
  ensureDbDir();
  const SQL = await initSqlJs();
  let db;
  if (existsSync(DB_PATH)) {
    const buffer = readFileSync(DB_PATH);
    db = new SQL.Database(buffer);
  } else {
    db = new SQL.Database();
  }

  // Schema shared with the wider moflo memory store; timestamps are epoch
  // milliseconds. UNIQUE(namespace, key) is what makes INSERT OR REPLACE
  // in storeEntry behave as an upsert.
  db.run(`
    CREATE TABLE IF NOT EXISTS memory_entries (
      id TEXT PRIMARY KEY,
      key TEXT NOT NULL,
      namespace TEXT DEFAULT 'default',
      content TEXT NOT NULL,
      type TEXT DEFAULT 'semantic',
      embedding TEXT,
      embedding_model TEXT DEFAULT 'local',
      embedding_dimensions INTEGER,
      tags TEXT,
      metadata TEXT,
      owner_id TEXT,
      created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
      updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
      expires_at INTEGER,
      last_accessed_at INTEGER,
      access_count INTEGER DEFAULT 0,
      status TEXT DEFAULT 'active',
      UNIQUE(namespace, key)
    )
  `);
  db.run(`CREATE INDEX IF NOT EXISTS idx_memory_key_ns ON memory_entries(key, namespace)`);
  db.run(`CREATE INDEX IF NOT EXISTS idx_memory_namespace ON memory_entries(namespace)`);
  return db;
}
146
-
147
/**
 * Persist the in-memory sql.js database to disk. export() snapshots the
 * whole WASM-resident database as a byte array.
 */
function saveDb(db) {
  writeFileSync(DB_PATH, Buffer.from(db.export()));
}
151
-
152
/**
 * Generate a locally-unique entry id: timestamp plus 9 base-36 random
 * characters. slice(2, 11) replaces the deprecated
 * String.prototype.substr(2, 9) with identical output.
 */
function generateId() {
  return `mem_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
}
155
-
156
/**
 * Upsert one chunk into the code-map namespace. INSERT OR REPLACE plus
 * the UNIQUE(namespace, key) constraint overwrites any previous entry
 * for the same key; metadata and tags are stored as JSON text.
 */
function storeEntry(db, key, content, metadata = {}, tags = []) {
  const timestamp = Date.now();
  db.run(`
    INSERT OR REPLACE INTO memory_entries
    (id, key, namespace, content, metadata, tags, created_at, updated_at, status)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active')
  `, [
    generateId(),
    key,
    NAMESPACE,
    content,
    JSON.stringify(metadata),
    JSON.stringify(tags),
    timestamp,
    timestamp,
  ]);
}
165
-
166
// Remove every entry in the code-map namespace (used by --force to start
// a full regenerate from a clean slate).
function deleteNamespace(db) {
  db.run(`DELETE FROM memory_entries WHERE namespace = ?`, [NAMESPACE]);
}
169
-
170
/**
 * Count entries in the code-map namespace.
 * Uses sql.js prepare/bind/step to read one scalar; try/finally makes
 * sure the prepared statement's WASM resources are always freed.
 */
function countNamespace(db) {
  const stmt = db.prepare(`SELECT COUNT(*) as cnt FROM memory_entries WHERE namespace = ?`);
  try {
    stmt.bind([NAMESPACE]);
    return stmt.step() ? stmt.getAsObject().cnt : 0;
  } finally {
    stmt.free();
  }
}
178
-
179
- // ---------------------------------------------------------------------------
180
- // Source file enumeration — git ls-files with filesystem fallback
181
- // ---------------------------------------------------------------------------
182
-
183
/**
 * Read code_map config from moflo.yaml (directories, extensions, exclude).
 *
 * Uses a small hand-rolled parser (regex, no YAML dependency) that only
 * understands the flat `code_map:` block: `key: [a, b]` inline arrays and
 * `- item` block lists. Any parse or read failure returns the defaults.
 *
 * Fix: block-style list items previously APPENDED to the default arrays
 * (and, via the shallow copy, mutated `defaults` itself), while inline
 * arrays REPLACED them. Both forms now replace the default: the first
 * `- item` seen for a key resets that key's array.
 */
function readCodeMapConfig() {
  const defaults = {
    directories: ['src'],
    extensions: ['.ts', '.tsx', '.js', '.mjs', '.jsx'],
    exclude: [...EXCLUDE_DIRS],
  };
  try {
    const yamlPath = resolve(projectRoot, 'moflo.yaml');
    if (!existsSync(yamlPath)) return defaults;
    const content = readFileSync(yamlPath, 'utf-8');
    // Capture the indented lines that follow `code_map:`.
    const block = content.match(/code_map:\s*\n((?:\s+\w+:.*\n?|\s+- .*\n?)+)/);
    if (!block) return defaults;
    const result = { ...defaults };
    // Keys whose block-style list has been started; the first item for a
    // key replaces the default array instead of appending to it.
    const startedLists = new Set();
    let currentKey = null;
    for (const line of block[1].split('\n')) {
      const keyMatch = line.match(/^\s+(\w+):/);
      const itemMatch = line.match(/^\s+- (.+)/);
      if (keyMatch) {
        currentKey = keyMatch[1];
        // Inline array form: extensions: [".ts", ".tsx"]
        const inlineArray = line.match(/\[([^\]]+)\]/);
        if (inlineArray && (currentKey === 'extensions' || currentKey === 'exclude' || currentKey === 'directories')) {
          result[currentKey] = inlineArray[1].split(',').map(s => s.trim().replace(/^["']|["']$/g, ''));
        }
      } else if (itemMatch && currentKey) {
        if (!startedLists.has(currentKey)) {
          result[currentKey] = []; // replace the default, don't append to it
          startedLists.add(currentKey);
        }
        result[currentKey].push(itemMatch[1].trim().replace(/^["']|["']$/g, ''));
      }
    }
    return result;
  } catch {
    return defaults;
  }
}
218
-
219
/**
 * Walk a directory tree collecting source files (filesystem fallback).
 *
 * @param {string} dir - Directory relative to projectRoot ('' for root).
 * @param {Set<string>} extensions - File extensions to keep (with dot).
 * @param {Set<string>} excludeSet - Entry names to skip entirely.
 * @param {number} [maxDepth=8] - Recursion depth cap.
 * @param {number} [depth=0] - Current depth (internal).
 * @returns {string[]} Forward-slash relative paths; [] on unreadable dirs.
 */
function walkDir(dir, extensions, excludeSet, maxDepth = 8, depth = 0) {
  if (depth > maxDepth) return [];
  let dirents;
  try {
    dirents = readdirSync(resolve(projectRoot, dir), { withFileTypes: true });
  } catch {
    return []; // unreadable directory — treat as empty
  }
  const collected = [];
  for (const dirent of dirents) {
    if (excludeSet.has(dirent.name)) continue;
    // Forward slashes keep paths consistent across platforms.
    const childPath = dir ? `${dir}/${dirent.name}` : dirent.name;
    if (dirent.isDirectory()) {
      collected.push(...walkDir(childPath, extensions, excludeSet, maxDepth, depth + 1));
    } else if (dirent.isFile() && extensions.has(extname(dirent.name))) {
      collected.push(childPath);
    }
  }
  return collected;
}
240
-
241
/**
 * Enumerate candidate source files, relative to projectRoot.
 *
 * Prefers `git ls-files` (fast, honors .gitignore); if git is missing,
 * the directory is not a repo, or no files come back, falls back to
 * walking the directories configured in moflo.yaml.
 *
 * @returns {string[]} Relative file paths (possibly empty).
 */
function getSourceFiles() {
  // Try git ls-files first (fast, respects .gitignore)
  try {
    const raw = execSync(
      `git ls-files -- "*.ts" "*.tsx" "*.js" "*.mjs" "*.jsx"`,
      { cwd: projectRoot, encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 }
    ).trim();

    if (raw) {
      // Drop files under excluded top-level directories; both separators
      // are checked in case paths ever arrive with backslashes.
      const files = raw.split('\n').filter(f => {
        for (const ex of EXCLUDE_DIRS) {
          if (f.startsWith(ex + '/') || f.startsWith(ex + '\\')) return false;
        }
        return true;
      });
      if (files.length > 0) return files;
    }
  } catch {
    // git not available or not a git repo — fall through
  }

  // Fallback: walk configured directories from moflo.yaml
  log('git ls-files returned no files — falling back to filesystem walk');
  const config = readCodeMapConfig();
  const extSet = new Set(config.extensions);
  const excludeSet = new Set(config.exclude);
  const files = [];

  for (const dir of config.directories) {
    if (existsSync(resolve(projectRoot, dir))) {
      files.push(...walkDir(dir, extSet, excludeSet));
    }
  }

  return files;
}
277
-
278
/**
 * SHA-256 digest of the sorted file list. Sorting a copy makes the hash
 * order-independent and leaves the caller's array untouched. Note this
 * fingerprints the list of paths only, not file contents.
 */
function computeFileListHash(files) {
  const canonical = files.slice().sort().join('\n');
  return createHash('sha256').update(canonical).digest('hex');
}
282
-
283
/**
 * True when the current file-list hash matches the cached one, meaning
 * the index can be skipped. --force always invalidates; a missing cache
 * file means this is a first run.
 */
function isUnchanged(currentHash) {
  if (force || !existsSync(HASH_CACHE_PATH)) return false;
  return readFileSync(HASH_CACHE_PATH, 'utf-8').trim() === currentHash;
}
289
-
290
- // ---------------------------------------------------------------------------
291
- // Type extraction (regex-based, no AST)
292
- // ---------------------------------------------------------------------------
293
-
294
// Regexes for exported top-level declarations, tried in order per line.
// Capture groups: [1] declaration name; [2] extends clause (class and
// interface patterns only); [3] implements clause (class pattern only).
const TS_PATTERNS = [
  /^export\s+(?:default\s+)?(?:abstract\s+)?class\s+(\w+)(?:\s+extends\s+([\w.]+))?(?:\s+implements\s+([\w,\s.]+))?/,
  /^export\s+(?:default\s+)?interface\s+(\w+)(?:\s+extends\s+([\w,\s.]+))?/,
  /^export\s+(?:default\s+)?type\s+(\w+)\s*[=<]/,
  /^export\s+(?:const\s+)?enum\s+(\w+)/,
  /^export\s+(?:default\s+)?(?:async\s+)?function\s+(\w+)/,
  /^export\s+(?:default\s+)?const\s+(\w+)\s*[=:]/,
];

// Matches a MikroORM @Entity( decorator line; extractTypes uses it to
// flag the next exported declaration as an entity.
const ENTITY_DECORATOR = /@Entity\s*\(/;
304
-
305
/**
 * Extract exported top-level declarations from one source file.
 *
 * Regex-based, line-by-line (no AST): each trimmed line is tested against
 * TS_PATTERNS, and a one-shot `isEntityNext` flag — set when an @Entity(
 * decorator line is seen — marks the next matching export as an entity.
 *
 * @param {string} filePath - Path relative to projectRoot.
 * @returns {Array<{name:string, kind:string, bases:?string,
 *   implements:?string, isEntity:boolean, file:string}>}
 *   Empty array when the file is missing or unreadable.
 */
function extractTypes(filePath) {
  const fullPath = resolve(projectRoot, filePath);
  if (!existsSync(fullPath)) return [];

  let content;
  try {
    content = readFileSync(fullPath, 'utf-8');
  } catch {
    return []; // unreadable (permissions, binary, etc.) — skip silently
  }

  const lines = content.split('\n');
  const types = [];
  const seen = new Set(); // dedupe declarations by name within this file
  let isEntityNext = false;

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();

    // Decorator line: arm the entity flag for the next export.
    if (ENTITY_DECORATOR.test(line)) {
      isEntityNext = true;
      continue;
    }

    for (const pattern of TS_PATTERNS) {
      const m = line.match(pattern);
      if (m && m[1] && !seen.has(m[1])) {
        seen.add(m[1]);
        const kind = detectKind(line, m[1]);
        const bases = (m[2] || '').trim();
        const implements_ = (m[3] || '').trim();
        types.push({
          name: m[1],
          kind,
          bases: bases || null,
          implements: implements_ || null,
          isEntity: isEntityNext,
          file: filePath,
        });
        isEntityNext = false; // the flag applies to one declaration only
        break; // first matching pattern wins for this line
      }
    }

    // Disarm the entity flag if a non-decorator, non-export statement
    // intervenes before any export matched on this line.
    if (isEntityNext && !line.startsWith('@') && !line.startsWith('export') && line.length > 0) {
      isEntityNext = false;
    }
  }

  return types;
}
356
-
357
/**
 * Classify an export line by its declaration keyword. Checked in a fixed
 * order so e.g. `export const enum E` reports 'enum', not 'const'.
 * `name` is unused but kept for call-site compatibility.
 */
function detectKind(line, name) {
  const kindTests = [
    ['class', /\bclass\b/],
    ['interface', /\binterface\b/],
    ['type', /\btype\b/],
    ['enum', /\benum\b/],
    ['function', /\bfunction\b/],
    ['const', /\bconst\b/],
  ];
  for (const [kind, re] of kindTests) {
    if (re.test(line)) return kind;
  }
  return 'export'; // bare re-exports and anything unrecognized
}
366
-
367
- // ---------------------------------------------------------------------------
368
- // Project structure analysis
369
- // ---------------------------------------------------------------------------
370
-
371
/**
 * Map a relative file path to its logical project name.
 * Monorepo containers (packages/, back-office/, customer-portal/,
 * admin-console/, webhooks/) yield a two-segment name like
 * 'packages/core'; everything else yields the first path segment.
 */
function getProjectName(filePath) {
  const [first, second] = filePath.split('/');
  const twoSegmentRoots = new Set([
    'packages',
    'back-office',
    'customer-portal',
    'admin-console',
    'webhooks',
  ]);
  if (twoSegmentRoots.has(first) && second !== undefined) {
    return `${first}/${second}`;
  }
  return first;
}
384
-
385
/** Containing directory of a file path, normalized to forward slashes. */
function getDirectory(filePath) {
  return dirname(filePath).split('\\').join('/');
}
388
-
389
/**
 * Human-readable description for a directory, matched on its last path
 * segment against DIR_DESCRIPTIONS; null when the name is unknown.
 */
function getDirDescription(dirName) {
  const leaf = dirName.split('/').at(-1);
  return DIR_DESCRIPTIONS[leaf] ?? null;
}
393
-
394
/** Short language tag for a file path, keyed off its extension. */
function detectLanguage(filePath) {
  switch (extname(filePath)) {
    case '.tsx':
    case '.jsx':
      return 'tsx';
    case '.ts':
      return 'ts';
    case '.mjs':
      return 'esm';
    default:
      return 'js';
  }
}
401
-
402
- // ---------------------------------------------------------------------------
403
- // Chunk generators
404
- // ---------------------------------------------------------------------------
405
-
406
/**
 * Build one `project:` overview chunk per top-level project.
 *
 * Groups each project's type names by directory (relative to the project
 * root) and renders a compact bird's-eye listing: at most 8 type names
 * per directory with a "+N more" overflow marker.
 *
 * @param {Object<string,string[]>} filesByProject - project -> file paths.
 * @param {Object<string,object[]>} typesByProject - project -> extracted types.
 * @returns {object[]} Chunks with key/content/metadata/tags for storeEntry.
 */
function generateProjectOverviews(filesByProject, typesByProject) {
  const chunks = [];

  for (const [project, files] of Object.entries(filesByProject)) {
    const types = typesByProject[project] || [];
    const lang = detectProjectLang(files);
    const dirMap = {};

    // Group type names by directory relative to the project root.
    for (const t of types) {
      const rel = relative(project, dirname(t.file)).replace(/\\/g, '/') || '(root)';
      if (!dirMap[rel]) dirMap[rel] = [];
      dirMap[rel].push(t.name);
    }

    let content = `# ${project} [${lang}, ${files.length} files, ${types.length} types]\n\n`;

    const sortedDirs = Object.keys(dirMap).sort();
    for (const dir of sortedDirs) {
      const names = dirMap[dir];
      const desc = getDirDescription(dir);
      const descStr = desc ? ` -- ${desc}` : '';
      // Cap at 8 names per directory to keep the overview compact.
      const shown = names.slice(0, 8).join(', ');
      const overflow = names.length > 8 ? `, ... (+${names.length - 8} more)` : '';
      content += ` ${dir}${descStr}: ${shown}${overflow}\n`;
    }

    chunks.push({
      key: `project:${project}`,
      content: content.trim(),
      metadata: { kind: 'project-overview', project, language: lang, fileCount: files.length, typeCount: types.length },
      tags: ['project', project],
    });
  }

  return chunks;
}
442
-
443
/**
 * Guess a project's dominant language from its file extensions.
 * .tsx/.jsx count toward React, .ts toward TypeScript, everything else
 * toward JavaScript; ties between TS and JS resolve to TypeScript.
 */
function detectProjectLang(files) {
  const counts = { react: 0, ts: 0, js: 0 };
  for (const file of files) {
    const ext = extname(file);
    if (ext === '.tsx' || ext === '.jsx') counts.react += 1;
    else if (ext === '.ts') counts.ts += 1;
    else counts.js += 1;
  }
  if (counts.react > counts.ts && counts.react > counts.js) return 'React/TypeScript';
  return counts.ts >= counts.js ? 'TypeScript' : 'JavaScript';
}
455
-
456
/**
 * Build one `dir:` detail chunk per directory holding 2+ types.
 * Single-type directories are skipped — their content is already covered
 * at file granularity. Types are listed alphabetically with any extends/
 * implements annotations and the defining file name.
 */
function generateDirectoryDetails(typesByDir) {
  const chunks = [];

  for (const [dir, types] of Object.entries(typesByDir)) {
    if (types.length < 2) continue;

    const desc = getDirDescription(dir);
    const headerLines = [`# ${dir} (${types.length} types)\n`];
    if (desc) headerLines.push(`${desc}\n`);
    headerLines.push('\n');

    const bodyLines = [...types]
      .sort((a, b) => a.name.localeCompare(b.name))
      .map((t) => {
        const annotations = [];
        if (t.bases) annotations.push(`: ${t.bases}`);
        if (t.implements) annotations.push(`: ${t.implements}`);
        const suffixStr = annotations.length ? ` ${annotations.join(' ')}` : '';
        return ` ${t.name}${suffixStr} (${basename(t.file)})\n`;
      });

    chunks.push({
      key: `dir:${dir}`,
      content: (headerLines.join('') + bodyLines.join('')).trim(),
      metadata: { kind: 'directory-detail', directory: dir, typeCount: types.length },
      tags: ['directory', dir.split('/')[0]],
    });
  }

  return chunks;
}
487
-
488
/**
 * Build batched `iface-map:` chunks mapping each interface to the classes
 * that implement (or extend) it.
 *
 * Two passes over allTypes: pass 1 registers every interface by name;
 * pass 2 attaches each class whose implements-list or base-class name
 * matches a registered interface. Interfaces with no implementations are
 * dropped; the rest are alphabetized and batched IFACE_MAP_BATCH per
 * chunk.
 *
 * @param {object[]} allTypes - All extracted type records.
 * @returns {object[]} Chunks for storeEntry ([] when nothing is mapped).
 */
function generateInterfaceMaps(allTypes) {
  const interfaces = new Map();

  // Pass 1: register every exported interface.
  for (const t of allTypes) {
    if (t.kind === 'interface') {
      if (!interfaces.has(t.name)) {
        interfaces.set(t.name, { defined: t.file, implementations: [] });
      }
    }
  }

  // Pass 2: attach implementing classes. The base class is included so
  // `class X extends SomeInterface` also counts as an implementation.
  for (const t of allTypes) {
    if (t.kind !== 'class') continue;
    const impls = t.implements ? t.implements.split(',').map(s => s.trim()) : [];
    const bases = t.bases ? [t.bases.trim()] : [];
    for (const iface of [...impls, ...bases]) {
      if (interfaces.has(iface)) {
        interfaces.get(iface).implementations.push({
          name: t.name,
          project: getProjectName(t.file),
        });
      }
    }
  }

  // Keep only interfaces that have implementations, alphabetized by name.
  const mapped = [...interfaces.entries()]
    .filter(([, v]) => v.implementations.length > 0)
    .sort(([a], [b]) => a.localeCompare(b));

  if (mapped.length === 0) return [];

  const chunks = [];
  const totalBatches = Math.ceil(mapped.length / IFACE_MAP_BATCH);

  for (let i = 0; i < mapped.length; i += IFACE_MAP_BATCH) {
    const batch = mapped.slice(i, i + IFACE_MAP_BATCH);
    const batchNum = Math.floor(i / IFACE_MAP_BATCH) + 1;

    let content = `# Interface-to-Implementation Map (${batchNum}/${totalBatches})\n\n`;
    for (const [name, info] of batch) {
      const implStr = info.implementations
        .map(impl => `${impl.name} (${impl.project})`)
        .join(', ');
      content += ` ${name} -> ${implStr}\n`;
    }

    chunks.push({
      key: `iface-map:${batchNum}`,
      content: content.trim(),
      metadata: { kind: 'interface-map', batch: batchNum, totalBatches, count: batch.length },
      tags: ['interface-map'],
    });
  }

  return chunks;
}
544
-
545
/**
 * Build batched `type-index:` chunks mapping every type name to its file
 * path and language tag. Alphabetical ordering keeps batch membership
 * stable across runs; TYPE_INDEX_BATCH names go into each chunk.
 */
function generateTypeIndex(allTypes) {
  const ordered = [...allTypes].sort((a, b) => a.name.localeCompare(b.name));
  const totalBatches = Math.ceil(ordered.length / TYPE_INDEX_BATCH);
  const chunks = [];

  for (let batchNum = 1; batchNum <= totalBatches; batchNum++) {
    const start = (batchNum - 1) * TYPE_INDEX_BATCH;
    const batch = ordered.slice(start, start + TYPE_INDEX_BATCH);

    const body = batch
      .map((t) => ` ${t.name} -> ${t.file} [${detectLanguage(t.file)}]\n`)
      .join('');
    const content = `# Type Index (batch ${batchNum}, ${batch.length} types)\n\n` + body;

    chunks.push({
      key: `type-index:${batchNum}`,
      content: content.trim(),
      metadata: { kind: 'type-index', batch: batchNum, totalBatches, count: batch.length },
      tags: ['type-index'],
    });
  }

  return chunks;
}
570
-
571
- /**
572
- * NEW: Generate file-level entries for each source file that has exported types.
573
- *
574
- * Each file gets its own entry keyed as `file:<path>`, containing:
575
- * - The file path
576
- * - All exported type names with their kind, base class, and implementations
577
- * - Whether it's a MikroORM entity
578
- * - The project and directory it belongs to
579
- *
580
- * This enables precise semantic search: a query for "CompanyAuditLog" will match
581
- * the specific file entry rather than being diluted across a batch of 80 types.
582
- */
583
- function generateFileEntries(typesByFile) {
584
- const chunks = [];
585
-
586
- for (const [filePath, types] of Object.entries(typesByFile)) {
587
- if (types.length === 0) continue;
588
-
589
- const project = getProjectName(filePath);
590
- const dir = getDirectory(filePath);
591
- const dirDesc = getDirDescription(dir);
592
- const lang = detectLanguage(filePath);
593
- const fileName = basename(filePath);
594
-
595
- // Build a rich, searchable content string
596
- let content = `# ${fileName} (${filePath})\n`;
597
- content += `Project: ${project} | Language: ${lang}\n`;
598
- if (dirDesc) content += `Directory: ${dirDesc}\n`;
599
- content += '\nExported types:\n';
600
-
601
- for (const t of types) {
602
- let line = ` ${t.kind} ${t.name}`;
603
- if (t.isEntity) line += ' [MikroORM entity]';
604
- if (t.bases) line += ` extends ${t.bases}`;
605
- if (t.implements) line += ` implements ${t.implements}`;
606
- content += line + '\n';
607
- }
608
-
609
- // Build tags for filtering
610
- const tags = ['file', project];
611
- if (types.some(t => t.isEntity)) tags.push('entity');
612
- if (types.some(t => t.kind === 'interface')) tags.push('interface');
613
- if (filePath.includes('/services/')) tags.push('service');
614
- if (filePath.includes('/routes/')) tags.push('route');
615
- if (filePath.includes('/middleware/')) tags.push('middleware');
616
-
617
- chunks.push({
618
- key: `file:${filePath}`,
619
- content: content.trim(),
620
- metadata: {
621
- kind: 'file-detail',
622
- filePath,
623
- project,
624
- directory: dir,
625
- language: lang,
626
- typeCount: types.length,
627
- hasEntities: types.some(t => t.isEntity),
628
- typeNames: types.map(t => t.name),
629
- },
630
- tags,
631
- });
632
- }
633
-
634
- return chunks;
635
- }
636
-
637
- // ---------------------------------------------------------------------------
638
- // Main
639
- // ---------------------------------------------------------------------------
640
-
641
- async function main() {
642
- const startTime = Date.now();
643
-
644
- log(`Project root: ${projectRoot}`);
645
-
646
- // 1. Get source files
647
- log('Enumerating source files via git ls-files...');
648
- const files = getSourceFiles();
649
- log(`Found ${files.length} source files`);
650
-
651
- if (files.length === 0) {
652
- log('No source files found — nothing to index');
653
- return;
654
- }
655
-
656
- // 2. Check hash for incremental skip
657
- const currentHash = computeFileListHash(files);
658
-
659
- if (statsOnly) {
660
- const db = await getDb();
661
- const count = countNamespace(db);
662
- db.close();
663
- log(`Stats: ${files.length} source files, ${count} chunks in code-map namespace`);
664
- log(`File list hash: ${currentHash.slice(0, 12)}...`);
665
- return;
666
- }
667
-
668
- if (isUnchanged(currentHash)) {
669
- const db = await getDb();
670
- const count = countNamespace(db);
671
- db.close();
672
- if (count > 0) {
673
- log(`Skipping — file list unchanged (${count} chunks in DB, hash ${currentHash.slice(0, 12)}...)`);
674
- return;
675
- }
676
- log('File list unchanged but no chunks in DB — forcing regeneration');
677
- }
678
-
679
- // 3. Extract types from all files
680
- log('Extracting type declarations...');
681
- const allTypes = [];
682
- const filesByProject = {};
683
- const typesByProject = {};
684
- const typesByDir = {};
685
- const typesByFile = {};
686
-
687
- for (const file of files) {
688
- const project = getProjectName(file);
689
- if (!filesByProject[project]) filesByProject[project] = [];
690
- filesByProject[project].push(file);
691
-
692
- const types = extractTypes(file);
693
-
694
- // Track types per file for file-level entries
695
- if (types.length > 0) {
696
- typesByFile[file] = types;
697
- }
698
-
699
- for (const t of types) {
700
- allTypes.push(t);
701
-
702
- if (!typesByProject[project]) typesByProject[project] = [];
703
- typesByProject[project].push(t);
704
-
705
- const dir = getDirectory(t.file);
706
- if (!typesByDir[dir]) typesByDir[dir] = [];
707
- typesByDir[dir].push(t);
708
- }
709
- }
710
-
711
- log(`Extracted ${allTypes.length} type declarations from ${Object.keys(filesByProject).length} projects`);
712
- log(`Files with exported types: ${Object.keys(typesByFile).length}`);
713
-
714
- // 4. Generate all chunk types
715
- log('Generating chunks...');
716
- const projectChunks = generateProjectOverviews(filesByProject, typesByProject);
717
- const dirChunks = generateDirectoryDetails(typesByDir);
718
- const ifaceChunks = generateInterfaceMaps(allTypes);
719
- const typeIdxChunks = generateTypeIndex(allTypes);
720
- const fileChunks = generateFileEntries(typesByFile);
721
-
722
- const allChunks = [...projectChunks, ...dirChunks, ...ifaceChunks, ...typeIdxChunks, ...fileChunks];
723
-
724
- log(`Generated ${allChunks.length} chunks:`);
725
- log(` Project overviews: ${projectChunks.length}`);
726
- log(` Directory details: ${dirChunks.length}`);
727
- log(` Interface maps: ${ifaceChunks.length}`);
728
- log(` Type index: ${typeIdxChunks.length}`);
729
- log(` File entries: ${fileChunks.length} (NEW — file-level granularity)`);
730
-
731
- // 5. Write to database
732
- log('Writing to memory database...');
733
- const db = await getDb();
734
- deleteNamespace(db);
735
-
736
- for (const chunk of allChunks) {
737
- storeEntry(db, chunk.key, chunk.content, chunk.metadata, chunk.tags);
738
- }
739
-
740
- saveDb(db);
741
- db.close();
742
-
743
- // 6. Save hash for incremental caching
744
- writeFileSync(HASH_CACHE_PATH, currentHash, 'utf-8');
745
-
746
- const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
747
- log(`Done in ${elapsed}s — ${allChunks.length} chunks written to code-map namespace`);
748
-
749
- // 7. Generate embeddings inline (not detached — ensures Xenova runs reliably)
750
- if (!skipEmbeddings) {
751
- // Prefer moflo's own bin script, fall back to project's .claude/scripts/
752
- const embedCandidates = [
753
- resolve(dirname(fileURLToPath(import.meta.url)), 'build-embeddings.mjs'),
754
- resolve(projectRoot, '.claude/scripts/build-embeddings.mjs'),
755
- ];
756
- const embedScript = embedCandidates.find(p => existsSync(p));
757
- if (embedScript) {
758
- log('Generating embeddings for code-map...');
759
- try {
760
- execSync(`node "${embedScript}" --namespace code-map`, {
761
- cwd: projectRoot,
762
- stdio: 'inherit',
763
- timeout: 120000,
764
- windowsHide: true,
765
- });
766
- } catch (err) {
767
- log(`Warning: embedding generation failed: ${err.message?.split('\n')[0]}`);
768
- }
769
- }
770
- }
771
- }
772
-
773
- main().catch(err => {
774
- console.error('[code-map] Fatal error:', err);
775
- process.exit(1);
776
- });
2
+ /**
3
+ * Generate structural code map for a monorepo or project.
4
+ *
5
+ * Produces five chunk types stored in the `code-map` namespace of .swarm/memory.db:
6
+ * 1. project: — one per top-level project directory (bird's-eye overview)
7
+ * 2. dir: — one per directory with 2+ exported types (drill-down detail)
8
+ * 3. iface-map: — batched interface-to-implementation mappings
9
+ * 4. type-index: — batched type-name-to-file-path lookups
10
+ * 5. file: — ONE PER FILE with exported types (file-level granularity)
11
+ *
12
+ * The `file:` entries are the key improvement — they enable precise semantic search
13
+ * for individual types, entities, and services instead of diluting results across
14
+ * large batches.
15
+ *
16
+ * Design: regex-based extraction (no AST parser), incremental via SHA-256 hash,
17
+ * stores in sql.js memory DB, triggers embedding generation in background.
18
+ *
19
+ * Usage:
20
+ * node node_modules/moflo/bin/generate-code-map.mjs # Incremental
21
+ * node node_modules/moflo/bin/generate-code-map.mjs --force # Full regenerate
22
+ * node node_modules/moflo/bin/generate-code-map.mjs --verbose # Detailed logging
23
+ * node node_modules/moflo/bin/generate-code-map.mjs --no-embeddings # Skip embedding generation
24
+ * node node_modules/moflo/bin/generate-code-map.mjs --stats # Print stats and exit
25
+ * npx flo-codemap # Via npx
26
+ */
27
+
28
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync } from 'fs';
29
+ import { resolve, dirname, relative, basename, extname } from 'path';
30
+ import { fileURLToPath } from 'url';
31
+ import { createHash } from 'crypto';
32
+ import { execSync, spawn } from 'child_process';
33
+ import { mofloResolveURL } from './lib/moflo-resolve.mjs';
34
+ const initSqlJs = (await import(mofloResolveURL('sql.js'))).default;
35
+
36
+
37
+ const __dirname = dirname(fileURLToPath(import.meta.url));
38
+
39
+ // Detect project root: walk up from cwd to find a package.json
40
+ function findProjectRoot() {
41
+ let dir = process.cwd();
42
+ const root = resolve(dir, '/');
43
+ while (dir !== root) {
44
+ if (existsSync(resolve(dir, 'package.json'))) return dir;
45
+ dir = dirname(dir);
46
+ }
47
+ return process.cwd();
48
+ }
49
+
50
+ const projectRoot = findProjectRoot();
51
+ const NAMESPACE = 'code-map';
52
+ const DB_PATH = resolve(projectRoot, '.swarm/memory.db');
53
+ const HASH_CACHE_PATH = resolve(projectRoot, '.swarm/code-map-hash.txt');
54
+
55
+ // Directories to exclude from indexing
56
+ const EXCLUDE_DIRS = [
57
+ 'node_modules', 'dist', 'build', '.next', 'coverage',
58
+ '.claude', 'template', 'back-office-template',
59
+ ];
60
+
61
+ // Heuristic descriptions for well-known directory names
62
+ const DIR_DESCRIPTIONS = {
63
+ entities: 'MikroORM entity definitions',
64
+ services: 'business logic services',
65
+ routes: 'Fastify route handlers',
66
+ middleware: 'request middleware (auth, validation, tenancy)',
67
+ schemas: 'Zod validation schemas',
68
+ types: 'TypeScript type definitions',
69
+ utils: 'utility helpers',
70
+ config: 'configuration',
71
+ migrations: 'database migrations',
72
+ scripts: 'CLI scripts',
73
+ components: 'React components',
74
+ pages: 'route page components',
75
+ contexts: 'React context providers',
76
+ hooks: 'React custom hooks',
77
+ layout: 'app shell layout',
78
+ themes: 'MUI theme configuration',
79
+ api: 'API client layer',
80
+ locales: 'i18n translation files',
81
+ tests: 'test suites',
82
+ e2e: 'end-to-end tests',
83
+ providers: 'dependency injection providers',
84
+ };
85
+
86
+ // Batch sizes for chunking
87
+ const IFACE_MAP_BATCH = 20;
88
+ const TYPE_INDEX_BATCH = 30; // Reduced from 80 for better search relevance
89
+
90
+ // Parse args
91
+ const args = process.argv.slice(2);
92
+ const force = args.includes('--force');
93
+ const verbose = args.includes('--verbose') || args.includes('-v');
94
+ const skipEmbeddings = args.includes('--no-embeddings');
95
+ const statsOnly = args.includes('--stats');
96
+
97
+ function log(msg) { console.log(`[code-map] ${msg}`); }
98
+ function debug(msg) { if (verbose) console.log(`[code-map] ${msg}`); }
99
+
100
+ // ---------------------------------------------------------------------------
101
+ // Database helpers
102
+ // ---------------------------------------------------------------------------
103
+
104
+ function ensureDbDir() {
105
+ const dir = dirname(DB_PATH);
106
+ if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
107
+ }
108
+
109
+ async function getDb() {
110
+ ensureDbDir();
111
+ const SQL = await initSqlJs();
112
+ let db;
113
+ if (existsSync(DB_PATH)) {
114
+ const buffer = readFileSync(DB_PATH);
115
+ db = new SQL.Database(buffer);
116
+ } else {
117
+ db = new SQL.Database();
118
+ }
119
+
120
+ db.run(`
121
+ CREATE TABLE IF NOT EXISTS memory_entries (
122
+ id TEXT PRIMARY KEY,
123
+ key TEXT NOT NULL,
124
+ namespace TEXT DEFAULT 'default',
125
+ content TEXT NOT NULL,
126
+ type TEXT DEFAULT 'semantic',
127
+ embedding TEXT,
128
+ embedding_model TEXT DEFAULT 'local',
129
+ embedding_dimensions INTEGER,
130
+ tags TEXT,
131
+ metadata TEXT,
132
+ owner_id TEXT,
133
+ created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
134
+ updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
135
+ expires_at INTEGER,
136
+ last_accessed_at INTEGER,
137
+ access_count INTEGER DEFAULT 0,
138
+ status TEXT DEFAULT 'active',
139
+ UNIQUE(namespace, key)
140
+ )
141
+ `);
142
+ db.run(`CREATE INDEX IF NOT EXISTS idx_memory_key_ns ON memory_entries(key, namespace)`);
143
+ db.run(`CREATE INDEX IF NOT EXISTS idx_memory_namespace ON memory_entries(namespace)`);
144
+ return db;
145
+ }
146
+
147
+ function saveDb(db) {
148
+ const data = db.export();
149
+ writeFileSync(DB_PATH, Buffer.from(data));
150
+ }
151
+
152
+ function generateId() {
153
+ return `mem_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
154
+ }
155
+
156
+ function storeEntry(db, key, content, metadata = {}, tags = []) {
157
+ const now = Date.now();
158
+ const id = generateId();
159
+ db.run(`
160
+ INSERT OR REPLACE INTO memory_entries
161
+ (id, key, namespace, content, metadata, tags, created_at, updated_at, status)
162
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active')
163
+ `, [id, key, NAMESPACE, content, JSON.stringify(metadata), JSON.stringify(tags), now, now]);
164
+ }
165
+
166
+ function deleteNamespace(db) {
167
+ db.run(`DELETE FROM memory_entries WHERE namespace = ?`, [NAMESPACE]);
168
+ }
169
+
170
+ function countNamespace(db) {
171
+ const stmt = db.prepare(`SELECT COUNT(*) as cnt FROM memory_entries WHERE namespace = ?`);
172
+ stmt.bind([NAMESPACE]);
173
+ let count = 0;
174
+ if (stmt.step()) count = stmt.getAsObject().cnt;
175
+ stmt.free();
176
+ return count;
177
+ }
178
+
179
+ // ---------------------------------------------------------------------------
180
+ // Source file enumeration — git ls-files with filesystem fallback
181
+ // ---------------------------------------------------------------------------
182
+
183
+ /** Read code_map config from moflo.yaml (directories, extensions, exclude). */
184
+ function readCodeMapConfig() {
185
+ const defaults = {
186
+ directories: ['src'],
187
+ extensions: ['.ts', '.tsx', '.js', '.mjs', '.jsx'],
188
+ exclude: [...EXCLUDE_DIRS],
189
+ };
190
+ try {
191
+ const yamlPath = resolve(projectRoot, 'moflo.yaml');
192
+ if (!existsSync(yamlPath)) return defaults;
193
+ const content = readFileSync(yamlPath, 'utf-8');
194
+ // Simple YAML parsing for code_map block
195
+ const block = content.match(/code_map:\s*\n((?:\s+\w+:.*\n?|\s+- .*\n?)+)/);
196
+ if (!block) return defaults;
197
+ const lines = block[1].split('\n');
198
+ let currentKey = null;
199
+ const result = { ...defaults };
200
+ for (const line of lines) {
201
+ const keyMatch = line.match(/^\s+(\w+):/);
202
+ const itemMatch = line.match(/^\s+- (.+)/);
203
+ if (keyMatch) {
204
+ currentKey = keyMatch[1];
205
+ // Inline array: extensions: [".ts", ".tsx"]
206
+ const inlineArray = line.match(/\[([^\]]+)\]/);
207
+ if (inlineArray && (currentKey === 'extensions' || currentKey === 'exclude' || currentKey === 'directories')) {
208
+ result[currentKey] = inlineArray[1].split(',').map(s => s.trim().replace(/^["']|["']$/g, ''));
209
+ }
210
+ } else if (itemMatch && currentKey) {
211
+ if (!Array.isArray(result[currentKey])) result[currentKey] = [];
212
+ result[currentKey].push(itemMatch[1].trim().replace(/^["']|["']$/g, ''));
213
+ }
214
+ }
215
+ return result;
216
+ } catch { return defaults; }
217
+ }
218
+
219
+ /** Walk a directory tree collecting source files (filesystem fallback). */
220
+ function walkDir(dir, extensions, excludeSet, maxDepth = 8, depth = 0) {
221
+ if (depth > maxDepth) return [];
222
+ const results = [];
223
+ let entries;
224
+ try {
225
+ entries = readdirSync(resolve(projectRoot, dir), { withFileTypes: true });
226
+ } catch { return []; }
227
+ for (const entry of entries) {
228
+ if (excludeSet.has(entry.name)) continue;
229
+ // Use forward slashes for consistent cross-platform paths
230
+ const rel = dir ? `${dir}/${entry.name}` : entry.name;
231
+ if (entry.isDirectory()) {
232
+ results.push(...walkDir(rel, extensions, excludeSet, maxDepth, depth + 1));
233
+ } else if (entry.isFile()) {
234
+ const ext = extname(entry.name);
235
+ if (extensions.has(ext)) results.push(rel);
236
+ }
237
+ }
238
+ return results;
239
+ }
240
+
241
+ function getSourceFiles() {
242
+ // Try git ls-files first (fast, respects .gitignore)
243
+ try {
244
+ const raw = execSync(
245
+ `git ls-files -- "*.ts" "*.tsx" "*.js" "*.mjs" "*.jsx"`,
246
+ { cwd: projectRoot, encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 }
247
+ ).trim();
248
+
249
+ if (raw) {
250
+ const files = raw.split('\n').filter(f => {
251
+ for (const ex of EXCLUDE_DIRS) {
252
+ if (f.startsWith(ex + '/') || f.startsWith(ex + '\\')) return false;
253
+ }
254
+ return true;
255
+ });
256
+ if (files.length > 0) return files;
257
+ }
258
+ } catch {
259
+ // git not available or not a git repo — fall through
260
+ }
261
+
262
+ // Fallback: walk configured directories from moflo.yaml
263
+ log('git ls-files returned no files — falling back to filesystem walk');
264
+ const config = readCodeMapConfig();
265
+ const extSet = new Set(config.extensions);
266
+ const excludeSet = new Set(config.exclude);
267
+ const files = [];
268
+
269
+ for (const dir of config.directories) {
270
+ if (existsSync(resolve(projectRoot, dir))) {
271
+ files.push(...walkDir(dir, extSet, excludeSet));
272
+ }
273
+ }
274
+
275
+ return files;
276
+ }
277
+
278
+ function computeFileListHash(files) {
279
+ const sorted = [...files].sort();
280
+ return createHash('sha256').update(sorted.join('\n')).digest('hex');
281
+ }
282
+
283
+ function isUnchanged(currentHash) {
284
+ if (force) return false;
285
+ if (!existsSync(HASH_CACHE_PATH)) return false;
286
+ const cached = readFileSync(HASH_CACHE_PATH, 'utf-8').trim();
287
+ return cached === currentHash;
288
+ }
289
+
290
+ // ---------------------------------------------------------------------------
291
+ // Type extraction (regex-based, no AST)
292
+ // ---------------------------------------------------------------------------
293
+
294
+ const TS_PATTERNS = [
295
+ /^export\s+(?:default\s+)?(?:abstract\s+)?class\s+(\w+)(?:\s+extends\s+([\w.]+))?(?:\s+implements\s+([\w,\s.]+))?/,
296
+ /^export\s+(?:default\s+)?interface\s+(\w+)(?:\s+extends\s+([\w,\s.]+))?/,
297
+ /^export\s+(?:default\s+)?type\s+(\w+)\s*[=<]/,
298
+ /^export\s+(?:const\s+)?enum\s+(\w+)/,
299
+ /^export\s+(?:default\s+)?(?:async\s+)?function\s+(\w+)/,
300
+ /^export\s+(?:default\s+)?const\s+(\w+)\s*[=:]/,
301
+ ];
302
+
303
+ const ENTITY_DECORATOR = /@Entity\s*\(/;
304
+
305
+ function extractTypes(filePath) {
306
+ const fullPath = resolve(projectRoot, filePath);
307
+ if (!existsSync(fullPath)) return [];
308
+
309
+ let content;
310
+ try {
311
+ content = readFileSync(fullPath, 'utf-8');
312
+ } catch {
313
+ return [];
314
+ }
315
+
316
+ const lines = content.split('\n');
317
+ const types = [];
318
+ const seen = new Set();
319
+ let isEntityNext = false;
320
+
321
+ for (let i = 0; i < lines.length; i++) {
322
+ const line = lines[i].trim();
323
+
324
+ if (ENTITY_DECORATOR.test(line)) {
325
+ isEntityNext = true;
326
+ continue;
327
+ }
328
+
329
+ for (const pattern of TS_PATTERNS) {
330
+ const m = line.match(pattern);
331
+ if (m && m[1] && !seen.has(m[1])) {
332
+ seen.add(m[1]);
333
+ const kind = detectKind(line, m[1]);
334
+ const bases = (m[2] || '').trim();
335
+ const implements_ = (m[3] || '').trim();
336
+ types.push({
337
+ name: m[1],
338
+ kind,
339
+ bases: bases || null,
340
+ implements: implements_ || null,
341
+ isEntity: isEntityNext,
342
+ file: filePath,
343
+ });
344
+ isEntityNext = false;
345
+ break;
346
+ }
347
+ }
348
+
349
+ if (isEntityNext && !line.startsWith('@') && !line.startsWith('export') && line.length > 0) {
350
+ isEntityNext = false;
351
+ }
352
+ }
353
+
354
+ return types;
355
+ }
356
+
357
+ function detectKind(line, name) {
358
+ if (/\bclass\b/.test(line)) return 'class';
359
+ if (/\binterface\b/.test(line)) return 'interface';
360
+ if (/\btype\b/.test(line)) return 'type';
361
+ if (/\benum\b/.test(line)) return 'enum';
362
+ if (/\bfunction\b/.test(line)) return 'function';
363
+ if (/\bconst\b/.test(line)) return 'const';
364
+ return 'export';
365
+ }
366
+
367
+ // ---------------------------------------------------------------------------
368
+ // Project structure analysis
369
+ // ---------------------------------------------------------------------------
370
+
371
+ function getProjectName(filePath) {
372
+ const parts = filePath.split('/');
373
+
374
+ if (parts[0] === 'packages' && parts.length >= 2) return `${parts[0]}/${parts[1]}`;
375
+ if (parts[0] === 'back-office' && parts.length >= 2) return `${parts[0]}/${parts[1]}`;
376
+ if (parts[0] === 'customer-portal' && parts.length >= 2) return `${parts[0]}/${parts[1]}`;
377
+ if (parts[0] === 'admin-console' && parts.length >= 2) return `${parts[0]}/${parts[1]}`;
378
+ if (parts[0] === 'webhooks' && parts.length >= 2) return `${parts[0]}/${parts[1]}`;
379
+ if (parts[0] === 'mobile-app') return 'mobile-app';
380
+ if (parts[0] === 'tests') return 'tests';
381
+ if (parts[0] === 'scripts') return 'scripts';
382
+ return parts[0];
383
+ }
384
+
385
+ function getDirectory(filePath) {
386
+ return dirname(filePath).replace(/\\/g, '/');
387
+ }
388
+
389
+ function getDirDescription(dirName) {
390
+ const last = dirName.split('/').pop();
391
+ return DIR_DESCRIPTIONS[last] || null;
392
+ }
393
+
394
+ function detectLanguage(filePath) {
395
+ const ext = extname(filePath);
396
+ if (ext === '.tsx' || ext === '.jsx') return 'tsx';
397
+ if (ext === '.ts') return 'ts';
398
+ if (ext === '.mjs') return 'esm';
399
+ return 'js';
400
+ }
401
+
402
+ // ---------------------------------------------------------------------------
403
+ // Chunk generators
404
+ // ---------------------------------------------------------------------------
405
+
406
+ function generateProjectOverviews(filesByProject, typesByProject) {
407
+ const chunks = [];
408
+
409
+ for (const [project, files] of Object.entries(filesByProject)) {
410
+ const types = typesByProject[project] || [];
411
+ const lang = detectProjectLang(files);
412
+ const dirMap = {};
413
+
414
+ for (const t of types) {
415
+ const rel = relative(project, dirname(t.file)).replace(/\\/g, '/') || '(root)';
416
+ if (!dirMap[rel]) dirMap[rel] = [];
417
+ dirMap[rel].push(t.name);
418
+ }
419
+
420
+ let content = `# ${project} [${lang}, ${files.length} files, ${types.length} types]\n\n`;
421
+
422
+ const sortedDirs = Object.keys(dirMap).sort();
423
+ for (const dir of sortedDirs) {
424
+ const names = dirMap[dir];
425
+ const desc = getDirDescription(dir);
426
+ const descStr = desc ? ` -- ${desc}` : '';
427
+ const shown = names.slice(0, 8).join(', ');
428
+ const overflow = names.length > 8 ? `, ... (+${names.length - 8} more)` : '';
429
+ content += ` ${dir}${descStr}: ${shown}${overflow}\n`;
430
+ }
431
+
432
+ chunks.push({
433
+ key: `project:${project}`,
434
+ content: content.trim(),
435
+ metadata: { kind: 'project-overview', project, language: lang, fileCount: files.length, typeCount: types.length },
436
+ tags: ['project', project],
437
+ });
438
+ }
439
+
440
+ return chunks;
441
+ }
442
+
443
+ function detectProjectLang(files) {
444
+ let tsx = 0, ts = 0, js = 0;
445
+ for (const f of files) {
446
+ const ext = extname(f);
447
+ if (ext === '.tsx' || ext === '.jsx') tsx++;
448
+ else if (ext === '.ts') ts++;
449
+ else js++;
450
+ }
451
+ if (tsx > ts && tsx > js) return 'React/TypeScript';
452
+ if (ts >= js) return 'TypeScript';
453
+ return 'JavaScript';
454
+ }
455
+
456
+ function generateDirectoryDetails(typesByDir) {
457
+ const chunks = [];
458
+
459
+ for (const [dir, types] of Object.entries(typesByDir)) {
460
+ if (types.length < 2) continue;
461
+
462
+ const desc = getDirDescription(dir);
463
+ let content = `# ${dir} (${types.length} types)\n`;
464
+ if (desc) content += `${desc}\n`;
465
+ content += '\n';
466
+
467
+ const sorted = [...types].sort((a, b) => a.name.localeCompare(b.name));
468
+ for (const t of sorted) {
469
+ const suffix = [];
470
+ if (t.bases) suffix.push(`: ${t.bases}`);
471
+ if (t.implements) suffix.push(`: ${t.implements}`);
472
+ const suffixStr = suffix.length ? ` ${suffix.join(' ')}` : '';
473
+ const fileName = basename(t.file);
474
+ content += ` ${t.name}${suffixStr} (${fileName})\n`;
475
+ }
476
+
477
+ chunks.push({
478
+ key: `dir:${dir}`,
479
+ content: content.trim(),
480
+ metadata: { kind: 'directory-detail', directory: dir, typeCount: types.length },
481
+ tags: ['directory', dir.split('/')[0]],
482
+ });
483
+ }
484
+
485
+ return chunks;
486
+ }
487
+
488
+ function generateInterfaceMaps(allTypes) {
489
+ const interfaces = new Map();
490
+
491
+ for (const t of allTypes) {
492
+ if (t.kind === 'interface') {
493
+ if (!interfaces.has(t.name)) {
494
+ interfaces.set(t.name, { defined: t.file, implementations: [] });
495
+ }
496
+ }
497
+ }
498
+
499
+ for (const t of allTypes) {
500
+ if (t.kind !== 'class') continue;
501
+ const impls = t.implements ? t.implements.split(',').map(s => s.trim()) : [];
502
+ const bases = t.bases ? [t.bases.trim()] : [];
503
+ for (const iface of [...impls, ...bases]) {
504
+ if (interfaces.has(iface)) {
505
+ interfaces.get(iface).implementations.push({
506
+ name: t.name,
507
+ project: getProjectName(t.file),
508
+ });
509
+ }
510
+ }
511
+ }
512
+
513
+ const mapped = [...interfaces.entries()]
514
+ .filter(([, v]) => v.implementations.length > 0)
515
+ .sort(([a], [b]) => a.localeCompare(b));
516
+
517
+ if (mapped.length === 0) return [];
518
+
519
+ const chunks = [];
520
+ const totalBatches = Math.ceil(mapped.length / IFACE_MAP_BATCH);
521
+
522
+ for (let i = 0; i < mapped.length; i += IFACE_MAP_BATCH) {
523
+ const batch = mapped.slice(i, i + IFACE_MAP_BATCH);
524
+ const batchNum = Math.floor(i / IFACE_MAP_BATCH) + 1;
525
+
526
+ let content = `# Interface-to-Implementation Map (${batchNum}/${totalBatches})\n\n`;
527
+ for (const [name, info] of batch) {
528
+ const implStr = info.implementations
529
+ .map(impl => `${impl.name} (${impl.project})`)
530
+ .join(', ');
531
+ content += ` ${name} -> ${implStr}\n`;
532
+ }
533
+
534
+ chunks.push({
535
+ key: `iface-map:${batchNum}`,
536
+ content: content.trim(),
537
+ metadata: { kind: 'interface-map', batch: batchNum, totalBatches, count: batch.length },
538
+ tags: ['interface-map'],
539
+ });
540
+ }
541
+
542
+ return chunks;
543
+ }
544
+
545
+ function generateTypeIndex(allTypes) {
546
+ const sorted = [...allTypes].sort((a, b) => a.name.localeCompare(b.name));
547
+ const chunks = [];
548
+ const totalBatches = Math.ceil(sorted.length / TYPE_INDEX_BATCH);
549
+
550
+ for (let i = 0; i < sorted.length; i += TYPE_INDEX_BATCH) {
551
+ const batch = sorted.slice(i, i + TYPE_INDEX_BATCH);
552
+ const batchNum = Math.floor(i / TYPE_INDEX_BATCH) + 1;
553
+
554
+ let content = `# Type Index (batch ${batchNum}, ${batch.length} types)\n\n`;
555
+ for (const t of batch) {
556
+ const lang = detectLanguage(t.file);
557
+ content += ` ${t.name} -> ${t.file} [${lang}]\n`;
558
+ }
559
+
560
+ chunks.push({
561
+ key: `type-index:${batchNum}`,
562
+ content: content.trim(),
563
+ metadata: { kind: 'type-index', batch: batchNum, totalBatches, count: batch.length },
564
+ tags: ['type-index'],
565
+ });
566
+ }
567
+
568
+ return chunks;
569
+ }
570
+
571
+ /**
572
+ * NEW: Generate file-level entries for each source file that has exported types.
573
+ *
574
+ * Each file gets its own entry keyed as `file:<path>`, containing:
575
+ * - The file path
576
+ * - All exported type names with their kind, base class, and implementations
577
+ * - Whether it's a MikroORM entity
578
+ * - The project and directory it belongs to
579
+ *
580
+ * This enables precise semantic search: a query for "CompanyAuditLog" will match
581
+ * the specific file entry rather than being diluted across a batch of 80 types.
582
+ */
583
+ function generateFileEntries(typesByFile) {
584
+ const chunks = [];
585
+
586
+ for (const [filePath, types] of Object.entries(typesByFile)) {
587
+ if (types.length === 0) continue;
588
+
589
+ const project = getProjectName(filePath);
590
+ const dir = getDirectory(filePath);
591
+ const dirDesc = getDirDescription(dir);
592
+ const lang = detectLanguage(filePath);
593
+ const fileName = basename(filePath);
594
+
595
+ // Build a rich, searchable content string
596
+ let content = `# ${fileName} (${filePath})\n`;
597
+ content += `Project: ${project} | Language: ${lang}\n`;
598
+ if (dirDesc) content += `Directory: ${dirDesc}\n`;
599
+ content += '\nExported types:\n';
600
+
601
+ for (const t of types) {
602
+ let line = ` ${t.kind} ${t.name}`;
603
+ if (t.isEntity) line += ' [MikroORM entity]';
604
+ if (t.bases) line += ` extends ${t.bases}`;
605
+ if (t.implements) line += ` implements ${t.implements}`;
606
+ content += line + '\n';
607
+ }
608
+
609
+ // Build tags for filtering
610
+ const tags = ['file', project];
611
+ if (types.some(t => t.isEntity)) tags.push('entity');
612
+ if (types.some(t => t.kind === 'interface')) tags.push('interface');
613
+ if (filePath.includes('/services/')) tags.push('service');
614
+ if (filePath.includes('/routes/')) tags.push('route');
615
+ if (filePath.includes('/middleware/')) tags.push('middleware');
616
+
617
+ chunks.push({
618
+ key: `file:${filePath}`,
619
+ content: content.trim(),
620
+ metadata: {
621
+ kind: 'file-detail',
622
+ filePath,
623
+ project,
624
+ directory: dir,
625
+ language: lang,
626
+ typeCount: types.length,
627
+ hasEntities: types.some(t => t.isEntity),
628
+ typeNames: types.map(t => t.name),
629
+ },
630
+ tags,
631
+ });
632
+ }
633
+
634
+ return chunks;
635
+ }
636
+
637
+ // ---------------------------------------------------------------------------
638
+ // Main
639
+ // ---------------------------------------------------------------------------
640
+
641
/**
 * Orchestrates the full code-map indexing pipeline:
 *   1. enumerate source files (git ls-files)
 *   2. incremental-skip check via a hash of the file list
 *   3. extract exported type declarations per file
 *   4. generate chunk documents (project / directory / interface / type-index / file)
 *   5. rewrite the code-map namespace in the memory database
 *   6. persist the file-list hash for incremental caching
 *   7. optionally generate embeddings inline via a child process
 *
 * Honors the module-level flags `statsOnly` (report and exit) and
 * `skipEmbeddings` (skip step 7).
 *
 * @returns {Promise<void>}
 */
async function main() {
  const startTime = Date.now();

  log(`Project root: ${projectRoot}`);

  // 1. Get source files
  log('Enumerating source files via git ls-files...');
  const files = getSourceFiles();
  log(`Found ${files.length} source files`);

  if (files.length === 0) {
    log('No source files found — nothing to index');
    return;
  }

  // 2. Check hash for incremental skip
  const currentHash = computeFileListHash(files);

  if (statsOnly) {
    // Stats mode: report counts and exit without touching the index.
    // try/finally guarantees the DB handle is released even if counting throws.
    const db = await getDb();
    let count;
    try {
      count = countNamespace(db);
    } finally {
      db.close();
    }
    log(`Stats: ${files.length} source files, ${count} chunks in code-map namespace`);
    log(`File list hash: ${currentHash.slice(0, 12)}...`);
    return;
  }

  if (isUnchanged(currentHash)) {
    const db = await getDb();
    let count;
    try {
      count = countNamespace(db);
    } finally {
      db.close();
    }
    if (count > 0) {
      log(`Skipping — file list unchanged (${count} chunks in DB, hash ${currentHash.slice(0, 12)}...)`);
      return;
    }
    // Hash matched but the namespace is empty (e.g. DB was wiped) — rebuild anyway.
    log('File list unchanged but no chunks in DB — forcing regeneration');
  }

  // 3. Extract types from all files
  log('Extracting type declarations...');
  const allTypes = [];
  const filesByProject = {};
  const typesByProject = {};
  const typesByDir = {};
  const typesByFile = {};

  for (const file of files) {
    const project = getProjectName(file);
    if (!filesByProject[project]) filesByProject[project] = [];
    filesByProject[project].push(file);

    const types = extractTypes(file);

    // Track types per file for file-level entries
    if (types.length > 0) {
      typesByFile[file] = types;
    }

    for (const t of types) {
      allTypes.push(t);

      if (!typesByProject[project]) typesByProject[project] = [];
      typesByProject[project].push(t);

      const dir = getDirectory(t.file);
      if (!typesByDir[dir]) typesByDir[dir] = [];
      typesByDir[dir].push(t);
    }
  }

  log(`Extracted ${allTypes.length} type declarations from ${Object.keys(filesByProject).length} projects`);
  log(`Files with exported types: ${Object.keys(typesByFile).length}`);

  // 4. Generate all chunk types
  log('Generating chunks...');
  const projectChunks = generateProjectOverviews(filesByProject, typesByProject);
  const dirChunks = generateDirectoryDetails(typesByDir);
  const ifaceChunks = generateInterfaceMaps(allTypes);
  const typeIdxChunks = generateTypeIndex(allTypes);
  const fileChunks = generateFileEntries(typesByFile);

  const allChunks = [...projectChunks, ...dirChunks, ...ifaceChunks, ...typeIdxChunks, ...fileChunks];

  log(`Generated ${allChunks.length} chunks:`);
  log(`  Project overviews: ${projectChunks.length}`);
  log(`  Directory details: ${dirChunks.length}`);
  log(`  Interface maps: ${ifaceChunks.length}`);
  log(`  Type index: ${typeIdxChunks.length}`);
  log(`  File entries: ${fileChunks.length} (NEW — file-level granularity)`);

  // 5. Write to database.
  // try/finally ensures the handle is closed even if a storeEntry/saveDb call
  // throws mid-write (the original leaked the handle on failure).
  log('Writing to memory database...');
  const db = await getDb();
  try {
    deleteNamespace(db);

    for (const chunk of allChunks) {
      storeEntry(db, chunk.key, chunk.content, chunk.metadata, chunk.tags);
    }

    saveDb(db);
  } finally {
    db.close();
  }

  // 6. Save hash for incremental caching — only after a fully successful write,
  // so a failed run is retried from scratch next time.
  writeFileSync(HASH_CACHE_PATH, currentHash, 'utf-8');

  const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
  log(`Done in ${elapsed}s — ${allChunks.length} chunks written to code-map namespace`);

  // 7. Generate embeddings inline (not detached — ensures Xenova runs reliably)
  if (!skipEmbeddings) {
    // Prefer moflo's own bin script, fall back to project's .claude/scripts/
    const embedCandidates = [
      resolve(dirname(fileURLToPath(import.meta.url)), 'build-embeddings.mjs'),
      resolve(projectRoot, '.claude/scripts/build-embeddings.mjs'),
    ];
    const embedScript = embedCandidates.find(p => existsSync(p));
    if (embedScript) {
      log('Generating embeddings for code-map...');
      try {
        execSync(`node "${embedScript}" --namespace code-map`, {
          cwd: projectRoot,
          stdio: 'inherit',
          timeout: 120000,
          windowsHide: true,
        });
      } catch (err) {
        // Best-effort: index is already written; log the first line of the error
        // and continue. String(...) avoids printing "undefined" when the error
        // has no message (e.g. a killed child process).
        log(`Warning: embedding generation failed: ${String(err?.message ?? err).split('\n')[0]}`);
      }
    }
  }
}
772
+
773
// Script entry point: run the pipeline, report any fatal error, and exit
// non-zero so callers (CI, hooks) can detect failure.
(async () => {
  try {
    await main();
  } catch (err) {
    console.error('[code-map] Fatal error:', err);
    process.exit(1);
  }
})();