moflo 4.8.21 → 4.8.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. package/.claude/agents/browser/browser-agent.yaml +182 -182
  2. package/.claude/agents/core/coder.md +265 -265
  3. package/.claude/agents/core/planner.md +167 -167
  4. package/.claude/agents/core/researcher.md +189 -189
  5. package/.claude/agents/core/reviewer.md +325 -325
  6. package/.claude/agents/core/tester.md +318 -318
  7. package/.claude/agents/database-specialist.yaml +21 -21
  8. package/.claude/agents/dual-mode/codex-coordinator.md +224 -224
  9. package/.claude/agents/dual-mode/codex-worker.md +211 -211
  10. package/.claude/agents/dual-mode/dual-orchestrator.md +291 -291
  11. package/.claude/agents/github/code-review-swarm.md +537 -537
  12. package/.claude/agents/github/github-modes.md +172 -172
  13. package/.claude/agents/github/issue-tracker.md +318 -318
  14. package/.claude/agents/github/multi-repo-swarm.md +552 -552
  15. package/.claude/agents/github/pr-manager.md +190 -190
  16. package/.claude/agents/github/project-board-sync.md +508 -508
  17. package/.claude/agents/github/release-manager.md +366 -366
  18. package/.claude/agents/github/release-swarm.md +582 -582
  19. package/.claude/agents/github/repo-architect.md +397 -397
  20. package/.claude/agents/github/swarm-issue.md +572 -572
  21. package/.claude/agents/github/swarm-pr.md +427 -427
  22. package/.claude/agents/github/sync-coordinator.md +451 -451
  23. package/.claude/agents/github/workflow-automation.md +634 -634
  24. package/.claude/agents/goal/code-goal-planner.md +445 -445
  25. package/.claude/agents/hive-mind/collective-intelligence-coordinator.md +129 -129
  26. package/.claude/agents/hive-mind/queen-coordinator.md +202 -202
  27. package/.claude/agents/hive-mind/scout-explorer.md +241 -241
  28. package/.claude/agents/hive-mind/swarm-memory-manager.md +192 -192
  29. package/.claude/agents/hive-mind/worker-specialist.md +216 -216
  30. package/.claude/agents/index.yaml +17 -17
  31. package/.claude/agents/neural/safla-neural.md +73 -73
  32. package/.claude/agents/project-coordinator.yaml +15 -15
  33. package/.claude/agents/python-specialist.yaml +21 -21
  34. package/.claude/agents/reasoning/goal-planner.md +72 -72
  35. package/.claude/agents/security-auditor.yaml +20 -20
  36. package/.claude/agents/swarm/adaptive-coordinator.md +395 -395
  37. package/.claude/agents/swarm/hierarchical-coordinator.md +326 -326
  38. package/.claude/agents/swarm/mesh-coordinator.md +391 -391
  39. package/.claude/agents/templates/migration-plan.md +745 -745
  40. package/.claude/agents/typescript-specialist.yaml +21 -21
  41. package/.claude/checkpoints/1767754460.json +8 -8
  42. package/.claude/commands/agents/agent-spawning.md +28 -28
  43. package/.claude/commands/github/github-modes.md +146 -146
  44. package/.claude/commands/github/github-swarm.md +121 -121
  45. package/.claude/commands/github/issue-tracker.md +291 -291
  46. package/.claude/commands/github/pr-manager.md +169 -169
  47. package/.claude/commands/github/release-manager.md +337 -337
  48. package/.claude/commands/github/repo-architect.md +366 -366
  49. package/.claude/commands/github/sync-coordinator.md +300 -300
  50. package/.claude/commands/memory/neural.md +47 -47
  51. package/.claude/commands/sparc/analyzer.md +51 -51
  52. package/.claude/commands/sparc/architect.md +53 -53
  53. package/.claude/commands/sparc/ask.md +97 -97
  54. package/.claude/commands/sparc/batch-executor.md +54 -54
  55. package/.claude/commands/sparc/code.md +89 -89
  56. package/.claude/commands/sparc/coder.md +54 -54
  57. package/.claude/commands/sparc/debug.md +83 -83
  58. package/.claude/commands/sparc/debugger.md +54 -54
  59. package/.claude/commands/sparc/designer.md +53 -53
  60. package/.claude/commands/sparc/devops.md +109 -109
  61. package/.claude/commands/sparc/docs-writer.md +80 -80
  62. package/.claude/commands/sparc/documenter.md +54 -54
  63. package/.claude/commands/sparc/innovator.md +54 -54
  64. package/.claude/commands/sparc/integration.md +83 -83
  65. package/.claude/commands/sparc/mcp.md +117 -117
  66. package/.claude/commands/sparc/memory-manager.md +54 -54
  67. package/.claude/commands/sparc/optimizer.md +54 -54
  68. package/.claude/commands/sparc/orchestrator.md +131 -131
  69. package/.claude/commands/sparc/post-deployment-monitoring-mode.md +83 -83
  70. package/.claude/commands/sparc/refinement-optimization-mode.md +83 -83
  71. package/.claude/commands/sparc/researcher.md +54 -54
  72. package/.claude/commands/sparc/reviewer.md +54 -54
  73. package/.claude/commands/sparc/security-review.md +80 -80
  74. package/.claude/commands/sparc/sparc-modes.md +174 -174
  75. package/.claude/commands/sparc/sparc.md +111 -111
  76. package/.claude/commands/sparc/spec-pseudocode.md +80 -80
  77. package/.claude/commands/sparc/supabase-admin.md +348 -348
  78. package/.claude/commands/sparc/swarm-coordinator.md +54 -54
  79. package/.claude/commands/sparc/tdd.md +54 -54
  80. package/.claude/commands/sparc/tester.md +54 -54
  81. package/.claude/commands/sparc/tutorial.md +79 -79
  82. package/.claude/commands/sparc/workflow-manager.md +54 -54
  83. package/.claude/commands/sparc.md +166 -166
  84. package/.claude/commands/swarm/analysis.md +95 -95
  85. package/.claude/commands/swarm/development.md +96 -96
  86. package/.claude/commands/swarm/examples.md +168 -168
  87. package/.claude/commands/swarm/maintenance.md +102 -102
  88. package/.claude/commands/swarm/optimization.md +117 -117
  89. package/.claude/commands/swarm/research.md +136 -136
  90. package/.claude/commands/swarm/testing.md +131 -131
  91. package/.claude/commands/workflows/development.md +77 -77
  92. package/.claude/commands/workflows/research.md +62 -62
  93. package/.claude/guidance/moflo-bootstrap.md +126 -126
  94. package/.claude/guidance/shipped/agent-bootstrap.md +126 -126
  95. package/.claude/guidance/shipped/guidance-memory-strategy.md +262 -262
  96. package/.claude/guidance/shipped/memory-strategy.md +204 -204
  97. package/.claude/guidance/shipped/moflo.md +668 -653
  98. package/.claude/guidance/shipped/task-swarm-integration.md +441 -441
  99. package/.claude/helpers/intelligence.cjs +207 -207
  100. package/.claude/helpers/statusline.cjs +851 -851
  101. package/.claude/settings.local.json +18 -0
  102. package/.claude/skills/fl/SKILL.md +583 -583
  103. package/.claude/skills/flo/SKILL.md +583 -583
  104. package/.claude/skills/github-code-review/SKILL.md +1140 -1140
  105. package/.claude/skills/github-multi-repo/SKILL.md +874 -874
  106. package/.claude/skills/github-project-management/SKILL.md +1277 -1277
  107. package/.claude/skills/github-release-management/SKILL.md +1081 -1081
  108. package/.claude/skills/github-workflow-automation/SKILL.md +1065 -1065
  109. package/.claude/skills/hive-mind-advanced/SKILL.md +712 -712
  110. package/.claude/skills/hooks-automation/SKILL.md +1201 -1201
  111. package/.claude/skills/performance-analysis/SKILL.md +563 -563
  112. package/.claude/skills/sparc-methodology/SKILL.md +1115 -1115
  113. package/.claude/skills/swarm-advanced/SKILL.md +973 -973
  114. package/.claude/workflow-state.json +4 -4
  115. package/LICENSE +21 -21
  116. package/README.md +698 -685
  117. package/bin/cli.js +0 -0
  118. package/bin/gate-hook.mjs +50 -50
  119. package/bin/gate.cjs +138 -138
  120. package/bin/generate-code-map.mjs +775 -775
  121. package/bin/hook-handler.cjs +83 -83
  122. package/bin/hooks.mjs +656 -656
  123. package/bin/index-guidance.mjs +892 -892
  124. package/bin/index-tests.mjs +709 -709
  125. package/bin/lib/process-manager.mjs +243 -243
  126. package/bin/lib/registry-cleanup.cjs +41 -41
  127. package/bin/prompt-hook.mjs +72 -72
  128. package/bin/semantic-search.mjs +472 -472
  129. package/bin/session-start-launcher.mjs +238 -238
  130. package/bin/setup-project.mjs +250 -250
  131. package/package.json +123 -123
  132. package/src/@claude-flow/cli/README.md +452 -452
  133. package/src/@claude-flow/cli/bin/cli.js +180 -180
  134. package/src/@claude-flow/cli/bin/preinstall.cjs +2 -2
  135. package/src/@claude-flow/cli/dist/src/commands/completions.js +409 -409
  136. package/src/@claude-flow/cli/dist/src/commands/doctor.js +18 -2
  137. package/src/@claude-flow/cli/dist/src/commands/embeddings.js +25 -25
  138. package/src/@claude-flow/cli/dist/src/commands/github.js +61 -61
  139. package/src/@claude-flow/cli/dist/src/commands/hive-mind.js +90 -90
  140. package/src/@claude-flow/cli/dist/src/commands/hooks.js +9 -9
  141. package/src/@claude-flow/cli/dist/src/commands/init.js +3 -8
  142. package/src/@claude-flow/cli/dist/src/commands/ruvector/import.js +14 -14
  143. package/src/@claude-flow/cli/dist/src/commands/ruvector/setup.js +624 -624
  144. package/src/@claude-flow/cli/dist/src/config/moflo-config.d.ts +3 -0
  145. package/src/@claude-flow/cli/dist/src/config/moflo-config.js +101 -91
  146. package/src/@claude-flow/cli/dist/src/index.d.ts +5 -0
  147. package/src/@claude-flow/cli/dist/src/index.js +44 -0
  148. package/src/@claude-flow/cli/dist/src/init/claudemd-generator.d.ts +29 -29
  149. package/src/@claude-flow/cli/dist/src/init/claudemd-generator.js +43 -43
  150. package/src/@claude-flow/cli/dist/src/init/executor.js +453 -453
  151. package/src/@claude-flow/cli/dist/src/init/helpers-generator.js +482 -482
  152. package/src/@claude-flow/cli/dist/src/init/moflo-init.d.ts +30 -30
  153. package/src/@claude-flow/cli/dist/src/init/moflo-init.js +140 -140
  154. package/src/@claude-flow/cli/dist/src/init/statusline-generator.js +876 -876
  155. package/src/@claude-flow/cli/dist/src/memory/memory-initializer.js +371 -371
  156. package/src/@claude-flow/cli/dist/src/runtime/headless.js +28 -28
  157. package/src/@claude-flow/cli/dist/src/services/container-worker-pool.d.ts +197 -0
  158. package/src/@claude-flow/cli/dist/src/services/container-worker-pool.js +584 -0
  159. package/src/@claude-flow/cli/dist/src/services/daemon-lock.d.ts +14 -0
  160. package/src/@claude-flow/cli/dist/src/services/daemon-lock.js +1 -1
  161. package/src/@claude-flow/cli/dist/src/services/headless-worker-executor.js +84 -84
  162. package/src/@claude-flow/cli/package.json +1 -1
  163. package/src/@claude-flow/guidance/README.md +1195 -1195
  164. package/src/@claude-flow/guidance/package.json +198 -198
  165. package/src/@claude-flow/memory/README.md +587 -587
  166. package/src/@claude-flow/memory/dist/agentdb-backend.js +26 -26
  167. package/src/@claude-flow/memory/dist/auto-memory-bridge.test.js +27 -27
  168. package/src/@claude-flow/memory/dist/hybrid-backend.d.ts +245 -0
  169. package/src/@claude-flow/memory/dist/hybrid-backend.js +569 -0
  170. package/src/@claude-flow/memory/dist/hybrid-backend.test.d.ts +8 -0
  171. package/src/@claude-flow/memory/dist/hybrid-backend.test.js +320 -0
  172. package/src/@claude-flow/memory/dist/sqlite-backend.d.ts +121 -0
  173. package/src/@claude-flow/memory/dist/sqlite-backend.js +572 -0
  174. package/src/@claude-flow/memory/dist/sqljs-backend.js +26 -26
  175. package/src/@claude-flow/memory/package.json +44 -44
  176. package/src/@claude-flow/shared/README.md +323 -323
  177. package/src/@claude-flow/shared/dist/events/event-store.js +31 -31
  178. package/src/README.md +493 -493
@@ -1,710 +1,710 @@
1
1
  #!/usr/bin/env node
2
- /**
3
- * Index test files into claude-flow memory under the `tests` namespace
4
- *
5
- * Extracts from each test file:
6
- * - File path
7
- * - Describe/it/test block names (regex-based, no AST)
8
- * - Import targets (what modules the test imports — key for reverse mapping)
9
- * - Test framework detected (vitest, jest, mocha, etc.)
10
- *
11
- * Chunk types:
12
- * test-file:{path} — Per-file entry with describe blocks, imports, test names
13
- * test-map:{source-file} — Reverse mapping: source file → test files that import it
14
- * test-dir:{path} — Directory summary of test coverage
15
- *
16
- * Usage:
17
- * node node_modules/moflo/bin/index-tests.mjs # Incremental
18
- * node node_modules/moflo/bin/index-tests.mjs --force # Full reindex
19
- * node node_modules/moflo/bin/index-tests.mjs --verbose # Detailed logging
20
- * node node_modules/moflo/bin/index-tests.mjs --no-embeddings # Skip embeddings
21
- * node node_modules/moflo/bin/index-tests.mjs --stats # Print stats and exit
22
- * npx flo-testmap # Via npx
23
- */
24
-
25
- import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, statSync } from 'fs';
26
- import { resolve, dirname, relative, basename, extname, join } from 'path';
27
- import { fileURLToPath } from 'url';
28
- import { createHash } from 'crypto';
29
- import { execSync, spawn } from 'child_process';
30
- import { mofloResolveURL } from './lib/moflo-resolve.mjs';
31
- const initSqlJs = (await import(mofloResolveURL('sql.js'))).default;
32
-
33
- const __dirname = dirname(fileURLToPath(import.meta.url));
34
-
35
/**
 * Walk upward from the current working directory until a directory
 * containing a package.json is found; fall back to cwd when the
 * filesystem root is reached without a match.
 */
function findProjectRoot() {
  const fsRoot = resolve(process.cwd(), '/');
  for (let candidate = process.cwd(); candidate !== fsRoot; candidate = dirname(candidate)) {
    if (existsSync(resolve(candidate, 'package.json'))) return candidate;
  }
  return process.cwd();
}
44
-
45
// All test-index entries live under this memory namespace.
const NAMESPACE = 'tests';
// Paths are anchored at the detected project root: the shared memory DB
// and the cached hash of the last-indexed file list.
const projectRoot = findProjectRoot();
const DB_PATH = resolve(projectRoot, '.swarm/memory.db');
const HASH_CACHE_PATH = resolve(projectRoot, '.swarm/tests-hash.txt');
49
-
50
// ---------------------------------------------------------------------------
// CLI flags and logging
// ---------------------------------------------------------------------------

const args = process.argv.slice(2);
const hasFlag = (flag) => args.includes(flag);
const force = hasFlag('--force');
const verbose = hasFlag('--verbose') || hasFlag('-v');
const skipEmbeddings = hasFlag('--no-embeddings');
const statsOnly = hasFlag('--stats');

// log: always prints; debug: only under --verbose / -v.
function log(msg) { console.log(`[index-tests] ${msg}`); }
function debug(msg) { if (verbose) console.log(`[index-tests] ${msg}`); }
59
-
60
// ---------------------------------------------------------------------------
// Test file patterns
// ---------------------------------------------------------------------------

// Filename shapes recognized as test files: foo.test.ts, foo.spec.js,
// foo.test-utils.mjs, and so on.
const TEST_FILE_PATTERNS = [
  /\.test\.\w+$/,
  /\.spec\.\w+$/,
  /\.test-\w+\.\w+$/,
];

// Source extensions that may contain tests.
const TEST_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs']);

// Directories never descended into during discovery.
const EXCLUDE_DIRS = new Set([
  'node_modules', 'dist', 'build', '.next', 'coverage',
  '.claude', '.swarm', '.claude-flow', '.git',
]);
76
-
77
- // ---------------------------------------------------------------------------
78
- // Database helpers (same pattern as index-guidance.mjs / generate-code-map.mjs)
79
- // ---------------------------------------------------------------------------
80
-
81
/**
 * Create the directory containing DB_PATH (e.g. .swarm/) if it is missing.
 */
function ensureDbDir() {
  const parent = dirname(DB_PATH);
  if (existsSync(parent)) return;
  mkdirSync(parent, { recursive: true });
}
85
-
86
/**
 * Open (or create) the sql.js database at DB_PATH and ensure the
 * memory_entries table plus its lookup indexes exist.
 *
 * @returns {Promise<object>} an initialized sql.js Database instance
 */
async function getDb() {
  ensureDbDir();
  const SQL = await initSqlJs();
  const db = existsSync(DB_PATH)
    ? new SQL.Database(readFileSync(DB_PATH))
    : new SQL.Database();

  // Schema is idempotent — safe to run on every open.
  db.run(`
    CREATE TABLE IF NOT EXISTS memory_entries (
      id TEXT PRIMARY KEY,
      key TEXT NOT NULL,
      namespace TEXT DEFAULT 'default',
      content TEXT NOT NULL,
      type TEXT DEFAULT 'semantic',
      embedding TEXT,
      embedding_model TEXT DEFAULT 'local',
      embedding_dimensions INTEGER,
      tags TEXT,
      metadata TEXT,
      owner_id TEXT,
      created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
      updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
      expires_at INTEGER,
      last_accessed_at INTEGER,
      access_count INTEGER DEFAULT 0,
      status TEXT DEFAULT 'active',
      UNIQUE(namespace, key)
    )
  `);
  db.run(`CREATE INDEX IF NOT EXISTS idx_memory_key_ns ON memory_entries(key, namespace)`);
  db.run(`CREATE INDEX IF NOT EXISTS idx_memory_namespace ON memory_entries(namespace)`);
  return db;
}
123
-
124
/**
 * Serialize the in-memory sql.js database back to DB_PATH on disk.
 */
function saveDb(db) {
  writeFileSync(DB_PATH, Buffer.from(db.export()));
}
128
-
129
/**
 * Generate a unique-ish entry id: mem_<epoch-ms>_<up to 9 base36 chars>.
 * Timestamp + Math.random() is adequate for local indexing, not crypto.
 */
function generateId() {
  // String.prototype.substr is deprecated; slice(2, 11) takes the same
  // up-to-9 characters after the "0." prefix of Math.random().toString(36).
  return `mem_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
}
132
-
133
/**
 * Upsert a memory entry into the tests namespace. (namespace, key) is
 * UNIQUE, so INSERT OR REPLACE overwrites any previous entry for the key.
 *
 * @param {object} db       sql.js database
 * @param {string} key      entry key within the namespace
 * @param {string} content  chunk text
 * @param {object} metadata serialized to JSON
 * @param {string[]} tags   serialized to JSON
 */
function storeEntry(db, key, content, metadata = {}, tags = []) {
  const now = Date.now();
  const params = [
    generateId(), key, NAMESPACE, content,
    JSON.stringify(metadata), JSON.stringify(tags),
    now, now,
  ];
  db.run(`
    INSERT OR REPLACE INTO memory_entries
    (id, key, namespace, content, metadata, tags, created_at, updated_at, status)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active')
  `, params);
}
142
-
143
/** Remove every entry in the tests namespace (used before a full reindex). */
function deleteNamespace(db) {
  db.run('DELETE FROM memory_entries WHERE namespace = ?', [NAMESPACE]);
}
146
-
147
/**
 * Count entries currently stored in the tests namespace, using the
 * sql.js prepare/bind/step statement API. The statement is always freed.
 */
function countNamespace(db) {
  const stmt = db.prepare('SELECT COUNT(*) as cnt FROM memory_entries WHERE namespace = ?');
  stmt.bind([NAMESPACE]);
  const count = stmt.step() ? stmt.getAsObject().cnt : 0;
  stmt.free();
  return count;
}
155
-
156
- // ---------------------------------------------------------------------------
157
- // Test directory discovery
158
- // ---------------------------------------------------------------------------
159
-
160
- /**
161
- * Load test directories from moflo.yaml or discover automatically.
162
- */
163
/**
 * Resolve which directories to scan for tests.
 * Priority: moflo.yaml `tests.directories` → moflo.config.json
 * `tests.directories` → auto-discovery of common locations.
 *
 * @returns {string[]} project-relative directory names
 */
function loadTestDirs() {
  // 1. moflo.yaml — minimal regex parse of the tests.directories list.
  const yamlPath = resolve(projectRoot, 'moflo.yaml');
  if (existsSync(yamlPath)) {
    try {
      const content = readFileSync(yamlPath, 'utf-8');
      const testsBlock = content.match(/tests:\s*\n\s+directories:\s*\n((?:\s+-\s+.+\n?)+)/);
      const items = testsBlock ? testsBlock[1].match(/-\s+(.+)/g) : null;
      if (items && items.length > 0) {
        return items.map((item) => item.replace(/^-\s+/, '').trim());
      }
    } catch { /* unreadable yaml — fall through */ }
  }

  // 2. moflo.config.json
  const jsonPath = resolve(projectRoot, 'moflo.config.json');
  if (existsSync(jsonPath)) {
    try {
      const raw = JSON.parse(readFileSync(jsonPath, 'utf-8'));
      if (Array.isArray(raw.tests?.directories)) return raw.tests.directories;
    } catch { /* malformed json — fall through */ }
  }

  // 3. Auto-discover common test directories.
  return discoverTestDirs();
}
194
-
195
- /**
196
- * Discover test directories by checking common locations.
197
- */
198
/**
 * Probe a fixed list of conventional test directory names and return the
 * ones that exist under the project root, in candidate order.
 */
function discoverTestDirs() {
  const candidates = ['tests', 'test', '__tests__', 'spec', 'e2e'];
  return candidates.filter((dir) => existsSync(resolve(projectRoot, dir)));
}
210
-
211
- // ---------------------------------------------------------------------------
212
- // Test file enumeration
213
- // ---------------------------------------------------------------------------
214
-
215
- /**
216
- * Find all test files using git ls-files + pattern matching.
217
- */
218
/**
 * Enumerate test files two ways and union the results:
 *   1. `git ls-files` glob patterns for tracked *.test.* / *.spec.* files
 *   2. a filesystem walk of configured/discovered test directories
 *
 * @returns {string[]} sorted project-relative paths (forward slashes)
 */
function getTestFiles() {
  const results = new Set();

  // Strategy 1: tracked test files via git (silently skipped when git is
  // unavailable or this is not a repository).
  try {
    const raw = execSync(
      `git ls-files -- "*.test.*" "*.spec.*" "*.test-*"`,
      { cwd: projectRoot, encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 }
    ).trim();

    // Keep only recognized extensions outside excluded directories.
    const keep = (f) => {
      for (const ex of EXCLUDE_DIRS) {
        if (f.startsWith(ex + '/') || f.startsWith(ex + '\\')) return false;
      }
      return TEST_EXTENSIONS.has(extname(f));
    };

    if (raw) {
      for (const f of raw.split('\n')) {
        if (keep(f)) results.add(f);
      }
    }
  } catch { /* git not available or not a repo */ }

  // Strategy 2: walk configured test directories for any source files.
  for (const dir of loadTestDirs()) {
    const fullDir = resolve(projectRoot, dir);
    if (existsSync(fullDir)) walkTestFiles(fullDir, results);
  }

  return [...results].sort();
}
252
-
253
- /**
254
- * Walk a directory for test files (*.test.*, *.spec.*, or any source file in test dirs).
255
- */
256
/**
 * Recursively collect test files under `dir` into `results` as
 * project-relative, forward-slash paths.
 *
 * Every file with a recognized source extension inside a walked test
 * directory is included. The original `isTestFile || isInTestDir` check
 * was dead code: `isInTestDir` was hard-coded to true (we are always
 * inside a walked test dir), so the TEST_FILE_PATTERNS match could never
 * affect the outcome — it has been removed.
 *
 * @param {string} dir       absolute directory to walk
 * @param {Set<string>} results accumulator, mutated in place
 */
function walkTestFiles(dir, results) {
  if (!existsSync(dir)) return;

  let entries;
  try {
    entries = readdirSync(dir, { withFileTypes: true });
  } catch {
    return; // unreadable dir — skip silently, matching prior behavior
  }

  for (const entry of entries) {
    const full = resolve(dir, entry.name);
    if (entry.isDirectory()) {
      if (!EXCLUDE_DIRS.has(entry.name)) walkTestFiles(full, results);
      continue;
    }
    if (!entry.isFile()) continue;
    if (!TEST_EXTENSIONS.has(extname(entry.name))) continue;

    results.add(relative(projectRoot, full).replace(/\\/g, '/'));
  }
}
281
-
282
/**
 * Stable content hash of the file list: sha256 over the sorted,
 * newline-joined paths. The input array is not mutated.
 */
function computeFileListHash(files) {
  const joined = [...files].sort().join('\n');
  return createHash('sha256').update(joined).digest('hex');
}
286
-
287
/**
 * True when the cached file-list hash matches `currentHash`, i.e. the set
 * of test files has not changed since the last run. --force bypasses.
 */
function isUnchanged(currentHash) {
  if (force || !existsSync(HASH_CACHE_PATH)) return false;
  return readFileSync(HASH_CACHE_PATH, 'utf-8').trim() === currentHash;
}
293
-
294
- // ---------------------------------------------------------------------------
295
- // Test file analysis (regex-based, no AST)
296
- // ---------------------------------------------------------------------------
297
-
298
- /**
299
- * Detect test framework from import statements.
300
- */
301
/**
 * Best-effort test-framework detection from import/require statements.
 * Falls back to 'generic' when bare describe/it/test calls are present,
 * 'unknown' otherwise. First matching signature wins (order matters).
 */
function detectFramework(content) {
  const signatures = [
    ['vitest', [/from\s+['"]vitest['"]/, /import.*vitest/]],
    ['jest', [/from\s+['"]@jest/, /from\s+['"]jest['"]/]],
    ['mocha', [/require\s*\(\s*['"]mocha['"]/]],
    ['playwright', [/from\s+['"]@playwright/]],
    ['cypress', [/from\s+['"]cypress['"]/]],
    ['ava', [/from\s+['"]ava['"]/]],
    ['generic', [/describe\s*\(/, /it\s*\(/, /test\s*\(/]],
  ];
  for (const [name, patterns] of signatures) {
    if (patterns.some((p) => p.test(content))) return name;
  }
  return 'unknown';
}
311
-
312
- /**
313
- * Extract describe/it/test block names from test file content.
314
- */
315
/**
 * Pull describe/suite and it/test/specify block names out of test source
 * using regexes (no AST). Parameterized (.each) tests get a suffix.
 *
 * @returns {{type: 'describe'|'test', name: string}[]} in scan order:
 *          all describes, then plain tests, then .each tests
 */
function extractTestBlocks(content) {
  const blocks = [];
  const collect = (pattern, type, suffix = '') => {
    let match;
    while ((match = pattern.exec(content)) !== null) {
      blocks.push({ type, name: match[2] + suffix });
    }
  };

  // describe("name") / suite('name') / describe(`name`)
  collect(/(?:describe|suite)\s*\(\s*(['"`])(.+?)\1/g, 'describe');
  // it("name") / test('name') / specify(`name`)
  collect(/(?:it|test|specify)\s*\(\s*(['"`])(.+?)\1/g, 'test');
  // it.each(...)("name") / test.each(...)("name")
  collect(/(?:it|test)\.each[^(]*\(\s*[^)]*\)\s*\(\s*(['"`])(.+?)\1/g, 'test', ' (parameterized)');

  return blocks;
}
339
-
340
- /**
341
- * Extract import targets — the source files that this test imports.
342
- * This is the KEY mechanism for mapping tests to functionality.
343
- */
344
/**
 * Extract the relative import targets of a test file — the source modules
 * it exercises. This is the KEY mechanism for mapping tests to
 * functionality. Handles ES imports, CJS require(), dynamic import(), and
 * bare side-effect imports (`import '../setup'` — previously missed).
 * Package (non-relative) specifiers are ignored.
 *
 * @param {string} content  test file source text
 * @param {string} filePath project-relative path of the test file
 * @returns {string[]} unique project-relative module paths (extensions stripped)
 */
function extractImportTargets(content, filePath) {
  const imports = [];
  const fileDir = dirname(filePath);

  // Only track relative imports (not packages).
  const addIfRelative = (target) => {
    if (target.startsWith('.') || target.startsWith('/')) {
      imports.push(resolveImportPath(target, fileDir));
    }
  };

  // ES module imports: import { X } from '../path'
  const esImportPattern = /import\s+(?:{[^}]*}|[\w*]+(?:\s*,\s*{[^}]*})?)\s+from\s+['"]([^'"]+)['"]/g;
  let m;
  while ((m = esImportPattern.exec(content)) !== null) addIfRelative(m[1]);

  // Bare side-effect imports: import '../setup'
  // (quote must follow `import` directly, so `import { X } from` and
  // dynamic `import(` do not double-match here)
  const sideEffectPattern = /import\s+['"]([^'"]+)['"]/g;
  while ((m = sideEffectPattern.exec(content)) !== null) addIfRelative(m[1]);

  // CJS requires: const X = require('../path')
  const cjsPattern = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
  while ((m = cjsPattern.exec(content)) !== null) addIfRelative(m[1]);

  // Dynamic imports: await import('../path')
  const dynamicPattern = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
  while ((m = dynamicPattern.exec(content)) !== null) addIfRelative(m[1]);

  return [...new Set(imports)];
}
379
-
380
- /**
381
- * Resolve a relative import path to a project-relative path.
382
- * Strips extensions if present, normalizes to forward slashes.
383
- */
384
/**
 * Convert a relative import specifier into a project-relative module path:
 * resolve against the importing file's directory, normalize to forward
 * slashes, and strip a trailing source extension (imports may omit it).
 */
function resolveImportPath(importPath, fromDir) {
  const absolute = resolve(projectRoot, fromDir, importPath).replace(/\\/g, '/');
  const projectRelative = relative(projectRoot, absolute).replace(/\\/g, '/');
  return projectRelative.replace(/\.(ts|tsx|js|jsx|mjs)$/, '');
}
392
-
393
- // ---------------------------------------------------------------------------
394
- // Chunk generators
395
- // ---------------------------------------------------------------------------
396
-
397
- /**
398
- * Generate per-file test entries.
399
- */
400
/**
 * Build one memory chunk per analyzed test file, summarizing the detected
 * framework, describe blocks, test names (listing capped at 30), and the
 * source modules the file imports. Files without an analysis are skipped.
 */
function generateTestFileEntries(testFiles, analyses) {
  const MAX_LISTED_TESTS = 30; // cap to avoid huge entries
  const chunks = [];

  for (const filePath of testFiles) {
    const analysis = analyses[filePath];
    if (!analysis) continue;

    const describes = analysis.blocks.filter((b) => b.type === 'describe');
    const tests = analysis.blocks.filter((b) => b.type === 'test');

    const lines = [
      `# ${basename(filePath)} (${filePath})`,
      `Framework: ${analysis.framework}`,
    ];
    if (analysis.importTargets.length > 0) {
      lines.push(`Tests: ${analysis.importTargets.map((t) => t + '.*').join(', ')}`);
    }
    lines.push('');

    if (describes.length > 0) {
      lines.push('Describe blocks:');
      for (const d of describes) lines.push(`  - ${d.name}`);
      lines.push('');
    }

    if (tests.length > 0) {
      lines.push(`Test cases (${tests.length}):`);
      for (const t of tests.slice(0, MAX_LISTED_TESTS)) lines.push(`  - ${t.name}`);
      if (tests.length > MAX_LISTED_TESTS) {
        lines.push(`  ... and ${tests.length - MAX_LISTED_TESTS} more`);
      }
      lines.push('');
    }

    if (analysis.importTargets.length > 0) {
      lines.push('Source files under test:');
      for (const imp of analysis.importTargets) lines.push(`  - ${imp}`);
    }

    // Tag by framework plus any test-category hints in the path.
    const tags = ['test-file', analysis.framework];
    if (filePath.includes('e2e') || filePath.includes('E2E')) tags.push('e2e');
    if (filePath.includes('integration')) tags.push('integration');
    if (filePath.includes('unit')) tags.push('unit');

    chunks.push({
      key: `test-file:${filePath}`,
      content: lines.join('\n').trim(),
      metadata: {
        kind: 'test-file',
        filePath,
        directory: dirname(filePath),
        framework: analysis.framework,
        describeCount: describes.length,
        testCount: tests.length,
        importTargets: analysis.importTargets,
        describeNames: describes.map((d) => d.name),
      },
      tags,
    });
  }

  return chunks;
}
472
-
473
- /**
474
- * Generate reverse mapping: source file → test files that import it.
475
- * This is the primary value — enables "find tests for this file" queries.
476
- */
477
/**
 * Build the reverse index: for every source module imported by at least
 * one test file, emit a chunk listing the test files that cover it.
 * This powers "find the tests for this file" queries.
 */
function generateTestMaps(testFiles, analyses) {
  // source module path → [{ testFile, framework, testCount }]
  const coverage = new Map();

  for (const filePath of testFiles) {
    const analysis = analyses[filePath];
    if (!analysis) continue;
    const testCount = analysis.blocks.filter((b) => b.type === 'test').length;
    for (const target of analysis.importTargets) {
      if (!coverage.has(target)) coverage.set(target, []);
      coverage.get(target).push({
        testFile: filePath,
        framework: analysis.framework,
        testCount,
      });
    }
  }

  const chunks = [];
  for (const [sourceFile, testEntries] of coverage) {
    let content = `# Tests for: ${sourceFile}\n\n`;
    content += `${testEntries.length} test file(s) cover this module:\n\n`;
    for (const entry of testEntries) {
      content += `  ${entry.testFile} [${entry.framework}, ${entry.testCount} tests]\n`;
    }

    chunks.push({
      key: `test-map:${sourceFile}`,
      content: content.trim(),
      metadata: {
        kind: 'test-map',
        sourceFile,
        testFiles: testEntries.map((e) => e.testFile),
        totalTests: testEntries.reduce((sum, e) => sum + e.testCount, 0),
      },
      tags: ['test-map'],
    });
  }

  return chunks;
}
520
-
521
- /**
522
- * Generate directory summaries showing test coverage per directory.
523
- */
524
/**
 * Build one summary chunk per directory containing test files: file and
 * test counts, frameworks in use, and a (capped) list of the modules
 * those tests import.
 */
function generateTestDirSummaries(testFiles, analyses) {
  const MAX_LISTED_IMPORTS = 20; // cap module listing per directory

  // directory → test files in it
  const byDir = new Map();
  for (const filePath of testFiles) {
    const dir = dirname(filePath);
    if (!byDir.has(dir)) byDir.set(dir, []);
    byDir.get(dir).push(filePath);
  }

  const chunks = [];
  for (const [dir, files] of byDir) {
    if (files.length < 1) continue;

    const frameworks = new Set();
    const allImports = new Set();
    let totalTests = 0;
    let totalDescribes = 0;

    for (const f of files) {
      const analysis = analyses[f];
      if (!analysis) continue;
      frameworks.add(analysis.framework);
      totalTests += analysis.blocks.filter((b) => b.type === 'test').length;
      totalDescribes += analysis.blocks.filter((b) => b.type === 'describe').length;
      for (const imp of analysis.importTargets) allImports.add(imp);
    }

    let content = `# ${dir}/ (${files.length} test files)\n`;
    content += `Frameworks: ${[...frameworks].join(', ')}\n`;
    content += `Total: ${totalDescribes} suites, ${totalTests} tests\n\n`;
    content += 'Files:\n';
    for (const f of files) content += `  ${basename(f)}\n`;
    if (allImports.size > 0) {
      content += '\nModules under test:\n';
      for (const imp of [...allImports].sort().slice(0, MAX_LISTED_IMPORTS)) {
        content += `  ${imp}\n`;
      }
      if (allImports.size > MAX_LISTED_IMPORTS) {
        content += `  ... and ${allImports.size - MAX_LISTED_IMPORTS} more\n`;
      }
    }

    chunks.push({
      key: `test-dir:${dir}`,
      content: content.trim(),
      metadata: {
        kind: 'test-dir',
        directory: dir,
        fileCount: files.length,
        frameworks: [...frameworks],
        totalTests,
        totalDescribes,
      },
      tags: ['test-dir'],
    });
  }

  return chunks;
}
586
-
587
- // ---------------------------------------------------------------------------
588
- // Main
589
- // ---------------------------------------------------------------------------
590
-
591
- async function main() {
592
- const startTime = Date.now();
593
-
594
- log(`Project root: ${projectRoot}`);
595
-
596
- // 1. Find test files
597
- log('Discovering test files...');
598
- const testFiles = getTestFiles();
599
- log(`Found ${testFiles.length} test files`);
600
-
601
- if (testFiles.length === 0) {
602
- log('No test files found — nothing to index');
603
- return;
604
- }
605
-
606
- // 2. Check hash for incremental skip
607
- const currentHash = computeFileListHash(testFiles);
608
-
609
- if (statsOnly) {
610
- const db = await getDb();
611
- const count = countNamespace(db);
612
- db.close();
613
- log(`Stats: ${testFiles.length} test files, ${count} chunks in tests namespace`);
614
- log(`File list hash: ${currentHash.slice(0, 12)}...`);
615
- return;
616
- }
617
-
618
- if (isUnchanged(currentHash)) {
619
- const db = await getDb();
620
- const count = countNamespace(db);
621
- db.close();
622
- if (count > 0) {
623
- log(`Skipping — file list unchanged (${count} chunks in DB, hash ${currentHash.slice(0, 12)}...)`);
624
- return;
625
- }
626
- log('File list unchanged but no chunks in DB — forcing regeneration');
627
- }
628
-
629
- // 3. Analyze all test files
630
- log('Analyzing test files...');
631
- const analyses = {};
632
-
633
- for (const filePath of testFiles) {
634
- const fullPath = resolve(projectRoot, filePath);
635
- if (!existsSync(fullPath)) continue;
636
-
637
- try {
638
- const content = readFileSync(fullPath, 'utf-8');
639
- analyses[filePath] = {
640
- framework: detectFramework(content),
641
- blocks: extractTestBlocks(content),
642
- importTargets: extractImportTargets(content, filePath),
643
- };
644
- debug(` ${filePath}: ${analyses[filePath].framework}, ${analyses[filePath].blocks.length} blocks, ${analyses[filePath].importTargets.length} imports`);
645
- } catch (err) {
646
- debug(` ${filePath}: ERROR - ${err.message}`);
647
- }
648
- }
649
-
650
- const analyzedCount = Object.keys(analyses).length;
651
- log(`Analyzed ${analyzedCount} test files`);
652
-
653
- // 4. Generate all chunk types
654
- log('Generating chunks...');
655
- const fileChunks = generateTestFileEntries(testFiles, analyses);
656
- const mapChunks = generateTestMaps(testFiles, analyses);
657
- const dirChunks = generateTestDirSummaries(testFiles, analyses);
658
-
659
- const allChunks = [...fileChunks, ...mapChunks, ...dirChunks];
660
-
661
- log(`Generated ${allChunks.length} chunks:`);
662
- log(` Test file entries: ${fileChunks.length}`);
663
- log(` Reverse maps: ${mapChunks.length} (source → test files)`);
664
- log(` Directory summaries: ${dirChunks.length}`);
665
-
666
- // 5. Write to database
667
- log('Writing to memory database...');
668
- const db = await getDb();
669
- deleteNamespace(db);
670
-
671
- for (const chunk of allChunks) {
672
- storeEntry(db, chunk.key, chunk.content, chunk.metadata, chunk.tags);
673
- }
674
-
675
- saveDb(db);
676
- db.close();
677
-
678
- // 6. Save hash for incremental caching
679
- writeFileSync(HASH_CACHE_PATH, currentHash, 'utf-8');
680
-
681
- const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
682
- log(`Done in ${elapsed}s — ${allChunks.length} chunks written to tests namespace`);
683
-
684
- // 7. Generate embeddings (inline, like code-map)
685
- if (!skipEmbeddings && allChunks.length > 0) {
686
- const embedCandidates = [
687
- resolve(dirname(fileURLToPath(import.meta.url)), 'build-embeddings.mjs'),
688
- resolve(projectRoot, '.claude/scripts/build-embeddings.mjs'),
689
- ];
690
- const embedScript = embedCandidates.find(p => existsSync(p));
691
- if (embedScript) {
692
- log('Generating embeddings for tests...');
693
- try {
694
- execSync(`node "${embedScript}" --namespace tests`, {
695
- cwd: projectRoot,
696
- stdio: 'inherit',
697
- timeout: 120000,
698
- windowsHide: true,
699
- });
700
- } catch (err) {
701
- log(`Warning: embedding generation failed: ${err.message?.split('\n')[0]}`);
702
- }
703
- }
704
- }
705
- }
706
-
707
- main().catch(err => {
708
- console.error('[index-tests] Fatal error:', err);
709
- process.exit(1);
710
- });
2
+ /**
3
+ * Index test files into claude-flow memory under the `tests` namespace
4
+ *
5
+ * Extracts from each test file:
6
+ * - File path
7
+ * - Describe/it/test block names (regex-based, no AST)
8
+ * - Import targets (what modules the test imports — key for reverse mapping)
9
+ * - Test framework detected (vitest, jest, mocha, etc.)
10
+ *
11
+ * Chunk types:
12
+ * test-file:{path} — Per-file entry with describe blocks, imports, test names
13
+ * test-map:{source-file} — Reverse mapping: source file → test files that import it
14
+ * test-dir:{path} — Directory summary of test coverage
15
+ *
16
+ * Usage:
17
+ * node node_modules/moflo/bin/index-tests.mjs # Incremental
18
+ * node node_modules/moflo/bin/index-tests.mjs --force # Full reindex
19
+ * node node_modules/moflo/bin/index-tests.mjs --verbose # Detailed logging
20
+ * node node_modules/moflo/bin/index-tests.mjs --no-embeddings # Skip embeddings
21
+ * node node_modules/moflo/bin/index-tests.mjs --stats # Print stats and exit
22
+ * npx flo-testmap # Via npx
23
+ */
24
+
25
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, statSync } from 'fs';
26
+ import { resolve, dirname, relative, basename, extname, join } from 'path';
27
+ import { fileURLToPath } from 'url';
28
+ import { createHash } from 'crypto';
29
+ import { execSync, spawn } from 'child_process';
30
+ import { mofloResolveURL } from './lib/moflo-resolve.mjs';
31
+ const initSqlJs = (await import(mofloResolveURL('sql.js'))).default;
32
+
33
+ const __dirname = dirname(fileURLToPath(import.meta.url));
34
+
35
+ function findProjectRoot() {
36
+ let dir = process.cwd();
37
+ const root = resolve(dir, '/');
38
+ while (dir !== root) {
39
+ if (existsSync(resolve(dir, 'package.json'))) return dir;
40
+ dir = dirname(dir);
41
+ }
42
+ return process.cwd();
43
+ }
44
+
45
+ const projectRoot = findProjectRoot();
46
+ const NAMESPACE = 'tests';
47
+ const DB_PATH = resolve(projectRoot, '.swarm/memory.db');
48
+ const HASH_CACHE_PATH = resolve(projectRoot, '.swarm/tests-hash.txt');
49
+
50
+ // Parse args
51
+ const args = process.argv.slice(2);
52
+ const force = args.includes('--force');
53
+ const verbose = args.includes('--verbose') || args.includes('-v');
54
+ const skipEmbeddings = args.includes('--no-embeddings');
55
+ const statsOnly = args.includes('--stats');
56
+
57
+ function log(msg) { console.log(`[index-tests] ${msg}`); }
58
+ function debug(msg) { if (verbose) console.log(`[index-tests] ${msg}`); }
59
+
60
+ // ---------------------------------------------------------------------------
61
+ // Test file patterns
62
+ // ---------------------------------------------------------------------------
63
+
64
+ const TEST_FILE_PATTERNS = [
65
+ /\.test\.\w+$/,
66
+ /\.spec\.\w+$/,
67
+ /\.test-\w+\.\w+$/,
68
+ ];
69
+
70
+ const TEST_EXTENSIONS = new Set(['.ts', '.tsx', '.js', '.jsx', '.mjs']);
71
+
72
+ const EXCLUDE_DIRS = new Set([
73
+ 'node_modules', 'dist', 'build', '.next', 'coverage',
74
+ '.claude', '.swarm', '.claude-flow', '.git',
75
+ ]);
76
+
77
+ // ---------------------------------------------------------------------------
78
+ // Database helpers (same pattern as index-guidance.mjs / generate-code-map.mjs)
79
+ // ---------------------------------------------------------------------------
80
+
81
+ function ensureDbDir() {
82
+ const dir = dirname(DB_PATH);
83
+ if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
84
+ }
85
+
86
+ async function getDb() {
87
+ ensureDbDir();
88
+ const SQL = await initSqlJs();
89
+ let db;
90
+ if (existsSync(DB_PATH)) {
91
+ const buffer = readFileSync(DB_PATH);
92
+ db = new SQL.Database(buffer);
93
+ } else {
94
+ db = new SQL.Database();
95
+ }
96
+
97
+ db.run(`
98
+ CREATE TABLE IF NOT EXISTS memory_entries (
99
+ id TEXT PRIMARY KEY,
100
+ key TEXT NOT NULL,
101
+ namespace TEXT DEFAULT 'default',
102
+ content TEXT NOT NULL,
103
+ type TEXT DEFAULT 'semantic',
104
+ embedding TEXT,
105
+ embedding_model TEXT DEFAULT 'local',
106
+ embedding_dimensions INTEGER,
107
+ tags TEXT,
108
+ metadata TEXT,
109
+ owner_id TEXT,
110
+ created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
111
+ updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
112
+ expires_at INTEGER,
113
+ last_accessed_at INTEGER,
114
+ access_count INTEGER DEFAULT 0,
115
+ status TEXT DEFAULT 'active',
116
+ UNIQUE(namespace, key)
117
+ )
118
+ `);
119
+ db.run(`CREATE INDEX IF NOT EXISTS idx_memory_key_ns ON memory_entries(key, namespace)`);
120
+ db.run(`CREATE INDEX IF NOT EXISTS idx_memory_namespace ON memory_entries(namespace)`);
121
+ return db;
122
+ }
123
+
124
+ function saveDb(db) {
125
+ const data = db.export();
126
+ writeFileSync(DB_PATH, Buffer.from(data));
127
+ }
128
+
129
+ function generateId() {
130
+ return `mem_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
131
+ }
132
+
133
+ function storeEntry(db, key, content, metadata = {}, tags = []) {
134
+ const now = Date.now();
135
+ const id = generateId();
136
+ db.run(`
137
+ INSERT OR REPLACE INTO memory_entries
138
+ (id, key, namespace, content, metadata, tags, created_at, updated_at, status)
139
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active')
140
+ `, [id, key, NAMESPACE, content, JSON.stringify(metadata), JSON.stringify(tags), now, now]);
141
+ }
142
+
143
+ function deleteNamespace(db) {
144
+ db.run(`DELETE FROM memory_entries WHERE namespace = ?`, [NAMESPACE]);
145
+ }
146
+
147
+ function countNamespace(db) {
148
+ const stmt = db.prepare(`SELECT COUNT(*) as cnt FROM memory_entries WHERE namespace = ?`);
149
+ stmt.bind([NAMESPACE]);
150
+ let count = 0;
151
+ if (stmt.step()) count = stmt.getAsObject().cnt;
152
+ stmt.free();
153
+ return count;
154
+ }
155
+
156
+ // ---------------------------------------------------------------------------
157
+ // Test directory discovery
158
+ // ---------------------------------------------------------------------------
159
+
160
+ /**
161
+ * Load test directories from moflo.yaml or discover automatically.
162
+ */
163
+ function loadTestDirs() {
164
+ const yamlPath = resolve(projectRoot, 'moflo.yaml');
165
+ const jsonPath = resolve(projectRoot, 'moflo.config.json');
166
+
167
+ // Try moflo.yaml first
168
+ if (existsSync(yamlPath)) {
169
+ try {
170
+ const content = readFileSync(yamlPath, 'utf-8');
171
+ const testsBlock = content.match(/tests:\s*\n\s+directories:\s*\n((?:\s+-\s+.+\n?)+)/);
172
+ if (testsBlock) {
173
+ const items = testsBlock[1].match(/-\s+(.+)/g);
174
+ if (items && items.length > 0) {
175
+ return items.map(item => item.replace(/^-\s+/, '').trim());
176
+ }
177
+ }
178
+ } catch { /* ignore */ }
179
+ }
180
+
181
+ // Try moflo.config.json
182
+ if (existsSync(jsonPath)) {
183
+ try {
184
+ const raw = JSON.parse(readFileSync(jsonPath, 'utf-8'));
185
+ if (raw.tests?.directories && Array.isArray(raw.tests.directories)) {
186
+ return raw.tests.directories;
187
+ }
188
+ } catch { /* ignore */ }
189
+ }
190
+
191
+ // Auto-discover common test directories
192
+ return discoverTestDirs();
193
+ }
194
+
195
+ /**
196
+ * Discover test directories by checking common locations.
197
+ */
198
+ function discoverTestDirs() {
199
+ const candidates = ['tests', 'test', '__tests__', 'spec', 'e2e'];
200
+ const found = [];
201
+
202
+ for (const dir of candidates) {
203
+ if (existsSync(resolve(projectRoot, dir))) {
204
+ found.push(dir);
205
+ }
206
+ }
207
+
208
+ return found;
209
+ }
210
+
211
+ // ---------------------------------------------------------------------------
212
+ // Test file enumeration
213
+ // ---------------------------------------------------------------------------
214
+
215
+ /**
216
+ * Find all test files using git ls-files + pattern matching.
217
+ */
218
+ function getTestFiles() {
219
+ // Strategy 1: git ls-files for tracked test files
220
+ let gitFiles = [];
221
+ try {
222
+ const raw = execSync(
223
+ `git ls-files -- "*.test.*" "*.spec.*" "*.test-*"`,
224
+ { cwd: projectRoot, encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 }
225
+ ).trim();
226
+
227
+ if (raw) {
228
+ gitFiles = raw.split('\n').filter(f => {
229
+ // Skip excluded dirs
230
+ for (const ex of EXCLUDE_DIRS) {
231
+ if (f.startsWith(ex + '/') || f.startsWith(ex + '\\')) return false;
232
+ }
233
+ // Only include recognized extensions
234
+ const ext = extname(f);
235
+ return TEST_EXTENSIONS.has(ext);
236
+ });
237
+ }
238
+ } catch { /* git not available or not a repo */ }
239
+
240
+ // Strategy 2: Walk configured test directories for any files
241
+ const testDirs = loadTestDirs();
242
+ const walkedFiles = new Set(gitFiles);
243
+
244
+ for (const dir of testDirs) {
245
+ const fullDir = resolve(projectRoot, dir);
246
+ if (!existsSync(fullDir)) continue;
247
+ walkTestFiles(fullDir, walkedFiles);
248
+ }
249
+
250
+ return [...walkedFiles].sort();
251
+ }
252
+
253
+ /**
254
+ * Walk a directory for test files (*.test.*, *.spec.*, or any source file in test dirs).
255
+ */
256
+ function walkTestFiles(dir, results) {
257
+ if (!existsSync(dir)) return;
258
+
259
+ try {
260
+ for (const entry of readdirSync(dir, { withFileTypes: true })) {
261
+ if (entry.isDirectory()) {
262
+ if (!EXCLUDE_DIRS.has(entry.name)) {
263
+ walkTestFiles(resolve(dir, entry.name), results);
264
+ }
265
+ } else if (entry.isFile()) {
266
+ const ext = extname(entry.name);
267
+ if (!TEST_EXTENSIONS.has(ext)) continue;
268
+
269
+ // Include if it matches test patterns OR if it's inside a test directory
270
+ const isTestFile = TEST_FILE_PATTERNS.some(p => p.test(entry.name));
271
+ const isInTestDir = true; // already walking a test dir
272
+
273
+ if (isTestFile || isInTestDir) {
274
+ const relPath = relative(projectRoot, resolve(dir, entry.name)).replace(/\\/g, '/');
275
+ results.add(relPath);
276
+ }
277
+ }
278
+ }
279
+ } catch { /* skip unreadable dirs */ }
280
+ }
281
+
282
+ function computeFileListHash(files) {
283
+ const sorted = [...files].sort();
284
+ return createHash('sha256').update(sorted.join('\n')).digest('hex');
285
+ }
286
+
287
+ function isUnchanged(currentHash) {
288
+ if (force) return false;
289
+ if (!existsSync(HASH_CACHE_PATH)) return false;
290
+ const cached = readFileSync(HASH_CACHE_PATH, 'utf-8').trim();
291
+ return cached === currentHash;
292
+ }
293
+
294
+ // ---------------------------------------------------------------------------
295
+ // Test file analysis (regex-based, no AST)
296
+ // ---------------------------------------------------------------------------
297
+
298
+ /**
299
+ * Detect test framework from import statements.
300
+ */
301
+ function detectFramework(content) {
302
+ if (/from\s+['"]vitest['"]/.test(content) || /import.*vitest/.test(content)) return 'vitest';
303
+ if (/from\s+['"]@jest/.test(content) || /from\s+['"]jest['"]/.test(content)) return 'jest';
304
+ if (/require\s*\(\s*['"]mocha['"]/.test(content)) return 'mocha';
305
+ if (/from\s+['"]@playwright/.test(content)) return 'playwright';
306
+ if (/from\s+['"]cypress['"]/.test(content)) return 'cypress';
307
+ if (/from\s+['"]ava['"]/.test(content)) return 'ava';
308
+ if (/describe\s*\(/.test(content) || /it\s*\(/.test(content) || /test\s*\(/.test(content)) return 'generic';
309
+ return 'unknown';
310
+ }
311
+
312
+ /**
313
+ * Extract describe/it/test block names from test file content.
314
+ */
315
+ function extractTestBlocks(content) {
316
+ const blocks = [];
317
+
318
+ // Match: describe("name", ...) / describe('name', ...) / describe(`name`, ...)
319
+ const describePattern = /(?:describe|suite)\s*\(\s*(['"`])(.+?)\1/g;
320
+ let m;
321
+ while ((m = describePattern.exec(content)) !== null) {
322
+ blocks.push({ type: 'describe', name: m[2] });
323
+ }
324
+
325
+ // Match: it("name", ...) / test("name", ...) / specify("name", ...)
326
+ const testPattern = /(?:it|test|specify)\s*\(\s*(['"`])(.+?)\1/g;
327
+ while ((m = testPattern.exec(content)) !== null) {
328
+ blocks.push({ type: 'test', name: m[2] });
329
+ }
330
+
331
+ // Match: it.each / test.each (parameterized)
332
+ const eachPattern = /(?:it|test)\.each[^(]*\(\s*[^)]*\)\s*\(\s*(['"`])(.+?)\1/g;
333
+ while ((m = eachPattern.exec(content)) !== null) {
334
+ blocks.push({ type: 'test', name: m[2] + ' (parameterized)' });
335
+ }
336
+
337
+ return blocks;
338
+ }
339
+
340
+ /**
341
+ * Extract import targets — the source files that this test imports.
342
+ * This is the KEY mechanism for mapping tests to functionality.
343
+ */
344
+ function extractImportTargets(content, filePath) {
345
+ const imports = [];
346
+ const fileDir = dirname(filePath);
347
+
348
+ // ES module imports: import { X } from '../path'
349
+ const esImportPattern = /import\s+(?:{[^}]*}|[\w*]+(?:\s*,\s*{[^}]*})?)\s+from\s+['"]([^'"]+)['"]/g;
350
+ let m;
351
+ while ((m = esImportPattern.exec(content)) !== null) {
352
+ const target = m[1];
353
+ // Only track relative imports (not packages)
354
+ if (target.startsWith('.') || target.startsWith('/')) {
355
+ imports.push(resolveImportPath(target, fileDir));
356
+ }
357
+ }
358
+
359
+ // CJS requires: const X = require('../path')
360
+ const cjsPattern = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
361
+ while ((m = cjsPattern.exec(content)) !== null) {
362
+ const target = m[1];
363
+ if (target.startsWith('.') || target.startsWith('/')) {
364
+ imports.push(resolveImportPath(target, fileDir));
365
+ }
366
+ }
367
+
368
+ // Dynamic imports: await import('../path')
369
+ const dynamicPattern = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
370
+ while ((m = dynamicPattern.exec(content)) !== null) {
371
+ const target = m[1];
372
+ if (target.startsWith('.') || target.startsWith('/')) {
373
+ imports.push(resolveImportPath(target, fileDir));
374
+ }
375
+ }
376
+
377
+ return [...new Set(imports)];
378
+ }
379
+
380
+ /**
381
+ * Resolve a relative import path to a project-relative path.
382
+ * Strips extensions if present, normalizes to forward slashes.
383
+ */
384
+ function resolveImportPath(importPath, fromDir) {
385
+ // Resolve relative to the importing file's directory
386
+ const resolved = resolve(projectRoot, fromDir, importPath).replace(/\\/g, '/');
387
+ const rel = relative(projectRoot, resolved).replace(/\\/g, '/');
388
+
389
+ // Normalize: strip common source extensions (the import may omit them)
390
+ return rel.replace(/\.(ts|tsx|js|jsx|mjs)$/, '');
391
+ }
392
+
393
+ // ---------------------------------------------------------------------------
394
+ // Chunk generators
395
+ // ---------------------------------------------------------------------------
396
+
397
+ /**
398
+ * Generate per-file test entries.
399
+ */
400
+ function generateTestFileEntries(testFiles, analyses) {
401
+ const chunks = [];
402
+
403
+ for (const filePath of testFiles) {
404
+ const analysis = analyses[filePath];
405
+ if (!analysis) continue;
406
+
407
+ const fileName = basename(filePath);
408
+ const dir = dirname(filePath);
409
+
410
+ let content = `# ${fileName} (${filePath})\n`;
411
+ content += `Framework: ${analysis.framework}\n`;
412
+ if (analysis.importTargets.length > 0) {
413
+ content += `Tests: ${analysis.importTargets.map(t => t + '.*').join(', ')}\n`;
414
+ }
415
+ content += '\n';
416
+
417
+ // Describe blocks
418
+ const describes = analysis.blocks.filter(b => b.type === 'describe');
419
+ if (describes.length > 0) {
420
+ content += 'Describe blocks:\n';
421
+ for (const d of describes) {
422
+ content += ` - ${d.name}\n`;
423
+ }
424
+ content += '\n';
425
+ }
426
+
427
+ // Test names
428
+ const tests = analysis.blocks.filter(b => b.type === 'test');
429
+ if (tests.length > 0) {
430
+ content += `Test cases (${tests.length}):\n`;
431
+ for (const t of tests.slice(0, 30)) { // Cap at 30 to avoid huge entries
432
+ content += ` - ${t.name}\n`;
433
+ }
434
+ if (tests.length > 30) {
435
+ content += ` ... and ${tests.length - 30} more\n`;
436
+ }
437
+ content += '\n';
438
+ }
439
+
440
+ // Import targets
441
+ if (analysis.importTargets.length > 0) {
442
+ content += 'Source files under test:\n';
443
+ for (const imp of analysis.importTargets) {
444
+ content += ` - ${imp}\n`;
445
+ }
446
+ }
447
+
448
+ const tags = ['test-file', analysis.framework];
449
+ if (filePath.includes('e2e') || filePath.includes('E2E')) tags.push('e2e');
450
+ if (filePath.includes('integration')) tags.push('integration');
451
+ if (filePath.includes('unit')) tags.push('unit');
452
+
453
+ chunks.push({
454
+ key: `test-file:${filePath}`,
455
+ content: content.trim(),
456
+ metadata: {
457
+ kind: 'test-file',
458
+ filePath,
459
+ directory: dir,
460
+ framework: analysis.framework,
461
+ describeCount: describes.length,
462
+ testCount: tests.length,
463
+ importTargets: analysis.importTargets,
464
+ describeNames: describes.map(d => d.name),
465
+ },
466
+ tags,
467
+ });
468
+ }
469
+
470
+ return chunks;
471
+ }
472
+
473
+ /**
474
+ * Generate reverse mapping: source file → test files that import it.
475
+ * This is the primary value — enables "find tests for this file" queries.
476
+ */
477
+ function generateTestMaps(testFiles, analyses) {
478
+ const chunks = [];
479
+
480
+ // Build reverse map: source file → [test files]
481
+ const reverseMap = {};
482
+ for (const filePath of testFiles) {
483
+ const analysis = analyses[filePath];
484
+ if (!analysis) continue;
485
+
486
+ for (const target of analysis.importTargets) {
487
+ if (!reverseMap[target]) reverseMap[target] = [];
488
+ reverseMap[target].push({
489
+ testFile: filePath,
490
+ framework: analysis.framework,
491
+ testCount: analysis.blocks.filter(b => b.type === 'test').length,
492
+ });
493
+ }
494
+ }
495
+
496
+ // Generate a chunk for each source file that has tests
497
+ for (const [sourceFile, testEntries] of Object.entries(reverseMap)) {
498
+ let content = `# Tests for: ${sourceFile}\n\n`;
499
+ content += `${testEntries.length} test file(s) cover this module:\n\n`;
500
+
501
+ for (const entry of testEntries) {
502
+ content += ` ${entry.testFile} [${entry.framework}, ${entry.testCount} tests]\n`;
503
+ }
504
+
505
+ chunks.push({
506
+ key: `test-map:${sourceFile}`,
507
+ content: content.trim(),
508
+ metadata: {
509
+ kind: 'test-map',
510
+ sourceFile,
511
+ testFiles: testEntries.map(e => e.testFile),
512
+ totalTests: testEntries.reduce((sum, e) => sum + e.testCount, 0),
513
+ },
514
+ tags: ['test-map'],
515
+ });
516
+ }
517
+
518
+ return chunks;
519
+ }
520
+
521
+ /**
522
+ * Generate directory summaries showing test coverage per directory.
523
+ */
524
+ function generateTestDirSummaries(testFiles, analyses) {
525
+ const chunks = [];
526
+
527
+ // Group by directory
528
+ const dirMap = {};
529
+ for (const filePath of testFiles) {
530
+ const dir = dirname(filePath);
531
+ if (!dirMap[dir]) dirMap[dir] = [];
532
+ dirMap[dir].push(filePath);
533
+ }
534
+
535
+ for (const [dir, files] of Object.entries(dirMap)) {
536
+ if (files.length < 1) continue;
537
+
538
+ const frameworks = new Set();
539
+ let totalTests = 0;
540
+ let totalDescribes = 0;
541
+ const allImports = new Set();
542
+
543
+ for (const f of files) {
544
+ const analysis = analyses[f];
545
+ if (!analysis) continue;
546
+ frameworks.add(analysis.framework);
547
+ totalTests += analysis.blocks.filter(b => b.type === 'test').length;
548
+ totalDescribes += analysis.blocks.filter(b => b.type === 'describe').length;
549
+ for (const imp of analysis.importTargets) allImports.add(imp);
550
+ }
551
+
552
+ let content = `# ${dir}/ (${files.length} test files)\n`;
553
+ content += `Frameworks: ${[...frameworks].join(', ')}\n`;
554
+ content += `Total: ${totalDescribes} suites, ${totalTests} tests\n\n`;
555
+ content += 'Files:\n';
556
+ for (const f of files) {
557
+ content += ` ${basename(f)}\n`;
558
+ }
559
+ if (allImports.size > 0) {
560
+ content += '\nModules under test:\n';
561
+ for (const imp of [...allImports].sort().slice(0, 20)) {
562
+ content += ` ${imp}\n`;
563
+ }
564
+ if (allImports.size > 20) {
565
+ content += ` ... and ${allImports.size - 20} more\n`;
566
+ }
567
+ }
568
+
569
+ chunks.push({
570
+ key: `test-dir:${dir}`,
571
+ content: content.trim(),
572
+ metadata: {
573
+ kind: 'test-dir',
574
+ directory: dir,
575
+ fileCount: files.length,
576
+ frameworks: [...frameworks],
577
+ totalTests,
578
+ totalDescribes,
579
+ },
580
+ tags: ['test-dir'],
581
+ });
582
+ }
583
+
584
+ return chunks;
585
+ }
586
+
587
+ // ---------------------------------------------------------------------------
588
+ // Main
589
+ // ---------------------------------------------------------------------------
590
+
591
+ async function main() {
592
+ const startTime = Date.now();
593
+
594
+ log(`Project root: ${projectRoot}`);
595
+
596
+ // 1. Find test files
597
+ log('Discovering test files...');
598
+ const testFiles = getTestFiles();
599
+ log(`Found ${testFiles.length} test files`);
600
+
601
+ if (testFiles.length === 0) {
602
+ log('No test files found — nothing to index');
603
+ return;
604
+ }
605
+
606
+ // 2. Check hash for incremental skip
607
+ const currentHash = computeFileListHash(testFiles);
608
+
609
+ if (statsOnly) {
610
+ const db = await getDb();
611
+ const count = countNamespace(db);
612
+ db.close();
613
+ log(`Stats: ${testFiles.length} test files, ${count} chunks in tests namespace`);
614
+ log(`File list hash: ${currentHash.slice(0, 12)}...`);
615
+ return;
616
+ }
617
+
618
+ if (isUnchanged(currentHash)) {
619
+ const db = await getDb();
620
+ const count = countNamespace(db);
621
+ db.close();
622
+ if (count > 0) {
623
+ log(`Skipping — file list unchanged (${count} chunks in DB, hash ${currentHash.slice(0, 12)}...)`);
624
+ return;
625
+ }
626
+ log('File list unchanged but no chunks in DB — forcing regeneration');
627
+ }
628
+
629
+ // 3. Analyze all test files
630
+ log('Analyzing test files...');
631
+ const analyses = {};
632
+
633
+ for (const filePath of testFiles) {
634
+ const fullPath = resolve(projectRoot, filePath);
635
+ if (!existsSync(fullPath)) continue;
636
+
637
+ try {
638
+ const content = readFileSync(fullPath, 'utf-8');
639
+ analyses[filePath] = {
640
+ framework: detectFramework(content),
641
+ blocks: extractTestBlocks(content),
642
+ importTargets: extractImportTargets(content, filePath),
643
+ };
644
+ debug(` ${filePath}: ${analyses[filePath].framework}, ${analyses[filePath].blocks.length} blocks, ${analyses[filePath].importTargets.length} imports`);
645
+ } catch (err) {
646
+ debug(` ${filePath}: ERROR - ${err.message}`);
647
+ }
648
+ }
649
+
650
+ const analyzedCount = Object.keys(analyses).length;
651
+ log(`Analyzed ${analyzedCount} test files`);
652
+
653
+ // 4. Generate all chunk types
654
+ log('Generating chunks...');
655
+ const fileChunks = generateTestFileEntries(testFiles, analyses);
656
+ const mapChunks = generateTestMaps(testFiles, analyses);
657
+ const dirChunks = generateTestDirSummaries(testFiles, analyses);
658
+
659
+ const allChunks = [...fileChunks, ...mapChunks, ...dirChunks];
660
+
661
+ log(`Generated ${allChunks.length} chunks:`);
662
+ log(` Test file entries: ${fileChunks.length}`);
663
+ log(` Reverse maps: ${mapChunks.length} (source → test files)`);
664
+ log(` Directory summaries: ${dirChunks.length}`);
665
+
666
+ // 5. Write to database
667
+ log('Writing to memory database...');
668
+ const db = await getDb();
669
+ deleteNamespace(db);
670
+
671
+ for (const chunk of allChunks) {
672
+ storeEntry(db, chunk.key, chunk.content, chunk.metadata, chunk.tags);
673
+ }
674
+
675
+ saveDb(db);
676
+ db.close();
677
+
678
+ // 6. Save hash for incremental caching
679
+ writeFileSync(HASH_CACHE_PATH, currentHash, 'utf-8');
680
+
681
+ const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
682
+ log(`Done in ${elapsed}s — ${allChunks.length} chunks written to tests namespace`);
683
+
684
+ // 7. Generate embeddings (inline, like code-map)
685
+ if (!skipEmbeddings && allChunks.length > 0) {
686
+ const embedCandidates = [
687
+ resolve(dirname(fileURLToPath(import.meta.url)), 'build-embeddings.mjs'),
688
+ resolve(projectRoot, '.claude/scripts/build-embeddings.mjs'),
689
+ ];
690
+ const embedScript = embedCandidates.find(p => existsSync(p));
691
+ if (embedScript) {
692
+ log('Generating embeddings for tests...');
693
+ try {
694
+ execSync(`node "${embedScript}" --namespace tests`, {
695
+ cwd: projectRoot,
696
+ stdio: 'inherit',
697
+ timeout: 120000,
698
+ windowsHide: true,
699
+ });
700
+ } catch (err) {
701
+ log(`Warning: embedding generation failed: ${err.message?.split('\n')[0]}`);
702
+ }
703
+ }
704
+ }
705
+ }
706
+
707
+ main().catch(err => {
708
+ console.error('[index-tests] Fatal error:', err);
709
+ process.exit(1);
710
+ });