moflo 4.2.0 → 4.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -8,14 +8,17 @@
|
|
|
8
8
|
|
|
9
9
|
**Before reading any files or exploring code, search memory for guidance relevant to your task.**
|
|
10
10
|
|
|
11
|
-
###
|
|
11
|
+
### Three namespaces to search:
|
|
12
12
|
|
|
13
13
|
| Namespace | When to search | What it returns |
|
|
14
14
|
|-----------|---------------|-----------------|
|
|
15
15
|
| `guidance` | Understanding patterns, rules, conventions | Guidance docs, coding rules, domain context |
|
|
16
|
-
| `code-map` | Finding where code lives (files, types, services) | Project overviews, directory contents, type
|
|
16
|
+
| `code-map` | Finding where code lives (files, types, services) | Project overviews, directory contents, type-to-file mappings |
|
|
17
|
+
| `patterns` | Prior solutions, gotchas, implementation patterns | Learned patterns from previous task execution |
|
|
17
18
|
|
|
18
|
-
**
|
|
19
|
+
**Always search `patterns` alongside `guidance`.** It contains solutions to problems already solved — skipping it means repeating past mistakes or re-discovering known approaches.
|
|
20
|
+
|
|
21
|
+
**Search `code-map` BEFORE using Glob/Grep for navigation.** It's faster and returns structured results including file-level type mappings.
|
|
19
22
|
|
|
20
23
|
### Option A: MCP Tools (Preferred)
|
|
21
24
|
|
|
@@ -35,12 +38,13 @@ npx moflo memory search --query "[describe your task]" --namespace guidance --li
|
|
|
35
38
|
|
|
36
39
|
| Your task involves... | Search namespace | Example query |
|
|
37
40
|
|-----------------------|------------------|---------------|
|
|
38
|
-
| Database/entities | `guidance` | `"database entity migration"` |
|
|
39
|
-
| Frontend components | `guidance` | `"React frontend component"` |
|
|
40
|
-
| API endpoints | `guidance` | `"API route endpoint pattern"` |
|
|
41
|
-
| Authentication | `guidance` | `"auth middleware JWT"` |
|
|
42
|
-
| Unit tests | `guidance` | `"test mock vitest"` |
|
|
43
|
-
|
|
|
41
|
+
| Database/entities | `guidance` + `patterns` | `"database entity migration"` |
|
|
42
|
+
| Frontend components | `guidance` + `patterns` | `"React frontend component"` |
|
|
43
|
+
| API endpoints | `guidance` + `patterns` | `"API route endpoint pattern"` |
|
|
44
|
+
| Authentication | `guidance` + `patterns` | `"auth middleware JWT"` |
|
|
45
|
+
| Unit tests | `guidance` + `patterns` | `"test mock vitest"` |
|
|
46
|
+
| Prior solutions/gotchas | `patterns` | `"audit log service pattern"` |
|
|
47
|
+
| Where is a file/type? | `code-map` | `"CompanyEntity file location"` |
|
|
44
48
|
| What's in a directory? | `code-map` | `"back-office api routes"` |
|
|
45
49
|
|
|
46
50
|
Use results with score > 0.3. If no good results, fall back to reading project guidance docs.
|
|
@@ -0,0 +1,689 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Generate structural code map for a monorepo or project.
|
|
4
|
+
*
|
|
5
|
+
* Produces five chunk types stored in the `code-map` namespace of .swarm/memory.db:
|
|
6
|
+
* 1. project: — one per top-level project directory (bird's-eye overview)
|
|
7
|
+
* 2. dir: — one per directory with 2+ exported types (drill-down detail)
|
|
8
|
+
* 3. iface-map: — batched interface-to-implementation mappings
|
|
9
|
+
* 4. type-index: — batched type-name-to-file-path lookups
|
|
10
|
+
* 5. file: — ONE PER FILE with exported types (file-level granularity)
|
|
11
|
+
*
|
|
12
|
+
* The `file:` entries are the key improvement — they enable precise semantic search
|
|
13
|
+
* for individual types, entities, and services instead of diluting results across
|
|
14
|
+
* large batches.
|
|
15
|
+
*
|
|
16
|
+
* Design: regex-based extraction (no AST parser), incremental via SHA-256 hash,
|
|
17
|
+
* stores in sql.js memory DB, triggers embedding generation in background.
|
|
18
|
+
*
|
|
19
|
+
* Usage:
|
|
20
|
+
* node node_modules/moflo/bin/generate-code-map.mjs # Incremental
|
|
21
|
+
* node node_modules/moflo/bin/generate-code-map.mjs --force # Full regenerate
|
|
22
|
+
* node node_modules/moflo/bin/generate-code-map.mjs --verbose # Detailed logging
|
|
23
|
+
* node node_modules/moflo/bin/generate-code-map.mjs --no-embeddings # Skip embedding generation
|
|
24
|
+
* node node_modules/moflo/bin/generate-code-map.mjs --stats # Print stats and exit
|
|
25
|
+
* npx moflo-codemap # Via npx
|
|
26
|
+
*/
|
|
27
|
+
|
|
28
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';
|
|
29
|
+
import { resolve, dirname, relative, basename, extname } from 'path';
|
|
30
|
+
import { fileURLToPath } from 'url';
|
|
31
|
+
import { createHash } from 'crypto';
|
|
32
|
+
import { execSync, spawn } from 'child_process';
|
|
33
|
+
import initSqlJs from 'sql.js';
|
|
34
|
+
|
|
35
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
36
|
+
|
|
37
|
+
// Locate the project root by walking upward from the current working
// directory until a package.json is found; falls back to cwd itself.
function findProjectRoot() {
  const fsRoot = resolve(process.cwd(), '/');
  for (let current = process.cwd(); current !== fsRoot; current = dirname(current)) {
    if (existsSync(resolve(current, 'package.json'))) return current;
  }
  return process.cwd();
}

const projectRoot = findProjectRoot();
const NAMESPACE = 'code-map';
const DB_PATH = resolve(projectRoot, '.swarm/memory.db');
const HASH_CACHE_PATH = resolve(projectRoot, '.swarm/code-map-hash.txt');
|
|
52
|
+
|
|
53
|
+
// Directory names that are never indexed.
const EXCLUDE_DIRS = [
  'node_modules', 'dist', 'build', '.next', 'coverage',
  '.claude', 'template', 'back-office-template',
];

// Human-readable descriptions keyed by well-known directory basenames;
// used to enrich chunk text for semantic search.
const DIR_DESCRIPTIONS = {
  entities: 'MikroORM entity definitions',
  services: 'business logic services',
  routes: 'Fastify route handlers',
  middleware: 'request middleware (auth, validation, tenancy)',
  schemas: 'Zod validation schemas',
  types: 'TypeScript type definitions',
  utils: 'utility helpers',
  config: 'configuration',
  migrations: 'database migrations',
  scripts: 'CLI scripts',
  components: 'React components',
  pages: 'route page components',
  contexts: 'React context providers',
  hooks: 'React custom hooks',
  layout: 'app shell layout',
  themes: 'MUI theme configuration',
  api: 'API client layer',
  locales: 'i18n translation files',
  tests: 'test suites',
  e2e: 'end-to-end tests',
  providers: 'dependency injection providers',
};

// Chunk batch sizes.
const IFACE_MAP_BATCH = 20;
const TYPE_INDEX_BATCH = 30; // Reduced from 80 for better search relevance

// CLI flags.
const args = process.argv.slice(2);
const force = args.includes('--force');
const verbose = args.includes('--verbose') || args.includes('-v');
const skipEmbeddings = args.includes('--no-embeddings');
const statsOnly = args.includes('--stats');

// Logging helpers; debug() only prints when --verbose is set.
function log(msg) { console.log(`[code-map] ${msg}`); }
function debug(msg) { if (verbose) console.log(`[code-map] ${msg}`); }
|
|
97
|
+
|
|
98
|
+
// ---------------------------------------------------------------------------
|
|
99
|
+
// Database helpers
|
|
100
|
+
// ---------------------------------------------------------------------------
|
|
101
|
+
|
|
102
|
+
// Create the parent directory of the database file if it is missing.
function ensureDbDir() {
  const parent = dirname(DB_PATH);
  if (!existsSync(parent)) {
    mkdirSync(parent, { recursive: true });
  }
}
|
|
106
|
+
|
|
107
|
+
// Open (or create) the sql.js database at DB_PATH and ensure the
// memory_entries schema plus its lookup indexes exist.
// Returns the live Database handle; caller must close() it.
async function getDb() {
  ensureDbDir();
  const SQL = await initSqlJs();

  // Load the existing file if present, otherwise start a fresh in-memory DB.
  const db = existsSync(DB_PATH)
    ? new SQL.Database(readFileSync(DB_PATH))
    : new SQL.Database();

  db.run(`
    CREATE TABLE IF NOT EXISTS memory_entries (
      id TEXT PRIMARY KEY,
      key TEXT NOT NULL,
      namespace TEXT DEFAULT 'default',
      content TEXT NOT NULL,
      type TEXT DEFAULT 'semantic',
      embedding TEXT,
      embedding_model TEXT DEFAULT 'local',
      embedding_dimensions INTEGER,
      tags TEXT,
      metadata TEXT,
      owner_id TEXT,
      created_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
      updated_at INTEGER NOT NULL DEFAULT (strftime('%s', 'now') * 1000),
      expires_at INTEGER,
      last_accessed_at INTEGER,
      access_count INTEGER DEFAULT 0,
      status TEXT DEFAULT 'active',
      UNIQUE(namespace, key)
    )
  `);
  db.run(`CREATE INDEX IF NOT EXISTS idx_memory_key_ns ON memory_entries(key, namespace)`);
  db.run(`CREATE INDEX IF NOT EXISTS idx_memory_namespace ON memory_entries(namespace)`);
  return db;
}
|
|
144
|
+
|
|
145
|
+
// Serialize the in-memory sql.js database back to disk at DB_PATH.
function saveDb(db) {
  writeFileSync(DB_PATH, Buffer.from(db.export()));
}
|
|
149
|
+
|
|
150
|
+
// Generate a unique-ish entry id: millisecond timestamp plus up to nine
// base-36 random characters. Not cryptographically strong — collisions are
// only avoided probabilistically, which is acceptable for local memory rows.
// Fix: String.prototype.substr is deprecated; slice(2, 11) takes the same
// nine characters with an explicit end index.
function generateId() {
  const suffix = Math.random().toString(36).slice(2, 11);
  return `mem_${Date.now()}_${suffix}`;
}
|
|
153
|
+
|
|
154
|
+
// Upsert one memory entry into the code-map namespace. Entries are unique
// per (namespace, key); metadata and tags are JSON-encoded, and both
// created_at and updated_at are stamped with the current time.
function storeEntry(db, key, content, metadata = {}, tags = []) {
  const timestamp = Date.now();
  const params = [
    generateId(),
    key,
    NAMESPACE,
    content,
    JSON.stringify(metadata),
    JSON.stringify(tags),
    timestamp,
    timestamp,
  ];
  db.run(`
    INSERT OR REPLACE INTO memory_entries
    (id, key, namespace, content, metadata, tags, created_at, updated_at, status)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'active')
  `, params);
}
|
|
163
|
+
|
|
164
|
+
// Drop every entry in the code-map namespace (a rebuild wipes it first
// so stale chunks for deleted files cannot linger).
function deleteNamespace(db) {
  db.run(`DELETE FROM memory_entries WHERE namespace = ?`, [NAMESPACE]);
}
|
|
167
|
+
|
|
168
|
+
// Count rows currently stored in the code-map namespace.
function countNamespace(db) {
  const stmt = db.prepare(`SELECT COUNT(*) as cnt FROM memory_entries WHERE namespace = ?`);
  stmt.bind([NAMESPACE]);
  const total = stmt.step() ? stmt.getAsObject().cnt : 0;
  stmt.free();
  return total;
}
|
|
176
|
+
|
|
177
|
+
// ---------------------------------------------------------------------------
|
|
178
|
+
// Source file enumeration via git ls-files
|
|
179
|
+
// ---------------------------------------------------------------------------
|
|
180
|
+
|
|
181
|
+
// Enumerate tracked JS/TS source files via `git ls-files`, then drop any
// path under an excluded directory (both separators handled, since git on
// Windows can be configured either way).
// Returns repo-relative paths; empty array when the repo tracks none.
// Fix: execSync previously threw an opaque ENOENT/stderr dump when git was
// missing or projectRoot was not a work tree — wrap it so the fatal-error
// handler prints an actionable message with the original error as `cause`.
function getSourceFiles() {
  let raw;
  try {
    raw = execSync(
      `git ls-files -- "*.ts" "*.tsx" "*.js" "*.mjs" "*.jsx"`,
      { cwd: projectRoot, encoding: 'utf-8', maxBuffer: 10 * 1024 * 1024 }
    ).trim();
  } catch (err) {
    throw new Error(
      `git ls-files failed in ${projectRoot} — is git installed and is this a git repository?`,
      { cause: err }
    );
  }

  if (!raw) return [];

  return raw.split('\n').filter(
    (f) => !EXCLUDE_DIRS.some((ex) => f.startsWith(ex + '/') || f.startsWith(ex + '\\'))
  );
}
|
|
196
|
+
|
|
197
|
+
// Order-independent fingerprint of the file list: sort a copy (never
// mutate the caller's array), join with newlines, and SHA-256 it.
function computeFileListHash(files) {
  const canonical = [...files].sort().join('\n');
  return createHash('sha256').update(canonical).digest('hex');
}
|
|
201
|
+
|
|
202
|
+
// True when an incremental run can be skipped: --force is absent, a cached
// hash exists, and it equals the current file-list hash.
function isUnchanged(currentHash) {
  if (force) return false;
  if (!existsSync(HASH_CACHE_PATH)) return false;
  const cached = readFileSync(HASH_CACHE_PATH, 'utf-8').trim();
  return cached === currentHash;
}
|
|
208
|
+
|
|
209
|
+
// ---------------------------------------------------------------------------
|
|
210
|
+
// Type extraction (regex-based, no AST)
|
|
211
|
+
// ---------------------------------------------------------------------------
|
|
212
|
+
|
|
213
|
+
// Regexes matching exported TS/JS declarations, tried in order against each
// trimmed line. Capture group 1 is always the declared name; groups 2/3
// (where present) capture the extends / implements clauses.
const TS_PATTERNS = [
  // export [default] [abstract] class Name [extends Base] [implements A, B]
  /^export\s+(?:default\s+)?(?:abstract\s+)?class\s+(\w+)(?:\s+extends\s+([\w.]+))?(?:\s+implements\s+([\w,\s.]+))?/,
  // export [default] interface Name [extends A, B]
  /^export\s+(?:default\s+)?interface\s+(\w+)(?:\s+extends\s+([\w,\s.]+))?/,
  // export [default] type Name = ... | type Name<...>
  /^export\s+(?:default\s+)?type\s+(\w+)\s*[=<]/,
  // export [const] enum Name
  /^export\s+(?:const\s+)?enum\s+(\w+)/,
  // export [default] [async] function name
  /^export\s+(?:default\s+)?(?:async\s+)?function\s+(\w+)/,
  // export [default] const name = ... | const name: Type
  /^export\s+(?:default\s+)?const\s+(\w+)\s*[=:]/,
];

// A MikroORM @Entity(...) decorator flags the next matched declaration.
const ENTITY_DECORATOR = /@Entity\s*\(/;
|
|
223
|
+
|
|
224
|
+
// Parse one source file and return its exported type declarations.
// Regex-based (deliberately no AST): every trimmed line is tested against
// TS_PATTERNS; duplicate names within a file are ignored. An @Entity(...)
// decorator flags the next matched declaration as a MikroORM entity.
// Missing or unreadable files yield an empty list rather than throwing.
function extractTypes(filePath) {
  const absolute = resolve(projectRoot, filePath);
  if (!existsSync(absolute)) return [];

  let source;
  try {
    source = readFileSync(absolute, 'utf-8');
  } catch {
    return [];
  }

  const found = [];
  const seenNames = new Set();
  let pendingEntity = false;

  for (const rawLine of source.split('\n')) {
    const line = rawLine.trim();

    if (ENTITY_DECORATOR.test(line)) {
      pendingEntity = true;
      continue;
    }

    for (const pattern of TS_PATTERNS) {
      const match = line.match(pattern);
      if (!match || !match[1] || seenNames.has(match[1])) continue;

      seenNames.add(match[1]);
      found.push({
        name: match[1],
        kind: detectKind(line, match[1]),
        bases: (match[2] || '').trim() || null,
        implements: (match[3] || '').trim() || null,
        isEntity: pendingEntity,
        file: filePath,
      });
      pendingEntity = false;
      break;
    }

    // A non-decorator, non-export line between @Entity and its class (e.g.
    // an unexported declaration) cancels the pending entity flag.
    if (pendingEntity && !line.startsWith('@') && !line.startsWith('export') && line.length > 0) {
      pendingEntity = false;
    }
  }

  return found;
}
|
|
275
|
+
|
|
276
|
+
// Classify a matched declaration line by its keyword, checked in priority
// order so e.g. "export const enum Foo" reports 'enum', not 'const'.
// The `name` parameter is unused but kept for interface compatibility.
function detectKind(line, name) {
  const checks = [
    [/\bclass\b/, 'class'],
    [/\binterface\b/, 'interface'],
    [/\btype\b/, 'type'],
    [/\benum\b/, 'enum'],
    [/\bfunction\b/, 'function'],
    [/\bconst\b/, 'const'],
  ];
  for (const [keyword, kind] of checks) {
    if (keyword.test(line)) return kind;
  }
  return 'export';
}
|
|
285
|
+
|
|
286
|
+
// ---------------------------------------------------------------------------
|
|
287
|
+
// Project structure analysis
|
|
288
|
+
// ---------------------------------------------------------------------------
|
|
289
|
+
|
|
290
|
+
// Workspace roots whose project identity is the first TWO path segments
// (e.g. "packages/core"); any other top-level directory is its own project.
const TWO_SEGMENT_ROOTS = new Set([
  'packages', 'back-office', 'customer-portal', 'admin-console', 'webhooks',
]);

// Map a repo-relative file path to its owning project name.
// Replaces the original's five copy-pasted prefix `if` statements with a
// single Set lookup — identical results, one place to extend.
function getProjectName(filePath) {
  const parts = filePath.split('/');
  if (TWO_SEGMENT_ROOTS.has(parts[0]) && parts.length >= 2) {
    return `${parts[0]}/${parts[1]}`;
  }
  // mobile-app, tests, scripts, and anything else: first segment only.
  return parts[0];
}
|
|
303
|
+
|
|
304
|
+
// Directory portion of a repo-relative path, normalized to forward slashes.
function getDirectory(filePath) {
  const dir = dirname(filePath);
  return dir.replace(/\\/g, '/');
}
|
|
307
|
+
|
|
308
|
+
// Look up a human description for the last segment of a directory path.
// Fix: the original indexed DIR_DESCRIPTIONS directly, so a directory whose
// basename collides with an Object.prototype key (e.g. "constructor",
// "toString") returned an inherited function instead of null. Object.hasOwn
// restricts the lookup to the table's own keys.
function getDirDescription(dirName) {
  const last = dirName.split('/').pop();
  return Object.hasOwn(DIR_DESCRIPTIONS, last) ? DIR_DESCRIPTIONS[last] : null;
}
|
|
312
|
+
|
|
313
|
+
// Coarse language tag used inside chunk text: tsx | ts | esm | js.
function detectLanguage(filePath) {
  switch (extname(filePath)) {
    case '.tsx':
    case '.jsx':
      return 'tsx';
    case '.ts':
      return 'ts';
    case '.mjs':
      return 'esm';
    default:
      return 'js';
  }
}
|
|
320
|
+
|
|
321
|
+
// ---------------------------------------------------------------------------
|
|
322
|
+
// Chunk generators
|
|
323
|
+
// ---------------------------------------------------------------------------
|
|
324
|
+
|
|
325
|
+
// Build one bird's-eye overview chunk per top-level project: detected
// language, file/type counts, and per-directory samples of exported type
// names (first 8 shown, with an overflow count).
function generateProjectOverviews(filesByProject, typesByProject) {
  const chunks = [];

  for (const [project, files] of Object.entries(filesByProject)) {
    const types = typesByProject[project] || [];
    const lang = detectProjectLang(files);

    // Group exported type names by directory relative to the project root.
    const namesByDir = {};
    for (const t of types) {
      const rel = relative(project, dirname(t.file)).replace(/\\/g, '/') || '(root)';
      if (!namesByDir[rel]) namesByDir[rel] = [];
      namesByDir[rel].push(t.name);
    }

    let content = `# ${project} [${lang}, ${files.length} files, ${types.length} types]\n\n`;
    for (const dir of Object.keys(namesByDir).sort()) {
      const names = namesByDir[dir];
      const desc = getDirDescription(dir);
      const descStr = desc ? ` -- ${desc}` : '';
      const shown = names.slice(0, 8).join(', ');
      const overflow = names.length > 8 ? `, ... (+${names.length - 8} more)` : '';
      content += ` ${dir}${descStr}: ${shown}${overflow}\n`;
    }

    chunks.push({
      key: `project:${project}`,
      content: content.trim(),
      metadata: { kind: 'project-overview', project, language: lang, fileCount: files.length, typeCount: types.length },
      tags: ['project', project],
    });
  }

  return chunks;
}
|
|
361
|
+
|
|
362
|
+
// Majority-vote language label for a project's file list. Ties between
// TypeScript and JavaScript resolve to TypeScript (>= comparison).
function detectProjectLang(files) {
  const counts = { tsx: 0, ts: 0, js: 0 };
  for (const file of files) {
    const ext = extname(file);
    if (ext === '.tsx' || ext === '.jsx') counts.tsx++;
    else if (ext === '.ts') counts.ts++;
    else counts.js++;
  }
  if (counts.tsx > counts.ts && counts.tsx > counts.js) return 'React/TypeScript';
  return counts.ts >= counts.js ? 'TypeScript' : 'JavaScript';
}
|
|
374
|
+
|
|
375
|
+
// Build one drill-down chunk per directory with 2+ exported types, listing
// each type (alphabetized) with its extends/implements clause and file name.
function generateDirectoryDetails(typesByDir) {
  const chunks = [];

  for (const [dir, types] of Object.entries(typesByDir)) {
    // Single-type directories are covered well enough by file: chunks.
    if (types.length < 2) continue;

    const desc = getDirDescription(dir);
    let content = `# ${dir} (${types.length} types)\n`;
    if (desc) content += `${desc}\n`;
    content += '\n';

    const alphabetized = [...types].sort((a, b) => a.name.localeCompare(b.name));
    for (const t of alphabetized) {
      const clauses = [];
      if (t.bases) clauses.push(`: ${t.bases}`);
      if (t.implements) clauses.push(`: ${t.implements}`);
      const suffixStr = clauses.length ? ` ${clauses.join(' ')}` : '';
      content += ` ${t.name}${suffixStr} (${basename(t.file)})\n`;
    }

    chunks.push({
      key: `dir:${dir}`,
      content: content.trim(),
      metadata: { kind: 'directory-detail', directory: dir, typeCount: types.length },
      tags: ['directory', dir.split('/')[0]],
    });
  }

  return chunks;
}
|
|
406
|
+
|
|
407
|
+
// Build batched interface -> implementation chunks. An interface appears
// only when at least one exported class extends or implements it.
function generateInterfaceMaps(allTypes) {
  // Pass 1: register every exported interface by name (first definition wins).
  const interfaces = new Map();
  for (const t of allTypes) {
    if (t.kind === 'interface' && !interfaces.has(t.name)) {
      interfaces.set(t.name, { defined: t.file, implementations: [] });
    }
  }

  // Pass 2: attach classes whose implements clause or base class names one.
  for (const t of allTypes) {
    if (t.kind !== 'class') continue;
    const candidates = [
      ...(t.implements ? t.implements.split(',').map((s) => s.trim()) : []),
      ...(t.bases ? [t.bases.trim()] : []),
    ];
    for (const ifaceName of candidates) {
      const entry = interfaces.get(ifaceName);
      if (entry) {
        entry.implementations.push({ name: t.name, project: getProjectName(t.file) });
      }
    }
  }

  const mapped = [...interfaces.entries()]
    .filter(([, v]) => v.implementations.length > 0)
    .sort(([a], [b]) => a.localeCompare(b));
  if (mapped.length === 0) return [];

  const chunks = [];
  const totalBatches = Math.ceil(mapped.length / IFACE_MAP_BATCH);
  for (let start = 0; start < mapped.length; start += IFACE_MAP_BATCH) {
    const batch = mapped.slice(start, start + IFACE_MAP_BATCH);
    const batchNum = Math.floor(start / IFACE_MAP_BATCH) + 1;

    let content = `# Interface-to-Implementation Map (${batchNum}/${totalBatches})\n\n`;
    for (const [name, info] of batch) {
      const implStr = info.implementations
        .map((impl) => `${impl.name} (${impl.project})`)
        .join(', ');
      content += ` ${name} -> ${implStr}\n`;
    }

    chunks.push({
      key: `iface-map:${batchNum}`,
      content: content.trim(),
      metadata: { kind: 'interface-map', batch: batchNum, totalBatches, count: batch.length },
      tags: ['interface-map'],
    });
  }

  return chunks;
}
|
|
463
|
+
|
|
464
|
+
// Build batched name -> file lookup chunks, alphabetized by type name so
// related names tend to land in the same batch.
function generateTypeIndex(allTypes) {
  const alphabetized = [...allTypes].sort((a, b) => a.name.localeCompare(b.name));
  const totalBatches = Math.ceil(alphabetized.length / TYPE_INDEX_BATCH);
  const chunks = [];

  for (let start = 0; start < alphabetized.length; start += TYPE_INDEX_BATCH) {
    const batch = alphabetized.slice(start, start + TYPE_INDEX_BATCH);
    const batchNum = Math.floor(start / TYPE_INDEX_BATCH) + 1;

    let content = `# Type Index (batch ${batchNum}, ${batch.length} types)\n\n`;
    for (const t of batch) {
      content += ` ${t.name} -> ${t.file} [${detectLanguage(t.file)}]\n`;
    }

    chunks.push({
      key: `type-index:${batchNum}`,
      content: content.trim(),
      metadata: { kind: 'type-index', batch: batchNum, totalBatches, count: batch.length },
      tags: ['type-index'],
    });
  }

  return chunks;
}
|
|
489
|
+
|
|
490
|
+
/**
 * Generate one `file:<path>` chunk per source file that exports types.
 *
 * File-level granularity is what makes semantic search precise: a query for
 * a single type name matches its own small chunk instead of being diluted
 * across a large batched index. Each chunk records the project, directory,
 * language, and every exported type (kind, extends/implements clauses,
 * MikroORM-entity flag), plus tags for filtering (entity, interface,
 * service, route, middleware).
 */
function generateFileEntries(typesByFile) {
  const chunks = [];

  for (const [filePath, types] of Object.entries(typesByFile)) {
    if (types.length === 0) continue;

    const project = getProjectName(filePath);
    const dir = getDirectory(filePath);
    const dirDesc = getDirDescription(dir);
    const lang = detectLanguage(filePath);
    const fileName = basename(filePath);

    // Searchable body: header, provenance, then one line per exported type.
    let body = `# ${fileName} (${filePath})\n`;
    body += `Project: ${project} | Language: ${lang}\n`;
    if (dirDesc) body += `Directory: ${dirDesc}\n`;
    body += '\nExported types:\n';

    for (const type of types) {
      let entry = ` ${type.kind} ${type.name}`;
      if (type.isEntity) entry += ' [MikroORM entity]';
      if (type.bases) entry += ` extends ${type.bases}`;
      if (type.implements) entry += ` implements ${type.implements}`;
      body += entry + '\n';
    }

    // Tags for namespace-level filtering.
    const tags = ['file', project];
    if (types.some((t) => t.isEntity)) tags.push('entity');
    if (types.some((t) => t.kind === 'interface')) tags.push('interface');
    if (filePath.includes('/services/')) tags.push('service');
    if (filePath.includes('/routes/')) tags.push('route');
    if (filePath.includes('/middleware/')) tags.push('middleware');

    chunks.push({
      key: `file:${filePath}`,
      content: body.trim(),
      metadata: {
        kind: 'file-detail',
        filePath,
        project,
        directory: dir,
        language: lang,
        typeCount: types.length,
        hasEntities: types.some((t) => t.isEntity),
        typeNames: types.map((t) => t.name),
      },
      tags,
    });
  }

  return chunks;
}
|
|
555
|
+
|
|
556
|
+
// ---------------------------------------------------------------------------
|
|
557
|
+
// Main
|
|
558
|
+
// ---------------------------------------------------------------------------
|
|
559
|
+
|
|
560
|
+
// Aggregate extracted types into the per-project / per-directory / per-file
// groupings that the chunk generators consume.
function collectTypes(files) {
  const allTypes = [];
  const filesByProject = {};
  const typesByProject = {};
  const typesByDir = {};
  const typesByFile = {};

  for (const file of files) {
    const project = getProjectName(file);
    if (!filesByProject[project]) filesByProject[project] = [];
    filesByProject[project].push(file);

    const types = extractTypes(file);
    if (types.length > 0) typesByFile[file] = types;

    for (const t of types) {
      allTypes.push(t);
      if (!typesByProject[project]) typesByProject[project] = [];
      typesByProject[project].push(t);
      const dir = getDirectory(t.file);
      if (!typesByDir[dir]) typesByDir[dir] = [];
      typesByDir[dir].push(t);
    }
  }

  return { allTypes, filesByProject, typesByProject, typesByDir, typesByFile };
}

// Entry point: enumerate files, skip when unchanged, regenerate all chunk
// types, replace the code-map namespace, and kick off embeddings.
async function main() {
  const startTime = Date.now();
  log(`Project root: ${projectRoot}`);

  // 1. Enumerate tracked source files.
  log('Enumerating source files via git ls-files...');
  const files = getSourceFiles();
  log(`Found ${files.length} source files`);
  if (files.length === 0) {
    log('No source files found — nothing to index');
    return;
  }

  const currentHash = computeFileListHash(files);

  // --stats: report and exit without writing anything.
  if (statsOnly) {
    const db = await getDb();
    const count = countNamespace(db);
    db.close();
    log(`Stats: ${files.length} source files, ${count} chunks in code-map namespace`);
    log(`File list hash: ${currentHash.slice(0, 12)}...`);
    return;
  }

  // 2. Incremental skip only when the hash matches AND the DB actually has
  //    chunks (guards against a deleted or emptied database).
  if (isUnchanged(currentHash)) {
    const db = await getDb();
    const count = countNamespace(db);
    db.close();
    if (count > 0) {
      log(`Skipping — file list unchanged (${count} chunks in DB, hash ${currentHash.slice(0, 12)}...)`);
      return;
    }
    log('File list unchanged but no chunks in DB — forcing regeneration');
  }

  // 3. Extract and group type declarations.
  log('Extracting type declarations...');
  const { allTypes, filesByProject, typesByProject, typesByDir, typesByFile } = collectTypes(files);
  log(`Extracted ${allTypes.length} type declarations from ${Object.keys(filesByProject).length} projects`);
  log(`Files with exported types: ${Object.keys(typesByFile).length}`);

  // 4. Generate every chunk type.
  log('Generating chunks...');
  const projectChunks = generateProjectOverviews(filesByProject, typesByProject);
  const dirChunks = generateDirectoryDetails(typesByDir);
  const ifaceChunks = generateInterfaceMaps(allTypes);
  const typeIdxChunks = generateTypeIndex(allTypes);
  const fileChunks = generateFileEntries(typesByFile);
  const allChunks = [...projectChunks, ...dirChunks, ...ifaceChunks, ...typeIdxChunks, ...fileChunks];

  log(`Generated ${allChunks.length} chunks:`);
  log(` Project overviews: ${projectChunks.length}`);
  log(` Directory details: ${dirChunks.length}`);
  log(` Interface maps: ${ifaceChunks.length}`);
  log(` Type index: ${typeIdxChunks.length}`);
  log(` File entries: ${fileChunks.length} (NEW — file-level granularity)`);

  // 5. Replace the namespace wholesale and persist to disk.
  log('Writing to memory database...');
  const db = await getDb();
  deleteNamespace(db);
  for (const chunk of allChunks) {
    storeEntry(db, chunk.key, chunk.content, chunk.metadata, chunk.tags);
  }
  saveDb(db);
  db.close();

  // 6. Record the hash so the next run can skip when nothing changed.
  writeFileSync(HASH_CACHE_PATH, currentHash, 'utf-8');

  const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
  log(`Done in ${elapsed}s — ${allChunks.length} chunks written to code-map namespace`);

  // 7. Fire-and-forget embedding generation: detached, output ignored,
  //    unref'd so this process can exit without waiting.
  if (!skipEmbeddings) {
    const embedScript = resolve(projectRoot, '.claude/scripts/build-embeddings.mjs');
    if (existsSync(embedScript)) {
      log('Starting background embedding generation...');
      const proc = spawn('node', [embedScript, '--namespace', 'code-map'], {
        cwd: projectRoot,
        stdio: 'ignore',
        detached: true,
        shell: false,
        windowsHide: true,
      });
      proc.unref();
    }
  }
}

main().catch((err) => {
  console.error('[code-map] Fatal error:', err);
  process.exit(1);
});
|
package/bin/setup-project.mjs
CHANGED
|
@@ -34,6 +34,27 @@ const MARKER_START = '<!-- MOFLO:SUBAGENT-PROTOCOL:START -->';
|
|
|
34
34
|
const MARKER_END = '<!-- MOFLO:SUBAGENT-PROTOCOL:END -->';
|
|
35
35
|
|
|
36
36
|
const CLAUDE_MD_SECTION = `${MARKER_START}
|
|
37
|
+
## FIRST ACTION ON EVERY PROMPT: Search Memory
|
|
38
|
+
|
|
39
|
+
Your first tool call for every new user prompt MUST be a memory search. Do this BEFORE Glob, Grep, Read, or any file exploration. No exceptions.
|
|
40
|
+
|
|
41
|
+
WHY: Memory contains curated solutions, patterns, and architectural context from previous work. Without it, you will miss existing solutions, repeat mistakes that were already solved, and waste time re-discovering what is already known. Memory search is faster than file scanning.
|
|
42
|
+
|
|
43
|
+
HOW: Use ToolSearch to load \`mcp__claude-flow__memory_search\`, then call it with a query describing your task. If MCP is unavailable, use:
|
|
44
|
+
\`node .claude/scripts/semantic-search.mjs "[task description]" --namespace guidance\`
|
|
45
|
+
|
|
46
|
+
### Namespaces to search:
|
|
47
|
+
|
|
48
|
+
| Namespace | When to search | What it returns |
|
|
49
|
+
|-----------|---------------|-----------------|
|
|
50
|
+
| \`guidance\` | Always — understanding patterns, rules, conventions | Guidance docs, coding rules, domain context |
|
|
51
|
+
| \`patterns\` | Always — prior solutions and implementation patterns | Learned patterns from previous task execution |
|
|
52
|
+
| \`code-map\` | Finding where code lives (files, types, services) | Type-to-file mappings, directory contents, project overviews |
|
|
53
|
+
|
|
54
|
+
**Always search both \`guidance\` and \`patterns\` namespaces.** The \`patterns\` namespace contains solutions to problems already solved — skipping it means repeating past mistakes.
|
|
55
|
+
|
|
56
|
+
For **codebase navigation** (finding where a type/service/component lives), also search the \`code-map\` namespace.
|
|
57
|
+
|
|
37
58
|
## Subagent Protocol (MoFlo)
|
|
38
59
|
|
|
39
60
|
All subagents MUST read \`.claude/guidance/moflo-bootstrap.md\` before starting any work.
|
package/package.json
CHANGED
|
@@ -1,12 +1,13 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "moflo",
|
|
3
|
-
"version": "4.
|
|
3
|
+
"version": "4.3.0",
|
|
4
4
|
"description": "MoFlo — AI agent orchestration for Claude Code. Forked from ruflo/claude-flow with patches applied to source, plus feature-level orchestration.",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"type": "module",
|
|
7
7
|
"bin": {
|
|
8
8
|
"moflo": "bin/cli.js",
|
|
9
9
|
"moflo-setup": "bin/setup-project.mjs",
|
|
10
|
+
"moflo-codemap": "bin/generate-code-map.mjs",
|
|
10
11
|
"claude-flow": "bin/cli.js"
|
|
11
12
|
},
|
|
12
13
|
"homepage": "https://github.com/eric-cielo/moflo#readme",
|