monoai 0.2.9 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -41
- package/dist/commands/login.js +6 -6
- package/dist/commands/push.js +344 -101
- package/dist/utils/ast-extractor.js +410 -10
- package/dist/utils/config.js +3 -2
- package/package.json +1 -2
- package/scripts/cognee_bridge.py +0 -127
package/README.md
CHANGED
|
@@ -1,52 +1,44 @@
|
|
|
1
1
|
# MonoAI CLI
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
Builds a structural map of your TypeScript codebase and syncs it to monoai.space.
|
|
4
|
+
Analyzes alignment between your code and PRD, then surfaces the issues you should focus on next.
|
|
4
5
|
|
|
5
|
-
##
|
|
6
|
+
## Product Flow
|
|
6
7
|
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
8
|
+
```
|
|
9
|
+
monoai push → AST + KG + change signals
|
|
10
|
+
PRD + structure → grounded issues
|
|
11
|
+
```
|
|
11
12
|
|
|
12
|
-
### 2. Relational Knowledge Graph (via Cognee)
|
|
13
|
-
We transform individual code entities into a connected graph. This maps the logical flow of your system, ensuring that every product intent is not just present, but structurally integrated and executable within the codebase.
|
|
14
13
|
|
|
15
|
-
##
|
|
14
|
+
## Why a Knowledge Graph
|
|
16
15
|
|
|
17
|
-
|
|
18
|
-
|
|
16
|
+
For issues to be grounded in actual code structure,
|
|
17
|
+
you need **structured data refined from code** — not raw files.
|
|
19
18
|
|
|
20
|
-
|
|
21
|
-
- **
|
|
22
|
-
- **
|
|
19
|
+
`monoai push` builds that map:
|
|
20
|
+
- **AST extraction** (ts-morph) — functions, classes, interfaces, dependencies
|
|
21
|
+
- **Knowledge graph** — relationships between code entities (calls, imports, inheritance)
|
|
22
|
+
- **Change signals** — added, modified, or removed functions, classes, and files
|
|
23
23
|
|
|
24
|
-
|
|
24
|
+
The LLM reasons on top of this map.
|
|
25
25
|
|
|
26
|
-
### Requirements
|
|
27
|
-
* Language: Official support for **TypeScript** (.ts, .tsx) codebases.
|
|
28
|
-
* Environment: Node.js 18+ and Git-based projects.
|
|
29
26
|
|
|
30
|
-
|
|
31
|
-
Execute immediately via `npx` without any installation.
|
|
27
|
+
## Usage
|
|
32
28
|
|
|
33
29
|
```bash
|
|
34
|
-
#
|
|
35
|
-
npx monoai
|
|
36
|
-
|
|
37
|
-
# Extract structure & Git history to build foundation for issue derivation
|
|
38
|
-
npx monoai push
|
|
30
|
+
npx monoai login # first time only
|
|
31
|
+
npx monoai push # build and sync the structural map
|
|
39
32
|
```
|
|
40
33
|
|
|
41
|
-
|
|
34
|
+
**Requirements:** TypeScript (.ts, .tsx) · Node.js 18+ · Git
|
|
42
35
|
|
|
43
|
-
`monoai push` reads both `.gitignore` and `.monoaiignore`.
|
|
44
|
-
If `.monoaiignore` does not exist, MonoAI creates a starter template automatically.
|
|
45
36
|
|
|
46
|
-
|
|
37
|
+
## .monoaiignore
|
|
38
|
+
|
|
39
|
+
Paths to exclude from AST scanning. Auto-generated on first push.
|
|
47
40
|
|
|
48
41
|
```gitignore
|
|
49
|
-
# Example
|
|
50
42
|
node_modules
|
|
51
43
|
dist
|
|
52
44
|
build
|
|
@@ -56,19 +48,13 @@ coverage
|
|
|
56
48
|
*.log
|
|
57
49
|
```
|
|
58
50
|
|
|
59
|
-
## .monoaiwhitelist (optional, include-only)
|
|
60
51
|
|
|
61
|
-
|
|
62
|
-
When this file exists and has rules, only matched paths are scanned and uploaded.
|
|
52
|
+
## .monoaiwhitelist
|
|
63
53
|
|
|
64
|
-
|
|
54
|
+
Include-only filter — use this to scope a single app in a monorepo.
|
|
65
55
|
|
|
66
|
-
```
|
|
67
|
-
|
|
68
|
-
step4.vite-web-migration/**
|
|
56
|
+
```
|
|
57
|
+
my-app/**
|
|
69
58
|
```
|
|
70
59
|
|
|
71
|
-
|
|
72
|
-
- `.monoaiwhitelist` limits scope first (include-only).
|
|
73
|
-
- `.gitignore` and `.monoaiignore` still exclude paths inside that scope.
|
|
74
|
-
|
|
60
|
+
Only matched paths are scanned. `.gitignore` and `.monoaiignore` still apply inside.
|
package/dist/commands/login.js
CHANGED
|
@@ -6,9 +6,9 @@ import open from 'open';
|
|
|
6
6
|
import ora from 'ora';
|
|
7
7
|
const config = new Conf({ projectName: 'monoai' });
|
|
8
8
|
// Production URLs (single source of truth)
|
|
9
|
-
const
|
|
10
|
-
process.env.
|
|
11
|
-
'https://
|
|
9
|
+
const BACKEND_URL = process.env.MONOAI_API_URL ||
|
|
10
|
+
process.env.MONOAI_CONVEX_SITE_URL ||
|
|
11
|
+
'https://monoai-backend-653798811703.us-central1.run.app';
|
|
12
12
|
const WEB_URL = 'https://monoai.space';
|
|
13
13
|
export const loginCommand = new Command('login')
|
|
14
14
|
.description('Sign in to MonoAI')
|
|
@@ -17,7 +17,7 @@ export const loginCommand = new Command('login')
|
|
|
17
17
|
try {
|
|
18
18
|
// 1. Init Session
|
|
19
19
|
const initSpinner = ora('Preparing secure sign-in...').start();
|
|
20
|
-
const initRes = await axios.post(`${
|
|
20
|
+
const initRes = await axios.post(`${BACKEND_URL}/cli/auth/init`, {
|
|
21
21
|
deviceDescription: process.platform
|
|
22
22
|
});
|
|
23
23
|
initSpinner.succeed();
|
|
@@ -34,13 +34,13 @@ export const loginCommand = new Command('login')
|
|
|
34
34
|
const maxAttempts = 60; // 2 minutes (2s * 60)
|
|
35
35
|
const pollInterval = setInterval(async () => {
|
|
36
36
|
try {
|
|
37
|
-
const pollRes = await axios.post(`${
|
|
37
|
+
const pollRes = await axios.post(`${BACKEND_URL}/cli/auth/poll`, { tempCode });
|
|
38
38
|
const { status, token, userId } = pollRes.data;
|
|
39
39
|
if (status === 'approved' && token) {
|
|
40
40
|
clearInterval(pollInterval);
|
|
41
41
|
config.set('auth_token', token);
|
|
42
42
|
config.set('user_id', userId);
|
|
43
|
-
config.set('
|
|
43
|
+
config.set('api_url', BACKEND_URL); // Store for future use
|
|
44
44
|
pollSpinner.succeed(chalk.green('✅ Sign-in complete.'));
|
|
45
45
|
console.log(chalk.dim(` Credentials saved to ${config.path}`));
|
|
46
46
|
process.exit(0);
|
package/dist/commands/push.js
CHANGED
|
@@ -6,7 +6,7 @@ import fs from 'fs';
|
|
|
6
6
|
import path from 'path';
|
|
7
7
|
import ignore from 'ignore';
|
|
8
8
|
import Conf from 'conf';
|
|
9
|
-
import { extractSkeleton } from '../utils/ast-extractor.js';
|
|
9
|
+
import { extractSkeleton, buildEdgesFromSkeleton, buildReverseImportIndex, detectCircularDeps, detectArchitectureSmells } from '../utils/ast-extractor.js';
|
|
10
10
|
const git = simpleGit();
|
|
11
11
|
const config = new Conf({ projectName: 'monoai' });
|
|
12
12
|
const MONOAIIGNORE_FILENAME = '.monoaiignore';
|
|
@@ -85,6 +85,98 @@ function buildChangedFileSignals(diffFiles) {
|
|
|
85
85
|
})
|
|
86
86
|
.filter(Boolean);
|
|
87
87
|
}
|
|
88
|
+
/**
|
|
89
|
+
* git diff --name-status로 삭제된 파일 목록 추출.
|
|
90
|
+
* simple-git diffSummary는 삭제 상태를 명시하지 않으므로 raw 명령 사용.
|
|
91
|
+
*/
|
|
92
|
+
async function detectDeletedFiles(fromRef, toRef = 'HEAD') {
|
|
93
|
+
try {
|
|
94
|
+
const raw = await git.raw(['diff', '--name-status', fromRef, toRef]);
|
|
95
|
+
const deleted = [];
|
|
96
|
+
for (const line of raw.split('\n')) {
|
|
97
|
+
const trimmed = line.trim();
|
|
98
|
+
if (trimmed.startsWith('D\t')) {
|
|
99
|
+
const fp = normalizeGitFilePath(trimmed.slice(2));
|
|
100
|
+
if (fp)
|
|
101
|
+
deleted.push(fp);
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
return deleted.slice(0, 12);
|
|
105
|
+
}
|
|
106
|
+
catch {
|
|
107
|
+
return [];
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
function extractChangedSymbolsFromSkeleton(oldSkeleton, newSkeleton) {
|
|
111
|
+
const symbols = [];
|
|
112
|
+
const hasOld = Object.keys(oldSkeleton || {}).length > 0;
|
|
113
|
+
// 신규/수정 심볼 탐지 (새 스냅샷 기준)
|
|
114
|
+
for (const [filePath, newData] of Object.entries(newSkeleton || {})) {
|
|
115
|
+
if (!newData || typeof newData !== 'object')
|
|
116
|
+
continue;
|
|
117
|
+
const oldData = hasOld ? oldSkeleton?.[filePath] : undefined;
|
|
118
|
+
// 함수
|
|
119
|
+
const oldFuncMap = new Map((oldData?.functions || [])
|
|
120
|
+
.filter((f) => f?.name)
|
|
121
|
+
.map((f) => [f.name, f]));
|
|
122
|
+
for (const fn of (newData.functions || [])) {
|
|
123
|
+
const fname = fn?.name;
|
|
124
|
+
if (!fname)
|
|
125
|
+
continue;
|
|
126
|
+
if (!hasOld || !oldFuncMap.has(fname)) {
|
|
127
|
+
symbols.push(`+fn:${fname}`);
|
|
128
|
+
}
|
|
129
|
+
else {
|
|
130
|
+
// 시그니처 변경: 파라미터 수 변화
|
|
131
|
+
const oldFn = oldFuncMap.get(fname);
|
|
132
|
+
const oldParamCount = (oldFn?.parameters || []).length;
|
|
133
|
+
const newParamCount = (fn?.parameters || []).length;
|
|
134
|
+
if (oldParamCount !== newParamCount) {
|
|
135
|
+
symbols.push(`~fn:${fname}`);
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
// 클래스
|
|
140
|
+
const oldClassMap = new Map((oldData?.classes || [])
|
|
141
|
+
.filter((c) => c?.name)
|
|
142
|
+
.map((c) => [c.name, c]));
|
|
143
|
+
for (const cls of (newData.classes || [])) {
|
|
144
|
+
const cname = cls?.name;
|
|
145
|
+
if (!cname)
|
|
146
|
+
continue;
|
|
147
|
+
if (!hasOld || !oldClassMap.has(cname)) {
|
|
148
|
+
symbols.push(`+class:${cname}`);
|
|
149
|
+
}
|
|
150
|
+
else {
|
|
151
|
+
// 메서드 수 변화 감지
|
|
152
|
+
const oldCls = oldClassMap.get(cname);
|
|
153
|
+
const oldMethodCount = (oldCls?.methods || []).length;
|
|
154
|
+
const newMethodCount = (cls?.methods || []).length;
|
|
155
|
+
if (oldMethodCount !== newMethodCount) {
|
|
156
|
+
symbols.push(`~class:${cname}`);
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
// 삭제된 심볼 탐지 (이전 스냅샷에는 있었지만 새 스냅샷에는 없는 파일)
|
|
162
|
+
if (hasOld) {
|
|
163
|
+
for (const [filePath, oldData] of Object.entries(oldSkeleton)) {
|
|
164
|
+
if (newSkeleton[filePath])
|
|
165
|
+
continue; // 여전히 존재
|
|
166
|
+
if (!oldData || typeof oldData !== 'object')
|
|
167
|
+
continue;
|
|
168
|
+
for (const fn of (oldData.functions || [])) {
|
|
169
|
+
if (fn?.name)
|
|
170
|
+
symbols.push(`-fn:${fn.name}`);
|
|
171
|
+
}
|
|
172
|
+
for (const cls of (oldData.classes || [])) {
|
|
173
|
+
if (cls?.name)
|
|
174
|
+
symbols.push(`-class:${cls.name}`);
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
return symbols.slice(0, 16);
|
|
179
|
+
}
|
|
88
180
|
function buildGraphInsightsFromChanges(files) {
|
|
89
181
|
if (!Array.isArray(files) || files.length === 0)
|
|
90
182
|
return [];
|
|
@@ -124,6 +216,106 @@ function buildGraphInsightsFromChanges(files) {
|
|
|
124
216
|
...extensionMix,
|
|
125
217
|
])).slice(0, 24);
|
|
126
218
|
}
|
|
219
|
+
function compactLine(value, max = 180) {
|
|
220
|
+
const normalized = String(value || '').replace(/\s+/g, ' ').trim();
|
|
221
|
+
if (!normalized)
|
|
222
|
+
return '';
|
|
223
|
+
if (normalized.length <= max)
|
|
224
|
+
return normalized;
|
|
225
|
+
return `${normalized.slice(0, max)}...`;
|
|
226
|
+
}
|
|
227
|
+
function summarizeRawSnippet(filePath) {
|
|
228
|
+
try {
|
|
229
|
+
const abs = path.resolve(process.cwd(), filePath);
|
|
230
|
+
if (!fs.existsSync(abs))
|
|
231
|
+
return '';
|
|
232
|
+
const raw = fs.readFileSync(abs, 'utf8');
|
|
233
|
+
const lines = raw
|
|
234
|
+
.split(/\r?\n/g)
|
|
235
|
+
.map((line) => line.trim())
|
|
236
|
+
.filter((line) => line.length > 0 && !line.startsWith('//') && !line.startsWith('*'))
|
|
237
|
+
.slice(0, 8)
|
|
238
|
+
.map((line) => compactLine(line, 120))
|
|
239
|
+
.filter(Boolean);
|
|
240
|
+
return lines.join(' / ');
|
|
241
|
+
}
|
|
242
|
+
catch {
|
|
243
|
+
return '';
|
|
244
|
+
}
|
|
245
|
+
}
|
|
246
|
+
function buildTargetRawInsights(skeleton, changedFiles, limit = 18) {
|
|
247
|
+
const byFile = new Map(Object.entries(skeleton || {}));
|
|
248
|
+
const seeds = changedFiles
|
|
249
|
+
.map((row) => normalizeGitFilePath(row.path))
|
|
250
|
+
.filter(Boolean)
|
|
251
|
+
.slice(0, 24);
|
|
252
|
+
const hop1Set = new Set();
|
|
253
|
+
const hop2Set = new Set();
|
|
254
|
+
const importListOf = (filePath) => {
|
|
255
|
+
const row = byFile.get(filePath);
|
|
256
|
+
const imports = Array.isArray(row?.fileImports) ? row.fileImports : [];
|
|
257
|
+
return imports
|
|
258
|
+
.map((v) => normalizeGitFilePath(String(v || '')))
|
|
259
|
+
.filter((v) => v.length > 0);
|
|
260
|
+
};
|
|
261
|
+
for (const seed of seeds) {
|
|
262
|
+
for (const dep of importListOf(seed))
|
|
263
|
+
hop1Set.add(dep);
|
|
264
|
+
}
|
|
265
|
+
for (const dep1 of hop1Set) {
|
|
266
|
+
for (const dep2 of importListOf(dep1))
|
|
267
|
+
hop2Set.add(dep2);
|
|
268
|
+
}
|
|
269
|
+
const orderedTargets = Array.from(new Set([
|
|
270
|
+
...seeds.slice(0, 12),
|
|
271
|
+
...Array.from(hop1Set).slice(0, 16),
|
|
272
|
+
...Array.from(hop2Set).slice(0, 16),
|
|
273
|
+
])).slice(0, Math.max(8, limit));
|
|
274
|
+
const rows = [];
|
|
275
|
+
for (const filePath of orderedTargets) {
|
|
276
|
+
const snippet = summarizeRawSnippet(filePath);
|
|
277
|
+
if (!snippet)
|
|
278
|
+
continue;
|
|
279
|
+
rows.push(`target_raw:${filePath} :: ${snippet}`);
|
|
280
|
+
if (rows.length >= limit)
|
|
281
|
+
break;
|
|
282
|
+
}
|
|
283
|
+
return rows;
|
|
284
|
+
}
|
|
285
|
+
async function fetchLastSyncedCommit(params) {
|
|
286
|
+
try {
|
|
287
|
+
const res = await axios.post(`${params.backendUrl}/cli/git-last-sync`, { branch: params.branch }, { headers: { Authorization: `Bearer ${params.token}` } });
|
|
288
|
+
const commitId = String(res?.data?.latest?.commitId || '').trim();
|
|
289
|
+
return commitId || null;
|
|
290
|
+
}
|
|
291
|
+
catch {
|
|
292
|
+
return null;
|
|
293
|
+
}
|
|
294
|
+
}
|
|
295
|
+
/**
|
|
296
|
+
* 백엔드에서 마지막 동기화 스냅샷의 skeleton(structure)을 fetch.
|
|
297
|
+
* oldSkeleton으로 삭제/수정 심볼 비교에 사용.
|
|
298
|
+
* 실패 시 {} 반환 (non-critical — 추가 심볼만 탐지).
|
|
299
|
+
*/
|
|
300
|
+
async function fetchLastSyncedSkeleton(params) {
|
|
301
|
+
try {
|
|
302
|
+
const res = await axios.post(`${params.backendUrl}/cli/git-last-skeleton`, { branch: params.branch }, {
|
|
303
|
+
headers: { Authorization: `Bearer ${params.token}` },
|
|
304
|
+
timeout: 8000,
|
|
305
|
+
});
|
|
306
|
+
const structure = res?.data?.structure;
|
|
307
|
+
if (typeof structure === 'string' && structure.trim()) {
|
|
308
|
+
return JSON.parse(structure);
|
|
309
|
+
}
|
|
310
|
+
if (typeof structure === 'object' && structure !== null) {
|
|
311
|
+
return structure;
|
|
312
|
+
}
|
|
313
|
+
return {};
|
|
314
|
+
}
|
|
315
|
+
catch {
|
|
316
|
+
return {};
|
|
317
|
+
}
|
|
318
|
+
}
|
|
127
319
|
export const pushCommand = new Command('push')
|
|
128
320
|
.description('Sync your codebase structure to MonoAI')
|
|
129
321
|
.option('-v, --verbose', 'Show internal pipeline logs')
|
|
@@ -147,54 +339,101 @@ export const pushCommand = new Command('push')
|
|
|
147
339
|
});
|
|
148
340
|
};
|
|
149
341
|
try {
|
|
150
|
-
console.log(chalk.blue('🏎️ Starting codebase sync...'));
|
|
151
342
|
// 0. Auth Check
|
|
152
343
|
const token = config.get('auth_token');
|
|
153
344
|
if (!token) {
|
|
154
|
-
console.error(chalk.red('
|
|
155
|
-
console.error(chalk.white(' npx monoai login'));
|
|
345
|
+
console.error(chalk.red('not signed in. run: npx monoai login'));
|
|
156
346
|
return;
|
|
157
347
|
}
|
|
158
348
|
const isRepo = await git.checkIsRepo();
|
|
159
349
|
if (!isRepo) {
|
|
160
|
-
console.error(chalk.red('
|
|
350
|
+
console.error(chalk.red('not a git repository.'));
|
|
161
351
|
return;
|
|
162
352
|
}
|
|
163
353
|
const { matcher: whitelistMatcher, ruleCount: whitelistRuleCount } = loadMonoaiWhitelist(process.cwd());
|
|
164
354
|
const isWhitelisted = (relativePath) => !whitelistMatcher || whitelistMatcher.ignores(normalizeGitFilePath(relativePath));
|
|
165
355
|
if (whitelistMatcher) {
|
|
166
|
-
|
|
356
|
+
logDetail(` whitelist ${whitelistRuleCount} rule${whitelistRuleCount > 1 ? 's' : ''}`);
|
|
167
357
|
}
|
|
358
|
+
const BACKEND_URL = process.env.MONOAI_API_URL ||
|
|
359
|
+
process.env.MONOAI_BACKEND_URL ||
|
|
360
|
+
config.get('api_url') ||
|
|
361
|
+
config.get('convex_url') ||
|
|
362
|
+
'https://monoai-backend-653798811703.us-central1.run.app';
|
|
168
363
|
// 1. Git Metadata (Zero-HITL Intent)
|
|
169
|
-
const { lastCommit, branch, changedScopes,
|
|
364
|
+
const { lastCommit, branch, changedScopes, changedFiles, commitMessages, deletedFiles } = await track('git metadata', async () => {
|
|
170
365
|
const log = await git.log({ maxCount: 1 });
|
|
171
366
|
const lastCommit = log.latest;
|
|
172
367
|
const branch = await git.revparse(['--abbrev-ref', 'HEAD']);
|
|
173
368
|
if (!lastCommit) {
|
|
174
369
|
throw new Error('No commits found.');
|
|
175
370
|
}
|
|
176
|
-
let
|
|
371
|
+
let changedRows = [];
|
|
372
|
+
let commitMessages = [];
|
|
373
|
+
let deletedFiles = [];
|
|
374
|
+
const maxDiffCommits = Math.max(1, Math.min(Number(process.env.MONOAI_DIFF_MAX_COMMITS || 30), 200));
|
|
375
|
+
const lastSyncedCommit = force
|
|
376
|
+
? null
|
|
377
|
+
: await fetchLastSyncedCommit({
|
|
378
|
+
backendUrl: BACKEND_URL,
|
|
379
|
+
token: String(token),
|
|
380
|
+
branch,
|
|
381
|
+
});
|
|
382
|
+
let fromRef = 'HEAD~1';
|
|
177
383
|
try {
|
|
178
|
-
|
|
179
|
-
|
|
384
|
+
let diffSummary = null;
|
|
385
|
+
if (lastSyncedCommit) {
|
|
386
|
+
try {
|
|
387
|
+
const rangeCountRaw = await git.raw(['rev-list', '--count', `${lastSyncedCommit}..HEAD`]);
|
|
388
|
+
const rangeCount = Number(String(rangeCountRaw || '').trim());
|
|
389
|
+
if (Number.isFinite(rangeCount) && rangeCount > maxDiffCommits) {
|
|
390
|
+
fromRef = `HEAD~${maxDiffCommits}`;
|
|
391
|
+
diffSummary = await git.diffSummary([fromRef, 'HEAD']);
|
|
392
|
+
}
|
|
393
|
+
else {
|
|
394
|
+
fromRef = lastSyncedCommit;
|
|
395
|
+
diffSummary = await git.diffSummary([`${lastSyncedCommit}..HEAD`]);
|
|
396
|
+
}
|
|
397
|
+
}
|
|
398
|
+
catch {
|
|
399
|
+
fromRef = 'HEAD~1';
|
|
400
|
+
diffSummary = await git.diffSummary(['HEAD~1', 'HEAD']);
|
|
401
|
+
}
|
|
402
|
+
}
|
|
403
|
+
else {
|
|
404
|
+
diffSummary = await git.diffSummary(['HEAD~1', 'HEAD']);
|
|
405
|
+
}
|
|
406
|
+
changedRows = buildChangedFileSignals(diffSummary?.files || []);
|
|
407
|
+
// 삭제된 파일 탐지 (git diff --name-status)
|
|
408
|
+
deletedFiles = await detectDeletedFiles(fromRef, 'HEAD');
|
|
409
|
+
// 다중 커밋 메시지 수집 (최대 10개)
|
|
410
|
+
try {
|
|
411
|
+
const commitLog = await git.log({ maxCount: 10 });
|
|
412
|
+
commitMessages = (commitLog.all || [])
|
|
413
|
+
.map(c => `${c.hash.slice(0, 7)}: ${c.message.split('\n')[0].slice(0, 100)}`)
|
|
414
|
+
.filter(Boolean);
|
|
415
|
+
}
|
|
416
|
+
catch {
|
|
417
|
+
commitMessages = [lastCommit.message.split('\n')[0].slice(0, 100)];
|
|
418
|
+
}
|
|
180
419
|
}
|
|
181
420
|
catch {
|
|
182
|
-
|
|
421
|
+
changedRows = [];
|
|
422
|
+
commitMessages = [];
|
|
423
|
+
deletedFiles = [];
|
|
183
424
|
}
|
|
184
|
-
|
|
185
|
-
const changedScopes = Array.from(new Set(
|
|
425
|
+
changedRows = changedRows.filter((row) => isWhitelisted(row.path));
|
|
426
|
+
const changedScopes = Array.from(new Set(changedRows
|
|
186
427
|
.flatMap((row) => [row.scope2, row.scope1])
|
|
187
428
|
.map((row) => String(row || "").trim())
|
|
188
429
|
.filter(Boolean))).slice(0, 24);
|
|
189
|
-
|
|
190
|
-
return { lastCommit, branch, changedScopes, graphInsights };
|
|
430
|
+
return { lastCommit, branch, changedScopes, changedFiles: changedRows, commitMessages, deletedFiles };
|
|
191
431
|
});
|
|
192
432
|
const shortCommitId = lastCommit.hash.substring(0, 7);
|
|
193
433
|
const snapshotId = `${branch}@${shortCommitId}`;
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
console.log(chalk.blue('🔍 Scanning codebase structure...'));
|
|
434
|
+
const commitSummary = lastCommit.message.split('\n')[0].slice(0, 72);
|
|
435
|
+
console.log(` → ${branch} ${shortCommitId} ${commitSummary}`);
|
|
436
|
+
console.log(` building structure...`);
|
|
198
437
|
const { skeleton } = await track('structure scan', async () => {
|
|
199
438
|
const ig = ignore();
|
|
200
439
|
const monoaiIgnorePath = path.join(process.cwd(), MONOAIIGNORE_FILENAME);
|
|
@@ -229,16 +468,82 @@ export const pushCommand = new Command('push')
|
|
|
229
468
|
}
|
|
230
469
|
};
|
|
231
470
|
scanDir(process.cwd());
|
|
232
|
-
const skeleton = extractSkeleton(filesToAnalyze);
|
|
233
|
-
|
|
471
|
+
const skeleton = extractSkeleton(filesToAnalyze, isWhitelisted);
|
|
472
|
+
logDetail(` scanned ${filesToAnalyze.length} files`);
|
|
234
473
|
return { skeleton };
|
|
235
474
|
});
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
475
|
+
// 3. KG 엣지 + 역방향 인덱스 + 아키텍처 분석
|
|
476
|
+
const kgEdges = buildEdgesFromSkeleton(skeleton);
|
|
477
|
+
// oldSkeleton fetch (삭제/수정 심볼 탐지를 위해 이전 스냅샷 가져오기)
|
|
478
|
+
const oldSkeleton = await fetchLastSyncedSkeleton({
|
|
479
|
+
backendUrl: BACKEND_URL,
|
|
480
|
+
token: String(token),
|
|
481
|
+
branch,
|
|
482
|
+
});
|
|
483
|
+
// 변경 심볼 (oldSkeleton 있으면 삭제/수정 탐지, 없으면 추가만)
|
|
484
|
+
const changedSymbols = extractChangedSymbolsFromSkeleton(oldSkeleton, skeleton);
|
|
485
|
+
// 역방향 import 인덱스 계산 (영향 범위 분석용)
|
|
486
|
+
const reverseIndex = buildReverseImportIndex(skeleton);
|
|
487
|
+
// 아키텍처 스멜 감지 (god file, high fan-in/fan-out)
|
|
488
|
+
const archSmells = detectArchitectureSmells(skeleton, reverseIndex);
|
|
489
|
+
// 순환 의존성 감지
|
|
490
|
+
const circularDeps = detectCircularDeps(kgEdges);
|
|
491
|
+
const circularSignals = circularDeps.map(c => {
|
|
492
|
+
const fileA = c.cycleA.split('/').pop() || c.cycleA;
|
|
493
|
+
const fileB = c.cycleB.split('/').pop() || c.cycleB;
|
|
494
|
+
return `circular_dep:${fileA}↔${fileB}`;
|
|
495
|
+
});
|
|
496
|
+
// 영향 범위 신호 (변경 파일의 직접 importers)
|
|
497
|
+
const impactSignals = [];
|
|
498
|
+
for (const cf of changedFiles.slice(0, 8)) {
|
|
499
|
+
const importers = reverseIndex.get(cf.path) || [];
|
|
500
|
+
if (importers.length > 0) {
|
|
501
|
+
const affected = importers.slice(0, 3).map(f => f.split('/').pop() || f).join(',');
|
|
502
|
+
impactSignals.push(`impact_radius:${cf.path.split('/').pop()} importers=${importers.length} affected=[${affected}]`);
|
|
503
|
+
}
|
|
504
|
+
}
|
|
505
|
+
// 삭제 파일 신호
|
|
506
|
+
const deletedFileSignals = deletedFiles
|
|
507
|
+
.filter(f => isWhitelisted(f))
|
|
508
|
+
.map(f => `deleted_file:${f}`);
|
|
509
|
+
// graphInsights 조립 (56개로 확장)
|
|
510
|
+
const graphInsights = Array.from(new Set([
|
|
511
|
+
...buildGraphInsightsFromChanges(changedFiles),
|
|
512
|
+
...changedSymbols.map(s => `changed_symbol:${s}`),
|
|
513
|
+
...archSmells,
|
|
514
|
+
...circularSignals,
|
|
515
|
+
...impactSignals,
|
|
516
|
+
...deletedFileSignals,
|
|
517
|
+
...buildTargetRawInsights(skeleton, changedFiles, 16),
|
|
518
|
+
])).slice(0, 56);
|
|
519
|
+
// stats 출력
|
|
520
|
+
const statParts = [
|
|
521
|
+
`${Object.keys(skeleton).length} files`,
|
|
522
|
+
`${kgEdges.length} edges`,
|
|
523
|
+
];
|
|
524
|
+
if (changedFiles.length > 0)
|
|
525
|
+
statParts.push(`${changedFiles.length} changed`);
|
|
526
|
+
if (changedSymbols.length > 0) {
|
|
527
|
+
const readable = changedSymbols.slice(0, 3).map(s => {
|
|
528
|
+
const [prefix, ...rest] = s.split(':');
|
|
529
|
+
const name = rest.join(':').split(':').pop() || rest.join(':');
|
|
530
|
+
if (prefix === '+fn' || prefix === '+class' || prefix === '+iface')
|
|
531
|
+
return `added ${name}`;
|
|
532
|
+
if (prefix === '-fn' || prefix === '-class' || prefix === '-iface')
|
|
533
|
+
return `removed ${name}`;
|
|
534
|
+
if (prefix === '~fn' || prefix === '~class' || prefix === '~iface')
|
|
535
|
+
return `modified ${name}`;
|
|
536
|
+
return s;
|
|
537
|
+
});
|
|
538
|
+
statParts.push(readable.join(', '));
|
|
539
|
+
}
|
|
540
|
+
if (archSmells.length > 0)
|
|
541
|
+
statParts.push(`${archSmells.length} smell${archSmells.length > 1 ? 's' : ''}`);
|
|
542
|
+
if (circularDeps.length > 0)
|
|
543
|
+
statParts.push(`${circularDeps.length} circular`);
|
|
544
|
+
if (deletedFiles.length > 0)
|
|
545
|
+
statParts.push(`${deletedFiles.length} deleted`);
|
|
546
|
+
console.log(` ${statParts.join(' · ')}`);
|
|
242
547
|
// 4. Payload Construction
|
|
243
548
|
const payload = {
|
|
244
549
|
name: path.basename(process.cwd()),
|
|
@@ -246,17 +551,17 @@ export const pushCommand = new Command('push')
|
|
|
246
551
|
branch: branch,
|
|
247
552
|
commitId: shortCommitId,
|
|
248
553
|
commitMessage: lastCommit.message,
|
|
249
|
-
|
|
554
|
+
commitMessages: commitMessages.slice(0, 10),
|
|
555
|
+
structure: JSON.stringify(skeleton),
|
|
556
|
+
edges: kgEdges,
|
|
250
557
|
changedScopes,
|
|
251
558
|
graphInsights,
|
|
252
559
|
syncStatus: 'processing',
|
|
253
560
|
};
|
|
254
|
-
// 5. Send to
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
const response = await axios.post(`${CONVEX_SITE_URL}/cli/git-commit`, {
|
|
561
|
+
// 5. Send to Convex
|
|
562
|
+
const transmitResult = await track('upload', async () => {
|
|
563
|
+
const response = await axios.post(`${BACKEND_URL}/cli/git-commit`, {
|
|
258
564
|
codebaseData: payload,
|
|
259
|
-
force,
|
|
260
565
|
}, {
|
|
261
566
|
headers: {
|
|
262
567
|
'Authorization': `Bearer ${token}`
|
|
@@ -264,85 +569,23 @@ export const pushCommand = new Command('push')
|
|
|
264
569
|
});
|
|
265
570
|
return response.data;
|
|
266
571
|
});
|
|
267
|
-
if (transmitResult?.deduped) {
|
|
268
|
-
console.log(chalk.yellow('⚠ This commit was already synced. No new snapshot was created.'));
|
|
269
|
-
if (transmitResult.message && verbose) {
|
|
270
|
-
console.log(chalk.dim(` ${transmitResult.message}`));
|
|
271
|
-
}
|
|
272
|
-
const totalMs = Date.now() - totalStart;
|
|
273
|
-
console.log(chalk.blue(`⏱ Total time: ${(totalMs / 1000).toFixed(2)}s`));
|
|
274
|
-
return;
|
|
275
|
-
}
|
|
276
|
-
if (force) {
|
|
277
|
-
logDetail(' Force mode enabled: duplicate commit dedupe was bypassed.');
|
|
278
|
-
}
|
|
279
|
-
if (transmitResult?.graphJobId) {
|
|
280
|
-
const terminalStatuses = new Set(['done', 'error']);
|
|
281
|
-
const waitStart = Date.now();
|
|
282
|
-
const timeoutMs = 180000;
|
|
283
|
-
const pollIntervalMs = 2000;
|
|
284
|
-
console.log(chalk.blue('🧠 Building Knowledge Graph...'));
|
|
285
|
-
let lastStatus = 'uploaded';
|
|
286
|
-
let finalJob = null;
|
|
287
|
-
while (Date.now() - waitStart < timeoutMs) {
|
|
288
|
-
const res = await axios.post(`${CONVEX_SITE_URL}/cli/graph-job-status`, { jobId: transmitResult.graphJobId }, { headers: { Authorization: `Bearer ${token}` } });
|
|
289
|
-
const job = res.data?.job;
|
|
290
|
-
if (!job)
|
|
291
|
-
break;
|
|
292
|
-
finalJob = job;
|
|
293
|
-
if (job.status !== lastStatus && verbose) {
|
|
294
|
-
console.log(chalk.dim(` ↳ Knowledge Graph status: ${job.status}`));
|
|
295
|
-
}
|
|
296
|
-
lastStatus = job.status;
|
|
297
|
-
if (terminalStatuses.has(job.status)) {
|
|
298
|
-
break;
|
|
299
|
-
}
|
|
300
|
-
await new Promise((r) => setTimeout(r, pollIntervalMs));
|
|
301
|
-
}
|
|
302
|
-
const waitMs = Date.now() - waitStart;
|
|
303
|
-
stageTimes.push({ stage: 'knowledge graph wait', ms: waitMs });
|
|
304
|
-
logDetail(` ⏱ knowledge graph wait: ${(waitMs / 1000).toFixed(2)}s`);
|
|
305
|
-
if (finalJob) {
|
|
306
|
-
const fmt = (v) => (typeof v === 'number' ? `${(v / 1000).toFixed(2)}s` : 'n/a');
|
|
307
|
-
if (verbose) {
|
|
308
|
-
console.log(chalk.blue('📊 Knowledge Graph timing'));
|
|
309
|
-
console.log(chalk.dim(` - queue wait: ${fmt(finalJob.queueWaitMs)}`));
|
|
310
|
-
console.log(chalk.dim(` - graph build: ${fmt(finalJob.cogneeMs)}`));
|
|
311
|
-
console.log(chalk.dim(` - callback: ${fmt(finalJob.callbackMs)}`));
|
|
312
|
-
console.log(chalk.dim(` - service total: ${fmt(finalJob.workerTotalMs)}`));
|
|
313
|
-
console.log(chalk.dim(` - total service time: ${fmt(finalJob.totalPipelineMs)}`));
|
|
314
|
-
}
|
|
315
|
-
if (finalJob.status === 'error' && finalJob.error) {
|
|
316
|
-
console.log(chalk.red(`❌ Could not build Knowledge Graph: ${finalJob.error}`));
|
|
317
|
-
}
|
|
318
|
-
if (finalJob.status === 'done') {
|
|
319
|
-
console.log(chalk.green('✅ Knowledge Graph is ready.'));
|
|
320
|
-
}
|
|
321
|
-
}
|
|
322
|
-
else {
|
|
323
|
-
console.log(chalk.yellow('⚠ Knowledge Graph status is unavailable. It may still be processing in the background.'));
|
|
324
|
-
}
|
|
325
|
-
}
|
|
326
|
-
console.log(chalk.green('✨ Sync complete. Check your dashboard for updated insights.'));
|
|
327
|
-
logDetail(` Message: ${lastCommit.message.split('\n')[0]}`);
|
|
328
572
|
const totalMs = Date.now() - totalStart;
|
|
329
|
-
console.log(
|
|
573
|
+
console.log(` pushed. (${(totalMs / 1000).toFixed(2)}s)`);
|
|
574
|
+
logDetail(` graphJobId: ${transmitResult?.graphJobId || 'none'}`);
|
|
330
575
|
}
|
|
331
576
|
catch (error) {
|
|
332
577
|
const totalMs = Date.now() - totalStart;
|
|
333
578
|
if (error.response?.status === 401) {
|
|
334
|
-
console.error(chalk.red('
|
|
335
|
-
console.error(chalk.white(' npx monoai login'));
|
|
579
|
+
console.error(chalk.red('auth expired. run: npx monoai login'));
|
|
336
580
|
}
|
|
337
581
|
else {
|
|
338
|
-
console.error(chalk.red(
|
|
582
|
+
console.error(chalk.red(`sync failed: ${error.message}`));
|
|
339
583
|
}
|
|
340
584
|
if (verbose && stageTimes.length > 0) {
|
|
341
|
-
console.log(chalk.yellow('\n⏱ Stage timing details'));
|
|
342
585
|
for (const item of stageTimes) {
|
|
343
|
-
console.log(chalk.dim(`
|
|
586
|
+
console.log(chalk.dim(` ${item.stage}: ${(item.ms / 1000).toFixed(2)}s`));
|
|
344
587
|
}
|
|
345
588
|
}
|
|
346
|
-
console.log(chalk.
|
|
589
|
+
console.log(chalk.dim(` (${(totalMs / 1000).toFixed(2)}s)`));
|
|
347
590
|
}
|
|
348
591
|
});
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
1
2
|
import { Project, SyntaxKind } from 'ts-morph';
|
|
2
3
|
import path from 'path';
|
|
3
4
|
// 🛡️ Security: Redaction Patterns
|
|
@@ -30,7 +31,98 @@ function toRepoRelativePath(rawPath) {
|
|
|
30
31
|
}
|
|
31
32
|
return normalized.replace(/^\.?\//, "");
|
|
32
33
|
}
|
|
33
|
-
|
|
34
|
+
function normalizePathToken(value) {
|
|
35
|
+
return String(value || '').replace(/\\/g, '/').replace(/^\.?\//, '').trim();
|
|
36
|
+
}
|
|
37
|
+
function isLikelySourceFile(value) {
|
|
38
|
+
const normalized = normalizePathToken(value);
|
|
39
|
+
if (!normalized)
|
|
40
|
+
return false;
|
|
41
|
+
if (normalized.includes('/'))
|
|
42
|
+
return true;
|
|
43
|
+
return /^[A-Za-z0-9_.-]+\.[A-Za-z0-9]+$/.test(normalized);
|
|
44
|
+
}
|
|
45
|
+
function compactText(value, max = 120) {
|
|
46
|
+
const normalized = String(value || '').replace(/\s+/g, ' ').trim();
|
|
47
|
+
if (!normalized)
|
|
48
|
+
return '';
|
|
49
|
+
if (normalized.length <= max)
|
|
50
|
+
return normalized;
|
|
51
|
+
return `${normalized.slice(0, max)}...`;
|
|
52
|
+
}
|
|
53
|
+
function uniqueStrings(values, maxItems, maxLength = 180) {
|
|
54
|
+
const seen = new Set();
|
|
55
|
+
const out = [];
|
|
56
|
+
for (const raw of values) {
|
|
57
|
+
const normalized = String(raw || '').trim();
|
|
58
|
+
if (!normalized || normalized.length > maxLength)
|
|
59
|
+
continue;
|
|
60
|
+
if (seen.has(normalized))
|
|
61
|
+
continue;
|
|
62
|
+
seen.add(normalized);
|
|
63
|
+
out.push(normalized);
|
|
64
|
+
if (out.length >= maxItems)
|
|
65
|
+
break;
|
|
66
|
+
}
|
|
67
|
+
return out;
|
|
68
|
+
}
|
|
69
|
+
function uniqueDependencyEdges(edges, maxItems = 12) {
|
|
70
|
+
const seen = new Set();
|
|
71
|
+
const out = [];
|
|
72
|
+
for (const edge of edges) {
|
|
73
|
+
const target = normalizePathToken(edge?.target || '');
|
|
74
|
+
const relationType = edge?.relationType;
|
|
75
|
+
if (!target || !relationType)
|
|
76
|
+
continue;
|
|
77
|
+
const key = `${relationType}::${target}`;
|
|
78
|
+
if (seen.has(key))
|
|
79
|
+
continue;
|
|
80
|
+
seen.add(key);
|
|
81
|
+
out.push({ target, relationType });
|
|
82
|
+
if (out.length >= maxItems)
|
|
83
|
+
break;
|
|
84
|
+
}
|
|
85
|
+
return out;
|
|
86
|
+
}
|
|
87
|
+
/**
 * Resolve a relative import specifier against the importing file and
 * map it to a repo-relative source path.
 *
 * Tries the bare path, common TS/JS extensions, then index files; the
 * first candidate that exists on disk and looks like a source file
 * wins. Falls back to the unresolved base path when nothing exists.
 * Non-relative specifiers (bare packages, aliases) resolve to ''.
 *
 * @param {string} fromFilePath - Absolute path of the importing file.
 * @param {string} moduleSpecifier - Import specifier text.
 * @returns {string} Repo-relative path, or '' when not a local source file.
 */
function resolveLocalImportPath(fromFilePath, moduleSpecifier) {
    const spec = String(moduleSpecifier || '').trim();
    if (!spec || !spec.startsWith('.')) {
        return '';
    }
    const base = path.resolve(path.dirname(fromFilePath), spec);
    const extensions = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'];
    // Candidate order matters: exact path, then extension variants,
    // then directory index files — mirroring Node/TS resolution.
    const candidates = [
        base,
        ...extensions.map((ext) => `${base}${ext}`),
        ...extensions.map((ext) => path.join(base, `index${ext}`)),
    ];
    for (const candidate of candidates) {
        if (!fs.existsSync(candidate)) {
            continue;
        }
        const resolved = toRepoRelativePath(candidate);
        if (isLikelySourceFile(resolved)) {
            return resolved;
        }
    }
    const fallback = toRepoRelativePath(base);
    return isLikelySourceFile(fallback) ? fallback : '';
}
|
|
118
|
+
/**
 * Mask any text matching the module's SECRET_PATTERNS with a
 * redaction placeholder before content is included in a payload.
 *
 * @param {string} raw - Raw file content.
 * @returns {string} Content with secret-looking spans replaced.
 */
function redactSecrets(raw) {
    return SECRET_PATTERNS.reduce(
        (text, pattern) => text.replace(pattern, '[REDACTED_SECRET]'),
        raw,
    );
}
|
|
125
|
+
export function extractSkeleton(filePaths, isWhitelisted) {
|
|
34
126
|
const project = new Project();
|
|
35
127
|
// 🛡️ Security: File Filter
|
|
36
128
|
const safePaths = filePaths.filter(p => {
|
|
@@ -74,56 +166,237 @@ export function extractSkeleton(filePaths) {
|
|
|
74
166
|
functions: [],
|
|
75
167
|
classes: [],
|
|
76
168
|
interfaces: [],
|
|
77
|
-
types: []
|
|
169
|
+
types: [],
|
|
170
|
+
fileImports: [],
|
|
171
|
+
rawHints: [],
|
|
172
|
+
};
|
|
173
|
+
const importAliasToTarget = new Map();
|
|
174
|
+
const fileImportTargets = [];
|
|
175
|
+
const sourceFilePathAbs = sourceFile.getFilePath();
|
|
176
|
+
sourceFile.getImportDeclarations().forEach((imp) => {
|
|
177
|
+
const moduleSpecifier = imp.getModuleSpecifierValue();
|
|
178
|
+
const resolved = resolveLocalImportPath(sourceFilePathAbs, moduleSpecifier);
|
|
179
|
+
if (!resolved)
|
|
180
|
+
return;
|
|
181
|
+
const relationType = imp.isTypeOnly() ? 'type_ref' : 'imports';
|
|
182
|
+
fileImportTargets.push(resolved);
|
|
183
|
+
const defaultImport = imp.getDefaultImport();
|
|
184
|
+
if (defaultImport) {
|
|
185
|
+
importAliasToTarget.set(defaultImport.getText(), { target: resolved, relationType });
|
|
186
|
+
}
|
|
187
|
+
const namespaceImport = imp.getNamespaceImport();
|
|
188
|
+
if (namespaceImport) {
|
|
189
|
+
importAliasToTarget.set(namespaceImport.getText(), { target: resolved, relationType });
|
|
190
|
+
}
|
|
191
|
+
for (const named of imp.getNamedImports()) {
|
|
192
|
+
const alias = named.getAliasNode()?.getText() || named.getName();
|
|
193
|
+
if (!alias)
|
|
194
|
+
continue;
|
|
195
|
+
importAliasToTarget.set(alias, { target: resolved, relationType });
|
|
196
|
+
}
|
|
197
|
+
});
|
|
198
|
+
skeleton.fileImports = uniqueStrings(fileImportTargets, 40, 180);
|
|
199
|
+
const localFunctionNames = new Set(sourceFile
|
|
200
|
+
.getFunctions()
|
|
201
|
+
.map((fn) => String(fn.getName() || '').trim())
|
|
202
|
+
.filter(Boolean));
|
|
203
|
+
const collectNodeDependencies = (node, nodeText) => {
|
|
204
|
+
const deps = [];
|
|
205
|
+
const compactNodeText = String(nodeText || '').slice(0, 12000);
|
|
206
|
+
for (const [alias, meta] of importAliasToTarget.entries()) {
|
|
207
|
+
const aliasPattern = new RegExp(`\\b${alias.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`);
|
|
208
|
+
if (aliasPattern.test(compactNodeText)) {
|
|
209
|
+
deps.push({ target: meta.target, relationType: meta.relationType });
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
for (const callExpr of node.getDescendantsOfKind(SyntaxKind.CallExpression)) {
|
|
213
|
+
const exprText = String(callExpr.getExpression().getText() || '');
|
|
214
|
+
const rootName = exprText.split('.')[0]?.trim();
|
|
215
|
+
if (!rootName)
|
|
216
|
+
continue;
|
|
217
|
+
const imported = importAliasToTarget.get(rootName);
|
|
218
|
+
if (imported) {
|
|
219
|
+
deps.push({ target: imported.target, relationType: 'calls' });
|
|
220
|
+
continue;
|
|
221
|
+
}
|
|
222
|
+
if (localFunctionNames.has(rootName)) {
|
|
223
|
+
deps.push({ target: `${filePath}:${rootName}`, relationType: 'calls' });
|
|
224
|
+
}
|
|
225
|
+
}
|
|
226
|
+
for (const typeRef of node.getDescendantsOfKind(SyntaxKind.TypeReference)) {
|
|
227
|
+
const typeText = String(typeRef.getTypeName().getText() || '');
|
|
228
|
+
const rootName = typeText.split('.')[0]?.trim();
|
|
229
|
+
if (!rootName)
|
|
230
|
+
continue;
|
|
231
|
+
const imported = importAliasToTarget.get(rootName);
|
|
232
|
+
if (imported) {
|
|
233
|
+
deps.push({ target: imported.target, relationType: 'type_ref' });
|
|
234
|
+
}
|
|
235
|
+
}
|
|
236
|
+
return uniqueDependencyEdges(deps, 16);
|
|
78
237
|
};
|
|
79
|
-
// Extract Functions
|
|
238
|
+
// Extract Functions (function declarations + exported arrow/const functions)
|
|
239
|
+
const asyncFunctionNames = [];
|
|
80
240
|
sourceFile.getFunctions().forEach(f => {
|
|
81
241
|
if (f.isExported()) {
|
|
242
|
+
const deps = collectNodeDependencies(f, f.getText());
|
|
243
|
+
const isAsync = f.isAsync();
|
|
244
|
+
const fname = f.getName();
|
|
245
|
+
if (isAsync && fname)
|
|
246
|
+
asyncFunctionNames.push(fname);
|
|
82
247
|
skeleton.functions.push({
|
|
83
|
-
name:
|
|
248
|
+
name: fname,
|
|
84
249
|
parameters: f.getParameters().map(p => ({
|
|
85
250
|
name: p.getName(),
|
|
86
251
|
type: p.getType().getText()
|
|
87
252
|
})),
|
|
88
253
|
returnType: f.getReturnType().getText(),
|
|
89
|
-
jsDoc: f.getJsDocs().map(d => d.getCommentText()).join('\n')
|
|
254
|
+
jsDoc: f.getJsDocs().map(d => d.getCommentText()).join('\n'),
|
|
255
|
+
dependencies: deps,
|
|
256
|
+
isAsync,
|
|
257
|
+
modifiers: f.getModifiers().map(m => m.getText()),
|
|
90
258
|
});
|
|
91
259
|
}
|
|
92
260
|
});
|
|
261
|
+
// Arrow functions / const fn (exported variable declarations)
|
|
262
|
+
sourceFile.getVariableStatements().forEach(stmt => {
|
|
263
|
+
if (!stmt.isExported())
|
|
264
|
+
return;
|
|
265
|
+
stmt.getDeclarations().forEach(decl => {
|
|
266
|
+
const init = decl.getInitializer();
|
|
267
|
+
if (!init)
|
|
268
|
+
return;
|
|
269
|
+
const kind = init.getKind();
|
|
270
|
+
const isArrow = kind === SyntaxKind.ArrowFunction;
|
|
271
|
+
const isFuncExpr = kind === SyntaxKind.FunctionExpression;
|
|
272
|
+
if (!isArrow && !isFuncExpr)
|
|
273
|
+
return;
|
|
274
|
+
const fname = decl.getName();
|
|
275
|
+
if (!fname)
|
|
276
|
+
return;
|
|
277
|
+
const fn = init;
|
|
278
|
+
const isAsync = typeof fn.isAsync === 'function' ? fn.isAsync() : false;
|
|
279
|
+
if (isAsync)
|
|
280
|
+
asyncFunctionNames.push(fname);
|
|
281
|
+
const params = typeof fn.getParameters === 'function'
|
|
282
|
+
? fn.getParameters().map((p) => ({ name: p.getName(), type: p.getType().getText() }))
|
|
283
|
+
: [];
|
|
284
|
+
const returnType = typeof fn.getReturnType === 'function' ? fn.getReturnType().getText() : '';
|
|
285
|
+
const deps = collectNodeDependencies(init, init.getText());
|
|
286
|
+
skeleton.functions.push({
|
|
287
|
+
name: fname,
|
|
288
|
+
parameters: params,
|
|
289
|
+
returnType,
|
|
290
|
+
jsDoc: stmt.getJsDocs().map((d) => d.getCommentText()).join('\n'),
|
|
291
|
+
dependencies: deps,
|
|
292
|
+
isAsync,
|
|
293
|
+
modifiers: stmt.getModifiers().map(m => m.getText()),
|
|
294
|
+
});
|
|
295
|
+
});
|
|
296
|
+
});
|
|
297
|
+
if (asyncFunctionNames.length > 0) {
|
|
298
|
+
skeleton.asyncFunctions = asyncFunctionNames.slice(0, 12);
|
|
299
|
+
}
|
|
93
300
|
// Extract Classes
|
|
301
|
+
const classFieldSummaries = [];
|
|
94
302
|
sourceFile.getClasses().forEach(c => {
|
|
95
303
|
if (c.isExported()) {
|
|
304
|
+
const deps = collectNodeDependencies(c, c.getText());
|
|
305
|
+
const extendsNode = c.getExtends();
|
|
306
|
+
if (extendsNode) {
|
|
307
|
+
const baseName = String(extendsNode.getExpression().getText() || '').split('.')[0]?.trim();
|
|
308
|
+
const imported = baseName ? importAliasToTarget.get(baseName) : undefined;
|
|
309
|
+
if (imported)
|
|
310
|
+
deps.push({ target: imported.target, relationType: 'extends' });
|
|
311
|
+
}
|
|
312
|
+
for (const impl of c.getImplements()) {
|
|
313
|
+
const implName = String(impl.getExpression().getText() || '').split('.')[0]?.trim();
|
|
314
|
+
const imported = implName ? importAliasToTarget.get(implName) : undefined;
|
|
315
|
+
if (imported)
|
|
316
|
+
deps.push({ target: imported.target, relationType: 'implements' });
|
|
317
|
+
}
|
|
318
|
+
const cname = c.getName() || '';
|
|
319
|
+
// 클래스 프로퍼티(필드) 추출
|
|
320
|
+
const properties = c.getProperties().map(p => {
|
|
321
|
+
const pname = p.getName();
|
|
322
|
+
const ptype = p.getType().getText();
|
|
323
|
+
const pmods = p.getModifiers().map(m => m.getText());
|
|
324
|
+
if (cname && pname) {
|
|
325
|
+
const modStr = pmods.filter(m => m !== 'public').join(' ');
|
|
326
|
+
classFieldSummaries.push(`${cname}.${pname}${modStr ? `[${modStr}]` : ''}: ${ptype.slice(0, 60)}`);
|
|
327
|
+
}
|
|
328
|
+
return { name: pname, type: ptype, modifiers: pmods };
|
|
329
|
+
}).slice(0, 10);
|
|
96
330
|
skeleton.classes.push({
|
|
97
|
-
name:
|
|
331
|
+
name: cname,
|
|
332
|
+
isAbstract: c.isAbstract(),
|
|
98
333
|
methods: c.getMethods().map(m => ({
|
|
99
334
|
name: m.getName(),
|
|
100
335
|
parameters: m.getParameters().map(p => ({
|
|
101
336
|
name: p.getName(),
|
|
102
337
|
type: p.getType().getText()
|
|
103
338
|
})),
|
|
104
|
-
returnType: m.getReturnType().getText()
|
|
339
|
+
returnType: m.getReturnType().getText(),
|
|
340
|
+
isStatic: m.isStatic(),
|
|
341
|
+
isAsync: m.isAsync(),
|
|
105
342
|
})),
|
|
106
|
-
|
|
343
|
+
properties,
|
|
344
|
+
jsDoc: c.getJsDocs().map(d => d.getCommentText()).join('\n'),
|
|
345
|
+
dependencies: uniqueDependencyEdges(deps, 20),
|
|
107
346
|
});
|
|
108
347
|
}
|
|
109
348
|
});
|
|
349
|
+
if (classFieldSummaries.length > 0) {
|
|
350
|
+
skeleton.classFields = classFieldSummaries.slice(0, 16);
|
|
351
|
+
}
|
|
110
352
|
// Extract Interfaces
|
|
353
|
+
const interfaceShapeSummaries = [];
|
|
111
354
|
sourceFile.getInterfaces().forEach(i => {
|
|
112
355
|
if (i.isExported()) {
|
|
356
|
+
const deps = collectNodeDependencies(i, i.getText());
|
|
357
|
+
const iname = i.getName();
|
|
358
|
+
const properties = i.getProperties().map(p => {
|
|
359
|
+
const pname = p.getName();
|
|
360
|
+
const ptype = p.getType().getText();
|
|
361
|
+
const optional = p.hasQuestionToken();
|
|
362
|
+
if (iname && pname) {
|
|
363
|
+
interfaceShapeSummaries.push(`${iname}.${pname}${optional ? '?' : ''}: ${ptype.slice(0, 60)}`);
|
|
364
|
+
}
|
|
365
|
+
return { name: pname, type: ptype, optional };
|
|
366
|
+
}).slice(0, 12);
|
|
113
367
|
skeleton.interfaces.push({
|
|
114
|
-
name:
|
|
115
|
-
jsDoc: i.getJsDocs().map(d => d.getCommentText()).join('\n')
|
|
368
|
+
name: iname,
|
|
369
|
+
jsDoc: i.getJsDocs().map(d => d.getCommentText()).join('\n'),
|
|
370
|
+
dependencies: deps,
|
|
371
|
+
properties,
|
|
116
372
|
});
|
|
117
373
|
}
|
|
118
374
|
});
|
|
375
|
+
if (interfaceShapeSummaries.length > 0) {
|
|
376
|
+
skeleton.interfaceShape = interfaceShapeSummaries.slice(0, 16);
|
|
377
|
+
}
|
|
119
378
|
// Extract Types
|
|
120
379
|
sourceFile.getTypeAliases().forEach(t => {
|
|
121
380
|
if (t.isExported()) {
|
|
381
|
+
const deps = collectNodeDependencies(t, t.getText());
|
|
122
382
|
skeleton.types.push({
|
|
123
383
|
name: t.getName(),
|
|
384
|
+
dependencies: deps,
|
|
124
385
|
});
|
|
125
386
|
}
|
|
126
387
|
});
|
|
388
|
+
const fullText = sourceFile.getFullText();
|
|
389
|
+
const rawHints = fullText
|
|
390
|
+
.split(/\r?\n/g)
|
|
391
|
+
.map((line) => compactText(line, 140))
|
|
392
|
+
.filter((line) => line.length >= 12)
|
|
393
|
+
.filter((line) => !line.startsWith('//') && !line.startsWith('*'))
|
|
394
|
+
.slice(0, 12);
|
|
395
|
+
skeleton.rawHints = uniqueStrings(rawHints, 8, 140);
|
|
396
|
+
// whitelist 파일만 raw_content 포함 (시크릿 제거 후 최대 3000자)
|
|
397
|
+
if (isWhitelisted && isWhitelisted(filePath)) {
|
|
398
|
+
skeleton.raw_content = redactSecrets(fullText).slice(0, 3000);
|
|
399
|
+
}
|
|
127
400
|
result[filePath] = skeleton;
|
|
128
401
|
});
|
|
129
402
|
// 🛡️ Security: Payload Size Limit (DoS Prevention)
|
|
@@ -133,3 +406,130 @@ export function extractSkeleton(filePaths) {
|
|
|
133
406
|
}
|
|
134
407
|
return result;
|
|
135
408
|
}
|
|
409
|
+
/**
 * Build knowledge-graph edges directly from an AST skeleton (port of
 * the app.py _build_graph logic). Produces deterministic edges from
 * user-side compute, without the Railway KG worker.
 *
 * Edge kinds:
 *  - file   → imported file   : 'imports'
 *  - file   → declared symbol : 'implemented_in'
 *  - symbol → dependency      : the edge's own relationType
 *    ('calls', 'type_ref', ...), defaulting to 'depends_on'.
 *
 * @param {Record<string, object>} skeleton - Per-file AST skeletons.
 * @returns {{sourceId: string, targetId: string, relationType: string,
 *           sourceType: string, targetType: string}[]} Deduplicated edges.
 */
export function buildEdgesFromSkeleton(skeleton) {
    const edges = [];
    const edgeKeys = new Set();
    function addEdge(sourceId, targetId, relationType, sourceType, targetType) {
        const sid = sourceId.trim();
        const tid = targetId.trim();
        if (!sid || !tid)
            return;
        // Dedupe on the full (source, relation, target) triple.
        const key = `${sid}::${relationType}::${tid}`;
        if (edgeKeys.has(key))
            return;
        edgeKeys.add(key);
        edges.push({ sourceId: sid, targetId: tid, relationType, sourceType, targetType });
    }
    for (const [filePath, sk] of Object.entries(skeleton)) {
        // `typeof null === 'object'`, so guard null explicitly to
        // avoid a TypeError on `sk.fileImports` below.
        if (!filePath || !sk || typeof sk !== 'object')
            continue;
        // File → imported file edges.
        for (const importPath of sk.fileImports || []) {
            if (!importPath)
                continue;
            addEdge(filePath, importPath, 'imports', 'code_file', 'code_file');
        }
        // File → symbol (implemented_in). Each entry carries its own
        // skeleton record so dependency edges come from the correct
        // symbol even when a function/class/interface/type share a
        // name (the old `.find` chain always picked the first match).
        const allSymbols = [
            ...(sk.functions || []).map((f) => ({ name: f?.name, data: f })),
            ...(sk.classes || []).map((c) => ({ name: c?.name, data: c })),
            ...(sk.interfaces || []).map((i) => ({ name: i?.name, data: i })),
            ...(sk.types || []).map((t) => ({ name: t?.name, data: t })),
        ];
        for (const sym of allSymbols) {
            if (!sym.name)
                continue;
            const symId = `${filePath}:${sym.name}`;
            addEdge(filePath, symId, 'implemented_in', 'code_file', 'code_symbol');
            // Symbol dependency edges (calls, type_ref, depends_on, ...).
            const deps = Array.isArray(sym.data?.dependencies) ? sym.data.dependencies : [];
            for (const dep of deps) {
                const target = typeof dep === 'string' ? dep : dep?.target;
                const rel = typeof dep === 'string' ? 'depends_on' : (dep?.relationType || 'depends_on');
                if (!target)
                    continue;
                // "<file>:<symbol>" ids contain a colon; bare file paths do not.
                const targetType = target.includes(':') ? 'code_symbol' : 'code_file';
                addEdge(symId, target, rel, 'code_symbol', targetType);
            }
        }
    }
    return edges;
}
|
|
466
|
+
/**
 * Compute the reverse import index from the skeleton.
 * Returns Map<targetFile, sourceFile[]> so "which files import this
 * file" is an O(1) lookup.
 *
 * @param {Record<string, object>} skeleton - Per-file AST skeletons.
 * @returns {Map<string, string[]>} Imported file → importing files.
 */
export function buildReverseImportIndex(skeleton) {
    const index = new Map();
    for (const [filePath, sk] of Object.entries(skeleton)) {
        // Guard null explicitly: `typeof null === 'object'` would let
        // a null entry through and crash on `sk.fileImports`.
        if (!filePath || !sk || typeof sk !== 'object')
            continue;
        for (const importPath of sk.fileImports || []) {
            if (!importPath)
                continue;
            if (!index.has(importPath))
                index.set(importPath, []);
            index.get(importPath).push(filePath);
        }
    }
    return index;
}
|
|
486
|
+
/**
 * Detect direct circular dependencies (A→B and B→A) among import
 * edges. Self-imports (A→A) are ignored — they would trivially match
 * their own reverse edge and be reported as a bogus cycle. Each pair
 * is reported once regardless of direction; at most 8 are returned.
 *
 * @param {{sourceId: string, targetId: string, relationType: string}[]} edges - KG edges.
 * @returns {{cycleA: string, cycleB: string}[]} Detected cycles (max 8).
 */
export function detectCircularDeps(edges) {
    const importEdges = edges.filter(e => e.relationType === 'imports');
    const edgeSet = new Set(importEdges.map(e => `${e.sourceId}::${e.targetId}`));
    const seen = new Set();
    const cycles = [];
    for (const e of importEdges) {
        // A self-edge matches its own reverse key; skip it.
        if (e.sourceId === e.targetId)
            continue;
        const reverseKey = `${e.targetId}::${e.sourceId}`;
        // Sorted key makes A→B and B→A map to the same cycle.
        const cycleKey = [e.sourceId, e.targetId].sort().join('::');
        if (edgeSet.has(reverseKey) && !seen.has(cycleKey)) {
            seen.add(cycleKey);
            cycles.push({ cycleA: e.sourceId, cycleB: e.targetId });
        }
    }
    return cycles.slice(0, 8);
}
|
|
505
|
+
/**
 * Detect architecture smells using the skeleton plus the reverse
 * import index. Returns signal strings for graphInsights:
 *  - arch_smell:god_file:<file> symbols=N    (too many symbols)
 *  - arch_smell:high_fan_in:<file> importers=N  (widely referenced)
 *  - arch_smell:high_fan_out:<file> imports=N   (depends on too much)
 * At most 6 signals are returned.
 *
 * @param {Record<string, object>} skeleton - Per-file AST skeletons.
 * @param {Map<string, string[]>} reverseIndex - From buildReverseImportIndex.
 * @returns {string[]} Smell signal strings (max 6).
 */
export function detectArchitectureSmells(skeleton, reverseIndex) {
    const smells = [];
    for (const [filePath, sk] of Object.entries(skeleton)) {
        // `typeof null === 'object'`; guard null so `sk.functions`
        // cannot throw on a null entry.
        if (!filePath || !sk || typeof sk !== 'object')
            continue;
        const shortName = filePath.split('/').pop() || filePath;
        // God file: more than 15 functions + classes.
        const symbolCount = (sk.functions?.length || 0) + (sk.classes?.length || 0);
        if (symbolCount > 15) {
            smells.push(`arch_smell:god_file:${shortName} symbols=${symbolCount}`);
        }
        // High fan-in: imported by more than 6 files (changes ripple widely).
        const importers = reverseIndex.get(filePath) || [];
        if (importers.length > 6) {
            smells.push(`arch_smell:high_fan_in:${shortName} importers=${importers.length}`);
        }
        // High fan-out: directly imports more than 10 files.
        const fanOut = (sk.fileImports || []).length;
        if (fanOut > 10) {
            smells.push(`arch_smell:high_fan_out:${shortName} imports=${fanOut}`);
        }
    }
    return smells.slice(0, 6);
}
|
package/dist/utils/config.js
CHANGED
|
@@ -5,15 +5,16 @@ const config = new Conf({
|
|
|
5
5
|
});
|
|
6
6
|
/**
 * Persist the CLI auth token and API base URL to the local config store.
 * @param {string} token - Auth token obtained from `monoai login`.
 * @param {string} url - API base URL used by subsequent commands.
 */
export const saveCredentials = (token, url) => {
    const entries = { authToken: token, apiUrl: url };
    for (const [key, value] of Object.entries(entries)) {
        config.set(key, value);
    }
};
|
|
10
10
|
/**
 * Read stored credentials from the local config store.
 * Falls back to the legacy `convexUrl` key for configs written by
 * older CLI versions.
 * @returns {{authToken: string|undefined, apiUrl: string|undefined}}
 */
export const getCredentials = () => {
    const authToken = config.get('authToken');
    const apiUrl = config.get('apiUrl') || config.get('convexUrl');
    return { authToken, apiUrl };
};
|
|
16
16
|
/**
 * Remove all stored credentials, including the legacy `convexUrl`
 * key written by older CLI versions.
 */
export const clearCredentials = () => {
    for (const key of ['authToken', 'apiUrl', 'convexUrl']) {
        config.delete(key);
    }
};
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "monoai",
|
|
3
3
|
"type": "module",
|
|
4
|
-
"version": "0.
|
|
4
|
+
"version": "0.4.0",
|
|
5
5
|
"description": "MonoAI CLI for syncing codebase history",
|
|
6
6
|
"main": "dist/index.js",
|
|
7
7
|
"bin": {
|
|
@@ -9,7 +9,6 @@
|
|
|
9
9
|
},
|
|
10
10
|
"files": [
|
|
11
11
|
"dist",
|
|
12
|
-
"scripts",
|
|
13
12
|
"README.md"
|
|
14
13
|
],
|
|
15
14
|
"scripts": {
|
package/scripts/cognee_bridge.py
DELETED
|
@@ -1,127 +0,0 @@
|
|
|
1
|
-
import sys
|
|
2
|
-
import json
|
|
3
|
-
import asyncio
|
|
4
|
-
import os
|
|
5
|
-
from typing import List, Dict
|
|
6
|
-
|
|
7
|
-
# Cognee 라이브러리 로드 시도
|
|
8
|
-
try:
|
|
9
|
-
import cognee
|
|
10
|
-
except ImportError:
|
|
11
|
-
# PoC 환경에서 패키지가 없을 경우 에러 메시지 반환
|
|
12
|
-
print(json.dumps({
|
|
13
|
-
"error": "cognee library not found. Please install it with 'pip install cognee' to enable graph-based analysis.",
|
|
14
|
-
"nodes": [],
|
|
15
|
-
"edges": []
|
|
16
|
-
}))
|
|
17
|
-
sys.exit(0)
|
|
18
|
-
|
|
19
|
-
def _normalize_files(ast_data: Dict):
|
|
20
|
-
# v1 format: {"files":[{"path":"...","items":[...]}]}
|
|
21
|
-
if isinstance(ast_data.get("files"), list):
|
|
22
|
-
return ast_data.get("files", [])
|
|
23
|
-
|
|
24
|
-
# current monoai ts-morph output: {"/abs/path.ts": {"functions":[],"classes":[]...}, ...}
|
|
25
|
-
files = []
|
|
26
|
-
for file_path, skeleton in ast_data.items():
|
|
27
|
-
if not isinstance(skeleton, dict):
|
|
28
|
-
continue
|
|
29
|
-
items = []
|
|
30
|
-
for key in ("functions", "classes", "interfaces", "types"):
|
|
31
|
-
for item in skeleton.get(key, []):
|
|
32
|
-
if not isinstance(item, dict):
|
|
33
|
-
continue
|
|
34
|
-
items.append({
|
|
35
|
-
"type": key[:-1] if key.endswith("s") else key,
|
|
36
|
-
"name": item.get("name") or "anonymous",
|
|
37
|
-
"dependencies": []
|
|
38
|
-
})
|
|
39
|
-
files.append({
|
|
40
|
-
"path": file_path,
|
|
41
|
-
"items": items
|
|
42
|
-
})
|
|
43
|
-
return files
|
|
44
|
-
|
|
45
|
-
async def process_ast_to_graph(ast_data: Dict, api_key: str):
|
|
46
|
-
"""
|
|
47
|
-
AST JSON 데이터를 Cognee 지식 그래프로 변환하고 분석합니다.
|
|
48
|
-
"""
|
|
49
|
-
# 중앙 API 키 설정 (Cognee 엔진에 주입)
|
|
50
|
-
os.environ["OPENAI_API_KEY"] = api_key
|
|
51
|
-
|
|
52
|
-
# 1. 온톨로지 정의 (MVP용 간략화)
|
|
53
|
-
# Cognee의 cognify 프로세스를 통해 코드 간의 실제 '의미론적 관계'를 추출합니다.
|
|
54
|
-
|
|
55
|
-
nodes = []
|
|
56
|
-
edges = []
|
|
57
|
-
|
|
58
|
-
# AST 데이터 파싱 (ts-morph 추출본 기준)
|
|
59
|
-
files = _normalize_files(ast_data)
|
|
60
|
-
|
|
61
|
-
for file in files:
|
|
62
|
-
file_id = file.get("path")
|
|
63
|
-
nodes.append({
|
|
64
|
-
"id": file_id,
|
|
65
|
-
"type": "codebase",
|
|
66
|
-
"name": os.path.basename(file_id)
|
|
67
|
-
})
|
|
68
|
-
|
|
69
|
-
# 클래스 및 함수 관계 추출
|
|
70
|
-
for item in file.get("items", []):
|
|
71
|
-
item_id = f"{file_id}:{item.get('name')}"
|
|
72
|
-
nodes.append({
|
|
73
|
-
"id": item_id,
|
|
74
|
-
"type": item.get("type", "unknown"),
|
|
75
|
-
"name": item.get("name")
|
|
76
|
-
})
|
|
77
|
-
|
|
78
|
-
# 관계 생성 (File -> Item)
|
|
79
|
-
edges.append({
|
|
80
|
-
"sourceId": file_id,
|
|
81
|
-
"targetId": item_id,
|
|
82
|
-
"relationType": "implemented_in"
|
|
83
|
-
})
|
|
84
|
-
|
|
85
|
-
# 의존성 관계 추출 (추후 Cognee 엔진이 텍스트 분석으로 보강할 영역)
|
|
86
|
-
for dep in item.get("dependencies", []):
|
|
87
|
-
edges.append({
|
|
88
|
-
"sourceId": item_id,
|
|
89
|
-
"targetId": dep,
|
|
90
|
-
"relationType": "depends_on"
|
|
91
|
-
})
|
|
92
|
-
|
|
93
|
-
# 2. Cognee 엔진을 통한 그래프 고도화
|
|
94
|
-
# 실제 구현 시:
|
|
95
|
-
# await cognee.add(ast_data).cognify()
|
|
96
|
-
# graph = await cognee.get_graph()
|
|
97
|
-
|
|
98
|
-
return {
|
|
99
|
-
"nodes": nodes,
|
|
100
|
-
"edges": edges,
|
|
101
|
-
"status": "success",
|
|
102
|
-
"processor": "cognee-bridge-v1"
|
|
103
|
-
}
|
|
104
|
-
|
|
105
|
-
async def main():
|
|
106
|
-
if len(sys.argv) < 3:
|
|
107
|
-
print(json.dumps({"error": "Missing arguments. Usage: python cognee_bridge.py <ast_path> <api_key>"}))
|
|
108
|
-
return
|
|
109
|
-
|
|
110
|
-
ast_path = sys.argv[1]
|
|
111
|
-
api_key = sys.argv[2]
|
|
112
|
-
|
|
113
|
-
try:
|
|
114
|
-
if not os.path.exists(ast_path):
|
|
115
|
-
raise FileNotFoundError(f"AST file not found: {ast_path}")
|
|
116
|
-
|
|
117
|
-
with open(ast_path, 'r') as f:
|
|
118
|
-
ast_data = json.load(f)
|
|
119
|
-
|
|
120
|
-
result = await process_ast_to_graph(ast_data, api_key)
|
|
121
|
-
print(json.dumps(result))
|
|
122
|
-
|
|
123
|
-
except Exception as e:
|
|
124
|
-
print(json.dumps({"error": str(e), "nodes": [], "edges": []}))
|
|
125
|
-
|
|
126
|
-
if __name__ == "__main__":
|
|
127
|
-
asyncio.run(main())
|