monoai 0.2.9 β†’ 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -124,6 +124,82 @@ function buildGraphInsightsFromChanges(files) {
124
124
  ...extensionMix,
125
125
  ])).slice(0, 24);
126
126
  }
127
/**
 * Collapse all runs of whitespace in `value` to single spaces and cap the
 * resulting length.
 *
 * @param {unknown} value - Anything; coerced to string (null/undefined -> '').
 * @param {number} [max=180] - Maximum characters to keep before truncating.
 * @returns {string} The normalized line, with '...' appended when truncated.
 */
function compactLine(value, max = 180) {
  const squeezed = String(value || '').replace(/\s+/g, ' ').trim();
  if (squeezed === '') {
    return '';
  }
  return squeezed.length > max ? `${squeezed.slice(0, max)}...` : squeezed;
}
135
/**
 * Produce a one-line preview of a file: the first few non-empty,
 * non-comment lines, compacted and joined with ' / '.
 *
 * Comment filtering is line-based only: lines starting with '//', '*', or
 * '/*' are dropped. NOTE(review): the interior of a block comment whose
 * lines lack a leading '*' will still slip through; full filtering would
 * need stateful parsing.
 *
 * Fix vs. previous version: the '/*' block-comment opener was not filtered
 * even though its ' * ...' continuation lines were, so comment openers
 * leaked into snippets.
 *
 * @param {string} filePath - Path resolved against process.cwd() (absolute paths pass through).
 * @returns {string} Snippet string, or '' when the file is missing or unreadable.
 */
function summarizeRawSnippet(filePath) {
  try {
    const abs = path.resolve(process.cwd(), filePath);
    if (!fs.existsSync(abs))
      return '';
    const raw = fs.readFileSync(abs, 'utf8');
    const lines = raw
      .split(/\r?\n/g)
      .map((line) => line.trim())
      // Skip blanks and comment-looking lines ('//', '*' continuations, '/*' openers).
      .filter((line) => line.length > 0 &&
        !line.startsWith('//') &&
        !line.startsWith('*') &&
        !line.startsWith('/*'))
      .slice(0, 8)
      .map((line) => compactLine(line, 120))
      .filter(Boolean);
    return lines.join(' / ');
  }
  catch {
    // Best-effort helper: unreadable files simply contribute no snippet.
    return '';
  }
}
154
/**
 * Build "target_raw" insight rows for files reachable from the changed set:
 * the changed files themselves plus their 1-hop and 2-hop import targets,
 * each paired with a short raw-source snippet.
 *
 * @param {Record<string, {fileImports?: string[]}>} skeleton - Per-file skeleton map.
 * @param {Array<{path: string}>} changedFiles - Changed-file rows from git.
 * @param {number} [limit=18] - Maximum number of rows to emit.
 * @returns {string[]} Rows shaped like `target_raw:<path> :: <snippet>`.
 */
function buildTargetRawInsights(skeleton, changedFiles, limit = 18) {
  const skeletonByPath = new Map(Object.entries(skeleton || {}));

  // Normalized, non-empty import targets recorded for one file (if known).
  const importsOf = (file) => {
    const entry = skeletonByPath.get(file);
    const recorded = Array.isArray(entry?.fileImports) ? entry.fileImports : [];
    return recorded
      .map((item) => normalizeGitFilePath(String(item || '')))
      .filter((item) => item.length > 0);
  };

  const seeds = changedFiles
    .map((row) => normalizeGitFilePath(row.path))
    .filter(Boolean)
    .slice(0, 24);

  // Walk the import graph two hops out from the seeds.
  const firstHop = new Set();
  for (const seed of seeds) {
    for (const target of importsOf(seed)) firstHop.add(target);
  }
  const secondHop = new Set();
  for (const mid of firstHop) {
    for (const target of importsOf(mid)) secondHop.add(target);
  }

  // Dedupe while preserving priority: seeds first, then nearer hops.
  const candidates = new Set([
    ...seeds.slice(0, 12),
    ...[...firstHop].slice(0, 16),
    ...[...secondHop].slice(0, 16),
  ]);

  const insights = [];
  for (const file of [...candidates].slice(0, Math.max(8, limit))) {
    const snippet = summarizeRawSnippet(file);
    if (!snippet) continue;
    insights.push(`target_raw:${file} :: ${snippet}`);
    if (insights.length >= limit) break;
  }
  return insights;
}
193
/**
 * Ask the Convex backend for the last commit id synced on a branch.
 * Best-effort: any network/auth/parse failure yields null instead of throwing.
 *
 * @param {{convexSiteUrl: string, token: string, branch: string}} params
 * @returns {Promise<string|null>} Commit id, or null when unknown or unavailable.
 */
async function fetchLastSyncedCommit(params) {
  const { convexSiteUrl, token, branch } = params;
  try {
    const response = await axios.post(
      `${convexSiteUrl}/cli/git-last-sync`,
      { branch },
      { headers: { Authorization: `Bearer ${token}` } },
    );
    const commitId = String(response?.data?.latest?.commitId || '').trim();
    return commitId.length > 0 ? commitId : null;
  }
  catch {
    // Treat a missing or failed sync record as "never synced".
    return null;
  }
}
127
203
  export const pushCommand = new Command('push')
128
204
  .description('Sync your codebase structure to MonoAI')
129
205
  .option('-v, --verbose', 'Show internal pipeline logs')
@@ -165,29 +241,58 @@ export const pushCommand = new Command('push')
165
241
  if (whitelistMatcher) {
166
242
  console.log(chalk.blue(`🎯 Applying ${MONOAIWHITELIST_FILENAME} (${whitelistRuleCount} rule${whitelistRuleCount > 1 ? 's' : ''})`));
167
243
  }
244
+ const CONVEX_SITE_URL = process.env.MONOAI_CONVEX_SITE_URL ||
245
+ process.env.MONOAI_CONVEX_URL ||
246
+ config.get('convex_url') ||
247
+ 'https://majestic-crane-609.convex.site';
168
248
  // 1. Git Metadata (Zero-HITL Intent)
169
- const { lastCommit, branch, changedScopes, graphInsights } = await track('git metadata', async () => {
249
+ const { lastCommit, branch, changedScopes, changedFiles } = await track('git metadata', async () => {
170
250
  const log = await git.log({ maxCount: 1 });
171
251
  const lastCommit = log.latest;
172
252
  const branch = await git.revparse(['--abbrev-ref', 'HEAD']);
173
253
  if (!lastCommit) {
174
254
  throw new Error('No commits found.');
175
255
  }
176
- let changedFiles = [];
256
+ let changedRows = [];
257
+ const maxDiffCommits = Math.max(1, Math.min(Number(process.env.MONOAI_DIFF_MAX_COMMITS || 30), 200));
258
+ const lastSyncedCommit = force
259
+ ? null
260
+ : await fetchLastSyncedCommit({
261
+ convexSiteUrl: CONVEX_SITE_URL,
262
+ token: String(token),
263
+ branch,
264
+ });
177
265
  try {
178
- const diffSummary = await git.diffSummary(['HEAD~1', 'HEAD']);
179
- changedFiles = buildChangedFileSignals(diffSummary?.files || []);
266
+ let diffSummary = null;
267
+ if (lastSyncedCommit) {
268
+ try {
269
+ const rangeCountRaw = await git.raw(['rev-list', '--count', `${lastSyncedCommit}..HEAD`]);
270
+ const rangeCount = Number(String(rangeCountRaw || '').trim());
271
+ if (Number.isFinite(rangeCount) && rangeCount > maxDiffCommits) {
272
+ diffSummary = await git.diffSummary([`HEAD~${maxDiffCommits}`, 'HEAD']);
273
+ }
274
+ else {
275
+ diffSummary = await git.diffSummary([`${lastSyncedCommit}..HEAD`]);
276
+ }
277
+ }
278
+ catch {
279
+ diffSummary = await git.diffSummary(['HEAD~1', 'HEAD']);
280
+ }
281
+ }
282
+ else {
283
+ diffSummary = await git.diffSummary(['HEAD~1', 'HEAD']);
284
+ }
285
+ changedRows = buildChangedFileSignals(diffSummary?.files || []);
180
286
  }
181
287
  catch {
182
- changedFiles = [];
288
+ changedRows = [];
183
289
  }
184
- changedFiles = changedFiles.filter((row) => isWhitelisted(row.path));
185
- const changedScopes = Array.from(new Set(changedFiles
290
+ changedRows = changedRows.filter((row) => isWhitelisted(row.path));
291
+ const changedScopes = Array.from(new Set(changedRows
186
292
  .flatMap((row) => [row.scope2, row.scope1])
187
293
  .map((row) => String(row || "").trim())
188
294
  .filter(Boolean))).slice(0, 24);
189
- const graphInsights = buildGraphInsightsFromChanges(changedFiles);
190
- return { lastCommit, branch, changedScopes, graphInsights };
295
+ return { lastCommit, branch, changedScopes, changedFiles: changedRows };
191
296
  });
192
297
  const shortCommitId = lastCommit.hash.substring(0, 7);
193
298
  const snapshotId = `${branch}@${shortCommitId}`;
@@ -229,14 +334,14 @@ export const pushCommand = new Command('push')
229
334
  }
230
335
  };
231
336
  scanDir(process.cwd());
232
- const skeleton = extractSkeleton(filesToAnalyze);
337
+ const skeleton = extractSkeleton(filesToAnalyze, isWhitelisted);
233
338
  console.log(chalk.dim(` Files scanned: ${filesToAnalyze.length}`));
234
339
  return { skeleton };
235
340
  });
236
- const CONVEX_SITE_URL = process.env.MONOAI_CONVEX_SITE_URL ||
237
- process.env.MONOAI_CONVEX_URL ||
238
- config.get('convex_url') ||
239
- 'https://majestic-crane-609.convex.site';
341
+ const graphInsights = Array.from(new Set([
342
+ ...buildGraphInsightsFromChanges(changedFiles),
343
+ ...buildTargetRawInsights(skeleton, changedFiles, 18),
344
+ ])).slice(0, 48);
240
345
  // 3. AST-only upload. Knowledge Graph processing is handled server-side.
241
346
  console.log(chalk.blue('πŸ“¦ Preparing data...'));
242
347
  // 4. Payload Construction
@@ -1,3 +1,4 @@
1
+ import fs from 'fs';
1
2
  import { Project, SyntaxKind } from 'ts-morph';
2
3
  import path from 'path';
3
4
  // πŸ›‘οΈ Security: Redaction Patterns
@@ -30,7 +31,98 @@ function toRepoRelativePath(rawPath) {
30
31
  }
31
32
  return normalized.replace(/^\.?\//, "");
32
33
  }
33
- export function extractSkeleton(filePaths) {
34
/**
 * Canonicalize a path-like token: backslashes become forward slashes, a
 * leading './' (or lone '/') is stripped, then surrounding whitespace trimmed.
 *
 * @param {unknown} value - Raw path token; coerced to string.
 * @returns {string} Normalized token ('' for null/undefined/empty input).
 */
function normalizePathToken(value) {
  const text = String(value || '');
  return text.replace(/\\/g, '/').replace(/^\.?\//, '').trim();
}
37
/**
 * Heuristic: does `value` look like a repo source-file path?
 * Anything containing a '/' after normalization qualifies; otherwise the
 * token must be a bare "name.ext" filename.
 *
 * @param {unknown} value - Candidate path token.
 * @returns {boolean} True when the token plausibly names a source file.
 */
function isLikelySourceFile(value) {
  const token = normalizePathToken(value);
  if (!token) {
    return false;
  }
  return token.includes('/')
    ? true
    : /^[A-Za-z0-9_.-]+\.[A-Za-z0-9]+$/.test(token);
}
45
/**
 * Squash runs of whitespace to single spaces and truncate to `max` characters.
 *
 * @param {unknown} value - Coerced to string; null/undefined become ''.
 * @param {number} [max=120] - Length cap before '...' is appended.
 * @returns {string} The compacted text.
 */
function compactText(value, max = 120) {
  const text = String(value || '').replace(/\s+/g, ' ').trim();
  if (text.length === 0) {
    return '';
  }
  if (text.length > max) {
    return `${text.slice(0, max)}...`;
  }
  return text;
}
53
/**
 * Deduplicate and cap a list of strings.
 * Entries are trimmed; blanks and entries longer than `maxLength` are
 * dropped; iteration stops once `maxItems` unique entries are collected.
 *
 * @param {Iterable<unknown>} values - Candidate values; each coerced to string.
 * @param {number} maxItems - Maximum number of unique entries to return.
 * @param {number} [maxLength=180] - Per-entry length cap (longer entries are skipped, not truncated).
 * @returns {string[]} Unique entries in first-seen order.
 */
function uniqueStrings(values, maxItems, maxLength = 180) {
  const result = [];
  const taken = new Set();
  for (const candidate of values) {
    const text = String(candidate || '').trim();
    if (!text || text.length > maxLength || taken.has(text)) {
      continue;
    }
    taken.add(text);
    result.push(text);
    if (result.length >= maxItems) {
      break;
    }
  }
  return result;
}
69
/**
 * Deduplicate dependency edges by (relationType, target), normalizing each
 * target path token and dropping edges missing either field.
 *
 * @param {Array<{target?: string, relationType?: string}>} edges - Candidate edges.
 * @param {number} [maxItems=12] - Maximum number of edges to return.
 * @returns {Array<{target: string, relationType: string}>} Unique edges in first-seen order.
 */
function uniqueDependencyEdges(edges, maxItems = 12) {
  const result = [];
  const visited = new Set();
  for (const edge of edges) {
    const target = normalizePathToken(edge?.target || '');
    const relationType = edge?.relationType;
    if (!target || !relationType) {
      continue;
    }
    const dedupeKey = `${relationType}::${target}`;
    if (visited.has(dedupeKey)) {
      continue;
    }
    visited.add(dedupeKey);
    result.push({ target, relationType });
    if (result.length >= maxItems) {
      break;
    }
  }
  return result;
}
87
/**
 * Resolve a relative import specifier to a repo-relative file path.
 * Tries the literal path, then common source extensions, then index files;
 * falls back to the unresolved base path when nothing exists on disk.
 * Non-relative specifiers (bare packages, absolute paths) resolve to ''.
 *
 * @param {string} fromFilePath - Absolute path of the importing file.
 * @param {string} moduleSpecifier - The import specifier text as written.
 * @returns {string} Repo-relative path, or '' when not a plausible local source file.
 */
function resolveLocalImportPath(fromFilePath, moduleSpecifier) {
  const specifier = String(moduleSpecifier || '').trim();
  // Only relative specifiers ('./x', '../x') can be local files.
  if (!specifier.startsWith('.')) {
    return '';
  }
  const basePath = path.resolve(path.dirname(fromFilePath), specifier);
  const extensions = ['.ts', '.tsx', '.js', '.jsx', '.mjs', '.cjs'];
  const candidatePaths = [
    basePath,
    ...extensions.map((ext) => `${basePath}${ext}`),
    ...extensions.map((ext) => path.join(basePath, `index${ext}`)),
  ];
  for (const candidatePath of candidatePaths) {
    if (!fs.existsSync(candidatePath)) {
      continue;
    }
    const relative = toRepoRelativePath(candidatePath);
    if (isLikelySourceFile(relative)) {
      return relative;
    }
  }
  // Nothing on disk matched; keep the bare resolution if it still looks like a source file.
  const fallbackPath = toRepoRelativePath(basePath);
  return isLikelySourceFile(fallbackPath) ? fallbackPath : '';
}
118
/**
 * Replace every match of the module-level SECRET_PATTERNS regexes with the
 * '[REDACTED_SECRET]' placeholder.
 *
 * @param {string} raw - File contents to scrub before upload.
 * @returns {string} The redacted text.
 */
function redactSecrets(raw) {
  return SECRET_PATTERNS.reduce(
    (text, pattern) => text.replace(pattern, '[REDACTED_SECRET]'),
    raw,
  );
}
125
+ export function extractSkeleton(filePaths, isWhitelisted) {
34
126
  const project = new Project();
35
127
  // πŸ›‘οΈ Security: File Filter
36
128
  const safePaths = filePaths.filter(p => {
@@ -74,11 +166,79 @@ export function extractSkeleton(filePaths) {
74
166
  functions: [],
75
167
  classes: [],
76
168
  interfaces: [],
77
- types: []
169
+ types: [],
170
+ fileImports: [],
171
+ rawHints: [],
172
+ };
173
+ const importAliasToTarget = new Map();
174
+ const fileImportTargets = [];
175
+ const sourceFilePathAbs = sourceFile.getFilePath();
176
+ sourceFile.getImportDeclarations().forEach((imp) => {
177
+ const moduleSpecifier = imp.getModuleSpecifierValue();
178
+ const resolved = resolveLocalImportPath(sourceFilePathAbs, moduleSpecifier);
179
+ if (!resolved)
180
+ return;
181
+ const relationType = imp.isTypeOnly() ? 'type_ref' : 'imports';
182
+ fileImportTargets.push(resolved);
183
+ const defaultImport = imp.getDefaultImport();
184
+ if (defaultImport) {
185
+ importAliasToTarget.set(defaultImport.getText(), { target: resolved, relationType });
186
+ }
187
+ const namespaceImport = imp.getNamespaceImport();
188
+ if (namespaceImport) {
189
+ importAliasToTarget.set(namespaceImport.getText(), { target: resolved, relationType });
190
+ }
191
+ for (const named of imp.getNamedImports()) {
192
+ const alias = named.getAliasNode()?.getText() || named.getName();
193
+ if (!alias)
194
+ continue;
195
+ importAliasToTarget.set(alias, { target: resolved, relationType });
196
+ }
197
+ });
198
+ skeleton.fileImports = uniqueStrings(fileImportTargets, 40, 180);
199
+ const localFunctionNames = new Set(sourceFile
200
+ .getFunctions()
201
+ .map((fn) => String(fn.getName() || '').trim())
202
+ .filter(Boolean));
203
+ const collectNodeDependencies = (node, nodeText) => {
204
+ const deps = [];
205
+ const compactNodeText = String(nodeText || '').slice(0, 12000);
206
+ for (const [alias, meta] of importAliasToTarget.entries()) {
207
+ const aliasPattern = new RegExp(`\\b${alias.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}\\b`);
208
+ if (aliasPattern.test(compactNodeText)) {
209
+ deps.push({ target: meta.target, relationType: meta.relationType });
210
+ }
211
+ }
212
+ for (const callExpr of node.getDescendantsOfKind(SyntaxKind.CallExpression)) {
213
+ const exprText = String(callExpr.getExpression().getText() || '');
214
+ const rootName = exprText.split('.')[0]?.trim();
215
+ if (!rootName)
216
+ continue;
217
+ const imported = importAliasToTarget.get(rootName);
218
+ if (imported) {
219
+ deps.push({ target: imported.target, relationType: 'calls' });
220
+ continue;
221
+ }
222
+ if (localFunctionNames.has(rootName)) {
223
+ deps.push({ target: `${filePath}:${rootName}`, relationType: 'calls' });
224
+ }
225
+ }
226
+ for (const typeRef of node.getDescendantsOfKind(SyntaxKind.TypeReference)) {
227
+ const typeText = String(typeRef.getTypeName().getText() || '');
228
+ const rootName = typeText.split('.')[0]?.trim();
229
+ if (!rootName)
230
+ continue;
231
+ const imported = importAliasToTarget.get(rootName);
232
+ if (imported) {
233
+ deps.push({ target: imported.target, relationType: 'type_ref' });
234
+ }
235
+ }
236
+ return uniqueDependencyEdges(deps, 16);
78
237
  };
79
238
  // Extract Functions
80
239
  sourceFile.getFunctions().forEach(f => {
81
240
  if (f.isExported()) {
241
+ const deps = collectNodeDependencies(f, f.getText());
82
242
  skeleton.functions.push({
83
243
  name: f.getName(),
84
244
  parameters: f.getParameters().map(p => ({
@@ -86,13 +246,28 @@ export function extractSkeleton(filePaths) {
86
246
  type: p.getType().getText()
87
247
  })),
88
248
  returnType: f.getReturnType().getText(),
89
- jsDoc: f.getJsDocs().map(d => d.getCommentText()).join('\n')
249
+ jsDoc: f.getJsDocs().map(d => d.getCommentText()).join('\n'),
250
+ dependencies: deps,
90
251
  });
91
252
  }
92
253
  });
93
254
  // Extract Classes
94
255
  sourceFile.getClasses().forEach(c => {
95
256
  if (c.isExported()) {
257
+ const deps = collectNodeDependencies(c, c.getText());
258
+ const extendsNode = c.getExtends();
259
+ if (extendsNode) {
260
+ const baseName = String(extendsNode.getExpression().getText() || '').split('.')[0]?.trim();
261
+ const imported = baseName ? importAliasToTarget.get(baseName) : undefined;
262
+ if (imported)
263
+ deps.push({ target: imported.target, relationType: 'extends' });
264
+ }
265
+ for (const impl of c.getImplements()) {
266
+ const implName = String(impl.getExpression().getText() || '').split('.')[0]?.trim();
267
+ const imported = implName ? importAliasToTarget.get(implName) : undefined;
268
+ if (imported)
269
+ deps.push({ target: imported.target, relationType: 'implements' });
270
+ }
96
271
  skeleton.classes.push({
97
272
  name: c.getName(),
98
273
  methods: c.getMethods().map(m => ({
@@ -103,27 +278,44 @@ export function extractSkeleton(filePaths) {
103
278
  })),
104
279
  returnType: m.getReturnType().getText()
105
280
  })),
106
- jsDoc: c.getJsDocs().map(d => d.getCommentText()).join('\n')
281
+ jsDoc: c.getJsDocs().map(d => d.getCommentText()).join('\n'),
282
+ dependencies: uniqueDependencyEdges(deps, 20),
107
283
  });
108
284
  }
109
285
  });
110
286
  // Extract Interfaces
111
287
  sourceFile.getInterfaces().forEach(i => {
112
288
  if (i.isExported()) {
289
+ const deps = collectNodeDependencies(i, i.getText());
113
290
  skeleton.interfaces.push({
114
291
  name: i.getName(),
115
- jsDoc: i.getJsDocs().map(d => d.getCommentText()).join('\n')
292
+ jsDoc: i.getJsDocs().map(d => d.getCommentText()).join('\n'),
293
+ dependencies: deps,
116
294
  });
117
295
  }
118
296
  });
119
297
  // Extract Types
120
298
  sourceFile.getTypeAliases().forEach(t => {
121
299
  if (t.isExported()) {
300
+ const deps = collectNodeDependencies(t, t.getText());
122
301
  skeleton.types.push({
123
302
  name: t.getName(),
303
+ dependencies: deps,
124
304
  });
125
305
  }
126
306
  });
307
+ const fullText = sourceFile.getFullText();
308
+ const rawHints = fullText
309
+ .split(/\r?\n/g)
310
+ .map((line) => compactText(line, 140))
311
+ .filter((line) => line.length >= 12)
312
+ .filter((line) => !line.startsWith('//') && !line.startsWith('*'))
313
+ .slice(0, 12);
314
+ skeleton.rawHints = uniqueStrings(rawHints, 8, 140);
315
+ // whitelist 파일만 raw_content 포함 (μ‹œν¬λ¦Ώ 제거 ν›„ μ΅œλŒ€ 3000자)
316
+ if (isWhitelisted && isWhitelisted(filePath)) {
317
+ skeleton.raw_content = redactSecrets(fullText).slice(0, 3000);
318
+ }
127
319
  result[filePath] = skeleton;
128
320
  });
129
321
  // πŸ›‘οΈ Security: Payload Size Limit (DoS Prevention)
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "monoai",
3
3
  "type": "module",
4
- "version": "0.2.9",
4
+ "version": "0.3.0",
5
5
  "description": "MonoAI CLI for syncing codebase history",
6
6
  "main": "dist/index.js",
7
7
  "bin": {