monoai 0.2.4 → 0.2.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -38,5 +38,22 @@ npx monoai login
38
38
  npx monoai push
39
39
  ```
40
40
 
41
+ ## .monoaiignore (optional)
42
+
43
+ `monoai push` reads both `.gitignore` and `.monoaiignore`.
44
+ If `.monoaiignore` does not exist, MonoAI creates a starter template automatically.
45
+
46
+ Use it to exclude files/directories from AST scanning:
47
+
48
+ ```gitignore
49
+ # Example
50
+ node_modules
51
+ dist
52
+ build
53
+ coverage
54
+ **/_generated/**
55
+ **/.agent/**
56
+ *.log
57
+ ```
41
58
 
42
59
 
@@ -5,8 +5,10 @@ import chalk from 'chalk';
5
5
  import open from 'open';
6
6
  import ora from 'ora';
7
7
  const config = new Conf({ projectName: 'monoai' });
8
- // Production URLs
9
- const CONVEX_SITE_URL = 'https://majestic-crane-609.convex.site';
8
+ // Production URLs (single source of truth)
9
+ const CONVEX_SITE_URL = process.env.MONOAI_CONVEX_SITE_URL ||
10
+ process.env.MONOAI_CONVEX_URL ||
11
+ 'https://majestic-crane-609.convex.site';
10
12
  const WEB_URL = 'https://monoai.space';
11
13
  export const loginCommand = new Command('login')
12
14
  .description('Authenticate with MonoAI')
@@ -7,13 +7,39 @@ import path from 'path';
7
7
  import ignore from 'ignore';
8
8
  import Conf from 'conf';
9
9
  import { extractSkeleton } from '../utils/ast-extractor.js';
10
- import { execSync } from 'child_process';
11
- import { fileURLToPath } from 'url';
12
10
  const git = simpleGit();
13
11
  const config = new Conf({ projectName: 'monoai' });
12
+ const MONOAIIGNORE_FILENAME = '.monoaiignore';
13
+ const DEFAULT_MONOAIIGNORE = `# MonoAI AST scan ignore rules
14
+ # Uses .gitignore-style patterns.
15
+
16
+ # Dependencies / build
17
+ node_modules
18
+ dist
19
+ build
20
+ coverage
21
+ .next
22
+ .turbo
23
+
24
+ # Generated / local artifacts
25
+ **/_generated/**
26
+ **/.agent/**
27
+ *.log
28
+ `;
14
29
  export const pushCommand = new Command('push')
15
30
  .description('Push codebase integrity and AST skeleton to MonoAI')
16
31
  .action(async () => {
32
+ const totalStart = Date.now();
33
+ const stageTimes = [];
34
+ const track = (stage, fn) => {
35
+ const start = Date.now();
36
+ return fn().then((result) => {
37
+ const ms = Date.now() - start;
38
+ stageTimes.push({ stage, ms });
39
+ console.log(chalk.dim(` ⏱ ${stage}: ${(ms / 1000).toFixed(2)}s`));
40
+ return result;
41
+ });
42
+ };
17
43
  try {
18
44
  console.log(chalk.blue('🏎️ Starting MonoAI Strategic Push...'));
19
45
  // 0. Auth Check
@@ -29,104 +55,146 @@ export const pushCommand = new Command('push')
29
55
  return;
30
56
  }
31
57
  // 1. Git Metadata (Zero-HITL Intent)
32
- const log = await git.log({ maxCount: 1 });
33
- const lastCommit = log.latest;
34
- const branch = await git.revparse(['--abbrev-ref', 'HEAD']);
35
- if (!lastCommit) {
36
- console.error(chalk.red('❌ No commits found.'));
37
- return;
38
- }
58
+ const { lastCommit, branch, changedScopes } = await track('git metadata', async () => {
59
+ const log = await git.log({ maxCount: 1 });
60
+ const lastCommit = log.latest;
61
+ const branch = await git.revparse(['--abbrev-ref', 'HEAD']);
62
+ if (!lastCommit) {
63
+ throw new Error('No commits found.');
64
+ }
65
+ let changedScopes = [];
66
+ try {
67
+ const diffSummary = await git.diffSummary(['HEAD~1', 'HEAD']);
68
+ changedScopes = Array.from(new Set(diffSummary.files.map((f) => {
69
+ const parts = f.file.split('/');
70
+ return parts.length > 1 ? parts[0] : f.file;
71
+ })));
72
+ }
73
+ catch {
74
+ changedScopes = [];
75
+ }
76
+ return { lastCommit, branch, changedScopes };
77
+ });
78
+ const shortCommitId = lastCommit.hash.substring(0, 7);
79
+ const snapshotId = `${branch}@${shortCommitId}`;
39
80
  console.log(chalk.dim(` Branch: ${chalk.white(branch)}`));
40
- console.log(chalk.dim(` Commit: ${chalk.white(lastCommit.hash.substring(0, 7))}`));
81
+ console.log(chalk.dim(` Commit: ${chalk.white(shortCommitId)}`));
41
82
  // 2. Scan & Extract AST Skeleton
42
83
  console.log(chalk.blue('🔍 Analyzing structural integrity (AST)...'));
43
- const ig = ignore();
44
- if (fs.existsSync('.gitignore')) {
45
- ig.add(fs.readFileSync('.gitignore').toString());
46
- }
47
- // Hardcoded safety
48
- ig.add(['node_modules', '.git', 'dist', '.env', 'build']);
49
- const filesToAnalyze = [];
50
- const scanDir = (dir) => {
51
- const items = fs.readdirSync(dir);
52
- for (const item of items) {
53
- const fullPath = path.join(dir, item);
54
- const relativePath = path.relative(process.cwd(), fullPath);
55
- if (ig.ignores(relativePath))
56
- continue;
57
- if (fs.statSync(fullPath).isDirectory()) {
58
- scanDir(fullPath);
59
- }
60
- else if (/\.(ts|tsx|js|jsx)$/.test(item)) {
61
- filesToAnalyze.push(fullPath);
62
- }
84
+ const { skeleton } = await track('ast extraction', async () => {
85
+ const ig = ignore();
86
+ const monoaiIgnorePath = path.join(process.cwd(), MONOAIIGNORE_FILENAME);
87
+ if (!fs.existsSync(monoaiIgnorePath)) {
88
+ fs.writeFileSync(monoaiIgnorePath, DEFAULT_MONOAIIGNORE, 'utf8');
89
+ console.log(chalk.dim(` Created ${MONOAIIGNORE_FILENAME} template`));
63
90
  }
64
- };
65
- scanDir(process.cwd());
66
- const skeleton = extractSkeleton(filesToAnalyze);
67
- const CONVEX_SITE_URL = config.get('convex_url') || 'https://majestic-crane-609.convex.site';
68
- // 3. Central System Intelligence (Auth required)
69
- console.log(chalk.blue('🔑 Retrieving system intelligence credentials...'));
70
- let centralKey = '';
71
- try {
72
- const keyResponse = await axios.post(`${CONVEX_SITE_URL}/cli/system/key`, {}, {
73
- headers: { 'Authorization': `Bearer ${token}` }
74
- });
75
- centralKey = keyResponse.data.key;
76
- }
77
- catch (err) {
78
- console.warn(chalk.yellow('⚠️ Failed to fetch central key, using local settings if available.'));
79
- }
80
- // 4. Cognee Knowledge Graph Bridge
81
- let graphData = { nodes: [], edges: [] };
82
- if (centralKey) {
83
- console.log(chalk.blue('🧠 Building Knowledge Graph via Cognee...'));
84
- try {
85
- const tempDir = path.join(process.cwd(), '.monoai_temp');
86
- if (!fs.existsSync(tempDir))
87
- fs.mkdirSync(tempDir);
88
- const astPath = path.join(tempDir, 'ast_skeleton.json');
89
- fs.writeFileSync(astPath, JSON.stringify(skeleton));
90
- // Locate bridge script (assuming it's in the same package)
91
- const bridgePath = path.join(path.dirname(fileURLToPath(import.meta.url)), '../../scripts/cognee_bridge.py');
92
- // Fallback to project root scripts if not found in package
93
- const finalBridgePath = fs.existsSync(bridgePath) ? bridgePath : 'scripts/cognee_bridge.py';
94
- console.log(chalk.dim(` Running Cognee on ${filesToAnalyze.length} files...`));
95
- const output = execSync(`python3 ${finalBridgePath} ${astPath} ${centralKey}`, { encoding: 'utf8' });
96
- graphData = JSON.parse(output);
97
- // Cleanup
98
- fs.unlinkSync(astPath);
99
- if (graphData.status === 'success') {
100
- console.log(chalk.green('✅ Knowledge Graph constructed successfully.'));
101
- }
91
+ if (fs.existsSync('.gitignore')) {
92
+ ig.add(fs.readFileSync('.gitignore').toString());
102
93
  }
103
- catch (err) {
104
- console.warn(chalk.yellow('⚠️ Cognee analysis skipped or failed:'), err.message);
94
+ if (fs.existsSync(monoaiIgnorePath)) {
95
+ ig.add(fs.readFileSync(monoaiIgnorePath).toString());
105
96
  }
106
- }
107
- // 5. Payload Construction
108
- const payload = {
97
+ // Hardcoded safety
98
+ ig.add(['node_modules', '.git', 'dist', '.env', 'build']);
99
+ const filesToAnalyze = [];
100
+ const scanDir = (dir) => {
101
+ const items = fs.readdirSync(dir);
102
+ for (const item of items) {
103
+ const fullPath = path.join(dir, item);
104
+ const relativePath = path.relative(process.cwd(), fullPath);
105
+ if (ig.ignores(relativePath))
106
+ continue;
107
+ if (fs.statSync(fullPath).isDirectory()) {
108
+ scanDir(fullPath);
109
+ }
110
+ else if (/\.(ts|tsx|js|jsx)$/.test(item)) {
111
+ filesToAnalyze.push(fullPath);
112
+ }
113
+ }
114
+ };
115
+ scanDir(process.cwd());
116
+ const skeleton = extractSkeleton(filesToAnalyze);
117
+ console.log(chalk.dim(` Files analyzed: ${filesToAnalyze.length}`));
118
+ return { skeleton };
119
+ });
120
+ const CONVEX_SITE_URL = process.env.MONOAI_CONVEX_SITE_URL ||
121
+ process.env.MONOAI_CONVEX_URL ||
122
+ config.get('convex_url') ||
123
+ 'https://majestic-crane-609.convex.site';
124
+ // 3. AST-only upload. Knowledge graph processing is handled server-side.
125
+ console.log(chalk.blue('📦 Preparing AST payload for server-side graph pipeline...'));
126
+ // 4. Payload Construction
127
+ const payload = await track('payload build', async () => ({
109
128
  name: path.basename(process.cwd()),
129
+ snapshotId,
110
130
  branch: branch,
111
- commitId: lastCommit.hash.substring(0, 7),
131
+ commitId: shortCommitId,
112
132
  commitMessage: lastCommit.message,
113
133
  structure: JSON.stringify(skeleton), // Structured AST
114
- graphData: graphData, // Knowledge Graph Data
115
- syncStatus: 'success',
116
- };
117
- // 6. Send to Navigator (Convex)
134
+ changedScopes,
135
+ syncStatus: 'processing',
136
+ }));
137
+ // 5. Send to Navigator (Convex)
118
138
  console.log(chalk.blue('📑 Transmitting to Value Engine...'));
119
- await axios.post(`${CONVEX_SITE_URL}/cli/git-commit`, {
120
- codebaseData: payload
121
- }, {
122
- headers: {
123
- 'Authorization': `Bearer ${token}`
124
- }
139
+ const transmitResult = await track('transmit', async () => {
140
+ const response = await axios.post(`${CONVEX_SITE_URL}/cli/git-commit`, {
141
+ codebaseData: payload
142
+ }, {
143
+ headers: {
144
+ 'Authorization': `Bearer ${token}`
145
+ }
146
+ });
147
+ return response.data;
125
148
  });
149
+ if (transmitResult?.graphJobId) {
150
+ const terminalStatuses = new Set(['done', 'error']);
151
+ const waitStart = Date.now();
152
+ const timeoutMs = 180000;
153
+ const pollIntervalMs = 2000;
154
+ console.log(chalk.blue(`🧠 Waiting for KG pipeline... (job: ${transmitResult.graphJobId})`));
155
+ let lastStatus = 'uploaded';
156
+ let finalJob = null;
157
+ while (Date.now() - waitStart < timeoutMs) {
158
+ const res = await axios.post(`${CONVEX_SITE_URL}/cli/graph-job-status`, { jobId: transmitResult.graphJobId }, { headers: { Authorization: `Bearer ${token}` } });
159
+ const job = res.data?.job;
160
+ if (!job)
161
+ break;
162
+ finalJob = job;
163
+ if (job.status !== lastStatus) {
164
+ console.log(chalk.dim(` ↳ KG status: ${job.status}`));
165
+ lastStatus = job.status;
166
+ }
167
+ if (terminalStatuses.has(job.status)) {
168
+ break;
169
+ }
170
+ await new Promise((r) => setTimeout(r, pollIntervalMs));
171
+ }
172
+ const waitMs = Date.now() - waitStart;
173
+ stageTimes.push({ stage: 'kg pipeline wait', ms: waitMs });
174
+ console.log(chalk.dim(` ⏱ kg pipeline wait: ${(waitMs / 1000).toFixed(2)}s`));
175
+ if (finalJob) {
176
+ const fmt = (v) => (typeof v === 'number' ? `${(v / 1000).toFixed(2)}s` : 'n/a');
177
+ console.log(chalk.blue('📊 KG timing'));
178
+ console.log(chalk.dim(` - queue wait: ${fmt(finalJob.queueWaitMs)}`));
179
+ console.log(chalk.dim(` - cognee/graph build: ${fmt(finalJob.cogneeMs)}`));
180
+ console.log(chalk.dim(` - callback: ${fmt(finalJob.callbackMs)}`));
181
+ console.log(chalk.dim(` - worker total: ${fmt(finalJob.workerTotalMs)}`));
182
+ console.log(chalk.dim(` - pipeline total: ${fmt(finalJob.totalPipelineMs)}`));
183
+ if (finalJob.status === 'error' && finalJob.error) {
184
+ console.log(chalk.red(` - pipeline error: ${finalJob.error}`));
185
+ }
186
+ }
187
+ else {
188
+ console.log(chalk.yellow(' ⚠ KG timing unavailable (job status not returned)'));
189
+ }
190
+ }
126
191
  console.log(chalk.green('✨ [Navigator] Push complete! Check your dashboard for Alignment Analysis.'));
127
192
  console.log(chalk.dim(` Message: ${lastCommit.message.split('\n')[0]}`));
193
+ const totalMs = Date.now() - totalStart;
194
+ console.log(chalk.blue(`⏱ Total: ${(totalMs / 1000).toFixed(2)}s`));
128
195
  }
129
196
  catch (error) {
197
+ const totalMs = Date.now() - totalStart;
130
198
  if (error.response?.status === 401) {
131
199
  console.error(chalk.red('❌ Authentication Expired. Please run:'));
132
200
  console.error(chalk.white(' npx monoai login'));
@@ -134,5 +202,12 @@ export const pushCommand = new Command('push')
134
202
  else {
135
203
  console.error(chalk.red('❌ Sync failed:'), error.message);
136
204
  }
205
+ if (stageTimes.length > 0) {
206
+ console.log(chalk.yellow('\n⏱ Stage timing summary'));
207
+ for (const item of stageTimes) {
208
+ console.log(chalk.dim(` - ${item.stage}: ${(item.ms / 1000).toFixed(2)}s`));
209
+ }
210
+ }
211
+ console.log(chalk.blue(`⏱ Total: ${(totalMs / 1000).toFixed(2)}s`));
137
212
  }
138
213
  });
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "monoai",
3
3
  "type": "module",
4
- "version": "0.2.4",
4
+ "version": "0.2.6",
5
5
  "description": "MonoAI CLI for syncing codebase history",
6
6
  "main": "dist/index.js",
7
7
  "bin": {
@@ -31,4 +31,4 @@
31
31
  "@types/node": "^20.19.31",
32
32
  "typescript": "^5.3.2"
33
33
  }
34
- }
34
+ }
@@ -16,6 +16,32 @@ except ImportError:
16
16
  }))
17
17
  sys.exit(0)
18
18
 
19
+ def _normalize_files(ast_data: Dict):
20
+ # v1 format: {"files":[{"path":"...","items":[...]}]}
21
+ if isinstance(ast_data.get("files"), list):
22
+ return ast_data.get("files", [])
23
+
24
+ # current monoai ts-morph output: {"/abs/path.ts": {"functions":[],"classes":[]...}, ...}
25
+ files = []
26
+ for file_path, skeleton in ast_data.items():
27
+ if not isinstance(skeleton, dict):
28
+ continue
29
+ items = []
30
+ for key in ("functions", "classes", "interfaces", "types"):
31
+ for item in skeleton.get(key, []):
32
+ if not isinstance(item, dict):
33
+ continue
34
+ items.append({
35
+ "type": key[:-1] if key.endswith("s") else key,
36
+ "name": item.get("name") or "anonymous",
37
+ "dependencies": []
38
+ })
39
+ files.append({
40
+ "path": file_path,
41
+ "items": items
42
+ })
43
+ return files
44
+
19
45
  async def process_ast_to_graph(ast_data: Dict, api_key: str):
20
46
  """
21
47
  AST JSON 데이터를 Cognee 지식 그래프로 변환하고 분석합니다.
@@ -30,7 +56,7 @@ async def process_ast_to_graph(ast_data: Dict, api_key: str):
30
56
  edges = []
31
57
 
32
58
  # AST 데이터 파싱 (ts-morph 추출본 기준)
33
- files = ast_data.get("files", [])
59
+ files = _normalize_files(ast_data)
34
60
 
35
61
  for file in files:
36
62
  file_id = file.get("path")