reverse-engine 0.4.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of reverse-engine might be problematic. Click here for more details.

@@ -3,5 +3,6 @@ export interface AnalyzeOptions {
3
3
  include?: string[];
4
4
  exclude?: string[];
5
5
  framework?: string;
6
+ onProgress?: (current: number, total: number, file: string) => void;
6
7
  }
7
8
  export declare function analyze(sourcePath: string, options?: AnalyzeOptions): Promise<AnalysisResult>;
@@ -35,22 +35,28 @@ export async function analyze(sourcePath, options = {}) {
35
35
  let apiClients = [];
36
36
  let routes = [];
37
37
  // 각 파일 분석
38
- for (const file of files) {
38
+ let errorCount = 0;
39
+ for (let i = 0; i < files.length; i++) {
40
+ const file = files[i];
39
41
  const fullPath = join(sourcePath, file);
40
- const source = await readFile(fullPath, 'utf-8');
42
+ options.onProgress?.(i + 1, files.length, file);
43
+ let source;
44
+ try {
45
+ source = await readFile(fullPath, 'utf-8');
46
+ }
47
+ catch {
48
+ errorCount++;
49
+ continue;
50
+ }
41
51
  const ext = extname(file);
42
52
  try {
43
- const fileComponents = extractComponents(source, file, ext);
44
- const fileFunctions = extractFunctions(source, file, ext);
45
- const fileApiCalls = extractApiCalls(source, file, ext);
46
- const fileRoutes = extractRoutes(source, file, ext);
47
- components.push(...fileComponents);
48
- functions.push(...fileFunctions);
49
- apiClients.push(...fileApiCalls);
50
- routes.push(...fileRoutes);
53
+ components.push(...extractComponents(source, file, ext));
54
+ functions.push(...extractFunctions(source, file, ext));
55
+ apiClients.push(...extractApiCalls(source, file, ext));
56
+ routes.push(...extractRoutes(source, file, ext));
51
57
  }
52
58
  catch {
53
- // 파싱 실패한 파일은 건너뜀
59
+ errorCount++;
54
60
  }
55
61
  }
56
62
  // 역참조 구축
package/dist/cli/index.js CHANGED
@@ -55,6 +55,37 @@ function runNative(args) {
55
55
  return;
56
56
  execFileSync(nativeBin, args, { stdio: 'inherit' });
57
57
  }
58
+ /** CLI 옵션에서 인증 설정 구성 */
59
+ function buildAuth(opts, baseUrl) {
60
+ const auth = {};
61
+ if (opts.authCookie)
62
+ auth.cookie = opts.authCookie;
63
+ if (opts.authBearer)
64
+ auth.bearer = opts.authBearer;
65
+ if (opts.loginUrl) {
66
+ // --login-url이 상대경로면 baseUrl 기준으로 resolve
67
+ const loginUrl = opts.loginUrl.startsWith('http')
68
+ ? opts.loginUrl
69
+ : new URL(opts.loginUrl, baseUrl).href;
70
+ auth.loginUrl = loginUrl;
71
+ auth.credentials = {};
72
+ if (opts.loginId)
73
+ auth.credentials.email = opts.loginId;
74
+ if (opts.loginPw)
75
+ auth.credentials.password = opts.loginPw;
76
+ }
77
+ return Object.keys(auth).length > 0 ? auth : undefined;
78
+ }
79
+ /** 한 줄 진행 표시 (같은 줄 덮어쓰기) */
80
+ function progressLine(current, total, file) {
81
+ const pct = Math.round((current / total) * 100);
82
+ const short = file.length > 50 ? '...' + file.slice(-47) : file;
83
+ const line = ` ${chalk.dim(`[${current}/${total}]`)} ${chalk.dim(`${pct}%`)} ${short}`;
84
+ process.stdout.write(`\r\x1b[K${line}`);
85
+ }
86
+ function clearLine() {
87
+ process.stdout.write('\r\x1b[K');
88
+ }
58
89
  const nativeBin = findNativeBinary();
59
90
  const program = new Command();
60
91
  if (nativeBin) {
@@ -83,7 +114,9 @@ program
83
114
  const result = await analyze(sourcePath, {
84
115
  framework: opts.framework,
85
116
  include: opts.include?.split(','),
117
+ onProgress: progressLine,
86
118
  });
119
+ clearLine();
87
120
  console.log(chalk.green('✓'), '분석 완료!');
88
121
  console.log(` 프레임워크: ${result.framework}`);
89
122
  console.log(` 컴포넌트 ${result.components.length} | 함수 ${result.functions.length} | API ${result.apiClients.length} | 라우트 ${result.routes.length} | 의존성 ${result.dependencies.length}`);
@@ -160,20 +193,6 @@ program
160
193
  const outputDir = opts.output || DEFAULT_OUTPUT;
161
194
  console.log(chalk.green('▶'), '크롤링 시작:', chalk.cyan(url));
162
195
  console.log(` 최대 깊이: ${opts.maxDepth} | 최대 페이지: ${opts.maxPages}`);
163
- // 인증 옵션 구성
164
- const auth = {};
165
- if (opts.authCookie)
166
- auth.cookie = opts.authCookie;
167
- if (opts.authBearer)
168
- auth.bearer = opts.authBearer;
169
- if (opts.loginUrl) {
170
- auth.loginUrl = opts.loginUrl;
171
- auth.credentials = {};
172
- if (opts.loginId)
173
- auth.credentials.email = opts.loginId;
174
- if (opts.loginPw)
175
- auth.credentials.password = opts.loginPw;
176
- }
177
196
  const result = await crawl({
178
197
  url,
179
198
  maxDepth: parseInt(opts.maxDepth),
@@ -182,7 +201,7 @@ program
182
201
  headless: opts.headless !== false,
183
202
  outputDir,
184
203
  waitTime: parseInt(opts.wait),
185
- auth: Object.keys(auth).length > 0 ? auth : undefined,
204
+ auth: buildAuth(opts, url),
186
205
  });
187
206
  console.log(chalk.green('✓'), `크롤링 완료!`);
188
207
  console.log(` 페이지: ${result.pages.length}개`);
@@ -195,55 +214,81 @@ program
195
214
  // ─── full ───
196
215
  program
197
216
  .command('full')
198
- .argument('[path]', '소스코드 경로 (생략하면 현재 디렉토리)')
199
- .option('--url <url>', '실행 중인 서비스 URL (크롤링 추가)')
217
+ .argument('[target]', 'URL 또는 소스코드 경로 (생략하면 현재 디렉토리)')
218
+ .option('--source <path>', '소스코드 경로 (URL 함께 사용 시)')
200
219
  .option('--framework <name>', '프레임워크', 'auto')
201
- .option('--no-headless', '크롤링 시 브라우저 표시')
202
- .option('--auth-cookie <cookie>', '크롤링 인증 쿠키')
203
- .option('-o, --output <dir>', '출력 디렉토리 (기본: <프로젝트>/.reverse-engine)')
204
- .description('전체 파이프라인 (crawl → analyze → report → test)')
205
- .action(async (path, opts) => {
206
- const sourcePath = path ? resolve(path) : detectProjectRoot();
207
- const outputDir = resolveOutput(opts.output, sourcePath);
220
+ .option('--no-headless', '브라우저 표시 (디버깅용)')
221
+ .option('--login-url <url>', '로그인 페이지 URL')
222
+ .option('--login-id <id>', '로그인 ID')
223
+ .option('--login-pw <pw>', '로그인 PW')
224
+ .option('--auth-cookie <cookie>', '인증 쿠키')
225
+ .option('--auth-bearer <token>', 'Bearer 토큰')
226
+ .option('--max-depth <n>', '크롤링 최대 깊이', '5')
227
+ .option('--max-pages <n>', '크롤링 최대 페이지', '100')
228
+ .option('--wait <ms>', '페이지 대기시간(ms)', '1500')
229
+ .option('-o, --output <dir>', '출력 디렉토리')
230
+ .description('전체 파이프라인 — URL이면 크롤링, 경로면 코드 분석, 둘 다 가능')
231
+ .action(async (target, opts) => {
232
+ // target이 URL인지 경로인지 판별
233
+ const isUrl = target?.startsWith('http://') || target?.startsWith('https://');
234
+ const crawlUrl = isUrl ? target : undefined;
235
+ const sourcePath = isUrl
236
+ ? (opts.source ? resolve(opts.source) : null)
237
+ : (target ? resolve(target) : detectProjectRoot());
238
+ const outputDir = resolveOutput(opts.output, sourcePath || undefined);
208
239
  console.log(chalk.green('\n◆'), 'ReversEngine', nativeBin ? chalk.dim('⚡') : '', '\n');
209
- if (opts.url)
210
- console.log(` URL: ${chalk.cyan(opts.url)}`);
211
- console.log(` 소스: ${chalk.cyan(sourcePath)}`);
240
+ if (crawlUrl)
241
+ console.log(` URL: ${chalk.cyan(crawlUrl)}`);
242
+ if (sourcePath)
243
+ console.log(` 소스: ${chalk.cyan(sourcePath)}`);
212
244
  console.log(` 출력: ${chalk.cyan(outputDir)}\n`);
213
245
  await mkdir(outputDir, { recursive: true });
214
246
  const analysisPath = join(outputDir, 'analysis.json');
215
- // Step 0: 크롤링 (URL이 있는 경우)
216
- if (opts.url) {
247
+ const crawlResultPath = join(outputDir, 'crawl-result.json');
248
+ // ── 크롤링 ──
249
+ if (crawlUrl) {
217
250
  console.log(chalk.gray('━'.repeat(50)));
218
- console.log(chalk.green('▶'), '크롤링:', chalk.cyan(opts.url));
251
+ console.log(chalk.green('▶'), '크롤링:', chalk.cyan(crawlUrl));
252
+ const auth = buildAuth(opts, crawlUrl);
219
253
  const crawlResult = await crawl({
220
- url: opts.url,
254
+ url: crawlUrl,
255
+ maxDepth: parseInt(opts.maxDepth),
256
+ maxPages: parseInt(opts.maxPages),
221
257
  outputDir,
222
258
  headless: opts.headless !== false,
223
- auth: opts.authCookie ? { cookie: opts.authCookie } : undefined,
259
+ waitTime: parseInt(opts.wait),
260
+ auth,
224
261
  });
225
- console.log(chalk.green('✓'), `크롤링: 페이지 ${crawlResult.pages.length} | API ${crawlResult.pages.reduce((n, p) => n + p.apiCalls.length, 0)}`);
226
- await writeFile(join(outputDir, 'crawl-result.json'), JSON.stringify(crawlResult, null, 2));
227
- }
228
- // Step 1: 코드 분석
229
- console.log(chalk.gray('━'.repeat(50)));
230
- if (nativeBin) {
231
- runNative(['analyze', sourcePath, '--framework', opts.framework || 'auto']);
262
+ const totalApi = crawlResult.pages.reduce((n, p) => n + p.apiCalls.length, 0);
263
+ console.log(chalk.green('✓'), `크롤링: 페이지 ${crawlResult.pages.length} | API ${totalApi} | 스크린샷 ${crawlResult.pages.filter(p => p.screenshotPath).length}`);
264
+ await writeFile(crawlResultPath, JSON.stringify(crawlResult, null, 2));
232
265
  }
233
- else {
234
- const result = await analyze(sourcePath, { framework: opts.framework });
235
- console.log(chalk.green(''), `분석: 컴포넌트 ${result.components.length} | 함수 ${result.functions.length} | API ${result.apiClients.length} | 라우트 ${result.routes.length}`);
236
- await writeFile(analysisPath, JSON.stringify(result, null, 2));
266
+ // ── 코드 분석 ──
267
+ if (sourcePath) {
268
+ console.log(chalk.gray('━'.repeat(50)));
269
+ if (nativeBin) {
270
+ runNative(['analyze', sourcePath, '--framework', opts.framework || 'auto']);
271
+ }
272
+ else {
273
+ const result = await analyze(sourcePath, { framework: opts.framework, onProgress: progressLine });
274
+ clearLine();
275
+ console.log(chalk.green('✓'), `분석: 컴포넌트 ${result.components.length} | 함수 ${result.functions.length} | API ${result.apiClients.length} | 라우트 ${result.routes.length}`);
276
+ await writeFile(analysisPath, JSON.stringify(result, null, 2));
277
+ }
237
278
  }
238
- // Step 2: 리포트
239
- if (existsSync(analysisPath)) {
240
- const data = JSON.parse(await readFile(analysisPath, 'utf-8'));
279
+ // ── 리포트 + 테스트 ──
280
+ const hasAnalysis = existsSync(analysisPath);
281
+ const hasCrawl = existsSync(crawlResultPath);
282
+ if (hasAnalysis || hasCrawl) {
283
+ // 리포트 데이터 결정 (분석 결과 우선, 없으면 크롤링 결과)
284
+ const reportData = hasAnalysis
285
+ ? JSON.parse(await readFile(analysisPath, 'utf-8'))
286
+ : JSON.parse(await readFile(crawlResultPath, 'utf-8'));
241
287
  console.log(chalk.gray('━'.repeat(50)));
242
- const reports = await generateReport(data, { outputDir: join(outputDir, 'reports') });
288
+ const reports = await generateReport(reportData, { outputDir: join(outputDir, 'reports') });
243
289
  console.log(chalk.green('✓'), '리포트:', reports.map(p => chalk.cyan(p.split('/').pop())).join(', '));
244
- // Step 3: 테스트
245
290
  console.log(chalk.gray('━'.repeat(50)));
246
- const tests = await generateTests(data, { outputDir: join(outputDir, 'tests') });
291
+ const tests = await generateTests(reportData, { outputDir: join(outputDir, 'tests') });
247
292
  console.log(chalk.green('✓'), `테스트: ${tests.length}개 파일`);
248
293
  }
249
294
  console.log(chalk.gray('━'.repeat(50)));
@@ -265,7 +310,8 @@ program
265
310
  await mkdir(outputDir, { recursive: true });
266
311
  const analysisPath = join(outputDir, 'analysis.json');
267
312
  console.log(chalk.gray('━'.repeat(50)));
268
- const result = await analyze(sourcePath, {});
313
+ const result = await analyze(sourcePath, { onProgress: progressLine });
314
+ clearLine();
269
315
  console.log(chalk.green('✓'), `분석: 컴포넌트 ${result.components.length} | 함수 ${result.functions.length} | API ${result.apiClients.length} | 라우트 ${result.routes.length}`);
270
316
  await writeFile(analysisPath, JSON.stringify(result, null, 2));
271
317
  const data = JSON.parse(await readFile(analysisPath, 'utf-8'));
@@ -6,7 +6,17 @@ export async function generateReport(data, options = {}) {
6
6
  await mkdir(outputDir, { recursive: true });
7
7
  const outputs = [];
8
8
  if (formats.includes('excel')) {
9
- outputs.push(await generateExcel(data, outputDir));
9
+ try {
10
+ outputs.push(await generateExcel(data, outputDir));
11
+ }
12
+ catch (e) {
13
+ if (e.code === 'EBUSY') {
14
+ console.log(' ⚠ Excel 파일이 열려 있어 덮어쓸 수 없습니다. 파일을 닫고 다시 시도하세요.');
15
+ }
16
+ else {
17
+ throw e;
18
+ }
19
+ }
10
20
  }
11
21
  if (formats.includes('mermaid')) {
12
22
  outputs.push(await generateMermaid(data, outputDir));
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "reverse-engine",
3
- "version": "0.4.0",
3
+ "version": "0.5.1",
4
4
  "description": "웹 서비스 역분석 자동화 도구 - 소스코드 분석, 문서 생성, 테스트 자동화",
5
5
  "keywords": [
6
6
  "reverse-engineering",