reverse-engine 0.4.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of reverse-engine might be problematic. Click here for more details.

Files changed (2)
  1. package/dist/cli/index.js +81 -49
  2. package/package.json +1 -1
package/dist/cli/index.js CHANGED
@@ -55,6 +55,27 @@ function runNative(args) {
55
55
  return;
56
56
  execFileSync(nativeBin, args, { stdio: 'inherit' });
57
57
  }
58
+ /** CLI 옵션에서 인증 설정 구성 */
59
+ function buildAuth(opts, baseUrl) {
60
+ const auth = {};
61
+ if (opts.authCookie)
62
+ auth.cookie = opts.authCookie;
63
+ if (opts.authBearer)
64
+ auth.bearer = opts.authBearer;
65
+ if (opts.loginUrl) {
66
+ // --login-url이 상대경로면 baseUrl 기준으로 resolve
67
+ const loginUrl = opts.loginUrl.startsWith('http')
68
+ ? opts.loginUrl
69
+ : new URL(opts.loginUrl, baseUrl).href;
70
+ auth.loginUrl = loginUrl;
71
+ auth.credentials = {};
72
+ if (opts.loginId)
73
+ auth.credentials.email = opts.loginId;
74
+ if (opts.loginPw)
75
+ auth.credentials.password = opts.loginPw;
76
+ }
77
+ return Object.keys(auth).length > 0 ? auth : undefined;
78
+ }
58
79
  const nativeBin = findNativeBinary();
59
80
  const program = new Command();
60
81
  if (nativeBin) {
@@ -160,20 +181,6 @@ program
160
181
  const outputDir = opts.output || DEFAULT_OUTPUT;
161
182
  console.log(chalk.green('▶'), '크롤링 시작:', chalk.cyan(url));
162
183
  console.log(` 최대 깊이: ${opts.maxDepth} | 최대 페이지: ${opts.maxPages}`);
163
- // 인증 옵션 구성
164
- const auth = {};
165
- if (opts.authCookie)
166
- auth.cookie = opts.authCookie;
167
- if (opts.authBearer)
168
- auth.bearer = opts.authBearer;
169
- if (opts.loginUrl) {
170
- auth.loginUrl = opts.loginUrl;
171
- auth.credentials = {};
172
- if (opts.loginId)
173
- auth.credentials.email = opts.loginId;
174
- if (opts.loginPw)
175
- auth.credentials.password = opts.loginPw;
176
- }
177
184
  const result = await crawl({
178
185
  url,
179
186
  maxDepth: parseInt(opts.maxDepth),
@@ -182,7 +189,7 @@ program
182
189
  headless: opts.headless !== false,
183
190
  outputDir,
184
191
  waitTime: parseInt(opts.wait),
185
- auth: Object.keys(auth).length > 0 ? auth : undefined,
192
+ auth: buildAuth(opts, url),
186
193
  });
187
194
  console.log(chalk.green('✓'), `크롤링 완료!`);
188
195
  console.log(` 페이지: ${result.pages.length}개`);
@@ -195,55 +202,80 @@ program
195
202
  // ─── full ───
196
203
  program
197
204
  .command('full')
198
- .argument('[path]', '소스코드 경로 (생략하면 현재 디렉토리)')
199
- .option('--url <url>', '실행 중인 서비스 URL (크롤링 추가)')
205
+ .argument('[target]', 'URL 또는 소스코드 경로 (생략하면 현재 디렉토리)')
206
+ .option('--source <path>', '소스코드 경로 (URL 함께 사용 시)')
200
207
  .option('--framework <name>', '프레임워크', 'auto')
201
- .option('--no-headless', '크롤링 시 브라우저 표시')
202
- .option('--auth-cookie <cookie>', '크롤링 인증 쿠키')
203
- .option('-o, --output <dir>', '출력 디렉토리 (기본: <프로젝트>/.reverse-engine)')
204
- .description('전체 파이프라인 (crawl → analyze → report → test)')
205
- .action(async (path, opts) => {
206
- const sourcePath = path ? resolve(path) : detectProjectRoot();
207
- const outputDir = resolveOutput(opts.output, sourcePath);
208
+ .option('--no-headless', '브라우저 표시 (디버깅용)')
209
+ .option('--login-url <url>', '로그인 페이지 URL')
210
+ .option('--login-id <id>', '로그인 ID')
211
+ .option('--login-pw <pw>', '로그인 PW')
212
+ .option('--auth-cookie <cookie>', '인증 쿠키')
213
+ .option('--auth-bearer <token>', 'Bearer 토큰')
214
+ .option('--max-depth <n>', '크롤링 최대 깊이', '5')
215
+ .option('--max-pages <n>', '크롤링 최대 페이지', '100')
216
+ .option('--wait <ms>', '페이지 대기시간(ms)', '1500')
217
+ .option('-o, --output <dir>', '출력 디렉토리')
218
+ .description('전체 파이프라인 — URL이면 크롤링, 경로면 코드 분석, 둘 다 가능')
219
+ .action(async (target, opts) => {
220
+ // target이 URL인지 경로인지 판별
221
+ const isUrl = target?.startsWith('http://') || target?.startsWith('https://');
222
+ const crawlUrl = isUrl ? target : undefined;
223
+ const sourcePath = isUrl
224
+ ? (opts.source ? resolve(opts.source) : null)
225
+ : (target ? resolve(target) : detectProjectRoot());
226
+ const outputDir = resolveOutput(opts.output, sourcePath || undefined);
208
227
  console.log(chalk.green('\n◆'), 'ReversEngine', nativeBin ? chalk.dim('⚡') : '', '\n');
209
- if (opts.url)
210
- console.log(` URL: ${chalk.cyan(opts.url)}`);
211
- console.log(` 소스: ${chalk.cyan(sourcePath)}`);
228
+ if (crawlUrl)
229
+ console.log(` URL: ${chalk.cyan(crawlUrl)}`);
230
+ if (sourcePath)
231
+ console.log(` 소스: ${chalk.cyan(sourcePath)}`);
212
232
  console.log(` 출력: ${chalk.cyan(outputDir)}\n`);
213
233
  await mkdir(outputDir, { recursive: true });
214
234
  const analysisPath = join(outputDir, 'analysis.json');
215
- // Step 0: 크롤링 (URL이 있는 경우)
216
- if (opts.url) {
235
+ const crawlResultPath = join(outputDir, 'crawl-result.json');
236
+ // ── 크롤링 ──
237
+ if (crawlUrl) {
217
238
  console.log(chalk.gray('━'.repeat(50)));
218
- console.log(chalk.green('▶'), '크롤링:', chalk.cyan(opts.url));
239
+ console.log(chalk.green('▶'), '크롤링:', chalk.cyan(crawlUrl));
240
+ const auth = buildAuth(opts, crawlUrl);
219
241
  const crawlResult = await crawl({
220
- url: opts.url,
242
+ url: crawlUrl,
243
+ maxDepth: parseInt(opts.maxDepth),
244
+ maxPages: parseInt(opts.maxPages),
221
245
  outputDir,
222
246
  headless: opts.headless !== false,
223
- auth: opts.authCookie ? { cookie: opts.authCookie } : undefined,
247
+ waitTime: parseInt(opts.wait),
248
+ auth,
224
249
  });
225
- console.log(chalk.green('✓'), `크롤링: 페이지 ${crawlResult.pages.length} | API ${crawlResult.pages.reduce((n, p) => n + p.apiCalls.length, 0)}`);
226
- await writeFile(join(outputDir, 'crawl-result.json'), JSON.stringify(crawlResult, null, 2));
250
+ const totalApi = crawlResult.pages.reduce((n, p) => n + p.apiCalls.length, 0);
251
+ console.log(chalk.green('✓'), `크롤링: 페이지 ${crawlResult.pages.length} | API ${totalApi} | 스크린샷 ${crawlResult.pages.filter(p => p.screenshotPath).length}`);
252
+ await writeFile(crawlResultPath, JSON.stringify(crawlResult, null, 2));
227
253
  }
228
- // Step 1: 코드 분석
229
- console.log(chalk.gray('━'.repeat(50)));
230
- if (nativeBin) {
231
- runNative(['analyze', sourcePath, '--framework', opts.framework || 'auto']);
232
- }
233
- else {
234
- const result = await analyze(sourcePath, { framework: opts.framework });
235
- console.log(chalk.green('✓'), `분석: 컴포넌트 ${result.components.length} | 함수 ${result.functions.length} | API ${result.apiClients.length} | 라우트 ${result.routes.length}`);
236
- await writeFile(analysisPath, JSON.stringify(result, null, 2));
254
+ // ── 코드 분석 ──
255
+ if (sourcePath) {
256
+ console.log(chalk.gray('━'.repeat(50)));
257
+ if (nativeBin) {
258
+ runNative(['analyze', sourcePath, '--framework', opts.framework || 'auto']);
259
+ }
260
+ else {
261
+ const result = await analyze(sourcePath, { framework: opts.framework });
262
+ console.log(chalk.green('✓'), `분석: 컴포넌트 ${result.components.length} | 함수 ${result.functions.length} | API ${result.apiClients.length} | 라우트 ${result.routes.length}`);
263
+ await writeFile(analysisPath, JSON.stringify(result, null, 2));
264
+ }
237
265
  }
238
- // Step 2: 리포트
239
- if (existsSync(analysisPath)) {
240
- const data = JSON.parse(await readFile(analysisPath, 'utf-8'));
266
+ // ── 리포트 + 테스트 ──
267
+ const hasAnalysis = existsSync(analysisPath);
268
+ const hasCrawl = existsSync(crawlResultPath);
269
+ if (hasAnalysis || hasCrawl) {
270
+ // 리포트 데이터 결정 (분석 결과 우선, 없으면 크롤링 결과)
271
+ const reportData = hasAnalysis
272
+ ? JSON.parse(await readFile(analysisPath, 'utf-8'))
273
+ : JSON.parse(await readFile(crawlResultPath, 'utf-8'));
241
274
  console.log(chalk.gray('━'.repeat(50)));
242
- const reports = await generateReport(data, { outputDir: join(outputDir, 'reports') });
275
+ const reports = await generateReport(reportData, { outputDir: join(outputDir, 'reports') });
243
276
  console.log(chalk.green('✓'), '리포트:', reports.map(p => chalk.cyan(p.split('/').pop())).join(', '));
244
- // Step 3: 테스트
245
277
  console.log(chalk.gray('━'.repeat(50)));
246
- const tests = await generateTests(data, { outputDir: join(outputDir, 'tests') });
278
+ const tests = await generateTests(reportData, { outputDir: join(outputDir, 'tests') });
247
279
  console.log(chalk.green('✓'), `테스트: ${tests.length}개 파일`);
248
280
  }
249
281
  console.log(chalk.gray('━'.repeat(50)));
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "reverse-engine",
3
- "version": "0.4.0",
3
+ "version": "0.5.0",
4
4
  "description": "웹 서비스 역분석 자동화 도구 - 소스코드 분석, 문서 생성, 테스트 자동화",
5
5
  "keywords": [
6
6
  "reverse-engineering",