@merlean/analyzer 2.2.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +75 -0
- package/bin/cli.js +51 -136
- package/package.json +10 -24
- package/bin/wrapper.js +0 -51
- package/lib/analyzer.js +0 -535
- package/scripts/postinstall.js +0 -156
package/README.md
ADDED
@@ -0,0 +1,75 @@
+# @merlean/analyzer
+
+AI-powered codebase analyzer that generates site maps for the Merlean AI assistant widget.
+
+## Installation
+
+```bash
+npm install -g @merlean/analyzer
+```
+
+## Usage
+
+```bash
+# Analyze current directory
+ai-bot-analyze --name "My App"
+
+# Analyze specific path
+ai-bot-analyze ./my-project --name "My App"
+
+# Merge into existing site map
+ai-bot-analyze ./admin-panel --merge-with site_abc123
+```
+
+Or use with npx (no installation required):
+
+```bash
+npx @merlean/analyzer ./my-project --name "My App"
+```
+
+## Options
+
+| Option | Description |
+|--------|-------------|
+| `--name, -n <name>` | Site name (required for new analysis) |
+| `--merge-with, -m <siteId>` | Merge into existing site map |
+| `--backend, -b <url>` | Custom backend URL |
+| `--output, -o <file>` | Save site map locally |
+| `--help, -h` | Show help |
+
+## Supported Frameworks
+
+- **JavaScript/TypeScript**: Express, Fastify, NestJS, React, Vue, Angular
+- **PHP**: CodeIgniter, Laravel, Symfony
+- **Python**: Flask, FastAPI, Django
+- **Ruby**: Rails, Sinatra
+- **Go**: Gin, Echo, Chi
+- **Java/Kotlin**: Spring Boot
+
+## How It Works
+
+1. Scans your codebase for API patterns
+2. Extracts route definitions and request schemas
+3. Uploads to Merlean backend for AI analysis
+4. Returns a site ID for widget integration
+
+## Integration
+
+After analysis, add the widget to your site:
+
+```html
+<script src="https://ai-bot-backend.fly.dev/bot.js" data-site-id="YOUR_SITE_ID"></script>
+```
+
+## Platform Support
+
+Pre-compiled binaries are available for:
+- macOS (Apple Silicon / ARM64)
+- macOS (Intel / x64)
+- Linux (x64)
+- Windows (x64)
+
+## Links
+
+- [Documentation](https://github.com/zmaren/merlean#readme)
+- [Report Issues](https://github.com/zmaren/merlean/issues)
package/bin/cli.js
CHANGED
@@ -1,152 +1,67 @@
 #!/usr/bin/env node

 /**
- * Merlean Analyzer
+ * Merlean Analyzer - Platform Binary Loader
  *
- *
- *
- *
- * Usage:
- *   npx @merlean/analyzer ./my-project --name "My App"
+ * This wrapper loads the platform-specific binary for your OS/architecture.
+ * The actual analysis logic is compiled into native binaries.
  */

-const {
+const { spawn } = require('child_process');
 const path = require('path');
 const fs = require('fs');

-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      options.name = args[++i];
-    } else if (arg === '--backend' || arg === '-b') {
-      options.backend = args[++i];
-    } else if (arg === '--output' || arg === '-o') {
-      options.output = args[++i];
-    } else if (arg === '--help' || arg === '-h') {
-      printHelp();
-      process.exit(0);
-    } else if (!arg.startsWith('-') && !options.path) {
-      options.path = arg;
-    }
-  }
-
-  return options;
-}
-
-function printHelp() {
-  console.log(`
-Merlean Analyzer - AI-powered codebase analysis
-
-Usage:
-  npx @merlean/analyzer --name <name>
-  npx @merlean/analyzer <path> --name <name>
-
-Arguments:
-  <path> Path to codebase (default: current directory)
-
-Options:
-  --name, -n <name> Site name (required)
-  --backend, -b <url> Backend URL (default: ${DEFAULT_BACKEND})
-  --output, -o <file> Save site map locally (optional)
-  --help, -h Show this help
-
-Examples:
-  # Analyze current directory
-  npx @merlean/analyzer --name "My App"
-
-  # Analyze specific path
-  npx @merlean/analyzer ./my-app --name "My App"
-
-  # Use custom backend (for local dev)
-  npx @merlean/analyzer --name "My App" --backend http://localhost:3004
-`);
+const PLATFORMS = {
+  'darwin-arm64': '@merlean/analyzer-darwin-arm64',
+  'darwin-x64': '@merlean/analyzer-darwin-x64',
+  'linux-x64': '@merlean/analyzer-linux-x64',
+  'win32-x64': '@merlean/analyzer-win32-x64',
+};
+
+const platformKey = `${process.platform}-${process.arch}`;
+const packageName = PLATFORMS[platformKey];
+
+if (!packageName) {
+  console.error(`\n❌ Unsupported platform: ${platformKey}`);
+  console.error(`\nSupported platforms:`);
+  Object.keys(PLATFORMS).forEach(p => console.error(` - ${p}`));
+  console.error(`\nPlease open an issue at https://github.com/zmaren/merlean/issues`);
+  process.exit(1);
 }

-
-
-
-const
-
-
-
-  console.error('❌ Error: --name is required');
-  console.log(' Run with --help for usage');
-  process.exit(1);
-}
-
-// Default to current directory if no path provided
-const codebasePath = path.resolve(options.path || '.');
+let binaryPath;
+try {
+  // Find the platform-specific package
+  const packagePath = require.resolve(`${packageName}/package.json`);
+  const packageDir = path.dirname(packagePath);
+  const binaryName = process.platform === 'win32' ? 'ai-bot-analyze.exe' : 'ai-bot-analyze';
+  binaryPath = path.join(packageDir, 'bin', binaryName);

-if (!fs.existsSync(
-
-  process.exit(1);
+  if (!fs.existsSync(binaryPath)) {
+    throw new Error('Binary not found in package');
   }
+} catch (e) {
+  console.error(`\n❌ Failed to find binary for ${platformKey}`);
+  console.error(`\nPackage: ${packageName}`);
+  console.error(`Error: ${e.message}`);
+  console.error(`\nTry reinstalling:`);
+  console.error(`  npm install -g @merlean/analyzer`);
+  console.error(`\nOr with npx (downloads fresh):`);
+  console.error(`  npx @merlean/analyzer@latest --help`);
+  process.exit(1);
+}

-
-
-
-
-
-  const fileContents = await scanCodebase(codebasePath);
-
-  console.log(`\n📤 Uploading to backend for analysis...`);
-
-  // Send to backend for LLM analysis
-  const response = await fetch(`${options.backend}/api/analyze`, {
-    method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
-    body: JSON.stringify({
-      siteName: options.name,
-      files: fileContents
-    })
-  });
-
-  if (!response.ok) {
-    const error = await response.text();
-    throw new Error(`Backend error: ${response.status} ${error}`);
-  }
-
-  const result = await response.json();
-
-  console.log('\n✅ Analysis complete!');
-  console.log('\n📊 Summary:');
-  console.log(` Site ID: ${result.siteId}`);
-  console.log(` Framework: ${result.framework || 'Unknown'}`);
-  console.log(` Routes: ${result.routes?.length || 0}`);
-  console.log(` Forms: ${result.forms?.length || 0}`);
-  console.log(` Actions: ${result.actions?.length || 0}`);
-
-  // Save locally if requested
-  if (options.output) {
-    const outputPath = path.resolve(options.output);
-    fs.writeFileSync(outputPath, JSON.stringify(result, null, 2));
-    console.log(`\n💾 Saved to: ${outputPath}`);
-  }
-
-  // Show integration instructions
-  console.log('\n' + '─'.repeat(50));
-  console.log('\n🎉 Integration Ready!\n');
-  console.log('Add this to your website:\n');
-  console.log(`<script src="${options.backend}/bot.js" data-site-id="${result.siteId}"></script>`);
-  console.log('\n' + '─'.repeat(50) + '\n');
+// Run the binary with all arguments
+const child = spawn(binaryPath, process.argv.slice(2), {
+  stdio: 'inherit',
+  env: process.env
+});

-
-
-
-
-}
+child.on('error', (err) => {
+  console.error(`\n❌ Failed to execute binary: ${err.message}`);
+  process.exit(1);
+});

-
+child.on('exit', (code) => {
+  process.exit(code || 0);
+});
package/package.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "name": "@merlean/analyzer",
-  "version": "
-  "description": "AI
-  "keywords": ["ai", "bot", "analyzer", "claude", "anthropic", "widget"],
+  "version": "3.0.0",
+  "description": "AI-powered codebase analyzer - generates site maps for AI assistant integration",
+  "keywords": ["ai", "bot", "analyzer", "claude", "anthropic", "widget", "merlean"],
   "author": "zmaren",
   "license": "MIT",
   "repository": {
@@ -15,32 +15,18 @@
     "access": "public"
   },
   "bin": {
-    "ai-bot-analyze": "./bin/
+    "ai-bot-analyze": "./bin/cli.js"
   },
-  "main": "lib/analyzer.js",
   "files": [
-    "bin/"
-    "lib/",
-    "scripts/"
+    "bin/"
   ],
-  "
-  "
-  "
-  "
-
-  "pkg": {
-    "scripts": ["lib/**/*.js", "bin/cli.js"],
-    "assets": [],
-    "targets": ["node18-linux-x64", "node18-macos-x64", "node18-macos-arm64", "node18-win-x64"],
-    "outputPath": "dist"
+  "optionalDependencies": {
+    "@merlean/analyzer-darwin-arm64": "3.0.0",
+    "@merlean/analyzer-darwin-x64": "3.0.0",
+    "@merlean/analyzer-linux-x64": "3.0.0",
+    "@merlean/analyzer-win32-x64": "3.0.0"
   },
   "engines": {
     "node": ">=18.0.0"
-  },
-  "dependencies": {
-    "glob": "^10.3.10"
-  },
-  "devDependencies": {
-    "pkg": "^5.8.1"
   }
 }
package/bin/wrapper.js
DELETED
@@ -1,51 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * AI Bot Analyzer - Wrapper Script
- *
- * Attempts to run the compiled binary, falls back to source if not available.
- * This allows the package to work even if binary download failed.
- */
-
-const { spawn, execFileSync } = require('child_process');
-const path = require('path');
-const fs = require('fs');
-
-const binDir = __dirname;
-const binaryName = process.platform === 'win32' ? 'ai-bot-analyze.exe' : 'ai-bot-analyze';
-const binaryPath = path.join(binDir, binaryName);
-const sourcePath = path.join(binDir, 'cli.js');
-const sourceModeFlagPath = path.join(binDir, '..', '.source-mode');
-
-// Check if we should use source mode
-const useSourceMode = fs.existsSync(sourceModeFlagPath) || !fs.existsSync(binaryPath);
-
-if (useSourceMode) {
-  // Run source directly with Node.js
-  require('./cli.js');
-} else {
-  // Run compiled binary
-  const args = process.argv.slice(2);
-
-  try {
-    const result = spawn(binaryPath, args, {
-      stdio: 'inherit',
-      env: process.env
-    });
-
-    result.on('error', (err) => {
-      // If binary fails, fall back to source
-      console.error('Binary execution failed, falling back to source mode...');
-      require('./cli.js');
-    });
-
-    result.on('exit', (code) => {
-      process.exit(code || 0);
-    });
-
-  } catch (err) {
-    // Fall back to source mode
-    require('./cli.js');
-  }
-}
-
package/lib/analyzer.js
DELETED
@@ -1,535 +0,0 @@
-/**
- * Framework-Agnostic Codebase Scanner
- *
- * Scans ANY codebase (frontend or backend) to extract API patterns:
- * - Express/Fastify/Koa/Hapi route definitions
- * - NestJS decorators (@Get, @Post, @Body, @Query, etc.)
- * - Frontend HTTP calls (fetch, axios, HttpClient, etc.)
- * - Swagger/OpenAPI annotations
- * - Request body schemas and query parameters
- * - TypeScript interfaces/DTOs
- * - Validation schemas (Zod, Joi, class-validator)
- *
- * The goal is to understand what APIs exist, their parameters,
- * and body structures - regardless of framework or code style.
- */
-
-const fs = require('fs');
-const path = require('path');
-const { glob } = require('glob');
-
-// File patterns to scan (be inclusive)
-const FILE_PATTERNS = [
-  '**/*.js',
-  '**/*.jsx',
-  '**/*.ts',
-  '**/*.tsx',
-  '**/*.vue',
-  '**/*.svelte',
-  '**/*.mjs',
-  '**/*.cjs'
-];
-
-// Only ignore truly irrelevant directories
-const IGNORE_PATTERNS = [
-  '**/node_modules/**',
-  '**/vendor/**',
-  '**/.git/**',
-  '**/dist/**',
-  '**/build/**',
-  '**/coverage/**',
-  '**/__pycache__/**',
-  '**/venv/**',
-  '**/*.min.js',
-  '**/*.map',
-  '**/*.d.ts', // TypeScript declaration files
-  '**/*.spec.ts', // Test files
-  '**/*.spec.js',
-  '**/*.test.ts',
-  '**/*.test.js',
-  '**/__tests__/**',
-  '**/__mocks__/**'
-];
-
-// Files that are highly likely to contain API definitions
-const HIGH_PRIORITY_PATTERNS = [
-  /routes?\.ts$/i,
-  /routes?\.js$/i,
-  /router\.ts$/i,
-  /router\.js$/i,
-  /controller\.ts$/i,
-  /controller\.js$/i,
-  /\.controller\.ts$/i,
-  /\.controller\.js$/i,
-  /service\.ts$/i,
-  /service\.js$/i,
-  /\.service\.ts$/i,
-  /\.service\.js$/i,
-  /api\.ts$/i,
-  /api\.js$/i,
-  /endpoints?\.ts$/i,
-  /endpoints?\.js$/i,
-  /http\.ts$/i,
-  /http\.js$/i,
-  /client\.ts$/i,
-  /client\.js$/i,
-  /dto\.ts$/i,
-  /\.dto\.ts$/i,
-  /interfaces?\.ts$/i,
-  /types?\.ts$/i,
-  /schema\.ts$/i,
-  /schemas?\.ts$/i,
-  /validation\.ts$/i
-];
-
-// Medium priority - might contain API patterns
-const MEDIUM_PRIORITY_PATTERNS = [
-  /index\.ts$/i,
-  /index\.js$/i,
-  /app\.ts$/i,
-  /app\.js$/i,
-  /main\.ts$/i,
-  /main\.js$/i,
-  /server\.ts$/i,
-  /server\.js$/i
-];
-
-/**
- * Scan codebase and collect API patterns from any framework
- */
-async function scanCodebase(codebasePath) {
-  console.log(' Scanning files...');
-
-  // Get all matching files
-  const files = await glob(FILE_PATTERNS, {
-    cwd: codebasePath,
-    ignore: IGNORE_PATTERNS,
-    absolute: true
-  });
-
-  console.log(` Found ${files.length} files`);
-
-  // Categorize and prioritize files
-  const { highPriority, mediumPriority, other } = categorizeFiles(files, codebasePath);
-
-  console.log(` High priority: ${highPriority.length}, Medium: ${mediumPriority.length}, Other: ${other.length}`);
-
-  // Analyze high priority files first (routes, controllers, services, DTOs)
-  // Then medium priority, then scan others for API patterns
-  const filesToAnalyze = [
-    ...highPriority,
-    ...mediumPriority.slice(0, 20),
-    ...other.slice(0, 50)
-  ];
-
-  console.log(` Analyzing ${filesToAnalyze.length} files for API patterns...`);
-
-  const fileContents = [];
-  let filesWithPatterns = 0;
-
-  for (const file of filesToAnalyze) {
-    try {
-      const content = fs.readFileSync(file, 'utf-8');
-      const relativePath = path.relative(codebasePath, file);
-
-      // Extract API patterns from the file
-      const extracted = extractApiPatterns(content, relativePath, file);
-
-      if (extracted.hasApiPatterns) {
-        filesWithPatterns++;
-        fileContents.push({
-          path: relativePath,
-          content: extracted.content
-        });
-      }
-    } catch (error) {
-      // Skip files that can't be read
-    }
-  }
-
-  console.log(` Found API patterns in ${filesWithPatterns} files`);
-
-  return fileContents;
-}
-
-/**
- * Categorize files by priority
- */
-function categorizeFiles(files, basePath) {
-  const highPriority = [];
-  const mediumPriority = [];
-  const other = [];
-
-  for (const file of files) {
-    const relativePath = path.relative(basePath, file);
-
-    if (HIGH_PRIORITY_PATTERNS.some(p => p.test(relativePath))) {
-      highPriority.push(file);
-    } else if (MEDIUM_PRIORITY_PATTERNS.some(p => p.test(relativePath))) {
-      mediumPriority.push(file);
-    } else {
-      other.push(file);
-    }
-  }
-
-  return { highPriority, mediumPriority, other };
-}
-
-/**
- * Extract Swagger/OpenAPI documentation blocks
- */
-function extractSwaggerBlocks(content) {
-  const swaggerBlocks = [];
-
-  // Match JSDoc blocks with @swagger
-  const swaggerRegex = /\/\*\*[\s\S]*?@swagger[\s\S]*?\*\//g;
-  let match;
-
-  while ((match = swaggerRegex.exec(content)) !== null) {
-    swaggerBlocks.push(match[0]);
-  }
-
-  return swaggerBlocks;
-}
-
-/**
- * Extract TypeScript interfaces and types that look like DTOs/schemas
- */
-function extractTypeDefinitions(content, filePath) {
-  const typeBlocks = [];
-  const lines = content.split('\n');
-
-  // Look for interfaces and types that might be request/response schemas
-  const typeKeywords = [
-    /interface\s+\w*(Request|Response|Dto|Body|Query|Params|Payload|Input|Output)\w*\s*\{/i,
-    /type\s+\w*(Request|Response|Dto|Body|Query|Params|Payload|Input|Output)\w*\s*=/i,
-    /interface\s+\w+\s*\{/, // Any interface in DTO/schema files
-    /type\s+\w+\s*=/ // Any type in DTO/schema files
-  ];
-
-  // Only extract type definitions from files that look like DTOs/types
-  const isDtoFile = /dto|interface|type|schema|model/i.test(filePath);
-
-  for (let i = 0; i < lines.length; i++) {
-    const line = lines[i];
-
-    const isTypeDefinition = typeKeywords.some(regex => regex.test(line));
-
-    if (isTypeDefinition || (isDtoFile && /^(export\s+)?(interface|type)\s+/.test(line))) {
-      // Find the complete type block (until closing brace at same indent level)
-      let braceCount = 0;
-      let started = false;
-      let endLine = i;
-
-      for (let j = i; j < lines.length && j < i + 50; j++) {
-        const l = lines[j];
-        for (const char of l) {
-          if (char === '{') {
-            braceCount++;
-            started = true;
-          } else if (char === '}') {
-            braceCount--;
-          }
-        }
-        endLine = j;
-        if (started && braceCount === 0) break;
-      }
-
-      const block = lines.slice(i, endLine + 1)
-        .map((l, idx) => `${i + idx + 1}: ${l}`)
-        .join('\n');
-
-      typeBlocks.push(block);
-      i = endLine; // Skip processed lines
-    }
-  }
-
-  return typeBlocks;
-}
-
-/**
- * Extract request body and query parameter patterns
- */
-function extractRequestPatterns(content) {
-  const patterns = [];
-  const lines = content.split('\n');
-
-  for (let i = 0; i < lines.length; i++) {
-    const line = lines[i];
-
-    // Destructuring patterns for req.body, req.query, req.params
-    if (/(?:const|let|var)\s*\{[^}]+\}\s*=\s*req\.(body|query|params)/i.test(line) ||
-        /req\.(body|query|params)\s*[;.]/.test(line)) {
-
-      // Get context around it
-      const startLine = Math.max(0, i - 2);
-      const endLine = Math.min(lines.length - 1, i + 5);
-
-      const block = lines.slice(startLine, endLine + 1)
-        .map((l, idx) => `${startLine + idx + 1}: ${l}`)
-        .join('\n');
-
-      patterns.push(`// Request parameter extraction:\n${block}`);
-      i = endLine;
-    }
-
-    // NestJS decorators: @Body(), @Query(), @Param()
-    if (/@(Body|Query|Param|Headers)\s*\(/i.test(line)) {
-      const startLine = Math.max(0, i - 1);
-      const endLine = Math.min(lines.length - 1, i + 3);
-
-      const block = lines.slice(startLine, endLine + 1)
-        .map((l, idx) => `${startLine + idx + 1}: ${l}`)
-        .join('\n');
-
-      patterns.push(`// NestJS parameter decorator:\n${block}`);
-      i = endLine;
-    }
-
-    // Validation schemas: Joi, Zod, class-validator
-    if (/Joi\.(object|string|number|array|boolean)\s*\(/i.test(line) ||
-        /z\.(object|string|number|array|boolean)\s*\(/i.test(line) ||
-        /@(IsString|IsNumber|IsArray|IsBoolean|IsOptional|ValidateNested)/i.test(line)) {
-
-      const startLine = Math.max(0, i - 2);
-      const endLine = Math.min(lines.length - 1, i + 10);
-
-      const block = lines.slice(startLine, endLine + 1)
-        .map((l, idx) => `${startLine + idx + 1}: ${l}`)
-        .join('\n');
-
-      patterns.push(`// Validation schema:\n${block}`);
-      i = endLine;
-    }
-  }
-
-  return patterns;
-}
-
-/**
- * Extract API patterns from file content - framework agnostic
- */
-function extractApiPatterns(content, filePath, absolutePath) {
-  const lines = content.split('\n');
-  let hasApiPatterns = false;
-  const extractedBlocks = [];
-
-  // Check if this is a route/controller file - if so, include more context
-  const isRouteFile = HIGH_PRIORITY_PATTERNS.some(p => p.test(filePath));
-  const isDtoFile = /dto|interface|types?|schema/i.test(filePath);
-
-  // ============================================
-  // PATTERN 0: Swagger/OpenAPI documentation
-  // ============================================
-  const swaggerBlocks = extractSwaggerBlocks(content);
-  if (swaggerBlocks.length > 0) {
-    hasApiPatterns = true;
-    extractedBlocks.push(`// Swagger/OpenAPI documentation found:\n${swaggerBlocks.join('\n\n')}`);
-  }
-
-  // ============================================
-  // PATTERN 1: TypeScript interfaces/types (DTOs, schemas)
-  // ============================================
-  const typeBlocks = extractTypeDefinitions(content, filePath);
-  if (typeBlocks.length > 0) {
-    hasApiPatterns = true;
-    extractedBlocks.push(`// TypeScript type definitions:\n${typeBlocks.join('\n\n')}`);
-  }
-
-  // ============================================
-  // PATTERN 2: Request body/query extraction
-  // ============================================
-  const requestPatterns = extractRequestPatterns(content);
-  if (requestPatterns.length > 0) {
-    hasApiPatterns = true;
-    extractedBlocks.push(requestPatterns.join('\n\n'));
-  }
-
-  // ============================================
-  // PATTERN 3: Express/Koa/Fastify Router definitions
-  // ============================================
-  const routerPatterns = [
-    // Express Router: router.get('/path', handler)
-    /\.(get|post|put|patch|delete|all|use)\s*\(\s*['"`]([^'"`]+)['"`]/gi,
-    // Express app: app.get('/path', handler)
-    /app\.(get|post|put|patch|delete|all|use)\s*\(\s*['"`]([^'"`]+)['"`]/gi,
-    // Fastify: fastify.get('/path', handler)
-    /fastify\.(get|post|put|patch|delete|all)\s*\(\s*['"`]([^'"`]+)['"`]/gi,
-  ];
-
-  // ============================================
-  // PATTERN 4: NestJS/Decorators
-  // ============================================
-  const decoratorPatterns = [
-    /@(Get|Post|Put|Patch|Delete|All)\s*\(\s*['"`]?([^'"`\)]*)/gi,
-    /@Controller\s*\(\s*['"`]([^'"`]+)/gi,
-  ];
-
-  // ============================================
-  // PATTERN 5: Frontend HTTP calls
-  // ============================================
-  const httpCallPatterns = [
-    // fetch() calls
-    /fetch\s*\(\s*[`'"](.*?)[`'"]/g,
-    /fetch\s*\(\s*`([^`]*)`/g,
-    // axios calls
-    /axios\.(get|post|put|patch|delete)\s*\(\s*[`'"](.*?)[`'"]/g,
-    /axios\s*\(\s*\{[^}]*url\s*:\s*[`'"](.*?)[`'"]/g,
-    // Angular HttpClient
-    /this\.http\.(get|post|put|patch|delete)\s*[<(]/g,
-    /httpClient\.(get|post|put|patch|delete)\s*[<(]/g,
-    // jQuery ajax
-    /\$\.(ajax|get|post)\s*\(\s*[`'"](.*?)[`'"]/g,
-    // Generic request libraries
-    /request\.(get|post|put|patch|delete)\s*\(/g,
-    /got\.(get|post|put|patch|delete)\s*\(/g,
-    /superagent\.(get|post|put|patch|delete)\s*\(/g,
-  ];
-
-  // ============================================
-  // PATTERN 6: API URL definitions
-  // ============================================
-  const urlPatterns = [
-    // API endpoints in strings
-    /['"`](\/api\/[^'"`\s]+)['"`]/g,
-    /['"`](\/v\d+\/[^'"`\s]+)['"`]/g,
-    /['"`](https?:\/\/[^'"`\s]*\/api[^'"`\s]*)['"`]/g,
-    // Base URL definitions
-    /(?:API_BASE|API_URL|BASE_URL|baseURL|apiUrl|apiBase|API_ENDPOINT|BACKEND_URL)\s*[:=]\s*['"`]([^'"`]+)['"`]/gi,
-  ];
-
-  // ============================================
-  // PATTERN 7: Method + URL combinations
-  // ============================================
-  const methodUrlPatterns = [
-    /(GET|POST|PUT|PATCH|DELETE)\s*[,:]?\s*['"`](\/[^'"`]+)['"`]/gi,
-    /method:\s*['"`](GET|POST|PUT|PATCH|DELETE)['"`]/gi,
-  ];
-
-  // Combine all patterns for line scanning
-  const allPatterns = [
-    ...routerPatterns,
-    ...decoratorPatterns,
-    ...httpCallPatterns,
-    ...urlPatterns,
-    ...methodUrlPatterns
-  ];
-
-  // If this is a route/controller file with swagger docs, include the whole file
-  if (isRouteFile && swaggerBlocks.length > 0) {
-    hasApiPatterns = true;
-    // Include entire file content (truncated if too long)
-    const maxLines = 300;
-    const truncatedContent = lines.length > maxLines
-      ? lines.slice(0, maxLines).join('\n') + `\n// ... ${lines.length - maxLines} more lines ...`
-      : content;
-
-    return {
-      hasApiPatterns: true,
-      content: `// File: ${filePath}\n// Route/Controller file with Swagger docs - full content:\n\n${truncatedContent}`
-    };
-  }
-
-  // If this is a route file without swagger, still include more content
-  if (isRouteFile) {
-    const hasRouteContent = allPatterns.some(p => {
-      p.lastIndex = 0;
-      return p.test(content);
-    });
-
-    if (hasRouteContent) {
-      hasApiPatterns = true;
-      const maxLines = 200;
-      const truncatedContent = lines.length > maxLines
-        ? lines.slice(0, maxLines).join('\n') + `\n// ... ${lines.length - maxLines} more lines ...`
-        : content;
-
-      return {
-        hasApiPatterns: true,
-        content: `// File: ${filePath}\n// Route/Controller file - full content:\n\n${truncatedContent}`
-      };
-    }
-  }
-
-  // If this is a DTO/types file, include full content
-  if (isDtoFile && typeBlocks.length > 0) {
-    hasApiPatterns = true;
-    const maxLines = 150;
-    const truncatedContent = lines.length > maxLines
-      ? lines.slice(0, maxLines).join('\n') + `\n// ... ${lines.length - maxLines} more lines ...`
-      : content;
-
-    return {
-      hasApiPatterns: true,
-      content: `// File: ${filePath}\n// DTO/Types file - full content:\n\n${truncatedContent}`
-    };
-  }
-
-  // For non-route files, extract relevant sections
-  // First, extract imports and base URL definitions
-  const baseUrlLines = [];
-
-  for (let i = 0; i < Math.min(lines.length, 50); i++) {
-    const line = lines[i];
-    if (/(?:API_BASE|API_URL|BASE_URL|baseURL|apiUrl|BACKEND)/i.test(line)) {
-      baseUrlLines.push(`${i + 1}: ${line}`);
-    }
-  }
-
-  if (baseUrlLines.length > 0) {
-    extractedBlocks.push('// Base URL definitions:\n' + baseUrlLines.join('\n'));
-    hasApiPatterns = true;
-  }
-
-  // Line-by-line extraction with context
-  for (let i = 0; i < lines.length; i++) {
-    const line = lines[i];
-
-    // Check all patterns
-    let hasPattern = false;
-    for (const pattern of allPatterns) {
-      pattern.lastIndex = 0;
-      if (pattern.test(line)) {
-        hasPattern = true;
-        break;
-      }
-    }
-
-    // Also check for common API keywords
-    const hasKeyword = /\.get\(|\.post\(|\.put\(|\.patch\(|\.delete\(|fetch\(|axios|\/api\/|endpoint|@Get|@Post|@Put|@Delete/i.test(line);
-
-    if (hasPattern || hasKeyword) {
-      hasApiPatterns = true;
-
-      // Determine context needed
-      const isPostOrPut = /post|put|patch/i.test(line);
-      const contextBefore = isPostOrPut ? 20 : 5; // More context for mutations
-      const contextAfter = isPostOrPut ? 10 : 5;
-
-      const startLine = Math.max(0, i - contextBefore);
-      const endLine = Math.min(lines.length - 1, i + contextAfter);
-
-      const block = lines.slice(startLine, endLine + 1)
-        .map((l, idx) => `${startLine + idx + 1}: ${l}`)
-        .join('\n');
-
-      extractedBlocks.push(block);
-
-      // Skip ahead to avoid duplicates
-      i = endLine;
-    }
-  }
-
-  if (hasApiPatterns && extractedBlocks.length > 0) {
-    // Deduplicate blocks
-    const uniqueBlocks = [...new Set(extractedBlocks)];
-    return {
-      hasApiPatterns: true,
-      content: `// File: ${filePath}\n// API patterns extracted:\n\n${uniqueBlocks.join('\n\n// ---\n\n')}`
-    };
-  }
-
-  return { hasApiPatterns: false, content: '' };
-}
-
-module.exports = { scanCodebase };
package/scripts/postinstall.js
DELETED
@@ -1,156 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * Postinstall script - Downloads the correct binary for the user's platform
- * from GitHub Releases
- */
-
-const https = require('https');
-const fs = require('fs');
-const path = require('path');
-const { execSync } = require('child_process');
-
-const REPO = 'zmaren/merlean';
-const BINARY_NAME = 'ai-bot-analyze';
-const VERSION = require('../package.json').version;
-
-// Map Node.js platform/arch to binary names
-function getBinaryName() {
-  const platform = process.platform;
-  const arch = process.arch;
-
-  const platformMap = {
-    'darwin-arm64': 'ai-bot-analyze-macos-arm64',
-    'darwin-x64': 'ai-bot-analyze-macos-x64',
-    'linux-x64': 'ai-bot-analyze-linux-x64',
-    'win32-x64': 'ai-bot-analyze-win-x64.exe'
-  };
-
-  const key = `${platform}-${arch}`;
-  const binaryName = platformMap[key];
-
-  if (!binaryName) {
-    console.error(`❌ Unsupported platform: ${platform}-${arch}`);
-    console.error(' Supported: darwin-arm64, darwin-x64, linux-x64, win32-x64');
-    console.error(' Falling back to source mode (requires Node.js)');
-    return null;
-  }
-
-  return binaryName;
-}
-
-// Download binary from GitHub Releases
-async function downloadBinary(binaryName) {
-  const binDir = path.join(__dirname, '..', 'bin');
-  const binaryPath = path.join(binDir, process.platform === 'win32' ? `${BINARY_NAME}.exe` : BINARY_NAME);
-
-  // Skip if binary already exists
-  if (fs.existsSync(binaryPath)) {
-    console.log('✓ Binary already exists');
-    return true;
-  }
-
-  const url = `https://github.com/${REPO}/releases/download/v${VERSION}/${binaryName}`;
-
-  console.log(`📦 Downloading ${binaryName}...`);
-  console.log(` From: ${url}`);
-
-  return new Promise((resolve) => {
-    const download = (downloadUrl, redirectCount = 0) => {
-      if (redirectCount > 5) {
-        console.error('❌ Too many redirects');
-        resolve(false);
-        return;
-      }
-
-      const protocol = downloadUrl.startsWith('https') ? https : require('http');
-
-      protocol.get(downloadUrl, (response) => {
-        // Handle redirects (GitHub releases redirect to S3)
-        if (response.statusCode === 301 || response.statusCode === 302) {
-          download(response.headers.location, redirectCount + 1);
-          return;
-        }
-
-        if (response.statusCode === 404) {
-          console.log(`⚠️ Binary not found for v${VERSION}`);
-          console.log(' Falling back to source mode');
-          resolve(false);
-          return;
-        }
-
-        if (response.statusCode !== 200) {
-          console.error(`❌ Download failed: HTTP ${response.statusCode}`);
-          resolve(false);
-          return;
-        }
-
-        // Ensure bin directory exists
-        if (!fs.existsSync(binDir)) {
-          fs.mkdirSync(binDir, { recursive: true });
-        }
-
-        const file = fs.createWriteStream(binaryPath);
-        response.pipe(file);
-
-        file.on('finish', () => {
-          file.close();
-
-          // Make executable on Unix
-          if (process.platform !== 'win32') {
-            fs.chmodSync(binaryPath, 0o755);
-          }
-
-          console.log('✓ Binary downloaded successfully');
-          resolve(true);
-        });
-
-        file.on('error', (err) => {
-          fs.unlink(binaryPath, () => {}); // Clean up
-          console.error(`❌ Write error: ${err.message}`);
-          resolve(false);
-        });
-
-      }).on('error', (err) => {
-        console.error(`❌ Download error: ${err.message}`);
-        resolve(false);
-      });
-    };
-
-    download(url);
-  });
-}
-
-// Mark that we're using source mode
-function setSourceMode() {
-  const flagPath = path.join(__dirname, '..', '.source-mode');
-  fs.writeFileSync(flagPath, 'true');
-}
-
-// Main
-async function main() {
-  // Skip in CI environments during package build
-  if (process.env.CI || process.env.PKG_EXECPATH) {
-    console.log('⏭️ Skipping binary download (CI/build environment)');
-    return;
-  }
-
-  const binaryName = getBinaryName();
-
-  if (!binaryName) {
-    setSourceMode();
-    return;
-  }
-
-  const success = await downloadBinary(binaryName);
-
-  if (!success) {
-    setSourceMode();
-  }
-}
-
-main().catch((err) => {
-  console.error('Postinstall error:', err.message);
-  // Don't fail install, fall back to source mode
-});
-