@merlean/analyzer 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.js +156 -0
- package/bin/wrapper.js +51 -0
- package/lib/analyzer.js +195 -0
- package/lib/uploader.js +37 -0
- package/package.json +48 -0
- package/scripts/postinstall.js +156 -0
package/bin/cli.js
ADDED
@@ -0,0 +1,156 @@
#!/usr/bin/env node

/**
 * AI Bot Analyzer CLI (LLM-powered)
 *
 * Usage:
 *   npx @ai-bot/analyzer /path/to/codebase --name "My App" --upload https://backend.fly.dev
 *
 * Options:
 *   --name, -n     Site name (required)
 *   --upload, -u   Backend URL to upload map (required)
 *   --output, -o   Output file path (optional, default: ./site-map.json)
 *   --token, -t    Auth token for upload (optional)
 *
 * Requires ANTHROPIC_API_KEY environment variable
 */

// Load env from parent directory (.env in bot-smart/)
require('dotenv').config({ path: require('path').join(__dirname, '../../.env') });
require('dotenv').config();

const { analyzeCodebase } = require('../lib/analyzer');
const { uploadMap } = require('../lib/uploader');
const path = require('path');
const fs = require('fs');

// Parse CLI arguments
function parseArgs() {
  const args = process.argv.slice(2);
  const options = {
    path: null,
    name: null,
    upload: null,
    output: './site-map.json',
    token: null
  };

  for (let i = 0; i < args.length; i++) {
    const arg = args[i];

    if (arg === '--name' || arg === '-n') {
      options.name = args[++i];
    } else if (arg === '--upload' || arg === '-u') {
      options.upload = args[++i];
    } else if (arg === '--output' || arg === '-o') {
      options.output = args[++i];
    } else if (arg === '--token' || arg === '-t') {
      options.token = args[++i];
    } else if (arg === '--help' || arg === '-h') {
      printHelp();
      process.exit(0);
    } else if (!arg.startsWith('-') && !options.path) {
      options.path = arg;
    }
  }

  return options;
}

function printHelp() {
  console.log(`
AI Bot Analyzer - Generate site maps for AI Bot integration

Usage:
  ai-bot-analyze <path> --name <name> --upload <url>

Arguments:
  <path>               Path to codebase to analyze

Options:
  --name, -n <name>    Site name (required)
  --upload, -u <url>   Backend URL to upload map (required)
  --output, -o <file>  Output file (default: ./site-map.json)
  --token, -t <token>  Auth token for upload
  --help, -h           Show this help

Examples:
  # Analyze and upload to production
  ai-bot-analyze ./my-app --name "My App" --upload https://ai-bot-backend.fly.dev

  # Analyze with auth token
  ai-bot-analyze ./my-app --name "My App" --upload https://backend.fly.dev --token secret123

  # Just generate local file
  ai-bot-analyze ./my-app --name "My App" --output ./map.json
`);
}

async function main() {
  console.log('\nAI Bot Analyzer\n');

  const options = parseArgs();

  // Validate required args
  if (!options.path) {
    console.error('Error: Path to codebase is required');
    console.log('  Run with --help for usage');
    process.exit(1);
  }

  if (!options.name) {
    console.error('Error: --name is required');
    process.exit(1);
  }

  const codebasePath = path.resolve(options.path);

  if (!fs.existsSync(codebasePath)) {
    console.error(`Error: Path does not exist: ${codebasePath}`);
    process.exit(1);
  }

  console.log(`Analyzing: ${codebasePath}`);
  console.log(`Site name: ${options.name}`);

  try {
    // Analyze codebase
    const siteMap = await analyzeCodebase(codebasePath, options.name);

    console.log('\nAnalysis Summary:');
    console.log(`  Routes:  ${siteMap.routes?.length || 0}`);
    console.log(`  Forms:   ${siteMap.forms?.length || 0}`);
    console.log(`  Actions: ${siteMap.actions?.length || 0}`);
    console.log(`  Site ID: ${siteMap.siteId}`);

    // Save locally
    const outputPath = path.resolve(options.output);
    fs.writeFileSync(outputPath, JSON.stringify(siteMap, null, 2));
    console.log(`\nSaved to: ${outputPath}`);

    // Upload if URL provided
    if (options.upload) {
      console.log(`\nUploading to: ${options.upload}`);
      const result = await uploadMap(options.upload, siteMap, options.token);

      if (result.success) {
        console.log('Upload successful!');
        console.log(`\nIntegration Ready!\n`);
        console.log(`  Add this to your website:\n`);
        console.log(`  <script src="${options.upload}/bot.js" data-site-id="${siteMap.siteId}"></script>\n`);
      } else {
        console.error(`Upload failed: ${result.error}`);
        process.exit(1);
      }
    } else {
      console.log('\nNo --upload URL provided. Map saved locally only.');
    }

  } catch (error) {
    console.error(`\nError: ${error.message}`);
    process.exit(1);
  }
}

main();

package/bin/wrapper.js
ADDED
@@ -0,0 +1,51 @@
#!/usr/bin/env node

/**
 * AI Bot Analyzer - Wrapper Script
 *
 * Attempts to run the compiled binary, falls back to source if not available.
 * This allows the package to work even if binary download failed.
 */

const { spawn, execFileSync } = require('child_process');
const path = require('path');
const fs = require('fs');

const binDir = __dirname;
const binaryName = process.platform === 'win32' ? 'ai-bot-analyze.exe' : 'ai-bot-analyze';
const binaryPath = path.join(binDir, binaryName);
const sourcePath = path.join(binDir, 'cli.js');
const sourceModeFlagPath = path.join(binDir, '..', '.source-mode');

// Check if we should use source mode
const useSourceMode = fs.existsSync(sourceModeFlagPath) || !fs.existsSync(binaryPath);

if (useSourceMode) {
  // Run source directly with Node.js
  require('./cli.js');
} else {
  // Run compiled binary
  const args = process.argv.slice(2);

  try {
    const result = spawn(binaryPath, args, {
      stdio: 'inherit',
      env: process.env
    });

    result.on('error', (err) => {
      // If binary fails, fall back to source
      console.error('Binary execution failed, falling back to source mode...');
      require('./cli.js');
    });

    result.on('exit', (code) => {
      process.exit(code || 0);
    });

  } catch (err) {
    // Fall back to source mode
    require('./cli.js');
  }
}

package/lib/analyzer.js
ADDED
@@ -0,0 +1,195 @@
/**
 * LLM-based Codebase Analyzer
 *
 * Uses Claude to analyze codebase and extract:
 * - API routes/endpoints
 * - Forms and their fields
 * - Actions/mutations
 */

require('dotenv').config({ path: require('path').join(__dirname, '../../.env') });
require('dotenv').config();

const Anthropic = require('@anthropic-ai/sdk').default;
const fs = require('fs');
const path = require('path');
const { glob } = require('glob');
const crypto = require('crypto');

// Initialize Anthropic
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY
});

// File patterns to scan
const SCAN_PATTERNS = [
  '**/*.js',
  '**/*.ts',
  '**/*.jsx',
  '**/*.tsx',
  '**/*.php',
  '**/*.py',
  '**/*.rb'
];

// Directories to ignore
const IGNORE_PATTERNS = [
  '**/node_modules/**',
  '**/vendor/**',
  '**/.git/**',
  '**/dist/**',
  '**/build/**',
  '**/__pycache__/**',
  '**/venv/**',
  '**/*.min.js',
  '**/*.map'
];

// Keywords to prioritize files
const PRIORITY_KEYWORDS = [
  'route', 'router', 'controller', 'api', 'endpoint',
  'form', 'submit', 'action', 'handler', 'service'
];

/**
 * Analyze a codebase using LLM
 */
async function analyzeCodebase(codebasePath, siteName) {
  const siteId = generateSiteId();

  console.log('  Scanning files...');

  // Get files to scan
  const files = await glob(SCAN_PATTERNS, {
    cwd: codebasePath,
    ignore: IGNORE_PATTERNS,
    absolute: true
  });

  console.log(`  Found ${files.length} files`);

  // Prioritize and limit files
  const prioritizedFiles = prioritizeFiles(files, codebasePath);
  const filesToAnalyze = prioritizedFiles.slice(0, 50); // Limit to avoid rate limits

  console.log(`  Analyzing ${filesToAnalyze.length} priority files...`);

  // Read and prepare file contents
  const fileContents = [];
  for (const file of filesToAnalyze) {
    try {
      const content = fs.readFileSync(file, 'utf-8');
      const relativePath = path.relative(codebasePath, file);

      // Limit content per file to avoid token limits
      const truncatedContent = content.slice(0, 3000);

      fileContents.push({
        path: relativePath,
        content: truncatedContent
      });
    } catch (error) {
      // Skip files that can't be read
    }
  }

  // Send to LLM for analysis
  console.log('  Calling LLM for analysis...');
  const analysis = await analyzeWithLLM(fileContents, siteName);

  return {
    siteId,
    siteName,
    analyzedAt: new Date().toISOString(),
    framework: analysis.framework,
    routes: analysis.routes || [],
    forms: analysis.forms || [],
    actions: analysis.actions || []
  };
}

/**
 * Prioritize files based on keywords in path/name
 */
function prioritizeFiles(files, basePath) {
  return files.sort((a, b) => {
    const aPath = path.relative(basePath, a).toLowerCase();
    const bPath = path.relative(basePath, b).toLowerCase();

    const aScore = PRIORITY_KEYWORDS.reduce((score, kw) =>
      aPath.includes(kw) ? score + 1 : score, 0);
    const bScore = PRIORITY_KEYWORDS.reduce((score, kw) =>
      bPath.includes(kw) ? score + 1 : score, 0);

    return bScore - aScore;
  });
}

/**
 * Analyze files using Claude
 */
async function analyzeWithLLM(fileContents, siteName) {
  const filesText = fileContents.map(f =>
    `=== ${f.path} ===\n${f.content}`
  ).join('\n\n');

  const prompt = `Analyze this codebase and extract API information.

CODEBASE FILES:
${filesText}

Extract and return as JSON:
1. framework: detected framework (express, laravel, django, fastapi, nextjs, wordpress, etc.) or null
2. routes: array of {method, path, description} - API endpoints found
3. forms: array of {action, method, fields: [{name, type}]} - forms found
4. actions: array of {name, endpoint, method, description} - API calls/actions found

Focus on:
- REST API routes
- Form submissions
- AJAX/fetch calls
- Controller methods

Return ONLY valid JSON, no markdown or explanation:
{"framework": "...", "routes": [...], "forms": [...], "actions": [...]}`;

  try {
    const response = await anthropic.messages.create({
      model: 'claude-sonnet-4-20250514',
      max_tokens: 4096,
      messages: [{
        role: 'user',
        content: prompt
      }]
    });

    const text = response.content[0].type === 'text' ? response.content[0].text : '';

    // Extract JSON from response
    const jsonMatch = text.match(/\{[\s\S]*\}/);
    if (jsonMatch) {
      return JSON.parse(jsonMatch[0]);
    }

    return { framework: null, routes: [], forms: [], actions: [] };

  } catch (error) {
    if (error.status === 429) {
      console.log('  Rate limited, waiting 30s...');
      await new Promise(r => setTimeout(r, 30000));
      return analyzeWithLLM(fileContents, siteName);
    }
    console.error('  LLM error:', error.message);
    return { framework: null, routes: [], forms: [], actions: [] };
  }
}

/**
 * Generate a unique site ID
 */
function generateSiteId() {
  const random = crypto.randomBytes(4).toString('hex');
  return `site_${random}`;
}

module.exports = { analyzeCodebase };

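Since package.json (below) points `main` at lib/analyzer.js, the same analysis can also be driven programmatically. A minimal sketch of that usage, not part of the package itself: the project path and site name are placeholders, and ANTHROPIC_API_KEY must already be set, because the Anthropic client is created when the module is loaded.

const { analyzeCodebase } = require('@merlean/analyzer');

(async () => {
  // Hypothetical local project; any directory of source files would do.
  const siteMap = await analyzeCodebase('./example-app', 'Example App');

  // Result shape, per the return statement above:
  // { siteId, siteName, analyzedAt, framework, routes, forms, actions }
  console.log(siteMap.siteId, siteMap.routes.length);
})();
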
package/lib/uploader.js
ADDED
@@ -0,0 +1,37 @@
/**
 * Upload site map to AI Bot backend
 */

async function uploadMap(backendUrl, siteMap, token) {
  try {
    const url = `${backendUrl.replace(/\/$/, '')}/api/sites`;

    const headers = {
      'Content-Type': 'application/json'
    };

    if (token) {
      headers['Authorization'] = `Bearer ${token}`;
    }

    const response = await fetch(url, {
      method: 'POST',
      headers,
      body: JSON.stringify(siteMap)
    });

    if (!response.ok) {
      const error = await response.text();
      return { success: false, error: `HTTP ${response.status}: ${error}` };
    }

    const data = await response.json();
    return { success: true, data };

  } catch (error) {
    return { success: false, error: error.message };
  }
}

module.exports = { uploadMap };

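For reference, a sketch of how uploadMap is called and what it sends over the wire. This is illustrative only: the backend URL, token, and site map are placeholders, and the deep require assumes the published `lib/` layout shown above (Node 18+ provides the global fetch the uploader relies on).

const { uploadMap } = require('@merlean/analyzer/lib/uploader');

(async () => {
  // Placeholder map in the shape produced by analyzeCodebase()
  const siteMap = { siteId: 'site_00000000', siteName: 'Example App', routes: [], forms: [], actions: [] };

  // Issues POST <backendUrl>/api/sites with a JSON body and, when a token
  // is given, an Authorization: Bearer <token> header.
  const result = await uploadMap('https://backend.example.com', siteMap, 'secret123');

  // Resolves to { success: true, data } or { success: false, error }.
  if (!result.success) console.error(result.error);
})();
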
package/package.json
ADDED
@@ -0,0 +1,48 @@
{
  "name": "@merlean/analyzer",
  "version": "1.0.0",
  "description": "AI Bot codebase analyzer - generates site maps for AI assistant integration",
  "keywords": ["ai", "bot", "analyzer", "claude", "anthropic", "widget"],
  "author": "zmaren",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/zmaren/merlean.git",
    "directory": "bot-smart/analyzer"
  },
  "publishConfig": {
    "registry": "https://registry.npmjs.org",
    "access": "public"
  },
  "bin": {
    "ai-bot-analyze": "./bin/wrapper.js"
  },
  "main": "lib/analyzer.js",
  "files": [
    "bin/",
    "lib/",
    "scripts/"
  ],
  "scripts": {
    "postinstall": "node scripts/postinstall.js",
    "build": "pkg . --targets node18-linux-x64,node18-macos-x64,node18-macos-arm64,node18-win-x64 --output dist/ai-bot-analyze",
    "build:local": "pkg bin/cli.js --targets node18-macos-arm64 --output dist/ai-bot-analyze-macos-arm64"
  },
  "pkg": {
    "scripts": ["lib/**/*.js", "bin/cli.js"],
    "assets": [],
    "targets": ["node18-linux-x64", "node18-macos-x64", "node18-macos-arm64", "node18-win-x64"],
    "outputPath": "dist"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.39.0",
    "dotenv": "^16.3.1",
    "glob": "^10.3.10"
  },
  "devDependencies": {
    "pkg": "^5.8.1"
  }
}

package/scripts/postinstall.js
ADDED
@@ -0,0 +1,156 @@
#!/usr/bin/env node

/**
 * Postinstall script - Downloads the correct binary for the user's platform
 * from GitHub Releases
 */

const https = require('https');
const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');

const REPO = 'zmaren/merlean';
const BINARY_NAME = 'ai-bot-analyze';
const VERSION = require('../package.json').version;

// Map Node.js platform/arch to binary names
function getBinaryName() {
  const platform = process.platform;
  const arch = process.arch;

  const platformMap = {
    'darwin-arm64': 'ai-bot-analyze-macos-arm64',
    'darwin-x64': 'ai-bot-analyze-macos-x64',
    'linux-x64': 'ai-bot-analyze-linux-x64',
    'win32-x64': 'ai-bot-analyze-win-x64.exe'
  };

  const key = `${platform}-${arch}`;
  const binaryName = platformMap[key];

  if (!binaryName) {
    console.error(`Unsupported platform: ${platform}-${arch}`);
    console.error('  Supported: darwin-arm64, darwin-x64, linux-x64, win32-x64');
    console.error('  Falling back to source mode (requires Node.js)');
    return null;
  }

  return binaryName;
}

// Download binary from GitHub Releases
async function downloadBinary(binaryName) {
  const binDir = path.join(__dirname, '..', 'bin');
  const binaryPath = path.join(binDir, process.platform === 'win32' ? `${BINARY_NAME}.exe` : BINARY_NAME);

  // Skip if binary already exists
  if (fs.existsSync(binaryPath)) {
    console.log('Binary already exists');
    return true;
  }

  const url = `https://github.com/${REPO}/releases/download/v${VERSION}/${binaryName}`;

  console.log(`Downloading ${binaryName}...`);
  console.log(`  From: ${url}`);

  return new Promise((resolve) => {
    const download = (downloadUrl, redirectCount = 0) => {
      if (redirectCount > 5) {
        console.error('Too many redirects');
        resolve(false);
        return;
      }

      const protocol = downloadUrl.startsWith('https') ? https : require('http');

      protocol.get(downloadUrl, (response) => {
        // Handle redirects (GitHub releases redirect to S3)
        if (response.statusCode === 301 || response.statusCode === 302) {
          download(response.headers.location, redirectCount + 1);
          return;
        }

        if (response.statusCode === 404) {
          console.log(`Binary not found for v${VERSION}`);
          console.log('  Falling back to source mode');
          resolve(false);
          return;
        }

        if (response.statusCode !== 200) {
          console.error(`Download failed: HTTP ${response.statusCode}`);
          resolve(false);
          return;
        }

        // Ensure bin directory exists
        if (!fs.existsSync(binDir)) {
          fs.mkdirSync(binDir, { recursive: true });
        }

        const file = fs.createWriteStream(binaryPath);
        response.pipe(file);

        file.on('finish', () => {
          file.close();

          // Make executable on Unix
          if (process.platform !== 'win32') {
            fs.chmodSync(binaryPath, 0o755);
          }

          console.log('Binary downloaded successfully');
          resolve(true);
        });

        file.on('error', (err) => {
          fs.unlink(binaryPath, () => {}); // Clean up
          console.error(`Write error: ${err.message}`);
          resolve(false);
        });

      }).on('error', (err) => {
        console.error(`Download error: ${err.message}`);
        resolve(false);
      });
    };

    download(url);
  });
}

// Mark that we're using source mode
function setSourceMode() {
  const flagPath = path.join(__dirname, '..', '.source-mode');
  fs.writeFileSync(flagPath, 'true');
}

// Main
async function main() {
  // Skip in CI environments during package build
  if (process.env.CI || process.env.PKG_EXECPATH) {
    console.log('Skipping binary download (CI/build environment)');
    return;
  }

  const binaryName = getBinaryName();

  if (!binaryName) {
    setSourceMode();
    return;
  }

  const success = await downloadBinary(binaryName);

  if (!success) {
    setSourceMode();
  }
}

main().catch((err) => {
  console.error('Postinstall error:', err.message);
  // Don't fail install, fall back to source mode
});

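Putting the constants above together, the postinstall step for this 1.0.0 release on an Apple Silicon Mac would request https://github.com/zmaren/merlean/releases/download/v1.0.0/ai-bot-analyze-macos-arm64 (the other platforms follow the same naming pattern). If that asset is missing or the download fails, the script writes the `.source-mode` flag that bin/wrapper.js checks, so the CLI runs from source instead.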