ai-error-solution 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +75 -0
- package/LICENSE +22 -0
- package/README.md +408 -0
- package/package.json +37 -0
- package/src/fixError.js +178 -0
- package/src/index.js +18 -0
- package/src/init.js +73 -0
- package/src/logger.js +146 -0
- package/src/openaiCurl.js +194 -0
package/CHANGELOG.md
ADDED
@@ -0,0 +1,75 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.0.0] - 2025-12-31

### Added
- Initial release of ai-error-solution
- Core error handling with `fixError()` function
- OpenAI integration using native curl (zero dependencies)
- Middleware pattern with `initAutoErrorSolution()` for one-time API key setup
- Beautiful console logging with colors and emojis
- Support for ESM (ES Modules)
- Error analysis with AI-powered:
  - Plain-English explanations
  - Likely causes identification
  - Suggested fixes with code snippets
  - Relevant documentation links
- Timeout handling and retry mechanism
- Silent mode for programmatic access to analysis results
- Function wrapper utility `wrapWithErrorHandler()`
- Global error handler setup with `setupGlobalHandler()`
- Graceful failure handling when API calls fail
- Comprehensive README with examples
- MIT License
- Publishing guide

### Technical Details
- Node.js 18+ support
- Zero npm dependencies
- Uses native `child_process` with curl
- ESM-first architecture
- Production-ready error handling
- Privacy-focused (no telemetry, no data storage)

### Documentation
- Complete API reference
- Usage examples
- Environment setup guide
- Troubleshooting section
- Best practices and disclaimers

---

## [Unreleased]

### Planned Features
- TypeScript type definitions (.d.ts files)
- Support for custom AI prompts
- Batch error analysis
- Local caching of similar errors
- Support for other LLM providers (Anthropic, etc.)
- Configuration file support (.autoerrorrc)

---

## Version History

- **1.0.0** (2025-12-31) - Initial release

---

## Contributing

See [CONTRIBUTING.md](CONTRIBUTING.md) for details on how to contribute to this project.

## Support

For issues and questions, please visit:
- GitHub Issues: https://github.com/yourusername/ai-error-solution/issues
- npm Page: https://www.npmjs.com/package/ai-error-solution
package/LICENSE
ADDED
@@ -0,0 +1,22 @@
MIT License

Copyright (c) 2025 ai-error-solution

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,408 @@
# 🤖 ai-error-solution

**Lightweight, AI-powered error analysis for Node.js**

Automatically capture runtime errors and get instant AI-generated explanations, causes, fixes, and documentation links—all in your console.

[](https://www.npmjs.com/package/ai-error-solution)
[](https://opensource.org/licenses/MIT)
[](https://nodejs.org)

---

## ✨ Features

- 🎯 **Zero Dependencies** - Uses native `curl` via `child_process` (no heavy HTTP libraries)
- 🚀 **Lightweight** - Minimal package size, maximum efficiency
- 🧠 **AI-Powered Analysis** - Leverages OpenAI to explain errors in plain English
- 🔐 **Privacy-First** - No telemetry, no data storage, direct API calls only
- ⚡ **ESM Native** - Modern ES Module support
- 🎨 **Beautiful Output** - Clean, colorized console logging
- 🛠️ **Production-Ready** - Timeout handling, retries, graceful failures

---

## 📦 Installation

```bash
npm install ai-error-solution
```

**Requirements:**
- Node.js 18 or higher
- `curl` installed on your system (usually pre-installed on macOS/Linux, available on Windows)
- OpenAI API key ([get one here](https://platform.openai.com/api-keys))

---

## 🚀 Quick Start

### 1. Initialize Once

Set up the package with your OpenAI API key in your main application file:

```javascript
import { initAutoErrorSolution, fixError } from 'ai-error-solution';

// Initialize with your API key (do this once at app startup)
initAutoErrorSolution({
  apiKey: process.env.OPENAI_API_KEY,
  model: 'gpt-4o-mini' // Optional: defaults to gpt-4o-mini
});
```

### 2. Use Anywhere

Wrap your error handling with `fixError()`:

```javascript
try {
  // Your code that might throw errors
  const result = riskyFunction();
} catch (err) {
  // Get AI-powered analysis
  await fixError(err);
}
```

### 3. Enjoy AI-Powered Debugging! 🎉

You'll see beautiful, formatted output like this:

```
================================================================================
❌ Error Detected: TypeError
Cannot read property 'map' of undefined

🧠 AI Explanation:
This error occurs when you try to call the .map() method on a variable
that is undefined. The JavaScript engine expected an array but received
undefined instead.

⚠️ Likely Causes:
- The variable was never initialized
- An async function hasn't resolved yet
- The API response didn't include expected data

🔧 Suggested Fixes:
- Add optional chaining: data?.map(...)
- Provide a default value: (data || []).map(...)
- Check existence first: if (data) { data.map(...) }

📚 References:
- https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors
- https://javascript.info/optional-chaining

💡 Note: AI suggestions may not be 100% accurate. Always verify fixes before applying.
================================================================================
```

---

## 📖 API Reference

### `initAutoErrorSolution(options)`

Initialize the package with your OpenAI credentials. **Must be called before using `fixError()`.**

**Parameters:**
- `options.apiKey` (string, **required**) - Your OpenAI API key
- `options.model` (string, optional) - OpenAI model to use (default: `'gpt-4o-mini'`)
- `options.timeout` (number, optional) - API request timeout in milliseconds (default: `30000`)
- `options.maxRetries` (number, optional) - Maximum retry attempts (default: `1`)

**Example:**
```javascript
initAutoErrorSolution({
  apiKey: process.env.OPENAI_API_KEY,
  model: 'gpt-4o-mini',
  timeout: 30000,
  maxRetries: 2
});
```

---

### `fixError(error, options)`

Analyze an error using OpenAI and display formatted results.

**Parameters:**
- `error` (Error | string, **required**) - Error object or error message
- `options.silent` (boolean, optional) - Return analysis without logging (default: `false`)

**Returns:**
- `Promise<null>` - When logging to console (default)
- `Promise<Object>` - When `silent: true`, returns analysis object

**Example:**
```javascript
// Standard usage (logs to console)
try {
  dangerousCode();
} catch (err) {
  await fixError(err);
}

// Silent mode (returns data)
const analysis = await fixError(err, { silent: true });
console.log(analysis.analysis.explanation);
```
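
Based on the implementation in `src/fixError.js` and `src/openaiCurl.js`, the object returned in silent mode has roughly the shape sketched below (field names come from the source; the comments are illustrative):

```javascript
// Sketch of the silent-mode result shape (see src/fixError.js)
const result = await fixError(err, { silent: true });

result.error;    // { name, message, stack } of the original error
result.analysis; // { explanation, causes, fixes, references, raw } parsed from the AI reply
result.model;    // model name reported by the OpenAI response
result.usage;    // token usage reported by the OpenAI response

// If the analysis itself fails, `analysis` is null and the reason is attached instead:
// result.analysisError // e.g. 'OpenAI API request timed out after 30000ms'
```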

---

### `wrapWithErrorHandler(fn)`

Wrap a function with automatic error handling.

**Parameters:**
- `fn` (Function) - Function to wrap

**Returns:**
- `Function` - Wrapped function that automatically calls `fixError()` on errors

**Example:**
```javascript
const safeFunction = wrapWithErrorHandler(async () => {
  // Code that might throw
  return await riskyOperation();
});

await safeFunction(); // Errors automatically analyzed
```
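
Note that, as implemented in `src/fixError.js`, the wrapper re-throws the original error after the analysis has been logged, so the caller still decides how to recover. A small usage sketch:

```javascript
const safeOperation = wrapWithErrorHandler(async () => {
  return await riskyOperation();
});

try {
  await safeOperation();
} catch (err) {
  // The error was already analyzed and logged by the wrapper; handle it as usual here
  console.error('Operation failed:', err.message);
}
```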

---

### `setupGlobalHandler(options)`

Register global handlers for uncaught exceptions and unhandled promise rejections.

**Parameters:**
- `options.exitOnError` (boolean, optional) - Exit process after handling error (default: `false`)

**Example:**
```javascript
setupGlobalHandler({ exitOnError: true });

// Now all uncaught errors will be automatically analyzed
throw new Error('This will be caught and analyzed');
```
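
The handler also listens for `unhandledRejection` (see `src/fixError.js`), so promise rejections that never reach a `.catch()` are analyzed as well. For example:

```javascript
// A rejection with no .catch() attached is picked up by the global handler
Promise.reject(new Error('This rejection will also be analyzed'));
```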

---

## 🔐 Environment Setup

### Using .env file (Recommended)

1. Install dotenv:
```bash
npm install dotenv
```

2. Create `.env`:
```env
OPENAI_API_KEY=sk-your-api-key-here
```

3. Load in your app:
```javascript
import 'dotenv/config';
import { initAutoErrorSolution } from 'ai-error-solution';

initAutoErrorSolution({
  apiKey: process.env.OPENAI_API_KEY
});
```

### Using environment variables directly

```bash
# Linux/macOS
export OPENAI_API_KEY=sk-your-api-key-here
node app.js

# Windows (PowerShell)
$env:OPENAI_API_KEY="sk-your-api-key-here"
node app.js

# Windows (CMD)
set OPENAI_API_KEY=sk-your-api-key-here
node app.js
```

---

## 🎯 Use Cases

### Express.js Error Middleware

```javascript
import express from 'express';
import { initAutoErrorSolution, fixError } from 'ai-error-solution';

const app = express();

initAutoErrorSolution({
  apiKey: process.env.OPENAI_API_KEY
});

// Error handling middleware
app.use(async (err, req, res, next) => {
  await fixError(err);
  res.status(500).json({ error: 'Internal Server Error' });
});
```
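
Because `fixError()` makes a network call to OpenAI, awaiting it before sending the response delays every failing request. One possible variation (not part of the package, just a sketch) is to respond first and let the analysis run in the background; `fixError()` already handles its own failures gracefully:

```javascript
app.use((err, req, res, next) => {
  res.status(500).json({ error: 'Internal Server Error' });

  // Fire-and-forget analysis; API failures are logged gracefully inside fixError()
  fixError(err).catch(() => {});
});
```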

### Async Function Wrapper

```javascript
const fetchUserData = wrapWithErrorHandler(async (userId) => {
  const response = await fetch(`/api/users/${userId}`);
  return response.json();
});

// Automatically analyzes errors
await fetchUserData(123);
```

### Global Error Catching

```javascript
import { initAutoErrorSolution, setupGlobalHandler } from 'ai-error-solution';

initAutoErrorSolution({
  apiKey: process.env.OPENAI_API_KEY
});

setupGlobalHandler({ exitOnError: false });

// All uncaught errors are now automatically analyzed
```

---

## ⚠️ Important Notes

### Disclaimers

- **AI Accuracy**: AI-generated suggestions may not always be correct. Always verify fixes before applying them to production code.
- **API Costs**: Each error analysis makes an API call to OpenAI, which incurs costs based on your OpenAI plan.
- **Privacy**: Error messages and stack traces are sent to OpenAI for analysis. Do not use in production if your errors may contain sensitive data.
- **curl Dependency**: This package requires `curl` to be installed and accessible in your system PATH.

### Best Practices

- ✅ Use in **development** and **debugging** environments
- ✅ Store API keys in environment variables (never commit them)
- ✅ Set reasonable timeout values for production environments
- ✅ Review AI suggestions before implementing fixes
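
One way to follow the first recommendation is to enable AI analysis only outside production. A minimal sketch, assuming you set `NODE_ENV` yourself:

```javascript
import { initAutoErrorSolution, fixError } from 'ai-error-solution';

const aiDebugging = process.env.NODE_ENV !== 'production';

if (aiDebugging) {
  initAutoErrorSolution({ apiKey: process.env.OPENAI_API_KEY });
}

try {
  riskyFunction();
} catch (err) {
  if (aiDebugging) {
    await fixError(err); // AI analysis in development
  } else {
    console.error(err);  // plain logging in production
  }
}
```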

---

## 🏗️ Architecture

This package is built with **zero dependencies** and uses:

- **ESM** - Modern ES Module system
- **Native curl** - No heavy HTTP libraries (axios, node-fetch, etc.)
- **child_process** - Native Node.js process execution
- **Middleware pattern** - One-time API key initialization

**Why curl?**
- Minimal package size
- No dependency vulnerabilities
- Universal availability across platforms
- Direct OpenAI API communication
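
In practice this means each analysis is a single `child_process` call that shells out to `curl`. The sketch below condenses what `src/openaiCurl.js` does; the real implementation adds payload escaping, timeouts, retries, and error handling:

```javascript
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

// Condensed sketch of the request made for each analysis
const payload = JSON.stringify({
  model: 'gpt-4o-mini',
  messages: [
    { role: 'system', content: 'You are a senior JavaScript debugging assistant.' },
    { role: 'user', content: 'Runtime Error: ...' }
  ]
}).replace(/"/g, '\\"');

const { stdout } = await execAsync(
  `curl -X POST "https://api.openai.com/v1/chat/completions" ` +
  `-H "Content-Type: application/json" ` +
  `-H "Authorization: Bearer ${process.env.OPENAI_API_KEY}" ` +
  `--data "${payload}"`
);

console.log(JSON.parse(stdout).choices[0].message.content);
```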

---

## 🛠️ Troubleshooting

### "curl is not installed or not in PATH"

**Solution**: Install curl on your system.

```bash
# macOS (via Homebrew)
brew install curl

# Ubuntu/Debian
sudo apt-get install curl

# Windows (via Chocolatey)
choco install curl

# Windows (built-in on Windows 10+)
# curl should already be available
```

### "Package not initialized"

**Solution**: Make sure you call `initAutoErrorSolution()` before using `fixError()`.
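
If some code paths may run before initialization (shared utilities, for example), the exported `isInitialized()` helper can be used as a guard. A small sketch:

```javascript
import { isInitialized, fixError } from 'ai-error-solution';

async function report(err) {
  if (isInitialized()) {
    await fixError(err);
  } else {
    // Fall back to plain logging until initAutoErrorSolution() has been called
    console.error(err);
  }
}
```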

### "OpenAI API request timed out"

**Solution**: Increase the timeout or check your internet connection.

```javascript
initAutoErrorSolution({
  apiKey: process.env.OPENAI_API_KEY,
  timeout: 60000 // 60 seconds
});
```

### "OpenAI API error: Invalid API key"

**Solution**: Verify that your API key is correct and has sufficient credits.

---

## 📄 License

MIT © [Your Name]

---

## 🤝 Contributing

Contributions are welcome! Please feel free to submit a Pull Request.

---

## 🔗 Links

- [npm Package](https://www.npmjs.com/package/ai-error-solution)
- [GitHub Repository](https://github.com/yourusername/ai-error-solution)
- [OpenAI API Documentation](https://platform.openai.com/docs)
- [Report Issues](https://github.com/yourusername/ai-error-solution/issues)

---

## 💡 What This Package Does NOT Do

- ❌ **No auto-fixing** - Does not modify your code automatically
- ❌ **No telemetry** - Does not collect or store any usage data
- ❌ **No remote storage** - Errors are not stored anywhere
- ❌ **No background processes** - Only runs when you call it

---

## 🌟 Why This Package?

Most error analysis tools either:
- Require heavy dependencies (bloated package size)
- Send data to third-party services (privacy concerns)
- Auto-modify code (risky in production)

**ai-error-solution** is different:
- ✅ **Lightweight** - No dependencies, tiny package size
- ✅ **Private** - Direct API calls, no intermediaries
- ✅ **Safe** - Never modifies your code
- ✅ **Transparent** - Open source, audit the code yourself

---

**Made with ❤️ for developers who value simplicity and privacy**

*Star ⭐ this project if you find it helpful!*
package/package.json
ADDED
@@ -0,0 +1,37 @@
{
  "name": "ai-error-solution",
  "version": "1.0.0",
  "description": "Lightweight Node.js error handler that uses OpenAI to provide explanations, causes, fixes, and documentation links for runtime errors",
  "type": "module",
  "main": "src/index.js",
  "exports": {
    ".": "./src/index.js"
  },
  "engines": {
    "node": ">=18.0.0"
  },
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [
    "error-handling",
    "debugging",
    "openai",
    "error-analysis",
    "ai-debugging",
    "error-solution",
    "runtime-errors",
    "stack-trace"
  ],
  "author": "",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/yourusername/ai-error-solution.git"
  },
  "bugs": {
    "url": "https://github.com/yourusername/ai-error-solution/issues"
  },
  "homepage": "https://github.com/yourusername/ai-error-solution#readme"
}
package/src/fixError.js
ADDED
@@ -0,0 +1,178 @@
/**
 * Main error handler - captures error and requests AI analysis
 */

import { getConfig } from './init.js';
import { callOpenAI, parseAIResponse } from './openaiCurl.js';
import { logErrorAnalysis, logAnalysisFailure } from './logger.js';

/**
 * Analyze an error using OpenAI and display results
 * @param {Error|string} error - Error object or error message
 * @param {Object} [options] - Optional configuration
 * @param {boolean} [options.silent=false] - If true, returns analysis without logging
 * @returns {Promise<Object|null>} Analysis object if silent=true, null otherwise
 */
export async function fixError(error, options = {}) {
  try {
    // Get global configuration
    const config = getConfig();

    // Normalize error input
    const errorObj = normalizeError(error);
    const errorMessage = errorObj.message || 'Unknown error';
    const stackTrace = errorObj.stack || 'No stack trace available';

    // Call OpenAI API
    let aiResponse;
    let lastError;

    for (let attempt = 0; attempt <= config.maxRetries; attempt++) {
      try {
        aiResponse = await callOpenAI(
          config.apiKey,
          config.model,
          errorMessage,
          stackTrace,
          config.timeout
        );
        break; // Success, exit retry loop
      } catch (err) {
        lastError = err;
        if (attempt < config.maxRetries) {
          // Wait before retry (exponential backoff)
          await sleep(1000 * Math.pow(2, attempt));
        }
      }
    }

    if (!aiResponse) {
      throw lastError || new Error('Failed to get response from OpenAI');
    }

    // Parse AI response
    const analysis = parseAIResponse(aiResponse.content);

    // Log results or return silently
    if (options.silent) {
      return {
        error: {
          name: errorObj.name,
          message: errorMessage,
          stack: stackTrace
        },
        analysis,
        model: aiResponse.model,
        usage: aiResponse.usage
      };
    } else {
      logErrorAnalysis(errorObj, analysis);
      return null;
    }

  } catch (analysisError) {
    // Handle failures gracefully
    const errorObj = normalizeError(error);

    if (options.silent) {
      return {
        error: {
          name: errorObj.name,
          message: errorObj.message,
          stack: errorObj.stack
        },
        analysis: null,
        analysisError: analysisError.message
      };
    } else {
      logAnalysisFailure(errorObj, analysisError.message);
      return null;
    }
  }
}

/**
 * Normalize error input to Error object
 * @private
 */
function normalizeError(error) {
  // Already an Error object
  if (error instanceof Error) {
    return error;
  }

  // String message
  if (typeof error === 'string') {
    const err = new Error(error);
    err.name = 'Error';
    return err;
  }

  // Object with message property
  if (error && typeof error === 'object' && error.message) {
    const err = new Error(error.message);
    err.name = error.name || 'Error';
    err.stack = error.stack || err.stack;
    return err;
  }

  // Fallback
  const err = new Error(String(error));
  err.name = 'Error';
  return err;
}

/**
 * Sleep utility for retry delays
 * @private
 */
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

/**
 * Wrap a function with automatic error handling
 * @param {Function} fn - Function to wrap
 * @returns {Function} Wrapped function
 */
export function wrapWithErrorHandler(fn) {
  return async function(...args) {
    try {
      return await fn(...args);
    } catch (error) {
      await fixError(error);
      throw error; // Re-throw after analysis
    }
  };
}

/**
 * Create a global error handler for uncaught exceptions
 * @param {Object} [options] - Configuration options
 * @param {boolean} [options.exitOnError=false] - Exit process after handling
 */
export function setupGlobalHandler(options = {}) {
  const { exitOnError = false } = options;

  process.on('uncaughtException', async (error) => {
    console.error('\n🚨 Uncaught Exception:');
    await fixError(error);

    if (exitOnError) {
      process.exit(1);
    }
  });

  process.on('unhandledRejection', async (reason, promise) => {
    console.error('\n🚨 Unhandled Promise Rejection:');
    const error = reason instanceof Error ? reason : new Error(String(reason));
    await fixError(error);

    if (exitOnError) {
      process.exit(1);
    }
  });

  console.log('✅ Global error handlers registered');
}
package/src/index.js
ADDED
@@ -0,0 +1,18 @@
/**
 * ai-error-solution - Lightweight error analysis using OpenAI
 *
 * A minimal, production-ready package that captures JavaScript/Node.js runtime errors
 * and provides AI-powered explanations, causes, fixes, and documentation links.
 *
 * @module ai-error-solution
 * @version 1.0.0
 */

export { initAutoErrorSolution, isInitialized } from './init.js';
export { fixError, wrapWithErrorHandler, setupGlobalHandler } from './fixError.js';
export { log } from './logger.js';

// Package metadata
export const version = '1.0.0';
export const name = 'ai-error-solution';
package/src/init.js
ADDED
@@ -0,0 +1,73 @@
/**
 * Global configuration storage for ai-error-solution
 * Stores API key and model settings after initialization
 */

// Global configuration object
let config = {
  apiKey: null,
  model: 'gpt-4o-mini',
  timeout: 30000, // 30 seconds default timeout
  maxRetries: 1,
  initialized: false
};

/**
 * Initialize the ai-error-solution package with OpenAI credentials
 * @param {Object} options - Configuration options
 * @param {string} options.apiKey - OpenAI API key (required)
 * @param {string} [options.model='gpt-4o-mini'] - OpenAI model to use
 * @param {number} [options.timeout=30000] - API request timeout in milliseconds
 * @param {number} [options.maxRetries=1] - Maximum number of retry attempts
 * @throws {Error} If apiKey is not provided
 */
export function initAutoErrorSolution(options = {}) {
  if (!options.apiKey) {
    throw new Error('ai-error-solution: API key is required. Please provide your OpenAI API key.');
  }

  config.apiKey = options.apiKey;
  config.model = options.model || 'gpt-4o-mini';
  config.timeout = options.timeout || 30000;
  config.maxRetries = options.maxRetries !== undefined ? options.maxRetries : 1;
  config.initialized = true;

  console.log('✅ ai-error-solution initialized successfully');
}

/**
 * Get the current configuration
 * @returns {Object} Current configuration
 * @throws {Error} If package is not initialized
 */
export function getConfig() {
  if (!config.initialized) {
    throw new Error(
      'ai-error-solution: Package not initialized. Please call initAutoErrorSolution() first with your API key.'
    );
  }
  return { ...config };
}

/**
 * Check if package is initialized
 * @returns {boolean}
 */
export function isInitialized() {
  return config.initialized;
}

/**
 * Reset configuration (useful for testing)
 * @private
 */
export function resetConfig() {
  config = {
    apiKey: null,
    model: 'gpt-4o-mini',
    timeout: 30000,
    maxRetries: 1,
    initialized: false
  };
}
package/src/logger.js
ADDED
@@ -0,0 +1,146 @@
/**
 * Formatted console logger for error analysis results
 * Provides clean, developer-friendly output
 */

/**
 * ANSI color codes for terminal output
 */
const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  dim: '\x1b[2m',
  red: '\x1b[31m',
  green: '\x1b[32m',
  yellow: '\x1b[33m',
  blue: '\x1b[34m',
  magenta: '\x1b[35m',
  cyan: '\x1b[36m',
  white: '\x1b[37m',
  gray: '\x1b[90m'
};

/**
 * Format and log error analysis to console
 * @param {Error} error - Original error object
 * @param {Object} analysis - Parsed AI analysis
 */
export function logErrorAnalysis(error, analysis) {
  console.log('\n' + '='.repeat(80));

  // Error header
  console.log(`${colors.red}${colors.bright}❌ Error Detected: ${error.name || 'Error'}${colors.reset}`);
  console.log(`${colors.red}${error.message}${colors.reset}`);
  console.log('');

  // AI Explanation
  if (analysis.explanation) {
    console.log(`${colors.cyan}${colors.bright}🧠 AI Explanation:${colors.reset}`);
    console.log(formatText(analysis.explanation, colors.white));
    console.log('');
  }

  // Likely Causes
  if (analysis.causes) {
    console.log(`${colors.yellow}${colors.bright}⚠️ Likely Causes:${colors.reset}`);
    console.log(formatText(analysis.causes, colors.yellow));
    console.log('');
  }

  // Suggested Fixes
  if (analysis.fixes) {
    console.log(`${colors.green}${colors.bright}🔧 Suggested Fixes:${colors.reset}`);
    console.log(formatText(analysis.fixes, colors.green));
    console.log('');
  }

  // Documentation References
  if (analysis.references && analysis.references.length > 0) {
    console.log(`${colors.blue}${colors.bright}📚 References:${colors.reset}`);
    analysis.references.forEach(ref => {
      console.log(`${colors.blue} - ${ref}${colors.reset}`);
    });
    console.log('');
  }

  // Footer with disclaimer
  console.log(`${colors.gray}${colors.dim}💡 Note: AI suggestions may not be 100% accurate. Always verify fixes before applying.${colors.reset}`);
  console.log('='.repeat(80) + '\n');
}

/**
 * Log error when AI analysis fails
 * @param {Error} error - Original error
 * @param {string} reason - Reason for failure
 */
export function logAnalysisFailure(error, reason) {
  console.log('\n' + '='.repeat(80));
  console.log(`${colors.red}${colors.bright}❌ Error Detected: ${error.name || 'Error'}${colors.reset}`);
  console.log(`${colors.red}${error.message}${colors.reset}`);
  console.log('');
  console.log(`${colors.yellow}${colors.bright}⚠️ AI Analysis Failed${colors.reset}`);
  console.log(`${colors.yellow}Reason: ${reason}${colors.reset}`);
  console.log('');
  console.log(`${colors.gray}Stack trace:${colors.reset}`);
  console.log(`${colors.gray}${error.stack || 'No stack trace available'}${colors.reset}`);
  console.log('='.repeat(80) + '\n');
}

/**
 * Format text with proper indentation and wrapping
 * @private
 */
function formatText(text, color) {
  const lines = text.split('\n');
  const formatted = lines.map(line => {
    const trimmed = line.trim();
    if (!trimmed) return '';

    // Code blocks
    if (trimmed.startsWith('```') || trimmed.match(/^[a-z]+\(/)) {
      return ` ${colors.dim}${trimmed}${colors.reset}`;
    }

    // Bullet points
    if (trimmed.match(/^[-•*]\s/)) {
      return ` ${color}${trimmed}${colors.reset}`;
    }

    // Numbered lists
    if (trimmed.match(/^\d+\.\s/)) {
      return ` ${color}${trimmed}${colors.reset}`;
    }

    // Regular text
    return ` ${color}${trimmed}${colors.reset}`;
  });

  return formatted.join('\n');
}

/**
 * Log a simple message
 * @param {string} message - Message to log
 * @param {string} type - Message type: 'info', 'success', 'warning', 'error'
 */
export function log(message, type = 'info') {
  const icons = {
    info: 'ℹ️',
    success: '✅',
    warning: '⚠️',
    error: '❌'
  };

  const colorMap = {
    info: colors.cyan,
    success: colors.green,
    warning: colors.yellow,
    error: colors.red
  };

  const icon = icons[type] || icons.info;
  const color = colorMap[type] || colors.white;

  console.log(`${color}${icon} ${message}${colors.reset}`);
}
package/src/openaiCurl.js
ADDED
@@ -0,0 +1,194 @@
/**
 * OpenAI API communication using curl via child_process
 * Minimal implementation without external HTTP libraries
 */

import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

/**
 * System prompt for error analysis
 */
const SYSTEM_PROMPT = `You are a senior JavaScript debugging assistant.
Analyze the following runtime error and provide:

1. Plain-English explanation
2. Likely causes
3. Suggested fixes with short code snippets
4. Helpful documentation links

Keep response concise and practical.`;

/**
 * Call OpenAI API using curl
 * @param {string} apiKey - OpenAI API key
 * @param {string} model - Model name (e.g., 'gpt-4o-mini')
 * @param {string} errorMessage - Error message
 * @param {string} stackTrace - Stack trace
 * @param {number} timeout - Timeout in milliseconds
 * @returns {Promise<Object>} Parsed AI response
 */
export async function callOpenAI(apiKey, model, errorMessage, stackTrace, timeout) {
  const endpoint = 'https://api.openai.com/v1/chat/completions';

  // Construct user message with error details
  const userMessage = `Runtime Error:
Message: ${errorMessage}

Stack Trace:
${stackTrace}

Please analyze this error and provide actionable debugging guidance.`;

  // Build request payload
  const payload = {
    model: model,
    messages: [
      { role: 'system', content: SYSTEM_PROMPT },
      { role: 'user', content: userMessage }
    ],
    temperature: 0.7,
    max_tokens: 1000
  };

  // Escape JSON for shell (Windows-compatible)
  const payloadJson = JSON.stringify(payload);
  const escapedPayload = payloadJson.replace(/"/g, '\\"');

  // Build curl command (Windows-compatible)
  const curlCommand = `curl -X POST "${endpoint}" ` +
    `-H "Content-Type: application/json" ` +
    `-H "Authorization: Bearer ${apiKey}" ` +
    `--max-time ${Math.floor(timeout / 1000)} ` +
    `--data "${escapedPayload}"`;

  try {
    const { stdout, stderr } = await execAsync(curlCommand, {
      timeout: timeout,
      maxBuffer: 1024 * 1024 * 10 // 10MB buffer
    });

    if (stderr && !stdout) {
      throw new Error(`curl error: ${stderr}`);
    }

    // Parse response
    const response = JSON.parse(stdout);

    // Check for API errors
    if (response.error) {
      throw new Error(`OpenAI API error: ${response.error.message || JSON.stringify(response.error)}`);
    }

    // Extract AI response
    if (!response.choices || !response.choices[0] || !response.choices[0].message) {
      throw new Error('Unexpected API response format');
    }

    return {
      success: true,
      content: response.choices[0].message.content,
      model: response.model,
      usage: response.usage
    };

  } catch (error) {
    // Handle timeout
    if (error.killed && error.signal === 'SIGTERM') {
      throw new Error(`OpenAI API request timed out after ${timeout}ms`);
    }

    // Handle curl not found
    if (error.message.includes('curl') && error.message.includes('not recognized')) {
      throw new Error('curl is not installed or not in PATH. Please install curl to use this package.');
    }

    // Re-throw other errors
    throw error;
  }
}

/**
 * Parse AI response into structured format
 * @param {string} content - Raw AI response content
 * @returns {Object} Structured response
 */
export function parseAIResponse(content) {
  const lines = content.split('\n').filter(line => line.trim());

  return {
    explanation: extractSection(content, ['explanation', 'plain-english', 'what happened']),
    causes: extractSection(content, ['causes', 'likely causes', 'reasons']),
    fixes: extractSection(content, ['fixes', 'suggested fixes', 'solutions', 'fix']),
    references: extractReferences(content),
    raw: content
  };
}

/**
 * Extract section from AI response
 * @private
 */
function extractSection(content, keywords) {
  const lines = content.split('\n');
  let capturing = false;
  let result = [];

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const lowerLine = line.toLowerCase();

    // Check if this line starts a section we're interested in
    const isKeywordLine = keywords.some(kw =>
      lowerLine.includes(kw) && (lowerLine.includes(':') || lowerLine.includes('**'))
    );

    if (isKeywordLine) {
      capturing = true;
      continue;
    }

    // Check if we hit another section header
    if (capturing && line.match(/^\d+\.|^-|^•|^[A-Z][^:]{3,}:/)) {
      if (line.toLowerCase().includes('documentation') ||
          line.toLowerCase().includes('reference') ||
          line.toLowerCase().includes('link')) {
        break; // Stop if we hit references section
      }
    }

    // Stop capturing if we hit another numbered section
    if (capturing && line.match(/^\d+\.\s+[A-Z]/)) {
      // Check if it's not part of our current section
      const nextKeywordMatch = keywords.some(kw => line.toLowerCase().includes(kw));
      if (!nextKeywordMatch) {
        break;
      }
    }

    if (capturing && line.trim()) {
      result.push(line.trim());

      // Look ahead - if next few lines are empty, stop capturing
      if (i + 1 < lines.length && !lines[i + 1].trim() &&
          i + 2 < lines.length && !lines[i + 2].trim()) {
        break;
      }
    }
  }

  return result.join('\n') || content.substring(0, 200) + '...';
}

/**
 * Extract documentation links from AI response
 * @private
 */
function extractReferences(content) {
  const urlRegex = /(https?:\/\/[^\s]+)/g;
  const matches = content.match(urlRegex) || [];
  return matches.map(url => url.replace(/[),.\]>]+$/, '')); // Clean trailing punctuation
}