vibecodingmachine-core 2025.12.6-1702 → 2025.12.24-2348
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/scripts/setup-database.js +108 -0
- package/src/compliance/compliance-manager.js +249 -0
- package/src/compliance/compliance-prompt.js +183 -0
- package/src/database/migrations.js +289 -0
- package/src/database/user-database-client.js +266 -0
- package/src/database/user-schema.js +118 -0
- package/src/ide-integration/applescript-manager.cjs +76 -129
- package/src/ide-integration/applescript-manager.js +62 -12
- package/src/ide-integration/claude-code-cli-manager.cjs +120 -1
- package/src/ide-integration/provider-manager.cjs +67 -1
- package/src/index.cjs +3 -1
- package/src/index.js +7 -0
- package/src/llm/direct-llm-manager.cjs +110 -73
- package/src/localization/index.js +148 -0
- package/src/localization/translations/en.js +675 -0
- package/src/localization/translations/es.js +676 -0
- package/src/quota-management/index.js +108 -0
- package/src/sync/sync-engine.js +32 -10
- package/src/utils/download-with-progress.js +92 -0
- package/src/utils/electron-update-checker.js +7 -0
- package/src/utils/env-helpers.js +54 -0
- package/src/utils/requirement-helpers.js +745 -49
- package/src/utils/requirements-parser.js +21 -7
- package/src/utils/update-checker.js +7 -0
- package/test-quota-system.js +67 -0
- package/test-requirement-stats.js +66 -0
package/src/llm/direct-llm-manager.cjs

@@ -5,6 +5,7 @@
 
 const https = require('https');
 const http = require('http');
+const quotaManagement = require('../quota-management');
 
 class DirectLLMManager {
   constructor(sharedProviderManager = null) {
@@ -22,7 +23,7 @@ class DirectLLMManager {
       }
     }
   }
-
+
   /**
    * Detect and save rate limit from error message
    * @param {string} provider - Provider name
@@ -31,17 +32,17 @@ class DirectLLMManager {
    */
   detectAndSaveRateLimit(provider, model, errorMessage) {
     if (!this.providerManager) return;
-
+
     // Check for rate limit indicators
-    const isRateLimit = errorMessage.includes('rate limit') ||
-
-
-
-
-
-
-
-
+    const isRateLimit = errorMessage.includes('rate limit') ||
+                        errorMessage.includes('Rate limit') ||
+                        errorMessage.includes('too many requests') ||
+                        errorMessage.includes('429') ||
+                        errorMessage.includes('quota') ||
+                        errorMessage.includes('Weekly limit reached') ||
+                        errorMessage.includes('Daily limit reached') ||
+                        errorMessage.includes('limit reached');
+
     if (isRateLimit) {
       this.providerManager.markRateLimited(provider, model, errorMessage);
     }
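Note: the hunk above widens rate-limit detection to plain substring matching on the provider error message. A minimal standalone sketch of the same check (the RATE_LIMIT_MARKERS constant and looksRateLimited helper are illustrative names, not part of the package API):

// Markers copied from the added lines above; the helper name is hypothetical.
const RATE_LIMIT_MARKERS = [
  'rate limit', 'Rate limit', 'too many requests', '429',
  'quota', 'Weekly limit reached', 'Daily limit reached', 'limit reached'
];

function looksRateLimited(errorMessage) {
  return RATE_LIMIT_MARKERS.some((marker) => errorMessage.includes(marker));
}

console.log(looksRateLimited('429: Too many requests this minute')); // true (matches '429')
console.log(looksRateLimited('ECONNREFUSED 127.0.0.1:11434'));       // false
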
@@ -56,10 +57,10 @@ class DirectLLMManager {
    */
   async callOllama(model, prompt, options = {}) {
     const { onChunk, onComplete, onError, temperature = 0.2 } = options;
-
+
     return new Promise((resolve) => {
       let fullResponse = '';
-
+
       const postData = JSON.stringify({
         model: model,
         prompt: prompt,
@@ -80,22 +81,22 @@ class DirectLLMManager {
         }
       }, (res) => {
         let buffer = '';
-
+
         res.on('data', (chunk) => {
           buffer += chunk.toString();
           const lines = buffer.split('\n');
           buffer = lines.pop(); // Keep incomplete line in buffer
-
+
           for (const line of lines) {
             if (!line.trim()) continue;
-
+
             try {
               const data = JSON.parse(line);
               if (data.response) {
                 fullResponse += data.response;
                 if (onChunk) onChunk(data.response);
               }
-
+
               if (data.done) {
                 if (onComplete) onComplete(fullResponse);
                 resolve({
@@ -110,7 +111,7 @@ class DirectLLMManager {
             }
           }
         });
-
+
         res.on('end', () => {
           if (!fullResponse) {
             const error = 'No response received from Ollama';
@@ -140,7 +141,7 @@ class DirectLLMManager {
    */
   async callAnthropic(model, prompt, options = {}) {
     const { apiKey, onChunk, onComplete, onError, temperature = 0.2, maxTokens = 8192 } = options;
-
+
     if (!apiKey) {
       const error = 'Anthropic API key required';
       if (onError) onError(error);
@@ -149,7 +150,7 @@ class DirectLLMManager {
 
     return new Promise((resolve) => {
       let fullResponse = '';
-
+
       const postData = JSON.stringify({
         model: model,
         max_tokens: maxTokens,
@@ -172,21 +173,21 @@ class DirectLLMManager {
         }
       }, (res) => {
         let buffer = '';
-
+
         res.on('data', (chunk) => {
           buffer += chunk.toString();
           const lines = buffer.split('\n');
           buffer = lines.pop();
-
+
           for (const line of lines) {
             if (!line.trim() || !line.startsWith('data: ')) continue;
-
+
             try {
               const jsonStr = line.slice(6); // Remove "data: " prefix
               if (jsonStr === '[DONE]') continue;
-
+
               const data = JSON.parse(jsonStr);
-
+
               if (data.type === 'content_block_delta' && data.delta?.text) {
                 fullResponse += data.delta.text;
                 if (onChunk) onChunk(data.delta.text);
@@ -203,7 +204,7 @@ class DirectLLMManager {
             }
           }
         });
-
+
         res.on('end', () => {
           if (!fullResponse) {
             const error = 'No response received from Anthropic';
@@ -233,7 +234,7 @@ class DirectLLMManager {
    */
   async callGroq(model, prompt, options = {}) {
     const { apiKey, onChunk, onComplete, onError, temperature = 0.2, maxTokens = 8192 } = options;
-
+
     if (!apiKey) {
       const error = 'Groq API key required';
       if (onError) onError(error);
@@ -242,7 +243,7 @@ class DirectLLMManager {
 
     return new Promise((resolve) => {
       let fullResponse = '';
-
+
       const postData = JSON.stringify({
         model: model,
         messages: [
@@ -265,7 +266,7 @@ class DirectLLMManager {
       }, (res) => {
         let buffer = '';
         let statusCode = res.statusCode;
-
+
         // Check for rate limit or error status codes
         if (statusCode === 429 || statusCode >= 400) {
           let errorBody = '';
@@ -280,15 +281,15 @@ class DirectLLMManager {
           });
           return;
         }
-
+
         res.on('data', (chunk) => {
           buffer += chunk.toString();
           const lines = buffer.split('\n');
           buffer = lines.pop();
-
+
           for (const line of lines) {
             if (!line.trim() || !line.startsWith('data: ')) continue;
-
+
             try {
               const jsonStr = line.slice(6);
               if (jsonStr === '[DONE]') {
@@ -300,10 +301,10 @@ class DirectLLMManager {
                 });
                 return;
               }
-
+
               const data = JSON.parse(jsonStr);
               const content = data.choices?.[0]?.delta?.content;
-
+
               if (content) {
                 fullResponse += content;
                 if (onChunk) onChunk(content);
@@ -313,7 +314,7 @@ class DirectLLMManager {
             }
           }
         });
-
+
         res.on('end', () => {
           if (fullResponse) {
             if (onComplete) onComplete(fullResponse);
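The hunks above in callOllama, callAnthropic, and callGroq are whitespace-only cleanups; the streaming interface itself is unchanged. For context, these methods all take the same options object with onChunk/onComplete/onError callbacks and resolve with a { success, ... } result. A hedged usage sketch, assuming an already constructed DirectLLMManager instance named manager and a locally installed Ollama model:

// manager and the model name are assumptions for illustration only.
manager.callOllama('llama3', 'Say hello in one sentence.', {
  temperature: 0.2,
  onChunk: (chunk) => process.stdout.write(chunk),              // streamed tokens as they arrive
  onComplete: (full) => console.log(`\n[done] ${full.length} chars`),
  onError: (err) => console.error(`[error] ${err}`)
}).then((result) => {
  if (!result.success) console.error(result.error);
});
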
@@ -348,7 +349,7 @@ class DirectLLMManager {
    */
   async callBedrock(model, prompt, options = {}) {
     const { region, accessKeyId, secretAccessKey, onChunk, onComplete, onError, temperature = 0.2, maxTokens = 8192 } = options;
-
+
     if (!region || !accessKeyId || !secretAccessKey) {
       const error = 'AWS credentials required (region, accessKeyId, secretAccessKey)';
       if (onError) onError(error);
@@ -358,7 +359,7 @@ class DirectLLMManager {
     try {
       // Use AWS SDK v3 for Bedrock
       const { BedrockRuntimeClient, InvokeModelWithResponseStreamCommand } = require('@aws-sdk/client-bedrock-runtime');
-
+
       const client = new BedrockRuntimeClient({
         region: region,
         credentials: {
@@ -401,14 +402,14 @@ class DirectLLMManager {
       for await (const event of response.body) {
         if (event.chunk) {
           const chunk = JSON.parse(new TextDecoder().decode(event.chunk.bytes));
-
+
           let text = '';
           if (chunk.delta?.text) {
             text = chunk.delta.text; // Anthropic format
           } else if (chunk.generation) {
             text = chunk.generation; // Meta Llama format
           }
-
+
           if (text) {
             fullResponse += text;
             if (onChunk) onChunk(text);
@@ -418,7 +419,7 @@ class DirectLLMManager {
 
       if (onComplete) onComplete(fullResponse);
       return { success: true, response: fullResponse, model };
-
+
     } catch (error) {
       const errorMsg = `AWS Bedrock error: ${error.message}`;
       if (onError) onError(errorMsg);
@@ -436,32 +437,32 @@ class DirectLLMManager {
   async callClaudeCode(model, prompt, options = {}) {
     const { onChunk, onComplete, onError } = options;
     const { spawn } = require('child_process');
-
+
     return new Promise((resolve) => {
       let fullResponse = '';
       let errorOutput = '';
-
+
       // Call claude CLI with the prompt
       const claude = spawn('claude', ['--dangerously-skip-permissions'], {
         stdio: ['pipe', 'pipe', 'pipe']
       });
-
+
       // Send prompt to stdin
       claude.stdin.write(prompt);
       claude.stdin.end();
-
+
       // Capture stdout
       claude.stdout.on('data', (data) => {
         const chunk = data.toString();
         fullResponse += chunk;
         if (onChunk) onChunk(chunk);
       });
-
+
       // Capture stderr
       claude.stderr.on('data', (data) => {
         errorOutput += data.toString();
       });
-
+
       // Handle completion
       claude.on('close', (code) => {
         if (code === 0) {
@@ -475,7 +476,7 @@ class DirectLLMManager {
           resolve({ success: false, error });
         }
       });
-
+
       // Handle spawn errors
       claude.on('error', (err) => {
         const error = `Failed to start Claude CLI: ${err.message}`;
@@ -493,27 +494,63 @@ class DirectLLMManager {
    * @returns {Promise<{success: boolean, response?: string, error?: string}>}
    */
   async call(config, prompt, options = {}) {
-    const { provider, model, apiKey, region, accessKeyId, secretAccessKey } = config;
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    const { provider, model, apiKey, region, accessKeyId, secretAccessKey, fallbackModels = [] } = config;
+    const modelsToTry = [model, ...fallbackModels];
+    let lastError = null;
+
+    for (const currentModel of modelsToTry) {
+      if (currentModel !== model) {
+        this.logger.log(`⚠️ Quota/Limit reached for previous model, failing over to ${currentModel}...`);
+      }
+
+      const agentId = `${provider}:${currentModel}`;
+      try {
+        const quota = await quotaManagement.fetchQuotaForAgent(agentId);
+        if (quota.isExceeded()) {
+          const errorMessage = `Quota limit reached for ${currentModel}. Resets at ${quota.resetsAt ? quota.resetsAt.toLocaleString() : 'a later time'}.`;
+          lastError = { success: false, error: errorMessage };
+          continue; // Try next model
+        }
+      } catch (error) {
+        this.logger.error(`Failed to check quota for ${agentId}: ${error.message}`);
+      }
+
+      const currentConfig = { ...config, model: currentModel };
+      let result;
+
+      switch (provider) {
+        case 'ollama':
+          result = await this.callOllama(currentModel, prompt, options);
+          break;
+        case 'anthropic':
+          result = await this.callAnthropic(currentModel, prompt, { ...options, apiKey });
+          break;
+        case 'groq':
+          result = await this.callGroq(currentModel, prompt, { ...options, apiKey });
+          break;
+        case 'bedrock':
+          result = await this.callBedrock(currentModel, prompt, { ...options, region, accessKeyId, secretAccessKey });
+          break;
+        case 'claude-code':
+          result = await this.callClaudeCode(currentModel, prompt, options);
+          break;
+        default:
+          return { success: false, error: `Unknown provider: ${provider}` };
+      }
+
+      if (result.success) {
+        return result;
+      }
+
+      // If failed, check for rate limit to save it
+      this.detectAndSaveRateLimit(provider, currentModel, result.error || '');
+      lastError = result;
+
+      // If it's a "fatal" error that isn't a rate limit, we might want to stop?
+      // But usually we want to try the next model if possible.
     }
+
+    return lastError || { success: false, error: `All models for ${provider} failed.` };
   }
 
   /**
@@ -548,20 +585,20 @@ class DirectLLMManager {
    */
   async isClaudeCodeAvailable() {
     const { spawn } = require('child_process');
-
+
     return new Promise((resolve) => {
       const claude = spawn('claude', ['--version'], {
         stdio: ['ignore', 'pipe', 'pipe']
       });
-
+
       claude.on('close', (code) => {
         resolve(code === 0);
       });
-
+
       claude.on('error', () => {
         resolve(false);
       });
-
+
       // Timeout after 2 seconds
       setTimeout(() => {
         claude.kill();
@@ -583,11 +620,11 @@ class DirectLLMManager {
         method: 'GET'
       }, (res) => {
         let data = '';
-
+
         res.on('data', (chunk) => {
           data += chunk.toString();
         });
-
+
         res.on('end', () => {
           try {
             const json = JSON.parse(data);
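The rewritten call() above now consults the new quota-management module (fetchQuotaForAgent / isExceeded) and walks an optional fallbackModels list, recording rate limits via detectAndSaveRateLimit before moving on to the next model. A hedged sketch of how a caller might exercise the failover, assuming an existing DirectLLMManager instance named manager; the model identifiers and environment variable are placeholders, not real IDs from this package:

// config.fallbackModels is the new field added in this version; the IDs below are examples only.
const config = {
  provider: 'anthropic',
  model: 'claude-primary-model-id',
  fallbackModels: ['claude-fallback-model-id'],  // tried in order once the primary hits a quota or rate limit
  apiKey: process.env.ANTHROPIC_API_KEY
};

manager.call(config, 'Summarize the latest changes in one sentence.', {
  onChunk: (chunk) => process.stdout.write(chunk)
}).then((result) => {
  // result is the first successful provider response, or the last recorded error
  // once every model in [model, ...fallbackModels] has failed.
  if (!result.success) console.error(result.error);
});
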
package/src/localization/index.js (new file)

@@ -0,0 +1,148 @@
+/**
+ * Shared Localization System for Vibe Coding Machine
+ * Used by both CLI and GUI applications
+ */
+
+const os = require('os');
+
+// Supported locales
+const SUPPORTED_LOCALES = ['en', 'es'];
+const DEFAULT_LOCALE = 'en';
+
+// Language names mapping
+const LANGUAGE_NAMES = {
+  'en': 'English',
+  'es': 'Spanish'
+};
+
+/**
+ * Get supported language names
+ * @returns {Object} Object mapping locale codes to language names
+ */
+function getSupportedLanguageNames() {
+  return { ...LANGUAGE_NAMES };
+}
+
+/**
+ * Get language display name
+ * @param {string} languageCode - Language code
+ * @returns {string} Display name for the language
+ */
+function getLanguageDisplayName(languageCode) {
+  if (!languageCode) return 'Not Set';
+  return LANGUAGE_NAMES[languageCode] || languageCode;
+}
+
+/**
+ * Check if a language is supported
+ * @param {string} languageCode - Language code to check
+ * @returns {boolean} True if language is supported
+ */
+function isLanguageSupported(languageCode) {
+  return SUPPORTED_LOCALES.includes(languageCode);
+}
+
+/**
+ * Detect user's preferred locale
+ * @returns {string} Locale code (e.g., 'en', 'es')
+ */
+function detectLocale() {
+  // Check environment variables first
+  const envLocale = process.env.LANG || process.env.LC_ALL || process.env.LC_MESSAGES;
+  if (envLocale) {
+    const locale = envLocale.split('.')[0].split('_')[0].toLowerCase();
+    if (SUPPORTED_LOCALES.includes(locale)) {
+      return locale;
+    }
+  }
+
+  // Check system locale (Node.js)
+  try {
+    const systemLocale = Intl.DateTimeFormat().resolvedOptions().locale;
+    const locale = systemLocale.split('-')[0].toLowerCase();
+    if (SUPPORTED_LOCALES.includes(locale)) {
+      return locale;
+    }
+  } catch (error) {
+    // Fallback if Intl is not available
+  }
+
+  return DEFAULT_LOCALE;
+}
+
+/**
+ * Get current locale
+ * @returns {string} Current locale
+ */
+function getCurrentLocale() {
+  return global._vibecodingmachine_locale || detectLocale();
+}
+
+/**
+ * Set current locale
+ * @param {string} locale - Locale to set
+ */
+function setLocale(locale) {
+  if (SUPPORTED_LOCALES.includes(locale)) {
+    global._vibecodingmachine_locale = locale;
+  }
+}
+
+/**
+ * Translate a text string
+ * @param {string} key - Translation key
+ * @param {Object} params - Parameters for interpolation
+ * @returns {string} Translated text
+ */
+function t(key, params = {}) {
+  const locale = getCurrentLocale();
+  const translations = getTranslations(locale);
+
+  let text = translations[key] || key;
+
+  // Simple parameter interpolation
+  if (params && typeof params === 'object') {
+    Object.keys(params).forEach(param => {
+      text = text.replace(new RegExp(`\\{${param}\\}`, 'g'), params[param]);
+    });
+  }
+
+  return text;
+}
+
+/**
+ * Get translations for a specific locale
+ * @param {string} locale - Locale code
+ * @returns {Object} Translation object
+ */
+function getTranslations(locale) {
+  switch (locale) {
+    case 'es':
+      return require('./translations/es');
+    case 'en':
+    default:
+      return require('./translations/en');
+  }
+}
+
+/**
+ * Get available locales
+ * @returns {Array} Array of supported locale codes
+ */
+function getAvailableLocales() {
+  return [...SUPPORTED_LOCALES];
+}
+
+module.exports = {
+  t,
+  getCurrentLocale,
+  setLocale,
+  detectLocale,
+  getAvailableLocales,
+  getSupportedLanguageNames,
+  getLanguageDisplayName,
+  isLanguageSupported,
+  SUPPORTED_LOCALES,
+  DEFAULT_LOCALE,
+  LANGUAGE_NAMES
+};