vanguard-cli 3.1.2 → 3.1.3
This diff shows the content of publicly released package versions as it appears in their respective public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +10 -0
- package/lib/commands/config.js +2 -2
- package/lib/services/scanner.js +12 -7
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,15 @@
 # Changelog
 
+## 3.1.3 (2026-01-24)
+
+
+### Features
+
+* enterprise-grade AI resilience with gemini-1.5-flash-latest ([3ab3f1d](https://github.com/bazobehram/vanguard/commit/3ab3f1da4866c3e8bb833efd039f43b1dd6e8942))
+* implement scanner resilience and add test suite ([20310c3](https://github.com/bazobehram/vanguard/commit/20310c30d2787cbc2dedb0a19d8d2a5d2b308d0d))
+* implement smart throttling and robust 429 recovery logic ([4123794](https://github.com/bazobehram/vanguard/commit/4123794b1cced382fb628a1b828a466d83c24a3e))
+* initial public release of Vanguard V3 Enterprise Edition ([2335c1a](https://github.com/bazobehram/vanguard/commit/2335c1aa78a95a871cc600e62dd45431147088b9))
+
 ## 3.1.2 (2026-01-24)
 
 
package/lib/commands/config.js
CHANGED

@@ -14,7 +14,7 @@ export async function runConfigWizard() {
       name: 'providerSelection',
       message: 'Select AI Provider:',
       choices: [
-        { name: 'Google Gemini
+        { name: 'Google Gemini (Cloud - Reliable & Smart)', value: 'gemini' },
         { name: 'Local Ollama (Private - Offline)', value: 'ollama' },
       ],
     },
@@ -33,7 +33,7 @@ export async function runConfigWizard() {
       },
     ]);
     config.set('GEMINI_KEY', apiKey);
-    console.log('✅ Gemini
+    console.log('✅ Gemini configured and saved.');
   } else {
     const spinner = createSpinner('Detecting local Ollama models...').start();
     try {
package/lib/services/scanner.js
CHANGED

@@ -77,20 +77,25 @@ RESPONSE FORMAT (JSON ONLY):
         return await this.scanWithOllama(content);
       }
     } catch (error) {
-      const isRateLimit =
+      const isRateLimit =
+        error.message.includes('429') ||
+        error.message.includes('Too Many Requests') ||
+        error.message.includes('Quota');
+
       const isServerErr = error.message.includes('500') || error.message.includes('503');
 
       if (isRateLimit && attempt <= maxAttempts) {
+        const waitTime = attempt === 1 ? 30 : 60; // Increase wait on second failure
         if (spinner) {
           const originalText = spinner.text;
-          for (let i =
-          spinner.text = `
+          for (let i = waitTime; i > 0; i--) {
+            spinner.text = `Quota hit. Cooling down API (${i}s remaining)...`;
             await sleep(1000);
           }
           spinner.text = originalText;
         } else {
-          console.log(chalk.yellow(`\n⚠️ Rate limit hit. Cooling down for
-          await sleep(
+          console.log(chalk.yellow(`\n⚠️ Rate limit hit. Cooling down for ${waitTime}s (Attempt ${attempt}/${maxAttempts})...`));
+          await sleep(waitTime * 1000);
         }
         return this.scanWithRetry(filePath, content, spinner, attempt + 1);
       }
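This first scanner.js hunk is the "smart throttling and robust 429 recovery" feature from the changelog: rate-limit failures are recognized by inspecting the error message, the cooldown escalates from 30 seconds to 60 seconds on the second failure, and the scan is retried through `scanWithRetry`. A standalone sketch of the same pattern follows; the `sleep` helper and the `maxAttempts` value are assumptions, since the diff only shows them being used.

```js
// Sketch of the cooldown-and-retry pattern the hunk implements.
// Only the 429/Quota checks and the 30s/60s escalation mirror the diff;
// sleep() and maxAttempts are filled in as assumptions.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
const maxAttempts = 2;

async function withQuotaRetry(task, attempt = 1) {
  try {
    return await task();
  } catch (error) {
    const isRateLimit =
      error.message.includes('429') ||
      error.message.includes('Too Many Requests') ||
      error.message.includes('Quota');

    if (isRateLimit && attempt <= maxAttempts) {
      const waitTime = attempt === 1 ? 30 : 60; // escalate the cooldown on the second failure
      console.log(`Rate limit hit. Cooling down for ${waitTime}s (Attempt ${attempt}/${maxAttempts})...`);
      await sleep(waitTime * 1000);
      return withQuotaRetry(task, attempt + 1);
    }
    throw error; // not a rate limit, or retries exhausted
  }
}
```

Matching on message substrings is more fragile than checking a structured status code, but it covers the different ways the SDK can surface quota errors ("429", "Too Many Requests", or a quota message) with one predicate.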
@@ -109,8 +114,8 @@ RESPONSE FORMAT (JSON ONLY):
     if (!apiKey) throw new Error('Gemini API Key missing. Run "vanguard config"');
 
     const genAI = new GoogleGenerativeAI(apiKey);
-    //
-    const model = genAI.getGenerativeModel({ model: 'gemini-1.5-flash' });
+    // Use the -latest alias for better cross-region compatibility
+    const model = genAI.getGenerativeModel({ model: 'gemini-1.5-flash-latest' });
     const instruction = await this.getSystemInstruction();
 
     const result = await model.generateContent({