vanguard-cli 3.1.2 → 3.1.6

package/CHANGELOG.md CHANGED
@@ -1,5 +1,15 @@
  # Changelog

+ ## 3.1.3 (2026-01-24)
+
+
+ ### Features
+
+ * enterprise-grade AI resilience with gemini-1.5-flash-latest 🛡️❄️🚀✨ ([3ab3f1d](https://github.com/bazobehram/vanguard/commit/3ab3f1da4866c3e8bb833efd039f43b1dd6e8942))
+ * implement scanner resilience and add test suite 🛡️🧪🧬✨ ([20310c3](https://github.com/bazobehram/vanguard/commit/20310c30d2787cbc2dedb0a19d8d2a5d2b308d0d))
+ * implement smart throttling and robust 429 recovery logic 🛡️❄️✨ ([4123794](https://github.com/bazobehram/vanguard/commit/4123794b1cced382fb628a1b828a466d83c24a3e))
+ * initial public release of Vanguard V3 Enterprise Edition ��️🚀✨ ([2335c1a](https://github.com/bazobehram/vanguard/commit/2335c1aa78a95a871cc600e62dd45431147088b9))
+
  ## 3.1.2 (2026-01-24)


@@ -0,0 +1,36 @@
+ # Enterprise Publication Recovery Guide 🛡️🔑🗝️
+
+ If you see an `ENEEDAUTH` or `Access token expired` error when publishing to NPM (like the one you saw on the homeserver), follow these steps to restore your automated shipping pipeline.
+
+ ## 1. Refresh Your Session 🔄
+ Your CLI session for NPM has likely expired. Run this in your terminal to log back in:
+ ```bash
+ npm login
+ ```
+ *Note: This will likely open a browser window or ask for your 2FA Passkey signature.*
+
+ ---
+
+ ## 2. Using a Permanent Token (Recommended) 🔑
+ To avoid Passkey/Browser prompts on your remote servers, use a **Granular Access Token**:
+
+ 1. **Generate Token**: Go to [NPM Tokens](https://www.npmjs.com/settings/tokens/granular-access-tokens/new).
+ 2. **Permissions**: Select "Read and write" for the specific package.
+ 3. **Bypass 2FA**: **CHECK THIS BOX** in the NPM settings for the token.
+ 4. **Set Environment**: On your server, add this to your `.bashrc` or `.env`:
+ ```bash
+ export NPM_TOKEN="your_token_here"
+ ```
+
+ ---
+
+ ## 3. Finishing the Vanguard V3.1.5 Release 🚀
+ I have already versioned and pushed the code to GitHub. To finish the live NPM update, simply run:
+ ```bash
+ npm run ship
+ ```
+ - Select **"Patch"**.
+ - When asked about 2FA, select **"No"** (after you've logged in or set the token).
+
+ ---
+ 🛡️ **Vanguard is perfectly synchronized and ready for the world.**
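
Step 2 of the guide above exports `NPM_TOKEN` but never shows npm where to find it; npm itself reads publish tokens from `.npmrc`. A minimal sketch of that last wiring step, assuming the default npmjs registry (this is an editorial illustration, not one of the published package files):

```bash
# Hypothetical follow-up to step 2: point npm at the exported token.
# Single quotes keep ${NPM_TOKEN} literal in .npmrc; npm expands it at run time.
echo '//registry.npmjs.org/:_authToken=${NPM_TOKEN}' >> ~/.npmrc

# Sanity check: should print your npm username if the token is valid.
npm whoami
```
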
@@ -6,7 +6,8 @@ import { showBanner } from '../utils/ui.js';
 
  export async function runConfigWizard() {
  showBanner();
- console.log('🛰️ Vanguard Configuration Wizard\n');
+ console.log('🛰️ Vanguard Configuration Wizard');
+ console.log(chalk.dim(`📂 Config Path: ${config.path}\n`));
 
  const { providerSelection } = await inquirer.prompt([
  {
@@ -14,7 +15,7 @@ export async function runConfigWizard() {
  name: 'providerSelection',
  message: 'Select AI Provider:',
  choices: [
- { name: 'Google Gemini 2.0 (Cloud - Fast & Smart)', value: 'gemini' },
+ { name: 'Google Gemini (Cloud - Reliable & Smart)', value: 'gemini' },
  { name: 'Local Ollama (Private - Offline)', value: 'ollama' },
  ],
  },
@@ -27,13 +28,15 @@ export async function runConfigWizard() {
  {
  type: 'password',
  name: 'apiKey',
- message: 'Enter your Gemini API Key:',
- default: config.get('GEMINI_KEY'),
- validate: (val) => val.length > 0 || 'Key is required',
+ validate: (val) => {
+ if (val.length === 0) return 'Key is required';
+ if (val.startsWith('sk-')) return '⚠️ That looks like an OpenAI key. Gemini keys usually start with "AIza"';
+ return true;
+ },
  },
  ]);
- config.set('GEMINI_KEY', apiKey);
- console.log('✅ Gemini 2.0 configured and saved.');
+ config.set('GEMINI_KEY', apiKey.trim());
+ console.log('✅ Gemini configured and saved.');
  } else {
  const spinner = createSpinner('🔍 Detecting local Ollama models...').start();
  try {
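
The wizard hunks above lean on a `config` object with `get`, `set`, and a `path` property; that module is not shown in this diff. A minimal sketch of what such a store could look like, assuming the `conf` package (the package's actual implementation may differ):

```js
// Hypothetical sketch of the config store the wizard appears to use.
// Assumes the 'conf' npm package; Vanguard's real module is not in this diff.
import Conf from 'conf';

export const config = new Conf({ projectName: 'vanguard' });

// Mirrors the wizard's usage: persist a key, read it back, show the file path.
config.set('GEMINI_KEY', 'AIza...example-placeholder');
console.log(config.get('GEMINI_KEY'), config.path);
```
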
@@ -77,20 +77,25 @@ RESPONSE FORMAT (JSON ONLY):
  return await this.scanWithOllama(content);
  }
  } catch (error) {
- const isRateLimit = error.message.includes('429') || error.message.includes('Too Many Requests');
+ const isRateLimit =
+ error.message.includes('429') ||
+ error.message.includes('Too Many Requests') ||
+ error.message.includes('Quota');
+
  const isServerErr = error.message.includes('500') || error.message.includes('503');
 
  if (isRateLimit && attempt <= maxAttempts) {
+ const waitTime = attempt === 1 ? 30 : 60; // Increase wait on second failure
  if (spinner) {
  const originalText = spinner.text;
- for (let i = 30; i > 0; i--) {
- spinner.text = `❄️ Rate limit hit. Cooling down API (${i}s remaining)...`;
+ for (let i = waitTime; i > 0; i--) {
+ spinner.text = `❄️ Quota hit. Cooling down API (${i}s remaining)...`;
  await sleep(1000);
  }
  spinner.text = originalText;
  } else {
- console.log(chalk.yellow(`\n⚠️ Rate limit hit. Cooling down for 30s (Attempt ${attempt}/${maxAttempts})...`));
- await sleep(30000);
+ console.log(chalk.yellow(`\n⚠️ Rate limit hit. Cooling down for ${waitTime}s (Attempt ${attempt}/${maxAttempts})...`));
+ await sleep(waitTime * 1000);
  }
  return this.scanWithRetry(filePath, content, spinner, attempt + 1);
  }
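
Taken on its own, the hunk above amounts to a two-step cool-down: roughly 30 s on the first 429/quota failure, 60 s on the next, then one more recursive attempt. A standalone sketch of that pattern, with hypothetical names (`callModel`, `scanWithBackoff`, `maxAttempts` are stand-ins, not the package's API):

```js
// Standalone sketch of the cool-down/retry pattern from the hunk above.
// callModel and maxAttempts are hypothetical stand-ins, not Vanguard APIs.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function scanWithBackoff(callModel, maxAttempts = 2, attempt = 1) {
  try {
    return await callModel();
  } catch (error) {
    const isRateLimit = /429|Too Many Requests|Quota/.test(error.message);
    if (isRateLimit && attempt <= maxAttempts) {
      const waitTime = attempt === 1 ? 30 : 60; // seconds, as in the diff
      await sleep(waitTime * 1000);
      return scanWithBackoff(callModel, maxAttempts, attempt + 1);
    }
    throw error; // non-retryable, or retries exhausted
  }
}
```
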
@@ -108,9 +113,9 @@ RESPONSE FORMAT (JSON ONLY):
  const apiKey = config.get('GEMINI_KEY');
  if (!apiKey) throw new Error('Gemini API Key missing. Run "vanguard config"');
 
- const genAI = new GoogleGenerativeAI(apiKey);
- // Switch to higher limit model
- const model = genAI.getGenerativeModel({ model: 'gemini-1.5-flash' });
+ const genAI = new GoogleGenerativeAI(apiKey.trim());
+ // Use the latest stable Flash model for the best performance/quota ratio
+ const model = genAI.getGenerativeModel({ model: 'gemini-2.0-flash' });
  const instruction = await this.getSystemInstruction();
 
  const result = await model.generateContent({
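
For context on the model switch above, this is roughly how a `gemini-2.0-flash` call looks with the `@google/generative-ai` SDK the scanner imports; the key handling, system instruction, and prompt below are placeholders, not the package's actual scanner logic:

```js
// Minimal sketch of a gemini-2.0-flash call via @google/generative-ai.
// GEMINI_KEY, the system instruction, and the prompt are placeholders.
import { GoogleGenerativeAI } from '@google/generative-ai';

const apiKey = process.env.GEMINI_KEY;
if (!apiKey) throw new Error('Set GEMINI_KEY first');

const genAI = new GoogleGenerativeAI(apiKey.trim());
const model = genAI.getGenerativeModel({
  model: 'gemini-2.0-flash',
  systemInstruction: 'Respond with JSON only.',
});

const result = await model.generateContent('Scan this diff for risky changes.');
console.log(result.response.text());
```
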
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "vanguard-cli",
- "version": "3.1.2",
+ "version": "3.1.6",
  "description": "AI-Powered Supply Chain Firewall for Git",
  "type": "module",
  "bin": {