create-byan-agent 2.7.0 → 2.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,687 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * Turbo Whisper Setup Script
5
+ * Installs Turbo Whisper voice dictation with local Whisper server
6
+ *
7
+ * Options:
8
+ * - Local (CPU): Python + faster-whisper locally
9
+ * - Docker (GPU): Docker container with GPU support
10
+ * - Skip: Don't install Turbo Whisper
11
+ */
12
+
13
+ const fs = require('fs-extra');
14
+ const path = require('path');
15
+ const { execSync } = require('child_process');
16
+ const chalk = require('chalk');
17
+ const ora = require('ora');
18
+
19
/**
 * Installs Turbo Whisper voice dictation together with a local Whisper server.
 *
 * Modes:
 *   - 'local':  Python + faster-whisper running directly on the host (CPU).
 *   - 'docker': Whisper server in a Docker container (GPU-capable).
 *   - 'skip':   do not install anything.
 */
class TurboWhisperInstaller {
  /**
   * @param {string} projectRoot - Root directory of the project being set up.
   * @param {'local'|'docker'|'skip'} [mode='local'] - Installation mode.
   */
  constructor(projectRoot, mode = 'local') {
    this.projectRoot = projectRoot;
    this.mode = mode; // 'local', 'docker', or 'skip'
    this.turboDir = path.join(projectRoot, '.turbo-whisper');
    this.scriptsDir = path.join(projectRoot, 'scripts');
    this.gpuInfo = null; // populated by detectGPU() when needed
  }

  /**
   * Detect an NVIDIA GPU via nvidia-smi and choose the optimal Whisper model.
   * Model/VRAM mapping based on official specs:
   * https://github.com/knowall-ai/turbo-whisper
   * @returns {{hasGPU: boolean, vram?: number, gpuName?: string, model: string, modelSize: string}}
   */
  detectGPU() {
    try {
      const result = execSync('nvidia-smi --query-gpu=name,memory.total --format=csv,noheader', {
        encoding: 'utf-8',
        stdio: ['pipe', 'pipe', 'ignore']
      }).trim();

      if (!result || result === 'NO_GPU') {
        return { hasGPU: false, model: 'base', modelSize: '~1 GB VRAM' };
      }

      const [gpuName, vramStr] = result.split(',').map(s => s.trim());
      // vramStr is e.g. "8192 MiB"; parseInt stops at the first non-digit.
      // Always pass the radix explicitly.
      const vram = Number.parseInt(vramStr, 10);

      // Map VRAM (MB) to the largest model that fits (official specs from GitHub):
      // tiny: ~1 GB VRAM, small: ~2 GB, medium: ~5 GB, large-v2: ~8 GB, large-v3: ~10 GB
      let model;
      let modelSize;
      if (vram < 2000) {
        model = 'tiny';
        modelSize = '~1 GB VRAM';
      } else if (vram < 4000) {
        model = 'small';
        modelSize = '~2 GB VRAM';
      } else if (vram < 6000) {
        model = 'medium';
        modelSize = '~5 GB VRAM';
      } else if (vram < 10000) {
        model = 'large-v2';
        modelSize = '~8 GB VRAM';
      } else {
        model = 'large-v3';
        modelSize = '~10 GB VRAM';
      }

      return { hasGPU: true, vram, gpuName, model, modelSize };
    } catch (error) {
      // nvidia-smi absent or failed: fall back to the CPU-friendly base model.
      return { hasGPU: false, model: 'base', modelSize: '~1 GB VRAM' };
    }
  }

  /**
   * Run the full installation flow for the configured mode.
   * Never throws: failures are reported in the returned object.
   * @returns {Promise<{success: boolean, skipped?: boolean, mode?: string, error?: string}>}
   */
  async install() {
    if (this.mode === 'skip') {
      console.log(chalk.gray('Turbo Whisper installation skipped'));
      return { success: true, skipped: true };
    }

    console.log(chalk.blue('\n📦 Installing Turbo Whisper...'));
    console.log(chalk.gray(`Mode: ${this.mode}\n`));

    try {
      // Detect GPU up front in Docker mode so the compose file can be tailored.
      if (this.mode === 'docker') {
        this.gpuInfo = this.detectGPU();

        if (this.gpuInfo.hasGPU) {
          console.log(chalk.green(`✓ GPU detected: ${this.gpuInfo.gpuName}`));
          console.log(chalk.gray(`  VRAM: ${this.gpuInfo.vram} MB`));
          console.log(chalk.cyan(`  Optimal model: ${this.gpuInfo.model} (${this.gpuInfo.modelSize})`));
        } else {
          console.log(chalk.yellow('⚠ No GPU detected, using base model (CPU)'));
          console.log(chalk.gray('  Tip: Install nvidia drivers for GPU acceleration\n'));
        }
      }

      await this.checkDependencies();

      if (this.mode === 'local') {
        await this.installLocal();
      } else if (this.mode === 'docker') {
        await this.installDocker();
      }

      await this.createLaunchScript();
      await this.createDocumentation();

      console.log(chalk.green('\n✓ Turbo Whisper installed successfully!\n'));
      this.printUsageInstructions();

      return { success: true, mode: this.mode };
    } catch (error) {
      console.error(chalk.red('\n✗ Turbo Whisper installation failed:'), error.message);
      return { success: false, error: error.message };
    }
  }

  /**
   * Verify that all external tools the installer shells out to are present.
   * @throws {Error} when at least one required command is missing (after
   *   printing install hints for the user).
   */
  async checkDependencies() {
    const spinner = ora('Checking dependencies...').start();

    const required = {
      python3: this.commandExists('python3'),
      git: this.commandExists('git'),
      wlCopy: this.commandExists('wl-copy'),
      xdotool: this.commandExists('xdotool')
    };

    if (this.mode === 'docker') {
      required.docker = this.commandExists('docker');
    }

    const missing = Object.entries(required)
      .filter(([_, exists]) => !exists)
      .map(([name]) => name);

    if (missing.length > 0) {
      spinner.fail('Missing dependencies');
      console.log(chalk.yellow('\nMissing dependencies:'));
      missing.forEach(dep => console.log(chalk.yellow(`  - ${dep}`)));

      // Hints assume an Arch-based distro (pacman), matching the target setup.
      console.log(chalk.gray('\nInstall with:'));
      if (missing.includes('wlCopy') || missing.includes('xdotool')) {
        console.log(chalk.gray('  sudo pacman -S wl-clipboard xdotool'));
      }
      if (missing.includes('docker')) {
        console.log(chalk.gray('  sudo pacman -S docker'));
      }

      throw new Error('Missing required dependencies');
    }

    spinner.succeed('Dependencies checked');
  }

  /**
   * Check whether a command is available on PATH.
   * @param {string} command - Command name (trusted, internal constants only).
   * @returns {boolean}
   */
  commandExists(command) {
    try {
      execSync(`which ${command}`, { stdio: 'pipe' });
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Local (CPU) mode: clone/update Turbo Whisper and faster-whisper-server,
   * apply UTF-8 patches, and install both into Python virtualenvs.
   * @throws {Error} when any git/pip step fails.
   */
  async installLocal() {
    const spinner = ora('Installing Turbo Whisper (local mode)...').start();

    try {
      // Clone (or update) the Turbo Whisper repository.
      const repoUrl = 'https://github.com/knowall-ai/turbo-whisper.git';
      const installDir = path.join(process.env.HOME, '.local', 'share', 'turbo-whisper');

      if (await fs.pathExists(installDir)) {
        spinner.text = 'Turbo Whisper already installed, updating...';
        execSync('git pull', { cwd: installDir, stdio: 'pipe' });
      } else {
        spinner.text = 'Cloning Turbo Whisper...';
        execSync(`git clone ${repoUrl} ${installDir}`, { stdio: 'pipe' });
      }

      // Patch the checkout for proper UTF-8 handling.
      spinner.text = 'Applying UTF-8 fixes...';
      await this.applyUTF8Fixes(installDir);

      // Install the client into its own virtualenv.
      spinner.text = 'Installing Python dependencies...';
      execSync('python3 -m venv .venv', { cwd: installDir, stdio: 'pipe' });
      execSync('.venv/bin/pip install -e .', { cwd: installDir, stdio: 'pipe' });

      // Install the transcription server (only on first run).
      spinner.text = 'Installing faster-whisper-server...';
      const serverDir = path.join(process.env.HOME, 'faster-whisper-server');

      if (!(await fs.pathExists(serverDir))) {
        execSync(`git clone https://github.com/fedirz/faster-whisper-server.git ${serverDir}`, { stdio: 'pipe' });
        execSync('python3 -m venv .venv && .venv/bin/pip install -e .', { cwd: serverDir, stdio: 'pipe' });
      }

      spinner.succeed('Turbo Whisper installed (local mode)');
    } catch (error) {
      spinner.fail('Installation failed');
      throw error;
    }
  }

  /**
   * Docker (GPU) mode: install the Turbo Whisper client locally and generate
   * a docker-compose file for the Whisper server container.
   * @throws {Error} when any git/pip/compose step fails.
   */
  async installDocker() {
    const spinner = ora('Installing Turbo Whisper (Docker mode)...').start();

    try {
      // The client still runs on the host; only the server is containerized.
      const installDir = path.join(process.env.HOME, '.local', 'share', 'turbo-whisper');
      const repoUrl = 'https://github.com/knowall-ai/turbo-whisper.git';

      if (await fs.pathExists(installDir)) {
        spinner.text = 'Updating Turbo Whisper...';
        execSync('git pull', { cwd: installDir, stdio: 'pipe' });
      } else {
        spinner.text = 'Cloning Turbo Whisper...';
        execSync(`git clone ${repoUrl} ${installDir}`, { stdio: 'pipe' });
      }

      // Patch the checkout for proper UTF-8 handling.
      await this.applyUTF8Fixes(installDir);

      // Install the client into its own virtualenv.
      spinner.text = 'Installing Python dependencies...';
      execSync('python3 -m venv .venv && .venv/bin/pip install -e .', { cwd: installDir, stdio: 'pipe' });

      // Generate the compose file for the Whisper server container.
      spinner.text = 'Creating Docker configuration...';
      await this.createDockerCompose();

      spinner.succeed('Turbo Whisper installed (Docker mode)');
      console.log(chalk.yellow('\nNote: Start Whisper server with: docker-compose up -d whisper-server'));
    } catch (error) {
      spinner.fail('Installation failed');
      throw error;
    }
  }

  /**
   * Patch the cloned Turbo Whisper sources for robust UTF-8 handling:
   * - main.py: force UTF-8 stdout/stderr and locale environment variables.
   * - typer.py: add a clipboard-paste typing strategy (best for Wayland).
   * Both patches are idempotent: re-running on an already-patched checkout
   * is a no-op.
   * @param {string} installDir - Root of the turbo-whisper checkout.
   */
  async applyUTF8Fixes(installDir) {
    // --- Patch 1: UTF-8 I/O setup in main.py -------------------------------
    const mainPyPath = path.join(installDir, 'src', 'turbo_whisper', 'main.py');

    if (await fs.pathExists(mainPyPath)) {
      let mainPy = await fs.readFile(mainPyPath, 'utf-8');

      // Skip if the fix was already applied.
      if (!mainPy.includes('PYTHONIOENCODING')) {
        // BUGFIX: the injected snippet uses os.environ, so it must import os
        // itself — it cannot rely on main.py having imported os earlier.
        const utf8Import = `
import sys
import io
import os

# Force UTF-8 encoding for all I/O operations
if sys.platform != "win32":
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace', line_buffering=True)
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace', line_buffering=True)

os.environ['PYTHONIOENCODING'] = 'utf-8'
os.environ.setdefault('LC_ALL', 'fr_FR.UTF-8')
os.environ.setdefault('LANG', 'fr_FR.UTF-8')
`;

        // Insert right after the "import time" line of the first import block.
        mainPy = mainPy.replace(
          /import time\n/,
          `import time\n${utf8Import}\n`
        );

        await fs.writeFile(mainPyPath, mainPy, 'utf-8');
      }
    }

    // --- Patch 2: clipboard-paste typing strategy in typer.py --------------
    const typerPyPath = path.join(installDir, 'src', 'turbo_whisper', 'typer.py');

    if (await fs.pathExists(typerPyPath)) {
      let typerPy = await fs.readFile(typerPyPath, 'utf-8');

      // Skip if the clipboard paste method already exists.
      if (!typerPy.includes('_type_clipboard_paste')) {
        const clipboardMethod = `
    def _type_clipboard_paste(self, text: str) -> bool:
        """Type text using clipboard + simulated Ctrl+Shift+V (best for UTF-8 on Wayland)."""
        import time

        if not self.copy_to_clipboard(text):
            print("Failed to copy to clipboard")
            return False

        time.sleep(0.1)

        # Try Ctrl+Shift+V first (terminals on Linux)
        if shutil.which("xdotool"):
            try:
                subprocess.run(
                    ["xdotool", "key", "--clearmodifiers", "ctrl+shift+v"],
                    check=True,
                    capture_output=True,
                    timeout=5
                )
                print("✓ Pasted via Ctrl+Shift+V")
                return True
            except Exception as e:
                try:
                    subprocess.run(
                        ["xdotool", "key", "--clearmodifiers", "ctrl+v"],
                        check=True,
                        capture_output=True,
                        timeout=5
                    )
                    print("✓ Pasted via Ctrl+V")
                    return True
                except Exception as e2:
                    print(f"xdotool paste failed: {e}, {e2}")
                    return False

        return False
`;

        // Insert the new method immediately before _type_xdotool.
        typerPy = typerPy.replace(
          /    def _type_xdotool\(self, text: str\) -> bool:/,
          `${clipboardMethod}\n    def _type_xdotool(self, text: str) -> bool:`
        );

        await fs.writeFile(typerPyPath, typerPy, 'utf-8');
      }
    }
  }

  /**
   * Write docker-compose.turbo-whisper.yml into the project root, sized to
   * the detected GPU (or CPU fallback).
   */
  async createDockerCompose() {
    // Use previously detected GPU info, or detect now as a fallback.
    const gpuInfo = this.gpuInfo || this.detectGPU();
    const model = gpuInfo.model;
    const useGPU = gpuInfo.hasGPU;

    // Model naming per the official docs:
    // https://github.com/knowall-ai/turbo-whisper
    const modelFullName = `Systran/faster-whisper-${model}`;

    const dockerCompose = `version: '3.8'

services:
  whisper-server:
    image: fedirz/faster-whisper-server:latest-${useGPU ? 'cuda' : 'cpu'}
    ports:
      - "8000:8000"
    environment:
      - WHISPER__MODEL=${modelFullName}${useGPU ? `
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]` : ''}
    restart: unless-stopped
`;

    const composePath = path.join(this.projectRoot, 'docker-compose.turbo-whisper.yml');
    await fs.writeFile(composePath, dockerCompose, 'utf-8');

    // Log the chosen configuration.
    console.log(chalk.gray(`\n  Docker config: ${useGPU ? 'GPU (CUDA)' : 'CPU'} with model ${modelFullName}`));
  }

  /**
   * Generate the launcher script(s) in scripts/ for the current mode:
   * - launch-turbo-whisper.sh (all modes)
   * - stop-whisper-server.sh (docker mode only)
   * - start-whisper-server.sh (local mode only, standalone server)
   * All scripts are made executable (0755).
   */
  async createLaunchScript() {
    await fs.ensureDir(this.scriptsDir);

    let launchScript;

    if (this.mode === 'local') {
      // Local mode: check for a running server and start it if needed.
      launchScript = `#!/bin/bash
# Launch Turbo Whisper voice dictation with automatic server startup

TURBO_DIR="$HOME/.local/share/turbo-whisper"
SERVER_DIR="$HOME/faster-whisper-server"
SERVER_PORT=8000

echo "🔍 Vérification serveur Whisper..."

# Vérifier si serveur déjà en cours
if curl -s http://localhost:$SERVER_PORT/health > /dev/null 2>&1; then
    echo "✅ Serveur Whisper déjà actif"
else
    echo "⚡ Démarrage serveur Whisper..."

    # Lancer serveur en arrière-plan
    cd "$SERVER_DIR"
    nohup uv run uvicorn --factory faster_whisper_server.main:create_app > /tmp/whisper-server.log 2>&1 &
    SERVER_PID=$!

    echo "⏳ Attente démarrage serveur (15 secondes)..."
    sleep 15

    # Vérifier que le serveur répond
    if curl -s http://localhost:$SERVER_PORT/health > /dev/null 2>&1; then
        echo "✅ Serveur Whisper prêt (PID: $SERVER_PID)"
    else
        echo "❌ Erreur: Serveur n'a pas démarré"
        echo "📋 Logs: tail -f /tmp/whisper-server.log"
        exit 1
    fi
fi

echo ""
echo "🚀 Lancement Turbo Whisper..."
echo "📍 Hotkey: Ctrl+Alt+R"
echo "📋 Logs serveur: tail -f /tmp/whisper-server.log"
echo ""

cd "$TURBO_DIR"
source .venv/bin/activate
python -m turbo_whisper.main
`;
    } else if (this.mode === 'docker') {
      // Docker mode: check for a running container and start it if needed.
      // (GPU detection is done inside the script itself so it reflects the
      // machine it runs on, not the machine it was generated on.)
      launchScript = `#!/bin/bash
# Launch Turbo Whisper voice dictation with Docker server
# Auto-detects GPU and validates configuration

TURBO_DIR="$HOME/.local/share/turbo-whisper"
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
COMPOSE_FILE="$PROJECT_DIR/docker-compose.turbo-whisper.yml"
SERVER_PORT=8000

# Function to detect GPU
detect_gpu() {
    if command -v nvidia-smi &> /dev/null; then
        GPU_INFO=$(nvidia-smi --query-gpu=name,memory.total --format=csv,noheader 2>/dev/null)
        if [ $? -eq 0 ] && [ -n "$GPU_INFO" ]; then
            GPU_NAME=$(echo "$GPU_INFO" | cut -d',' -f1 | xargs)
            VRAM=$(echo "$GPU_INFO" | cut -d',' -f2 | xargs)
            echo "✓ GPU: $GPU_NAME ($VRAM)"
            return 0
        fi
    fi
    echo "⚠ No GPU detected (running in CPU mode)"
    return 1
}

echo "🔍 Vérification serveur Whisper Docker..."
echo "📂 Compose file: $COMPOSE_FILE"
echo ""

# Detect GPU
detect_gpu

# Vérifier que le fichier existe
if [ ! -f "$COMPOSE_FILE" ]; then
    echo "❌ Erreur: $COMPOSE_FILE introuvable"
    echo "💡 Le fichier devrait être dans le répertoire du projet"
    exit 1
fi

# Vérifier si serveur déjà en cours
if curl -s http://localhost:$SERVER_PORT/health > /dev/null 2>&1; then
    echo "✅ Serveur Whisper déjà actif"
else
    echo "⚡ Démarrage conteneur Docker..."

    # Lancer Docker Compose
    docker-compose -f "$COMPOSE_FILE" up -d

    echo "⏳ Attente démarrage serveur (20 secondes)..."
    sleep 20

    # Vérifier que le serveur répond
    if curl -s http://localhost:$SERVER_PORT/health > /dev/null 2>&1; then
        echo "✅ Serveur Whisper prêt"
    else
        echo "❌ Erreur: Serveur n'a pas démarré"
        echo "📋 Logs: docker-compose -f $COMPOSE_FILE logs"
        exit 1
    fi
fi

echo ""
echo "🚀 Lancement Turbo Whisper..."
echo "📍 Hotkey: Ctrl+Alt+R"
echo "📋 Arrêter serveur: docker-compose -f $COMPOSE_FILE down"
echo ""

cd "$TURBO_DIR"
source .venv/bin/activate
python -m turbo_whisper.main
`;
    } else {
      // Skip mode (should not normally reach here).
      launchScript = `#!/bin/bash
echo "⚠️ Turbo Whisper non installé"
echo "Installez avec: npm run setup-turbo-whisper"
exit 1
`;
    }

    const scriptPath = path.join(this.scriptsDir, 'launch-turbo-whisper.sh');
    await fs.writeFile(scriptPath, launchScript, 'utf-8');
    await fs.chmod(scriptPath, '755');

    // Create a stop script for Docker mode.
    if (this.mode === 'docker') {
      const stopScript = `#!/bin/bash
# Stop Turbo Whisper Docker server

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
COMPOSE_FILE="$PROJECT_DIR/docker-compose.turbo-whisper.yml"

if [ ! -f "$COMPOSE_FILE" ]; then
    echo "❌ Erreur: $COMPOSE_FILE introuvable"
    exit 1
fi

echo "🛑 Arrêt serveur Whisper Docker..."
docker-compose -f "$COMPOSE_FILE" down

echo "✅ Serveur arrêté"
`;

      const stopScriptPath = path.join(this.scriptsDir, 'stop-whisper-server.sh');
      await fs.writeFile(stopScriptPath, stopScript, 'utf-8');
      await fs.chmod(stopScriptPath, '755');
    }

    // Create a standalone (optional) server launch script for local mode.
    if (this.mode === 'local') {
      const serverScript = `#!/bin/bash
# Launch faster-whisper-server locally (standalone)

SERVER_DIR="$HOME/faster-whisper-server"

cd "$SERVER_DIR"
echo "🚀 Starting Whisper server on http://localhost:8000"
uv run uvicorn --factory faster_whisper_server.main:create_app
`;

      const serverScriptPath = path.join(this.scriptsDir, 'start-whisper-server.sh');
      await fs.writeFile(serverScriptPath, serverScript, 'utf-8');
      await fs.chmod(serverScriptPath, '755');
    }
  }

  /**
   * Write TURBO-WHISPER-SETUP.md into the project root with mode-specific
   * usage and troubleshooting instructions.
   */
  async createDocumentation() {
    const doc = `# Turbo Whisper - Voice Dictation

## Installation

✅ Turbo Whisper has been installed in: \`~/.local/share/turbo-whisper\`
${this.mode === 'local' ? '✅ Whisper server installed in: `~/faster-whisper-server`' : ''}
${this.mode === 'docker' ? '✅ Docker configuration: `docker-compose.turbo-whisper.yml`' : ''}

## Usage (Simplifié - Recommandé)

### Lancement Automatique (1 commande)

\`\`\`bash
./scripts/launch-turbo-whisper.sh
\`\`\`

${this.mode === 'local' ? '**Ce script:**\n1. Vérifie si le serveur Whisper tourne\n2. Le démarre automatiquement si nécessaire (arrière-plan)\n3. Lance Turbo Whisper client\n\n**Logs serveur:** `/tmp/whisper-server.log`' : ''}

${this.mode === 'docker' ? '**Ce script:**\n1. Vérifie si le conteneur Docker tourne\n2. Le démarre automatiquement si nécessaire\n3. Lance Turbo Whisper client\n\n**Arrêter serveur:** `./scripts/stop-whisper-server.sh`' : ''}

## Usage Avancé (Manuel)

### Démarrer Serveur Manuellement

${this.mode === 'local' ? '```bash\n# Option 1: Script standalone\n./scripts/start-whisper-server.sh\n\n# Option 2: Commande directe\ncd ~/faster-whisper-server\nuv run uvicorn --factory faster_whisper_server.main:create_app\n```' : ''}

${this.mode === 'docker' ? '```bash\n# Démarrer\ndocker-compose -f docker-compose.turbo-whisper.yml up -d\n\n# Vérifier\ndocker ps | grep whisper\n\n# Logs\ndocker-compose -f docker-compose.turbo-whisper.yml logs -f\n\n# Arrêter\ndocker-compose -f docker-compose.turbo-whisper.yml down\n```' : ''}

### Démarrer Client Seul

\`\`\`bash
cd ~/.local/share/turbo-whisper
source .venv/bin/activate
python -m turbo_whisper.main
\`\`\`

### Hotkey

Press **Ctrl+Alt+R** to start/stop recording.

The transcribed text will be automatically typed in the active window.

## Features

- ✅ UTF-8 support (accents français: é, à, è, ç, â, etc.)
- ✅ Wayland compatible
- ✅ GPU acceleration (${this.mode === 'docker' ? 'via Docker' : 'if available'})
- ✅ Local processing (privacy)
- ✅ Real-time waveform visualization

## Configuration

Edit: \`~/.local/share/turbo-whisper/config.json\`

Default Whisper server: http://localhost:8000

## Troubleshooting

### Caractères spéciaux ne s'affichent pas

Les fixes UTF-8 ont été appliqués automatiquement. Si le problème persiste:

1. Vérifiez que \`wl-clipboard\` et \`xdotool\` sont installés
2. Redémarrez Turbo Whisper

### Serveur Whisper ne démarre pas

**Mode local:**
\`\`\`bash
cd ~/faster-whisper-server
.venv/bin/pip install -e .
\`\`\`

**Mode Docker:**
\`\`\`bash
docker logs whisper-server
\`\`\`

## Documentation

Voir: \`TURBO-WHISPER-INTEGRATION-COMPLETE.md\` pour détails complets.
`;

    const docPath = path.join(this.projectRoot, 'TURBO-WHISPER-SETUP.md');
    await fs.writeFile(docPath, doc, 'utf-8');
  }

  /**
   * Print post-install usage instructions to the console.
   */
  printUsageInstructions() {
    console.log(chalk.blue('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
    console.log(chalk.bold('🎤 Turbo Whisper Usage:\n'));

    console.log(chalk.green('✨ Lancement Automatique (Recommandé):\n'));
    console.log(chalk.white('  ./scripts/launch-turbo-whisper.sh'));
    console.log(chalk.gray('  → Démarre automatiquement le serveur si nécessaire\n'));

    if (this.mode === 'local') {
      console.log(chalk.gray('📋 Logs serveur: /tmp/whisper-server.log'));
    } else if (this.mode === 'docker') {
      console.log(chalk.gray('🛑 Arrêter serveur: ./scripts/stop-whisper-server.sh'));
    }

    console.log(chalk.gray('\n🎯 Hotkey: Ctrl+Alt+R (start/stop recording)\n'));

    console.log(chalk.blue('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
  }
}
664
+
665
// Export for use in main installer
module.exports = TurboWhisperInstaller;

// CLI usage: node <script> [local|docker|skip]
if (require.main === module) {
  const VALID_MODES = ['local', 'docker', 'skip'];
  const args = process.argv.slice(2);
  const mode = args[0] || 'local';

  // Fail fast on an unknown mode: previously any garbage string was passed
  // through, which skipped both installers yet still reported success and
  // wrote a non-functional launch script.
  if (!VALID_MODES.includes(mode)) {
    console.error(chalk.red(`Unknown mode "${mode}". Expected one of: ${VALID_MODES.join(', ')}`));
    process.exit(1);
  }

  const projectRoot = process.cwd();

  const installer = new TurboWhisperInstaller(projectRoot, mode);
  installer.install()
    .then(result => {
      // install() never rejects for expected failures; it reports via result.
      process.exit(result.success ? 0 : 1);
    })
    .catch(error => {
      console.error(chalk.red('Fatal error:'), error);
      process.exit(1);
    });
}