glance-cli 0.13.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +9 -0
- package/dist/cli.js +198 -1064
- package/package.json +4 -2
- package/src/cli/commands.ts +854 -0
- package/src/cli/config.ts +24 -0
- package/src/cli/display.ts +270 -0
- package/src/cli/errors.ts +31 -0
- package/src/cli/index.ts +239 -0
- package/src/cli/logger.ts +43 -0
- package/src/cli/types.ts +114 -0
- package/src/cli/utils.ts +239 -0
- package/src/cli/validators.ts +176 -0
- package/src/cli.ts +17 -0
- package/src/core/compat.ts +96 -0
- package/src/core/extractor.ts +532 -0
- package/src/core/fetcher.ts +592 -0
- package/src/core/formatter.ts +742 -0
- package/src/core/language-detector.ts +382 -0
- package/src/core/screenshot.ts +444 -0
- package/src/core/service-detector.ts +411 -0
- package/src/core/summarizer.ts +656 -0
- package/src/core/text-cleaner.ts +150 -0
- package/src/core/voice.ts +708 -0
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Service Detection and Smart Fallback System
|
|
3
|
+
* Prioritizes free/local services to avoid API costs
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import chalk from "chalk";
|
|
7
|
+
|
|
8
|
+
/**
 * Status of a single AI or voice backend probe.
 */
interface ServiceStatus {
  // Whether the service can be used right now.
  available: boolean;
  // Human-readable service name (e.g. "Ollama", "OpenAI").
  name: string;
  // Whether using the service costs money.
  type: "free" | "paid";
  // Present only when unavailable: why, and how to enable it.
  reason?: string;
}
|
|
14
|
+
|
|
15
|
+
/**
 * Aggregate result of probing all AI and voice backends.
 */
interface DetectionResult {
  ai: {
    // First entry of the fallback chain, or "none".
    preferred: string;
    // Status of every probed AI service, in probe order.
    available: ServiceStatus[];
    // Service keys ordered by preference (free-first by default).
    fallbackChain: string[];
  };
  voice: {
    // First entry of the fallback chain, or "none".
    preferred: string;
    // Status of every probed voice service, in probe order.
    available: ServiceStatus[];
    // Service keys ordered by preference (free-first by default).
    fallbackChain: string[];
  };
}
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Check if Ollama is running
|
|
30
|
+
*/
|
|
31
|
+
async function checkOllama(
|
|
32
|
+
endpoint: string = "http://localhost:11434",
|
|
33
|
+
): Promise<ServiceStatus> {
|
|
34
|
+
try {
|
|
35
|
+
const controller = new AbortController();
|
|
36
|
+
const timeoutId = setTimeout(() => controller.abort(), 2000);
|
|
37
|
+
|
|
38
|
+
const res = await fetch(`${endpoint}/api/tags`, {
|
|
39
|
+
signal: controller.signal,
|
|
40
|
+
});
|
|
41
|
+
clearTimeout(timeoutId);
|
|
42
|
+
|
|
43
|
+
if (res.ok) {
|
|
44
|
+
const data = await res.json();
|
|
45
|
+
const models = (data as { models?: { name: string }[] }).models || [];
|
|
46
|
+
|
|
47
|
+
if (models.length === 0) {
|
|
48
|
+
return {
|
|
49
|
+
available: false,
|
|
50
|
+
name: "Ollama",
|
|
51
|
+
type: "free",
|
|
52
|
+
reason: "No models installed. Run: ollama pull llama3",
|
|
53
|
+
};
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
return {
|
|
57
|
+
available: true,
|
|
58
|
+
name: "Ollama",
|
|
59
|
+
type: "free",
|
|
60
|
+
};
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
return {
|
|
64
|
+
available: false,
|
|
65
|
+
name: "Ollama",
|
|
66
|
+
type: "free",
|
|
67
|
+
reason: "Server not responding",
|
|
68
|
+
};
|
|
69
|
+
} catch (_error) {
|
|
70
|
+
return {
|
|
71
|
+
available: false,
|
|
72
|
+
name: "Ollama",
|
|
73
|
+
type: "free",
|
|
74
|
+
reason: "Not running. Start with: ollama serve",
|
|
75
|
+
};
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
/**
|
|
80
|
+
* Check if OpenAI API key is set
|
|
81
|
+
*/
|
|
82
|
+
function checkOpenAI(): ServiceStatus {
|
|
83
|
+
if (!process.env.OPENAI_API_KEY) {
|
|
84
|
+
return {
|
|
85
|
+
available: false,
|
|
86
|
+
name: "OpenAI",
|
|
87
|
+
type: "paid",
|
|
88
|
+
reason: "OPENAI_API_KEY not set",
|
|
89
|
+
};
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
return {
|
|
93
|
+
available: true,
|
|
94
|
+
name: "OpenAI",
|
|
95
|
+
type: "paid",
|
|
96
|
+
};
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
|
|
100
|
+
* Check if Gemini API key is set
|
|
101
|
+
*/
|
|
102
|
+
function checkGemini(): ServiceStatus {
|
|
103
|
+
if (!process.env.GEMINI_API_KEY && !process.env.GOOGLE_API_KEY) {
|
|
104
|
+
return {
|
|
105
|
+
available: false,
|
|
106
|
+
name: "Google Gemini",
|
|
107
|
+
type: "paid",
|
|
108
|
+
reason: "GEMINI_API_KEY not set",
|
|
109
|
+
};
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
return {
|
|
113
|
+
available: true,
|
|
114
|
+
name: "Google Gemini",
|
|
115
|
+
type: "paid",
|
|
116
|
+
};
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Check if ElevenLabs API key is set
|
|
121
|
+
*/
|
|
122
|
+
function checkElevenLabs(): ServiceStatus {
|
|
123
|
+
if (!process.env.ELEVENLABS_API_KEY) {
|
|
124
|
+
return {
|
|
125
|
+
available: false,
|
|
126
|
+
name: "ElevenLabs",
|
|
127
|
+
type: "paid",
|
|
128
|
+
reason: "ELEVENLABS_API_KEY not set",
|
|
129
|
+
};
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
return {
|
|
133
|
+
available: true,
|
|
134
|
+
name: "ElevenLabs",
|
|
135
|
+
type: "paid",
|
|
136
|
+
};
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
/**
|
|
140
|
+
* Check if local TTS is available
|
|
141
|
+
*/
|
|
142
|
+
async function checkLocalTTS(): Promise<ServiceStatus> {
|
|
143
|
+
const platform = process.platform;
|
|
144
|
+
|
|
145
|
+
if (platform === "darwin") {
|
|
146
|
+
// macOS always has 'say' command
|
|
147
|
+
return {
|
|
148
|
+
available: true,
|
|
149
|
+
name: "macOS Say",
|
|
150
|
+
type: "free",
|
|
151
|
+
};
|
|
152
|
+
} else if (platform === "win32") {
|
|
153
|
+
// Windows always has SAPI
|
|
154
|
+
return {
|
|
155
|
+
available: true,
|
|
156
|
+
name: "Windows SAPI",
|
|
157
|
+
type: "free",
|
|
158
|
+
};
|
|
159
|
+
} else {
|
|
160
|
+
// Linux - check for espeak or festival
|
|
161
|
+
try {
|
|
162
|
+
const { spawn } = await import("node:child_process");
|
|
163
|
+
|
|
164
|
+
return new Promise((resolve) => {
|
|
165
|
+
const proc = spawn("which", ["espeak"]);
|
|
166
|
+
|
|
167
|
+
proc.on("close", (code) => {
|
|
168
|
+
if (code === 0) {
|
|
169
|
+
resolve({
|
|
170
|
+
available: true,
|
|
171
|
+
name: "espeak",
|
|
172
|
+
type: "free",
|
|
173
|
+
});
|
|
174
|
+
} else {
|
|
175
|
+
// Try festival
|
|
176
|
+
const festProc = spawn("which", ["festival"]);
|
|
177
|
+
festProc.on("close", (festCode) => {
|
|
178
|
+
if (festCode === 0) {
|
|
179
|
+
resolve({
|
|
180
|
+
available: true,
|
|
181
|
+
name: "festival",
|
|
182
|
+
type: "free",
|
|
183
|
+
});
|
|
184
|
+
} else {
|
|
185
|
+
resolve({
|
|
186
|
+
available: false,
|
|
187
|
+
name: "Linux TTS",
|
|
188
|
+
type: "free",
|
|
189
|
+
reason: "Install espeak or festival",
|
|
190
|
+
});
|
|
191
|
+
}
|
|
192
|
+
});
|
|
193
|
+
}
|
|
194
|
+
});
|
|
195
|
+
});
|
|
196
|
+
} catch {
|
|
197
|
+
return {
|
|
198
|
+
available: false,
|
|
199
|
+
name: "Linux TTS",
|
|
200
|
+
type: "free",
|
|
201
|
+
reason: "Unable to detect TTS engine",
|
|
202
|
+
};
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
/**
|
|
208
|
+
* Detect all available services and create smart fallback chains
|
|
209
|
+
*/
|
|
210
|
+
export async function detectServices(
|
|
211
|
+
options: {
|
|
212
|
+
preferFree?: boolean;
|
|
213
|
+
ollamaEndpoint?: string;
|
|
214
|
+
verbose?: boolean;
|
|
215
|
+
} = {},
|
|
216
|
+
): Promise<DetectionResult> {
|
|
217
|
+
const { preferFree = true, ollamaEndpoint, verbose = false } = options;
|
|
218
|
+
|
|
219
|
+
// Check all services in parallel
|
|
220
|
+
const [ollama, openai, gemini, elevenlabs, localTTS] = await Promise.all([
|
|
221
|
+
checkOllama(ollamaEndpoint),
|
|
222
|
+
checkOpenAI(),
|
|
223
|
+
checkGemini(),
|
|
224
|
+
checkElevenLabs(),
|
|
225
|
+
checkLocalTTS(),
|
|
226
|
+
]);
|
|
227
|
+
|
|
228
|
+
// Create AI fallback chain
|
|
229
|
+
const aiServices = [ollama, openai, gemini];
|
|
230
|
+
const _availableAI = aiServices.filter((s) => s.available);
|
|
231
|
+
|
|
232
|
+
let aiFallbackChain: string[] = [];
|
|
233
|
+
if (preferFree) {
|
|
234
|
+
// Prioritize free services
|
|
235
|
+
aiFallbackChain = [
|
|
236
|
+
...(ollama.available ? ["ollama"] : []),
|
|
237
|
+
...(openai.available ? ["openai"] : []),
|
|
238
|
+
...(gemini.available ? ["google"] : []),
|
|
239
|
+
];
|
|
240
|
+
} else {
|
|
241
|
+
// Prioritize paid services (potentially better quality)
|
|
242
|
+
aiFallbackChain = [
|
|
243
|
+
...(openai.available ? ["openai"] : []),
|
|
244
|
+
...(gemini.available ? ["google"] : []),
|
|
245
|
+
...(ollama.available ? ["ollama"] : []),
|
|
246
|
+
];
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
// Create Voice fallback chain
|
|
250
|
+
const voiceServices = [localTTS, elevenlabs];
|
|
251
|
+
const _availableVoice = voiceServices.filter((s) => s.available);
|
|
252
|
+
|
|
253
|
+
let voiceFallbackChain: string[] = [];
|
|
254
|
+
if (preferFree) {
|
|
255
|
+
// Always prioritize free TTS
|
|
256
|
+
voiceFallbackChain = [
|
|
257
|
+
...(localTTS.available ? ["local"] : []),
|
|
258
|
+
...(elevenlabs.available ? ["elevenlabs"] : []),
|
|
259
|
+
];
|
|
260
|
+
} else {
|
|
261
|
+
voiceFallbackChain = [
|
|
262
|
+
...(elevenlabs.available ? ["elevenlabs"] : []),
|
|
263
|
+
...(localTTS.available ? ["local"] : []),
|
|
264
|
+
];
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
// Determine preferred services
|
|
268
|
+
const preferredAI = aiFallbackChain[0] || "none";
|
|
269
|
+
const preferredVoice = voiceFallbackChain[0] || "none";
|
|
270
|
+
|
|
271
|
+
// Print status if verbose
|
|
272
|
+
if (verbose) {
|
|
273
|
+
console.log(chalk.cyan("\nš Service Detection Report:\n"));
|
|
274
|
+
|
|
275
|
+
console.log(chalk.bold("AI Services:"));
|
|
276
|
+
aiServices.forEach((service) => {
|
|
277
|
+
const icon = service.available ? "ā
" : "ā";
|
|
278
|
+
const badge =
|
|
279
|
+
service.type === "free"
|
|
280
|
+
? chalk.green("[FREE]")
|
|
281
|
+
: chalk.yellow("[PAID]");
|
|
282
|
+
console.log(
|
|
283
|
+
` ${icon} ${service.name} ${badge} ${service.reason ? chalk.dim(`- ${service.reason}`) : ""}`,
|
|
284
|
+
);
|
|
285
|
+
});
|
|
286
|
+
|
|
287
|
+
console.log(chalk.bold("\nVoice Services:"));
|
|
288
|
+
voiceServices.forEach((service) => {
|
|
289
|
+
const icon = service.available ? "ā
" : "ā";
|
|
290
|
+
const badge =
|
|
291
|
+
service.type === "free"
|
|
292
|
+
? chalk.green("[FREE]")
|
|
293
|
+
: chalk.yellow("[PAID]");
|
|
294
|
+
console.log(
|
|
295
|
+
` ${icon} ${service.name} ${badge} ${service.reason ? chalk.dim(`- ${service.reason}`) : ""}`,
|
|
296
|
+
);
|
|
297
|
+
});
|
|
298
|
+
|
|
299
|
+
console.log(chalk.bold("\nšÆ Selected:"));
|
|
300
|
+
console.log(
|
|
301
|
+
` AI: ${preferredAI !== "none" ? chalk.green(preferredAI) : chalk.red("No AI service available")}`,
|
|
302
|
+
);
|
|
303
|
+
console.log(
|
|
304
|
+
` Voice: ${preferredVoice !== "none" ? chalk.green(preferredVoice) : chalk.red("No voice service available")}`,
|
|
305
|
+
);
|
|
306
|
+
|
|
307
|
+
if (
|
|
308
|
+
preferFree &&
|
|
309
|
+
(openai.available || gemini.available || elevenlabs.available)
|
|
310
|
+
) {
|
|
311
|
+
console.log(
|
|
312
|
+
chalk.dim(
|
|
313
|
+
"\nš” Tip: Using free services by default. Use --prefer-quality for premium services.",
|
|
314
|
+
),
|
|
315
|
+
);
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
return {
|
|
320
|
+
ai: {
|
|
321
|
+
preferred: preferredAI,
|
|
322
|
+
available: aiServices,
|
|
323
|
+
fallbackChain: aiFallbackChain,
|
|
324
|
+
},
|
|
325
|
+
voice: {
|
|
326
|
+
preferred: preferredVoice,
|
|
327
|
+
available: voiceServices,
|
|
328
|
+
fallbackChain: voiceFallbackChain,
|
|
329
|
+
},
|
|
330
|
+
};
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
/**
|
|
334
|
+
* Get the default model based on available services
|
|
335
|
+
*/
|
|
336
|
+
export async function getDefaultModel(
|
|
337
|
+
ollamaEndpoint?: string,
|
|
338
|
+
preferQuality?: boolean,
|
|
339
|
+
): Promise<string> {
|
|
340
|
+
const detection = await detectServices({
|
|
341
|
+
ollamaEndpoint,
|
|
342
|
+
verbose: false,
|
|
343
|
+
preferFree: !preferQuality,
|
|
344
|
+
});
|
|
345
|
+
|
|
346
|
+
switch (detection.ai.preferred) {
|
|
347
|
+
case "ollama":
|
|
348
|
+
// Get the first available Ollama model
|
|
349
|
+
try {
|
|
350
|
+
const res = await fetch(
|
|
351
|
+
`${ollamaEndpoint || "http://localhost:11434"}/api/tags`,
|
|
352
|
+
);
|
|
353
|
+
if (res.ok) {
|
|
354
|
+
const data = await res.json();
|
|
355
|
+
const models = (data as { models?: { name: string }[] }).models || [];
|
|
356
|
+
if (models.length > 0) {
|
|
357
|
+
// Prefer llama3 if available, otherwise use the first model
|
|
358
|
+
const llama3 = models.find((m) => m.name.includes("llama3"));
|
|
359
|
+
return llama3?.name || models[0]?.name || "llama3";
|
|
360
|
+
}
|
|
361
|
+
}
|
|
362
|
+
} catch {}
|
|
363
|
+
return "llama3"; // Default Ollama model
|
|
364
|
+
|
|
365
|
+
case "openai":
|
|
366
|
+
return "gpt-4o-mini"; // Cheapest OpenAI model
|
|
367
|
+
|
|
368
|
+
case "google":
|
|
369
|
+
return "gemini-2.0-flash-exp"; // Free tier Gemini model
|
|
370
|
+
|
|
371
|
+
default:
|
|
372
|
+
// No service available, return a sensible default
|
|
373
|
+
return "llama3";
|
|
374
|
+
}
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
/**
|
|
378
|
+
* Show cost warning for paid services
|
|
379
|
+
*/
|
|
380
|
+
export function showCostWarning(service: string, model?: string): void {
|
|
381
|
+
const warnings: Record<string, string> = {
|
|
382
|
+
openai: `Using OpenAI (${model || "gpt-4o-mini"}) - This will consume API credits! Use --model llama3 for free local processing.`,
|
|
383
|
+
google: `Using Google Gemini (${model || "gemini-2.0-flash-exp"}) - This may consume API credits! Use --model llama3 for free local processing.`,
|
|
384
|
+
elevenlabs:
|
|
385
|
+
"Using ElevenLabs voices - This will consume API credits! Use --read without API key for free local TTS.",
|
|
386
|
+
};
|
|
387
|
+
|
|
388
|
+
if (warnings[service]) {
|
|
389
|
+
console.log(chalk.yellow(`\nā ļø ${warnings[service]}`));
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
/**
|
|
394
|
+
* Check if we should use only free services
|
|
395
|
+
*/
|
|
396
|
+
export function shouldUseFreeOnly(): boolean {
|
|
397
|
+
// Check environment variable
|
|
398
|
+
if (process.env.GLANCE_FREE_ONLY === "true") {
|
|
399
|
+
return true;
|
|
400
|
+
}
|
|
401
|
+
|
|
402
|
+
// Check if user has no API keys set (implicit free-only mode)
|
|
403
|
+
const hasAPIKeys = !!(
|
|
404
|
+
process.env.OPENAI_API_KEY ||
|
|
405
|
+
process.env.GEMINI_API_KEY ||
|
|
406
|
+
process.env.GOOGLE_API_KEY ||
|
|
407
|
+
process.env.ELEVENLABS_API_KEY
|
|
408
|
+
);
|
|
409
|
+
|
|
410
|
+
return !hasAPIKeys;
|
|
411
|
+
}
|