veil-browser 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai.js +35 -6
- package/dist/index.js +22 -4
- package/package.json +1 -1
package/dist/ai.js
CHANGED
|
@@ -15,8 +15,8 @@ async function loadConfig() {
|
|
|
15
15
|
}
|
|
16
16
|
}
|
|
17
17
|
function getLLMConfig(config) {
|
|
18
|
-
// Priority: config file > env vars
|
|
19
|
-
if (config.llm?.
|
|
18
|
+
// Priority: config file > env vars > ollama auto-detect
|
|
19
|
+
if (config.llm?.provider)
|
|
20
20
|
return config.llm;
|
|
21
21
|
const openaiKey = process.env.OPENAI_API_KEY;
|
|
22
22
|
if (openaiKey)
|
|
@@ -27,7 +27,10 @@ function getLLMConfig(config) {
|
|
|
27
27
|
const openrouterKey = process.env.OPENROUTER_API_KEY;
|
|
28
28
|
if (openrouterKey)
|
|
29
29
|
return { provider: 'openrouter', apiKey: openrouterKey, model: 'openai/gpt-4o-mini' };
|
|
30
|
-
|
|
30
|
+
// Ollama fallback — no key needed
|
|
31
|
+
const ollamaUrl = process.env.OLLAMA_URL ?? 'http://localhost:11434';
|
|
32
|
+
const ollamaModel = process.env.OLLAMA_MODEL ?? 'llava';
|
|
33
|
+
return { provider: 'ollama', model: ollamaModel, baseUrl: ollamaUrl };
|
|
31
34
|
}
|
|
32
35
|
// Get a compact accessibility snapshot of the page for LLM consumption
|
|
33
36
|
async function getPageSnapshot(page) {
|
|
@@ -103,6 +106,29 @@ Instruction: ${instruction}
|
|
|
103
106
|
|
|
104
107
|
Return JSON array of action steps:`;
|
|
105
108
|
let response;
|
|
109
|
+
if (llm.provider === 'ollama') {
|
|
110
|
+
const baseUrl = llm.baseUrl ?? 'http://localhost:11434';
|
|
111
|
+
response = await fetch(`${baseUrl}/api/chat`, {
|
|
112
|
+
method: 'POST',
|
|
113
|
+
headers: { 'Content-Type': 'application/json' },
|
|
114
|
+
body: JSON.stringify({
|
|
115
|
+
model: llm.model,
|
|
116
|
+
stream: false,
|
|
117
|
+
messages: [
|
|
118
|
+
{ role: 'system', content: systemPrompt },
|
|
119
|
+
{ role: 'user', content: userPrompt },
|
|
120
|
+
],
|
|
121
|
+
}),
|
|
122
|
+
});
|
|
123
|
+
if (!response.ok)
|
|
124
|
+
throw new Error(`Ollama error: ${response.status} ${await response.text()}`);
|
|
125
|
+
const data = await response.json();
|
|
126
|
+
const content = data.message?.content ?? '';
|
|
127
|
+
const jsonMatch = content.match(/\[[\s\S]*\]/);
|
|
128
|
+
if (!jsonMatch)
|
|
129
|
+
throw new Error('Ollama returned no valid JSON array');
|
|
130
|
+
return JSON.parse(jsonMatch[0]);
|
|
131
|
+
}
|
|
106
132
|
if (llm.provider === 'openai' || llm.provider === 'openrouter') {
|
|
107
133
|
const baseUrl = llm.provider === 'openrouter'
|
|
108
134
|
? 'https://openrouter.ai/api/v1'
|
|
@@ -130,7 +156,7 @@ Return JSON array of action steps:`;
|
|
|
130
156
|
method: 'POST',
|
|
131
157
|
headers: {
|
|
132
158
|
'Content-Type': 'application/json',
|
|
133
|
-
'x-api-key': llm.apiKey,
|
|
159
|
+
'x-api-key': llm.apiKey ?? '',
|
|
134
160
|
'anthropic-version': '2023-06-01',
|
|
135
161
|
},
|
|
136
162
|
body: JSON.stringify({
|
|
@@ -212,8 +238,11 @@ export async function aiAct(page, instruction, opts = {}) {
|
|
|
212
238
|
const config = await loadConfig();
|
|
213
239
|
const llm = getLLMConfig(config);
|
|
214
240
|
if (!llm) {
|
|
215
|
-
throw new Error('No LLM configured.
|
|
216
|
-
'
|
|
241
|
+
throw new Error('No LLM configured. Options:\n' +
|
|
242
|
+
' Ollama (local): veil config llm.provider ollama && veil config llm.model llama3.2\n' +
|
|
243
|
+
' OpenAI: veil config llm.provider openai && veil config llm.apiKey sk-...\n' +
|
|
244
|
+
' Anthropic: veil config llm.provider anthropic && veil config llm.apiKey sk-ant-...\n' +
|
|
245
|
+
' OpenRouter: veil config llm.provider openrouter && veil config llm.apiKey sk-or-...');
|
|
217
246
|
}
|
|
218
247
|
const spinner = ora({ text: '🧠 Analyzing page...', color: 'cyan' }).start();
|
|
219
248
|
try {
|
package/dist/index.js
CHANGED
|
@@ -72,10 +72,8 @@ const sessionCmd = program.command('session').description('Manage saved sessions
|
|
|
72
72
|
sessionCmd.command('list').description('List all saved sessions').action(sessionListCommand);
|
|
73
73
|
// --- Config ---
|
|
74
74
|
program
|
|
75
|
-
.command('config')
|
|
76
|
-
.description('Set veil configuration
|
|
77
|
-
.argument('<key>', 'Config key, e.g. captcha.provider')
|
|
78
|
-
.argument('<value>', 'Config value')
|
|
75
|
+
.command('config [key] [value]')
|
|
76
|
+
.description('Set or show veil configuration')
|
|
79
77
|
.action(async (key, value) => {
|
|
80
78
|
const { promises: fs } = await import('fs');
|
|
81
79
|
const { homedir } = await import('os');
|
|
@@ -87,6 +85,26 @@ program
|
|
|
87
85
|
config = JSON.parse(await fs.readFile(configFile, 'utf-8'));
|
|
88
86
|
}
|
|
89
87
|
catch { }
|
|
88
|
+
// Show config
|
|
89
|
+
if (!key || key === 'show') {
|
|
90
|
+
console.log(chalk.cyan('\n🕶️ veil config\n'));
|
|
91
|
+
console.log(chalk.bold(' LLM (for veil act):'));
|
|
92
|
+
console.log(` provider: ${chalk.green(config.llm?.provider ?? 'auto-detect')}`);
|
|
93
|
+
console.log(` model: ${chalk.green(config.llm?.model ?? 'auto-detect')}`);
|
|
94
|
+
console.log(` apiKey: ${config.llm?.apiKey ? chalk.green('set') : chalk.gray('not set')}`);
|
|
95
|
+
console.log(` baseUrl: ${chalk.green(config.llm?.baseUrl ?? 'http://localhost:11434 (ollama default)')}`);
|
|
96
|
+
console.log(chalk.bold('\n CAPTCHA:'));
|
|
97
|
+
console.log(` provider: ${chalk.green(config.captcha?.provider ?? 'local (FlareSolverr + Ollama)')}`);
|
|
98
|
+
console.log(` apiKey: ${config.captcha?.apiKey ? chalk.green('set') : chalk.gray('not set')}`);
|
|
99
|
+
console.log(chalk.bold('\n Quick setup:'));
|
|
100
|
+
console.log(chalk.gray(' Ollama: veil config llm.provider ollama'));
|
|
101
|
+
console.log(chalk.gray(' Model: veil config llm.model llama3.2'));
|
|
102
|
+
console.log(chalk.gray('   Ollama URL: veil config llm.baseUrl http://localhost:11434'));
|
|
103
|
+
console.log(chalk.gray(' OpenAI: veil config llm.provider openai && veil config llm.apiKey sk-...'));
|
|
104
|
+
console.log('');
|
|
105
|
+
return;
|
|
106
|
+
}
|
|
107
|
+
// Set config key
|
|
90
108
|
const parts = key.split('.');
|
|
91
109
|
let obj = config;
|
|
92
110
|
for (let i = 0; i < parts.length - 1; i++) {
|