avg-nexus 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -0
- package/bin/avg-nexus.js +231 -0
- package/lib/findOllama.js +161 -0
- package/lib/patcher.js +48 -0
- package/lib/proxy.js +178 -0
- package/package.json +21 -0
package/README.md
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# avg-nexus
|
|
2
|
+
|
|
3
|
+
NexusAI Team saytini **offline** (Ollama bilan) ishlatish uchun tool.
|
|
4
|
+
|
|
5
|
+
Sayt kodi **hech qanday o'zgartirilmaydi** — OpenRouter so'rovlari avtomatik
|
|
6
|
+
local Ollama ga yo'naltiriladi.
|
|
7
|
+
|
|
8
|
+
## O'rnatish
|
|
9
|
+
|
|
10
|
+
```powershell
|
|
11
|
+
npm install -g avg-nexus
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Ishlatish
|
|
15
|
+
|
|
16
|
+
```powershell
|
|
17
|
+
cd E:\nexusai-team # loyiha papkasiga kiring
|
|
18
|
+
avg-nexus # ishga tushiring
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
Shundan keyin `http://localhost:3000` da sayt ochiladi — to'liq offline.
|
|
22
|
+
|
|
23
|
+
## Qanday ishlaydi
|
|
24
|
+
|
|
25
|
+
```
|
|
26
|
+
Sayt → openrouter.ai → [avg-nexus proxy] → Ollama (local)
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
- Ollama avtomatik topiladi (C, D, E, F... disklar tekshiriladi)
|
|
30
|
+
- O'rnatilgan modellar avtomatik aniqlanadi
|
|
31
|
+
- `.env.local` avtomatik yaratiladi
|
|
32
|
+
- Sayt kodi tegilmaydi
|
package/bin/avg-nexus.js
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
// bin/avg-nexus.js — avg-nexus CLI
|
|
3
|
+
// npm install -g avg-nexus → avg-nexus (istalgan joydan ishlatydi)
|
|
4
|
+
|
|
5
|
+
const { execSync, spawn } = require('child_process');
|
|
6
|
+
const path = require('path');
|
|
7
|
+
const fs = require('fs');
|
|
8
|
+
const os = require('os');
|
|
9
|
+
const http = require('http');
|
|
10
|
+
|
|
11
|
+
const { findOllama, findInstalledModels } = require('../lib/findOllama');
|
|
12
|
+
const { createProxy, PROXY_PORT } = require('../lib/proxy');
|
|
13
|
+
|
|
14
|
+
// ── ANSI color helpers ──────────────────────────────────────────────────────
// Each helper wraps its argument in an ANSI escape sequence and resets
// the style afterwards.
const ansi = (code) => (text) => `\x1b[${code}m${text}\x1b[0m`;

const C = {
  cyan: ansi(36),
  green: ansi(32),
  yellow: ansi(33),
  red: ansi(31),
  bold: ansi(1),
  dim: ansi(2),
};

// Prefixed console helpers — every message carries a colored `[avg-nexus]`
// tag whose color matches the severity.
const log = (msg) => console.log(`${C.cyan('[avg-nexus]')} ${msg}`);
const ok = (msg) => console.log(`${C.green('[avg-nexus]')} ${msg}`);
const warn = (msg) => console.log(`${C.yellow('[avg-nexus]')} ${msg}`);
const err = (msg) => console.error(`${C.red('[avg-nexus]')} ${msg}`);

// Promise-based delay helper.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
|
|
30
|
+
|
|
31
|
+
// ── Banner ───────────────────────────────────────────────────────────────────
|
|
32
|
+
|
|
33
|
+
console.log('');
|
|
34
|
+
console.log(C.cyan('╔══════════════════════════════════════════╗'));
|
|
35
|
+
console.log(C.cyan('║') + C.bold(' avg-nexus — NexusAI Offline ') + C.cyan('║'));
|
|
36
|
+
console.log(C.cyan('╚══════════════════════════════════════════╝'));
|
|
37
|
+
console.log('');
|
|
38
|
+
|
|
39
|
+
// ── 1. Is the current directory a NexusAI project? ──────────────────────────
// The tool must run from inside the site's repo; we detect that by the
// presence of both package.json and next.config.mjs in the cwd.

const cwd = process.cwd();
const pkgPath = path.join(cwd, 'package.json');
const nextConfig = path.join(cwd, 'next.config.mjs');

if (!fs.existsSync(pkgPath) || !fs.existsSync(nextConfig)) {
  err('NexusAI loyihasi topilmadi!');
  err('Iltimos, loyiha papkasiga kiring va qayta ishga tushiring:');
  console.log('');
  console.log(C.yellow(' cd E:\\nexusai-team'));
  console.log(C.yellow(' avg-nexus'));
  console.log('');
  process.exit(1);
}

// Read the project's package.json only to echo its name back to the user.
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
ok(`Loyiha topildi: ${C.bold(pkg.name || 'NexusAI')} (${cwd})`);

// ── 2. Locate the Ollama executable ─────────────────────────────────────────
// findOllama() probes PATH, the Windows registry, and well-known install
// folders; null means Ollama is not installed anywhere we can see.

log('Ollama qidirilmoqda...');
const ollamaInfo = findOllama();

if (!ollamaInfo) {
  err('Ollama topilmadi!');
  warn('O\'rnatish uchun: https://ollama.com/download');
  warn('O\'rnatgandan keyin avg-nexus ni qayta ishga tushiring.');
  process.exit(1);
}

ok(`Ollama topildi: ${C.dim(ollamaInfo.exe)}`);
|
|
71
|
+
|
|
72
|
+
// ── 3. Ollama ishlatyaptimi? ──────────────────────────────────────────────────
|
|
73
|
+
|
|
74
|
+
// Probes the local Ollama HTTP API (/api/tags) and resolves true when the
// daemon answers with HTTP 200; resolves false on any connection error.
async function isOllamaRunning() {
  return new Promise(resolve => {
    http.get('http://127.0.0.1:11434/api/tags', res => {
      // FIX: drain the response body so the socket is released back to the
      // agent — the original left the response unconsumed, pinning sockets.
      res.resume();
      resolve(res.statusCode === 200);
    }).on('error', () => resolve(false));
  });
}
|
|
81
|
+
|
|
82
|
+
// Polls isOllamaRunning() every 500 ms until it answers or the deadline
// passes; resolves true on success, false on timeout.
async function waitOllama(maxMs = 15000) {
  const deadline = Date.now() + maxMs;
  while (Date.now() < deadline) {
    if (await isOllamaRunning()) {
      return true;
    }
    await sleep(500);
  }
  return false;
}
|
|
90
|
+
|
|
91
|
+
// ── 3b–8. Main flow ──────────────────────────────────────────────────────────
// BUG FIX: the original used `await` at the top level of this CommonJS
// script (e.g. `if (!(await isOllamaRunning()))`), which is a SyntaxError in
// Node — top-level await only exists in ES modules. The whole flow is wrapped
// in an async IIFE so the awaits are legal and the steps still run in order.

let ollamaProcess = null; // spawned `ollama serve` child, if we started one
let nextProcess = null;   // spawned Next.js server child (set in step 8)

(async () => {
  // ── 3b. Start Ollama unless it is already serving ─────────────────────────
  if (!(await isOllamaRunning())) {
    log('Ollama ishga tushirilmoqda...');

    ollamaProcess = spawn(ollamaInfo.exe, ['serve'], {
      // On POSIX, detach so the daemon runs independently of this process.
      detached: os.platform() !== 'win32',
      stdio: 'ignore',
      env: { ...process.env },
    });

    if (os.platform() !== 'win32') ollamaProcess.unref();

    const started = await waitOllama(15000);
    if (!started) {
      err('Ollama ishga tushmadi!');
      err(`Qo'lda ishga tushirib ko'ring: "${ollamaInfo.exe}" serve`);
      process.exit(1);
    }
  }

  ok('Ollama ishlamoqda ✓');

  // ── 4. Discover installed models ──────────────────────────────────────────
  const models = findInstalledModels(ollamaInfo.exe);

  if (models.length === 0) {
    err('Hech qanday Ollama modeli topilmadi!');
    warn(`Modelni yuklab oling: "${ollamaInfo.exe}" pull llama3.2:3b`);
    process.exit(1);
  }

  ok(`Modellar (${models.length} ta): ${C.bold(models.join(', '))}`);
  log(`Ishlatiladi: ${C.bold(models[0])}`);

  // ── 5. Write .env.local pointing the site at the local proxy ──────────────
  const envPath = path.join(cwd, '.env.local');
  const envContent = `# avg-nexus tomonidan avtomatik yaratildi
# OpenRouter so'rovlari local Ollama ga yo'naltiriladi

# Proxy port (o'zgartirmang)
OPENROUTER_BASE_URL=http://127.0.0.1:${PROXY_PORT}

# Sayt URL
NEXT_PUBLIC_SITE_URL=http://localhost:3000

# Ollama modeli (avtomatik aniqlandi)
OLLAMA_MODEL=${models[0]}
`;

  fs.writeFileSync(envPath, envContent, 'utf8');
  ok(`.env.local yaratildi ✓`);

  // ── 6. Install node_modules when missing ──────────────────────────────────
  if (!fs.existsSync(path.join(cwd, 'node_modules'))) {
    log('npm install bajarilmoqda...');
    try {
      execSync('npm install', { cwd, stdio: 'inherit' });
    } catch {
      err('npm install muvaffaqiyatsiz bo\'ldi!');
      process.exit(1);
    }
  }

  // ── 7. Start the OpenRouter→Ollama proxy ──────────────────────────────────
  log(`OpenRouter proxy ishga tushirilmoqda (port ${PROXY_PORT})...`);

  try {
    await createProxy(models, { silent: false });
    ok(`Proxy tayyor → barcha OpenRouter so'rovlar Ollama ga yo'naltiriladi ✓`);
  } catch (e) {
    if (e.code === 'EADDRINUSE') {
      // Another avg-nexus instance already owns the port — reuse it.
      warn(`Port ${PROXY_PORT} band — proxy allaqachon ishlamoqda, davom etilmoqda...`);
    } else {
      err(`Proxy xatosi: ${e.message}`);
      process.exit(1);
    }
  }

  // ── 8. Launch Next.js with the fetch patcher preloaded ────────────────────
  // NODE_OPTIONS --require injects lib/patcher.js into the server process,
  // where it intercepts fetch() — the site's own code is never modified.
  const patcherPath = path.join(__dirname, '..', 'lib', 'patcher.js');

  log('NexusAI ishga tushirilmoqda...');
  console.log('');
  console.log(C.green('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
  console.log(C.bold(C.green(' ✓ Sayt: http://localhost:3000')));
  console.log(C.bold(C.green(` ✓ Model: ${models[0]}`)));
  console.log(C.bold(C.green(' ✓ Offline rejim faol')));
  console.log(C.green('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━'));
  console.log('');
  console.log(C.dim(' To\'xtatish uchun: Ctrl+C'));
  console.log('');

  // Use `next start` when a production build exists, otherwise `next dev`.
  const hasNextBuild = fs.existsSync(path.join(cwd, '.next', 'BUILD_ID'));
  const nextCmd = hasNextBuild ? 'start' : 'dev';

  if (!hasNextBuild) {
    log('Build topilmadi — dev rejimida ishga tushirilmoqda...');
  }

  nextProcess = spawn(
    os.platform() === 'win32' ? 'npx.cmd' : 'npx',
    ['next', nextCmd],
    {
      cwd,
      stdio: 'inherit',
      // FIX: recent Node releases refuse to spawn .cmd/.bat files without a
      // shell (EINVAL, CVE-2024-27980 hardening) — use a shell on Windows.
      shell: os.platform() === 'win32',
      env: {
        ...process.env,
        NODE_OPTIONS: `--require "${patcherPath}"`,
        OPENROUTER_PROXY_PORT: String(PROXY_PORT),
        OLLAMA_MODEL: models[0],
      },
    }
  );

  nextProcess.on('error', e => {
    err(`Next.js xatosi: ${e.message}`);
  });
})().catch(e => {
  // Surface unexpected failures instead of leaving an unhandled rejection.
  err(`Kutilmagan xato: ${e.message}`);
  process.exit(1);
});

// ── Shut everything down on Ctrl+C / SIGTERM ─────────────────────────────────
// nextProcess may still be null if a signal arrives before step 8 completes,
// so both children are guarded.

function shutdown() {
  if (nextProcess) nextProcess.kill();
  if (ollamaProcess) ollamaProcess.kill();
  process.exit(0);
}

process.on('SIGINT', () => {
  console.log('');
  log('To\'xtatilmoqda...');
  shutdown();
});

process.on('SIGTERM', shutdown);
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
// lib/findOllama.js
|
|
2
|
+
// Ollama'ni Windows/Mac/Linux da avtomatik topadi
|
|
3
|
+
// PATH, standart papkalar va barcha disklarni tekshiradi
|
|
4
|
+
|
|
5
|
+
const { execSync } = require('child_process');
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
const os = require('os');
|
|
9
|
+
|
|
10
|
+
// ── Enumerate Windows drive letters ─────────────────────────────────────────

/**
 * Lists available Windows drive roots ("C:", "D:", ...).
 * Tries `wmic logicaldisk` first; when that fails, probes C:–Z: directly
 * with fs.existsSync. Falls back to a default set when nothing is found.
 * @returns {string[]} drive roots, e.g. ['C:', 'D:']
 */
function getWindowsDrives() {
  let found = [];
  try {
    const listing = execSync('wmic logicaldisk get name', { stdio: 'pipe' }).toString();
    found = listing.match(/[A-Z]:/g) || [];
  } catch {
    // wmic unavailable — probe each letter from C through Z.
    for (let code = 'C'.charCodeAt(0); code <= 'Z'.charCodeAt(0); code++) {
      const letter = `${String.fromCharCode(code)}:`;
      try {
        if (fs.existsSync(`${letter}\\`)) found.push(letter);
      } catch {
        // ignore probe failures for individual drives
      }
    }
  }
  return found.length > 0 ? found : ['C:', 'D:', 'E:', 'F:'];
}
|
|
28
|
+
|
|
29
|
+
// ── Candidate locations for the Ollama executable ───────────────────────────

/**
 * Builds the ordered list of file paths where the Ollama binary might live
 * for the current platform. Windows combines every detected drive with a set
 * of well-known install folders plus per-user locations; macOS and Linux use
 * fixed system paths plus home-directory conventions.
 * @returns {string[]} absolute paths to check with fs.existsSync
 */
function getCandidatePaths() {
  const platform = os.platform();
  const out = [];

  if (platform === 'win32') {
    // Per-drive well-known install folders (order matters: checked first-hit).
    const subdirs = [
      ['ollama'],
      ['Ollama'],
      ['Program Files', 'Ollama'],
      ['Program Files (x86)', 'Ollama'],
      ['Tools', 'Ollama'],
      ['Apps', 'Ollama'],
      [],
    ];
    for (const drive of getWindowsDrives()) {
      for (const parts of subdirs) {
        out.push(path.join(drive, '\\', ...parts, 'ollama.exe'));
      }
    }
    // Per-user locations under %LOCALAPPDATA%, %APPDATA%, %USERPROFILE%.
    const { LOCALAPPDATA, APPDATA, USERPROFILE } = process.env;
    if (LOCALAPPDATA) {
      out.push(path.join(LOCALAPPDATA, 'Ollama', 'ollama.exe'));
      out.push(path.join(LOCALAPPDATA, 'Programs', 'Ollama', 'ollama.exe'));
    }
    if (APPDATA) {
      out.push(path.join(APPDATA, 'Ollama', 'ollama.exe'));
    }
    if (USERPROFILE) {
      out.push(path.join(USERPROFILE, 'AppData', 'Local', 'Ollama', 'ollama.exe'));
    }
  } else if (platform === 'darwin') {
    out.push(
      '/usr/local/bin/ollama',
      '/opt/homebrew/bin/ollama',
      '/Applications/Ollama.app/Contents/MacOS/ollama',
      path.join(os.homedir(), '.ollama', 'ollama'),
    );
  } else {
    // Linux and other POSIX platforms.
    out.push(
      '/usr/local/bin/ollama',
      '/usr/bin/ollama',
      '/opt/ollama/ollama',
      path.join(os.homedir(), '.local', 'bin', 'ollama'),
      path.join(os.homedir(), 'ollama', 'ollama'),
    );
  }

  return out;
}
|
|
79
|
+
|
|
80
|
+
// ── Locate the Ollama models directory ──────────────────────────────────────

/**
 * Finds the directory where Ollama stores its downloaded models.
 * Checks per-drive folders and %USERPROFILE%\.ollama\models on Windows,
 * ~/.ollama/models and /usr/share/ollama/models elsewhere.
 * @returns {string|null} the first existing directory, or null
 */
function findOllamaModelsDir() {
  const candidates = [];

  if (os.platform() === 'win32') {
    for (const drive of getWindowsDrives()) {
      candidates.push(path.join(drive, '\\', 'Ollama', 'models'));
      candidates.push(path.join(drive, '\\', 'ollama', 'models'));
    }
    if (process.env.USERPROFILE) {
      candidates.push(path.join(process.env.USERPROFILE, '.ollama', 'models'));
    }
  } else {
    candidates.push(path.join(os.homedir(), '.ollama', 'models'));
    candidates.push('/usr/share/ollama/models');
  }

  return candidates.find(dir => fs.existsSync(dir)) ?? null;
}
|
|
109
|
+
|
|
110
|
+
// ── Main lookup ─────────────────────────────────────────────────────────────

// Locates the Ollama executable. Probes, in order:
//   1. the shell PATH (`where` / `which`),
//   2. the Windows App Paths registry entry created by the installer,
//   3. a scan of well-known install directories (getCandidatePaths).
// Returns { exe, foundIn, modelsDir } on success, or null when nothing
// is found. Each probe failure is swallowed so the next strategy runs.
function findOllama() {
  // 1. PATH lookup — cheapest check; take the first line of the output.
  try {
    const cmd = os.platform() === 'win32' ? 'where ollama' : 'which ollama';
    const result = execSync(cmd, { stdio: 'pipe' }).toString().trim().split('\n')[0];
    if (result && fs.existsSync(result)) {
      return { exe: result, foundIn: 'PATH', modelsDir: findOllamaModelsDir() };
    }
  } catch {}

  // 2. Windows registry — the App Paths key holds the install location.
  if (os.platform() === 'win32') {
    try {
      const reg = execSync(
        'reg query "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\ollama.exe" /ve',
        { stdio: 'pipe' }
      ).toString();
      // Default value line looks like: "(Default)    REG_SZ    C:\...\ollama.exe"
      const match = reg.match(/REG_SZ\s+(.+)/);
      if (match) {
        const exePath = match[1].trim();
        if (fs.existsSync(exePath)) {
          return { exe: exePath, foundIn: 'Registry', modelsDir: findOllamaModelsDir() };
        }
      }
    } catch {}
  }

  // 3. Scan the per-platform candidate install locations.
  for (const candidate of getCandidatePaths()) {
    if (fs.existsSync(candidate)) {
      return { exe: candidate, foundIn: 'scan', modelsDir: findOllamaModelsDir() };
    }
  }

  return null;
}
|
|
148
|
+
|
|
149
|
+
// ── Discover installed models ───────────────────────────────────────────────

/**
 * Returns the names of locally installed Ollama models by parsing the
 * table printed by `ollama list` (first column of each data row).
 * @param {string} ollamaExe - absolute path to the ollama executable
 * @returns {string[]} model names; [] when the command fails or lists nothing
 */
function findInstalledModels(ollamaExe) {
  try {
    const out = execSync(`"${ollamaExe}" list`, { stdio: 'pipe' }).toString();
    return out
      .split('\n')
      // Trim first so CRLF endings and padded lines are normalized, then drop
      // blanks and the table header. (FIX: the original `startsWith('NAME')`
      // check missed a header preceded by whitespace.)
      .map(line => line.trim())
      .filter(line => line && !/^NAME\b/i.test(line))
      .map(line => line.split(/\s+/)[0])
      .filter(Boolean);
  } catch {
    // `ollama list` failed (missing binary, daemon down, …) — report no
    // models and let the caller decide how to surface it.
    return [];
  }
}
|
|
160
|
+
|
|
161
|
+
module.exports = { findOllama, findInstalledModels, findOllamaModelsDir };
|
package/lib/patcher.js
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
// lib/patcher.js
// Loaded via Node's --require flag before the Next.js server starts.
// Monkey-patches the global fetch() so any request aimed at openrouter.ai is
// transparently redirected to the local proxy
// (http://127.0.0.1:<OPENROUTER_PROXY_PORT>). The site's code is never edited.

const PROXY_PORT = process.env.OPENROUTER_PROXY_PORT || '11435';
const PROXY_BASE = `http://127.0.0.1:${PROXY_PORT}`;
const OPENROUTER = 'openrouter.ai';

// ── Patch the native fetch ──────────────────────────────────────────────────

const _originalFetch = globalThis.fetch;

globalThis.fetch = async function patchedFetch(input, init) {
  // Normalize the target URL out of string / URL / Request inputs.
  const url = typeof input === 'string' ? input
            : input instanceof URL ? input.href
            : input && input.url;

  // Only requests headed for openrouter.ai are intercepted.
  if (url && url.includes(OPENROUTER)) {
    const redirected = url.replace(/https?:\/\/openrouter\.ai/, PROXY_BASE);

    if (typeof input === 'string' || input instanceof URL) {
      // FIX: URL instances were previously funneled through the Request
      // branch, where .method/.headers/.body do not exist — treat them
      // like plain string URLs instead.
      input = redirected;
    } else {
      // Request object — rebuild it against the proxy URL, preserving
      // method, headers and body (including the Authorization header).
      // FIX: when a Request carries a body stream, Node's fetch (undici)
      // requires `duplex: 'half'`; without it the rebuild throws on every
      // intercepted POST.
      input = new Request(redirected, {
        method: input.method,
        headers: input.headers,
        body: input.body,
        ...(input.body ? { duplex: 'half' } : {}),
      });
    }
    return _originalFetch(input, init ? { ...init } : init);
  }

  // All other requests pass through untouched.
  return _originalFetch(input, init);
};
|
package/lib/proxy.js
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
// lib/proxy.js
// Intercepts OpenRouter-shaped API calls and serves them from local Ollama.
// The site is none the wiser — it believes it is talking to OpenRouter.

const http = require('http');
const https = require('https');

// Loopback port the fake-OpenRouter proxy listens on.
const PROXY_PORT = 11435;
// Base URL of the local Ollama daemon.
// NOTE(review): callOllama() below hardcodes 127.0.0.1:11434 instead of
// deriving it from this constant — keep the two in sync if either changes.
const OLLAMA_BASE = 'http://127.0.0.1:11434';
|
|
10
|
+
|
|
11
|
+
// ── Map OpenRouter model names to local Ollama models ───────────────────────
// OpenRouter names look like "meta-llama/llama-3.3-70b-instruct:free";
// Ollama names look like "llama3.2:3b".

/**
 * Chooses the local Ollama model to serve a request that named an
 * OpenRouter model. Matching order: exact / prefix match, then a
 * case-insensitive match on the bare name after the provider prefix,
 * then the first installed model.
 * @param {string} openrouterModel - model name from the incoming request
 * @param {string[]} localModels - installed Ollama model names
 * @returns {string} the Ollama model name to use
 */
function mapModel(openrouterModel, localModels) {
  // 1. Exact or prefix match against an installed model.
  for (const m of localModels) {
    if (openrouterModel === m || openrouterModel.startsWith(m)) return m;
  }

  // 2. Case-insensitive match on the name after the provider prefix,
  //    e.g. "ollama/Llama3.2:3b" → "llama3.2:3b".
  const bare = openrouterModel.split('/').pop().toLowerCase();
  for (const m of localModels) {
    const local = m.toLowerCase();
    if (bare === local || bare.startsWith(local)) return m;
  }

  // 3. Default: first installed model.
  if (localModels.length > 0) return localModels[0];

  // Callers guarantee a non-empty list; kept as a last-resort fallback.
  return 'gemma4:4b';
}
|
|
26
|
+
|
|
27
|
+
// ── Request-body accumulation helper ────────────────────────────────────────

/**
 * Buffers an incoming request stream in full.
 * @param {import('http').IncomingMessage} req - readable request stream
 * @returns {Promise<Buffer>} the concatenated body bytes
 */
function collectBody(req) {
  return new Promise((resolve, reject) => {
    const parts = [];
    req.on('data', (chunk) => {
      parts.push(chunk);
    });
    req.on('error', reject);
    req.on('end', () => {
      resolve(Buffer.concat(parts));
    });
  });
}
|
|
37
|
+
|
|
38
|
+
// ── Forward a request to the local Ollama daemon ────────────────────────────

/**
 * POSTs a JSON body to the local Ollama daemon (127.0.0.1:11434) and buffers
 * the full response.
 * @param {string} path - request path, e.g. '/v1/chat/completions'
 * @param {object} body - JSON-serializable request payload
 * @returns {Promise<{status: number, headers: object, body: Buffer}>}
 */
function callOllama(path, body) {
  return new Promise((resolve, reject) => {
    const payload = JSON.stringify(body);

    const request = http.request(
      {
        hostname: '127.0.0.1',
        port: 11434,
        path,
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Content-Length': Buffer.byteLength(payload),
        },
      },
      (response) => {
        const parts = [];
        response.on('data', (chunk) => parts.push(chunk));
        response.on('end', () => {
          resolve({
            status: response.statusCode,
            headers: response.headers,
            body: Buffer.concat(parts),
          });
        });
      }
    );

    request.on('error', reject);
    request.end(payload);
  });
}
|
|
69
|
+
|
|
70
|
+
// ── Proxy server ──────────────────────────────────────────────────────────

// Starts an HTTP server on 127.0.0.1:PROXY_PORT that impersonates the
// OpenRouter API:
//   * GET /v1/models           → synthesized list of installed Ollama models
//   * POST */chat/completions  → translated and forwarded to local Ollama
//   * anything else            → proxied through to the real openrouter.ai
// Resolves with the server instance once listening; rejects on listen errors
// (notably EADDRINUSE, which the CLI treats as "already running").
function createProxy(localModels, { silent = false } = {}) {
  const log = silent ? () => {} : (m) => console.log(`\x1b[35m[proxy]\x1b[0m ${m}`);

  const server = http.createServer(async (req, res) => {
    // CORS headers so browser-originated requests are accepted.
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Methods', 'GET,POST,OPTIONS');
    res.setHeader('Access-Control-Allow-Headers', 'Content-Type,Authorization,HTTP-Referer,X-Title');

    if (req.method === 'OPTIONS') {
      res.writeHead(204);
      res.end();
      return;
    }

    const url = req.url || '/';

    // ── /v1/models → advertise the installed Ollama models ────────────
    // Shaped like OpenRouter's model list; pricing is zeroed out and the
    // context_length is a fixed placeholder.
    if (url === '/v1/models' || url === '/api/v1/models') {
      const data = {
        object: 'list',
        data: localModels.map(id => ({
          id,
          object: 'model',
          created: Math.floor(Date.now() / 1000),
          owned_by: 'ollama',
          name: id,
          context_length: 8192,
          pricing: { prompt: '0', completion: '0' },
        })),
      };
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify(data));
      return;
    }

    // ── /v1/chat/completions → forward to Ollama ───────────────────────
    // Maps the requested model to an installed one and forces stream: false
    // (responses are buffered, never streamed back).
    if (url.includes('/chat/completions')) {
      try {
        const raw = await collectBody(req);
        const reqBody = JSON.parse(raw.toString());

        const ollamaModel = mapModel(reqBody.model || '', localModels);
        log(`${reqBody.model} → ${ollamaModel}`);

        const ollamaBody = {
          model: ollamaModel,
          messages: reqBody.messages,
          max_tokens: reqBody.max_tokens || 2048,
          temperature: reqBody.temperature || 0.7,
          stream: false,
        };

        const resp = await callOllama('/v1/chat/completions', ollamaBody);

        res.writeHead(resp.status, { 'Content-Type': 'application/json' });
        res.end(resp.body);

      } catch (err) {
        // Ollama unreachable or bad JSON — answer with an OpenAI-style error.
        log(`Error: ${err.message}`);
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({
          error: {
            message: `Ollama error: ${err.message}`,
            type: 'proxy_error',
          }
        }));
      }
      return;
    }

    // ── Everything else → pass through to the real openrouter.ai ──────
    // NOTE(review): this branch performs a live network call — it defeats
    // full-offline use for unrecognized endpoints; confirm that is intended.
    try {
      const raw = await collectBody(req);
      const proxyReq = https.request({
        hostname: 'openrouter.ai',
        path: url,
        method: req.method,
        headers: { ...req.headers, host: 'openrouter.ai' },
      }, (proxyRes) => {
        res.writeHead(proxyRes.statusCode, proxyRes.headers);
        proxyRes.pipe(res);
      });
      proxyReq.on('error', () => {
        res.writeHead(502, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'Proxy error' }));
      });
      if (raw.length) proxyReq.write(raw);
      proxyReq.end();
    } catch {
      res.writeHead(502);
      res.end();
    }
  });

  // Bind to loopback only; resolve once the listener is live.
  return new Promise((resolve, reject) => {
    server.listen(PROXY_PORT, '127.0.0.1', () => {
      log(`Proxy tayyor → localhost:${PROXY_PORT}`);
      resolve(server);
    });
    server.on('error', reject);
  });
}
|
|
177
|
+
|
|
178
|
+
module.exports = { createProxy, PROXY_PORT };
|
package/package.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "avg-nexus",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Offline proxy for NexusAI Team — runs your site with local Ollama instead of OpenRouter",
|
|
5
|
+
"main": "lib/index.js",
|
|
6
|
+
"bin": {
|
|
7
|
+
"avg-nexus": "./bin/avg-nexus.js"
|
|
8
|
+
},
|
|
9
|
+
"scripts": {
|
|
10
|
+
"test": "node bin/avg-nexus.js"
|
|
11
|
+
},
|
|
12
|
+
"dependencies": {
|
|
13
|
+
"http-proxy": "^1.18.1",
|
|
14
|
+
"node-fetch": "^2.7.0"
|
|
15
|
+
},
|
|
16
|
+
"keywords": ["ollama", "openrouter", "proxy", "nexusai", "offline"],
|
|
17
|
+
"license": "MIT",
|
|
18
|
+
"engines": {
|
|
19
|
+
"node": ">=18.0.0"
|
|
20
|
+
}
|
|
21
|
+
}
|