@antodevs/groundtruth 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +130 -0
- package/assets/banner.png +0 -0
- package/index.js +32 -0
- package/package.json +48 -0
- package/specification.yaml +143 -0
- package/src/cache.js +107 -0
- package/src/circuit-breaker.js +63 -0
- package/src/cli.js +58 -0
- package/src/env.js +120 -0
- package/src/http-agent.js +21 -0
- package/src/inject.js +93 -0
- package/src/logger.js +47 -0
- package/src/packages.js +87 -0
- package/src/proxy.js +164 -0
- package/src/search.js +157 -0
- package/src/state.js +37 -0
- package/src/utils/atomic-write.js +58 -0
- package/src/watcher.js +146 -0
package/src/env.js
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module env
|
|
3
|
+
* @description Automatically writes ANTHROPIC_BASE_URL configuration into zsh/fish/bash shell rc (dot) files for the auth session.
|
|
4
|
+
*/
|
|
5
|
+
import fs from 'fs/promises';
|
|
6
|
+
import { existsSync } from 'fs';
|
|
7
|
+
import path from 'path';
|
|
8
|
+
import os from 'os';
|
|
9
|
+
import { chalk, ts, LOG_WARN, LOG_OK, log } from './logger.js';
|
|
10
|
+
import { atomicWrite } from './utils/atomic-write.js';
|
|
11
|
+
|
|
12
|
+
// ─── Setup shell environment ─────────────────────────
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* @description Applica environment variables a vari rc profile bash fish unix
|
|
16
|
+
* @param {number} p - HTTP default porta su local instance target
|
|
17
|
+
* @returns {Promise<void>} Operazione asincrona
|
|
18
|
+
*/
|
|
19
|
+
export async function autoSetEnv(p) {
|
|
20
|
+
if (process.platform === 'win32') return;
|
|
21
|
+
try {
|
|
22
|
+
const targetUrl = `http://localhost:${p}`;
|
|
23
|
+
if (process.env.ANTHROPIC_BASE_URL === targetUrl) return;
|
|
24
|
+
|
|
25
|
+
const homeDir = os.homedir();
|
|
26
|
+
// Test exist pattern specifico shell config di fish locale
|
|
27
|
+
const isFish = process.env.SHELL?.includes('fish') || existsSync(`${homeDir}/.config/fish/config.fish`);
|
|
28
|
+
let foundAny = false;
|
|
29
|
+
const modifiedFiles = [];
|
|
30
|
+
|
|
31
|
+
if (isFish) {
|
|
32
|
+
const fishConfigFile = path.join(homeDir, '.config', 'fish', 'config.fish');
|
|
33
|
+
await fs.mkdir(path.dirname(fishConfigFile), { recursive: true });
|
|
34
|
+
foundAny = true;
|
|
35
|
+
try {
|
|
36
|
+
let content = existsSync(fishConfigFile) ? await fs.readFile(fishConfigFile, 'utf8') : '';
|
|
37
|
+
const lines = content ? content.split('\n') : [];
|
|
38
|
+
let modified = false, foundExport = false;
|
|
39
|
+
|
|
40
|
+
const newLines = lines.map(line => {
|
|
41
|
+
if (line.trim().startsWith('set -gx ANTHROPIC_BASE_URL')) {
|
|
42
|
+
foundExport = true;
|
|
43
|
+
if (line.trim() !== `set -gx ANTHROPIC_BASE_URL ${targetUrl}`) { modified = true; return `set -gx ANTHROPIC_BASE_URL ${targetUrl}`; }
|
|
44
|
+
}
|
|
45
|
+
return line;
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
if (!foundExport) {
|
|
49
|
+
if (newLines.length > 0 && newLines[newLines.length - 1] === '') {
|
|
50
|
+
newLines[newLines.length - 1] = `set -gx ANTHROPIC_BASE_URL ${targetUrl}`;
|
|
51
|
+
newLines.push('');
|
|
52
|
+
} else {
|
|
53
|
+
newLines.push(`set -gx ANTHROPIC_BASE_URL ${targetUrl}`);
|
|
54
|
+
}
|
|
55
|
+
modified = true;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (modified) {
|
|
59
|
+
await atomicWrite(fishConfigFile, newLines.join('\n'));
|
|
60
|
+
modifiedFiles.push(fishConfigFile);
|
|
61
|
+
}
|
|
62
|
+
} catch (e) {
|
|
63
|
+
log(LOG_WARN, chalk.yellow, chalk.white('cannot write fish config') + ` → ${chalk.yellow(e.message)}`);
|
|
64
|
+
}
|
|
65
|
+
} else {
|
|
66
|
+
const shellFiles = ['.zshrc', '.bashrc', '.bash_profile', '.profile'].map(f => path.join(homeDir, f));
|
|
67
|
+
for (const file of shellFiles) {
|
|
68
|
+
if (!existsSync(file)) continue;
|
|
69
|
+
foundAny = true;
|
|
70
|
+
try {
|
|
71
|
+
const content = await fs.readFile(file, 'utf8');
|
|
72
|
+
const lines = content.split('\n');
|
|
73
|
+
let modified = false, foundExport = false;
|
|
74
|
+
|
|
75
|
+
const newLines = lines.map(line => {
|
|
76
|
+
if (line.trim().startsWith('export ANTHROPIC_BASE_URL=')) {
|
|
77
|
+
foundExport = true;
|
|
78
|
+
if (line.trim() !== `export ANTHROPIC_BASE_URL=${targetUrl}`) { modified = true; return `export ANTHROPIC_BASE_URL=${targetUrl}`; }
|
|
79
|
+
}
|
|
80
|
+
return line;
|
|
81
|
+
});
|
|
82
|
+
|
|
83
|
+
if (!foundExport) {
|
|
84
|
+
if (newLines.length > 0 && newLines[newLines.length - 1] === '') {
|
|
85
|
+
newLines[newLines.length - 1] = `export ANTHROPIC_BASE_URL=${targetUrl}`;
|
|
86
|
+
newLines.push('');
|
|
87
|
+
} else {
|
|
88
|
+
newLines.push(`export ANTHROPIC_BASE_URL=${targetUrl}`);
|
|
89
|
+
}
|
|
90
|
+
modified = true;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
if (modified) {
|
|
94
|
+
await atomicWrite(file, newLines.join('\n'));
|
|
95
|
+
modifiedFiles.push(file);
|
|
96
|
+
}
|
|
97
|
+
} catch (e) {
|
|
98
|
+
log(LOG_WARN, chalk.yellow, chalk.white(`cannot write ${path.basename(file)}`) + ` → ${chalk.yellow(e.message)}`);
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
if (!foundAny) {
|
|
104
|
+
const hint = isFish
|
|
105
|
+
? `set -gx ANTHROPIC_BASE_URL ${targetUrl}`
|
|
106
|
+
: `export ANTHROPIC_BASE_URL=${targetUrl}`;
|
|
107
|
+
log(LOG_WARN, chalk.yellow, chalk.white('no shell config found') + ` → ${chalk.yellow('add manually: ' + hint)}`);
|
|
108
|
+
} else if (modifiedFiles.length > 0) {
|
|
109
|
+
// Segnala write su standard node path a console utente post process
|
|
110
|
+
modifiedFiles.forEach(file => {
|
|
111
|
+
const rel = file.replace(homeDir, '~');
|
|
112
|
+
log(LOG_OK, chalk.green, chalk.white('ANTHROPIC_BASE_URL written to') + ' ' + chalk.white(rel));
|
|
113
|
+
});
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
process.env.ANTHROPIC_BASE_URL = targetUrl;
|
|
117
|
+
} catch (err) {
|
|
118
|
+
log(LOG_WARN, chalk.yellow, chalk.white('env setup error') + ` → ${chalk.yellow(err.message)}`);
|
|
119
|
+
}
|
|
120
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module http-agent
|
|
3
|
+
* @description Pool manager per connessioni API http e requests HTTPS in proxy context.
|
|
4
|
+
*/
|
|
5
|
+
import { Agent as HttpsAgent } from 'https';
|
|
6
|
+
import { Agent as HttpAgent } from 'http';
|
|
7
|
+
|
|
8
|
+
// Evita timeout TCP handshakes costanti per network node-fetch requests proxy target
|
|
9
|
+
export const httpsAgent = new HttpsAgent({
|
|
10
|
+
keepAlive: true,
|
|
11
|
+
maxSockets: 10,
|
|
12
|
+
maxFreeSockets: 5,
|
|
13
|
+
timeout: 5000,
|
|
14
|
+
});
|
|
15
|
+
|
|
16
|
+
export const httpAgent = new HttpAgent({
|
|
17
|
+
keepAlive: true,
|
|
18
|
+
maxSockets: 10,
|
|
19
|
+
maxFreeSockets: 5,
|
|
20
|
+
timeout: 5000,
|
|
21
|
+
});
|
package/src/inject.js
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module inject
|
|
3
|
+
* @description Manages insertion, update, and cleanup of managed skill blocks in GEMINI.md files.
|
|
4
|
+
*/
|
|
5
|
+
import fs from 'fs/promises';
|
|
6
|
+
import { existsSync } from 'fs';
|
|
7
|
+
import path from 'path';
|
|
8
|
+
import os from 'os';
|
|
9
|
+
import { chalk, log, LOG_WARN, LOG_REFRESH } from './logger.js';
|
|
10
|
+
import { atomicWrite } from './utils/atomic-write.js';
|
|
11
|
+
|
|
12
|
+
// ─── Document injection rules ────────────────────────
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* @description Aggiorna block target per block id customizzati in hash
|
|
16
|
+
* @param {string} filePath - Absolute path write operation target rule doc file
|
|
17
|
+
* @param {string} content - Content plain formattato markdown text raw update
|
|
18
|
+
* @param {string} blockId - identificativo 8 char associato
|
|
19
|
+
* @returns {Promise<void>}
|
|
20
|
+
*/
|
|
21
|
+
export async function injectBlock(filePath, content, blockId) {
|
|
22
|
+
let fileContent = '';
|
|
23
|
+
if (existsSync(filePath)) {
|
|
24
|
+
fileContent = await fs.readFile(filePath, 'utf8');
|
|
25
|
+
}
|
|
26
|
+
const startTag = `<!-- groundtruth:block-${blockId}:start -->`;
|
|
27
|
+
const endTag = `<!-- groundtruth:block-${blockId}:end -->`;
|
|
28
|
+
const block = `${startTag}\n${content.trim()}\n${endTag}`;
|
|
29
|
+
|
|
30
|
+
const startIndex = fileContent.indexOf(startTag);
|
|
31
|
+
const endIndex = fileContent.indexOf(endTag);
|
|
32
|
+
|
|
33
|
+
if (startIndex !== -1 && endIndex !== -1 && endIndex > startIndex) {
|
|
34
|
+
fileContent = fileContent.slice(0, startIndex) + block + fileContent.slice(endIndex + endTag.length);
|
|
35
|
+
} else {
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
await atomicWrite(filePath, fileContent);
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* @description Identifica blocchi dipendenze vecchi invalidati e li cancella dal file
|
|
43
|
+
* @param {string} filePath - File path workspace markdown context rules locale
|
|
44
|
+
* @param {Set} activeBlockIds - ids attivi elaborati nel watcher logic timer task loop cycle
|
|
45
|
+
* @returns {Promise<void>}
|
|
46
|
+
*/
|
|
47
|
+
export async function removeStaleBlocks(filePath, activeBlockIds) {
|
|
48
|
+
if (!existsSync(filePath)) return;
|
|
49
|
+
let fileContent = await fs.readFile(filePath, 'utf8');
|
|
50
|
+
const regex = /<!-- groundtruth:block-(\w+):start -->[\s\S]*?<!-- groundtruth:block-\w+:end -->/g;
|
|
51
|
+
|
|
52
|
+
let modified = false;
|
|
53
|
+
fileContent = fileContent.replace(regex, (match, blockId) => {
|
|
54
|
+
if (!activeBlockIds.has(blockId)) {
|
|
55
|
+
log(LOG_REFRESH, chalk.yellow, chalk.white(`removed stale block ${blockId} from GEMINI.md`));
|
|
56
|
+
modified = true;
|
|
57
|
+
return '';
|
|
58
|
+
}
|
|
59
|
+
return match;
|
|
60
|
+
});
|
|
61
|
+
|
|
62
|
+
if (modified) {
|
|
63
|
+
fileContent = fileContent.replace(/\n{3,}/g, '\n\n').trim() + '\n';
|
|
64
|
+
await atomicWrite(filePath, fileContent);
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
|
|
69
|
+
* @description Interfaccia logic per sincronizzare multiple blocks local workspace skill context
|
|
70
|
+
* @param {Array} blocks - Blocchi aggiornati
|
|
71
|
+
* @returns {Promise<void>}
|
|
72
|
+
*/
|
|
73
|
+
export async function updateGeminiFiles(blocks) {
|
|
74
|
+
const homeDir = os.homedir();
|
|
75
|
+
const rulesDir = path.join(process.cwd(), '.gemini');
|
|
76
|
+
await fs.mkdir(rulesDir, { recursive: true });
|
|
77
|
+
const skillFile = path.join(rulesDir, 'GEMINI.md');
|
|
78
|
+
|
|
79
|
+
const globalRulesDir = path.join(homeDir, '.gemini');
|
|
80
|
+
await fs.mkdir(globalRulesDir, { recursive: true });
|
|
81
|
+
const globalSkillFile = path.join(globalRulesDir, 'GEMINI.md');
|
|
82
|
+
|
|
83
|
+
const samePath = path.resolve(globalSkillFile) === path.resolve(skillFile);
|
|
84
|
+
|
|
85
|
+
for (const b of blocks) {
|
|
86
|
+
if (samePath) {
|
|
87
|
+
await injectBlock(skillFile, b.workspaceContent, b.blockId);
|
|
88
|
+
} else {
|
|
89
|
+
await injectBlock(globalSkillFile, b.globalContent, b.blockId);
|
|
90
|
+
await injectBlock(skillFile, b.workspaceContent, b.blockId);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
}
|
package/src/logger.js
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module logger
|
|
3
|
+
* @description Utilities per log formattati con chalk e timestamp.
|
|
4
|
+
*/
|
|
5
|
+
import chalk from 'chalk';
|
|
6
|
+
|
|
7
|
+
// ─── Constants ───────────────────────────────────────
|
|
8
|
+
|
|
9
|
+
export const LOG_OK = '✓';
|
|
10
|
+
export const LOG_WARN = '⚠';
|
|
11
|
+
export const LOG_BOLT = '⚡';
|
|
12
|
+
export const LOG_REFRESH = '↻';
|
|
13
|
+
export const LOG_DOT = '◆';
|
|
14
|
+
export const LOG_STAR = '✻';
|
|
15
|
+
|
|
16
|
+
// ─── Formattazione ───────────────────────────────────
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* @description Genera timestamp corrente per i log in formato loc-IT.
|
|
20
|
+
* @returns {string} Timestamp grigio
|
|
21
|
+
*/
|
|
22
|
+
export function ts() {
|
|
23
|
+
return chalk.gray(new Date().toLocaleTimeString('it-IT'));
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* @description Crea una label allineata per l'output CLI di startup.
|
|
28
|
+
* @param {string} sym - Simbolo bullet
|
|
29
|
+
* @param {string} name - Nome del campo label
|
|
30
|
+
* @param {string} value - Valore associato
|
|
31
|
+
* @returns {string} Stringa interpolata e colorata
|
|
32
|
+
*/
|
|
33
|
+
export function label(sym, name, value) {
|
|
34
|
+
return ` ${chalk.cyan(sym)} ${chalk.gray(name.padEnd(9))} ${chalk.white(value)}`;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* @description Stampa un messaggio log standardizzato con timestamp.
|
|
39
|
+
* @param {string} symbol - Simbolo costante da prefissare
|
|
40
|
+
* @param {Function} colorFn - Funzione chalk per colorare il prefisso
|
|
41
|
+
* @param {...any} parts - Testo del log iterabile
|
|
42
|
+
*/
|
|
43
|
+
export function log(symbol, colorFn, ...parts) {
|
|
44
|
+
console.log(` ${colorFn(symbol)} ${ts()} ${parts.join(' ')}`);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
export { chalk }; // Centralizziamo chalk per evitare import duplicati altrove
|
package/src/packages.js
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module packages
|
|
3
|
+
* @description Utilita per estrarre il package array e generare queries LLM a blocchi.
|
|
4
|
+
*/
|
|
5
|
+
import fs from 'fs/promises';
|
|
6
|
+
import path from 'path';
|
|
7
|
+
import { createHash } from 'crypto';
|
|
8
|
+
|
|
9
|
+
// ─── Logica Dipendenze ───────────────────────────────
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* @description Analizza deps di system locali escludendo packages non rilevanti
|
|
13
|
+
* @returns {Promise<Array|null>} Array strings stack locale o null in fallback error
|
|
14
|
+
*/
|
|
15
|
+
export async function readPackageDeps() {
|
|
16
|
+
try {
|
|
17
|
+
const pkgPath = path.resolve(process.cwd(), 'package.json');
|
|
18
|
+
try {
|
|
19
|
+
await fs.access(pkgPath);
|
|
20
|
+
} catch (_) {
|
|
21
|
+
return null;
|
|
22
|
+
}
|
|
23
|
+
const pkg = JSON.parse(await fs.readFile(pkgPath, 'utf8'));
|
|
24
|
+
|
|
25
|
+
const excludeList = ["plugin", "adapter", "check", "eslint", "prettier", "vite", "rollup", "webpack", "babel"];
|
|
26
|
+
|
|
27
|
+
const filterAndFormat = (depsObj) => {
|
|
28
|
+
if (!depsObj) return [];
|
|
29
|
+
return Object.entries(depsObj)
|
|
30
|
+
.filter(([n]) => !excludeList.some(ex => n.toLowerCase().includes(ex)))
|
|
31
|
+
.map(([n, v]) => {
|
|
32
|
+
let cleanName = n;
|
|
33
|
+
if (n === '@sveltejs/kit') cleanName = 'sveltekit';
|
|
34
|
+
else if (n.startsWith('@')) cleanName = n.split('/')[1];
|
|
35
|
+
let cleanVersion = String(v).replace(/[\^~>=<]/g, '').split('.').slice(0, 2).join('.');
|
|
36
|
+
return `${cleanName} ${cleanVersion}`;
|
|
37
|
+
});
|
|
38
|
+
};
|
|
39
|
+
|
|
40
|
+
let selected = filterAndFormat(pkg.dependencies);
|
|
41
|
+
selected = selected.concat(filterAndFormat(pkg.devDependencies));
|
|
42
|
+
|
|
43
|
+
return selected.length > 0 ? selected : null;
|
|
44
|
+
} catch (_) {
|
|
45
|
+
return null;
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
/**
|
|
50
|
+
* @description Raggruppa l'array delle dipendenze in chunk di dimensione fissa.
|
|
51
|
+
* @param {Array} deps - Array completo formattato
|
|
52
|
+
* @param {number} batchSize - Dimensione forzata dei chuncks
|
|
53
|
+
* @returns {Array<Array>} Array bidimensionale aggregato a batches
|
|
54
|
+
*/
|
|
55
|
+
export function groupIntoBatches(deps, batchSize = 3) {
|
|
56
|
+
if (!deps || !deps.length) return [];
|
|
57
|
+
const batches = [];
|
|
58
|
+
for (let i = 0; i < deps.length; i += batchSize) {
|
|
59
|
+
batches.push(deps.slice(i, i + batchSize));
|
|
60
|
+
}
|
|
61
|
+
return batches;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* @description Genera crypto hash string id da chunk object signature elements array
|
|
66
|
+
* @param {Array} batch - chunk elements content package string names
|
|
67
|
+
* @returns {string} short 8 character identifier digest
|
|
68
|
+
*/
|
|
69
|
+
export function batchHash(batch) {
|
|
70
|
+
return createHash('md5')
|
|
71
|
+
.update(batch.join(','))
|
|
72
|
+
.digest('hex')
|
|
73
|
+
.slice(0, 8);
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* @description Costruisce logica query concats DDG su input array e hardcoded filter.
|
|
78
|
+
* @param {Array} deps - Array validato input dipendenze system
|
|
79
|
+
* @returns {string} Target string duck duck query
|
|
80
|
+
*/
|
|
81
|
+
export function buildQuery(deps) {
|
|
82
|
+
const year = new Date().getFullYear();
|
|
83
|
+
if (deps && deps.length > 0) {
|
|
84
|
+
return `${deps.join(' ')} latest ${year}`;
|
|
85
|
+
}
|
|
86
|
+
return `javascript web development best practices ${year}`;
|
|
87
|
+
}
|
package/src/proxy.js
ADDED
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module proxy
|
|
3
|
+
* @description Node HTTP proxy server that routes Anthropic/Gemini API traffic and streams upstream responses back to the client.
|
|
4
|
+
*/
|
|
5
|
+
import http from 'http';
|
|
6
|
+
import https from 'https';
|
|
7
|
+
import { webSearch } from './search.js';
|
|
8
|
+
import { readPackageDeps, buildQuery } from './packages.js';
|
|
9
|
+
import { chalk, log, LOG_WARN, LOG_BOLT } from './logger.js';
|
|
10
|
+
import { httpsAgent } from './http-agent.js';
|
|
11
|
+
|
|
12
|
+
// ─── HTTP Node server daemon ─────────────────────────
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* @description Main listener Anthropic port interceptor content system stream
|
|
16
|
+
* @param {boolean} usePackageJson - Overrides per fallback module args
|
|
17
|
+
* @returns {Promise<http.Server>} Istanza server network configurata listen loop
|
|
18
|
+
*/
|
|
19
|
+
export async function createServer(usePackageJson) {
|
|
20
|
+
let packageQueryCache = null;
|
|
21
|
+
if (usePackageJson) {
|
|
22
|
+
const depEntries = await readPackageDeps();
|
|
23
|
+
if (depEntries) packageQueryCache = buildQuery(depEntries);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
const server = http.createServer(async (req, res) => {
|
|
27
|
+
if (req.method !== 'POST') { res.writeHead(404); res.end(); return; }
|
|
28
|
+
|
|
29
|
+
let protocol = null;
|
|
30
|
+
if (req.url.startsWith('/v1/messages')) {
|
|
31
|
+
protocol = 'ANTHROPIC';
|
|
32
|
+
} else if (req.url.startsWith('/v1beta/models/') && (req.url.includes('generateContent') || req.url.includes('streamGenerateContent'))) {
|
|
33
|
+
protocol = 'GEMINI';
|
|
34
|
+
} else {
|
|
35
|
+
res.writeHead(404); res.end(); return;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
try {
|
|
39
|
+
const MAX_PAYLOAD_SIZE = 10 * 1024 * 1024; // 10MB limit
|
|
40
|
+
let bodyChunks = [];
|
|
41
|
+
let bodyLength = 0;
|
|
42
|
+
for await (const chunk of req) {
|
|
43
|
+
bodyLength += chunk.length;
|
|
44
|
+
if (bodyLength > MAX_PAYLOAD_SIZE) {
|
|
45
|
+
res.writeHead(413, { 'Content-Type': 'text/plain' });
|
|
46
|
+
res.end('Payload Too Large');
|
|
47
|
+
return;
|
|
48
|
+
}
|
|
49
|
+
bodyChunks.push(chunk);
|
|
50
|
+
}
|
|
51
|
+
const rawBody = Buffer.concat(bodyChunks);
|
|
52
|
+
|
|
53
|
+
let parsedBody;
|
|
54
|
+
try {
|
|
55
|
+
parsedBody = JSON.parse(rawBody.toString('utf8'));
|
|
56
|
+
} catch (_) {
|
|
57
|
+
res.writeHead(400); res.end('Bad Request - Invalid JSON'); return;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
// Estrai query last context check iter logic object
|
|
61
|
+
let lastUserMessage = '';
|
|
62
|
+
if (protocol === 'ANTHROPIC') {
|
|
63
|
+
const lastUserM = (parsedBody.messages || []).slice().reverse().find(m => m.role === 'user');
|
|
64
|
+
if (lastUserM) {
|
|
65
|
+
lastUserMessage = typeof lastUserM.content === 'string'
|
|
66
|
+
? lastUserM.content
|
|
67
|
+
: (Array.isArray(lastUserM.content) ? lastUserM.content.map(c => c.text || '').join(' ') : '');
|
|
68
|
+
}
|
|
69
|
+
} else if (protocol === 'GEMINI') {
|
|
70
|
+
const lastUserM = (parsedBody.contents || []).slice().reverse().find(m => m.role === 'user');
|
|
71
|
+
if (lastUserM && Array.isArray(lastUserM.parts)) {
|
|
72
|
+
lastUserMessage = lastUserM.parts.map(p => p.text || '').join(' ');
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
let query, shortMsg;
|
|
77
|
+
if (packageQueryCache) {
|
|
78
|
+
query = packageQueryCache;
|
|
79
|
+
shortMsg = 'package.json deps';
|
|
80
|
+
} else {
|
|
81
|
+
const text = lastUserMessage || '';
|
|
82
|
+
shortMsg = text.replace(/\s+/g, ' ').trim().slice(0, 50);
|
|
83
|
+
query = text.slice(0, 120).replace(/[^\w\s]/g, ' ').replace(/\s+/g, ' ').trim();
|
|
84
|
+
const year = new Date().getFullYear();
|
|
85
|
+
query = `${query} ${year}`.trim();
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
const t0 = Date.now();
|
|
89
|
+
let contextBlock = '';
|
|
90
|
+
let didInject = false;
|
|
91
|
+
let resultsCount = 0;
|
|
92
|
+
|
|
93
|
+
try {
|
|
94
|
+
if (!query || query.trim() === String(new Date().getFullYear())) throw new Error('Empty query');
|
|
95
|
+
// parallel load in proxy app process to boost response load
|
|
96
|
+
const { results, pageText } = await webSearch(query, true);
|
|
97
|
+
resultsCount = results.length;
|
|
98
|
+
|
|
99
|
+
contextBlock = `\n\n--- WEB CONTEXT (live, ${new Date().toISOString()}) ---\n`;
|
|
100
|
+
results.forEach((r, i) => {
|
|
101
|
+
contextBlock += `${i + 1}. ${r.title}: ${r.snippet} (${r.url})\n`;
|
|
102
|
+
});
|
|
103
|
+
if (pageText) contextBlock += `\nFULL TEXT:\n${pageText}\n`;
|
|
104
|
+
contextBlock += `--- END WEB CONTEXT ---\n`;
|
|
105
|
+
didInject = true;
|
|
106
|
+
} catch (_) {
|
|
107
|
+
log(LOG_WARN, chalk.yellow, chalk.white('web fetch failed') + ` → ${chalk.yellow('forwarding clean')}`);
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
const ms = Date.now() - t0;
|
|
111
|
+
|
|
112
|
+
if (didInject) {
|
|
113
|
+
log(LOG_BOLT, chalk.cyan, chalk.white(shortMsg.slice(0, 50) + (shortMsg.length > 50 ? '…' : '')) + ` → ${chalk.cyan.bold(String(resultsCount))} ${chalk.cyan('results')} ${chalk.gray(ms + 'ms')}`);
|
|
114
|
+
|
|
115
|
+
if (protocol === 'ANTHROPIC') {
|
|
116
|
+
if (parsedBody.system) {
|
|
117
|
+
if (typeof parsedBody.system === 'string') parsedBody.system += contextBlock;
|
|
118
|
+
else if (Array.isArray(parsedBody.system)) parsedBody.system.push({ type: 'text', text: contextBlock });
|
|
119
|
+
} else {
|
|
120
|
+
parsedBody.system = contextBlock;
|
|
121
|
+
}
|
|
122
|
+
} else if (protocol === 'GEMINI') {
|
|
123
|
+
if (!parsedBody.systemInstruction) {
|
|
124
|
+
parsedBody.systemInstruction = { role: 'system', parts: [] };
|
|
125
|
+
}
|
|
126
|
+
const sys = parsedBody.systemInstruction;
|
|
127
|
+
if (!Array.isArray(sys.parts)) {
|
|
128
|
+
if (typeof sys.parts === 'object' && sys.parts !== null) {
|
|
129
|
+
sys.parts = [sys.parts];
|
|
130
|
+
} else if (typeof sys.parts === 'string') {
|
|
131
|
+
sys.parts = [{ text: sys.parts }];
|
|
132
|
+
} else {
|
|
133
|
+
sys.parts = [];
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
sys.parts.push({ text: contextBlock });
|
|
137
|
+
}
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
const reqBodyStr = JSON.stringify(parsedBody);
|
|
141
|
+
const targetUrlStr = protocol === 'ANTHROPIC'
|
|
142
|
+
? `https://api.anthropic.com${req.url}`
|
|
143
|
+
: `https://generativelanguage.googleapis.com${req.url}`;
|
|
144
|
+
|
|
145
|
+
const targetUrl = new URL(targetUrlStr);
|
|
146
|
+
const headers = { ...req.headers };
|
|
147
|
+
delete headers['host'];
|
|
148
|
+
headers['content-length'] = Buffer.byteLength(reqBodyStr);
|
|
149
|
+
|
|
150
|
+
const proxyReq = https.request(targetUrl, { method: req.method, headers, agent: httpsAgent }, (proxyRes) => {
|
|
151
|
+
res.writeHead(proxyRes.statusCode, proxyRes.headers);
|
|
152
|
+
proxyRes.pipe(res);
|
|
153
|
+
});
|
|
154
|
+
proxyReq.on('error', () => { if (!res.headersSent) { res.writeHead(502); res.end('Bad Gateway'); } });
|
|
155
|
+
proxyReq.write(reqBodyStr);
|
|
156
|
+
proxyReq.end();
|
|
157
|
+
|
|
158
|
+
} catch (err) {
|
|
159
|
+
if (!res.headersSent) { res.writeHead(500); res.end('Internal Server Proxy Error'); }
|
|
160
|
+
}
|
|
161
|
+
});
|
|
162
|
+
|
|
163
|
+
return server;
|
|
164
|
+
}
|