ampcode-connector 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +5 -3
- package/src/auth/auto-refresh.ts +44 -0
- package/src/auth/callback-server.ts +1 -1
- package/src/auth/discovery.ts +1 -1
- package/src/auth/oauth.ts +11 -7
- package/src/auth/store.ts +73 -40
- package/src/cli/setup.ts +4 -4
- package/src/cli/tui.ts +1 -1
- package/src/config/config.ts +11 -11
- package/src/index.ts +27 -1
- package/src/providers/antigravity.ts +1 -4
- package/src/providers/base.ts +1 -4
- package/src/providers/codex.ts +1 -1
- package/src/proxy/rewriter.ts +12 -11
- package/src/proxy/upstream.ts +1 -4
- package/src/routing/affinity.ts +54 -19
- package/src/routing/cooldown.ts +6 -6
- package/src/routing/retry.ts +83 -0
- package/src/routing/router.ts +49 -33
- package/src/server/body.ts +12 -11
- package/src/server/server.ts +79 -92
- package/src/tools/internal.ts +69 -46
- package/src/tools/web-read.ts +336 -0
- package/src/tools/web-search.ts +33 -26
- package/src/utils/code-assist.ts +1 -1
- package/src/utils/logger.ts +31 -2
- package/src/utils/path.ts +9 -4
- package/src/utils/stats.ts +69 -0
- package/src/utils/streaming.ts +10 -11
- package/tsconfig.json +3 -3
- package/src/tools/web-extract.ts +0 -137
|
@@ -0,0 +1,336 @@
|
|
|
1
|
+
/** Local handler for extractWebPageContent — fetches a URL, converts to Markdown, ranks by objective. */
|
|
2
|
+
|
|
3
|
+
import { convert, JsPreprocessingPreset } from "@kreuzberg/html-to-markdown";
|
|
4
|
+
import { logger } from "../utils/logger.ts";
|
|
5
|
+
|
|
6
|
+
/** Tool parameters accepted by handleWebRead. */
export interface WebReadParams {
  url: string;
  objective?: string;
  forceRefetch?: boolean;
}

/** Discriminated result: ranked excerpts when an objective was given, otherwise the full page. */
export type WebReadResult =
  | { ok: true; result: { excerpts: string[] } | { fullContent: string } }
  | { ok: false; error: { code: string; message: string } };

/** A heading-delimited slice of the converted markdown. */
interface Section {
  heading: string;
  text: string;
  index: number;
}

/** A section's text paired with its relevance score and original position. */
interface ScoredSection {
  text: string;
  score: number;
  index: number;
}

type FetchOk = { ok: true; body: string; contentType: string };
type FetchErr = WebReadResult & { ok: false };

// Network settings for page fetches.
const FETCH = {
  TIMEOUT_MS: 30_000,
  USER_AGENT: "Mozilla/5.0 (compatible; AmpBot/1.0)",
} as const;

// In-memory markdown cache: bounded size, time-to-live per entry.
const CACHE = {
  MAX_ENTRIES: 50,
  TTL_MS: 5 * 60 * 1000,
} as const;

// BM25 and heuristic ranking knobs used by rankExcerpts/scoreSection.
const RANKING = {
  MAX_SECTIONS: 10,
  MAX_SECTION_WORDS: 500,
  MIN_KEYWORD_LEN: 3,
  HEADING_BOOST: 2,
  BIGRAM_BOOST: 1.5,
  POSITION_DECAY: 0.1,
  BM25_K1: 1.5,
  BM25_B: 0.75,
} as const;

// Byte budgets for clipped output.
const CLIPPING = {
  MAX_BYTES: 262_144, // 256 KB — CLI truncation limit
  MIN_TAIL_BYTES: 100,
  EXCERPT_SEP_BYTES: 2, // "\n\n" separator
} as const;

// Options for the html-to-markdown converter.
const HTML_OPTIONS = {
  skipImages: true,
  preprocessing: { enabled: true, preset: JsPreprocessingPreset.Aggressive },
};

// biome-ignore format: compact
const STOP_WORDS = new Set(
  ("the and for are but not you all can her was one our out " +
    "has have had been from this that with they which their will " +
    "each make like just over such than them very some what about " +
    "into more other then these when where how does also after " +
    "should would could being there before between those through while using").split(" "),
);

const encoder = new TextEncoder();
// fatal: false — invalid byte sequences decode to U+FFFD instead of throwing.
const decoder = new TextDecoder("utf-8", { fatal: false });

// url → converted markdown; Map insertion order doubles as LRU recency.
const cache = new Map<string, { markdown: string; createdAt: number }>();
|
|
76
|
+
|
|
77
|
+
function getCached(url: string): string | undefined {
|
|
78
|
+
const entry = cache.get(url);
|
|
79
|
+
if (!entry) return undefined;
|
|
80
|
+
|
|
81
|
+
if (Date.now() - entry.createdAt > CACHE.TTL_MS) {
|
|
82
|
+
cache.delete(url);
|
|
83
|
+
return undefined;
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
// Re-insert to promote as most-recent (LRU)
|
|
87
|
+
cache.delete(url);
|
|
88
|
+
cache.set(url, entry);
|
|
89
|
+
return entry.markdown;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
function setCache(url: string, markdown: string): void {
|
|
93
|
+
if (cache.size >= CACHE.MAX_ENTRIES) {
|
|
94
|
+
const oldest = cache.keys().next().value!;
|
|
95
|
+
cache.delete(oldest);
|
|
96
|
+
}
|
|
97
|
+
cache.set(url, { markdown, createdAt: Date.now() });
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
export async function handleWebRead({ url, objective, forceRefetch }: WebReadParams): Promise<WebReadResult> {
|
|
101
|
+
let markdown = forceRefetch ? undefined : getCached(url);
|
|
102
|
+
|
|
103
|
+
if (!markdown) {
|
|
104
|
+
const page = await fetchPage(url);
|
|
105
|
+
if (!page.ok) return page;
|
|
106
|
+
markdown = convertToMarkdown(page.body, page.contentType);
|
|
107
|
+
setCache(url, markdown);
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
if (objective) {
|
|
111
|
+
return { ok: true, result: { excerpts: rankExcerpts(markdown, objective) } };
|
|
112
|
+
}
|
|
113
|
+
return { ok: true, result: { fullContent: clipText(markdown) } };
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
async function fetchPage(url: string): Promise<FetchOk | FetchErr> {
|
|
117
|
+
let response: Response;
|
|
118
|
+
try {
|
|
119
|
+
response = await fetch(url, {
|
|
120
|
+
signal: AbortSignal.timeout(FETCH.TIMEOUT_MS),
|
|
121
|
+
redirect: "follow",
|
|
122
|
+
headers: { "User-Agent": FETCH.USER_AGENT },
|
|
123
|
+
});
|
|
124
|
+
} catch (error) {
|
|
125
|
+
logger.warn("web-read fetch failed", { url, error: String(error) });
|
|
126
|
+
return fetchError(`Failed to fetch ${url}: ${String(error)}`);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
if (!response.ok) {
|
|
130
|
+
return fetchError(`HTTP ${response.status} from ${url}`);
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
const body = await response.text();
|
|
134
|
+
const contentType = response.headers.get("content-type") ?? "";
|
|
135
|
+
return { ok: true, body, contentType };
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
function fetchError(message: string): FetchErr {
|
|
139
|
+
return { ok: false, error: { code: "fetch-error", message } };
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
function convertToMarkdown(raw: string, contentType: string): string {
|
|
143
|
+
if (contentType.includes("text/html") || contentType.includes("application/xhtml")) {
|
|
144
|
+
return convert(raw, HTML_OPTIONS);
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
if (contentType.includes("application/json")) {
|
|
148
|
+
try {
|
|
149
|
+
return `\`\`\`json\n${JSON.stringify(JSON.parse(raw), null, 2)}\n\`\`\``;
|
|
150
|
+
} catch {
|
|
151
|
+
return raw;
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
return raw;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
function rankExcerpts(markdown: string, objective: string): string[] {
|
|
159
|
+
const sections = splitSections(markdown);
|
|
160
|
+
if (!sections.length) return [clipText(markdown)];
|
|
161
|
+
const { unigrams, bigrams } = parseTerms(objective);
|
|
162
|
+
if (!unigrams.length) return [clipText(markdown)];
|
|
163
|
+
const unigramPatterns = unigrams.map((w) => new RegExp(`\\b${RegExp.escape(w)}\\b`, "g"));
|
|
164
|
+
const idfWeights = computeIdf(sections, unigramPatterns);
|
|
165
|
+
const avgDocLen = sections.reduce((sum, s) => sum + (s.text.split(/\s+/).length || 1), 0) / sections.length;
|
|
166
|
+
const totalSections = sections.length;
|
|
167
|
+
const scored = sections.map((section) =>
|
|
168
|
+
scoreSection(section, unigramPatterns, bigrams, idfWeights, avgDocLen, totalSections),
|
|
169
|
+
);
|
|
170
|
+
const hits = scored.filter((s) => s.score > 0);
|
|
171
|
+
if (!hits.length) return [clipText(markdown)];
|
|
172
|
+
hits.sort((a, b) => b.score - a.score || a.index - b.index);
|
|
173
|
+
const top = hits.slice(0, RANKING.MAX_SECTIONS);
|
|
174
|
+
top.sort((a, b) => a.index - b.index);
|
|
175
|
+
return clipMany(top.map((s) => s.text));
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
function parseTerms(objective: string): { unigrams: string[]; bigrams: RegExp[] } {
|
|
179
|
+
const words = objective
|
|
180
|
+
.toLowerCase()
|
|
181
|
+
.split(/\W+/)
|
|
182
|
+
.filter((word) => word.length >= RANKING.MIN_KEYWORD_LEN && !STOP_WORDS.has(word));
|
|
183
|
+
|
|
184
|
+
const bigrams = words
|
|
185
|
+
.slice(0, -1)
|
|
186
|
+
.map((word, i) => new RegExp(`\\b${RegExp.escape(word)}\\W+${RegExp.escape(words[i + 1]!)}\\b`));
|
|
187
|
+
|
|
188
|
+
return { unigrams: words, bigrams };
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
function computeIdf(sections: Section[], patterns: RegExp[]): number[] {
|
|
192
|
+
const lowerTexts = sections.map((section) => section.text.toLowerCase());
|
|
193
|
+
const totalSections = sections.length;
|
|
194
|
+
return patterns.map((pattern) => {
|
|
195
|
+
const docFreq = lowerTexts.filter((text) => {
|
|
196
|
+
pattern.lastIndex = 0;
|
|
197
|
+
return pattern.test(text);
|
|
198
|
+
}).length;
|
|
199
|
+
return docFreq > 0 ? Math.log((totalSections - docFreq + 0.5) / (docFreq + 0.5) + 1) : 0;
|
|
200
|
+
});
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
function scoreSection(
|
|
204
|
+
section: Section,
|
|
205
|
+
unigramPatterns: RegExp[],
|
|
206
|
+
bigrams: RegExp[],
|
|
207
|
+
idfWeights: number[],
|
|
208
|
+
avgDocLen: number,
|
|
209
|
+
totalSections: number,
|
|
210
|
+
): ScoredSection {
|
|
211
|
+
const lowerText = section.text.toLowerCase();
|
|
212
|
+
const lowerHeading = section.heading.toLowerCase();
|
|
213
|
+
const docLen = lowerText.split(/\s+/).length || 1;
|
|
214
|
+
|
|
215
|
+
// BM25 scoring
|
|
216
|
+
const { BM25_K1: k1, BM25_B: b } = RANKING;
|
|
217
|
+
let score = 0;
|
|
218
|
+
for (let i = 0; i < unigramPatterns.length; i++) {
|
|
219
|
+
const pattern = unigramPatterns[i]!;
|
|
220
|
+
pattern.lastIndex = 0;
|
|
221
|
+
const matches = lowerText.match(pattern);
|
|
222
|
+
if (matches) {
|
|
223
|
+
const tf = matches.length;
|
|
224
|
+
score += idfWeights[i]! * ((tf * (k1 + 1)) / (tf + k1 * (1 - b + b * (docLen / avgDocLen))));
|
|
225
|
+
}
|
|
226
|
+
}
|
|
227
|
+
// Bigram bonus
|
|
228
|
+
for (const pattern of bigrams) {
|
|
229
|
+
if (pattern.test(lowerText)) score *= RANKING.BIGRAM_BOOST;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
// Heading match boost (reuse pre-compiled patterns)
|
|
233
|
+
if (section.heading) {
|
|
234
|
+
if (
|
|
235
|
+
unigramPatterns.some((pattern) => {
|
|
236
|
+
pattern.lastIndex = 0;
|
|
237
|
+
return pattern.test(lowerHeading);
|
|
238
|
+
})
|
|
239
|
+
) {
|
|
240
|
+
score *= RANKING.HEADING_BOOST;
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
// Position decay — earlier sections get mild boost
|
|
245
|
+
score *= 1 + RANKING.POSITION_DECAY * (1 - section.index / totalSections);
|
|
246
|
+
return { text: section.text, score, index: section.index };
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
function splitSections(markdown: string): Section[] {
|
|
250
|
+
const raw = parseHeadingSections(markdown);
|
|
251
|
+
return chunkOversizedSections(raw);
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
function parseHeadingSections(markdown: string): Section[] {
|
|
255
|
+
const sections: Section[] = [];
|
|
256
|
+
let heading = "";
|
|
257
|
+
let body: string[] = [];
|
|
258
|
+
|
|
259
|
+
const flush = () => {
|
|
260
|
+
const joined = body.join("\n").trim();
|
|
261
|
+
if (heading || joined) {
|
|
262
|
+
const text = heading ? `${heading}\n${joined}` : joined;
|
|
263
|
+
sections.push({ heading, text, index: sections.length });
|
|
264
|
+
}
|
|
265
|
+
};
|
|
266
|
+
|
|
267
|
+
for (const line of markdown.split("\n")) {
|
|
268
|
+
if (/^#{1,6}\s/.test(line)) {
|
|
269
|
+
flush();
|
|
270
|
+
heading = line;
|
|
271
|
+
body = [];
|
|
272
|
+
} else {
|
|
273
|
+
body.push(line);
|
|
274
|
+
}
|
|
275
|
+
}
|
|
276
|
+
flush();
|
|
277
|
+
|
|
278
|
+
return sections;
|
|
279
|
+
}
|
|
280
|
+
|
|
281
|
+
function chunkOversizedSections(sections: Section[]): Section[] {
|
|
282
|
+
const result: Section[] = [];
|
|
283
|
+
|
|
284
|
+
for (const section of sections) {
|
|
285
|
+
const wordCount = section.text.split(/\s+/).length;
|
|
286
|
+
if (wordCount <= RANKING.MAX_SECTION_WORDS) {
|
|
287
|
+
result.push({ ...section, index: result.length });
|
|
288
|
+
continue;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
const paragraphs = section.text.split(/\n{2,}/);
|
|
292
|
+
let chunk: string[] = [];
|
|
293
|
+
let chunkWords = 0;
|
|
294
|
+
|
|
295
|
+
for (const paragraph of paragraphs) {
|
|
296
|
+
const paraWords = paragraph.split(/\s+/).length;
|
|
297
|
+
if (chunkWords + paraWords > RANKING.MAX_SECTION_WORDS && chunk.length > 0) {
|
|
298
|
+
result.push({ heading: section.heading, text: chunk.join("\n\n"), index: result.length });
|
|
299
|
+
chunk = [];
|
|
300
|
+
chunkWords = 0;
|
|
301
|
+
}
|
|
302
|
+
chunk.push(paragraph);
|
|
303
|
+
chunkWords += paraWords;
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
if (chunk.length > 0) {
|
|
307
|
+
result.push({ heading: section.heading, text: chunk.join("\n\n"), index: result.length });
|
|
308
|
+
}
|
|
309
|
+
}
|
|
310
|
+
|
|
311
|
+
return result;
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
function clipText(text: string): string {
|
|
315
|
+
const bytes = encoder.encode(text);
|
|
316
|
+
if (bytes.length <= CLIPPING.MAX_BYTES) return text;
|
|
317
|
+
return decoder.decode(bytes.slice(0, CLIPPING.MAX_BYTES)).replace(/\uFFFD+$/, "");
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
function clipMany(excerpts: string[]): string[] {
|
|
321
|
+
let usedBytes = 0;
|
|
322
|
+
const result: string[] = [];
|
|
323
|
+
|
|
324
|
+
for (const excerpt of excerpts) {
|
|
325
|
+
const excerptBytes = encoder.encode(excerpt).length + CLIPPING.EXCERPT_SEP_BYTES;
|
|
326
|
+
if (usedBytes + excerptBytes > CLIPPING.MAX_BYTES) {
|
|
327
|
+
const remaining = CLIPPING.MAX_BYTES - usedBytes;
|
|
328
|
+
if (remaining > CLIPPING.MIN_TAIL_BYTES) result.push(clipText(excerpt));
|
|
329
|
+
break;
|
|
330
|
+
}
|
|
331
|
+
result.push(excerpt);
|
|
332
|
+
usedBytes += excerptBytes;
|
|
333
|
+
}
|
|
334
|
+
|
|
335
|
+
return result.length > 0 ? result : [clipText(excerpts[0]!)];
|
|
336
|
+
}
|
package/src/tools/web-search.ts
CHANGED
|
@@ -3,6 +3,17 @@
|
|
|
3
3
|
import Exa from "exa-js";
|
|
4
4
|
import { logger } from "../utils/logger.ts";
|
|
5
5
|
|
|
6
|
+
let _exa: InstanceType<typeof Exa> | null = null;
|
|
7
|
+
let _exaKey: string | null = null;
|
|
8
|
+
|
|
9
|
+
function getExa(apiKey: string): InstanceType<typeof Exa> {
|
|
10
|
+
if (!_exa || _exaKey !== apiKey) {
|
|
11
|
+
_exa = new Exa(apiKey);
|
|
12
|
+
_exaKey = apiKey;
|
|
13
|
+
}
|
|
14
|
+
return _exa;
|
|
15
|
+
}
|
|
16
|
+
|
|
6
17
|
export interface SearchParams {
|
|
7
18
|
objective: string;
|
|
8
19
|
searchQueries?: string[];
|
|
@@ -15,34 +26,30 @@ interface SearchResultItem {
|
|
|
15
26
|
excerpts: string[];
|
|
16
27
|
}
|
|
17
28
|
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
29
|
+
interface SearchResponse {
|
|
30
|
+
ok: true;
|
|
31
|
+
result: { results: SearchResultItem[]; showParallelAttribution: boolean };
|
|
32
|
+
}
|
|
21
33
|
|
|
22
|
-
export async function handleSearch(params: SearchParams, exaApiKey: string): Promise<
|
|
34
|
+
export async function handleSearch(params: SearchParams, exaApiKey: string): Promise<SearchResponse> {
|
|
23
35
|
const { objective, searchQueries, maxResults = 5 } = params;
|
|
24
36
|
const query = searchQueries?.length ? searchQueries.join(" ") : objective;
|
|
25
37
|
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
return { ok: true, result: { results, showParallelAttribution: false } };
|
|
44
|
-
} catch (err) {
|
|
45
|
-
logger.error("webSearch2 Exa error", { error: String(err) });
|
|
46
|
-
return { ok: false, error: { code: "search-error", message: String(err) } };
|
|
47
|
-
}
|
|
38
|
+
const exa = getExa(exaApiKey);
|
|
39
|
+
const response = await exa.search(query, {
|
|
40
|
+
numResults: maxResults,
|
|
41
|
+
type: "auto",
|
|
42
|
+
contents: {
|
|
43
|
+
highlights: { query: objective },
|
|
44
|
+
},
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
const results: SearchResultItem[] = response.results.map((r) => ({
|
|
48
|
+
title: r.title ?? "",
|
|
49
|
+
url: r.url,
|
|
50
|
+
excerpts: r.highlights?.length ? r.highlights : [],
|
|
51
|
+
}));
|
|
52
|
+
|
|
53
|
+
logger.info(`[SEARCH] Exa returned ${results.length} results for "${query.slice(0, 80)}"`);
|
|
54
|
+
return { ok: true, result: { results, showParallelAttribution: false } };
|
|
48
55
|
}
|
package/src/utils/code-assist.ts
CHANGED
|
@@ -55,6 +55,6 @@ export function maybeWrap(
|
|
|
55
55
|
opts: { userAgent: "antigravity" | "pi-coding-agent"; requestIdPrefix: "agent" | "pi"; requestType?: "agent" },
|
|
56
56
|
): string {
|
|
57
57
|
if (!parsed) return raw;
|
|
58
|
-
if (parsed
|
|
58
|
+
if (parsed.project) return raw;
|
|
59
59
|
return wrapRequest({ projectId, model, body: parsed, ...opts });
|
|
60
60
|
}
|
package/src/utils/logger.ts
CHANGED
|
@@ -23,6 +23,34 @@ const LOG_LEVELS: Record<LogLevel, number> = {
|
|
|
23
23
|
error: 3,
|
|
24
24
|
};
|
|
25
25
|
|
|
26
|
+
const isTTY = !!process.stdout.isTTY;
|
|
27
|
+
|
|
28
|
+
const RESET = "\x1b[0m";
|
|
29
|
+
const DIM = "\x1b[2m";
|
|
30
|
+
const GREEN = "\x1b[32m";
|
|
31
|
+
const YELLOW = "\x1b[33m";
|
|
32
|
+
const RED = "\x1b[31m";
|
|
33
|
+
|
|
34
|
+
const LEVEL_COLORS: Record<LogLevel, string> = {
|
|
35
|
+
debug: DIM,
|
|
36
|
+
info: "",
|
|
37
|
+
warn: YELLOW,
|
|
38
|
+
error: RED,
|
|
39
|
+
};
|
|
40
|
+
|
|
41
|
+
const ROUTE_COLORS: Record<RouteDecision, string> = {
|
|
42
|
+
LOCAL_CLAUDE: GREEN,
|
|
43
|
+
LOCAL_CODEX: GREEN,
|
|
44
|
+
LOCAL_GEMINI: GREEN,
|
|
45
|
+
LOCAL_ANTIGRAVITY: GREEN,
|
|
46
|
+
AMP_UPSTREAM: YELLOW,
|
|
47
|
+
};
|
|
48
|
+
|
|
49
|
+
function colorize(text: string, color: string): string {
|
|
50
|
+
if (!isTTY || !color) return text;
|
|
51
|
+
return `${color}${text}${RESET}`;
|
|
52
|
+
}
|
|
53
|
+
|
|
26
54
|
let currentLevel: LogLevel = "info";
|
|
27
55
|
|
|
28
56
|
export function setLogLevel(level: LogLevel): void {
|
|
@@ -36,8 +64,9 @@ function shouldLog(level: LogLevel): boolean {
|
|
|
36
64
|
function format(entry: LogEntry): string {
|
|
37
65
|
const { timestamp, level, message, route, provider, model, duration, error } = entry;
|
|
38
66
|
|
|
39
|
-
|
|
40
|
-
|
|
67
|
+
const tag = colorize(`[${level.toUpperCase().padEnd(5)}]`, LEVEL_COLORS[level]);
|
|
68
|
+
let line = `${timestamp} ${tag} ${message}`;
|
|
69
|
+
if (route) line += ` route=${colorize(route, ROUTE_COLORS[route])}`;
|
|
41
70
|
if (provider) line += ` provider=${provider}`;
|
|
42
71
|
if (model) line += ` model=${model}`;
|
|
43
72
|
if (duration !== undefined) line += ` duration=${duration}ms`;
|
package/src/utils/path.ts
CHANGED
|
@@ -2,6 +2,11 @@
|
|
|
2
2
|
|
|
3
3
|
import { browserPrefixes, passthroughExact, passthroughPrefixes } from "../constants.ts";
|
|
4
4
|
|
|
5
|
+
const PROVIDER_RE = /^\/api\/provider\/([^/]+)/;
|
|
6
|
+
const SUBPATH_RE = /^\/api\/provider\/[^/]+(\/.*)/;
|
|
7
|
+
const MODEL_RE = /models\/([^/:]+)/;
|
|
8
|
+
const GEMINI_RE = /models\/([^/:]+):(\w+)/;
|
|
9
|
+
|
|
5
10
|
export function passthrough(pathname: string): boolean {
|
|
6
11
|
if ((passthroughExact as readonly string[]).includes(pathname)) return true;
|
|
7
12
|
return passthroughPrefixes.some((prefix) => pathname.startsWith(prefix));
|
|
@@ -13,22 +18,22 @@ export function browser(pathname: string): boolean {
|
|
|
13
18
|
}
|
|
14
19
|
|
|
15
20
|
export function provider(pathname: string): string | null {
|
|
16
|
-
const match = pathname.match(
|
|
21
|
+
const match = pathname.match(PROVIDER_RE);
|
|
17
22
|
return match?.[1] ?? null;
|
|
18
23
|
}
|
|
19
24
|
|
|
20
25
|
export function subpath(pathname: string): string {
|
|
21
|
-
const match = pathname.match(
|
|
26
|
+
const match = pathname.match(SUBPATH_RE);
|
|
22
27
|
return match?.[1] ?? pathname;
|
|
23
28
|
}
|
|
24
29
|
|
|
25
30
|
export function modelFromUrl(url: string): string | null {
|
|
26
|
-
const match = url.match(
|
|
31
|
+
const match = url.match(MODEL_RE);
|
|
27
32
|
return match?.[1] ?? null;
|
|
28
33
|
}
|
|
29
34
|
|
|
30
35
|
export function gemini(url: string): { model: string; action: string } | null {
|
|
31
|
-
const match = url.match(
|
|
36
|
+
const match = url.match(GEMINI_RE);
|
|
32
37
|
if (!match) return null;
|
|
33
38
|
return { model: match[1]!, action: match[2]! };
|
|
34
39
|
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
/** Request statistics tracking with ring buffer. */
|
|
2
|
+
|
|
3
|
+
import type { RouteDecision } from "../utils/logger.ts";
|
|
4
|
+
|
|
5
|
+
export interface RequestEntry {
|
|
6
|
+
timestamp: string;
|
|
7
|
+
route: RouteDecision;
|
|
8
|
+
provider: string;
|
|
9
|
+
model: string;
|
|
10
|
+
statusCode: number;
|
|
11
|
+
durationMs: number;
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
const MAX_ENTRIES = 1000;
|
|
15
|
+
const buffer: RequestEntry[] = [];
|
|
16
|
+
let writeIndex = 0;
|
|
17
|
+
let totalCount = 0;
|
|
18
|
+
const startedAt = Date.now();
|
|
19
|
+
|
|
20
|
+
export function record(entry: RequestEntry): void {
|
|
21
|
+
if (buffer.length < MAX_ENTRIES) {
|
|
22
|
+
buffer.push(entry);
|
|
23
|
+
} else {
|
|
24
|
+
buffer[writeIndex] = entry;
|
|
25
|
+
}
|
|
26
|
+
writeIndex = (writeIndex + 1) % MAX_ENTRIES;
|
|
27
|
+
totalCount++;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
export interface StatsSnapshot {
|
|
31
|
+
totalRequests: number;
|
|
32
|
+
requestsByRoute: Partial<Record<RouteDecision, number>>;
|
|
33
|
+
count429: number;
|
|
34
|
+
averageDurationMs: number;
|
|
35
|
+
uptimeMs: number;
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
export function snapshot(): StatsSnapshot {
|
|
39
|
+
const requestsByRoute: Partial<Record<RouteDecision, number>> = {};
|
|
40
|
+
let count429 = 0;
|
|
41
|
+
let totalDuration = 0;
|
|
42
|
+
|
|
43
|
+
for (const entry of buffer) {
|
|
44
|
+
requestsByRoute[entry.route] = (requestsByRoute[entry.route] ?? 0) + 1;
|
|
45
|
+
if (entry.statusCode === 429) count429++;
|
|
46
|
+
totalDuration += entry.durationMs;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
return {
|
|
50
|
+
totalRequests: totalCount,
|
|
51
|
+
requestsByRoute,
|
|
52
|
+
count429,
|
|
53
|
+
averageDurationMs: buffer.length > 0 ? totalDuration / buffer.length : 0,
|
|
54
|
+
uptimeMs: Date.now() - startedAt,
|
|
55
|
+
};
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
export function recentRequests(n: number): RequestEntry[] {
|
|
59
|
+
const count = Math.min(n, buffer.length);
|
|
60
|
+
if (count === 0) return [];
|
|
61
|
+
|
|
62
|
+
const result: RequestEntry[] = [];
|
|
63
|
+
let idx = (writeIndex - count + buffer.length) % buffer.length;
|
|
64
|
+
for (let i = 0; i < count; i++) {
|
|
65
|
+
result.push(buffer[idx]!);
|
|
66
|
+
idx = (idx + 1) % buffer.length;
|
|
67
|
+
}
|
|
68
|
+
return result;
|
|
69
|
+
}
|
package/src/utils/streaming.ts
CHANGED
|
@@ -24,7 +24,7 @@ export function parse(raw: string): Chunk[] {
|
|
|
24
24
|
chunk.id = line.slice(3).trim();
|
|
25
25
|
} else if (line.startsWith("retry:")) {
|
|
26
26
|
const val = parseInt(line.slice(6).trim(), 10);
|
|
27
|
-
if (!isNaN(val)) chunk.retry = val;
|
|
27
|
+
if (!Number.isNaN(val)) chunk.retry = val;
|
|
28
28
|
}
|
|
29
29
|
}
|
|
30
30
|
|
|
@@ -48,12 +48,13 @@ export function encode(chunk: Chunk): string {
|
|
|
48
48
|
return result;
|
|
49
49
|
}
|
|
50
50
|
|
|
51
|
+
const decoder = new TextDecoder();
|
|
52
|
+
const encoder = new TextEncoder();
|
|
53
|
+
|
|
51
54
|
export function transform(
|
|
52
55
|
source: ReadableStream<Uint8Array>,
|
|
53
56
|
fn: (data: string) => string,
|
|
54
57
|
): ReadableStream<Uint8Array> {
|
|
55
|
-
const decoder = new TextDecoder();
|
|
56
|
-
const encoder = new TextEncoder();
|
|
57
58
|
let buffer = "";
|
|
58
59
|
|
|
59
60
|
const stream = new TransformStream<Uint8Array, Uint8Array>({
|
|
@@ -86,13 +87,11 @@ export function transform(
|
|
|
86
87
|
return source.pipeThrough(stream);
|
|
87
88
|
}
|
|
88
89
|
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
};
|
|
95
|
-
}
|
|
90
|
+
const SSE_HEADERS: Readonly<Record<string, string>> = {
|
|
91
|
+
"Content-Type": "text/event-stream",
|
|
92
|
+
"Cache-Control": "no-cache",
|
|
93
|
+
Connection: "keep-alive",
|
|
94
|
+
};
|
|
96
95
|
|
|
97
96
|
const forwardedHeaders = [
|
|
98
97
|
"x-request-id",
|
|
@@ -114,7 +113,7 @@ export function proxy(upstream: Response, rewrite?: (data: string) => string): R
|
|
|
114
113
|
|
|
115
114
|
const body = rewrite ? transform(upstream.body, rewrite) : upstream.body;
|
|
116
115
|
|
|
117
|
-
const h: Record<string, string> = { ...
|
|
116
|
+
const h: Record<string, string> = { ...SSE_HEADERS };
|
|
118
117
|
for (const name of forwardedHeaders) {
|
|
119
118
|
const value = upstream.headers.get(name);
|
|
120
119
|
if (value) h[name] = value;
|
package/tsconfig.json
CHANGED
|
@@ -11,7 +11,7 @@
|
|
|
11
11
|
// Bundler mode
|
|
12
12
|
"moduleResolution": "bundler",
|
|
13
13
|
"allowImportingTsExtensions": true,
|
|
14
|
-
"verbatimModuleSyntax":
|
|
14
|
+
"verbatimModuleSyntax": false,
|
|
15
15
|
"noEmit": true,
|
|
16
16
|
|
|
17
17
|
// Best practices
|
|
@@ -22,8 +22,8 @@
|
|
|
22
22
|
"noImplicitOverride": true,
|
|
23
23
|
|
|
24
24
|
// Some stricter flags (disabled by default)
|
|
25
|
-
"noUnusedLocals":
|
|
26
|
-
"noUnusedParameters":
|
|
25
|
+
"noUnusedLocals": true,
|
|
26
|
+
"noUnusedParameters": true,
|
|
27
27
|
"noPropertyAccessFromIndexSignature": false
|
|
28
28
|
},
|
|
29
29
|
"exclude": ["references"]
|