jiren 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +768 -0
- package/components/cache.ts +451 -0
- package/components/client-node-native.ts +1410 -0
- package/components/client.ts +1852 -0
- package/components/index.ts +37 -0
- package/components/metrics.ts +314 -0
- package/components/native-cache-node.ts +170 -0
- package/components/native-cache.ts +222 -0
- package/components/native-json.ts +195 -0
- package/components/native-node.ts +138 -0
- package/components/native.ts +418 -0
- package/components/persistent-worker.ts +67 -0
- package/components/subprocess-worker.ts +60 -0
- package/components/types.ts +317 -0
- package/components/worker-pool.ts +153 -0
- package/components/worker.ts +154 -0
- package/dist/components/cache.d.ts +32 -0
- package/dist/components/cache.d.ts.map +1 -0
- package/dist/components/cache.js +374 -0
- package/dist/components/cache.js.map +1 -0
- package/dist/components/client-node-native.d.ts +71 -0
- package/dist/components/client-node-native.d.ts.map +1 -0
- package/dist/components/client-node-native.js +1055 -0
- package/dist/components/client-node-native.js.map +1 -0
- package/dist/components/metrics.d.ts +14 -0
- package/dist/components/metrics.d.ts.map +1 -0
- package/dist/components/metrics.js +260 -0
- package/dist/components/metrics.js.map +1 -0
- package/dist/components/native-cache-node.d.ts +41 -0
- package/dist/components/native-cache-node.d.ts.map +1 -0
- package/dist/components/native-cache-node.js +133 -0
- package/dist/components/native-cache-node.js.map +1 -0
- package/dist/components/native-node.d.ts +82 -0
- package/dist/components/native-node.d.ts.map +1 -0
- package/dist/components/native-node.js +124 -0
- package/dist/components/native-node.js.map +1 -0
- package/dist/components/types.d.ts +248 -0
- package/dist/components/types.d.ts.map +1 -0
- package/dist/components/types.js +2 -0
- package/dist/components/types.js.map +1 -0
- package/dist/index-node.d.ts +3 -0
- package/dist/index-node.d.ts.map +1 -0
- package/dist/index-node.js +5 -0
- package/dist/index-node.js.map +1 -0
- package/index-node.ts +10 -0
- package/index.ts +9 -0
- package/lib/libcurl-impersonate.dylib +0 -0
- package/lib/libhttpclient.dylib +0 -0
- package/lib/libidn2.0.dylib +0 -0
- package/lib/libintl.8.dylib +0 -0
- package/lib/libunistring.5.dylib +0 -0
- package/lib/libzstd.1.5.7.dylib +0 -0
- package/package.json +62 -0
|
@@ -0,0 +1,451 @@
|
|
|
1
|
+
import {
|
|
2
|
+
existsSync,
|
|
3
|
+
mkdirSync,
|
|
4
|
+
readFileSync,
|
|
5
|
+
writeFileSync,
|
|
6
|
+
unlinkSync,
|
|
7
|
+
readdirSync,
|
|
8
|
+
statSync,
|
|
9
|
+
} from "fs";
|
|
10
|
+
import { gzipSync, gunzipSync } from "zlib";
|
|
11
|
+
import { createHash } from "crypto";
|
|
12
|
+
import { join } from "path";
|
|
13
|
+
import type {
|
|
14
|
+
JirenResponse,
|
|
15
|
+
JirenResponseBody,
|
|
16
|
+
SerializableCacheEntry,
|
|
17
|
+
CacheEntry,
|
|
18
|
+
} from "./types.js";
|
|
19
|
+
|
|
20
|
+
function reconstructBody(bodyData: string, isText: boolean): JirenResponseBody {
|
|
21
|
+
let buffer: ArrayBuffer;
|
|
22
|
+
|
|
23
|
+
if (isText) {
|
|
24
|
+
// Text data stored directly
|
|
25
|
+
const encoder = new TextEncoder();
|
|
26
|
+
buffer = encoder.encode(bodyData).buffer as ArrayBuffer;
|
|
27
|
+
} else {
|
|
28
|
+
// Binary data stored as base64
|
|
29
|
+
const binaryString = atob(bodyData);
|
|
30
|
+
const bytes = new Uint8Array(binaryString.length);
|
|
31
|
+
for (let i = 0; i < binaryString.length; i++) {
|
|
32
|
+
bytes[i] = binaryString.charCodeAt(i);
|
|
33
|
+
}
|
|
34
|
+
buffer = bytes.buffer as ArrayBuffer;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
return {
|
|
38
|
+
bodyUsed: false,
|
|
39
|
+
text: async () => {
|
|
40
|
+
if (isText) return bodyData;
|
|
41
|
+
const decoder = new TextDecoder();
|
|
42
|
+
return decoder.decode(buffer);
|
|
43
|
+
},
|
|
44
|
+
json: async () => {
|
|
45
|
+
const text = isText ? bodyData : new TextDecoder().decode(buffer);
|
|
46
|
+
return JSON.parse(text);
|
|
47
|
+
},
|
|
48
|
+
arrayBuffer: async () => buffer,
|
|
49
|
+
jsonFields: async <T extends Record<string, any> = any>(
|
|
50
|
+
_fields: (keyof T)[]
|
|
51
|
+
) => ({}),
|
|
52
|
+
blob: async () => new Blob([buffer]),
|
|
53
|
+
};
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
// Doubly-linked-list node used by L1MemoryCache to track recency order.
// Nodes live both in the list (for ordering) and in a Map (for O(1) lookup).
interface LRUNode {
  // Cache key this node stores; mirrors the Map key so eviction from the
  // list tail can also remove the Map entry in O(1).
  key: string;
  // The cached payload plus its timestamp/ttl metadata.
  entry: CacheEntry;
  // Neighbor toward the head (more recently used); null when this is the head.
  prev: LRUNode | null;
  // Neighbor toward the tail (less recently used); null when this is the tail.
  next: LRUNode | null;
}
|
|
62
|
+
|
|
63
|
+
class L1MemoryCache {
|
|
64
|
+
private capacity: number;
|
|
65
|
+
private cache: Map<string, LRUNode> = new Map();
|
|
66
|
+
private head: LRUNode | null = null; // Most recently used
|
|
67
|
+
private tail: LRUNode | null = null; // Least recently used
|
|
68
|
+
|
|
69
|
+
// Stats
|
|
70
|
+
public hits = 0;
|
|
71
|
+
public misses = 0;
|
|
72
|
+
|
|
73
|
+
constructor(capacity = 100) {
|
|
74
|
+
this.capacity = capacity;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
get(key: string): CacheEntry | null {
|
|
78
|
+
const node = this.cache.get(key);
|
|
79
|
+
if (!node) {
|
|
80
|
+
this.misses++;
|
|
81
|
+
return null;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// Check if expired
|
|
85
|
+
const now = Date.now();
|
|
86
|
+
if (now - node.entry.timestamp > node.entry.ttl) {
|
|
87
|
+
this.delete(key);
|
|
88
|
+
this.misses++;
|
|
89
|
+
return null;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// Move to front (most recently used)
|
|
93
|
+
this.moveToFront(node);
|
|
94
|
+
this.hits++;
|
|
95
|
+
return node.entry;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
set(key: string, entry: CacheEntry): void {
|
|
99
|
+
// If key exists, update and move to front
|
|
100
|
+
const existing = this.cache.get(key);
|
|
101
|
+
if (existing) {
|
|
102
|
+
existing.entry = entry;
|
|
103
|
+
this.moveToFront(existing);
|
|
104
|
+
return;
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// Create new node
|
|
108
|
+
const node: LRUNode = {
|
|
109
|
+
key,
|
|
110
|
+
entry,
|
|
111
|
+
prev: null,
|
|
112
|
+
next: this.head,
|
|
113
|
+
};
|
|
114
|
+
|
|
115
|
+
// Add to front
|
|
116
|
+
if (this.head) {
|
|
117
|
+
this.head.prev = node;
|
|
118
|
+
}
|
|
119
|
+
this.head = node;
|
|
120
|
+
if (!this.tail) {
|
|
121
|
+
this.tail = node;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
this.cache.set(key, node);
|
|
125
|
+
|
|
126
|
+
// Evict LRU if over capacity
|
|
127
|
+
if (this.cache.size > this.capacity) {
|
|
128
|
+
this.evictLRU();
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
delete(key: string): void {
|
|
133
|
+
const node = this.cache.get(key);
|
|
134
|
+
if (!node) return;
|
|
135
|
+
|
|
136
|
+
this.removeNode(node);
|
|
137
|
+
this.cache.delete(key);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
clear(): void {
|
|
141
|
+
this.cache.clear();
|
|
142
|
+
this.head = null;
|
|
143
|
+
this.tail = null;
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
stats() {
|
|
147
|
+
return {
|
|
148
|
+
size: this.cache.size,
|
|
149
|
+
capacity: this.capacity,
|
|
150
|
+
hits: this.hits,
|
|
151
|
+
misses: this.misses,
|
|
152
|
+
hitRate:
|
|
153
|
+
this.hits + this.misses > 0
|
|
154
|
+
? ((this.hits / (this.hits + this.misses)) * 100).toFixed(2) + "%"
|
|
155
|
+
: "0%",
|
|
156
|
+
};
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
private moveToFront(node: LRUNode): void {
|
|
160
|
+
if (node === this.head) return; // Already at front
|
|
161
|
+
|
|
162
|
+
this.removeNode(node);
|
|
163
|
+
|
|
164
|
+
// Add to front
|
|
165
|
+
node.prev = null;
|
|
166
|
+
node.next = this.head;
|
|
167
|
+
if (this.head) {
|
|
168
|
+
this.head.prev = node;
|
|
169
|
+
}
|
|
170
|
+
this.head = node;
|
|
171
|
+
if (!this.tail) {
|
|
172
|
+
this.tail = node;
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
private removeNode(node: LRUNode): void {
|
|
177
|
+
if (node.prev) {
|
|
178
|
+
node.prev.next = node.next;
|
|
179
|
+
} else {
|
|
180
|
+
this.head = node.next;
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
if (node.next) {
|
|
184
|
+
node.next.prev = node.prev;
|
|
185
|
+
} else {
|
|
186
|
+
this.tail = node.prev;
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
private evictLRU(): void {
|
|
191
|
+
if (!this.tail) return;
|
|
192
|
+
|
|
193
|
+
const key = this.tail.key;
|
|
194
|
+
this.removeNode(this.tail);
|
|
195
|
+
this.cache.delete(key);
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
export class ResponseCache {
|
|
200
|
+
private l1: L1MemoryCache;
|
|
201
|
+
private cacheDir: string;
|
|
202
|
+
private maxDiskSize: number;
|
|
203
|
+
|
|
204
|
+
constructor(l1Capacity = 100, cacheDir = ".cache/jiren", maxDiskSize = 100) {
|
|
205
|
+
this.l1 = new L1MemoryCache(l1Capacity);
|
|
206
|
+
this.maxDiskSize = maxDiskSize;
|
|
207
|
+
this.cacheDir = cacheDir;
|
|
208
|
+
|
|
209
|
+
// Create cache directory if it doesn't exist
|
|
210
|
+
if (!existsSync(this.cacheDir)) {
|
|
211
|
+
mkdirSync(this.cacheDir, { recursive: true });
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
private generateKey(url: string, path?: string, options?: any): string {
|
|
216
|
+
const fullUrl = path ? `${url}${path}` : url;
|
|
217
|
+
const method = options?.method || "GET";
|
|
218
|
+
const headers = JSON.stringify(options?.headers || {});
|
|
219
|
+
const key = `${method}:${fullUrl}:${headers}`;
|
|
220
|
+
|
|
221
|
+
// Hash the key to create a valid filename
|
|
222
|
+
return createHash("md5").update(key).digest("hex");
|
|
223
|
+
}
|
|
224
|
+
|
|
225
|
+
private getCacheFilePath(key: string): string {
|
|
226
|
+
return join(this.cacheDir, `${key}.json.gz`);
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
preloadL1(url: string, path?: string, options?: any): boolean {
|
|
230
|
+
const key = this.generateKey(url, path, options);
|
|
231
|
+
|
|
232
|
+
// Check if already in L1
|
|
233
|
+
if (this.l1.get(key)) {
|
|
234
|
+
return true;
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
// Try to load from L2 disk
|
|
238
|
+
const l2Entry = this.getFromDisk(key);
|
|
239
|
+
if (l2Entry) {
|
|
240
|
+
this.l1.set(key, l2Entry);
|
|
241
|
+
return true;
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
return false;
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
get(url: string, path?: string, options?: any): JirenResponse | null {
|
|
248
|
+
const key = this.generateKey(url, path, options);
|
|
249
|
+
|
|
250
|
+
// L1: Check in-memory cache first (~0.001ms)
|
|
251
|
+
const l1Entry = this.l1.get(key);
|
|
252
|
+
if (l1Entry) {
|
|
253
|
+
return l1Entry.response;
|
|
254
|
+
}
|
|
255
|
+
|
|
256
|
+
// L2: Check disk cache (~5ms)
|
|
257
|
+
const l2Entry = this.getFromDisk(key);
|
|
258
|
+
if (l2Entry) {
|
|
259
|
+
// Promote to L1 for faster future access
|
|
260
|
+
this.l1.set(key, l2Entry);
|
|
261
|
+
return l2Entry.response;
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
return null;
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
private getFromDisk(key: string): CacheEntry | null {
|
|
268
|
+
const filePath = this.getCacheFilePath(key);
|
|
269
|
+
|
|
270
|
+
if (!existsSync(filePath)) return null;
|
|
271
|
+
|
|
272
|
+
try {
|
|
273
|
+
// Read compressed file
|
|
274
|
+
const compressed = readFileSync(filePath);
|
|
275
|
+
|
|
276
|
+
// Decompress
|
|
277
|
+
const decompressed = gunzipSync(compressed);
|
|
278
|
+
const data = decompressed.toString("utf-8");
|
|
279
|
+
const serialized: SerializableCacheEntry = JSON.parse(data);
|
|
280
|
+
|
|
281
|
+
// Check if expired
|
|
282
|
+
const now = Date.now();
|
|
283
|
+
if (now - serialized.timestamp > serialized.ttl) {
|
|
284
|
+
// Delete expired cache file
|
|
285
|
+
try {
|
|
286
|
+
unlinkSync(filePath);
|
|
287
|
+
} catch {}
|
|
288
|
+
return null;
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
// Reconstruct the response with working body methods
|
|
292
|
+
const response: JirenResponse = {
|
|
293
|
+
status: serialized.status,
|
|
294
|
+
statusText: serialized.statusText,
|
|
295
|
+
headers: serialized.headers,
|
|
296
|
+
url: serialized.url,
|
|
297
|
+
ok: serialized.ok,
|
|
298
|
+
redirected: serialized.redirected,
|
|
299
|
+
type: serialized.type || "default",
|
|
300
|
+
body: reconstructBody(serialized.bodyData, serialized.bodyIsText),
|
|
301
|
+
};
|
|
302
|
+
|
|
303
|
+
return {
|
|
304
|
+
response,
|
|
305
|
+
timestamp: serialized.timestamp,
|
|
306
|
+
ttl: serialized.ttl,
|
|
307
|
+
};
|
|
308
|
+
} catch (error) {
|
|
309
|
+
// Invalid cache file, delete it
|
|
310
|
+
try {
|
|
311
|
+
unlinkSync(filePath);
|
|
312
|
+
} catch {}
|
|
313
|
+
return null;
|
|
314
|
+
}
|
|
315
|
+
}
|
|
316
|
+
|
|
317
|
+
set(
|
|
318
|
+
url: string,
|
|
319
|
+
response: JirenResponse,
|
|
320
|
+
ttl: number,
|
|
321
|
+
path?: string,
|
|
322
|
+
options?: any
|
|
323
|
+
): void {
|
|
324
|
+
const key = this.generateKey(url, path, options);
|
|
325
|
+
|
|
326
|
+
const entry: CacheEntry = {
|
|
327
|
+
response,
|
|
328
|
+
timestamp: Date.now(),
|
|
329
|
+
ttl,
|
|
330
|
+
};
|
|
331
|
+
|
|
332
|
+
// L1: Store in memory (~0.001ms)
|
|
333
|
+
this.l1.set(key, entry);
|
|
334
|
+
|
|
335
|
+
// L2: Store on disk (async-ish, for persistence)
|
|
336
|
+
this.setToDisk(key, entry);
|
|
337
|
+
}
|
|
338
|
+
|
|
339
|
+
private async setToDisk(key: string, entry: CacheEntry): Promise<void> {
|
|
340
|
+
const filePath = this.getCacheFilePath(key);
|
|
341
|
+
|
|
342
|
+
try {
|
|
343
|
+
// Extract body text for serialization
|
|
344
|
+
let bodyData: string;
|
|
345
|
+
let bodyIsText = true;
|
|
346
|
+
|
|
347
|
+
try {
|
|
348
|
+
bodyData = await entry.response.body.text();
|
|
349
|
+
} catch {
|
|
350
|
+
// If text fails, try arrayBuffer and convert to base64
|
|
351
|
+
try {
|
|
352
|
+
const buffer = await entry.response.body.arrayBuffer();
|
|
353
|
+
const bytes = new Uint8Array(buffer);
|
|
354
|
+
let binary = "";
|
|
355
|
+
for (let i = 0; i < bytes.length; i++) {
|
|
356
|
+
binary += String.fromCharCode(bytes[i]!);
|
|
357
|
+
}
|
|
358
|
+
bodyData = btoa(binary);
|
|
359
|
+
bodyIsText = false;
|
|
360
|
+
} catch {
|
|
361
|
+
bodyData = "";
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
// Create serializable entry
|
|
366
|
+
const serialized: SerializableCacheEntry = {
|
|
367
|
+
status: entry.response.status,
|
|
368
|
+
statusText: entry.response.statusText,
|
|
369
|
+
headers: entry.response.headers as Record<string, string>,
|
|
370
|
+
url: entry.response.url,
|
|
371
|
+
ok: entry.response.ok,
|
|
372
|
+
redirected: entry.response.redirected,
|
|
373
|
+
type: entry.response.type || "default",
|
|
374
|
+
bodyData,
|
|
375
|
+
bodyIsText,
|
|
376
|
+
timestamp: entry.timestamp,
|
|
377
|
+
ttl: entry.ttl,
|
|
378
|
+
};
|
|
379
|
+
|
|
380
|
+
// Convert to JSON
|
|
381
|
+
const json = JSON.stringify(serialized);
|
|
382
|
+
|
|
383
|
+
// Compress with gzip
|
|
384
|
+
const compressed = gzipSync(json);
|
|
385
|
+
|
|
386
|
+
// Write compressed file
|
|
387
|
+
writeFileSync(filePath, compressed);
|
|
388
|
+
} catch (error) {
|
|
389
|
+
// Silently fail if can't write cache
|
|
390
|
+
console.warn("Failed to write cache:", error);
|
|
391
|
+
}
|
|
392
|
+
}
|
|
393
|
+
|
|
394
|
+
clear(url?: string): void {
|
|
395
|
+
// Clear L1
|
|
396
|
+
this.l1.clear();
|
|
397
|
+
|
|
398
|
+
// Clear L2
|
|
399
|
+
if (url) {
|
|
400
|
+
// Clear all cache files for this URL
|
|
401
|
+
// This is approximate since we hash the keys
|
|
402
|
+
// For now, just clear all to be safe
|
|
403
|
+
this.clearAllDisk();
|
|
404
|
+
} else {
|
|
405
|
+
this.clearAllDisk();
|
|
406
|
+
}
|
|
407
|
+
}
|
|
408
|
+
|
|
409
|
+
private clearAllDisk(): void {
|
|
410
|
+
try {
|
|
411
|
+
const files = readdirSync(this.cacheDir);
|
|
412
|
+
for (const file of files) {
|
|
413
|
+
if (file.endsWith(".json.gz")) {
|
|
414
|
+
unlinkSync(join(this.cacheDir, file));
|
|
415
|
+
}
|
|
416
|
+
}
|
|
417
|
+
} catch (error) {
|
|
418
|
+
// Silently fail
|
|
419
|
+
}
|
|
420
|
+
}
|
|
421
|
+
|
|
422
|
+
stats() {
|
|
423
|
+
const l1Stats = this.l1.stats();
|
|
424
|
+
|
|
425
|
+
// L2 disk stats
|
|
426
|
+
let diskSize = 0;
|
|
427
|
+
let diskFiles = 0;
|
|
428
|
+
let totalDiskBytes = 0;
|
|
429
|
+
|
|
430
|
+
try {
|
|
431
|
+
const files = readdirSync(this.cacheDir);
|
|
432
|
+
const cacheFiles = files.filter((f: string) => f.endsWith(".json.gz"));
|
|
433
|
+
diskFiles = cacheFiles.length;
|
|
434
|
+
|
|
435
|
+
for (const file of cacheFiles) {
|
|
436
|
+
const stats = statSync(join(this.cacheDir, file));
|
|
437
|
+
totalDiskBytes += stats.size;
|
|
438
|
+
}
|
|
439
|
+
} catch {}
|
|
440
|
+
|
|
441
|
+
return {
|
|
442
|
+
l1: l1Stats,
|
|
443
|
+
l2: {
|
|
444
|
+
size: diskFiles,
|
|
445
|
+
maxSize: this.maxDiskSize,
|
|
446
|
+
cacheDir: this.cacheDir,
|
|
447
|
+
totalSizeKB: (totalDiskBytes / 1024).toFixed(2),
|
|
448
|
+
},
|
|
449
|
+
};
|
|
450
|
+
}
|
|
451
|
+
}
|