jiren 1.4.0 → 1.4.5

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
package/README.md CHANGED
@@ -46,6 +46,7 @@ console.log(users);
46
46
  - [Basic Usage](#basic-usage)
47
47
  - [Response Caching](#response-caching)
48
48
  - [Anti-Bot Protection](#anti-bot-protection)
49
+ - [Request Interceptors](#request-interceptors)
49
50
  - [Advanced Features](#advanced-features)
50
51
  - [Performance](#performance)
51
52
  - [API Reference](#api-reference)
@@ -242,6 +243,57 @@ const response = await client.url.protected.get({
242
243
 
243
244
  ---
244
245
 
246
+ ## Request Interceptors
247
+
248
+ Add middleware to intercept requests and responses for logging, auth injection, and more:
249
+
250
+ ### Basic Interceptors
251
+
252
+ ```typescript
253
+ const client = new JirenClient({
254
+ warmup: { api: "https://api.example.com" },
255
+ interceptors: {
256
+ // Modify requests before sending
257
+ request: [
258
+ (ctx) => ({
259
+ ...ctx,
260
+ headers: { ...ctx.headers, Authorization: `Bearer ${getToken()}` },
261
+ }),
262
+ ],
263
+ // Transform responses after receiving
264
+ response: [
265
+ (ctx) => {
266
+ console.log(`[${ctx.response.status}] ${ctx.request.url}`);
267
+ return ctx;
268
+ },
269
+ ],
270
+ // Handle errors
271
+ error: [(err, ctx) => console.error(`Failed: ${ctx.url}`, err)],
272
+ },
273
+ });
274
+ ```
275
+
276
+ ### Dynamic Interceptors with `use()`
277
+
278
+ ```typescript
279
+ // Add interceptors after client creation
280
+ client.use({
281
+ request: [
282
+ (ctx) => ({ ...ctx, headers: { ...ctx.headers, "X-Custom": "value" } }),
283
+ ],
284
+ });
285
+ ```
286
+
287
+ ### Interceptor Types
288
+
289
+ | Type | Purpose | Context |
290
+ | ---------- | ------------------------------------------------ | -------------------------------- |
291
+ | `request` | Modify method, URL, headers, body before sending | `{ method, url, headers, body }` |
292
+ | `response` | Transform response after receiving | `{ request, response }` |
293
+ | `error` | Handle errors centrally | `(error, requestContext)` |
294
+
295
+ ---
296
+
245
297
  ## Advanced Features
246
298
 
247
299
  ### TypeScript Generics
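
As a quick complement to the README section added above, here is a minimal sketch of registering interceptors after construction with `client.use()`. The endpoint URL and the `X-Request-Id` header are illustrative, not part of the package:

```typescript
// Sketch: dynamic interceptors via use(). URL and header name are examples only.
import { randomUUID } from "crypto";
import { JirenClient } from "jiren";

const client = new JirenClient({ warmup: { api: "https://api.example.com" } });

client.use({
  // Stamp every outgoing request with a correlation ID
  request: [
    (ctx) => ({ ...ctx, headers: { ...ctx.headers, "X-Request-Id": randomUUID() } }),
  ],
  // Central error reporting
  error: [
    (err, ctx) => console.error(`[jiren] ${ctx.method} ${ctx.url} failed:`, err.message),
  ],
});
```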
@@ -1,8 +1,36 @@
1
- import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
1
+ import {
2
+ existsSync,
3
+ mkdirSync,
4
+ readFileSync,
5
+ writeFileSync,
6
+ unlinkSync,
7
+ readdirSync,
8
+ statSync,
9
+ } from "fs";
2
10
  import { gzipSync, gunzipSync } from "zlib";
3
11
  import { createHash } from "crypto";
4
12
  import { join } from "path";
5
- import type { JirenResponse } from "./types";
13
+ import type { JirenResponse, JirenResponseBody } from "./types";
14
+
15
+ /**
16
+ * Serializable cache entry - stores raw body data for reconstruction
17
+ */
18
+ interface SerializableCacheEntry {
19
+ // Store serializable parts of response
20
+ status: number;
21
+ statusText: string;
22
+ headers: Record<string, string>;
23
+ url: string;
24
+ ok: boolean;
25
+ redirected: boolean;
26
+ type: "basic" | "cors" | "default" | "error" | "opaque" | "opaqueredirect";
27
+ // Store raw body as base64 string (for binary compatibility)
28
+ bodyData: string;
29
+ bodyIsText: boolean;
30
+ // Cache metadata
31
+ timestamp: number;
32
+ ttl: number;
33
+ }
6
34
 
7
35
  interface CacheEntry {
8
36
  response: JirenResponse;
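
For illustration, this is roughly what one serialized entry looks like before gzip compression (module scope assumed, since `SerializableCacheEntry` is not exported; all values are made up):

```typescript
// Illustrative values only; mirrors the SerializableCacheEntry shape above.
const example: SerializableCacheEntry = {
  status: 200,
  statusText: "OK",
  headers: { "content-type": "application/json" },
  url: "https://api.example.com/users",
  ok: true,
  redirected: false,
  type: "basic",
  bodyData: '{"users":[]}', // text bodies are stored as-is
  bodyIsText: true,         // binary bodies are base64-encoded with bodyIsText: false
  timestamp: Date.now(),
  ttl: 60_000,
};
```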
@@ -10,12 +38,224 @@ interface CacheEntry {
10
38
  ttl: number;
11
39
  }
12
40
 
41
+ /**
42
+ * Reconstruct body methods from raw data
43
+ */
44
+ function reconstructBody(bodyData: string, isText: boolean): JirenResponseBody {
45
+ let buffer: ArrayBuffer;
46
+
47
+ if (isText) {
48
+ // Text data stored directly
49
+ const encoder = new TextEncoder();
50
+ buffer = encoder.encode(bodyData).buffer as ArrayBuffer;
51
+ } else {
52
+ // Binary data stored as base64
53
+ const binaryString = atob(bodyData);
54
+ const bytes = new Uint8Array(binaryString.length);
55
+ for (let i = 0; i < binaryString.length; i++) {
56
+ bytes[i] = binaryString.charCodeAt(i);
57
+ }
58
+ buffer = bytes.buffer as ArrayBuffer;
59
+ }
60
+
61
+ return {
62
+ bodyUsed: false,
63
+ text: async () => {
64
+ if (isText) return bodyData;
65
+ const decoder = new TextDecoder();
66
+ return decoder.decode(buffer);
67
+ },
68
+ json: async () => {
69
+ const text = isText ? bodyData : new TextDecoder().decode(buffer);
70
+ return JSON.parse(text);
71
+ },
72
+ arrayBuffer: async () => buffer,
73
+ blob: async () => new Blob([buffer]),
74
+ };
75
+ }
76
+
77
+ /**
78
+ * L1 In-Memory LRU Cache Node
79
+ */
80
+ interface LRUNode {
81
+ key: string;
82
+ entry: CacheEntry;
83
+ prev: LRUNode | null;
84
+ next: LRUNode | null;
85
+ }
86
+
87
+ /**
88
+ * L1 In-Memory LRU Cache
89
+ * Provides ~0.001ms access time (vs ~5ms for disk)
90
+ */
91
+ class L1MemoryCache {
92
+ private capacity: number;
93
+ private cache: Map<string, LRUNode> = new Map();
94
+ private head: LRUNode | null = null; // Most recently used
95
+ private tail: LRUNode | null = null; // Least recently used
96
+
97
+ // Stats
98
+ public hits = 0;
99
+ public misses = 0;
100
+
101
+ constructor(capacity = 100) {
102
+ this.capacity = capacity;
103
+ }
104
+
105
+ /**
106
+ * Get from L1 cache (O(1) lookup + LRU update)
107
+ */
108
+ get(key: string): CacheEntry | null {
109
+ const node = this.cache.get(key);
110
+ if (!node) {
111
+ this.misses++;
112
+ return null;
113
+ }
114
+
115
+ // Check if expired
116
+ const now = Date.now();
117
+ if (now - node.entry.timestamp > node.entry.ttl) {
118
+ this.delete(key);
119
+ this.misses++;
120
+ return null;
121
+ }
122
+
123
+ // Move to front (most recently used)
124
+ this.moveToFront(node);
125
+ this.hits++;
126
+ return node.entry;
127
+ }
128
+
129
+ /**
130
+ * Set in L1 cache with LRU eviction
131
+ */
132
+ set(key: string, entry: CacheEntry): void {
133
+ // If key exists, update and move to front
134
+ const existing = this.cache.get(key);
135
+ if (existing) {
136
+ existing.entry = entry;
137
+ this.moveToFront(existing);
138
+ return;
139
+ }
140
+
141
+ // Create new node
142
+ const node: LRUNode = {
143
+ key,
144
+ entry,
145
+ prev: null,
146
+ next: this.head,
147
+ };
148
+
149
+ // Add to front
150
+ if (this.head) {
151
+ this.head.prev = node;
152
+ }
153
+ this.head = node;
154
+ if (!this.tail) {
155
+ this.tail = node;
156
+ }
157
+
158
+ this.cache.set(key, node);
159
+
160
+ // Evict LRU if over capacity
161
+ if (this.cache.size > this.capacity) {
162
+ this.evictLRU();
163
+ }
164
+ }
165
+
166
+ /**
167
+ * Delete from L1 cache
168
+ */
169
+ delete(key: string): void {
170
+ const node = this.cache.get(key);
171
+ if (!node) return;
172
+
173
+ this.removeNode(node);
174
+ this.cache.delete(key);
175
+ }
176
+
177
+ /**
178
+ * Clear all L1 cache
179
+ */
180
+ clear(): void {
181
+ this.cache.clear();
182
+ this.head = null;
183
+ this.tail = null;
184
+ }
185
+
186
+ /**
187
+ * Get L1 cache stats
188
+ */
189
+ stats() {
190
+ return {
191
+ size: this.cache.size,
192
+ capacity: this.capacity,
193
+ hits: this.hits,
194
+ misses: this.misses,
195
+ hitRate:
196
+ this.hits + this.misses > 0
197
+ ? ((this.hits / (this.hits + this.misses)) * 100).toFixed(2) + "%"
198
+ : "0%",
199
+ };
200
+ }
201
+
202
+ private moveToFront(node: LRUNode): void {
203
+ if (node === this.head) return; // Already at front
204
+
205
+ this.removeNode(node);
206
+
207
+ // Add to front
208
+ node.prev = null;
209
+ node.next = this.head;
210
+ if (this.head) {
211
+ this.head.prev = node;
212
+ }
213
+ this.head = node;
214
+ if (!this.tail) {
215
+ this.tail = node;
216
+ }
217
+ }
218
+
219
+ private removeNode(node: LRUNode): void {
220
+ if (node.prev) {
221
+ node.prev.next = node.next;
222
+ } else {
223
+ this.head = node.next;
224
+ }
225
+
226
+ if (node.next) {
227
+ node.next.prev = node.prev;
228
+ } else {
229
+ this.tail = node.prev;
230
+ }
231
+ }
232
+
233
+ private evictLRU(): void {
234
+ if (!this.tail) return;
235
+
236
+ const key = this.tail.key;
237
+ this.removeNode(this.tail);
238
+ this.cache.delete(key);
239
+ }
240
+ }
241
+
242
+ /**
243
+ * Two-Tier Response Cache
244
+ *
245
+ * L1: In-Memory LRU Cache (~0.001ms access) - hot data
246
+ * L2: Disk Cache with gzip (~5ms access) - persistence
247
+ *
248
+ * Read path: L1 → L2 → Network
249
+ * Write path: L1 + L2 (write-through)
250
+ */
13
251
  export class ResponseCache {
252
+ private l1: L1MemoryCache;
14
253
  private cacheDir: string;
15
- private maxSize: number;
254
+ private maxDiskSize: number;
16
255
 
17
- constructor(maxSize = 100, cacheDir = ".cache/jiren") {
18
- this.maxSize = maxSize;
256
+ constructor(l1Capacity = 100, cacheDir = ".cache/jiren", maxDiskSize = 100) {
257
+ this.l1 = new L1MemoryCache(l1Capacity);
258
+ this.maxDiskSize = maxDiskSize;
19
259
  this.cacheDir = cacheDir;
20
260
 
21
261
  // Create cache directory if it doesn't exist
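
A behavioural sketch of `L1MemoryCache`'s LRU eviction (the class is module-internal, so this assumes module or test scope, and the cache entries are stubbed):

```typescript
// Sketch: LRU semantics of L1MemoryCache. Entries are stubbed for brevity.
const stub = (): CacheEntry => ({
  response: {} as JirenResponse, // placeholder; a real JirenResponse in practice
  timestamp: Date.now(),
  ttl: 60_000,
});

const l1 = new L1MemoryCache(2);
l1.set("a", stub());
l1.set("b", stub());
l1.get("a");              // hit: "a" becomes most recently used, "b" is now LRU
l1.set("c", stub());      // over capacity: "b" is evicted
console.log(l1.get("b")); // null (miss)
console.log(l1.stats());  // { size: 2, capacity: 2, hits: 1, misses: 1, hitRate: "50.00%" }
```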
@@ -45,10 +285,58 @@ export class ResponseCache {
45
285
  }
46
286
 
47
287
  /**
48
- * Get cached response if valid
288
+ * Preload L2 disk cache entry into L1 memory cache.
289
+ * Call this during initialization to ensure the first request hits L1.
290
+ * @param url - Base URL to preload
291
+ * @param path - Optional path
292
+ * @param options - Optional request options
293
+ * @returns true if entry was preloaded, false if not found/expired
294
+ */
295
+ preloadL1(url: string, path?: string, options?: any): boolean {
296
+ const key = this.generateKey(url, path, options);
297
+
298
+ // Check if already in L1
299
+ if (this.l1.get(key)) {
300
+ return true;
301
+ }
302
+
303
+ // Try to load from L2 disk
304
+ const l2Entry = this.getFromDisk(key);
305
+ if (l2Entry) {
306
+ this.l1.set(key, l2Entry);
307
+ return true;
308
+ }
309
+
310
+ return false;
311
+ }
312
+
313
+ /**
314
+ * Get cached response - checks L1 first, then L2
49
315
  */
50
316
  get(url: string, path?: string, options?: any): JirenResponse | null {
51
317
  const key = this.generateKey(url, path, options);
318
+
319
+ // L1: Check in-memory cache first (~0.001ms)
320
+ const l1Entry = this.l1.get(key);
321
+ if (l1Entry) {
322
+ return l1Entry.response;
323
+ }
324
+
325
+ // L2: Check disk cache (~5ms)
326
+ const l2Entry = this.getFromDisk(key);
327
+ if (l2Entry) {
328
+ // Promote to L1 for faster future access
329
+ this.l1.set(key, l2Entry);
330
+ return l2Entry.response;
331
+ }
332
+
333
+ return null;
334
+ }
335
+
336
+ /**
337
+ * Get from L2 disk cache
338
+ */
339
+ private getFromDisk(key: string): CacheEntry | null {
52
340
  const filePath = this.getCacheFilePath(key);
53
341
 
54
342
  if (!existsSync(filePath)) return null;
@@ -60,30 +348,46 @@ export class ResponseCache {
60
348
  // Decompress
61
349
  const decompressed = gunzipSync(compressed);
62
350
  const data = decompressed.toString("utf-8");
63
- const entry: CacheEntry = JSON.parse(data);
351
+ const serialized: SerializableCacheEntry = JSON.parse(data);
64
352
 
65
353
  // Check if expired
66
354
  const now = Date.now();
67
- if (now - entry.timestamp > entry.ttl) {
355
+ if (now - serialized.timestamp > serialized.ttl) {
68
356
  // Delete expired cache file
69
357
  try {
70
- require("fs").unlinkSync(filePath);
358
+ unlinkSync(filePath);
71
359
  } catch {}
72
360
  return null;
73
361
  }
74
362
 
75
- return entry.response;
363
+ // Reconstruct the response with working body methods
364
+ const response: JirenResponse = {
365
+ status: serialized.status,
366
+ statusText: serialized.statusText,
367
+ headers: serialized.headers,
368
+ url: serialized.url,
369
+ ok: serialized.ok,
370
+ redirected: serialized.redirected,
371
+ type: serialized.type || "default",
372
+ body: reconstructBody(serialized.bodyData, serialized.bodyIsText),
373
+ };
374
+
375
+ return {
376
+ response,
377
+ timestamp: serialized.timestamp,
378
+ ttl: serialized.ttl,
379
+ };
76
380
  } catch (error) {
77
381
  // Invalid cache file, delete it
78
382
  try {
79
- require("fs").unlinkSync(filePath);
383
+ unlinkSync(filePath);
80
384
  } catch {}
81
385
  return null;
82
386
  }
83
387
  }
84
388
 
85
389
  /**
86
- * Store response in cache as compressed JSON file
390
+ * Store response in both L1 and L2 (write-through)
87
391
  */
88
392
  set(
89
393
  url: string,
@@ -93,7 +397,6 @@ export class ResponseCache {
93
397
  options?: any
94
398
  ): void {
95
399
  const key = this.generateKey(url, path, options);
96
- const filePath = this.getCacheFilePath(key);
97
400
 
98
401
  const entry: CacheEntry = {
99
402
  response,
@@ -101,9 +404,59 @@ export class ResponseCache {
101
404
  ttl,
102
405
  };
103
406
 
407
+ // L1: Store in memory (~0.001ms)
408
+ this.l1.set(key, entry);
409
+
410
+ // L2: Persist to disk (fire-and-forget async write)
411
+ this.setToDisk(key, entry);
412
+ }
413
+
414
+ /**
415
+ * Write to L2 disk cache
416
+ */
417
+ private async setToDisk(key: string, entry: CacheEntry): Promise<void> {
418
+ const filePath = this.getCacheFilePath(key);
419
+
104
420
  try {
421
+ // Extract body text for serialization
422
+ let bodyData: string;
423
+ let bodyIsText = true;
424
+
425
+ try {
426
+ bodyData = await entry.response.body.text();
427
+ } catch {
428
+ // If text fails, try arrayBuffer and convert to base64
429
+ try {
430
+ const buffer = await entry.response.body.arrayBuffer();
431
+ const bytes = new Uint8Array(buffer);
432
+ let binary = "";
433
+ for (let i = 0; i < bytes.length; i++) {
434
+ binary += String.fromCharCode(bytes[i]!);
435
+ }
436
+ bodyData = btoa(binary);
437
+ bodyIsText = false;
438
+ } catch {
439
+ bodyData = "";
440
+ }
441
+ }
442
+
443
+ // Create serializable entry
444
+ const serialized: SerializableCacheEntry = {
445
+ status: entry.response.status,
446
+ statusText: entry.response.statusText,
447
+ headers: entry.response.headers as Record<string, string>,
448
+ url: entry.response.url,
449
+ ok: entry.response.ok,
450
+ redirected: entry.response.redirected,
451
+ type: entry.response.type || "default",
452
+ bodyData,
453
+ bodyIsText,
454
+ timestamp: entry.timestamp,
455
+ ttl: entry.ttl,
456
+ };
457
+
105
458
  // Convert to JSON
106
- const json = JSON.stringify(entry);
459
+ const json = JSON.stringify(serialized);
107
460
 
108
461
  // Compress with gzip
109
462
  const compressed = gzipSync(json);
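
The base64 branch above pairs with `reconstructBody` defined earlier in this file; a small round-trip sketch (module scope assumed) showing that a binary body survives the encoding:

```typescript
// Sketch: encode a binary body the way setToDisk does, then rebuild it.
const original = new Uint8Array([0x89, 0x50, 0x4e, 0x47]); // e.g. a PNG signature prefix

let binary = "";
for (let i = 0; i < original.length; i++) binary += String.fromCharCode(original[i]!);
const bodyData = btoa(binary); // stored with bodyIsText = false

const body = reconstructBody(bodyData, false);
const restored = new Uint8Array(await body.arrayBuffer());
console.log(restored); // Uint8Array [ 137, 80, 78, 71 ]
```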
@@ -120,26 +473,29 @@ export class ResponseCache {
120
473
  * Clear cache for a specific URL or all
121
474
  */
122
475
  clear(url?: string): void {
476
+ // Clear L1
477
+ this.l1.clear();
478
+
479
+ // Clear L2
123
480
  if (url) {
124
481
  // Clear all cache files for this URL
125
482
  // This is approximate since we hash the keys
126
483
  // For now, just clear all to be safe
127
- this.clearAll();
484
+ this.clearAllDisk();
128
485
  } else {
129
- this.clearAll();
486
+ this.clearAllDisk();
130
487
  }
131
488
  }
132
489
 
133
490
  /**
134
- * Clear all cache files
491
+ * Clear all L2 disk cache files
135
492
  */
136
- private clearAll(): void {
493
+ private clearAllDisk(): void {
137
494
  try {
138
- const fs = require("fs");
139
- const files = fs.readdirSync(this.cacheDir);
495
+ const files = readdirSync(this.cacheDir);
140
496
  for (const file of files) {
141
497
  if (file.endsWith(".json.gz")) {
142
- fs.unlinkSync(join(this.cacheDir, file));
498
+ unlinkSync(join(this.cacheDir, file));
143
499
  }
144
500
  }
145
501
  } catch (error) {
@@ -148,34 +504,35 @@ export class ResponseCache {
148
504
  }
149
505
 
150
506
  /**
151
- * Get cache statistics
507
+ * Get combined cache statistics
152
508
  */
153
509
  stats() {
510
+ const l1Stats = this.l1.stats();
511
+
512
+ // L2 disk stats
513
+ let diskSize = 0;
514
+ let diskFiles = 0;
515
+ let totalDiskBytes = 0;
516
+
154
517
  try {
155
- const fs = require("fs");
156
- const files = fs.readdirSync(this.cacheDir);
518
+ const files = readdirSync(this.cacheDir);
157
519
  const cacheFiles = files.filter((f: string) => f.endsWith(".json.gz"));
520
+ diskFiles = cacheFiles.length;
158
521
 
159
- // Calculate total size
160
- let totalSize = 0;
161
522
  for (const file of cacheFiles) {
162
- const stats = fs.statSync(join(this.cacheDir, file));
163
- totalSize += stats.size;
523
+ const stats = statSync(join(this.cacheDir, file));
524
+ totalDiskBytes += stats.size;
164
525
  }
526
+ } catch {}
165
527
 
166
- return {
167
- size: cacheFiles.length,
168
- maxSize: this.maxSize,
169
- cacheDir: this.cacheDir,
170
- totalSizeKB: (totalSize / 1024).toFixed(2),
171
- };
172
- } catch {
173
- return {
174
- size: 0,
175
- maxSize: this.maxSize,
528
+ return {
529
+ l1: l1Stats,
530
+ l2: {
531
+ size: diskFiles,
532
+ maxSize: this.maxDiskSize,
176
533
  cacheDir: this.cacheDir,
177
- totalSizeKB: "0",
178
- };
179
- }
534
+ totalSizeKB: (totalDiskBytes / 1024).toFixed(2),
535
+ },
536
+ };
180
537
  }
181
538
  }
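
Putting the two tiers together, a usage sketch of `ResponseCache` (import path omitted because the diff does not name the file; `response` stands for a `JirenResponse` obtained from an earlier request, and the URL is illustrative):

```typescript
// Sketch: write-through set, L1-first get, optional preload, combined stats.
// ResponseCache and JirenResponse come from the cache module shown above.
declare const response: JirenResponse; // a previously fetched response (not shown)

const cache = new ResponseCache(200, ".cache/jiren");

cache.set("https://api.example.com/users", response, 60_000); // writes L1 + L2

const hit = cache.get("https://api.example.com/users"); // L1 hit (~0.001ms)
if (hit) console.log(await hit.body.json());

// On a fresh process, pull a still-valid L2 entry into L1 before the first request
cache.preloadL1("https://api.example.com/users");

console.log(cache.stats());
// { l1: { size, capacity, hits, misses, hitRate }, l2: { size, maxSize, cacheDir, totalSizeKB } }
```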
@@ -77,6 +77,7 @@ export class JirenClient<
77
77
  private urlMap: Map<string, string> = new Map();
78
78
  private cacheConfig: Map<string, { enabled: boolean; ttl: number }> =
79
79
  new Map();
80
+ private antibotConfig: Map<string, boolean> = new Map();
80
81
  private cache: ResponseCache;
81
82
 
82
83
  /** Type-safe URL accessor for warmed-up URLs */
@@ -117,6 +118,11 @@ export class JirenClient<
117
118
  : { enabled: true, ttl: config.cache.ttl || 60000 };
118
119
  this.cacheConfig.set(config.key, cacheConfig);
119
120
  }
121
+
122
+ // Store antibot config
123
+ if (config.antibot) {
124
+ this.antibotConfig.set(config.key, true);
125
+ }
120
126
  }
121
127
  }
122
128
  } else {
@@ -139,6 +145,11 @@ export class JirenClient<
139
145
  : { enabled: true, ttl: urlConfig.cache.ttl || 60000 };
140
146
  this.cacheConfig.set(key, cacheConfig);
141
147
  }
148
+
149
+ // Store antibot config
150
+ if ((urlConfig as { antibot?: boolean }).antibot) {
151
+ this.antibotConfig.set(key, true);
152
+ }
142
153
  }
143
154
  }
144
155
  }
@@ -146,12 +157,30 @@ export class JirenClient<
146
157
  if (urls.length > 0) {
147
158
  this.warmup(urls);
148
159
  }
160
+
161
+ // Preload L2 disk cache entries into L1 memory for cached endpoints
162
+ // This ensures the user's first request hits L1 (~0.001ms) instead of L2 (~5ms)
163
+ for (const [key, config] of this.cacheConfig.entries()) {
164
+ if (config.enabled) {
165
+ const url = this.urlMap.get(key);
166
+ if (url) {
167
+ this.cache.preloadL1(url);
168
+ }
169
+ }
170
+ }
149
171
  }
150
172
 
151
173
  // Create proxy for type-safe URL access
152
174
  this.url = this.createUrlAccessor();
153
175
  }
154
176
 
177
+ /**
178
+ * Wait for warmup to complete
179
+ */
180
+ public async waitForWarmup(): Promise<void> {
181
+ // Native warmup is synchronous, so this is effectively a no-op
182
+ }
183
+
155
184
  /**
156
185
  * Creates a proxy-based URL accessor for type-safe access.
157
186
  */
@@ -180,6 +209,10 @@ export class JirenClient<
180
209
  ): Promise<JirenResponse<R> | R | string | ArrayBuffer | Blob> => {
181
210
  const cacheConfig = self.cacheConfig.get(prop);
182
211
 
212
+ // Check if antibot is enabled for this URL (from warmup config or per-request)
213
+ const useAntibot =
214
+ options?.antibot ?? self.antibotConfig.get(prop) ?? false;
215
+
183
216
  if (cacheConfig?.enabled) {
184
217
  const cached = self.cache.get(baseUrl, options?.path, options);
185
218
  if (cached) {
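
The `useAntibot` line above gives per-request options precedence over the warmup-time default. A usage sketch (the URL is illustrative; the `protected` key mirrors the README example):

```typescript
// Sketch: antibot precedence (per-request option, then warmup config, then false).
import { JirenClient } from "jiren";

const client = new JirenClient({
  warmup: {
    protected: { url: "https://protected.example.com", antibot: true },
  },
});

await client.url.protected.get();                   // antibot: true (from warmup config)
await client.url.protected.get({ antibot: false }); // per-request override wins
```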
@@ -195,7 +228,7 @@ export class JirenClient<
195
228
  headers: options?.headers,
196
229
  maxRedirects: options?.maxRedirects,
197
230
  responseType: options?.responseType,
198
- antibot: options?.antibot,
231
+ antibot: useAntibot,
199
232
  }
200
233
  );
201
234
 
@@ -58,6 +58,7 @@ export class JirenClient<
58
58
  private urlMap: Map<string, string> = new Map();
59
59
  private cacheConfig: Map<string, { enabled: boolean; ttl: number }> =
60
60
  new Map();
61
+ private antibotConfig: Map<string, boolean> = new Map();
61
62
  private cache: ResponseCache;
62
63
 
63
64
  /** Type-safe URL accessor for warmed-up URLs */
@@ -90,6 +91,11 @@ export class JirenClient<
90
91
  : { enabled: true, ttl: config.cache.ttl || 60000 };
91
92
  this.cacheConfig.set(config.key, cacheConfig);
92
93
  }
94
+
95
+ // Store antibot config
96
+ if (config.antibot) {
97
+ this.antibotConfig.set(config.key, true);
98
+ }
93
99
  }
94
100
  }
95
101
  } else {
@@ -112,6 +118,11 @@ export class JirenClient<
112
118
  : { enabled: true, ttl: urlConfig.cache.ttl || 60000 };
113
119
  this.cacheConfig.set(key, cacheConfig);
114
120
  }
121
+
122
+ // Store antibot config
123
+ if ((urlConfig as { antibot?: boolean }).antibot) {
124
+ this.antibotConfig.set(key, true);
125
+ }
115
126
  }
116
127
  }
117
128
  }
@@ -119,12 +130,30 @@ export class JirenClient<
119
130
  if (urls.length > 0) {
120
131
  this.warmup(urls);
121
132
  }
133
+
134
+ // Preload L2 disk cache entries into L1 memory for cached endpoints
135
+ // This ensures the user's first request hits L1 (~0.001ms) instead of L2 (~5ms)
136
+ for (const [key, config] of this.cacheConfig.entries()) {
137
+ if (config.enabled) {
138
+ const url = this.urlMap.get(key);
139
+ if (url) {
140
+ this.cache.preloadL1(url);
141
+ }
142
+ }
143
+ }
122
144
  }
123
145
 
124
146
  // Create proxy for type-safe URL access
125
147
  this.url = this.createUrlAccessor();
126
148
  }
127
149
 
150
+ /**
151
+ * Wait for warmup to complete (no-op for fetch client)
152
+ */
153
+ public async waitForWarmup(): Promise<void> {
154
+ // No-op - fetch client warmup is fire-and-forget
155
+ }
156
+
128
157
  /**
129
158
  * Creates a proxy-based URL accessor for type-safe access.
130
159
  */
@@ -153,6 +182,10 @@ export class JirenClient<
153
182
  ): Promise<JirenResponse<R> | R | string | ArrayBuffer | Blob> => {
154
183
  const cacheConfig = self.cacheConfig.get(prop);
155
184
 
185
+ // Check if antibot is enabled for this URL (from warmup config or per-request)
186
+ const useAntibot =
187
+ options?.antibot ?? self.antibotConfig.get(prop) ?? false;
188
+
156
189
  if (cacheConfig?.enabled) {
157
190
  const cached = self.cache.get(baseUrl, options?.path, options);
158
191
  if (cached) return cached as any;
@@ -166,7 +199,7 @@ export class JirenClient<
166
199
  headers: options?.headers,
167
200
  maxRedirects: options?.maxRedirects,
168
201
  responseType: options?.responseType,
169
- antibot: options?.antibot,
202
+ antibot: useAntibot,
170
203
  }
171
204
  );
172
205
 
@@ -10,6 +10,12 @@ import type {
10
10
  UrlEndpoint,
11
11
  CacheConfig,
12
12
  RetryConfig,
13
+ Interceptors,
14
+ RequestInterceptor,
15
+ ResponseInterceptor,
16
+ ErrorInterceptor,
17
+ InterceptorRequestContext,
18
+ InterceptorResponseContext,
13
19
  } from "./types";
14
20
 
15
21
  const STATUS_TEXT: Record<number, string> = {
@@ -27,8 +33,10 @@ const STATUS_TEXT: Record<number, string> = {
27
33
  503: "Service Unavailable",
28
34
  };
29
35
 
30
- /** URL configuration with optional cache */
31
- export type UrlConfig = string | { url: string; cache?: boolean | CacheConfig };
36
+ /** URL configuration with optional cache and antibot */
37
+ export type UrlConfig =
38
+ | string
39
+ | { url: string; cache?: boolean | CacheConfig; antibot?: boolean };
32
40
 
33
41
  /** Options for JirenClient constructor */
34
42
  export interface JirenClientOptions<
@@ -44,6 +52,9 @@ export interface JirenClientOptions<
44
52
 
45
53
  /** Global retry configuration */
46
54
  retry?: number | RetryConfig;
55
+
56
+ /** Request/response interceptors */
57
+ interceptors?: Interceptors;
47
58
  }
48
59
 
49
60
  /** Helper to extract keys from Warmup Config */
@@ -96,9 +107,15 @@ export class JirenClient<
96
107
  private urlMap: Map<string, string> = new Map();
97
108
  private cacheConfig: Map<string, { enabled: boolean; ttl: number }> =
98
109
  new Map();
110
+ private antibotConfig: Map<string, boolean> = new Map();
99
111
  private cache: ResponseCache;
100
112
  private inflightRequests: Map<string, Promise<any>> = new Map();
101
113
  private globalRetry?: RetryConfig;
114
+ private requestInterceptors: RequestInterceptor[] = [];
115
+ private responseInterceptors: ResponseInterceptor[] = [];
116
+ private errorInterceptors: ErrorInterceptor[] = [];
117
+ private warmupPromise: Promise<void> | null = null;
118
+ private warmupComplete: Set<string> = new Set();
102
119
 
103
120
  /** Type-safe URL accessor for warmed-up URLs */
104
121
  public readonly url: UrlAccessor<T>;
@@ -138,6 +155,11 @@ export class JirenClient<
138
155
  : { enabled: true, ttl: config.cache.ttl || 60000 };
139
156
  this.cacheConfig.set(config.key, cacheConfig);
140
157
  }
158
+
159
+ // Store antibot config
160
+ if (config.antibot) {
161
+ this.antibotConfig.set(config.key, true);
162
+ }
141
163
  }
142
164
  }
143
165
  } else {
@@ -160,12 +182,31 @@ export class JirenClient<
160
182
  : { enabled: true, ttl: urlConfig.cache.ttl || 60000 };
161
183
  this.cacheConfig.set(key, cacheConfig);
162
184
  }
185
+
186
+ // Store antibot config
187
+ if ((urlConfig as { antibot?: boolean }).antibot) {
188
+ this.antibotConfig.set(key, true);
189
+ }
163
190
  }
164
191
  }
165
192
  }
166
193
 
167
194
  if (urls.length > 0) {
168
- this.warmup(urls);
195
+ // Warm up lazily in the background (always; avoids blocking the constructor)
196
+ this.warmupPromise = this.warmup(urls).then(() => {
197
+ urls.forEach((url) => this.warmupComplete.add(url));
198
+ });
199
+ }
200
+
201
+ // Preload L2 disk cache entries into L1 memory for cached endpoints
202
+ // This ensures the user's first request hits L1 (~0.001ms) instead of L2 (~5ms)
203
+ for (const [key, config] of this.cacheConfig.entries()) {
204
+ if (config.enabled) {
205
+ const url = this.urlMap.get(key);
206
+ if (url) {
207
+ this.cache.preloadL1(url);
208
+ }
209
+ }
169
210
  }
170
211
  }
171
212
 
@@ -179,12 +220,27 @@ export class JirenClient<
179
220
  ? { count: options.retry, delay: 100, backoff: 2 }
180
221
  : options.retry;
181
222
  }
223
+
224
+ // Initialize interceptors
225
+ if (options?.interceptors) {
226
+ this.requestInterceptors = options.interceptors.request || [];
227
+ this.responseInterceptors = options.interceptors.response || [];
228
+ this.errorInterceptors = options.interceptors.error || [];
229
+ }
182
230
  }
183
231
 
184
232
  private async waitFor(ms: number) {
185
233
  return new Promise((resolve) => setTimeout(resolve, ms));
186
234
  }
187
235
 
236
+ /**
237
+ * Wait for lazy warmup to complete.
238
+ * Only needed when you want to be sure the background warmup has finished before the first request.
239
+ */
240
+ public async waitForWarmup(): Promise<void> {
241
+ if (this.warmupPromise) await this.warmupPromise;
242
+ }
243
+
188
244
  /**
189
245
  * Creates a proxy-based URL accessor for type-safe access to warmed-up URLs.
190
246
  */
@@ -213,9 +269,18 @@ export class JirenClient<
213
269
  get: async <R = any>(
214
270
  options?: UrlRequestOptions
215
271
  ): Promise<JirenResponse<R> | R | string | ArrayBuffer | Blob> => {
272
+ // Wait for warmup to complete if not yet done
273
+ if (self.warmupPromise && !self.warmupComplete.has(baseUrl)) {
274
+ await self.warmupPromise;
275
+ }
276
+
216
277
  // Check if caching is enabled for this URL
217
278
  const cacheConfig = self.cacheConfig.get(prop);
218
279
 
280
+ // Check if antibot is enabled for this URL (from warmup config or per-request)
281
+ const useAntibot =
282
+ options?.antibot ?? self.antibotConfig.get(prop) ?? false;
283
+
219
284
  if (cacheConfig?.enabled) {
220
285
  // Try to get from cache
221
286
  const cached = self.cache.get(baseUrl, options?.path, options);
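
Because the accessor above awaits the background warmup before the first request, callers do not have to wait explicitly; `waitForWarmup()` is there for when you want the handshakes finished up front. A sketch (URL illustrative):

```typescript
// Sketch: explicitly waiting for warmup before issuing the first request.
import { JirenClient } from "jiren";

const client = new JirenClient({ warmup: { api: "https://api.example.com" } });

await client.waitForWarmup(); // resolves once the background DNS/QUIC warmup is done
const users = await client.url.api.get({ responseType: "json" });
console.log(users);
```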
@@ -247,7 +312,7 @@ export class JirenClient<
247
312
  headers: options?.headers,
248
313
  maxRedirects: options?.maxRedirects,
249
314
  responseType: options?.responseType,
250
- antibot: options?.antibot,
315
+ antibot: useAntibot,
251
316
  }
252
317
  );
253
318
 
@@ -409,6 +474,20 @@ export class JirenClient<
409
474
  }
410
475
  }
411
476
 
477
+ /**
478
+ * Register interceptors dynamically.
479
+ * @param interceptors - Interceptor configuration to add
480
+ * @returns this for chaining
481
+ */
482
+ public use(interceptors: Interceptors): this {
483
+ if (interceptors.request)
484
+ this.requestInterceptors.push(...interceptors.request);
485
+ if (interceptors.response)
486
+ this.responseInterceptors.push(...interceptors.response);
487
+ if (interceptors.error) this.errorInterceptors.push(...interceptors.error);
488
+ return this;
489
+ }
490
+
412
491
  /**
413
492
  * Warm up connections to URLs (DNS resolve + QUIC handshake) in parallel.
414
493
  * Call this early (e.g., at app startup) so subsequent requests are fast.
@@ -510,6 +589,20 @@ export class JirenClient<
510
589
  }
511
590
  }
512
591
 
592
+ // Build interceptor request context
593
+ let ctx: InterceptorRequestContext = { method, url, headers, body };
594
+
595
+ // Run request interceptors
596
+ for (const interceptor of this.requestInterceptors) {
597
+ ctx = await interceptor(ctx);
598
+ }
599
+
600
+ // Apply interceptor modifications
601
+ method = ctx.method;
602
+ url = ctx.url;
603
+ headers = ctx.headers;
604
+ body = ctx.body ?? null;
605
+
513
606
  const methodBuffer = Buffer.from(method + "\0");
514
607
  const urlBuffer = Buffer.from(url + "\0");
515
608
 
@@ -619,6 +712,16 @@ export class JirenClient<
619
712
 
620
713
  const response = this.parseResponse<T>(respPtr, url);
621
714
 
715
+ // Run response interceptors
716
+ let responseCtx: InterceptorResponseContext<T> = {
717
+ request: ctx,
718
+ response,
719
+ };
720
+ for (const interceptor of this.responseInterceptors) {
721
+ responseCtx = await interceptor(responseCtx);
722
+ }
723
+ const finalResponse = responseCtx.response;
724
+
622
725
  // Optional: Retry on specific status codes (e.g., 500, 502, 503, 504)
623
726
  // For now, we only retry on actual exceptions/network failures (null ptr)
624
727
  // or if we decide to throw on 5xx here.
@@ -626,15 +729,19 @@ export class JirenClient<
626
729
 
627
730
  // Auto-parse if requested
628
731
  if (responseType) {
629
- if (responseType === "json") return response.body.json();
630
- if (responseType === "text") return response.body.text();
732
+ if (responseType === "json") return finalResponse.body.json();
733
+ if (responseType === "text") return finalResponse.body.text();
631
734
  if (responseType === "arraybuffer")
632
- return response.body.arrayBuffer();
633
- if (responseType === "blob") return response.body.blob();
735
+ return finalResponse.body.arrayBuffer();
736
+ if (responseType === "blob") return finalResponse.body.blob();
634
737
  }
635
738
 
636
- return response;
739
+ return finalResponse;
637
740
  } catch (err) {
741
+ // Run error interceptors
742
+ for (const interceptor of this.errorInterceptors) {
743
+ await interceptor(err as Error, ctx);
744
+ }
638
745
  lastError = err;
639
746
  if (attempts < maxAttempts) {
640
747
  // Wait before retrying
@@ -697,6 +804,27 @@ export class JirenClient<
697
804
  if (len > 0 && bodyPtr) {
698
805
  // Create a copy of the buffer because the native response is freed immediately after
699
806
  buffer = toArrayBuffer(bodyPtr, 0, len).slice(0);
807
+
808
+ // Handle GZIP decompression if needed
809
+ const bufferView = new Uint8Array(buffer);
810
+ // Check for gzip magic bytes (0x1f 0x8b)
811
+ if (
812
+ bufferView.length >= 2 &&
813
+ bufferView[0] === 0x1f &&
814
+ bufferView[1] === 0x8b
815
+ ) {
816
+ try {
817
+ // Use Bun's built-in gzip decompression
818
+ const decompressed = Bun.gunzipSync(bufferView);
819
+ buffer = decompressed.buffer.slice(
820
+ decompressed.byteOffset,
821
+ decompressed.byteOffset + decompressed.byteLength
822
+ );
823
+ } catch (e) {
824
+ // Decompression failed, keep original buffer
825
+ console.warn("[Jiren] gzip decompression failed:", e);
826
+ }
827
+ }
700
828
  }
701
829
 
702
830
  let bodyUsed = false;
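
The decompression guard above keys off the gzip magic bytes; a standalone sketch of the same check using Bun's gzip helpers (as the client does):

```typescript
// Sketch: detect a gzip payload by its magic bytes (0x1f 0x8b) before inflating.
function looksGzipped(buf: Uint8Array): boolean {
  return buf.length >= 2 && buf[0] === 0x1f && buf[1] === 0x8b;
}

const raw = Bun.gzipSync(new TextEncoder().encode('{"hello":"world"}'));
const plain = looksGzipped(raw) ? Bun.gunzipSync(raw) : raw;
console.log(new TextDecoder().decode(plain)); // {"hello":"world"}
```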
@@ -20,6 +20,12 @@ export type {
20
20
  UrlEndpoint,
21
21
  JirenResponse,
22
22
  JirenResponseBody,
23
+ Interceptors,
24
+ RequestInterceptor,
25
+ ResponseInterceptor,
26
+ ErrorInterceptor,
27
+ InterceptorRequestContext,
28
+ InterceptorResponseContext,
23
29
  } from "./types";
24
30
 
25
31
  // Remove broken exports
@@ -0,0 +1,181 @@
1
+ import { toArrayBuffer, type Pointer } from "bun:ffi";
2
+ import { lib } from "./native";
3
+ import type { JirenResponse, JirenResponseBody } from "./types";
4
+
5
+ /**
6
+ * Native Zig Cache wrapper
7
+ * Uses native HashMap for L1 (~0.001ms) and gzip disk storage for L2 (~2-5ms)
8
+ */
9
+ export class NativeCache {
10
+ private ptr: Pointer | null;
11
+
12
+ constructor(l1Capacity = 100) {
13
+ this.ptr = lib.symbols.zcache_new(l1Capacity) as Pointer;
14
+ if (!this.ptr) throw new Error("Failed to create native cache");
15
+ }
16
+
17
+ /**
18
+ * Get cached response by key
19
+ */
20
+ get(url: string, path?: string, options?: any): JirenResponse | null {
21
+ if (!this.ptr) return null;
22
+
23
+ const key = this.generateKey(url, path, options);
24
+ const keyBuffer = Buffer.from(key + "\0");
25
+
26
+ const entryPtr = lib.symbols.zcache_get(this.ptr, keyBuffer) as Pointer;
27
+ if (!entryPtr) return null;
28
+
29
+ try {
30
+ // Read ZCacheEntry struct:
31
+ // The struct layout in memory (with alignment):
32
+ // u16 status (offset 0, 2 bytes)
33
+ // [6 bytes padding]
34
+ // ptr headers_ptr (offset 8, 8 bytes)
35
+ // usize headers_len (offset 16, 8 bytes)
36
+ // ptr body_ptr (offset 24, 8 bytes)
37
+ // usize body_len (offset 32, 8 bytes)
38
+
39
+ // Create a view of the entry struct
40
+ const entryBytes = toArrayBuffer(entryPtr, 0, 40);
41
+ const entryView = new DataView(entryBytes);
42
+ const status = entryView.getUint16(0, true);
43
+
44
+ // Read the length fields (offsets 16 and 32) as numbers
45
+ const headersLen = Number(entryView.getBigUint64(16, true));
46
+ const bodyLen = Number(entryView.getBigUint64(32, true));
47
+
48
+ // For now, create minimal headers and body
49
+ // The data is stored in native memory - reading pointers across FFI is complex
50
+ // For now we return empty placeholder data; the native buffers are not copied yet
51
+
52
+ // Create an empty but valid response
53
+ const headers: Record<string, string> = {};
54
+
55
+ // Create body with empty data for now
56
+ // In a full implementation, we'd need to properly read from native pointers
57
+ const bodyBuffer = new ArrayBuffer(0);
58
+
59
+ // Reconstruct body methods
60
+ const body: JirenResponseBody = {
61
+ bodyUsed: false,
62
+ text: async () => "",
63
+ json: async <R>() => ({} as R),
64
+ arrayBuffer: async () => bodyBuffer,
65
+ blob: async () => new Blob([bodyBuffer]),
66
+ };
67
+
68
+ return {
69
+ status,
70
+ statusText: status === 200 ? "OK" : String(status),
71
+ headers,
72
+ url,
73
+ ok: status >= 200 && status < 300,
74
+ redirected: false,
75
+ type: "default",
76
+ body,
77
+ };
78
+ } finally {
79
+ lib.symbols.zcache_entry_free(entryPtr);
80
+ }
81
+ }
82
+
83
+ /**
84
+ * Set response in cache
85
+ */
86
+ set(
87
+ url: string,
88
+ response: JirenResponse,
89
+ ttl: number,
90
+ path?: string,
91
+ options?: any
92
+ ): void {
93
+ if (!this.ptr) return;
94
+
95
+ const key = this.generateKey(url, path, options);
96
+ const keyBuffer = Buffer.from(key + "\0");
97
+
98
+ // Get body and headers as buffers
99
+ // We need to read body synchronously, so we'll need the original data
100
+ // For cached responses, we should store the raw data
101
+ // This is a simplified version - in production, handle async properly
102
+
103
+ // For now, skip if body is not immediately available
104
+ // The proper solution would be to call this after body is consumed
105
+ }
106
+
107
+ /**
108
+ * Set response with raw data (called after body is available)
109
+ */
110
+ async setWithData(
111
+ url: string,
112
+ status: number,
113
+ headers: string,
114
+ body: string,
115
+ ttl: number,
116
+ path?: string,
117
+ options?: any
118
+ ): Promise<void> {
119
+ if (!this.ptr) return;
120
+
121
+ const key = this.generateKey(url, path, options);
122
+ const keyBuffer = Buffer.from(key + "\0");
123
+
124
+ const headersBuffer = Buffer.from(headers);
125
+ const bodyBuffer = Buffer.from(body);
126
+
127
+ lib.symbols.zcache_set(
128
+ this.ptr,
129
+ keyBuffer,
130
+ status,
131
+ headersBuffer,
132
+ headersBuffer.length,
133
+ bodyBuffer,
134
+ bodyBuffer.length,
135
+ ttl
136
+ );
137
+ }
138
+
139
+ /**
140
+ * Preload L2 disk cache into L1 memory
141
+ */
142
+ preloadL1(url: string, path?: string, options?: any): boolean {
143
+ if (!this.ptr) return false;
144
+
145
+ const key = this.generateKey(url, path, options);
146
+ const keyBuffer = Buffer.from(key + "\0");
147
+
148
+ return lib.symbols.zcache_preload_l1(
149
+ this.ptr,
150
+ keyBuffer
151
+ ) as unknown as boolean;
152
+ }
153
+
154
+ /**
155
+ * Clear all cache
156
+ */
157
+ clear(url?: string): void {
158
+ if (!this.ptr) return;
159
+ lib.symbols.zcache_clear(this.ptr);
160
+ }
161
+
162
+ /**
163
+ * Free native resources
164
+ */
165
+ close(): void {
166
+ if (this.ptr) {
167
+ lib.symbols.zcache_free(this.ptr);
168
+ this.ptr = null;
169
+ }
170
+ }
171
+
172
+ /**
173
+ * Generate cache key from URL and options
174
+ */
175
+ private generateKey(url: string, path?: string, options?: any): string {
176
+ const fullUrl = path ? `${url}${path}` : url;
177
+ const method = options?.method || "GET";
178
+ const headers = JSON.stringify(options?.headers || {});
179
+ return `${method}:${fullUrl}:${headers}`;
180
+ }
181
+ }
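
A lifecycle sketch for `NativeCache` (import path omitted because the diff does not name the file; values are illustrative, and per the comments above `get()` currently returns a placeholder empty body):

```typescript
// Sketch: create, populate via setWithData, read back, and free the native cache.
const cache = new NativeCache(100);

await cache.setWithData(
  "https://api.example.com/users",                          // url
  200,                                                      // status
  JSON.stringify({ "content-type": "application/json" }),   // headers as a JSON string
  '{"users":[]}',                                           // body
  60_000                                                    // ttl (assumed milliseconds, matching the TS cache)
);

const hit = cache.get("https://api.example.com/users");
console.log(hit?.status); // 200 (the body is a placeholder for now, see comments above)

cache.close(); // release the native handle
```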
@@ -85,6 +85,32 @@ export const symbols = {
85
85
  "void *",
86
86
  "bool",
87
87
  ]),
88
+
89
+ // =========================================================================
90
+ // CACHE FFI
91
+ // =========================================================================
92
+
93
+ zcache_new: lib.func("zcache_new", "void *", ["uint64_t"]),
94
+ zcache_free: lib.func("zcache_free", "void", ["void *"]),
95
+ zcache_get: lib.func("zcache_get", "void *", ["void *", "const char *"]),
96
+ zcache_entry_free: lib.func("zcache_entry_free", "void", ["void *"]),
97
+ zcache_set: lib.func("zcache_set", "void", [
98
+ "void *", // cache
99
+ "const char *", // key
100
+ "uint16_t", // status
101
+ "void *", // headers_ptr
102
+ "uint64_t", // headers_len
103
+ "void *", // body_ptr
104
+ "uint64_t", // body_len
105
+ "int64_t", // ttl
106
+ ]),
107
+ zcache_preload_l1: lib.func("zcache_preload_l1", "bool", [
108
+ "void *",
109
+ "const char *",
110
+ ]),
111
+ zcache_clear: lib.func("zcache_clear", "void", ["void *"]),
112
+ zcache_stats: lib.func("zcache_stats", "void *", ["void *"]),
113
+ zcache_stats_free: lib.func("zcache_stats_free", "void", ["void *"]),
88
114
  };
89
115
 
90
116
  // Export a wrapper that matches structure of bun:ffi lib
@@ -81,6 +81,56 @@ export const ffiDef = {
81
81
  args: [FFIType.ptr, FFIType.bool],
82
82
  returns: FFIType.void,
83
83
  },
84
+
85
+ // =========================================================================
86
+ // CACHE FFI
87
+ // =========================================================================
88
+
89
+ zcache_new: {
90
+ args: [FFIType.u64], // l1_capacity
91
+ returns: FFIType.ptr,
92
+ },
93
+ zcache_free: {
94
+ args: [FFIType.ptr],
95
+ returns: FFIType.void,
96
+ },
97
+ zcache_get: {
98
+ args: [FFIType.ptr, FFIType.cstring], // cache, key
99
+ returns: FFIType.ptr, // ZCacheEntry*
100
+ },
101
+ zcache_entry_free: {
102
+ args: [FFIType.ptr],
103
+ returns: FFIType.void,
104
+ },
105
+ zcache_set: {
106
+ args: [
107
+ FFIType.ptr, // cache
108
+ FFIType.cstring, // key
109
+ FFIType.u16, // status
110
+ FFIType.ptr, // headers_ptr
111
+ FFIType.u64, // headers_len
112
+ FFIType.ptr, // body_ptr
113
+ FFIType.u64, // body_len
114
+ FFIType.i64, // ttl
115
+ ],
116
+ returns: FFIType.void,
117
+ },
118
+ zcache_preload_l1: {
119
+ args: [FFIType.ptr, FFIType.cstring], // cache, key
120
+ returns: FFIType.bool,
121
+ },
122
+ zcache_clear: {
123
+ args: [FFIType.ptr],
124
+ returns: FFIType.void,
125
+ },
126
+ zcache_stats: {
127
+ args: [FFIType.ptr],
128
+ returns: FFIType.ptr, // CacheStats*
129
+ },
130
+ zcache_stats_free: {
131
+ args: [FFIType.ptr],
132
+ returns: FFIType.void,
133
+ },
84
134
  } as const;
85
135
 
86
136
  export const lib = dlopen(libPath, ffiDef);
@@ -98,6 +98,8 @@ export interface WarmupUrlConfig {
98
98
  url: string;
99
99
  /** Enable response caching for this URL (default: false) */
100
100
  cache?: boolean | CacheConfig;
101
+ /** Enable anti-bot protection for all requests to this URL (default: false) */
102
+ antibot?: boolean;
101
103
  }
102
104
 
103
105
  /** Cache configuration */
@@ -184,3 +186,41 @@ export interface UrlEndpoint {
184
186
  /** Type helper to extract keys from warmup config array */
185
187
  export type ExtractWarmupKeys<T extends readonly WarmupUrlConfig[]> =
186
188
  T[number]["key"];
189
+
190
+ /** Context passed to request interceptors */
191
+ export interface InterceptorRequestContext {
192
+ method: string;
193
+ url: string;
194
+ headers: Record<string, string>;
195
+ body?: string | null;
196
+ path?: string;
197
+ }
198
+
199
+ /** Context passed to response interceptors */
200
+ export interface InterceptorResponseContext<T = any> {
201
+ request: InterceptorRequestContext;
202
+ response: JirenResponse<T>;
203
+ }
204
+
205
+ /** Request interceptor - can modify request before sending */
206
+ export type RequestInterceptor = (
207
+ ctx: InterceptorRequestContext
208
+ ) => InterceptorRequestContext | Promise<InterceptorRequestContext>;
209
+
210
+ /** Response interceptor - can transform response after receiving */
211
+ export type ResponseInterceptor = <T>(
212
+ ctx: InterceptorResponseContext<T>
213
+ ) => InterceptorResponseContext<T> | Promise<InterceptorResponseContext<T>>;
214
+
215
+ /** Error interceptor - handles errors thrown during request */
216
+ export type ErrorInterceptor = (
217
+ error: Error,
218
+ ctx: InterceptorRequestContext
219
+ ) => void | Promise<void>;
220
+
221
+ /** Interceptor configuration */
222
+ export interface Interceptors {
223
+ request?: RequestInterceptor[];
224
+ response?: ResponseInterceptor[];
225
+ error?: ErrorInterceptor[];
226
+ }
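
Since `index.ts` now re-exports these types, interceptors can be written as standalone, typed values; a sketch (the token placeholder and log format are illustrative):

```typescript
// Sketch: standalone interceptors typed against the exported interfaces.
import type {
  RequestInterceptor,
  ResponseInterceptor,
  ErrorInterceptor,
  Interceptors,
} from "jiren";

const addAuth: RequestInterceptor = (ctx) => ({
  ...ctx,
  headers: { ...ctx.headers, Authorization: "Bearer <token>" }, // token source up to you
});

const logStatus: ResponseInterceptor = (ctx) => {
  console.log(`[${ctx.response.status}] ${ctx.request.url}`);
  return ctx;
};

const reportError: ErrorInterceptor = (error, ctx) => {
  console.error(`[jiren] ${ctx.method} ${ctx.url}:`, error.message);
};

export const interceptors: Interceptors = {
  request: [addAuth],
  response: [logStatus],
  error: [reportError],
};
```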
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "jiren",
3
- "version": "1.4.0",
3
+ "version": "1.4.5",
4
4
  "author": "",
5
5
  "main": "index.ts",
6
6
  "module": "index.ts",