@effect-native/fetch-hooks 0.0.1-placeholder → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +35 -0
- package/dist/binary-extractor.d.ts +9 -0
- package/dist/binary-extractor.d.ts.map +1 -0
- package/dist/binary-extractor.js +85 -0
- package/dist/binary-extractor.js.map +1 -0
- package/dist/cache-manager.d.ts +8 -0
- package/dist/cache-manager.d.ts.map +1 -0
- package/dist/cache-manager.js +408 -0
- package/dist/cache-manager.js.map +1 -0
- package/dist/environment.d.ts +5 -0
- package/dist/environment.d.ts.map +1 -0
- package/dist/environment.js +25 -0
- package/dist/environment.js.map +1 -0
- package/dist/filesystem-storage.d.ts +10 -0
- package/dist/filesystem-storage.d.ts.map +1 -0
- package/dist/filesystem-storage.js +112 -0
- package/dist/filesystem-storage.js.map +1 -0
- package/dist/flat-file-storage.d.ts +33 -0
- package/dist/flat-file-storage.d.ts.map +1 -0
- package/dist/flat-file-storage.js +153 -0
- package/dist/flat-file-storage.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +183 -0
- package/dist/index.js.map +1 -0
- package/dist/request-hasher.d.ts +5 -0
- package/dist/request-hasher.d.ts.map +1 -0
- package/dist/request-hasher.js +74 -0
- package/dist/request-hasher.js.map +1 -0
- package/dist/sse-handler.d.ts +9 -0
- package/dist/sse-handler.d.ts.map +1 -0
- package/dist/sse-handler.js +225 -0
- package/dist/sse-handler.js.map +1 -0
- package/dist/types.d.ts +116 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/package.json +52 -3
- package/src/binary-extractor.ts +104 -0
- package/src/cache-manager.ts +499 -0
- package/src/environment.ts +27 -0
- package/src/filesystem-storage.ts +125 -0
- package/src/flat-file-storage.ts +170 -0
- package/src/index.ts +249 -0
- package/src/request-hasher.ts +86 -0
- package/src/sse-handler.ts +281 -0
- package/src/types.ts +140 -0
|
@@ -0,0 +1,499 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
CachedRequest,
|
|
3
|
+
CachedRequestBody,
|
|
4
|
+
CachedResponseMeta,
|
|
5
|
+
CacheKey,
|
|
6
|
+
CacheStorage,
|
|
7
|
+
GeneratorTransformHook,
|
|
8
|
+
HashableRequest,
|
|
9
|
+
RawCachedRequest,
|
|
10
|
+
StorableResponse,
|
|
11
|
+
TimedChunk,
|
|
12
|
+
TransformHook
|
|
13
|
+
} from "./types.js"
|
|
14
|
+
|
|
15
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"
|
|
16
|
+
import { join } from "node:path"
|
|
17
|
+
import {
|
|
18
|
+
extractDataUrls,
|
|
19
|
+
isBinaryContentType,
|
|
20
|
+
readBinaryFile,
|
|
21
|
+
restoreDataUrls,
|
|
22
|
+
writeBinaryFile
|
|
23
|
+
} from "./binary-extractor.js"
|
|
24
|
+
import { getCacheDir } from "./environment.js"
|
|
25
|
+
import { getStorableHeaders, hashRequest, headersToRecord } from "./request-hasher.js"
|
|
26
|
+
import {
|
|
27
|
+
isSSEResponse,
|
|
28
|
+
jsonlToTimedChunks,
|
|
29
|
+
recordStreamWithTiming,
|
|
30
|
+
replayStreamFromAsyncIterable,
|
|
31
|
+
replayStreamWithTiming,
|
|
32
|
+
timedChunksToJsonl
|
|
33
|
+
} from "./sse-handler.js"
|
|
34
|
+
|
|
35
|
+
// Port used by the optional local "dev-fs-logs" sidecar; cache writes and
// fallback reads are mirrored to it over HTTP (see tryDevFsLogsWrite/Read).
const DEV_FS_LOGS_PORT = 1090

// Internal fetch function, set via setInternalFetchForCache.
// Defaults to the global fetch; presumably replaced so sidecar traffic uses
// an un-instrumented fetch and bypasses the cache itself — TODO confirm.
let _internalFetch: typeof globalThis.fetch = globalThis.fetch

/** @internal Set the fetch function used for dev-fs-logs communication. */
export function setInternalFetchForCache(fetchFn: typeof globalThis.fetch): void {
  _internalFetch = fetchFn
}
|
|
44
|
+
|
|
45
|
+
async function tryDevFsLogsWrite(cacheId: string, data: unknown): Promise<boolean> {
|
|
46
|
+
return _internalFetch(`http://localhost:${DEV_FS_LOGS_PORT}/cache`, {
|
|
47
|
+
method: "POST",
|
|
48
|
+
headers: {
|
|
49
|
+
"Content-Type": "application/json"
|
|
50
|
+
},
|
|
51
|
+
body: JSON.stringify({
|
|
52
|
+
cacheId,
|
|
53
|
+
data
|
|
54
|
+
})
|
|
55
|
+
})
|
|
56
|
+
.then((response) => response.ok)
|
|
57
|
+
.catch(() => false)
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
async function tryDevFsLogsRead(cacheId: string): Promise<unknown | null> {
|
|
61
|
+
return _internalFetch(`http://localhost:${DEV_FS_LOGS_PORT}/cache/${cacheId}`)
|
|
62
|
+
.then((response) => {
|
|
63
|
+
if (!response.ok) {
|
|
64
|
+
return null
|
|
65
|
+
}
|
|
66
|
+
return response.json()
|
|
67
|
+
})
|
|
68
|
+
.catch(() => null)
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
function ensureCacheDir(cacheKey: string): string {
|
|
72
|
+
const baseDir = getCacheDir()
|
|
73
|
+
const cacheDir = join(baseDir, cacheKey)
|
|
74
|
+
if (!existsSync(cacheDir)) {
|
|
75
|
+
try {
|
|
76
|
+
mkdirSync(cacheDir, {
|
|
77
|
+
recursive: true
|
|
78
|
+
})
|
|
79
|
+
} catch (error: unknown) {
|
|
80
|
+
// Handle potential race condition where the directory was created
|
|
81
|
+
// between the existsSync check and mkdirSync call.
|
|
82
|
+
if (!(error instanceof Error && (error as NodeJS.ErrnoException).code === "EEXIST")) {
|
|
83
|
+
throw error
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
}
|
|
87
|
+
return cacheDir
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
function writeJsonFile(filePath: string, data: unknown): void {
|
|
91
|
+
writeFileSync(filePath, JSON.stringify(data, null, 2), "utf-8")
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
function readJsonFile<T>(filePath: string): T | null {
|
|
95
|
+
if (!existsSync(filePath)) {
|
|
96
|
+
return null
|
|
97
|
+
}
|
|
98
|
+
const content = readFileSync(filePath, "utf-8")
|
|
99
|
+
return JSON.parse(content) as T
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
function writeTextFile(filePath: string, content: string): void {
|
|
103
|
+
writeFileSync(filePath, content, "utf-8")
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
function readTextFile(filePath: string): string | null {
|
|
107
|
+
if (!existsSync(filePath)) {
|
|
108
|
+
return null
|
|
109
|
+
}
|
|
110
|
+
return readFileSync(filePath, "utf-8")
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
/** Convert an array to an async iterable for generator hooks */
|
|
114
|
+
async function* arrayToAsyncIterable<T>(arr: Array<T>): AsyncIterable<T> {
|
|
115
|
+
for (const item of arr) {
|
|
116
|
+
yield item
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
function parseRequestBody(body: string | undefined): CachedRequestBody | undefined {
|
|
121
|
+
if (body === undefined) {
|
|
122
|
+
return undefined
|
|
123
|
+
}
|
|
124
|
+
try {
|
|
125
|
+
const parsed = JSON.parse(body)
|
|
126
|
+
return { json: parsed }
|
|
127
|
+
} catch {
|
|
128
|
+
return { text: body }
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
/**
 * Persist the cacheable representation of a request and return its cache key.
 *
 * The key is either the caller-provided one or a hash of
 * (url, method, headers, body) via `hashRequest`. The request is written to
 * the pluggable `storage` backend when given, otherwise to the legacy
 * filesystem layout (`<cacheDir>/<key>/request.json`), and is additionally
 * mirrored best-effort to the dev-fs-logs sidecar.
 *
 * @param params - Raw request data (url, method, headers, optional body).
 * @param providedCacheKey - Use this key instead of hashing the request.
 * @param skipHeaderFiltering - When true, store headers verbatim instead of
 *   filtering them through `getStorableHeaders`.
 * @param storage - Optional CacheStorage backend; omitted → filesystem path.
 * @param hashableRequest - Extra key component passed through to the backend.
 * @returns The cache key the request was stored under.
 */
export async function storeRequest(
  params: RawCachedRequest,
  providedCacheKey?: string,
  skipHeaderFiltering?: boolean,
  storage?: CacheStorage,
  hashableRequest?: HashableRequest
): Promise<string> {
  const { body, headers, method, url } = params
  // Caller-supplied key wins; otherwise derive a deterministic hash.
  const cacheKey = providedCacheKey ?? hashRequest({
    url,
    method,
    headers,
    body
  })

  const request: CachedRequest = {
    url,
    method,
    headers: skipHeaderFiltering ? headers : getStorableHeaders(headers),
    // Body is stored structurally ({json}/{text}) rather than as a raw string.
    body: parseRequestBody(body)
  }

  if (storage) {
    // Backend path: the optional hashableRequest rides along in the key tuple.
    const key: CacheKey = hashableRequest ? [cacheKey, hashableRequest] : [cacheKey]
    await storage.requests.set(key, request)
  } else {
    // Legacy filesystem path
    const cacheDir = ensureCacheDir(cacheKey)
    const requestPath = join(cacheDir, "request.json")
    writeJsonFile(requestPath, request)
  }

  // Best-effort mirror; failures are swallowed inside tryDevFsLogsWrite.
  await tryDevFsLogsWrite(`${cacheKey}/request`, request)

  return cacheKey
}
|
|
168
|
+
|
|
169
|
+
export async function storeResponse(
|
|
170
|
+
cacheKey: string,
|
|
171
|
+
response: Response,
|
|
172
|
+
beforeStoreResponse?: TransformHook<StorableResponse>,
|
|
173
|
+
storage?: CacheStorage,
|
|
174
|
+
hashableRequest?: HashableRequest
|
|
175
|
+
): Promise<Response> {
|
|
176
|
+
const key: CacheKey = hashableRequest ? [cacheKey, hashableRequest] : [cacheKey]
|
|
177
|
+
const startTime = Date.now()
|
|
178
|
+
const headers = headersToRecord(response.headers)
|
|
179
|
+
const contentType = headers["content-type"] ?? ""
|
|
180
|
+
const isSSE = isSSEResponse(response.headers)
|
|
181
|
+
const isBinary = isBinaryContentType(contentType)
|
|
182
|
+
|
|
183
|
+
const meta: CachedResponseMeta = {
|
|
184
|
+
status: response.status,
|
|
185
|
+
statusText: response.statusText,
|
|
186
|
+
headers,
|
|
187
|
+
ttfb_ms: Date.now() - startTime,
|
|
188
|
+
total_ms: 0,
|
|
189
|
+
is_sse: isSSE,
|
|
190
|
+
is_binary: isBinary,
|
|
191
|
+
cached_at: new Date().toISOString()
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
if (!response.body) {
|
|
195
|
+
meta.total_ms = Date.now() - startTime
|
|
196
|
+
if (storage) {
|
|
197
|
+
await storage.responseMeta.set(key, meta)
|
|
198
|
+
} else {
|
|
199
|
+
const cacheDir = ensureCacheDir(cacheKey)
|
|
200
|
+
const metaPath = join(cacheDir, "response.meta.json")
|
|
201
|
+
writeJsonFile(metaPath, meta)
|
|
202
|
+
}
|
|
203
|
+
await tryDevFsLogsWrite(`${cacheKey}/meta`, meta)
|
|
204
|
+
return new Response(null, {
|
|
205
|
+
status: response.status,
|
|
206
|
+
statusText: response.statusText,
|
|
207
|
+
headers: response.headers
|
|
208
|
+
})
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
if (isBinary) {
|
|
212
|
+
const responseArrayBuffer = await response.arrayBuffer()
|
|
213
|
+
const data = new Uint8Array(responseArrayBuffer)
|
|
214
|
+
meta.total_ms = Date.now() - startTime
|
|
215
|
+
const binaryMeta = {
|
|
216
|
+
...meta,
|
|
217
|
+
size: data.length,
|
|
218
|
+
content_type: contentType
|
|
219
|
+
}
|
|
220
|
+
if (storage) {
|
|
221
|
+
await storage.responseMeta.set(key, binaryMeta)
|
|
222
|
+
await storage.binaryBody.set(key, data)
|
|
223
|
+
} else {
|
|
224
|
+
const cacheDir = ensureCacheDir(cacheKey)
|
|
225
|
+
const binPath = join(cacheDir, "response.bin")
|
|
226
|
+
writeBinaryFile(binPath, data)
|
|
227
|
+
const metaPath = join(cacheDir, "response.meta.json")
|
|
228
|
+
writeJsonFile(metaPath, binaryMeta)
|
|
229
|
+
}
|
|
230
|
+
await tryDevFsLogsWrite(`${cacheKey}/meta`, meta)
|
|
231
|
+
return new Response(responseArrayBuffer, {
|
|
232
|
+
status: response.status,
|
|
233
|
+
statusText: response.statusText,
|
|
234
|
+
headers: response.headers
|
|
235
|
+
})
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
if (isSSE) {
|
|
239
|
+
const [recordStream, returnStream] = response.body.tee()
|
|
240
|
+
const chunks = await recordStreamWithTiming(recordStream)
|
|
241
|
+
meta.total_ms = Date.now() - startTime
|
|
242
|
+
|
|
243
|
+
if (storage) {
|
|
244
|
+
await storage.responseMeta.set(key, meta)
|
|
245
|
+
await storage.sseChunks.set(key, chunks)
|
|
246
|
+
} else {
|
|
247
|
+
const cacheDir = ensureCacheDir(cacheKey)
|
|
248
|
+
const jsonlPath = join(cacheDir, "response.jsonl")
|
|
249
|
+
const assetsDir = join(cacheDir, "response.jsonl.assets")
|
|
250
|
+
let jsonlContent = timedChunksToJsonl(chunks)
|
|
251
|
+
const { content: extractedContent } = extractDataUrls(jsonlContent, assetsDir)
|
|
252
|
+
jsonlContent = extractedContent
|
|
253
|
+
writeTextFile(jsonlPath, jsonlContent)
|
|
254
|
+
|
|
255
|
+
const metaPath = join(cacheDir, "response.meta.json")
|
|
256
|
+
writeJsonFile(metaPath, meta)
|
|
257
|
+
}
|
|
258
|
+
await tryDevFsLogsWrite(`${cacheKey}/meta`, meta)
|
|
259
|
+
await tryDevFsLogsWrite(`${cacheKey}/sse`, chunks)
|
|
260
|
+
|
|
261
|
+
return new Response(returnStream, {
|
|
262
|
+
status: response.status,
|
|
263
|
+
statusText: response.statusText,
|
|
264
|
+
headers: response.headers
|
|
265
|
+
})
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
const text = await response.text()
|
|
269
|
+
meta.total_ms = Date.now() - startTime
|
|
270
|
+
|
|
271
|
+
// Apply beforeStoreResponse hook if provided
|
|
272
|
+
const storableResponse: StorableResponse = { body: text, meta }
|
|
273
|
+
const transformedResponse = beforeStoreResponse
|
|
274
|
+
? await beforeStoreResponse(storableResponse)
|
|
275
|
+
: storableResponse
|
|
276
|
+
|
|
277
|
+
if (storage) {
|
|
278
|
+
await storage.responseMeta.set(key, transformedResponse.meta)
|
|
279
|
+
await storage.responseBody.set(key, transformedResponse.body ?? "")
|
|
280
|
+
} else {
|
|
281
|
+
const cacheDir = ensureCacheDir(cacheKey)
|
|
282
|
+
const jsonPath = join(cacheDir, "response.json")
|
|
283
|
+
const assetsDir = join(cacheDir, "response.json.assets")
|
|
284
|
+
const { content: extractedContent } = extractDataUrls(transformedResponse.body ?? "", assetsDir)
|
|
285
|
+
writeJsonFile(jsonPath, {
|
|
286
|
+
body: extractedContent,
|
|
287
|
+
meta: transformedResponse.meta
|
|
288
|
+
})
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
await tryDevFsLogsWrite(`${cacheKey}/response`, {
|
|
292
|
+
body: transformedResponse.body,
|
|
293
|
+
meta: transformedResponse.meta
|
|
294
|
+
})
|
|
295
|
+
|
|
296
|
+
return new Response(text, {
|
|
297
|
+
status: response.status,
|
|
298
|
+
statusText: response.statusText,
|
|
299
|
+
headers: response.headers
|
|
300
|
+
})
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
/**
 * Reconstruct a previously stored Response for `cacheKey`, or return `null`
 * on a cache miss.
 *
 * Lookup order for metadata: storage backend → legacy filesystem
 * (`response.meta.json`, then meta embedded in `response.json`) → dev-fs-logs
 * sidecar. The body is then rebuilt per the metadata flags: binary bytes,
 * an SSE replay stream (optionally transformed chunk-by-chunk), or text
 * (optionally transformed by `afterLoadResponse`).
 *
 * @param cacheKey - Key the response was stored under.
 * @param afterLoadResponse - Hook applied to text responses after loading.
 * @param transformSSEChunk - Generator hook applied to replayed SSE chunks.
 * @param storage - Optional CacheStorage backend; omitted → legacy filesystem.
 * @returns A replay Response, or `null` when nothing is cached.
 */
export async function getCachedResponse(
  cacheKey: string,
  afterLoadResponse?: TransformHook<StorableResponse>,
  transformSSEChunk?: GeneratorTransformHook<TimedChunk>,
  storage?: CacheStorage
): Promise<Response | null> {
  // NOTE(review): unlike storeRequest/storeResponse, no hashableRequest is
  // accepted here, so the key is always the bare tuple — confirm intentional.
  const key: CacheKey = [cacheKey]

  // Try to get response metadata
  let responseMeta: CachedResponseMeta | null = null

  if (storage) {
    responseMeta = await storage.responseMeta.get(key)
  } else {
    const cacheDir = join(getCacheDir(), cacheKey)
    const metaPath = join(cacheDir, "response.meta.json")
    responseMeta = readJsonFile<CachedResponseMeta>(metaPath)

    // For non-streaming responses, metadata is stored inside response.json
    if (!responseMeta) {
      const jsonPath = join(cacheDir, "response.json")
      const cached = readJsonFile<{
        body: string
        meta: CachedResponseMeta
      }>(jsonPath)
      if (cached?.meta) {
        responseMeta = cached.meta
      }
    }
  }

  // Last resort: ask the dev-fs-logs sidecar for the metadata.
  if (!responseMeta) {
    const devFsMeta = (await tryDevFsLogsRead(`${cacheKey}/meta`)) as CachedResponseMeta | null
    if (!devFsMeta) {
      return null
    }
    responseMeta = devFsMeta
  }

  const headers = new Headers(responseMeta.headers)

  // Handle binary responses
  if (responseMeta.is_binary) {
    let data: Uint8Array | null = null
    if (storage) {
      data = await storage.binaryBody.get(key)
    } else {
      const cacheDir = join(getCacheDir(), cacheKey)
      const binPath = join(cacheDir, "response.bin")
      data = readBinaryFile(binPath)
    }
    if (!data) {
      return null
    }
    // Slice out exactly the view's bytes: the Uint8Array may be a window
    // into a larger underlying buffer.
    const arrayBuffer = data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength) as ArrayBuffer
    return new Response(arrayBuffer, {
      status: responseMeta.status,
      statusText: responseMeta.statusText,
      headers
    })
  }

  // Handle SSE responses
  if (responseMeta.is_sse) {
    if (storage) {
      const chunks = storage.sseChunks.get(key)
      // Check if there are any chunks by peeking
      const iterator = chunks[Symbol.asyncIterator]()
      const first = await iterator.next()
      if (first.done) {
        // No chunks in storage, try dev-fs-logs fallback
        const devFsChunks = (await tryDevFsLogsRead(`${cacheKey}/sse`)) as Array<TimedChunk> | null
        if (!devFsChunks) {
          return null
        }
        const stream = transformSSEChunk
          ? replayStreamFromAsyncIterable(transformSSEChunk(arrayToAsyncIterable(devFsChunks)))
          : replayStreamWithTiming(devFsChunks)
        return new Response(stream, {
          status: responseMeta.status,
          statusText: responseMeta.statusText,
          headers
        })
      }
      // Reconstruct the async iterable with the first chunk prepended
      async function* prependFirst(): AsyncIterable<TimedChunk> {
        yield first.value
        // Wrap the already-started iterator so for-await can resume it.
        for await (const chunk of { [Symbol.asyncIterator]: () => iterator }) {
          yield chunk
        }
      }
      const chunksIterable = prependFirst()
      // NOTE(review): the untransformed storage path replays WITHOUT timing
      // (replayStreamFromAsyncIterable) while the legacy path below uses
      // replayStreamWithTiming — confirm this asymmetry is intended.
      const stream = transformSSEChunk
        ? replayStreamFromAsyncIterable(transformSSEChunk(chunksIterable))
        : replayStreamFromAsyncIterable(chunksIterable)
      return new Response(stream, {
        status: responseMeta.status,
        statusText: responseMeta.statusText,
        headers
      })
    }

    // Legacy filesystem path
    const cacheDir = join(getCacheDir(), cacheKey)
    const jsonlPath = join(cacheDir, "response.jsonl")
    const assetsDir = join(cacheDir, "response.jsonl.assets")
    let jsonlContent = readTextFile(jsonlPath)

    if (!jsonlContent) {
      const devFsChunks = (await tryDevFsLogsRead(`${cacheKey}/sse`)) as Array<TimedChunk> | null
      if (!devFsChunks) {
        return null
      }
      const stream = transformSSEChunk
        ? replayStreamFromAsyncIterable(transformSSEChunk(arrayToAsyncIterable(devFsChunks)))
        : replayStreamWithTiming(devFsChunks)
      return new Response(stream, {
        status: responseMeta.status,
        statusText: responseMeta.statusText,
        headers
      })
    }

    // Re-inline any asset files that were extracted at store time.
    jsonlContent = restoreDataUrls(jsonlContent, assetsDir)
    const chunks = jsonlToTimedChunks(jsonlContent)
    const stream = transformSSEChunk
      ? replayStreamFromAsyncIterable(transformSSEChunk(arrayToAsyncIterable(chunks)))
      : replayStreamWithTiming(chunks)

    return new Response(stream, {
      status: responseMeta.status,
      statusText: responseMeta.statusText,
      headers
    })
  }

  // Handle text/JSON responses
  let body: string | null = null
  let cachedMeta: CachedResponseMeta | null = responseMeta

  if (storage) {
    body = await storage.responseBody.get(key)
  } else {
    const cacheDir = join(getCacheDir(), cacheKey)
    const jsonPath = join(cacheDir, "response.json")
    const assetsDir = join(cacheDir, "response.json.assets")
    const cached = readJsonFile<{
      body: string
      meta: CachedResponseMeta
    }>(jsonPath)

    if (cached) {
      body = restoreDataUrls(cached.body, assetsDir)
      cachedMeta = cached.meta
    }
  }

  // NOTE(review): a cached EMPTY-string body is falsy and falls through to
  // the dev-fs-logs fallback here — confirm that is acceptable.
  if (!body) {
    const devFsResponse = (await tryDevFsLogsRead(`${cacheKey}/response`)) as {
      body: string
      meta: CachedResponseMeta
    } | null
    if (!devFsResponse) {
      return null
    }
    const transformed = afterLoadResponse
      ? await afterLoadResponse({ body: devFsResponse.body, meta: devFsResponse.meta })
      : devFsResponse
    return new Response(transformed.body, {
      status: transformed.meta.status,
      statusText: transformed.meta.statusText,
      headers: new Headers(transformed.meta.headers)
    })
  }

  // Apply afterLoadResponse hook if provided
  const storableResponse: StorableResponse = { body, meta: cachedMeta }
  const transformedResponse = afterLoadResponse
    ? await afterLoadResponse(storableResponse)
    : storableResponse

  return new Response(transformedResponse.body, {
    status: transformedResponse.meta.status,
    statusText: transformedResponse.meta.statusText,
    headers: new Headers(transformedResponse.meta.headers)
  })
}
|
|
490
|
+
|
|
491
|
+
export function getCacheKeyFromUrl(url: string): string | null {
|
|
492
|
+
const cacheSchemes = ["cache://", "devcache://"]
|
|
493
|
+
for (const scheme of cacheSchemes) {
|
|
494
|
+
if (url.startsWith(scheme)) {
|
|
495
|
+
return url.slice(scheme.length)
|
|
496
|
+
}
|
|
497
|
+
}
|
|
498
|
+
return null
|
|
499
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
export function isProduction(): boolean {
|
|
2
|
+
if (process.env.NEXT_PUBLIC_VERCEL_ENV === "production") {
|
|
3
|
+
return true
|
|
4
|
+
}
|
|
5
|
+
return process.env.OR_ENV === "production"
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
export function isCacheEnabled(): boolean {
|
|
9
|
+
if (isProduction()) {
|
|
10
|
+
return false
|
|
11
|
+
}
|
|
12
|
+
if (process.env.DEV_FETCH_CACHE === "0") {
|
|
13
|
+
return false
|
|
14
|
+
}
|
|
15
|
+
if (process.argv.includes("--no-fetch-cache")) {
|
|
16
|
+
return false
|
|
17
|
+
}
|
|
18
|
+
return process.env.DEV_FETCH_CACHE === "1"
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
export function isReplayOnly(): boolean {
|
|
22
|
+
return process.env.DEV_FETCH_CACHE_RECORD === "0"
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
export function getCacheDir(): string {
|
|
26
|
+
return process.env.DEV_FETCH_CACHE_DIR ?? ".cache/fetch"
|
|
27
|
+
}
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import type { CachedRequest, CachedResponseMeta, CacheKey, CacheStorage, KV, KVStream, TimedChunk } from "./types.js"
|
|
2
|
+
|
|
3
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"
|
|
4
|
+
import { join } from "node:path"
|
|
5
|
+
import { jsonlToTimedChunks, timedChunksToJsonl } from "./sse-handler.js"
|
|
6
|
+
|
|
7
|
+
function ensureCacheDir(baseDir: string, cacheKey: string): string {
|
|
8
|
+
const cacheDir = join(baseDir, cacheKey)
|
|
9
|
+
if (!existsSync(cacheDir)) {
|
|
10
|
+
try {
|
|
11
|
+
mkdirSync(cacheDir, { recursive: true })
|
|
12
|
+
} catch (error: unknown) {
|
|
13
|
+
// Handle race condition where directory was created between check and mkdir
|
|
14
|
+
if (!(error instanceof Error && (error as NodeJS.ErrnoException).code === "EEXIST")) {
|
|
15
|
+
throw error
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
return cacheDir
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
/** Create a KV store backed by JSON files */
|
|
23
|
+
export function createJsonFileKV<T>(baseDir: string, filename: string): KV<CacheKey, T> {
|
|
24
|
+
return {
|
|
25
|
+
async get([key]: CacheKey): Promise<T | null> {
|
|
26
|
+
const filePath = join(baseDir, key, filename)
|
|
27
|
+
if (!existsSync(filePath)) {
|
|
28
|
+
return null
|
|
29
|
+
}
|
|
30
|
+
const content = readFileSync(filePath, "utf-8")
|
|
31
|
+
return JSON.parse(content) as T
|
|
32
|
+
},
|
|
33
|
+
|
|
34
|
+
async set([key]: CacheKey, value: T): Promise<void> {
|
|
35
|
+
const cacheDir = ensureCacheDir(baseDir, key)
|
|
36
|
+
const filePath = join(cacheDir, filename)
|
|
37
|
+
writeFileSync(filePath, JSON.stringify(value, null, 2), "utf-8")
|
|
38
|
+
},
|
|
39
|
+
|
|
40
|
+
async has([key]: CacheKey): Promise<boolean> {
|
|
41
|
+
const filePath = join(baseDir, key, filename)
|
|
42
|
+
return existsSync(filePath)
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
/** Create a KV store backed by binary files */
|
|
48
|
+
export function createBinaryFileKV(baseDir: string, filename: string): KV<CacheKey, Uint8Array> {
|
|
49
|
+
return {
|
|
50
|
+
async get([key]: CacheKey): Promise<Uint8Array | null> {
|
|
51
|
+
const filePath = join(baseDir, key, filename)
|
|
52
|
+
if (!existsSync(filePath)) {
|
|
53
|
+
return null
|
|
54
|
+
}
|
|
55
|
+
const buffer = readFileSync(filePath)
|
|
56
|
+
return new Uint8Array(buffer)
|
|
57
|
+
},
|
|
58
|
+
|
|
59
|
+
async set([key]: CacheKey, value: Uint8Array): Promise<void> {
|
|
60
|
+
const cacheDir = ensureCacheDir(baseDir, key)
|
|
61
|
+
const filePath = join(cacheDir, filename)
|
|
62
|
+
writeFileSync(filePath, value)
|
|
63
|
+
},
|
|
64
|
+
|
|
65
|
+
async has([key]: CacheKey): Promise<boolean> {
|
|
66
|
+
const filePath = join(baseDir, key, filename)
|
|
67
|
+
return existsSync(filePath)
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/** Create a streaming KV store for SSE chunks backed by JSONL files */
|
|
73
|
+
export function createJsonlFileKVStream(baseDir: string, filename: string): KVStream<CacheKey, TimedChunk> {
|
|
74
|
+
return {
|
|
75
|
+
async *get([key]: CacheKey): AsyncIterable<TimedChunk> {
|
|
76
|
+
const filePath = join(baseDir, key, filename)
|
|
77
|
+
if (!existsSync(filePath)) {
|
|
78
|
+
return
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Read entire file and parse - streaming line-by-line is complex due to JSONL format
|
|
82
|
+
// Future optimization: could stream lines and parse incrementally
|
|
83
|
+
const content = readFileSync(filePath, "utf-8")
|
|
84
|
+
const chunks = jsonlToTimedChunks(content)
|
|
85
|
+
for (const chunk of chunks) {
|
|
86
|
+
yield chunk
|
|
87
|
+
}
|
|
88
|
+
},
|
|
89
|
+
|
|
90
|
+
async set([key]: CacheKey, values: Array<TimedChunk> | AsyncIterable<TimedChunk>): Promise<void> {
|
|
91
|
+
const cacheDir = ensureCacheDir(baseDir, key)
|
|
92
|
+
const filePath = join(cacheDir, filename)
|
|
93
|
+
|
|
94
|
+
// Collect all values if async iterable
|
|
95
|
+
let chunks: Array<TimedChunk>
|
|
96
|
+
if (Array.isArray(values)) {
|
|
97
|
+
chunks = values
|
|
98
|
+
} else {
|
|
99
|
+
chunks = []
|
|
100
|
+
for await (const chunk of values) {
|
|
101
|
+
chunks.push(chunk)
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
const jsonl = timedChunksToJsonl(chunks)
|
|
106
|
+
writeFileSync(filePath, jsonl, "utf-8")
|
|
107
|
+
},
|
|
108
|
+
|
|
109
|
+
async has([key]: CacheKey): Promise<boolean> {
|
|
110
|
+
const filePath = join(baseDir, key, filename)
|
|
111
|
+
return existsSync(filePath)
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
/** Create a complete CacheStorage backed by the filesystem */
|
|
117
|
+
export function createFilesystemStorage(baseDir: string): CacheStorage {
|
|
118
|
+
return {
|
|
119
|
+
requests: createJsonFileKV<CachedRequest>(baseDir, "request.json"),
|
|
120
|
+
responseMeta: createJsonFileKV<CachedResponseMeta>(baseDir, "response.meta.json"),
|
|
121
|
+
responseBody: createJsonFileKV<string>(baseDir, "response.json"),
|
|
122
|
+
binaryBody: createBinaryFileKV(baseDir, "response.bin"),
|
|
123
|
+
sseChunks: createJsonlFileKVStream(baseDir, "response.jsonl")
|
|
124
|
+
}
|
|
125
|
+
}
|