undici 7.16.0 → 7.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -1
- package/docs/docs/api/Client.md +1 -0
- package/docs/docs/api/DiagnosticsChannel.md +57 -0
- package/docs/docs/api/Dispatcher.md +86 -0
- package/docs/docs/api/RoundRobinPool.md +145 -0
- package/docs/docs/api/WebSocket.md +21 -0
- package/docs/docs/best-practices/crawling.md +58 -0
- package/index.js +4 -1
- package/lib/api/api-upgrade.js +2 -1
- package/lib/core/connect.js +4 -1
- package/lib/core/diagnostics.js +28 -1
- package/lib/core/symbols.js +3 -0
- package/lib/core/util.js +29 -31
- package/lib/dispatcher/balanced-pool.js +10 -0
- package/lib/dispatcher/client-h1.js +0 -16
- package/lib/dispatcher/client-h2.js +153 -23
- package/lib/dispatcher/client.js +7 -2
- package/lib/dispatcher/dispatcher-base.js +11 -12
- package/lib/dispatcher/h2c-client.js +7 -78
- package/lib/dispatcher/pool-base.js +1 -1
- package/lib/dispatcher/proxy-agent.js +13 -2
- package/lib/dispatcher/round-robin-pool.js +137 -0
- package/lib/encoding/index.js +33 -0
- package/lib/handler/cache-handler.js +84 -27
- package/lib/handler/deduplication-handler.js +216 -0
- package/lib/handler/retry-handler.js +0 -2
- package/lib/interceptor/cache.js +35 -17
- package/lib/interceptor/decompress.js +2 -1
- package/lib/interceptor/deduplicate.js +109 -0
- package/lib/interceptor/dns.js +55 -13
- package/lib/mock/mock-utils.js +1 -2
- package/lib/mock/snapshot-agent.js +11 -5
- package/lib/mock/snapshot-recorder.js +12 -4
- package/lib/mock/snapshot-utils.js +4 -4
- package/lib/util/cache.js +29 -1
- package/lib/util/runtime-features.js +124 -0
- package/lib/web/cookies/parse.js +1 -1
- package/lib/web/fetch/body.js +29 -39
- package/lib/web/fetch/data-url.js +12 -160
- package/lib/web/fetch/formdata-parser.js +204 -127
- package/lib/web/fetch/index.js +9 -6
- package/lib/web/fetch/request.js +6 -0
- package/lib/web/fetch/response.js +2 -3
- package/lib/web/fetch/util.js +2 -65
- package/lib/web/infra/index.js +229 -0
- package/lib/web/subresource-integrity/subresource-integrity.js +6 -5
- package/lib/web/webidl/index.js +4 -2
- package/lib/web/websocket/connection.js +31 -21
- package/lib/web/websocket/frame.js +9 -15
- package/lib/web/websocket/stream/websocketstream.js +1 -1
- package/lib/web/websocket/util.js +2 -1
- package/package.json +5 -4
- package/types/agent.d.ts +1 -1
- package/types/api.d.ts +2 -2
- package/types/balanced-pool.d.ts +2 -1
- package/types/cache-interceptor.d.ts +1 -0
- package/types/client.d.ts +1 -1
- package/types/connector.d.ts +2 -2
- package/types/diagnostics-channel.d.ts +2 -2
- package/types/dispatcher.d.ts +12 -12
- package/types/fetch.d.ts +4 -4
- package/types/formdata.d.ts +1 -1
- package/types/h2c-client.d.ts +1 -1
- package/types/index.d.ts +9 -1
- package/types/interceptors.d.ts +36 -2
- package/types/pool.d.ts +1 -1
- package/types/readable.d.ts +2 -2
- package/types/round-robin-pool.d.ts +41 -0
- package/types/websocket.d.ts +9 -9
package/lib/interceptor/cache.js
CHANGED

@@ -9,6 +9,8 @@ const CacheRevalidationHandler = require('../handler/cache-revalidation-handler'
 const { assertCacheStore, assertCacheMethods, makeCacheKey, normalizeHeaders, parseCacheControlHeader } = require('../util/cache.js')
 const { AbortError } = require('../core/errors.js')
 
+const nop = () => {}
+
 /**
  * @typedef {(options: import('../../types/dispatcher.d.ts').default.DispatchOptions, handler: import('../../types/dispatcher.d.ts').default.DispatchHandler) => void} DispatchFn
  */
@@ -16,19 +18,34 @@ const { AbortError } = require('../core/errors.js')
 /**
  * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
  * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
+ * @param {import('../../types/dispatcher.d.ts').default.RequestOptions} opts
  * @returns {boolean}
  */
-function needsRevalidation (result, cacheControlDirectives) {
+function needsRevalidation (result, cacheControlDirectives, { headers = {} }) {
+  // Always revalidate requests with the no-cache request directive.
   if (cacheControlDirectives?.['no-cache']) {
-    // Always revalidate requests with the no-cache request directive
     return true
   }
 
+  // Always revalidate requests with unqualified no-cache response directive.
   if (result.cacheControlDirectives?.['no-cache'] && !Array.isArray(result.cacheControlDirectives['no-cache'])) {
-    // Always revalidate requests with unqualified no-cache response directive
     return true
   }
 
+  // Always revalidate requests with conditional headers.
+  if (headers['if-modified-since'] || headers['if-none-match']) {
+    return true
+  }
+
+  return false
+}
+
+/**
+ * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
+ * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives | undefined} cacheControlDirectives
+ * @returns {boolean}
+ */
+function isStale (result, cacheControlDirectives) {
   const now = Date.now()
   if (now > result.staleAt) {
     // Response is stale
@@ -102,7 +119,7 @@ function handleUncachedResponse (
   }
 
   if (typeof handler.onHeaders === 'function') {
-    handler.onHeaders(504, [],
+    handler.onHeaders(504, [], nop, 'Gateway Timeout')
     if (aborted) {
       return
     }
@@ -239,8 +256,11 @@ function handleResult (
     return dispatch(opts, handler)
   }
 
+  const stale = isStale(result, reqCacheControl)
+  const revalidate = needsRevalidation(result, reqCacheControl, opts)
+
   // Check if the response is stale
-  if (
+  if (stale || revalidate) {
     if (util.isStream(opts.body) && util.bodyLength(opts.body) !== 0) {
       // If body is a stream we can't revalidate...
       // TODO (fix): This could be less strict...
@@ -248,8 +268,8 @@ function handleResult (
     }
 
     // RFC 5861: If we're within stale-while-revalidate window, serve stale immediately
-    // and revalidate in background
-    if (withinStaleWhileRevalidateWindow(result)) {
+    // and revalidate in background, unless immediate revalidation is necessary
+    if (!revalidate && withinStaleWhileRevalidateWindow(result)) {
       // Serve stale response immediately
       sendCachedValue(handler, opts, result, age, null, true)
 
@@ -323,9 +343,10 @@ function handleResult (
       new CacheRevalidationHandler(
         (success, context) => {
           if (success) {
-
+            // TODO: successful revalidation should be considered fresh (not give stale warning).
+            sendCachedValue(handler, opts, result, age, context, stale)
           } else if (util.isStream(result.body)) {
-            result.body.on('error',
+            result.body.on('error', nop).destroy()
           }
         },
         new CacheHandler(globalOpts, cacheKey, handler),
@@ -336,7 +357,7 @@ function handleResult (
 
   // Dump request body.
   if (util.isStream(opts.body)) {
-    opts.body.on('error',
+    opts.body.on('error', nop).destroy()
   }
 
   sendCachedValue(handler, opts, result, age, null, false)
@@ -405,18 +426,17 @@ module.exports = (opts = {}) => {
       const result = store.get(cacheKey)
 
       if (result && typeof result.then === 'function') {
-        result
-          handleResult(dispatch,
+        return result
+          .then(result => handleResult(dispatch,
            globalOpts,
            cacheKey,
            handler,
            opts,
            reqCacheControl,
            result
-          )
-        })
+          ))
      } else {
-        handleResult(
+        return handleResult(
          dispatch,
          globalOpts,
          cacheKey,
@@ -426,8 +446,6 @@ module.exports = (opts = {}) => {
          result
        )
      }
-
-      return true
    }
  }
}
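The interceptor now distinguishes a stale cached entry (`isStale`) from one that must be revalidated (`needsRevalidation`), and any request carrying `if-modified-since` or `if-none-match` is always revalidated rather than served straight from the cache. A minimal sketch of how that surfaces through undici's public cache interceptor (the URL and ETag below are placeholders):

```js
const { Agent, interceptors, request } = require('undici')

const agent = new Agent().compose(interceptors.cache())

async function main () {
  // First request can populate the cache (assuming the origin sends cacheable headers).
  await request('http://localhost:3000/resource', { dispatcher: agent })

  // With 7.17.0, a conditional request is revalidated instead of being answered
  // from the cache: needsRevalidation() now inspects if-none-match / if-modified-since.
  await request('http://localhost:3000/resource', {
    dispatcher: agent,
    headers: { 'if-none-match': '"some-etag"' }
  })
}

main().catch(console.error)
```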
package/lib/interceptor/decompress.js
CHANGED

@@ -3,6 +3,7 @@
 const { createInflate, createGunzip, createBrotliDecompress, createZstdDecompress } = require('node:zlib')
 const { pipeline } = require('node:stream')
 const DecoratorHandler = require('../handler/decorator-handler')
+const { runtimeFeatures } = require('../util/runtime-features')
 
 /** @typedef {import('node:stream').Transform} Transform */
 /** @typedef {import('node:stream').Transform} Controller */
@@ -16,7 +17,7 @@ const supportedEncodings = {
   deflate: createInflate,
   compress: createInflate,
   'x-compress': createInflate,
-  ...(
+  ...(runtimeFeatures.has('zstd') ? { zstd: createZstdDecompress } : {})
 }
 
 const defaultSkipStatusCodes = /** @type {const} */ ([204, 304])
package/lib/interceptor/deduplicate.js
ADDED

@@ -0,0 +1,109 @@
+'use strict'
+
+const diagnosticsChannel = require('node:diagnostics_channel')
+const util = require('../core/util')
+const DeduplicationHandler = require('../handler/deduplication-handler')
+const { normalizeHeaders, makeCacheKey, makeDeduplicationKey } = require('../util/cache.js')
+
+const pendingRequestsChannel = diagnosticsChannel.channel('undici:request:pending-requests')
+
+/**
+ * @param {import('../../types/interceptors.d.ts').default.DeduplicateInterceptorOpts} [opts]
+ * @returns {import('../../types/dispatcher.d.ts').default.DispatcherComposeInterceptor}
+ */
+module.exports = (opts = {}) => {
+  const {
+    methods = ['GET'],
+    skipHeaderNames = [],
+    excludeHeaderNames = []
+  } = opts
+
+  if (typeof opts !== 'object' || opts === null) {
+    throw new TypeError(`expected type of opts to be an Object, got ${opts === null ? 'null' : typeof opts}`)
+  }
+
+  if (!Array.isArray(methods)) {
+    throw new TypeError(`expected opts.methods to be an array, got ${typeof methods}`)
+  }
+
+  for (const method of methods) {
+    if (!util.safeHTTPMethods.includes(method)) {
+      throw new TypeError(`expected opts.methods to only contain safe HTTP methods, got ${method}`)
+    }
+  }
+
+  if (!Array.isArray(skipHeaderNames)) {
+    throw new TypeError(`expected opts.skipHeaderNames to be an array, got ${typeof skipHeaderNames}`)
+  }
+
+  if (!Array.isArray(excludeHeaderNames)) {
+    throw new TypeError(`expected opts.excludeHeaderNames to be an array, got ${typeof excludeHeaderNames}`)
+  }
+
+  // Convert to lowercase Set for case-insensitive header matching
+  const skipHeaderNamesSet = new Set(skipHeaderNames.map(name => name.toLowerCase()))
+
+  // Convert to lowercase Set for case-insensitive header exclusion from deduplication key
+  const excludeHeaderNamesSet = new Set(excludeHeaderNames.map(name => name.toLowerCase()))
+
+  const safeMethodsToNotDeduplicate = util.safeHTTPMethods.filter(method => methods.includes(method) === false)
+
+  /**
+   * Map of pending requests for deduplication
+   * @type {Map<string, DeduplicationHandler>}
+   */
+  const pendingRequests = new Map()
+
+  return dispatch => {
+    return (opts, handler) => {
+      if (!opts.origin || safeMethodsToNotDeduplicate.includes(opts.method)) {
+        return dispatch(opts, handler)
+      }
+
+      opts = {
+        ...opts,
+        headers: normalizeHeaders(opts)
+      }
+
+      // Skip deduplication if request contains any of the specified headers
+      if (skipHeaderNamesSet.size > 0) {
+        for (const headerName of Object.keys(opts.headers)) {
+          if (skipHeaderNamesSet.has(headerName.toLowerCase())) {
+            return dispatch(opts, handler)
+          }
+        }
+      }
+
+      const cacheKey = makeCacheKey(opts)
+      const dedupeKey = makeDeduplicationKey(cacheKey, excludeHeaderNamesSet)
+
+      // Check if there's already a pending request for this key
+      const pendingHandler = pendingRequests.get(dedupeKey)
+      if (pendingHandler) {
+        // Add this handler to the waiting list
+        pendingHandler.addWaitingHandler(handler)
+        return true
+      }
+
+      // Create a new deduplication handler
+      const deduplicationHandler = new DeduplicationHandler(
+        handler,
+        () => {
+          // Clean up when request completes
+          pendingRequests.delete(dedupeKey)
+          if (pendingRequestsChannel.hasSubscribers) {
+            pendingRequestsChannel.publish({ size: pendingRequests.size, key: dedupeKey, type: 'removed' })
+          }
+        }
+      )
+
+      // Register the pending request
+      pendingRequests.set(dedupeKey, deduplicationHandler)
+      if (pendingRequestsChannel.hasSubscribers) {
+        pendingRequestsChannel.publish({ size: pendingRequests.size, key: dedupeKey, type: 'added' })
+      }
+
+      return dispatch(opts, deduplicationHandler)
+    }
+  }
+}
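The new interceptor shares a single in-flight upstream request between identical safe-method dispatches, keyed by `makeDeduplicationKey`, and reports additions and removals on the `undici:request:pending-requests` diagnostics channel. A usage sketch, assuming the factory is exposed from the package as `interceptors.dedupe` (the actual export name is not visible in this diff):

```js
const { Agent, interceptors, request } = require('undici')

const agent = new Agent().compose(
  interceptors.dedupe({                   // hypothetical export name, see note above
    methods: ['GET', 'HEAD'],             // only safe HTTP methods are accepted
    skipHeaderNames: ['authorization'],   // requests carrying these headers bypass deduplication
    excludeHeaderNames: ['x-request-id']  // ignored when building the deduplication key
  })
)

async function main () {
  // Two identical in-flight GETs share one upstream request: the second handler
  // is attached to the pending DeduplicationHandler instead of dispatching again.
  const [a, b] = await Promise.all([
    request('http://localhost:3000/data', { dispatcher: agent }),
    request('http://localhost:3000/data', { dispatcher: agent })
  ])
  console.log(a.statusCode, b.statusCode)
}

main().catch(console.error)
```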
package/lib/interceptor/dns.js
CHANGED

@@ -5,14 +5,44 @@ const DecoratorHandler = require('../handler/decorator-handler')
 const { InvalidArgumentError, InformationalError } = require('../core/errors')
 const maxInt = Math.pow(2, 31) - 1
 
+class DNSStorage {
+  #maxItems = 0
+  #records = new Map()
+
+  constructor (opts) {
+    this.#maxItems = opts.maxItems
+  }
+
+  get size () {
+    return this.#records.size
+  }
+
+  get (hostname) {
+    return this.#records.get(hostname) ?? null
+  }
+
+  set (hostname, records) {
+    this.#records.set(hostname, records)
+  }
+
+  delete (hostname) {
+    this.#records.delete(hostname)
+  }
+
+  // Delegate to storage decide can we do more lookups or not
+  full () {
+    return this.size >= this.#maxItems
+  }
+}
+
 class DNSInstance {
   #maxTTL = 0
   #maxItems = 0
-  #records = new Map()
   dualStack = true
   affinity = null
   lookup = null
   pick = null
+  storage = null
 
   constructor (opts) {
     this.#maxTTL = opts.maxTTL
@@ -21,17 +51,14 @@ class DNSInstance {
     this.affinity = opts.affinity
     this.lookup = opts.lookup ?? this.#defaultLookup
     this.pick = opts.pick ?? this.#defaultPick
-
-
-  get full () {
-    return this.#records.size === this.#maxItems
+    this.storage = opts.storage ?? new DNSStorage(opts)
   }
 
   runLookup (origin, opts, cb) {
-    const ips = this
+    const ips = this.storage.get(origin.hostname)
 
     // If full, we just return the origin
-    if (ips == null && this.full) {
+    if (ips == null && this.storage.full()) {
       cb(null, origin)
       return
     }
@@ -55,7 +82,7 @@ class DNSInstance {
       }
 
       this.setRecords(origin, addresses)
-      const records = this
+      const records = this.storage.get(origin.hostname)
 
       const ip = this.pick(
         origin,
@@ -89,7 +116,7 @@ class DNSInstance {
 
     // If no IPs we lookup - deleting old records
     if (ip == null) {
-      this
+      this.storage.delete(origin.hostname)
      this.runLookup(origin, opts, cb)
      return
    }
@@ -193,7 +220,7 @@ class DNSInstance {
   }
 
   pickFamily (origin, ipFamily) {
-    const records = this
+    const records = this.storage.get(origin.hostname)?.records
     if (!records) {
       return null
     }
@@ -227,11 +254,13 @@ class DNSInstance {
   setRecords (origin, addresses) {
     const timestamp = Date.now()
     const records = { records: { 4: null, 6: null } }
+    let minTTL = this.#maxTTL
     for (const record of addresses) {
       record.timestamp = timestamp
       if (typeof record.ttl === 'number') {
         // The record TTL is expected to be in ms
         record.ttl = Math.min(record.ttl, this.#maxTTL)
+        minTTL = Math.min(minTTL, record.ttl)
       } else {
         record.ttl = this.#maxTTL
       }
@@ -242,11 +271,12 @@ class DNSInstance {
       records.records[record.family] = familyRecords
     }
 
-
+    // We provide a default TTL if external storage will be used without TTL per record-level support
+    this.storage.set(origin.hostname, records, { ttl: minTTL })
   }
 
   deleteRecords (origin) {
-    this
+    this.storage.delete(origin.hostname)
   }
 
   getHandler (meta, opts) {
@@ -372,6 +402,17 @@ module.exports = interceptorOpts =>
     throw new InvalidArgumentError('Invalid pick. Must be a function')
   }
 
+  if (
+    interceptorOpts?.storage != null &&
+    (typeof interceptorOpts?.storage?.get !== 'function' ||
+      typeof interceptorOpts?.storage?.set !== 'function' ||
+      typeof interceptorOpts?.storage?.full !== 'function' ||
+      typeof interceptorOpts?.storage?.delete !== 'function'
+    )
+  ) {
+    throw new InvalidArgumentError('Invalid storage. Must be a object with methods: { get, set, full, delete }')
+  }
+
   const dualStack = interceptorOpts?.dualStack ?? true
   let affinity
   if (dualStack) {
@@ -386,7 +427,8 @@ module.exports = interceptorOpts =>
     pick: interceptorOpts?.pick ?? null,
     dualStack,
     affinity,
-    maxItems: interceptorOpts?.maxItems ?? Infinity
+    maxItems: interceptorOpts?.maxItems ?? Infinity,
+    storage: interceptorOpts?.storage
   }
 
   const instance = new DNSInstance(opts)
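The DNS interceptor's record cache is now pluggable: any object exposing `get`, `set`, `full`, and `delete` can be passed as `storage`, and `set` receives `{ ttl: minTTL }` so an external store can expire entries on its own. A sketch of a conforming in-memory storage (the class and its `maxItems` option are illustrative, not part of undici):

```js
const { Agent, interceptors } = require('undici')

// The interceptor calls set() with { ttl: minTTL }; an external store (Redis, etc.)
// could use that TTL natively, while this sketch handles expiry inline.
class TTLStorage {
  #records = new Map()
  #maxItems

  constructor ({ maxItems = 100 } = {}) {
    this.#maxItems = maxItems
  }

  get (hostname) {
    const entry = this.#records.get(hostname)
    if (!entry) return null
    if (entry.expiresAt <= Date.now()) {
      this.#records.delete(hostname)
      return null
    }
    return entry.records
  }

  set (hostname, records, { ttl }) {
    this.#records.set(hostname, { records, expiresAt: Date.now() + ttl })
  }

  delete (hostname) {
    this.#records.delete(hostname)
  }

  full () {
    return this.#records.size >= this.#maxItems
  }
}

const agent = new Agent().compose(
  interceptors.dns({ storage: new TTLStorage({ maxItems: 100 }) })
)
```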
package/lib/mock/mock-utils.js
CHANGED

@@ -337,8 +337,7 @@ function mockDispatch (opts, handler)
   // synchronously throw the error, which breaks some tests.
   // Rather, we wait for the callback to resolve if it is a
   // promise, and then re-run handleReply with the new body.
-  body.then((newData) => handleReply(mockDispatches, newData))
-  return
+  return body.then((newData) => handleReply(mockDispatches, newData))
 }
 
 const responseData = getResponseData(body)
package/lib/mock/snapshot-agent.js
CHANGED

@@ -64,7 +64,9 @@ class SnapshotAgent extends MockAgent {
     this[kSnapshotLoaded] = false
 
     // For recording/update mode, we need a real agent to make actual requests
-
+    // For playback mode, we need a real agent if there are excluded URLs
+    if (this[kSnapshotMode] === 'record' || this[kSnapshotMode] === 'update' ||
+        (this[kSnapshotMode] === 'playback' && opts.excludeUrls && opts.excludeUrls.length > 0)) {
       this[kRealAgent] = new Agent(opts)
     }
 
@@ -80,6 +82,12 @@ class SnapshotAgent extends MockAgent {
     handler = WrapHandler.wrap(handler)
     const mode = this[kSnapshotMode]
 
+    // Check if URL should be excluded (pass through without mocking/recording)
+    if (this[kSnapshotRecorder].isUrlExcluded(opts)) {
+      // Real agent is guaranteed by constructor when excludeUrls is configured
+      return this[kRealAgent].dispatch(opts, handler)
+    }
+
     if (mode === 'playback' || mode === 'update') {
       // Ensure snapshots are loaded
       if (!this[kSnapshotLoaded]) {
@@ -162,11 +170,9 @@ class SnapshotAgent extends MockAgent {
         headers: responseData.headers,
         body: responseBody,
         trailers: responseData.trailers
-      }).then(() => {
-        handler.onResponseEnd(controller, trailers)
-      }).catch((error) => {
-        handler.onResponseError(controller, error)
       })
+        .then(() => handler.onResponseEnd(controller, trailers))
+        .catch((error) => handler.onResponseError(controller, error))
   }
 }
 
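With this change a snapshot agent in playback mode also creates a real `Agent` when `excludeUrls` is set, so excluded requests pass straight through instead of failing for lack of a snapshot. A hedged sketch (the `SnapshotAgent` export and the `mode`/`snapshotPath` option names are assumed from undici's snapshot tooling; only `excludeUrls` appears in this diff):

```js
const { SnapshotAgent, setGlobalDispatcher } = require('undici')

const agent = new SnapshotAgent({
  mode: 'playback',                     // assumed option name
  snapshotPath: './snapshots.json',     // assumed option name
  // New in 7.17.0: excluded URLs work in playback mode too, because the
  // constructor now creates a real Agent for them to pass through to.
  excludeUrls: ['https://telemetry.example.com']
})

setGlobalDispatcher(agent)
```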
package/lib/mock/snapshot-recorder.js
CHANGED

@@ -283,8 +283,7 @@ class SnapshotRecorder {
   }
 
   // Check URL exclusion patterns
-
-  if (this.#isUrlExcluded(url)) {
+  if (this.isUrlExcluded(requestOpts)) {
     return // Skip recording
   }
 
@@ -330,6 +329,16 @@ class SnapshotRecorder {
     }
   }
 
+  /**
+   * Checks if a URL should be excluded from recording/playback
+   * @param {SnapshotRequestOptions} requestOpts - Request options to check
+   * @returns {boolean} - True if URL is excluded
+   */
+  isUrlExcluded (requestOpts) {
+    const url = new URL(requestOpts.path, requestOpts.origin).toString()
+    return this.#isUrlExcluded(url)
+  }
+
   /**
    * Finds a matching snapshot for the given request
    * Returns the appropriate response based on call count for sequential responses
@@ -344,8 +353,7 @@ class SnapshotRecorder {
   }
 
   // Check URL exclusion patterns
-
-  if (this.#isUrlExcluded(url)) {
+  if (this.isUrlExcluded(requestOpts)) {
     return undefined // Skip playback
   }
 
package/lib/mock/snapshot-utils.js
CHANGED

@@ -1,6 +1,7 @@
 'use strict'
 
 const { InvalidArgumentError } = require('../core/errors')
+const { runtimeFeatures } = require('../util/runtime-features.js')
 
 /**
  * @typedef {Object} HeaderFilters
@@ -25,10 +26,9 @@ function createHeaderFilters (matchOptions = {}) {
   }
 }
 
-
-
-
-} catch { /* Fallback if crypto is not available */ }
+const crypto = runtimeFeatures.has('crypto')
+  ? require('node:crypto')
+  : null
 
 /**
  * @callback HashIdFunction
package/lib/util/cache.js
CHANGED

@@ -364,6 +364,33 @@ function assertCacheMethods (methods, name = 'CacheMethods') {
   }
 }
 
+/**
+ * Creates a string key for request deduplication purposes.
+ * This key is used to identify in-flight requests that can be shared.
+ * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey
+ * @param {Set<string>} [excludeHeaders] Set of lowercase header names to exclude from the key
+ * @returns {string}
+ */
+function makeDeduplicationKey (cacheKey, excludeHeaders) {
+  // Create a deterministic string key from the cache key
+  // Include origin, method, path, and sorted headers
+  let key = `${cacheKey.origin}:${cacheKey.method}:${cacheKey.path}`
+
+  if (cacheKey.headers) {
+    const sortedHeaders = Object.keys(cacheKey.headers).sort()
+    for (const header of sortedHeaders) {
+      // Skip excluded headers
+      if (excludeHeaders?.has(header.toLowerCase())) {
+        continue
+      }
+      const value = cacheKey.headers[header]
+      key += `:${header}=${Array.isArray(value) ? value.join(',') : value}`
+    }
+  }
+
+  return key
+}
+
 module.exports = {
   makeCacheKey,
   normalizeHeaders,
@@ -373,5 +400,6 @@ module.exports = {
   parseVaryHeader,
   isEtagUsable,
   assertCacheMethods,
-  assertCacheStore
+  assertCacheStore,
+  makeDeduplicationKey
 }
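`makeDeduplicationKey` builds a plain string from origin, method, path, and the sorted request headers, skipping any excluded names. An illustration of the resulting key shape (the relative require path assumes you are inside the undici source tree; this is an internal helper, not a public API):

```js
const { makeDeduplicationKey } = require('./lib/util/cache.js')

const key = makeDeduplicationKey(
  {
    origin: 'https://example.com',
    method: 'GET',
    path: '/data',
    headers: { accept: 'application/json', 'x-request-id': 'abc' }
  },
  new Set(['x-request-id']) // excluded from the key
)

console.log(key) // 'https://example.com:GET:/data:accept=application/json'
```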
package/lib/util/runtime-features.js
ADDED

@@ -0,0 +1,124 @@
+'use strict'
+
+/** @typedef {`node:${string}`} NodeModuleName */
+
+/** @type {Record<NodeModuleName, () => any>} */
+const lazyLoaders = {
+  __proto__: null,
+  'node:crypto': () => require('node:crypto'),
+  'node:sqlite': () => require('node:sqlite'),
+  'node:worker_threads': () => require('node:worker_threads'),
+  'node:zlib': () => require('node:zlib')
+}
+
+/**
+ * @param {NodeModuleName} moduleName
+ * @returns {boolean}
+ */
+function detectRuntimeFeatureByNodeModule (moduleName) {
+  try {
+    lazyLoaders[moduleName]()
+    return true
+  } catch (err) {
+    if (err.code !== 'ERR_UNKNOWN_BUILTIN_MODULE') {
+      throw err
+    }
+    return false
+  }
+}
+
+/**
+ * @param {NodeModuleName} moduleName
+ * @param {string} property
+ * @returns {boolean}
+ */
+function detectRuntimeFeatureByExportedProperty (moduleName, property) {
+  const module = lazyLoaders[moduleName]()
+  return typeof module[property] !== 'undefined'
+}
+
+const runtimeFeaturesByExportedProperty = /** @type {const} */ (['markAsUncloneable', 'zstd'])
+
+/** @type {Record<RuntimeFeatureByExportedProperty, [NodeModuleName, string]>} */
+const exportedPropertyLookup = {
+  markAsUncloneable: ['node:worker_threads', 'markAsUncloneable'],
+  zstd: ['node:zlib', 'createZstdDecompress']
+}
+
+/** @typedef {typeof runtimeFeaturesByExportedProperty[number]} RuntimeFeatureByExportedProperty */
+
+const runtimeFeaturesAsNodeModule = /** @type {const} */ (['crypto', 'sqlite'])
+/** @typedef {typeof runtimeFeaturesAsNodeModule[number]} RuntimeFeatureByNodeModule */
+
+const features = /** @type {const} */ ([
+  ...runtimeFeaturesAsNodeModule,
+  ...runtimeFeaturesByExportedProperty
+])
+
+/** @typedef {typeof features[number]} Feature */
+
+/**
+ * @param {Feature} feature
+ * @returns {boolean}
+ */
+function detectRuntimeFeature (feature) {
+  if (runtimeFeaturesAsNodeModule.includes(/** @type {RuntimeFeatureByNodeModule} */ (feature))) {
+    return detectRuntimeFeatureByNodeModule(`node:${feature}`)
+  } else if (runtimeFeaturesByExportedProperty.includes(/** @type {RuntimeFeatureByExportedProperty} */ (feature))) {
+    const [moduleName, property] = exportedPropertyLookup[feature]
+    return detectRuntimeFeatureByExportedProperty(moduleName, property)
+  }
+  throw new TypeError(`unknown feature: ${feature}`)
+}
+
+/**
+ * @class
+ * @name RuntimeFeatures
+ */
+class RuntimeFeatures {
+  /** @type {Map<Feature, boolean>} */
+  #map = new Map()
+
+  /**
+   * Clears all cached feature detections.
+   */
+  clear () {
+    this.#map.clear()
+  }
+
+  /**
+   * @param {Feature} feature
+   * @returns {boolean}
+   */
+  has (feature) {
+    return (
+      this.#map.get(feature) ?? this.#detectRuntimeFeature(feature)
+    )
+  }
+
+  /**
+   * @param {Feature} feature
+   * @param {boolean} value
+   */
+  set (feature, value) {
+    if (features.includes(feature) === false) {
+      throw new TypeError(`unknown feature: ${feature}`)
+    }
+    this.#map.set(feature, value)
+  }
+
+  /**
+   * @param {Feature} feature
+   * @returns {boolean}
+   */
+  #detectRuntimeFeature (feature) {
+    const result = detectRuntimeFeature(feature)
+    this.#map.set(feature, result)
+    return result
+  }
+}
+
+const instance = new RuntimeFeatures()
+
+module.exports.runtimeFeatures = instance
+module.exports.default = instance
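`runtimeFeatures` centralizes the lazy, memoized feature probes that `decompress.js` and `snapshot-utils.js` now rely on instead of ad hoc `try`/`catch` requires. A sketch of how it is used internally (the relative require path assumes the undici source tree):

```js
const { runtimeFeatures } = require('./lib/util/runtime-features.js')

// Detection is lazy and cached per feature name.
if (runtimeFeatures.has('zstd')) {
  // node:zlib exposes createZstdDecompress on this runtime
}

// Same pattern snapshot-utils.js uses for optional crypto support.
const crypto = runtimeFeatures.has('crypto') ? require('node:crypto') : null
console.log(crypto !== null)

// Tests can force a result and later reset the cache.
runtimeFeatures.set('sqlite', false)
runtimeFeatures.clear()
```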
package/lib/web/cookies/parse.js
CHANGED

@@ -1,8 +1,8 @@
 'use strict'
 
+const { collectASequenceOfCodePointsFast } = require('../infra')
 const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
 const { isCTLExcludingHtab } = require('./util')
-const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
 const assert = require('node:assert')
 const { unescape: qsUnescape } = require('node:querystring')
 