undici 7.14.0 → 7.16.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/docs/docs/api/Agent.md +1 -0
- package/docs/docs/api/Dispatcher.md +59 -0
- package/docs/docs/api/Errors.md +0 -1
- package/index-fetch.js +2 -2
- package/index.js +6 -9
- package/lib/api/api-request.js +22 -8
- package/lib/api/readable.js +7 -5
- package/lib/core/errors.js +217 -13
- package/lib/core/request.js +5 -1
- package/lib/core/util.js +45 -11
- package/lib/dispatcher/agent.js +44 -23
- package/lib/dispatcher/client-h1.js +20 -9
- package/lib/dispatcher/client-h2.js +13 -3
- package/lib/dispatcher/client.js +57 -57
- package/lib/dispatcher/dispatcher-base.js +12 -7
- package/lib/dispatcher/env-http-proxy-agent.js +12 -16
- package/lib/dispatcher/fixed-queue.js +15 -39
- package/lib/dispatcher/h2c-client.js +6 -6
- package/lib/dispatcher/pool-base.js +60 -43
- package/lib/dispatcher/pool.js +2 -2
- package/lib/dispatcher/proxy-agent.js +14 -9
- package/lib/global.js +19 -1
- package/lib/interceptor/cache.js +61 -0
- package/lib/interceptor/decompress.js +253 -0
- package/lib/llhttp/constants.d.ts +99 -1
- package/lib/llhttp/constants.js +34 -1
- package/lib/llhttp/llhttp-wasm.js +1 -1
- package/lib/llhttp/llhttp_simd-wasm.js +1 -1
- package/lib/llhttp/utils.d.ts +2 -2
- package/lib/llhttp/utils.js +3 -6
- package/lib/mock/mock-agent.js +4 -4
- package/lib/mock/mock-errors.js +10 -0
- package/lib/mock/mock-utils.js +12 -10
- package/lib/util/cache.js +6 -7
- package/lib/util/date.js +534 -140
- package/lib/web/cookies/index.js +1 -1
- package/lib/web/cookies/parse.js +2 -2
- package/lib/web/eventsource/eventsource-stream.js +2 -2
- package/lib/web/eventsource/eventsource.js +34 -29
- package/lib/web/eventsource/util.js +1 -9
- package/lib/web/fetch/body.js +20 -26
- package/lib/web/fetch/index.js +15 -16
- package/lib/web/fetch/response.js +2 -4
- package/lib/web/fetch/util.js +8 -230
- package/lib/web/subresource-integrity/Readme.md +9 -0
- package/lib/web/subresource-integrity/subresource-integrity.js +306 -0
- package/lib/web/webidl/index.js +203 -42
- package/lib/web/websocket/connection.js +4 -3
- package/lib/web/websocket/events.js +1 -1
- package/lib/web/websocket/stream/websocketerror.js +22 -1
- package/lib/web/websocket/stream/websocketstream.js +16 -7
- package/lib/web/websocket/websocket.js +32 -42
- package/package.json +9 -7
- package/types/agent.d.ts +1 -0
- package/types/diagnostics-channel.d.ts +0 -1
- package/types/errors.d.ts +5 -15
- package/types/interceptors.d.ts +5 -0
- package/types/snapshot-agent.d.ts +5 -3
- package/types/webidl.d.ts +82 -21
- package/lib/api/util.js +0 -95
- package/lib/llhttp/constants.js.map +0 -1
- package/lib/llhttp/utils.js.map +0 -1
package/lib/web/cookies/index.js
CHANGED
package/lib/web/cookies/parse.js
CHANGED
@@ -4,7 +4,7 @@ const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
 const { isCTLExcludingHtab } = require('./util')
 const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
 const assert = require('node:assert')
-const { unescape } = require('node:querystring')
+const { unescape: qsUnescape } = require('node:querystring')
 
 /**
  * @description Parses the field-value attributes of a set-cookie header string.
@@ -82,7 +82,7 @@ function parseSetCookie (header) {
   // store arbitrary data in a cookie-value SHOULD encode that data, for
   // example, using Base64 [RFC4648].
   return {
-    name, value: unescape(value), ...parseUnparsedAttributes(unparsedAttributes)
+    name, value: qsUnescape(value), ...parseUnparsedAttributes(unparsedAttributes)
   }
 }
 
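The only functional piece here is that cookie values keep being percent-decoded with `node:querystring`'s `unescape`; the import is renamed to `qsUnescape` so it no longer shadows the global `unescape`. A minimal standalone sketch of that decoding step (the sample value is hypothetical):

```js
'use strict'
const { unescape: qsUnescape } = require('node:querystring')

// Percent-decode a Set-Cookie value the same way parseSetCookie does above.
const rawValue = 'hello%20world%21' // hypothetical encoded cookie value
console.log(qsUnescape(rawValue)) // hello world!
```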
package/lib/web/eventsource/eventsource-stream.js
CHANGED
@@ -236,7 +236,7 @@ class EventSourceStream extends Transform {
       this.buffer = this.buffer.subarray(this.pos + 1)
       this.pos = 0
       if (
-        this.event.data !== undefined || this.event.event || this.event.id || this.event.retry) {
+        this.event.data !== undefined || this.event.event || this.event.id !== undefined || this.event.retry) {
         this.processEvent(this.event)
       }
       this.clearEvent()
@@ -367,7 +367,7 @@ class EventSourceStream extends Transform {
       this.state.reconnectionTime = parseInt(event.retry, 10)
     }
 
-    if (event.id && isValidLastEventId(event.id)) {
+    if (event.id !== undefined && isValidLastEventId(event.id)) {
       this.state.lastEventId = event.id
     }
 
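Both hunks replace a truthiness check on `event.id` with an explicit `!== undefined` comparison, presumably so that an empty `id:` field, which the SSE format treats as a valid (empty) last event ID, is no longer dropped. A small standalone sketch of the difference:

```js
'use strict'

// Hypothetical parsed SSE event whose `id:` field was present but empty.
const event = { id: '', data: 'ping' }

// Truthiness check: the empty string is falsy, so the id would be ignored.
console.log(Boolean(event.id)) // false

// Explicit check: the empty id still counts as present.
console.log(event.id !== undefined) // true
```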
package/lib/web/eventsource/eventsource.js
CHANGED
@@ -8,7 +8,6 @@ const { EventSourceStream } = require('./eventsource-stream')
 const { parseMIMEType } = require('../fetch/data-url')
 const { createFastMessageEvent } = require('../websocket/events')
 const { isNetworkError } = require('../fetch/response')
-const { delay } = require('./util')
 const { kEnumerableProperty } = require('../../core/util')
 const { environmentSettingsObject } = require('../fetch/util')
 
@@ -318,9 +317,9 @@ class EventSource extends EventTarget {
 
   /**
    * @see https://html.spec.whatwg.org/multipage/server-sent-events.html#sse-processing-model
-   * @returns {
+   * @returns {void}
    */
-
+  #reconnect () {
     // When a user agent is to reestablish the connection, the user agent must
     // run the following steps. These steps are run in parallel, not as part of
     // a task. (The tasks that it queues, of course, are run like normal tasks
@@ -338,27 +337,27 @@
     this.dispatchEvent(new Event('error'))
 
     // 2. Wait a delay equal to the reconnection time of the event source.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
+    setTimeout(() => {
+      // 5. Queue a task to run the following steps:
+
+      // 1. If the EventSource object's readyState attribute is not set to
+      //    CONNECTING, then return.
+      if (this.#readyState !== CONNECTING) return
+
+      // 2. Let request be the EventSource object's request.
+      // 3. If the EventSource object's last event ID string is not the empty
+      //    string, then:
+      //   1. Let lastEventIDValue be the EventSource object's last event ID
+      //      string, encoded as UTF-8.
+      //   2. Set (`Last-Event-ID`, lastEventIDValue) in request's header
+      //      list.
+      if (this.#state.lastEventId.length) {
+        this.#request.headersList.set('last-event-id', this.#state.lastEventId, true)
+      }
 
-
-
+      // 4. Fetch request and process the response obtained in this fashion, if any, as described earlier in this section.
+      this.#connect()
+    }, this.#state.reconnectionTime)?.unref()
   }
 
   /**
@@ -383,9 +382,11 @@
       this.removeEventListener('open', this.#events.open)
     }
 
-
+    const listener = webidl.converters.EventHandlerNonNull(fn)
+
+    if (listener !== null) {
+      this.addEventListener('open', listener)
       this.#events.open = fn
-      this.addEventListener('open', fn)
     } else {
       this.#events.open = null
     }
@@ -400,9 +401,11 @@
       this.removeEventListener('message', this.#events.message)
     }
 
-
+    const listener = webidl.converters.EventHandlerNonNull(fn)
+
+    if (listener !== null) {
+      this.addEventListener('message', listener)
       this.#events.message = fn
-      this.addEventListener('message', fn)
     } else {
      this.#events.message = null
    }
@@ -417,9 +420,11 @@
       this.removeEventListener('error', this.#events.error)
     }
 
-
+    const listener = webidl.converters.EventHandlerNonNull(fn)
+
+    if (listener !== null) {
+      this.addEventListener('error', listener)
       this.#events.error = fn
-      this.addEventListener('error', fn)
     } else {
       this.#events.error = null
     }
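The reconnect rewrite drops the promise-based `delay` helper and schedules the retry with a plain `setTimeout` whose handle is `unref()`'d, so a pending reconnection no longer keeps the Node.js event loop alive; the optional chaining guards runtimes where `setTimeout` does not return a `Timeout` object. A minimal sketch of that pattern (values are hypothetical, not undici's internals):

```js
'use strict'

const reconnectionTime = 3000 // hypothetical; EventSource takes this from the `retry:` field

const timer = setTimeout(() => {
  console.log('attempting to reconnect…')
}, reconnectionTime)

// unref() exists on Node.js Timeout objects and lets the process exit while the
// timer is pending; guard the call for runtimes where setTimeout returns a number.
timer.unref?.()
```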
package/lib/web/eventsource/util.js
CHANGED
@@ -23,15 +23,7 @@ function isASCIINumber (value) {
   return true
 }
 
-// https://github.com/nodejs/undici/issues/2664
-function delay (ms) {
-  return new Promise((resolve) => {
-    setTimeout(resolve, ms)
-  })
-}
-
 module.exports = {
   isValidLastEventId,
-  isASCIINumber,
-  delay
+  isASCIINumber
 }
package/lib/web/fetch/body.js
CHANGED
@@ -60,7 +60,7 @@ function extractBody (object, keepalive = false) {
     // 4. Otherwise, set stream to a new ReadableStream object, and set
     // up stream with byte reading support.
     stream = new ReadableStream({
-
+      pull (controller) {
        const buffer = typeof source === 'string' ? textEncoder.encode(source) : source
 
        if (buffer.byteLength) {
@@ -110,22 +110,16 @@ function extractBody (object, keepalive = false) {
 
     // Set type to `application/x-www-form-urlencoded;charset=UTF-8`.
     type = 'application/x-www-form-urlencoded;charset=UTF-8'
-  } else if (
-
-
-
-    source = new Uint8Array(object.slice())
-  } else if (ArrayBuffer.isView(object)) {
-    // BufferSource/ArrayBufferView
-
-    // Set source to a copy of the bytes held by object.
-    source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
+  } else if (webidl.is.BufferSource(object)) {
+    source = isArrayBuffer(object)
+      ? new Uint8Array(object.slice())
+      : new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength))
   } else if (webidl.is.FormData(object)) {
     const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}`
     const prefix = `--${boundary}\r\nContent-Disposition: form-data`
 
     /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
-    const
+    const formdataEscape = (str) =>
       str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
     const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
 
@@ -143,13 +137,13 @@ function extractBody (object, keepalive = false) {
     for (const [name, value] of object) {
       if (typeof value === 'string') {
         const chunk = textEncoder.encode(prefix +
-          `; name="${
+          `; name="${formdataEscape(normalizeLinefeeds(name))}"` +
           `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
         blobParts.push(chunk)
         length += chunk.byteLength
       } else {
-        const chunk = textEncoder.encode(`${prefix}; name="${
-          (value.name ? `; filename="${
+        const chunk = textEncoder.encode(`${prefix}; name="${formdataEscape(normalizeLinefeeds(name))}"` +
+          (value.name ? `; filename="${formdataEscape(value.name)}"` : '') + '\r\n' +
           `Content-Type: ${
             value.type || 'application/octet-stream'
           }\r\n\r\n`)
@@ -320,12 +314,6 @@ function cloneBody (body) {
   }
 }
 
-function throwIfAborted (state) {
-  if (state.aborted) {
-    throw new DOMException('The operation was aborted.', 'AbortError')
-  }
-}
-
 function bodyMixinMethods (instance, getInternalState) {
   const methods = {
     blob () {
@@ -443,24 +431,30 @@ function mixinBody (prototype, getInternalState) {
  * @param {any} instance
  * @param {(target: any) => any} getInternalState
  */
-
-
+function consumeBody (object, convertBytesToJSValue, instance, getInternalState) {
+  try {
+    webidl.brandCheck(object, instance)
+  } catch (e) {
+    return Promise.reject(e)
+  }
 
   const state = getInternalState(object)
 
   // 1. If object is unusable, then return a promise rejected
   //    with a TypeError.
   if (bodyUnusable(state)) {
-
+    return Promise.reject(new TypeError('Body is unusable: Body has already been read'))
   }
 
-
+  if (state.aborted) {
+    return Promise.reject(new DOMException('The operation was aborted.', 'AbortError'))
+  }
 
   // 2. Let promise be a new promise.
   const promise = createDeferredPromise()
 
   // 3. Let errorSteps given error be to reject promise with error.
-  const errorSteps =
+  const errorSteps = promise.reject
 
   // 4. Let successSteps given a byte sequence data be to resolve
   //    promise with the result of running convertBytesToJSValue
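The `extractBody` change above folds the separate ArrayBuffer and ArrayBufferView branches into a single `webidl.is.BufferSource` check and copies the underlying bytes either way. A standalone sketch of those two copy paths (`copyBufferSource` is a hypothetical helper, not undici's API):

```js
'use strict'
const { isArrayBuffer } = require('node:util/types')

// Copy the bytes held by a BufferSource (an ArrayBuffer or any typed-array view),
// mirroring the two copy paths in the hunk above.
function copyBufferSource (object) {
  return isArrayBuffer(object)
    ? new Uint8Array(object.slice()) // copy the whole ArrayBuffer
    : new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) // copy only the viewed range
}

console.log(copyBufferSource(new Uint8Array([1, 2, 3]).subarray(1))) // Uint8Array(2) [ 2, 3 ]
console.log(copyBufferSource(new ArrayBuffer(4)).byteLength) // 4
```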
package/lib/web/fetch/index.js
CHANGED
@@ -14,7 +14,6 @@ const { HeadersList } = require('./headers')
 const { Request, cloneRequest, getRequestDispatcher, getRequestState } = require('./request')
 const zlib = require('node:zlib')
 const {
-  bytesMatch,
   makePolicyContainer,
   clonePolicyContainer,
   requestBadPort,
@@ -62,7 +61,11 @@ const { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = requ
 const { getGlobalDispatcher } = require('../../global')
 const { webidl } = require('../webidl')
 const { STATUS_CODES } = require('node:http')
+const { bytesMatch } = require('../subresource-integrity/subresource-integrity')
 const { createDeferredPromise } = require('../../util/promise')
+
+const hasZstd = typeof zlib.createZstdDecompress === 'function'
+
 const GET_OR_HEAD = ['GET', 'HEAD']
 
 const defaultUserAgent = typeof __UNDICI_IS_NODE__ !== 'undefined' || typeof esbuildDetection !== 'undefined'
@@ -2104,33 +2107,29 @@ async function httpNetworkFetch (
         return false
       }
 
-      /** @type {string[]} */
-      let codings = []
-
       const headersList = new HeadersList()
 
       for (let i = 0; i < rawHeaders.length; i += 2) {
         headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString('latin1'), true)
       }
-      const contentEncoding = headersList.get('content-encoding', true)
-      if (contentEncoding) {
-        // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
-        // "All content-coding values are case-insensitive..."
-        codings = contentEncoding.toLowerCase().split(',').map((x) => x.trim())
-      }
       const location = headersList.get('location', true)
 
       this.body = new Readable({ read: resume })
 
-      const decoders = []
-
       const willFollow = location && request.redirect === 'follow' &&
         redirectStatusSet.has(status)
 
+      const decoders = []
+
       // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
-      if (
+      if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
+        // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+        const contentEncoding = headersList.get('content-encoding', true)
+        // "All content-coding values are case-insensitive..."
+        /** @type {string[]} */
+        const codings = contentEncoding ? contentEncoding.toLowerCase().split(',') : []
        for (let i = codings.length - 1; i >= 0; --i) {
-          const coding = codings[i]
+          const coding = codings[i].trim()
           // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
           if (coding === 'x-gzip' || coding === 'gzip') {
             decoders.push(zlib.createGunzip({
@@ -2151,8 +2150,8 @@
               flush: zlib.constants.BROTLI_OPERATION_FLUSH,
               finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH
             }))
-          } else if (coding === 'zstd' &&
-
+          } else if (coding === 'zstd' && hasZstd) {
+            // Node.js v23.8.0+ and v22.15.0+ supports Zstandard
             decoders.push(zlib.createZstdDecompress({
               flush: zlib.constants.ZSTD_e_continue,
               finishFlush: zlib.constants.ZSTD_e_end
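The decoding setup now feature-detects Zstandard support once (`hasZstd`) and only builds decoders when the response can actually carry a body. A hedged sketch of the same coding-to-decoder mapping — `decoderFor` is a hypothetical helper, and undici's real deflate handling is more lenient than plain `createInflate`:

```js
'use strict'
const zlib = require('node:zlib')

// Zstandard decoding is available in Node.js v23.8.0+ and v22.15.0+.
const hasZstd = typeof zlib.createZstdDecompress === 'function'

// Map a single content-coding token to a decoder stream; null means "pass through".
function decoderFor (coding) {
  switch (coding.trim().toLowerCase()) {
    case 'gzip':
    case 'x-gzip':
      return zlib.createGunzip()
    case 'deflate':
      return zlib.createInflate()
    case 'br':
      return zlib.createBrotliDecompress()
    case 'zstd':
      return hasZstd ? zlib.createZstdDecompress() : null
    default:
      return null
  }
}

console.log(decoderFor('gzip') !== null) // true
```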
package/lib/web/fetch/response.js
CHANGED
@@ -23,8 +23,6 @@ const { URLSerializer } = require('./data-url')
 const { kConstruct } = require('../../core/symbols')
 const assert = require('node:assert')
 
-const { isArrayBuffer } = nodeUtil.types
-
 const textEncoder = new TextEncoder('utf-8')
 
 // https://fetch.spec.whatwg.org/#response-class
@@ -120,7 +118,7 @@ class Response {
     }
 
     if (body !== null) {
-      body = webidl.converters.BodyInit(body)
+      body = webidl.converters.BodyInit(body, 'Response', 'body')
     }
 
     init = webidl.converters.ResponseInit(init)
@@ -580,7 +578,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V, prefix, name) {
     return V
   }
 
-  if (
+  if (webidl.is.BufferSource(V)) {
     return V
   }
 
package/lib/web/fetch/util.js
CHANGED
@@ -11,20 +11,6 @@ const assert = require('node:assert')
 const { isUint8Array } = require('node:util/types')
 const { webidl } = require('../webidl')
 
-let supportedHashes = []
-
-// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
-/** @type {import('crypto')} */
-let crypto
-try {
-  crypto = require('node:crypto')
-  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
-  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
-  /* c8 ignore next 3 */
-} catch {
-
-}
-
 function responseURL (response) {
   // https://fetch.spec.whatwg.org/#responses
   // A response has an associated URL. It is a pointer to the last URL
@@ -516,8 +502,8 @@ function determineRequestsReferrer (request) {
       if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) {
         return 'no-referrer'
       }
-      // 2. Return
-      return
+      // 2. Return referrerURL.
+      return referrerURL
     }
   }
 }
@@ -568,17 +554,11 @@ function stripURLForReferrer (url, originOnly = false) {
   return url
 }
 
-const
-
-  '(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\\.){2}' +
-  '(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[1-9])' +
-  ')$')
+const isPotentialleTrustworthyIPv4 = RegExp.prototype.test
+  .bind(/^127\.(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)\.){2}(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)$/)
 
-const
-
-  '(?:(?:0{1,4}):){1,6}(?::(?:0{0,3}1))|' +
-  '(?:::(?:0{0,3}1))|' +
-  ')$')
+const isPotentiallyTrustworthyIPv6 = RegExp.prototype.test
+  .bind(/^(?:(?:0{1,4}:){7}|(?:0{1,4}:){1,6}:|::)0{0,3}1$/)
 
 /**
  * Check if host matches one of the CIDR notations 127.0.0.0/8 or ::1/128.
@@ -593,11 +573,11 @@ function isOriginIPPotentiallyTrustworthy (origin) {
     if (origin[0] === '[' && origin[origin.length - 1] === ']') {
       origin = origin.slice(1, -1)
     }
-    return
+    return isPotentiallyTrustworthyIPv6(origin)
   }
 
   // IPv4
-  return
+  return isPotentialleTrustworthyIPv4(origin)
 }
 
 /**
@@ -698,206 +678,6 @@ function isURLPotentiallyTrustworthy (url) {
   return isOriginPotentiallyTrustworthy(url.origin)
 }
 
-/**
- * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
- * @param {Uint8Array} bytes
- * @param {string} metadataList
- */
-function bytesMatch (bytes, metadataList) {
-  // If node is not built with OpenSSL support, we cannot check
-  // a request's integrity, so allow it by default (the spec will
-  // allow requests if an invalid hash is given, as precedence).
-  /* istanbul ignore if: only if node is built with --without-ssl */
-  if (crypto === undefined) {
-    return true
-  }
-
-  // 1. Let parsedMetadata be the result of parsing metadataList.
-  const parsedMetadata = parseMetadata(metadataList)
-
-  // 2. If parsedMetadata is no metadata, return true.
-  if (parsedMetadata === 'no metadata') {
-    return true
-  }
-
-  // 3. If response is not eligible for integrity validation, return false.
-  // TODO
-
-  // 4. If parsedMetadata is the empty set, return true.
-  if (parsedMetadata.length === 0) {
-    return true
-  }
-
-  // 5. Let metadata be the result of getting the strongest
-  //    metadata from parsedMetadata.
-  const strongest = getStrongestMetadata(parsedMetadata)
-  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
-
-  // 6. For each item in metadata:
-  for (const item of metadata) {
-    // 1. Let algorithm be the alg component of item.
-    const algorithm = item.algo
-
-    // 2. Let expectedValue be the val component of item.
-    const expectedValue = item.hash
-
-    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
-    // "be liberal with padding". This is annoying, and it's not even in the spec.
-
-    // 3. Let actualValue be the result of applying algorithm to bytes.
-    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
-
-    if (actualValue[actualValue.length - 1] === '=') {
-      if (actualValue[actualValue.length - 2] === '=') {
-        actualValue = actualValue.slice(0, -2)
-      } else {
-        actualValue = actualValue.slice(0, -1)
-      }
-    }
-
-    // 4. If actualValue is a case-sensitive match for expectedValue,
-    //    return true.
-    if (compareBase64Mixed(actualValue, expectedValue)) {
-      return true
-    }
-  }
-
-  // 7. Return false.
-  return false
-}
-
-// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
-// https://www.w3.org/TR/CSP2/#source-list-syntax
-// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
-
-/**
- * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- * @param {string} metadata
- */
-function parseMetadata (metadata) {
-  // 1. Let result be the empty set.
-  /** @type {{ algo: string, hash: string }[]} */
-  const result = []
-
-  // 2. Let empty be equal to true.
-  let empty = true
-
-  // 3. For each token returned by splitting metadata on spaces:
-  for (const token of metadata.split(' ')) {
-    // 1. Set empty to false.
-    empty = false
-
-    // 2. Parse token as a hash-with-options.
-    const parsedToken = parseHashWithOptions.exec(token)
-
-    // 3. If token does not parse, continue to the next token.
-    if (
-      parsedToken === null ||
-      parsedToken.groups === undefined ||
-      parsedToken.groups.algo === undefined
-    ) {
-      // Note: Chromium blocks the request at this point, but Firefox
-      // gives a warning that an invalid integrity was given. The
-      // correct behavior is to ignore these, and subsequently not
-      // check the integrity of the resource.
-      continue
-    }
-
-    // 4. Let algorithm be the hash-algo component of token.
-    const algorithm = parsedToken.groups.algo.toLowerCase()
-
-    // 5. If algorithm is a hash function recognized by the user
-    //    agent, add the parsed token to result.
-    if (supportedHashes.includes(algorithm)) {
-      result.push(parsedToken.groups)
-    }
-  }
-
-  // 4. Return no metadata if empty is true, otherwise return result.
-  if (empty === true) {
-    return 'no metadata'
-  }
-
-  return result
-}
-
-/**
- * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
- */
-function getStrongestMetadata (metadataList) {
-  // Let algorithm be the algo component of the first item in metadataList.
-  // Can be sha256
-  let algorithm = metadataList[0].algo
-  // If the algorithm is sha512, then it is the strongest
-  // and we can return immediately
-  if (algorithm[3] === '5') {
-    return algorithm
-  }
-
-  for (let i = 1; i < metadataList.length; ++i) {
-    const metadata = metadataList[i]
-    // If the algorithm is sha512, then it is the strongest
-    // and we can break the loop immediately
-    if (metadata.algo[3] === '5') {
-      algorithm = 'sha512'
-      break
-    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
-    } else if (algorithm[3] === '3') {
-      continue
-    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
-    // the strongest
-    } else if (metadata.algo[3] === '3') {
-      algorithm = 'sha384'
-    }
-  }
-  return algorithm
-}
-
-function filterMetadataListByAlgorithm (metadataList, algorithm) {
-  if (metadataList.length === 1) {
-    return metadataList
-  }
-
-  let pos = 0
-  for (let i = 0; i < metadataList.length; ++i) {
-    if (metadataList[i].algo === algorithm) {
-      metadataList[pos++] = metadataList[i]
-    }
-  }
-
-  metadataList.length = pos
-
-  return metadataList
-}
-
-/**
- * Compares two base64 strings, allowing for base64url
- * in the second string.
- *
- * @param {string} actualValue always base64
- * @param {string} expectedValue base64 or base64url
- * @returns {boolean}
- */
-function compareBase64Mixed (actualValue, expectedValue) {
-  if (actualValue.length !== expectedValue.length) {
-    return false
-  }
-  for (let i = 0; i < actualValue.length; ++i) {
-    if (actualValue[i] !== expectedValue[i]) {
-      if (
-        (actualValue[i] === '+' && expectedValue[i] === '-') ||
-        (actualValue[i] === '/' && expectedValue[i] === '_')
-      ) {
-        continue
-      }
-      return false
-    }
-  }
-
-  return true
-}
-
 // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
 function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
   // TODO
@@ -1761,7 +1541,6 @@ module.exports = {
   isValidHeaderValue,
   isErrorLike,
   fullyReadBody,
-  bytesMatch,
   readableStreamClose,
   isomorphicEncode,
   urlIsLocal,
@@ -1770,7 +1549,6 @@ module.exports = {
   readAllBytes,
   simpleRangeHeaderValue,
   buildContentRange,
-  parseMetadata,
   createInflate,
   extractMimeType,
   getDecodeSplit,
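Besides moving `bytesMatch` and its helpers into the new subresource-integrity module, this file now builds its loopback checks by binding `RegExp.prototype.test` to a regular-expression literal, which yields a reusable predicate function. A minimal sketch of that pattern (the simplified `/^127\./` pattern is for illustration only):

```js
'use strict'

// Binding test() to a regex literal yields a plain string -> boolean predicate.
const looksLikeLoopbackIPv4 = RegExp.prototype.test.bind(/^127\./)

console.log(looksLikeLoopbackIPv4('127.0.0.1')) // true
console.log(looksLikeLoopbackIPv4('192.168.0.1')) // false
```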
package/lib/web/subresource-integrity/Readme.md
ADDED
@@ -0,0 +1,9 @@
+# Subresource Integrity
+
+based on Editor’s Draft, 12 June 2025
+
+This module provides support for Subresource Integrity (SRI) in the context of web fetch operations. SRI is a security feature that allows clients to verify that fetched resources are delivered without unexpected manipulation.
+
+## Links
+
+- [Subresource Integrity](https://w3c.github.io/webappsec-subresource-integrity/)
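As a usage sketch only (not part of the diff): integrity metadata has the form `<hash-algo>-<base64 digest>`, and undici's `fetch`, like the WHATWG standard it implements, accepts it through the `integrity` request option, which this module checks against the downloaded bytes. The URL and body below are placeholders.

```js
'use strict'
const { createHash } = require('node:crypto')
const { fetch } = require('undici')

// Integrity metadata = '<algo>-' + base64 digest of the bytes you expect to receive.
const expectedBody = 'console.log("hi")\n' // hypothetical expected response body
const integrity = `sha256-${createHash('sha256').update(expectedBody).digest('base64')}`

// If the downloaded bytes do not match the metadata, fetch rejects (a TypeError per the spec).
fetch('https://example.com/lib.js', { integrity })
  .then((res) => res.text())
  .then((text) => console.log('verified, length:', text.length))
  .catch((err) => console.error('network error or integrity mismatch:', err))
```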