undici 7.13.0 → 7.15.0
This diff compares the published contents of two package versions as they appear in their public registry, and is provided for informational purposes only.
- package/README.md +5 -5
- package/docs/docs/api/DiagnosticsChannel.md +25 -1
- package/docs/docs/api/Dispatcher.md +59 -0
- package/index.js +2 -1
- package/lib/core/util.js +13 -1
- package/lib/dispatcher/agent.js +25 -16
- package/lib/dispatcher/client-h1.js +1 -1
- package/lib/dispatcher/proxy-agent.js +1 -2
- package/lib/handler/cache-handler.js +22 -4
- package/lib/interceptor/cache.js +2 -2
- package/lib/interceptor/decompress.js +253 -0
- package/lib/llhttp/constants.d.ts +99 -1
- package/lib/llhttp/constants.js +34 -1
- package/lib/llhttp/llhttp-wasm.js +1 -1
- package/lib/llhttp/llhttp_simd-wasm.js +1 -1
- package/lib/llhttp/utils.d.ts +2 -2
- package/lib/llhttp/utils.js +3 -6
- package/lib/mock/snapshot-agent.js +73 -59
- package/lib/mock/snapshot-recorder.js +254 -191
- package/lib/mock/snapshot-utils.js +158 -0
- package/lib/util/cache.js +9 -10
- package/lib/web/cache/cache.js +4 -4
- package/lib/web/cookies/parse.js +2 -2
- package/lib/web/eventsource/eventsource.js +17 -2
- package/lib/web/fetch/body.js +4 -4
- package/lib/web/fetch/formdata.js +1 -1
- package/lib/web/fetch/index.js +1 -1
- package/lib/web/fetch/response.js +8 -4
- package/lib/web/fetch/util.js +0 -216
- package/lib/web/subresource-integrity/Readme.md +9 -0
- package/lib/web/subresource-integrity/subresource-integrity.js +306 -0
- package/lib/web/websocket/stream/websocketstream.js +2 -2
- package/lib/web/websocket/websocket.js +11 -4
- package/package.json +8 -7
- package/types/diagnostics-channel.d.ts +0 -1
- package/types/eventsource.d.ts +6 -1
- package/types/index.d.ts +4 -1
- package/types/interceptors.d.ts +5 -0
- package/types/snapshot-agent.d.ts +5 -3
- package/lib/api/util.js +0 -95
- package/lib/llhttp/constants.js.map +0 -1
- package/lib/llhttp/utils.js.map +0 -1
package/lib/mock/snapshot-utils.js
ADDED

```diff
@@ -0,0 +1,158 @@
+'use strict'
+
+const { InvalidArgumentError } = require('../core/errors')
+
+/**
+ * @typedef {Object} HeaderFilters
+ * @property {Set<string>} ignore - Set of headers to ignore for matching
+ * @property {Set<string>} exclude - Set of headers to exclude from matching
+ * @property {Set<string>} match - Set of headers to match (empty means match all)
+ */
+
+/**
+ * Creates cached header sets for performance
+ *
+ * @param {import('./snapshot-recorder').SnapshotRecorderMatchOptions} matchOptions - Matching options for headers
+ * @returns {HeaderFilters} - Cached sets for ignore, exclude, and match headers
+ */
+function createHeaderFilters (matchOptions = {}) {
+  const { ignoreHeaders = [], excludeHeaders = [], matchHeaders = [], caseSensitive = false } = matchOptions
+
+  return {
+    ignore: new Set(ignoreHeaders.map(header => caseSensitive ? header : header.toLowerCase())),
+    exclude: new Set(excludeHeaders.map(header => caseSensitive ? header : header.toLowerCase())),
+    match: new Set(matchHeaders.map(header => caseSensitive ? header : header.toLowerCase()))
+  }
+}
+
+let crypto
+try {
+  crypto = require('node:crypto')
+} catch { /* Fallback if crypto is not available */ }
+
+/**
+ * @callback HashIdFunction
+ * @param {string} value - The value to hash
+ * @returns {string} - The base64url encoded hash of the value
+ */
+
+/**
+ * Generates a hash for a given value
+ * @type {HashIdFunction}
+ */
+const hashId = crypto?.hash
+  ? (value) => crypto.hash('sha256', value, 'base64url')
+  : (value) => Buffer.from(value).toString('base64url')
+
+/**
+ * @typedef {(url: string) => boolean} IsUrlExcluded Checks if a URL matches any of the exclude patterns
+ */
+
+/** @typedef {{[key: Lowercase<string>]: string}} NormalizedHeaders */
+/** @typedef {Array<string>} UndiciHeaders */
+/** @typedef {Record<string, string|string[]>} Headers */
+
+/**
+ * @param {*} headers
+ * @returns {headers is UndiciHeaders}
+ */
+function isUndiciHeaders (headers) {
+  return Array.isArray(headers) && (headers.length & 1) === 0
+}
+
+/**
+ * Factory function to create a URL exclusion checker
+ * @param {Array<string|RegExp>} [excludePatterns=[]] - Array of patterns to exclude
+ * @returns {IsUrlExcluded} - A function that checks if a URL matches any of the exclude patterns
+ */
+function isUrlExcludedFactory (excludePatterns = []) {
+  if (excludePatterns.length === 0) {
+    return () => false
+  }
+
+  return function isUrlExcluded (url) {
+    let urlLowerCased
+
+    for (const pattern of excludePatterns) {
+      if (typeof pattern === 'string') {
+        if (!urlLowerCased) {
+          // Convert URL to lowercase only once
+          urlLowerCased = url.toLowerCase()
+        }
+        // Simple string match (case-insensitive)
+        if (urlLowerCased.includes(pattern.toLowerCase())) {
+          return true
+        }
+      } else if (pattern instanceof RegExp) {
+        // Regex pattern match
+        if (pattern.test(url)) {
+          return true
+        }
+      }
+    }
+
+    return false
+  }
+}
+
+/**
+ * Normalizes headers for consistent comparison
+ *
+ * @param {Object|UndiciHeaders} headers - Headers to normalize
+ * @returns {NormalizedHeaders} - Normalized headers as a lowercase object
+ */
+function normalizeHeaders (headers) {
+  /** @type {NormalizedHeaders} */
+  const normalizedHeaders = {}
+
+  if (!headers) return normalizedHeaders
+
+  // Handle array format (undici internal format: [name, value, name, value, ...])
+  if (isUndiciHeaders(headers)) {
+    for (let i = 0; i < headers.length; i += 2) {
+      const key = headers[i]
+      const value = headers[i + 1]
+      if (key && value !== undefined) {
+        // Convert Buffers to strings if needed
+        const keyStr = Buffer.isBuffer(key) ? key.toString() : key
+        const valueStr = Buffer.isBuffer(value) ? value.toString() : value
+        normalizedHeaders[keyStr.toLowerCase()] = valueStr
+      }
+    }
+    return normalizedHeaders
+  }
+
+  // Handle object format
+  if (headers && typeof headers === 'object') {
+    for (const [key, value] of Object.entries(headers)) {
+      if (key && typeof key === 'string') {
+        normalizedHeaders[key.toLowerCase()] = Array.isArray(value) ? value.join(', ') : String(value)
+      }
+    }
+  }
+
+  return normalizedHeaders
+}
+
+const validSnapshotModes = /** @type {const} */ (['record', 'playback', 'update'])
+
+/** @typedef {typeof validSnapshotModes[number]} SnapshotMode */
+
+/**
+ * @param {*} mode - The snapshot mode to validate
+ * @returns {asserts mode is SnapshotMode}
+ */
+function validateSnapshotMode (mode) {
+  if (!validSnapshotModes.includes(mode)) {
+    throw new InvalidArgumentError(`Invalid snapshot mode: ${mode}. Must be one of: ${validSnapshotModes.join(', ')}`)
+  }
+}
+
+module.exports = {
+  createHeaderFilters,
+  hashId,
+  isUndiciHeaders,
+  normalizeHeaders,
+  isUrlExcludedFactory,
+  validateSnapshotMode
+}
```
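Taken together, these helpers are the shared plumbing behind the reworked snapshot agent and recorder listed above. A minimal usage sketch (the deep require path is illustrative — these are undici internals, not public API):

```js
'use strict'
// Illustrative internal path; not part of undici's documented API.
const {
  createHeaderFilters,
  hashId,
  normalizeHeaders,
  isUrlExcludedFactory,
  validateSnapshotMode
} = require('undici/lib/mock/snapshot-utils')

validateSnapshotMode('record') // passes; anything but record/playback/update throws

const filters = createHeaderFilters({ ignoreHeaders: ['Date'] })
console.log(filters.ignore.has('date')) // true — lowercased, since caseSensitive defaults to false

const isExcluded = isUrlExcludedFactory(['/health', /\/metrics$/])
console.log(isExcluded('https://example.com/HEALTH')) // true (string match is case-insensitive)

// Flat undici header arrays and plain objects normalize the same way
console.log(normalizeHeaders(['Content-Type', 'text/html'])) // { 'content-type': 'text/html' }

// Stable, filename-safe id for a request; uses sha256 when crypto.hash is available
console.log(hashId('GET|https://example.com/|{}'))
```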
package/lib/util/cache.js
CHANGED

```diff
@@ -1,7 +1,8 @@
 'use strict'
 
 const {
-  safeHTTPMethods
+  safeHTTPMethods,
+  pathHasQueryOrFragment
 } = require('../core/util')
 
 const { serializePathWithQuery } = require('../core/util')
@@ -14,12 +15,10 @@ function makeCacheKey (opts) {
     throw new Error('opts.origin is undefined')
   }
 
-  let fullPath
-
-
-
-  // If fails (path already has query params), use as-is
-  fullPath = opts.path || '/'
+  let fullPath = opts.path || '/'
+
+  if (opts.query && !pathHasQueryOrFragment(opts.path)) {
+    fullPath = serializePathWithQuery(fullPath, opts.query)
   }
 
   return {
@@ -34,7 +33,7 @@ function makeCacheKey (opts) {
  * @param {Record<string, string[] | string>}
  * @returns {Record<string, string[] | string>}
  */
-function
+function normalizeHeaders (opts) {
   let headers
   if (opts.headers == null) {
     headers = {}
@@ -234,7 +233,7 @@ function parseCacheControlHeader (header) {
       }
     }
   } else {
-    // Something like `no-cache=some-header`
+    // Something like `no-cache="some-header"`
    if (key in output) {
      output[key] = output[key].concat(value)
    } else {
@@ -367,7 +366,7 @@ function assertCacheMethods (methods, name = 'CacheMethods') {
 
 module.exports = {
   makeCacheKey,
-
+  normalizeHeaders,
   assertCacheKey,
   assertCacheValue,
   parseCacheControlHeader,
```
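The effect of the new `pathHasQueryOrFragment` guard: the cache key only serializes `opts.query` into the path when the path does not already carry a query string or fragment. A sketch of the behavior (illustrative require path; `makeCacheKey` is not public API):

```js
'use strict'
// Illustrative internal path; not part of undici's documented API.
const { makeCacheKey } = require('undici/lib/util/cache')

// No query string in the path, so opts.query is serialized in:
// the key's path becomes '/items?page=2'
makeCacheKey({ origin: 'https://example.com', method: 'GET', path: '/items', query: { page: 2 } })

// The path already has a query string, so it is used as-is: '/items?page=1'
makeCacheKey({ origin: 'https://example.com', method: 'GET', path: '/items?page=1', query: { page: 2 } })
```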
package/lib/web/cache/cache.js
CHANGED

```diff
@@ -18,7 +18,7 @@ const { createDeferredPromise } = require('../../util/promise')
  * @property {'delete' | 'put'} type
  * @property {any} request
  * @property {any} response
- * @property {import('
+ * @property {import('../../../types/cache').CacheQueryOptions} options
  */
 
 /**
@@ -452,7 +452,7 @@ class Cache {
   /**
    * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys
    * @param {any} request
-   * @param {import('
+   * @param {import('../../../types/cache').CacheQueryOptions} options
    * @returns {Promise<readonly Request[]>}
    */
   async keys (request = undefined, options = {}) {
@@ -670,7 +670,7 @@ class Cache {
   /**
    * @see https://w3c.github.io/ServiceWorker/#query-cache
    * @param {any} requestQuery
-   * @param {import('
+   * @param {import('../../../types/cache').CacheQueryOptions} options
    * @param {requestResponseList} targetStorage
    * @returns {requestResponseList}
    */
@@ -695,7 +695,7 @@ class Cache {
    * @param {any} requestQuery
    * @param {any} request
    * @param {any | null} response
-   * @param {import('
+   * @param {import('../../../types/cache').CacheQueryOptions | undefined} options
    * @returns {boolean}
    */
  #requestMatchesCachedItem (requestQuery, request, response = null, options) {
```
package/lib/web/cookies/parse.js
CHANGED

```diff
@@ -4,7 +4,7 @@ const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
 const { isCTLExcludingHtab } = require('./util')
 const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
 const assert = require('node:assert')
-const { unescape } = require('node:querystring')
+const { unescape: qsUnescape } = require('node:querystring')
 
 /**
  * @description Parses the field-value attributes of a set-cookie header string.
@@ -82,7 +82,7 @@ function parseSetCookie (header) {
   // store arbitrary data in a cookie-value SHOULD encode that data, for
   // example, using Base64 [RFC4648].
   return {
-    name, value:
+    name, value: qsUnescape(value), ...parseUnparsedAttributes(unparsedAttributes)
   }
 }
 
```
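The alias, presumably chosen to avoid confusion with the deprecated global `unescape`, keeps the same decoding behavior for cookie values:

```js
// querystring.unescape decodes percent-escapes, which Set-Cookie values often carry.
const { unescape: qsUnescape } = require('node:querystring')

console.log(qsUnescape('hello%20world%22')) // 'hello world"'
```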
package/lib/web/eventsource/eventsource.js
CHANGED

```diff
@@ -124,10 +124,10 @@ class EventSource extends EventTarget {
     url = webidl.converters.USVString(url)
     eventSourceInitDict = webidl.converters.EventSourceInitDict(eventSourceInitDict, prefix, 'eventSourceInitDict')
 
-    this.#dispatcher = eventSourceInitDict.dispatcher
+    this.#dispatcher = eventSourceInitDict.node.dispatcher || eventSourceInitDict.dispatcher
     this.#state = {
       lastEventId: '',
-      reconnectionTime:
+      reconnectionTime: eventSourceInitDict.node.reconnectionTime
     }
 
     // 2. Let settings be ev's relevant settings object.
@@ -472,6 +472,21 @@ webidl.converters.EventSourceInitDict = webidl.dictionaryConverter([
   {
     key: 'dispatcher', // undici only
     converter: webidl.converters.any
+  },
+  {
+    key: 'node', // undici only
+    converter: webidl.dictionaryConverter([
+      {
+        key: 'reconnectionTime',
+        converter: webidl.converters['unsigned long'],
+        defaultValue: () => defaultReconnectionTime
+      },
+      {
+        key: 'dispatcher',
+        converter: webidl.converters.any
+      }
+    ]),
+    defaultValue: () => ({})
   }
 ])
 
```
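Per the converter above, `EventSource` now accepts an undici-only `node` block for Node-specific knobs. A sketch of how it might be used (the URL and option values are illustrative):

```js
'use strict'
const { EventSource, Agent } = require('undici')

const es = new EventSource('https://example.com/events', {
  node: {
    // Custom dispatcher for the underlying requests
    dispatcher: new Agent({ keepAliveTimeout: 10_000 }),
    // Delay (ms) before reconnecting after the stream drops;
    // falls back to defaultReconnectionTime when omitted
    reconnectionTime: 1_000
  }
})

es.addEventListener('message', (ev) => console.log(ev.data))
```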
package/lib/web/fetch/body.js
CHANGED

```diff
@@ -125,7 +125,7 @@ function extractBody (object, keepalive = false) {
     const prefix = `--${boundary}\r\nContent-Disposition: form-data`
 
     /*! formdata-polyfill. MIT License. Jimmy Wärting <https://jimmy.warting.se/opensource> */
-    const
+    const formdataEscape = (str) =>
       str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')
     const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n')
 
@@ -143,13 +143,13 @@ function extractBody (object, keepalive = false) {
     for (const [name, value] of object) {
       if (typeof value === 'string') {
         const chunk = textEncoder.encode(prefix +
-          `; name="${
+          `; name="${formdataEscape(normalizeLinefeeds(name))}"` +
           `\r\n\r\n${normalizeLinefeeds(value)}\r\n`)
         blobParts.push(chunk)
         length += chunk.byteLength
       } else {
-        const chunk = textEncoder.encode(`${prefix}; name="${
-          (value.name ? `; filename="${
+        const chunk = textEncoder.encode(`${prefix}; name="${formdataEscape(normalizeLinefeeds(name))}"` +
+          (value.name ? `; filename="${formdataEscape(value.name)}"` : '') + '\r\n' +
           `Content-Type: ${
             value.type || 'application/octet-stream'
           }\r\n\r\n`)
```
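What the renamed `formdataEscape` helper accomplishes, shown standalone: a quote or line break in a field or file name can no longer terminate the quoted parameters of the multipart `Content-Disposition` header (helper re-declared here for illustration):

```js
// Same replacements as the helper in the diff above.
const formdataEscape = (str) =>
  str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22')

console.log(formdataEscape('file"name\r\n')) // 'file%22name%0D%0A'
// Serialized safely as: Content-Disposition: form-data; name="file%22name%0D%0A"
```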
package/lib/web/fetch/index.js
CHANGED

```diff
@@ -14,7 +14,6 @@ const { HeadersList } = require('./headers')
 const { Request, cloneRequest, getRequestDispatcher, getRequestState } = require('./request')
 const zlib = require('node:zlib')
 const {
-  bytesMatch,
   makePolicyContainer,
   clonePolicyContainer,
   requestBadPort,
@@ -62,6 +61,7 @@ const { dataURLProcessor, serializeAMimeType, minimizeSupportedMimeType } = requ
 const { getGlobalDispatcher } = require('../../global')
 const { webidl } = require('../webidl')
 const { STATUS_CODES } = require('node:http')
+const { bytesMatch } = require('../subresource-integrity/subresource-integrity')
 const { createDeferredPromise } = require('../../util/promise')
 const GET_OR_HEAD = ['GET', 'HEAD']
 
```
package/lib/web/fetch/response.js
CHANGED

```diff
@@ -22,7 +22,8 @@ const { webidl } = require('../webidl')
 const { URLSerializer } = require('./data-url')
 const { kConstruct } = require('../../core/symbols')
 const assert = require('node:assert')
-
+
+const { isArrayBuffer } = nodeUtil.types
 
 const textEncoder = new TextEncoder('utf-8')
 
@@ -243,6 +244,11 @@ class Response {
     // 2. Let clonedResponse be the result of cloning this’s response.
     const clonedResponse = cloneResponse(this.#state)
 
+    // Note: To re-register because of a new stream.
+    if (this.#state.body?.stream) {
+      streamRegistry.register(this, new WeakRef(this.#state.body.stream))
+    }
+
     // 3. Return the result of creating a Response object, given
     // clonedResponse, this’s headers’s guard, and this’s relevant Realm.
     return fromInnerResponse(clonedResponse, getHeadersGuard(this.#headers))
@@ -353,8 +359,6 @@ function cloneResponse (response) {
   // result of cloning response’s body.
   if (response.body != null) {
     newResponse.body = cloneBody(response.body)
-
-    streamRegistry.register(newResponse, new WeakRef(response.body.stream))
   }
 
   // 4. Return newResponse.
@@ -576,7 +580,7 @@ webidl.converters.XMLHttpRequestBodyInit = function (V, prefix, name) {
     return V
   }
 
-  if (ArrayBuffer.isView(V) ||
+  if (ArrayBuffer.isView(V) || isArrayBuffer(V)) {
     return V
   }
 
```
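The `streamRegistry` change fits a common GC pattern: hold only a `WeakRef` to a response's body stream and let a `FinalizationRegistry` cancel it once the owning `Response` is collected; `clone()` must re-register because it swaps in new streams. A generic sketch of the pattern (names are illustrative, not undici's exact internals):

```js
// Cancel a body stream once its owning object becomes unreachable.
const streamRegistry = new FinalizationRegistry((streamRef) => {
  const stream = streamRef.deref()
  if (stream && !stream.locked) {
    stream.cancel('owner was garbage collected').catch(() => {})
  }
})

function track (owner, stream) {
  // The WeakRef keeps the registry itself from pinning the stream alive.
  streamRegistry.register(owner, new WeakRef(stream))
}
```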
package/lib/web/fetch/util.js
CHANGED

```diff
@@ -11,20 +11,6 @@ const assert = require('node:assert')
 const { isUint8Array } = require('node:util/types')
 const { webidl } = require('../webidl')
 
-let supportedHashes = []
-
-// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
-/** @type {import('crypto')} */
-let crypto
-try {
-  crypto = require('node:crypto')
-  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
-  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
-/* c8 ignore next 3 */
-} catch {
-
-}
-
 function responseURL (response) {
   // https://fetch.spec.whatwg.org/#responses
   // A response has an associated URL. It is a pointer to the last URL
@@ -698,206 +684,6 @@ function isURLPotentiallyTrustworthy (url) {
   return isOriginPotentiallyTrustworthy(url.origin)
 }
 
-/**
- * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist
- * @param {Uint8Array} bytes
- * @param {string} metadataList
- */
-function bytesMatch (bytes, metadataList) {
-  // If node is not built with OpenSSL support, we cannot check
-  // a request's integrity, so allow it by default (the spec will
-  // allow requests if an invalid hash is given, as precedence).
-  /* istanbul ignore if: only if node is built with --without-ssl */
-  if (crypto === undefined) {
-    return true
-  }
-
-  // 1. Let parsedMetadata be the result of parsing metadataList.
-  const parsedMetadata = parseMetadata(metadataList)
-
-  // 2. If parsedMetadata is no metadata, return true.
-  if (parsedMetadata === 'no metadata') {
-    return true
-  }
-
-  // 3. If response is not eligible for integrity validation, return false.
-  // TODO
-
-  // 4. If parsedMetadata is the empty set, return true.
-  if (parsedMetadata.length === 0) {
-    return true
-  }
-
-  // 5. Let metadata be the result of getting the strongest
-  //    metadata from parsedMetadata.
-  const strongest = getStrongestMetadata(parsedMetadata)
-  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
-
-  // 6. For each item in metadata:
-  for (const item of metadata) {
-    // 1. Let algorithm be the alg component of item.
-    const algorithm = item.algo
-
-    // 2. Let expectedValue be the val component of item.
-    const expectedValue = item.hash
-
-    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
-    // "be liberal with padding". This is annoying, and it's not even in the spec.
-
-    // 3. Let actualValue be the result of applying algorithm to bytes.
-    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
-
-    if (actualValue[actualValue.length - 1] === '=') {
-      if (actualValue[actualValue.length - 2] === '=') {
-        actualValue = actualValue.slice(0, -2)
-      } else {
-        actualValue = actualValue.slice(0, -1)
-      }
-    }
-
-    // 4. If actualValue is a case-sensitive match for expectedValue,
-    //    return true.
-    if (compareBase64Mixed(actualValue, expectedValue)) {
-      return true
-    }
-  }
-
-  // 7. Return false.
-  return false
-}
-
-// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
-// https://www.w3.org/TR/CSP2/#source-list-syntax
-// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
-
-/**
- * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
- * @param {string} metadata
- */
-function parseMetadata (metadata) {
-  // 1. Let result be the empty set.
-  /** @type {{ algo: string, hash: string }[]} */
-  const result = []
-
-  // 2. Let empty be equal to true.
-  let empty = true
-
-  // 3. For each token returned by splitting metadata on spaces:
-  for (const token of metadata.split(' ')) {
-    // 1. Set empty to false.
-    empty = false
-
-    // 2. Parse token as a hash-with-options.
-    const parsedToken = parseHashWithOptions.exec(token)
-
-    // 3. If token does not parse, continue to the next token.
-    if (
-      parsedToken === null ||
-      parsedToken.groups === undefined ||
-      parsedToken.groups.algo === undefined
-    ) {
-      // Note: Chromium blocks the request at this point, but Firefox
-      // gives a warning that an invalid integrity was given. The
-      // correct behavior is to ignore these, and subsequently not
-      // check the integrity of the resource.
-      continue
-    }
-
-    // 4. Let algorithm be the hash-algo component of token.
-    const algorithm = parsedToken.groups.algo.toLowerCase()
-
-    // 5. If algorithm is a hash function recognized by the user
-    //    agent, add the parsed token to result.
-    if (supportedHashes.includes(algorithm)) {
-      result.push(parsedToken.groups)
-    }
-  }
-
-  // 4. Return no metadata if empty is true, otherwise return result.
-  if (empty === true) {
-    return 'no metadata'
-  }
-
-  return result
-}
-
-/**
- * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
- */
-function getStrongestMetadata (metadataList) {
-  // Let algorithm be the algo component of the first item in metadataList.
-  // Can be sha256
-  let algorithm = metadataList[0].algo
-  // If the algorithm is sha512, then it is the strongest
-  // and we can return immediately
-  if (algorithm[3] === '5') {
-    return algorithm
-  }
-
-  for (let i = 1; i < metadataList.length; ++i) {
-    const metadata = metadataList[i]
-    // If the algorithm is sha512, then it is the strongest
-    // and we can break the loop immediately
-    if (metadata.algo[3] === '5') {
-      algorithm = 'sha512'
-      break
-    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
-    } else if (algorithm[3] === '3') {
-      continue
-    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
-    // the strongest
-    } else if (metadata.algo[3] === '3') {
-      algorithm = 'sha384'
-    }
-  }
-  return algorithm
-}
-
-function filterMetadataListByAlgorithm (metadataList, algorithm) {
-  if (metadataList.length === 1) {
-    return metadataList
-  }
-
-  let pos = 0
-  for (let i = 0; i < metadataList.length; ++i) {
-    if (metadataList[i].algo === algorithm) {
-      metadataList[pos++] = metadataList[i]
-    }
-  }
-
-  metadataList.length = pos
-
-  return metadataList
-}
-
-/**
- * Compares two base64 strings, allowing for base64url
- * in the second string.
- *
- * @param {string} actualValue always base64
- * @param {string} expectedValue base64 or base64url
- * @returns {boolean}
- */
-function compareBase64Mixed (actualValue, expectedValue) {
-  if (actualValue.length !== expectedValue.length) {
-    return false
-  }
-  for (let i = 0; i < actualValue.length; ++i) {
-    if (actualValue[i] !== expectedValue[i]) {
-      if (
-        (actualValue[i] === '+' && expectedValue[i] === '-') ||
-        (actualValue[i] === '/' && expectedValue[i] === '_')
-      ) {
-        continue
-      }
-      return false
-    }
-  }
-
-  return true
-}
-
 // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
 function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
   // TODO
@@ -1761,7 +1547,6 @@ module.exports = {
   isValidHeaderValue,
   isErrorLike,
   fullyReadBody,
-  bytesMatch,
   readableStreamClose,
   isomorphicEncode,
   urlIsLocal,
@@ -1770,7 +1555,6 @@ module.exports = {
   readAllBytes,
   simpleRangeHeaderValue,
   buildContentRange,
-  parseMetadata,
   createInflate,
   extractMimeType,
   getDecodeSplit,
```
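None of this is dropped: per the file list, the SRI implementation reappears, updated to the newer draft, in `package/lib/web/subresource-integrity/subresource-integrity.js`, which `fetch/index.js` now imports `bytesMatch` from. A hedged sketch of exercising the relocated helper directly, assuming the signature shown in the removed code still holds (internal path, not public API):

```js
'use strict'
// Illustrative internal path; not part of undici's documented API.
const { bytesMatch } = require('undici/lib/web/subresource-integrity/subresource-integrity')
const { createHash } = require('node:crypto')

const body = new TextEncoder().encode('hello')
const digest = createHash('sha256').update(body).digest('base64')

// true: the bytes hash to the strongest (here, only) metadata entry
console.log(bytesMatch(body, `sha256-${digest}`))
```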
package/lib/web/subresource-integrity/Readme.md
ADDED

```diff
@@ -0,0 +1,9 @@
+# Subresource Integrity
+
+based on Editor’s Draft, 12 June 2025
+
+This module provides support for Subresource Integrity (SRI) in the context of web fetch operations. SRI is a security feature that allows clients to verify that fetched resources are delivered without unexpected manipulation.
+
+## Links
+
+- [Subresource Integrity](https://w3c.github.io/webappsec-subresource-integrity/)
```
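Since SRI is wired into `fetch`, the usual way to exercise the new module is the standard `integrity` request option (URL and body here are illustrative):

```js
'use strict'
const { fetch } = require('undici')
const { createHash } = require('node:crypto')

async function main () {
  // An integrity value is '<algo>-<base64 digest>' of the expected body bytes.
  const expected = createHash('sha256').update('hello world').digest('base64')

  // fetch rejects with a TypeError when the delivered body does not hash to `expected`.
  const res = await fetch('https://example.com/hello.txt', {
    integrity: `sha256-${expected}`
  })
  console.log(await res.text())
}

main().catch(console.error)
```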