undici 6.10.2 → 6.11.1
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/README.md +4 -0
- package/lib/core/util.js +0 -3
- package/lib/dispatcher/client-h2.js +27 -1
- package/lib/handler/redirect-handler.js +2 -2
- package/lib/mock/pending-interceptors-formatter.js +4 -1
- package/lib/web/fetch/data-url.js +2 -2
- package/lib/web/fetch/headers.js +1 -1
- package/lib/web/fetch/index.js +0 -23
- package/lib/web/fetch/util.js +105 -33
- package/package.json +9 -5
- package/types/diagnostics-channel.d.ts +1 -2
package/README.md
CHANGED
@@ -7,8 +7,12 @@ An HTTP/1.1 client, written from scratch for Node.js.
 > Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici.
 It is also a Stranger Things reference.
 
+## How to get involved
+
 Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel.
 
+Looking to contribute? Start by reading the [contributing guide](./CONTRIBUTING.md)
+
 ## Install
 
 ```
package/lib/core/util.js
CHANGED
@@ -246,9 +246,6 @@ function bufferToLowerCasedHeaderName (value) {
  * @returns {Record<string, string | string[]>}
  */
 function parseHeaders (headers, obj) {
-  // For H2 support
-  if (!Array.isArray(headers)) return headers
-
   if (obj === undefined) obj = {}
   for (let i = 0; i < headers.length; i += 2) {
     const key = headerNameToString(headers[i])
package/lib/dispatcher/client-h2.js
CHANGED
@@ -54,6 +54,20 @@ const {
   }
 } = http2
 
+function parseH2Headers (headers) {
+  // set-cookie is always an array. Duplicates are added to the array.
+  // For duplicate cookie headers, the values are joined together with '; '.
+  headers = Object.entries(headers).flat(2)
+
+  const result = []
+
+  for (const header of headers) {
+    result.push(Buffer.from(header))
+  }
+
+  return result
+}
+
 async function connectH2 (client, socket) {
   client[kSocket] = socket
 
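For orientation, a minimal standalone sketch of the flattening the new `parseH2Headers` helper performs; the sample headers object is made up for illustration.

```js
// Hypothetical HTTP/2 response headers as node:http2 delivers them:
// a plain object, with set-cookie already collected into an array.
const headers = {
  'content-type': 'text/plain',
  'set-cookie': ['a=1; Path=/']
}

// Same flattening as parseH2Headers: entries are flattened two levels deep
// and each piece is wrapped in a Buffer, giving the same shape as the raw
// header list the HTTP/1.1 parser produces.
const flat = Object.entries(headers).flat(2)
// -> ['content-type', 'text/plain', 'set-cookie', 'a=1; Path=/']
const rawHeaders = flat.map((value) => Buffer.from(value))
console.log(rawHeaders.map((buf) => buf.toString()))
```

This lines up with the removal of the `Array.isArray` early return in `lib/core/util.js` above: HTTP/2 headers now reach the handlers in the same raw buffer-list format as HTTP/1.1 headers.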
@@ -391,7 +405,19 @@ function writeH2 (client, request) {
     const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers
     request.onResponseStarted()
 
-    if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) {
+    // Due to the stream nature, it is possible we face a race condition
+    // where the stream has been assigned, but the request has been aborted
+    // the request remains in-flight and headers hasn't been received yet
+    // for those scenarios, best effort is to destroy the stream immediately
+    // as there's no value to keep it open.
+    if (request.aborted || request.completed) {
+      const err = new RequestAbortedError()
+      errorRequest(client, request, err)
+      util.destroy(stream, err)
+      return
+    }
+
+    if (request.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), '') === false) {
       stream.pause()
     }
 
package/lib/handler/redirect-handler.js
CHANGED
@@ -201,9 +201,9 @@ function shouldRemoveHeader (header, removeContent, unknownOrigin) {
   if (removeContent && util.headerNameToString(header).startsWith('content-')) {
     return true
   }
-  if (unknownOrigin && (header.length === 13 || header.length === 6)) {
+  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
     const name = util.headerNameToString(header)
-    return name === 'authorization' || name === 'cookie'
+    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
   }
   return false
 }
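The extra length check above is only a cheap pre-filter before the case-insensitive name comparison; a quick illustration of the three lengths it now matches:

```js
// Only header names whose length can possibly match one of the sensitive
// headers are lowercased and compared when redirecting to an unknown origin.
for (const name of ['cookie', 'authorization', 'proxy-authorization']) {
  console.log(name, name.length) // cookie 6, authorization 13, proxy-authorization 19
}
```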
package/lib/mock/pending-interceptors-formatter.js
CHANGED
@@ -3,6 +3,9 @@
 const { Transform } = require('node:stream')
 const { Console } = require('node:console')
 
+const PERSISTENT = process.versions.icu ? '✅' : 'Y '
+const NOT_PERSISTENT = process.versions.icu ? '❌' : 'N '
+
 /**
  * Gets the output of `console.table(…)` as a string.
  */
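A minimal sketch of the gate these constants rely on, assuming only that `process.versions.icu` is set on ICU-enabled Node builds; the output is indicative.

```js
// Node exposes the ICU version via process.versions.icu; on builds compiled
// without Intl support (e.g. --without-intl) the property is undefined, so
// the formatter falls back to plain ASCII markers instead of emoji.
const hasIcu = process.versions.icu !== undefined
console.log(hasIcu ? 'ICU available: use ✅ / ❌' : 'no ICU: use Y / N')
```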
@@ -29,7 +32,7 @@ module.exports = class PendingInterceptorsFormatter {
         Origin: origin,
         Path: path,
         'Status code': statusCode,
-        Persistent: persist ? '✅' : '❌',
+        Persistent: persist ? PERSISTENT : NOT_PERSISTENT,
         Invocations: timesInvoked,
         Remaining: persist ? Infinity : times - timesInvoked
       }))
package/lib/web/fetch/data-url.js
CHANGED
@@ -8,12 +8,12 @@ const encoder = new TextEncoder()
  * @see https://mimesniff.spec.whatwg.org/#http-token-code-point
  */
 const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/
-const HTTP_WHITESPACE_REGEX = /[\u000A
+const HTTP_WHITESPACE_REGEX = /[\u000A\u000D\u0009\u0020]/ // eslint-disable-line
 const ASCII_WHITESPACE_REPLACE_REGEX = /[\u0009\u000A\u000C\u000D\u0020]/g // eslint-disable-line
 /**
  * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point
  */
-const HTTP_QUOTED_STRING_TOKENS = /[\u0009
+const HTTP_QUOTED_STRING_TOKENS = /[\u0009\u0020-\u007E\u0080-\u00FF]/ // eslint-disable-line
 
 // https://fetch.spec.whatwg.org/#data-url-processor
 /** @param {URL} dataURL */
package/lib/web/fetch/headers.js
CHANGED
@@ -12,7 +12,7 @@ const {
 } = require('./util')
 const { webidl } = require('./webidl')
 const assert = require('node:assert')
-const util = require('util')
+const util = require('node:util')
 
 const kHeadersMap = Symbol('headers map')
 const kHeadersSortedMap = Symbol('headers map sorted')
package/lib/web/fetch/index.js
CHANGED
@@ -2141,29 +2141,6 @@ async function httpNetworkFetch (
           codings = contentEncoding.toLowerCase().split(',').map((x) => x.trim())
         }
         location = headersList.get('location', true)
-      } else {
-        const keys = Object.keys(rawHeaders)
-        for (let i = 0; i < keys.length; ++i) {
-          // The header names are already in lowercase.
-          const key = keys[i]
-          const value = rawHeaders[key]
-          if (key === 'set-cookie') {
-            for (let j = 0; j < value.length; ++j) {
-              headersList.append(key, value[j], true)
-            }
-          } else {
-            headersList.append(key, value, true)
-          }
-        }
-        // For H2, The header names are already in lowercase,
-        // so we can avoid the `HeadersList#get` call here.
-        const contentEncoding = rawHeaders['content-encoding']
-        if (contentEncoding) {
-          // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
-          // "All content-coding values are case-insensitive..."
-          codings = contentEncoding.toLowerCase().split(',').map((x) => x.trim()).reverse()
-        }
-        location = rawHeaders.location
       }
 
       this.body = new Readable({ read: resume })
package/lib/web/fetch/util.js
CHANGED
@@ -11,11 +11,15 @@ const assert = require('node:assert')
 const { isUint8Array } = require('node:util/types')
 const { webidl } = require('./webidl')
 
+let supportedHashes = []
+
 // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
 /** @type {import('crypto')} */
 let crypto
 try {
   crypto = require('node:crypto')
+  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
+  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
 /* c8 ignore next 3 */
 } catch {
 
@@ -565,66 +569,56 @@ function bytesMatch (bytes, metadataList) {
     return true
   }
 
-  // 3. If parsedMetadata is the empty set, return true.
+  // 3. If response is not eligible for integrity validation, return false.
+  // TODO
+
+  // 4. If parsedMetadata is the empty set, return true.
   if (parsedMetadata.length === 0) {
     return true
   }
 
-  // 4. Let metadata be the result of getting the strongest
+  // 5. Let metadata be the result of getting the strongest
   //    metadata from parsedMetadata.
-  const
-
-  const strongest = list[0].algo
-  // get all entries that use the strongest algorithm; ignore weaker
-  const metadata = list.filter((item) => item.algo === strongest)
+  const strongest = getStrongestMetadata(parsedMetadata)
+  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
 
-  // 5. For each item in metadata:
+  // 6. For each item in metadata:
   for (const item of metadata) {
     // 1. Let algorithm be the alg component of item.
     const algorithm = item.algo
 
     // 2. Let expectedValue be the val component of item.
-    let expectedValue = item.hash
+    const expectedValue = item.hash
 
     // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
     // "be liberal with padding". This is annoying, and it's not even in the spec.
 
-    if (expectedValue.endsWith('==')) {
-      expectedValue = expectedValue.slice(0, -2)
-    }
-
     // 3. Let actualValue be the result of applying algorithm to bytes.
     let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
 
-    if (actualValue.endsWith('==')) {
-      actualValue = actualValue.slice(0, -2)
+    if (actualValue[actualValue.length - 1] === '=') {
+      if (actualValue[actualValue.length - 2] === '=') {
+        actualValue = actualValue.slice(0, -2)
+      } else {
+        actualValue = actualValue.slice(0, -1)
+      }
     }
 
     // 4. If actualValue is a case-sensitive match for expectedValue,
     //    return true.
-    if (actualValue === expectedValue) {
-      return true
-    }
-
-    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
-
-    if (actualBase64URL.endsWith('==')) {
-      actualBase64URL = actualBase64URL.slice(0, -2)
-    }
-
-    if (actualBase64URL === expectedValue) {
+    if (compareBase64Mixed(actualValue, expectedValue)) {
       return true
     }
   }
 
-  // 6. Return false.
+  // 7. Return false.
   return false
 }
 
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-(?<hash>[A-Za-z0-9+/]
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
 
 /**
  * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
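For context, a self-contained sketch of the comparison the rewritten `bytesMatch` now performs on made-up input; the inline check is a simplified stand-in for `compareBase64Mixed`, not the library code itself.

```js
const crypto = require('node:crypto')

const bytes = Buffer.from('hello world')

// Digest computed locally in standard base64, padding stripped the same
// way bytesMatch strips it ("be liberal with padding")...
const actual = crypto.createHash('sha512').update(bytes).digest('base64').replace(/=+$/, '')
// ...and the digest as it might appear in integrity metadata, in base64url.
const expected = crypto.createHash('sha512').update(bytes).digest('base64url')

// A '+'/'-' or '/'/'_' difference at the same position is tolerated,
// which is the character-by-character rule compareBase64Mixed applies.
const matches = actual.length === expected.length && [...actual].every((ch, i) =>
  ch === expected[i] ||
  (ch === '+' && expected[i] === '-') ||
  (ch === '/' && expected[i] === '_')
)
console.log(matches) // true
```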
@@ -638,8 +632,6 @@ function parseMetadata (metadata) {
   // 2. Let empty be equal to true.
   let empty = true
 
-  const supportedHashes = crypto.getHashes()
-
   // 3. For each token returned by splitting metadata on spaces:
   for (const token of metadata.split(' ')) {
     // 1. Set empty to false.
@@ -649,7 +641,11 @@ function parseMetadata (metadata) {
     const parsedToken = parseHashWithOptions.exec(token)
 
     // 3. If token does not parse, continue to the next token.
-    if (
+    if (
+      parsedToken === null ||
+      parsedToken.groups === undefined ||
+      parsedToken.groups.algo === undefined
+    ) {
       // Note: Chromium blocks the request at this point, but Firefox
       // gives a warning that an invalid integrity was given. The
       // correct behavior is to ignore these, and subsequently not
@@ -658,11 +654,11 @@ function parseMetadata (metadata) {
     }
 
     // 4. Let algorithm be the hash-algo component of token.
-    const algorithm = parsedToken.groups.algo
+    const algorithm = parsedToken.groups.algo.toLowerCase()
 
     // 5. If algorithm is a hash function recognized by the user
     //    agent, add the parsed token to result.
-    if (supportedHashes.includes(algorithm
+    if (supportedHashes.includes(algorithm)) {
       result.push(parsedToken.groups)
     }
   }
@@ -675,6 +671,82 @@ function parseMetadata (metadata) {
   return result
 }
 
+/**
+ * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
+ */
+function getStrongestMetadata (metadataList) {
+  // Let algorithm be the algo component of the first item in metadataList.
+  // Can be sha256
+  let algorithm = metadataList[0].algo
+  // If the algorithm is sha512, then it is the strongest
+  // and we can return immediately
+  if (algorithm[3] === '5') {
+    return algorithm
+  }
+
+  for (let i = 1; i < metadataList.length; ++i) {
+    const metadata = metadataList[i]
+    // If the algorithm is sha512, then it is the strongest
+    // and we can break the loop immediately
+    if (metadata.algo[3] === '5') {
+      algorithm = 'sha512'
+      break
+    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
+    } else if (algorithm[3] === '3') {
+      continue
+    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
+    // the strongest
+    } else if (metadata.algo[3] === '3') {
+      algorithm = 'sha384'
+    }
+  }
+  return algorithm
+}
+
+function filterMetadataListByAlgorithm (metadataList, algorithm) {
+  if (metadataList.length === 1) {
+    return metadataList
+  }
+
+  let pos = 0
+  for (let i = 0; i < metadataList.length; ++i) {
+    if (metadataList[i].algo === algorithm) {
+      metadataList[pos++] = metadataList[i]
+    }
+  }
+
+  metadataList.length = pos
+
+  return metadataList
+}
+
+/**
+ * Compares two base64 strings, allowing for base64url
+ * in the second string.
+ *
+ * @param {string} actualValue always base64
+ * @param {string} expectedValue base64 or base64url
+ * @returns {boolean}
+ */
+function compareBase64Mixed (actualValue, expectedValue) {
+  if (actualValue.length !== expectedValue.length) {
+    return false
+  }
+  for (let i = 0; i < actualValue.length; ++i) {
+    if (actualValue[i] !== expectedValue[i]) {
+      if (
+        (actualValue[i] === '+' && expectedValue[i] === '-') ||
+        (actualValue[i] === '/' && expectedValue[i] === '_')
+      ) {
+        continue
+      }
+      return false
+    }
+  }
+
+  return true
+}
+
 // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
 function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
   // TODO
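To illustrate what the new helpers do with a mixed metadata list, a simplified equivalent over made-up entries (the real helpers pick the algorithm by inspecting `algo[3]` and filter the list in place):

```js
// Hypothetical parsed integrity metadata with mixed algorithms.
const parsedMetadata = [
  { algo: 'sha256', hash: 'aaa' },
  { algo: 'sha512', hash: 'bbb' },
  { algo: 'sha256', hash: 'ccc' }
]

// getStrongestMetadata picks the strongest algorithm present...
const rank = { sha256: 0, sha384: 1, sha512: 2 }
const strongest = parsedMetadata
  .reduce((a, b) => (rank[b.algo] > rank[a.algo] ? b : a)).algo

// ...and filterMetadataListByAlgorithm keeps only the entries that use it,
// so weaker digests never influence the integrity decision.
const metadata = parsedMetadata.filter((item) => item.algo === strongest)
console.log(strongest, metadata) // sha512 [ { algo: 'sha512', hash: 'bbb' } ]
```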
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "undici",
-  "version": "6.10.2",
+  "version": "6.11.1",
   "description": "An HTTP/1.1 client, written from scratch for Node.js",
   "homepage": "https://undici.nodejs.org",
   "bugs": {
@@ -69,10 +69,13 @@
     "lint:fix": "standard --fix | snazzy",
     "test": "npm run test:javascript && cross-env NODE_V8_COVERAGE= npm run test:typescript",
     "test:javascript": "node scripts/generate-pem && npm run test:unit && npm run test:node-fetch && npm run test:fetch && npm run test:cookies && npm run test:eventsource && npm run test:wpt && npm run test:websocket && npm run test:node-test && npm run test:jest",
+    "test:javascript:withoutintl": "node scripts/generate-pem && npm run test:unit && npm run test:node-fetch && npm run test:fetch:nobuild && npm run test:cookies && npm run test:eventsource:nobuild && npm run test:wpt:withoutintl && npm run test:node-test",
     "test:cookies": "borp -p \"test/cookie/*.js\"",
     "test:node-fetch": "borp -p \"test/node-fetch/**/*.js\"",
-    "test:eventsource": "npm run build:node &&
-    "test:
+    "test:eventsource": "npm run build:node && npm run test:eventsource:nobuild",
+    "test:eventsource:nobuild": "borp --expose-gc -p \"test/eventsource/*.js\"",
+    "test:fetch": "npm run build:node && npm run test:fetch:nobuild",
+    "test:fetch:nobuild": "borp --expose-gc -p \"test/fetch/*.js\" && borp -p \"test/webidl/*.js\" && borp -p \"test/busboy/*.js\"",
     "test:jest": "cross-env NODE_V8_COVERAGE= jest",
     "test:unit": "borp --expose-gc -p \"test/*.js\"",
     "test:node-test": "borp -p \"test/node-test/**/*.js\"",
@@ -81,6 +84,7 @@
     "test:typescript": "tsd && tsc --skipLibCheck test/imports/undici-import.ts",
     "test:websocket": "borp -p \"test/websocket/*.js\"",
     "test:wpt": "node test/wpt/start-fetch.mjs && node test/wpt/start-FileAPI.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-websockets.mjs && node test/wpt/start-cacheStorage.mjs && node test/wpt/start-eventsource.mjs",
+    "test:wpt:withoutintl": "node test/wpt/start-fetch.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-cacheStorage.mjs && node test/wpt/start-eventsource.mjs",
     "coverage": "npm run coverage:clean && cross-env NODE_V8_COVERAGE=./coverage/tmp npm run test:javascript && npm run coverage:report",
     "coverage:ci": "npm run coverage:clean && cross-env NODE_V8_COVERAGE=./coverage/tmp npm run test:javascript && npm run coverage:report:ci",
     "coverage:clean": "node ./scripts/clean-coverage.js",
@@ -96,7 +100,7 @@
     "@sinonjs/fake-timers": "^11.1.0",
     "@types/node": "^18.0.3",
     "abort-controller": "^3.0.0",
-    "borp": "^0.
+    "borp": "^0.10.0",
     "c8": "^9.1.0",
     "cross-env": "^7.0.3",
     "dns-packet": "^5.4.0",
@@ -112,7 +116,7 @@
     "proxy": "^2.1.1",
     "snazzy": "^9.0.0",
     "standard": "^17.0.0",
-    "tsd": "^0.
+    "tsd": "^0.31.0",
     "typescript": "^5.0.2",
     "ws": "^8.11.0"
   },