undici 7.0.0-alpha.3 → 7.0.0-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -229,7 +229,7 @@ class RetryHandler {
       return false
     }
 
-    const { start, size, end = size } = contentRange
+    const { start, size, end = size - 1 } = contentRange
 
     assert(this.start === start, 'content-range mismatch')
     assert(this.end == null || this.end === end, 'content-range mismatch')
@@ -252,7 +252,7 @@ class RetryHandler {
       )
     }
 
-    const { start, size, end = size } = range
+    const { start, size, end = size - 1 } = range
     assert(
       start != null && Number.isFinite(start),
       'content-range mismatch'
@@ -266,7 +266,7 @@ class RetryHandler {
     // We make our best to checkpoint the body for further range headers
     if (this.end == null) {
       const contentLength = headers['content-length']
-      this.end = contentLength != null ? Number(contentLength) : null
+      this.end = contentLength != null ? Number(contentLength) - 1 : null
     }
 
     assert(Number.isFinite(this.start))
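
All three RetryHandler changes above correct the same off-by-one: the `end` position in a `Content-Range: bytes <start>-<end>/<size>` header is inclusive, so a missing `end` defaults to `size - 1`, and an end derived from `content-length` is the length minus one. A minimal illustration of the arithmetic (not undici code):

    // Inclusive Content-Range bounds: the last readable byte is size - 1.
    function resolveRange ({ start, size, end = size - 1 }) {
      return { start, end, remaining: end - start + 1 }
    }

    resolveRange({ start: 100, size: 1234 })
    // -> { start: 100, end: 1233, remaining: 1134 }
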
@@ -1,13 +1,19 @@
 'use strict'
 
+const assert = require('node:assert')
+const { Readable } = require('node:stream')
 const util = require('../core/util')
 const CacheHandler = require('../handler/cache-handler')
 const MemoryCacheStore = require('../cache/memory-cache-store')
 const CacheRevalidationHandler = require('../handler/cache-revalidation-handler')
-const { assertCacheStore, assertCacheMethods } = require('../util/cache.js')
+const { assertCacheStore, assertCacheMethods, makeCacheKey } = require('../util/cache.js')
 
 const AGE_HEADER = Buffer.from('age')
 
+/**
+ * @typedef {import('../../types/cache-interceptor.d.ts').default.CachedResponse} CachedResponse
+ */
+
 /**
  * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions} [opts]
  * @returns {import('../../types/dispatcher.d.ts').default.DispatcherComposeInterceptor}
@@ -34,136 +40,151 @@ module.exports = (opts = {}) => {
 
   return dispatch => {
     return (opts, handler) => {
+      // TODO (fix): What if e.g. opts.headers has if-modified-since header? Or other headers
+      // that make things ambigious?
+
       if (!opts.origin || safeMethodsToNotCache.includes(opts.method)) {
         // Not a method we want to cache or we don't have the origin, skip
         return dispatch(opts, handler)
       }
 
-      const stream = store.createReadStream(opts)
-      if (!stream) {
-        // Request isn't cached
-        return dispatch(opts, new CacheHandler(globalOpts, opts, handler))
+      /**
+       * @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
+       */
+      const cacheKey = makeCacheKey(opts)
+
+      // TODO (perf): For small entries support returning a Buffer instead of a stream.
+      // Maybe store should return { staleAt, headers, body, etc... } instead of a stream + stream.value?
+      // Where body can be a Buffer, string, stream or blob?
+      const result = store.get(cacheKey)
+      if (!result) {
+        return dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
       }
 
-      let onErrorCalled = false
-
       /**
-       * @param {import('../../types/cache-interceptor.d.ts').default.CacheStoreReadable} stream
-       * @param {import('../../types/cache-interceptor.d.ts').default.CacheStoreValue} value
+       * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
        */
-      const respondWithCachedValue = (stream, value) => {
-        const ac = new AbortController()
-        const signal = ac.signal
-
-        signal.onabort = (_, err) => {
-          stream.destroy()
-          if (!onErrorCalled) {
-            handler.onError(err)
-            onErrorCalled = true
-          }
-        }
+      const respondWithCachedValue = ({ cachedAt, rawHeaders, statusCode, statusMessage, body }) => {
+        const stream = util.isStream(body)
+          ? body
+          : Readable.from(body ?? [])
+
+        assert(!stream.destroyed, 'stream should not be destroyed')
+        assert(!stream.readableDidRead, 'stream should not be readableDidRead')
+
+        stream
+          .on('error', function (err) {
+            if (!this.readableEnded) {
+              if (typeof handler.onError === 'function') {
+                handler.onError(err)
+              } else {
+                throw err
+              }
+            }
+          })
+          .on('close', function () {
+            if (!this.errored && typeof handler.onComplete === 'function') {
+              handler.onComplete([])
+            }
+          })
 
-        stream.on('error', (err) => {
-          if (!onErrorCalled) {
-            handler.onError(err)
-            onErrorCalled = true
-          }
-        })
+        if (typeof handler.onConnect === 'function') {
+          handler.onConnect((err) => {
+            stream.destroy(err)
+          })
 
-        try {
-          if (typeof handler.onConnect === 'function') {
-            handler.onConnect(ac.abort)
-            signal.throwIfAborted()
+          if (stream.destroyed) {
+            return
           }
+        }
 
-          if (typeof handler.onHeaders === 'function') {
-            // Add the age header
-            // https://www.rfc-editor.org/rfc/rfc9111.html#name-age
-            const age = Math.round((Date.now() - value.cachedAt) / 1000)
+        if (typeof handler.onHeaders === 'function') {
+          // Add the age header
+          // https://www.rfc-editor.org/rfc/rfc9111.html#name-age
+          const age = Math.round((Date.now() - cachedAt) / 1000)
 
-            value.rawHeaders.push(AGE_HEADER, Buffer.from(`${age}`))
+          // TODO (fix): What if rawHeaders already contains age header?
+          rawHeaders = [...rawHeaders, AGE_HEADER, Buffer.from(`${age}`)]
 
-            handler.onHeaders(value.statusCode, value.rawHeaders, stream.resume, value.statusMessage)
-            signal.throwIfAborted()
+          if (handler.onHeaders(statusCode, rawHeaders, () => stream?.resume(), statusMessage) === false) {
+            stream.pause()
           }
+        }
 
-          if (opts.method === 'HEAD') {
-            if (typeof handler.onComplete === 'function') {
-              handler.onComplete(null)
-              stream.destroy()
-            }
-          } else {
-            if (typeof handler.onData === 'function') {
-              stream.on('data', chunk => {
-                if (!handler.onData(chunk)) {
-                  stream.pause()
-                }
-              })
-            }
-
-            if (typeof handler.onComplete === 'function') {
-              stream.on('end', () => {
-                handler.onComplete(value.rawTrailers ?? [])
-              })
+        if (opts.method === 'HEAD') {
+          stream.destroy()
+        } else {
+          stream.on('data', function (chunk) {
+            if (typeof handler.onData === 'function' && !handler.onData(chunk)) {
+              stream.pause()
             }
-          }
-        } catch (err) {
-          stream.destroy(err)
-          if (!onErrorCalled && typeof handler.onError === 'function') {
-            handler.onError(err)
-            onErrorCalled = true
-          }
+          })
         }
       }
 
       /**
-       * @param {import('../../types/cache-interceptor.d.ts').default.CacheStoreReadable | undefined} stream
+       * @param {import('../../types/cache-interceptor.d.ts').default.GetResult} result
        */
-      const handleStream = (stream) => {
-        if (!stream) {
-          // Request isn't cached
-          return dispatch(opts, new CacheHandler(globalOpts, opts, handler))
-        }
-
-        const { value } = stream
+      const handleResult = (result) => {
+        // TODO (perf): Readable.from path can be optimized...
 
-        // Dump body on error
-        if (util.isStream(opts.body)) {
-          opts.body?.on('error', () => {}).resume()
+        if (!result.body && opts.method !== 'HEAD') {
+          throw new Error('stream is undefined but method isn\'t HEAD')
         }
 
         // Check if the response is stale
         const now = Date.now()
-        if (now >= value.staleAt) {
-          if (now >= value.deleteAt) {
-            // Safety check in case the store gave us a response that should've been
-            // deleted already
-            dispatch(opts, new CacheHandler(globalOpts, opts, handler))
-            return
+        if (now < result.staleAt) {
+          // Dump request body.
+          if (util.isStream(opts.body)) {
+            opts.body.on('error', () => {}).destroy()
           }
-
-          if (!opts.headers) {
-            opts.headers = {}
-          }
-
-          opts.headers['if-modified-since'] = new Date(value.cachedAt).toUTCString()
-
+          respondWithCachedValue(result)
+        } else if (util.isStream(opts.body) && util.bodyLength(opts.body) !== 0) {
+          // If body is is stream we can't revalidate...
+          // TODO (fix): This could be less strict...
+          dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
+        } else {
           // Need to revalidate the response
           dispatch(
-            opts,
+            {
+              ...opts,
+              headers: {
+                ...opts.headers,
+                'if-modified-since': new Date(result.cachedAt).toUTCString()
+              }
+            },
             new CacheRevalidationHandler(
-              () => respondWithCachedValue(stream, value),
-              new CacheHandler(globalOpts, opts, handler)
+              (success) => {
+                if (success) {
+                  respondWithCachedValue(result)
+                } else if (util.isStream(result.body)) {
+                  result.body.on('error', () => {}).destroy()
+                }
+              },
+              new CacheHandler(globalOpts, cacheKey, handler)
             )
           )
-
-          return
         }
-
-        respondWithCachedValue(stream, value)
       }
 
-      Promise.resolve(stream).then(handleStream).catch(handler.onError)
+      if (typeof result.then === 'function') {
+        result.then((result) => {
+          if (!result) {
+            dispatch(opts, new CacheHandler(globalOpts, cacheKey, handler))
+          } else {
+            handleResult(result)
+          }
+        }, err => {
+          if (typeof handler.onError === 'function') {
+            handler.onError(err)
+          } else {
+            throw err
+          }
+        })
+      } else {
+        handleResult(result)
+      }
 
       return true
     }
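
The interceptor now looks entries up with `store.get(cacheKey)` and accepts either a plain result or a thenable (see the `typeof result.then === 'function'` branch), with `result.body` being a stream or anything `Readable.from()` accepts. A rough sketch of what a third-party store's `get()` could return; the field names (`statusCode`, `statusMessage`, `rawHeaders`, `cachedAt`, `staleAt`, `body`) are the ones read in this hunk, and everything else here is an assumption rather than undici's documented interface:

    // Hypothetical store sketch; get() may be synchronous or promise-returning.
    class TinyStore {
      #entries = new Map()

      get (cacheKey) {
        const id = `${cacheKey.origin}:${cacheKey.method}:${cacheKey.path}`
        const entry = this.#entries.get(id)
        if (!entry) return undefined // cache miss -> interceptor dispatches normally
        return Promise.resolve({
          statusCode: entry.statusCode,
          statusMessage: entry.statusMessage,
          rawHeaders: entry.rawHeaders, // Buffer pairs, as undici passes them around
          cachedAt: entry.cachedAt,     // ms timestamp, used for the age header
          staleAt: entry.staleAt,       // ms timestamp, compared against Date.now()
          body: entry.body              // a Readable, or an iterable of chunks
        })
      }
    }
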
@@ -13,7 +13,6 @@ class DNSInstance {
   affinity = null
   lookup = null
   pick = null
-  lastIpFamily = null
 
   constructor (opts) {
     this.#maxTTL = opts.maxTTL
@@ -61,16 +60,23 @@ class DNSInstance {
         const ip = this.pick(
           origin,
           records,
-          // Only set affinity if dual stack is disabled
-          // otherwise let it go through normal flow
-          !newOpts.dualStack && newOpts.affinity
+          newOpts.affinity
         )
 
+        let port
+        if (typeof ip.port === 'number') {
+          port = `:${ip.port}`
+        } else if (origin.port !== '') {
+          port = `:${origin.port}`
+        } else {
+          port = ''
+        }
+
         cb(
           null,
           `${origin.protocol}//${
             ip.family === 6 ? `[${ip.address}]` : ip.address
-          }${origin.port === '' ? '' : `:${origin.port}`}`
+          }${port}`
         )
       })
     } else {
@@ -78,9 +84,7 @@ class DNSInstance {
       const ip = this.pick(
         origin,
         ips,
-        // Only set affinity if dual stack is disabled
-        // otherwise let it go through normal flow
-        !newOpts.dualStack && newOpts.affinity
+        newOpts.affinity
       )
 
       // If no IPs we lookup - deleting old records
@@ -90,11 +94,20 @@ class DNSInstance {
         return
       }
 
+      let port
+      if (typeof ip.port === 'number') {
+        port = `:${ip.port}`
+      } else if (origin.port !== '') {
+        port = `:${origin.port}`
+      } else {
+        port = ''
+      }
+
       cb(
         null,
         `${origin.protocol}//${
           ip.family === 6 ? `[${ip.address}]` : ip.address
-        }${origin.port === '' ? '' : `:${origin.port}`}`
+        }${port}`
       )
     }
   }
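
Both lookup paths also gain per-record port support: when the record chosen by `pick` carries a numeric `port`, it takes precedence over the origin's port when the new origin string is assembled. The selection logic, restated on its own (an illustrative restatement, not library code):

    function resolvePort (ip, origin) {
      if (typeof ip.port === 'number') return `:${ip.port}`
      if (origin.port !== '') return `:${origin.port}`
      return '' // fall back to the protocol's default port
    }

    resolvePort({ address: '127.0.0.1', family: 4, port: 8080 }, new URL('http://example.com'))
    // -> ':8080'
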
@@ -102,7 +115,11 @@ class DNSInstance {
   #defaultLookup (origin, opts, cb) {
     lookup(
       origin.hostname,
-      { all: true, family: this.dualStack === false ? this.affinity : 0 },
+      {
+        all: true,
+        family: this.dualStack === false ? this.affinity : 0,
+        order: 'ipv4first'
+      },
       (err, addresses) => {
         if (err) {
           return cb(err)
@@ -111,15 +128,9 @@ class DNSInstance {
         const results = new Map()
 
         for (const addr of addresses) {
-          const record = {
-            address: addr.address,
-            ttl: opts.maxTTL,
-            family: addr.family
-          }
-
           // On linux we found duplicates, we attempt to remove them with
           // the latest record
-          results.set(`${record.address}:${record.family}`, record)
+          results.set(`${addr.address}:${addr.family}`, addr)
         }
 
         cb(null, results.values())
@@ -129,36 +140,36 @@ class DNSInstance {
 
   #defaultPick (origin, hostnameRecords, affinity) {
     let ip = null
-    const { records, offset = 0 } = hostnameRecords
-    let newOffset = 0
+    const { records, offset } = hostnameRecords
+
+    let family
+    if (this.dualStack) {
+      if (affinity == null) {
+        // Balance between ip families
+        if (offset == null || offset === maxInt) {
+          hostnameRecords.offset = 0
+          affinity = 4
+        } else {
+          hostnameRecords.offset++
+          affinity = (hostnameRecords.offset & 1) === 1 ? 6 : 4
+        }
+      }
 
-    if (offset === maxInt) {
-      newOffset = 0
+      if (records[affinity] != null && records[affinity].ips.length > 0) {
+        family = records[affinity]
+      } else {
+        family = records[affinity === 4 ? 6 : 4]
+      }
     } else {
-      newOffset = offset + 1
+      family = records[affinity]
     }
 
-    // We balance between the two IP families
-    // If dual-stack disabled, we automatically pick the affinity
-    const newIpFamily = (newOffset & 1) === 1 ? 4 : 6
-    const family =
-      this.dualStack === false
-        ? records[this.affinity] // If dual-stack is disabled, we pick the default affiniy
-        : records[affinity] ?? records[newIpFamily]
-
-    // If no IPs and we have tried both families or dual stack is disabled, we return null
-    if (
-      (family == null || family.ips.length === 0) &&
-      // eslint-disable-next-line eqeqeq
-      (this.dualStack === false || this.lastIpFamily != newIpFamily)
-    ) {
+    // If no IPs we return null
+    if (family == null || family.ips.length === 0) {
       return ip
     }
 
-    family.offset = family.offset ?? 0
-    hostnameRecords.offset = newOffset
-
-    if (family.offset === maxInt) {
+    if (family.offset == null || family.offset === maxInt) {
       family.offset = 0
     } else {
       family.offset++
@@ -171,24 +182,28 @@ class DNSInstance {
       return ip
     }
 
-    const timestamp = Date.now()
-    // Record TTL is already in ms
-    if (ip.timestamp != null && timestamp - ip.timestamp > ip.ttl) {
+    if (Date.now() - ip.timestamp > ip.ttl) { // record TTL is already in ms
       // We delete expired records
       // It is possible that they have different TTL, so we manage them individually
       family.ips.splice(position, 1)
       return this.pick(origin, hostnameRecords, affinity)
     }
 
-    ip.timestamp = timestamp
-
-    this.lastIpFamily = newIpFamily
     return ip
   }
 
   setRecords (origin, addresses) {
+    const timestamp = Date.now()
     const records = { records: { 4: null, 6: null } }
     for (const record of addresses) {
+      record.timestamp = timestamp
+      if (typeof record.ttl === 'number') {
+        // The record TTL is expected to be in ms
+        record.ttl = Math.min(record.ttl, this.#maxTTL)
+      } else {
+        record.ttl = this.#maxTTL
+      }
+
       const familyRecords = records.records[record.family] ?? { ips: [] }
 
       familyRecords.ips.push(record)
@@ -302,12 +317,20 @@ module.exports = interceptorOpts => {
     throw new InvalidArgumentError('Invalid pick. Must be a function')
   }
 
+  const dualStack = interceptorOpts?.dualStack ?? true
+  let affinity
+  if (dualStack) {
+    affinity = interceptorOpts?.affinity ?? null
+  } else {
+    affinity = interceptorOpts?.affinity ?? 4
+  }
+
   const opts = {
     maxTTL: interceptorOpts?.maxTTL ?? 10e3, // Expressed in ms
     lookup: interceptorOpts?.lookup ?? null,
     pick: interceptorOpts?.pick ?? null,
-    dualStack: interceptorOpts?.dualStack ?? true,
-    affinity: interceptorOpts?.affinity ?? 4,
+    dualStack,
+    affinity,
     maxItems: interceptorOpts?.maxItems ?? Infinity
   }
 
package/lib/util/cache.js CHANGED
@@ -4,6 +4,28 @@ const {
   safeHTTPMethods
 } = require('../core/util')
 
+/**
+ *
+ * @param {import('../../types/dispatcher.d.ts').default.DispatchOptions} opts
+ */
+function makeCacheKey (opts) {
+  if (!opts.origin) {
+    throw new Error('opts.origin is undefined')
+  }
+
+  /**
+   * @type {import('../../types/cache-interceptor.d.ts').default.CacheKey}
+   */
+  const cacheKey = {
+    origin: opts.origin.toString(),
+    method: opts.method,
+    path: opts.path,
+    headers: opts.headers
+  }
+
+  return cacheKey
+}
+
 /**
  * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-cache-control
  * @see https://www.iana.org/assignments/http-cache-directives/http-cache-directives.xhtml
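
`makeCacheKey()` is the new shared helper used by the cache interceptor above: it rejects options without an `origin` and reduces the dispatch options to `{ origin, method, path, headers }`. With hypothetical input it behaves like this:

    makeCacheKey({
      origin: new URL('https://example.com'),
      method: 'GET',
      path: '/data',
      headers: { accept: 'application/json' }
    })
    // -> {
    //   origin: 'https://example.com/',
    //   method: 'GET',
    //   path: '/data',
    //   headers: { accept: 'application/json' }
    // }
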
@@ -27,7 +49,7 @@ const {
  * 'only-if-cached'?: true;
  * }} CacheControlDirectives
  *
- * @param {string} header
+ * @param {string | string[]} header
  * @returns {CacheControlDirectives}
  */
 function parseCacheControlHeader (header) {
@@ -36,9 +58,9 @@ function parseCacheControlHeader (header) {
    */
   const output = {}
 
-  const directives = header.toLowerCase().split(',')
+  const directives = Array.isArray(header) ? header : header.split(',')
   for (let i = 0; i < directives.length; i++) {
-    const directive = directives[i]
+    const directive = directives[i].toLowerCase()
     const keyValueDelimiter = directive.indexOf('=')
 
     let key
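
`parseCacheControlHeader()` now also accepts the header value as an array, as it may arrive when a server sends multiple cache-control headers, and lower-casing moves from the whole header string to each individual directive. Both call forms below should be treated equivalently:

    parseCacheControlHeader('Max-Age=3600, stale-while-revalidate=60')
    parseCacheControlHeader(['Max-Age=3600', ' stale-while-revalidate=60'])
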
@@ -154,20 +176,22 @@ function parseCacheControlHeader (header) {
 }
 
 /**
- * @param {string} varyHeader Vary header from the server
- * @param {Record<string, string>} headers Request headers
- * @returns {Record<string, string>}
+ * @param {string | string[]} varyHeader Vary header from the server
+ * @param {Record<string, string | string[]>} headers Request headers
+ * @returns {Record<string, string | string[]>}
  */
 function parseVaryHeader (varyHeader, headers) {
-  if (varyHeader === '*') {
+  if (typeof varyHeader === 'string' && varyHeader === '*') {
     return headers
   }
 
-  const output = /** @type {Record<string, string>} */ ({})
+  const output = /** @type {Record<string, string | string[]>} */ ({})
 
-  const varyingHeaders = varyHeader.toLowerCase().split(',')
+  const varyingHeaders = typeof varyHeader === 'string'
+    ? varyHeader.split(',')
+    : varyHeader
   for (const header of varyingHeaders) {
-    const trimmedHeader = header.trim()
+    const trimmedHeader = header.trim().toLowerCase()
 
     if (headers[trimmedHeader]) {
       output[trimmedHeader] = headers[trimmedHeader]
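
`parseVaryHeader()` gets the matching treatment: the vary value may be a string array, each header name is trimmed and lower-cased, and request header values may themselves be arrays. For example:

    parseVaryHeader('Accept-Encoding, User-Agent', {
      'accept-encoding': 'gzip',
      'user-agent': 'undici'
    })
    // -> { 'accept-encoding': 'gzip', 'user-agent': 'undici' }

    parseVaryHeader(['accept-encoding'], { 'accept-encoding': ['gzip', 'br'] })
    // -> { 'accept-encoding': ['gzip', 'br'] }
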
@@ -186,14 +210,14 @@ function assertCacheStore (store, name = 'CacheStore') {
     throw new TypeError(`expected type of ${name} to be a CacheStore, got ${store === null ? 'null' : typeof store}`)
   }
 
-  for (const fn of ['createReadStream', 'createWriteStream', 'deleteByOrigin']) {
+  for (const fn of ['get', 'createWriteStream', 'delete']) {
     if (typeof store[fn] !== 'function') {
       throw new TypeError(`${name} needs to have a \`${fn}()\` function`)
     }
   }
 
-  if (typeof store.isFull !== 'boolean') {
-    throw new TypeError(`${name} needs a isFull getter with type boolean, current type: ${typeof store.isFull}`)
+  if (typeof store.isFull !== 'undefined' && typeof store.isFull !== 'boolean') {
+    throw new TypeError(`${name} needs a isFull getter with type boolean or undefined, current type: ${typeof store.isFull}`)
   }
 }
 /**
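
The store contract enforced by `assertCacheStore()` changes accordingly: a store must expose `get()`, `createWriteStream()` and `delete()` rather than `createReadStream()`/`deleteByOrigin()`, and `isFull` becomes optional. The smallest shape that passes the assertion looks roughly like this (stubs only, not a working cache):

    const store = {
      get (key) { return undefined },                 // always a cache miss
      createWriteStream (key, value) { return null }, // drop writes
      delete (key) {}
      // isFull is optional; if present it must be a boolean
    }
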
@@ -217,6 +241,7 @@ function assertCacheMethods (methods, name = 'CacheMethods') {
 }
 
 module.exports = {
+  makeCacheKey,
   parseCacheControlHeader,
   parseVaryHeader,
   assertCacheMethods,
@@ -91,6 +91,16 @@ function getSetCookies (headers) {
   return cookies.map((pair) => parseSetCookie(pair))
 }
 
+/**
+ * Parses a cookie string
+ * @param {string} cookie
+ */
+function parseCookie (cookie) {
+  cookie = webidl.converters.DOMString(cookie)
+
+  return parseSetCookie(cookie)
+}
+
 /**
  * @param {Headers} headers
  * @param {Cookie} cookie
@@ -184,5 +194,6 @@ module.exports = {
   getCookies,
   deleteCookie,
   getSetCookies,
-  setCookie
+  setCookie,
+  parseCookie
 }
@@ -4,6 +4,7 @@ const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants')
 const { isCTLExcludingHtab } = require('./util')
 const { collectASequenceOfCodePointsFast } = require('../fetch/data-url')
 const assert = require('node:assert')
+const { unescape } = require('node:querystring')
 
 /**
  * @description Parses the field-value attributes of a set-cookie header string.
@@ -76,8 +77,12 @@ function parseSetCookie (header) {
 
   // 6. The cookie-name is the name string, and the cookie-value is the
   // value string.
+  // https://datatracker.ietf.org/doc/html/rfc6265
+  // To maximize compatibility with user agents, servers that wish to
+  // store arbitrary data in a cookie-value SHOULD encode that data, for
+  // example, using Base64 [RFC4648].
   return {
-    name, value, ...parseUnparsedAttributes(unparsedAttributes)
+    name, value: unescape(value), ...parseUnparsedAttributes(unparsedAttributes)
   }
 }
 
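
Combined with the new `parseCookie()` helper exported earlier in this diff, cookie values are now percent-decoded with `unescape` from `node:querystring`, following RFC 6265's advice that servers encode arbitrary data stored in cookie values. Roughly (illustrative, attribute details elided):

    parseCookie('session=a%20b%20c; Path=/')
    // name: 'session', value: 'a b c', plus the parsed attributes
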
@@ -364,12 +364,8 @@ function bodyMixinMethods (instance, getInternalState) {
       switch (mimeType.essence) {
         case 'multipart/form-data': {
           // 1. ... [long step]
-          const parsed = multipartFormDataParser(value, mimeType)
-
           // 2. If that fails for some reason, then throw a TypeError.
-          if (parsed === 'failure') {
-            throw new TypeError('Failed to parse body as FormData.')
-          }
+          const parsed = multipartFormDataParser(value, mimeType)
 
           // 3. Return a new FormData object, appending each entry,
           // resulting from the parsing operation, to its entry list.
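
The last hunk drops the `'failure'` sentinel check, presumably because `multipartFormDataParser` now throws on malformed input itself, so `.formData()` callers still get a rejection for bad bodies, just with the parser's own error. Typical usage is unchanged (sketch, assuming an endpoint that returns a multipart body):

    async function readForm (url) {
      const res = await fetch(url)
      // Malformed multipart bodies still reject; the error now originates
      // inside the parser rather than from the removed TypeError check.
      const form = await res.formData()
      return [...form.keys()]
    }
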