undici 7.10.0 → 7.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/README.md +157 -0
  2. package/docs/docs/api/CacheStore.md +3 -3
  3. package/docs/docs/api/Debug.md +13 -13
  4. package/docs/docs/api/DiagnosticsChannel.md +25 -0
  5. package/docs/docs/api/Dispatcher.md +20 -1
  6. package/docs/docs/api/GlobalInstallation.md +91 -0
  7. package/docs/docs/api/MockClient.md +4 -0
  8. package/docs/docs/api/MockPool.md +6 -0
  9. package/docs/docs/api/ProxyAgent.md +2 -0
  10. package/docs/docs/api/RetryAgent.md +6 -1
  11. package/docs/docs/api/RetryHandler.md +1 -0
  12. package/index.js +15 -0
  13. package/lib/api/api-stream.js +1 -1
  14. package/lib/cache/memory-cache-store.js +3 -3
  15. package/lib/cache/sqlite-cache-store.js +1 -1
  16. package/lib/core/connect.js +21 -51
  17. package/lib/core/diagnostics.js +6 -4
  18. package/lib/core/request.js +6 -0
  19. package/lib/core/util.js +0 -45
  20. package/lib/dispatcher/client-h1.js +1 -1
  21. package/lib/dispatcher/proxy-agent.js +2 -1
  22. package/lib/handler/retry-handler.js +110 -56
  23. package/lib/mock/mock-client.js +4 -0
  24. package/lib/mock/mock-pool.js +4 -0
  25. package/lib/util/cache.js +11 -1
  26. package/lib/util/timers.js +11 -9
  27. package/lib/web/cache/cache.js +1 -1
  28. package/lib/web/cache/cachestorage.js +1 -1
  29. package/lib/web/cookies/index.js +1 -1
  30. package/lib/web/eventsource/eventsource.js +3 -6
  31. package/lib/web/eventsource/util.js +1 -1
  32. package/lib/web/fetch/body.js +2 -2
  33. package/lib/web/fetch/dispatcher-weakref.js +0 -41
  34. package/lib/web/fetch/formdata-parser.js +4 -4
  35. package/lib/web/fetch/formdata.js +1 -1
  36. package/lib/web/fetch/headers.js +1 -1
  37. package/lib/web/fetch/index.js +7 -1
  38. package/lib/web/fetch/request.js +1 -1
  39. package/lib/web/fetch/response.js +1 -1
  40. package/lib/web/fetch/util.js +2 -2
  41. package/lib/web/{fetch/webidl.js → webidl/index.js} +57 -9
  42. package/lib/web/websocket/connection.js +4 -3
  43. package/lib/web/websocket/events.js +1 -1
  44. package/lib/web/websocket/frame.js +2 -1
  45. package/lib/web/websocket/stream/websocketerror.js +1 -1
  46. package/lib/web/websocket/stream/websocketstream.js +1 -1
  47. package/lib/web/websocket/websocket.js +4 -4
  48. package/package.json +4 -4
  49. package/types/diagnostics-channel.d.ts +9 -0
  50. package/types/dispatcher.d.ts +3 -2
  51. package/types/env-http-proxy-agent.d.ts +2 -1
  52. package/types/eventsource.d.ts +3 -3
  53. package/types/fetch.d.ts +1 -0
  54. package/types/handlers.d.ts +1 -1
  55. package/types/mock-client.d.ts +2 -0
  56. package/types/mock-interceptor.d.ts +2 -0
  57. package/types/mock-pool.d.ts +2 -0
  58. package/types/retry-handler.d.ts +9 -0
  59. package/types/webidl.d.ts +19 -15
  60. package/types/websocket.d.ts +1 -1
package/README.md CHANGED
@@ -43,6 +43,125 @@ The benchmark is a simple getting data [example](https://github.com/nodejs/undic
43
43
  └────────────────────────┴─────────┴────────────────────┴────────────┴─────────────────────────┘
44
44
  ```
45
45
 
46
+ ## Undici vs. Fetch
47
+
48
+ ### Overview
49
+
50
+ Node.js includes a built-in `fetch()` implementation powered by undici starting from Node.js v18. However, there are important differences between using the built-in fetch and installing undici as a separate module.
51
+
52
+ ### Built-in Fetch (Node.js v18+)
53
+
54
+ Node.js's built-in fetch is powered by a bundled version of undici:
55
+
56
+ ```js
57
+ // Available globally in Node.js v18+
58
+ const response = await fetch('https://api.example.com/data');
59
+ const data = await response.json();
60
+
61
+ // Check the bundled undici version
62
+ console.log(process.versions.undici); // e.g., "5.28.4"
63
+ ```
64
+
65
+ **Pros:**
66
+ - No additional dependencies required
67
+ - Works across different JavaScript runtimes
68
+ - Automatic compression handling (gzip, deflate, br)
69
+ - Built-in caching support (in development)
70
+
71
+ **Cons:**
72
+ - Limited to the undici version bundled with your Node.js version
73
+ - Less control over connection pooling and advanced features
74
+ - Error handling follows Web API standards (errors wrapped in `TypeError`)
75
+ - Performance overhead due to Web Streams implementation
76
+
77
+ ### Undici Module
78
+
79
+ Installing undici as a separate module gives you access to the latest features and APIs:
80
+
81
+ ```bash
82
+ npm install undici
83
+ ```
84
+
85
+ ```js
86
+ import { request, fetch, Agent, setGlobalDispatcher } from 'undici';
87
+
88
+ // Use undici.request for maximum performance
89
+ const { statusCode, headers, body } = await request('https://api.example.com/data');
90
+ const data = await body.json();
91
+
92
+ // Or use undici.fetch with custom configuration
93
+ const agent = new Agent({ keepAliveTimeout: 10000 });
94
+ setGlobalDispatcher(agent);
95
+ const response = await fetch('https://api.example.com/data');
96
+ ```
97
+
98
+ **Pros:**
99
+ - Latest undici features and bug fixes
100
+ - Access to advanced APIs (`request`, `stream`, `pipeline`)
101
+ - Fine-grained control over connection pooling
102
+ - Better error handling with clearer error messages
103
+ - Superior performance, especially with `undici.request`
104
+ - HTTP/1.1 pipelining support
105
+ - Custom interceptors and middleware
106
+ - Advanced features like `ProxyAgent`, `MockAgent`
107
+
108
+ **Cons:**
109
+ - Additional dependency to manage
110
+ - Larger bundle size
111
+
112
+ ### When to Use Each
113
+
114
+ #### Use Built-in Fetch When:
115
+ - You want zero dependencies
116
+ - Building isomorphic code that runs in browsers and Node.js
117
+ - Simple HTTP requests without advanced configuration
118
+ - You're okay with the undici version bundled in your Node.js version
119
+
120
+ #### Use Undici Module When:
121
+ - You need the latest undici features and performance improvements
122
+ - You require advanced connection pooling configuration
123
+ - You need APIs not available in the built-in fetch (`ProxyAgent`, `MockAgent`, etc.)
124
+ - Performance is critical (use `undici.request` for maximum speed)
125
+ - You want better error handling and debugging capabilities
126
+ - You need HTTP/1.1 pipelining or advanced interceptors
127
+ - You prefer decoupled protocol and API interfaces
128
+
129
+ ### Performance Comparison
130
+
131
+ Based on benchmarks, here's the typical performance hierarchy:
132
+
133
+ 1. **`undici.request()`** - Fastest, most efficient
134
+ 2. **`undici.fetch()`** - Good performance, standard compliance
135
+ 3. **Node.js `http`/`https`** - Baseline performance
136
+
137
+ ### Migration Guide
138
+
139
+ If you're currently using built-in fetch and want to migrate to undici:
140
+
141
+ ```js
142
+ // Before: Built-in fetch
143
+ const response = await fetch('https://api.example.com/data');
144
+
145
+ // After: Undici fetch (drop-in replacement)
146
+ import { fetch } from 'undici';
147
+ const response = await fetch('https://api.example.com/data');
148
+
149
+ // Or: Undici request (better performance)
150
+ import { request } from 'undici';
151
+ const { statusCode, body } = await request('https://api.example.com/data');
152
+ const data = await body.json();
153
+ ```
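One practical difference worth calling out when migrating: `request()` has no `response.ok`, and its `headers` come back as a plain object keyed by lower-cased header names rather than a `Headers` instance. A minimal sketch of the equivalent checks (the endpoint is a placeholder):

```js
import { request } from 'undici'

const { statusCode, headers, body } = await request('https://api.example.com/data')

// fetch's response.ok becomes an explicit status-code check,
// and headers are read from a plain object with lower-cased keys.
if (statusCode >= 200 && statusCode < 300) {
  console.log(headers['content-type'])
  console.log(await body.json())
} else {
  // Consume or discard the body so the underlying connection can be reused.
  await body.dump()
}
```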
154
+
155
+ ### Version Compatibility
156
+
157
+ You can check which version of undici is bundled with your Node.js version:
158
+
159
+ ```js
160
+ console.log(process.versions.undici);
161
+ ```
162
+
163
+ Installing undici as a module allows you to use a newer version than what's bundled with Node.js, giving you access to the latest features and performance improvements.
164
+
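To make the error-handling difference mentioned above concrete, here is a minimal sketch (the `.invalid` hostname is just an unreachable placeholder): the built-in `fetch` wraps network failures in a `TypeError` with the underlying error on `cause`, while `undici.request` surfaces the low-level error directly.

```js
import { request } from 'undici'

try {
  await fetch('https://nonexistent.invalid/')
} catch (err) {
  // Built-in fetch: TypeError('fetch failed'), real cause attached on err.cause
  console.log(err.name, err.cause?.code)
}

try {
  await request('https://nonexistent.invalid/')
} catch (err) {
  // undici.request: the underlying error is thrown directly (e.g. ENOTFOUND)
  console.log(err.code)
}
```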
46
165
  ## Quick Start
47
166
 
48
167
  ```js
@@ -63,6 +182,44 @@ for await (const data of body) { console.log('data', data) }
63
182
  console.log('trailers', trailers)
64
183
  ```
65
184
 
185
+ ## Global Installation
186
+
187
+ Undici provides an `install()` function to add all WHATWG fetch classes to `globalThis`, making them available globally:
188
+
189
+ ```js
190
+ import { install } from 'undici'
191
+
192
+ // Install all WHATWG fetch classes globally
193
+ install()
194
+
195
+ // Now you can use fetch classes globally without importing
196
+ const response = await fetch('https://api.example.com/data')
197
+ const data = await response.json()
198
+
199
+ // All classes are available globally:
200
+ const headers = new Headers([['content-type', 'application/json']])
201
+ const request = new Request('https://example.com')
202
+ const formData = new FormData()
203
+ const ws = new WebSocket('wss://example.com')
204
+ const eventSource = new EventSource('https://example.com/events')
205
+ ```
206
+
207
+ The `install()` function adds the following classes to `globalThis`:
208
+
209
+ - `fetch` - The fetch function
210
+ - `Headers` - HTTP headers management
211
+ - `Response` - HTTP response representation
212
+ - `Request` - HTTP request representation
213
+ - `FormData` - Form data handling
214
+ - `WebSocket` - WebSocket client
215
+ - `CloseEvent`, `ErrorEvent`, `MessageEvent` - WebSocket events
216
+ - `EventSource` - Server-sent events client
217
+
218
+ This is useful for:
219
+ - Polyfilling environments that don't have fetch
220
+ - Ensuring consistent fetch behavior across different Node.js versions
221
+ - Making undici's implementations available globally for libraries that expect them
222
+
66
223
  ## Body Mixins
67
224
 
68
225
  The `body` mixins are the most common way to format the request/response body. Mixins include:
package/docs/docs/api/CacheStore.md CHANGED
@@ -13,9 +13,9 @@ The `MemoryCacheStore` stores the responses in-memory.
13
13
 
14
14
  **Options**
15
15
 
16
- - `maxSize` - The maximum total size in bytes of all stored responses. Default `Infinity`.
17
- - `maxCount` - The maximum amount of responses to store. Default `Infinity`.
18
- - `maxEntrySize` - The maximum size in bytes that a response's body can be. If a response's body is greater than or equal to this, the response will not be cached. Default `Infinity`.
16
+ - `maxSize` - The maximum total size in bytes of all stored responses. Default `104857600` (100MB).
17
+ - `maxCount` - The maximum amount of responses to store. Default `1024`.
18
+ - `maxEntrySize` - The maximum size in bytes that a response's body can be. If a response's body is greater than or equal to this, the response will not be cached. Default `5242880` (5MB).
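The new defaults can be overridden when constructing the store. A minimal sketch, assuming the store is wired up through undici's `cacheStores` and `interceptors.cache` exports as in recent 7.x releases (the limit values here are arbitrary):

```js
import { Agent, interceptors, cacheStores, setGlobalDispatcher } from 'undici'

const store = new cacheStores.MemoryCacheStore({
  maxSize: 1024 * 1024 * 1024,   // total bytes across all cached responses
  maxCount: 10_000,              // number of responses kept
  maxEntrySize: 64 * 1024 * 1024 // largest single body that will be cached
})

setGlobalDispatcher(new Agent().compose(interceptors.cache({ store })))

const response = await fetch('https://api.example.com/data')
```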
19
19
 
20
20
  ### Getters
21
21
 
package/docs/docs/api/Debug.md CHANGED
@@ -14,14 +14,14 @@ NODE_DEBUG=undici node script.js
14
14
  UNDICI 16241: connecting to nodejs.org using https:h1
15
15
  UNDICI 16241: connecting to nodejs.org using https:h1
16
16
  UNDICI 16241: connected to nodejs.org using https:h1
17
- UNDICI 16241: sending request to GET https://nodejs.org//
18
- UNDICI 16241: received response to GET https://nodejs.org// - HTTP 307
17
+ UNDICI 16241: sending request to GET https://nodejs.org/
18
+ UNDICI 16241: received response to GET https://nodejs.org/ - HTTP 307
19
19
  UNDICI 16241: connecting to nodejs.org using https:h1
20
- UNDICI 16241: trailers received from GET https://nodejs.org//
20
+ UNDICI 16241: trailers received from GET https://nodejs.org/
21
21
  UNDICI 16241: connected to nodejs.org using https:h1
22
- UNDICI 16241: sending request to GET https://nodejs.org//en
23
- UNDICI 16241: received response to GET https://nodejs.org//en - HTTP 200
24
- UNDICI 16241: trailers received from GET https://nodejs.org//en
22
+ UNDICI 16241: sending request to GET https://nodejs.org/en
23
+ UNDICI 16241: received response to GET https://nodejs.org/en - HTTP 200
24
+ UNDICI 16241: trailers received from GET https://nodejs.org/en
25
25
  ```
26
26
 
27
27
  ## `fetch`
@@ -36,14 +36,14 @@ NODE_DEBUG=fetch node script.js
36
36
  FETCH 16241: connecting to nodejs.org using https:h1
37
37
  FETCH 16241: connecting to nodejs.org using https:h1
38
38
  FETCH 16241: connected to nodejs.org using https:h1
39
- FETCH 16241: sending request to GET https://nodejs.org//
40
- FETCH 16241: received response to GET https://nodejs.org// - HTTP 307
39
+ FETCH 16241: sending request to GET https://nodejs.org/
40
+ FETCH 16241: received response to GET https://nodejs.org/ - HTTP 307
41
41
  FETCH 16241: connecting to nodejs.org using https:h1
42
- FETCH 16241: trailers received from GET https://nodejs.org//
42
+ FETCH 16241: trailers received from GET https://nodejs.org/
43
43
  FETCH 16241: connected to nodejs.org using https:h1
44
- FETCH 16241: sending request to GET https://nodejs.org//en
45
- FETCH 16241: received response to GET https://nodejs.org//en - HTTP 200
46
- FETCH 16241: trailers received from GET https://nodejs.org//en
44
+ FETCH 16241: sending request to GET https://nodejs.org/en
45
+ FETCH 16241: received response to GET https://nodejs.org/en - HTTP 200
46
+ FETCH 16241: trailers received from GET https://nodejs.org/en
47
47
  ```
48
48
 
49
49
  ## `websocket`
@@ -57,6 +57,6 @@ NODE_DEBUG=websocket node script.js
57
57
 
58
58
  WEBSOCKET 18309: connecting to echo.websocket.org using https:h1
59
59
  WEBSOCKET 18309: connected to echo.websocket.org using https:h1
60
- WEBSOCKET 18309: sending request to GET https://echo.websocket.org//
60
+ WEBSOCKET 18309: sending request to GET https://echo.websocket.org/
61
61
  WEBSOCKET 18309: connection opened <ip_address>
62
62
  ```
package/docs/docs/api/DiagnosticsChannel.md CHANGED
@@ -27,9 +27,22 @@ diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => {
27
27
 
28
28
  Note: a request is only loosely completed to a given socket.
29
29
 
30
+ ## `undici:request:bodyChunkSent`
31
+
32
+ This message is published when a chunk of the request body is being sent.
33
+
34
+ ```js
35
+ import diagnosticsChannel from 'diagnostics_channel'
36
+
37
+ diagnosticsChannel.channel('undici:request:bodyChunkSent').subscribe(({ request, chunk }) => {
38
+ // request is the same object undici:request:create
39
+ })
40
+ ```
30
41
 
31
42
  ## `undici:request:bodySent`
32
43
 
44
+ This message is published after the request body has been fully sent.
45
+
33
46
  ```js
34
47
  import diagnosticsChannel from 'diagnostics_channel'
35
48
 
@@ -54,6 +67,18 @@ diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, respo
54
67
  })
55
68
  ```
56
69
 
70
+ ## `undici:request:bodyChunkReceived`
71
+
72
+ This message is published after a chunk of the response body has been received.
73
+
74
+ ```js
75
+ import diagnosticsChannel from 'diagnostics_channel'
76
+
77
+ diagnosticsChannel.channel('undici:request:bodyChunkReceived').subscribe(({ request, chunk }) => {
78
+ // request is the same object undici:request:create
79
+ })
80
+ ```
81
+
57
82
  ## `undici:request:trailers`
58
83
 
59
84
  This message is published after the response body and trailers have been received, i.e. the response has been completed.
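Taken together, the two new `bodyChunk*` channels above and the existing `undici:request:trailers` channel make it straightforward to observe per-request payload sizes. A small sketch (the counters are process-wide here for brevity):

```js
import diagnosticsChannel from 'diagnostics_channel'

let bytesSent = 0
let bytesReceived = 0

diagnosticsChannel.channel('undici:request:bodyChunkSent').subscribe(({ chunk }) => {
  bytesSent += chunk.length
})

diagnosticsChannel.channel('undici:request:bodyChunkReceived').subscribe(({ chunk }) => {
  bytesReceived += chunk.length
})

diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request }) => {
  console.log(`${request.method} ${request.path}: sent ${bytesSent}B, received ${bytesReceived}B`)
})
```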
package/docs/docs/api/Dispatcher.md CHANGED
@@ -841,9 +841,28 @@ try {
841
841
  Compose a new dispatcher from the current dispatcher and the given interceptors.
842
842
 
843
843
  > _Notes_:
844
- > - The order of the interceptors matters. The first interceptor will be the first to be called.
844
+ > - The order of the interceptors matters. The last interceptor will be the first to be called.
845
845
  > - It is important to note that the `interceptor` function should return a function that follows the `Dispatcher.dispatch` signature.
846
846
  > - Any fork of the chain of `interceptors` can lead to unexpected results.
847
+ >
848
+ > **Interceptor Stack Visualization:**
849
+ > ```
850
+ > compose([interceptor1, interceptor2, interceptor3])
851
+ >
852
+ > Request Flow:
853
+ > ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐
854
+ > │ Request │───▶│interceptor3 │───▶│interceptor2 │───▶│interceptor1 │───▶│ dispatcher │
855
+ > └─────────────┘ └─────────────┘ └─────────────┘ └─────────────┘ │ .dispatch │
856
+ > ▲ ▲ ▲ └─────────────┘
857
+ > │ │ │ ▲
858
+ > (called first) (called second) (called last) │
859
+ > │
860
+ > ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
861
+ > │ Response │◀───│interceptor3 │◀───│interceptor2 │◀───│interceptor1 │◀─────────┘
862
+ > └─────────────┘ └─────────────┘ └─────────────┘ └─────────────┘
863
+ >
864
+ > The interceptors are composed in reverse order due to function composition.
865
+ > ```
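To see that ordering in practice, a minimal sketch with two hypothetical logging interceptors (the names are illustrative):

```js
import { Agent } from 'undici'

// Interceptor factory: log, then delegate to the next dispatch in the chain.
function logging (name) {
  return (dispatch) => (opts, handler) => {
    console.log(`${name}: ${opts.method} ${opts.path}`)
    return dispatch(opts, handler)
  }
}

const dispatcher = new Agent().compose([logging('first-in-list'), logging('last-in-list')])

// Prints "last-in-list: ..." before "first-in-list: ...": the last interceptor
// in the list is the outermost wrapper, so it runs first on each request.
await dispatcher.request({ origin: 'https://example.com', method: 'GET', path: '/' })
```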
847
866
 
848
867
  Arguments:
849
868
 
package/docs/docs/api/GlobalInstallation.md CHANGED
@@ -0,0 +1,91 @@
1
+ # Global Installation
2
+
3
+ Undici provides an `install()` function to add all WHATWG fetch classes to `globalThis`, making them available globally without requiring imports.
4
+
5
+ ## `install()`
6
+
7
+ Install all WHATWG fetch classes globally on `globalThis`.
8
+
9
+ **Example:**
10
+
11
+ ```js
12
+ import { install } from 'undici'
13
+
14
+ // Install all WHATWG fetch classes globally
15
+ install()
16
+
17
+ // Now you can use fetch classes globally without importing
18
+ const response = await fetch('https://api.example.com/data')
19
+ const data = await response.json()
20
+
21
+ // All classes are available globally:
22
+ const headers = new Headers([['content-type', 'application/json']])
23
+ const request = new Request('https://example.com')
24
+ const formData = new FormData()
25
+ const ws = new WebSocket('wss://example.com')
26
+ const eventSource = new EventSource('https://example.com/events')
27
+ ```
28
+
29
+ ## Installed Classes
30
+
31
+ The `install()` function adds the following classes to `globalThis`:
32
+
33
+ | Class | Description |
34
+ |-------|-------------|
35
+ | `fetch` | The fetch function for making HTTP requests |
36
+ | `Headers` | HTTP headers management |
37
+ | `Response` | HTTP response representation |
38
+ | `Request` | HTTP request representation |
39
+ | `FormData` | Form data handling |
40
+ | `WebSocket` | WebSocket client |
41
+ | `CloseEvent` | WebSocket close event |
42
+ | `ErrorEvent` | WebSocket error event |
43
+ | `MessageEvent` | WebSocket message event |
44
+ | `EventSource` | Server-sent events client |
45
+
46
+ ## Use Cases
47
+
48
+ Global installation is useful for:
49
+
50
+ - **Polyfilling environments** that don't have native fetch support
51
+ - **Ensuring consistent behavior** across different Node.js versions
52
+ - **Library compatibility** when third-party libraries expect global fetch
53
+ - **Migration scenarios** where you want to replace built-in implementations
54
+ - **Testing environments** where you need predictable fetch behavior
55
+
56
+ ## Example: Polyfilling an Environment
57
+
58
+ ```js
59
+ import { install } from 'undici'
60
+
61
+ // Check if fetch is available and install if needed
62
+ if (typeof globalThis.fetch === 'undefined') {
63
+ install()
64
+ console.log('Undici fetch installed globally')
65
+ }
66
+
67
+ // Now fetch is guaranteed to be available
68
+ const response = await fetch('https://api.example.com')
69
+ ```
70
+
71
+ ## Example: Testing Environment
72
+
73
+ ```js
74
+ import { install } from 'undici'
75
+
76
+ // In test setup, ensure consistent fetch behavior
77
+ install()
78
+
79
+ // Now all tests use undici's implementations
80
+ test('fetch API test', async () => {
81
+ const response = await fetch('https://example.com')
82
+ expect(response).toBeInstanceOf(Response)
83
+ })
84
+ ```
85
+
86
+ ## Notes
87
+
88
+ - The `install()` function overwrites any existing global implementations
89
+ - Classes installed are undici's implementations, not Node.js built-ins
90
+ - This provides access to undici's latest features and performance improvements
91
+ - The global installation persists for the lifetime of the process
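Because `install()` overwrites any existing global implementations (see the notes above), one option is to keep references to the previous globals so they can be restored later, e.g. in test teardown. A sketch:

```js
import { install } from 'undici'

// Keep the originals before they are overwritten.
const originals = {
  fetch: globalThis.fetch,
  Headers: globalThis.Headers,
  Response: globalThis.Response,
  Request: globalThis.Request
}

install()

// ... code that relies on undici's globals ...

// Restore the previous implementations when done.
Object.assign(globalThis, originals)
```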
package/docs/docs/api/MockClient.md CHANGED
@@ -38,6 +38,10 @@ const mockClient = mockAgent.get('http://localhost:3000')
38
38
 
39
39
  Implements: [`MockPool.intercept(options)`](/docs/docs/api/MockPool.md#mockpoolinterceptoptions)
40
40
 
41
+ ### `MockClient.cleanMocks()`
42
+
43
+ Implements: [`MockPool.cleanMocks()`](/docs/docs/api/MockPool.md#mockpoolcleanmocks)
44
+
41
45
  ### `MockClient.close()`
42
46
 
43
47
  Implements: [`MockPool.close()`](/docs/docs/api/MockPool.md#mockpoolclose)
package/docs/docs/api/MockPool.md CHANGED
@@ -546,3 +546,9 @@ for await (const data of body) {
546
546
  console.log('data', data.toString('utf8')) // data foo
547
547
  }
548
548
  ```
549
+
550
+ ### `MockPool.cleanMocks()`
551
+
552
+ This method cleans up all the prepared mocks.
553
+
554
+ Returns: `void`
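A short sketch of resetting prepared mocks between test cases with the new method, using only documented MockAgent APIs:

```js
import { MockAgent, setGlobalDispatcher, request } from 'undici'

const mockAgent = new MockAgent()
mockAgent.disableNetConnect()
setGlobalDispatcher(mockAgent)

const mockPool = mockAgent.get('http://localhost:3000')
mockPool.intercept({ path: '/user', method: 'GET' }).reply(200, { id: 1 })

const { body } = await request('http://localhost:3000/user')
console.log(await body.json()) // { id: 1 }

// Drop every prepared mock on this pool, e.g. in an afterEach hook.
mockPool.cleanMocks()
```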
package/docs/docs/api/ProxyAgent.md CHANGED
@@ -17,6 +17,8 @@ Returns: `ProxyAgent`
17
17
  Extends: [`AgentOptions`](/docs/docs/api/Agent.md#parameter-agentoptions)
18
18
  > It omits `AgentOptions#connect`.
19
19
 
20
+ > **Note:** When `AgentOptions#connections` is set, and different from `0`, the non-standard [`proxy-connection` header](https://udger.com/resources/http-request-headers-detail?header=Proxy-Connection) will be set to `keep-alive` in the request.
21
+
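A sketch of the configuration the note refers to; the proxy address is a placeholder:

```js
import { ProxyAgent, setGlobalDispatcher } from 'undici'

// With a non-zero `connections` value, requests forwarded through the proxy
// also carry the non-standard `proxy-connection: keep-alive` header.
const proxyAgent = new ProxyAgent({
  uri: 'http://proxy.internal:8080',
  connections: 16
})
setGlobalDispatcher(proxyAgent)

const response = await fetch('https://api.example.com/data')
```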
20
22
  * **uri** `string | URL` (required) - The URI of the proxy server. This can be provided as a string, as an instance of the URL class, or as an object with a `uri` property of type string.
21
23
  If the `uri` is provided as a string or `uri` is an object with an `uri` property of type string, then it will be parsed into a `URL` object according to the [WHATWG URL Specification](https://url.spec.whatwg.org).
22
24
  For detailed information on the parsing process and potential validation errors, please refer to the ["Writing" section](https://url.spec.whatwg.org/#writing) of the WHATWG URL Specification.
package/docs/docs/api/RetryAgent.md CHANGED
@@ -16,6 +16,7 @@ Returns: `ProxyAgent`
16
16
 
17
17
  ### Parameter: `RetryHandlerOptions`
18
18
 
19
+ - **throwOnError** `boolean` (optional) - Disable to prevent throwing an error on the last retry attempt; useful if you need the error body from the server or have a custom error handler. Default: `true`
19
20
  - **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
20
21
  - **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
21
22
  - **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
@@ -39,7 +40,11 @@ import { Agent, RetryAgent } from 'undici'
39
40
 
40
41
  const agent = new RetryAgent(new Agent())
41
42
 
42
- const res = await agent.request('http://example.com')
43
+ const res = await agent.request({
44
+ method: 'GET',
45
+ origin: 'http://example.com',
46
+ path: '/',
47
+ })
43
48
  console.log(res.statusCode)
44
49
  console.log(await res.body.text())
45
50
  ```
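Building on the corrected example above, a sketch of the new `throwOnError` option from `RetryHandlerOptions`: with it disabled, the final failed attempt resolves as a normal response, so the server's error body stays readable (the path is a placeholder).

```js
import { Agent, RetryAgent } from 'undici'

const agent = new RetryAgent(new Agent(), {
  maxRetries: 2,
  throwOnError: false
})

const res = await agent.request({
  method: 'GET',
  origin: 'http://example.com',
  path: '/sometimes-503',
})

// Instead of a thrown error after the last retry, the failing response is returned.
console.log(res.statusCode)
console.log(await res.body.text())
```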
package/docs/docs/api/RetryHandler.md CHANGED
@@ -19,6 +19,7 @@ Extends: [`Dispatch.DispatchOptions`](/docs/docs/api/Dispatcher.md#parameter-dis
19
19
 
20
20
  #### `RetryOptions`
21
21
 
22
+ - **throwOnError** `boolean` (optional) - Disable to prevent throwing an error on the last retry attempt; useful if you need the error body from the server or have a custom error handler.
22
23
  - **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => number | null` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed.
23
24
  - **maxRetries** `number` (optional) - Maximum number of retries. Default: `5`
24
25
  - **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds)
package/index.js CHANGED
@@ -181,3 +181,18 @@ module.exports.mockErrors = mockErrors
181
181
  const { EventSource } = require('./lib/web/eventsource/eventsource')
182
182
 
183
183
  module.exports.EventSource = EventSource
184
+
185
+ function install () {
186
+ globalThis.fetch = module.exports.fetch
187
+ globalThis.Headers = module.exports.Headers
188
+ globalThis.Response = module.exports.Response
189
+ globalThis.Request = module.exports.Request
190
+ globalThis.FormData = module.exports.FormData
191
+ globalThis.WebSocket = module.exports.WebSocket
192
+ globalThis.CloseEvent = module.exports.CloseEvent
193
+ globalThis.ErrorEvent = module.exports.ErrorEvent
194
+ globalThis.MessageEvent = module.exports.MessageEvent
195
+ globalThis.EventSource = module.exports.EventSource
196
+ }
197
+
198
+ module.exports.install = install
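A quick consequence of the implementation above: the installed globals are the very same objects undici exports, so identity checks should hold.

```js
import { install, fetch as undiciFetch, WebSocket as UndiciWebSocket } from 'undici'

install()

console.log(globalThis.fetch === undiciFetch)         // true
console.log(globalThis.WebSocket === UndiciWebSocket) // true
```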
package/lib/api/api-stream.js CHANGED
@@ -117,7 +117,7 @@ class StreamHandler extends AsyncResource {
117
117
  const { callback, res, opaque, trailers, abort } = this
118
118
 
119
119
  this.res = null
120
- if (err || !res.readable) {
120
+ if (err || !res?.readable) {
121
121
  util.destroy(res, err)
122
122
  }
123
123
 
package/lib/cache/memory-cache-store.js CHANGED
@@ -16,9 +16,9 @@ const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
16
16
  * @extends {EventEmitter}
17
17
  */
18
18
  class MemoryCacheStore extends EventEmitter {
19
- #maxCount = Infinity
20
- #maxSize = Infinity
21
- #maxEntrySize = Infinity
19
+ #maxCount = 1024
20
+ #maxSize = 104857600 // 100MB
21
+ #maxEntrySize = 5242880 // 5MB
22
22
 
23
23
  #size = 0
24
24
  #count = 0
package/lib/cache/sqlite-cache-store.js CHANGED
@@ -1,6 +1,6 @@
1
1
  'use strict'
2
2
 
3
- const { Writable } = require('stream')
3
+ const { Writable } = require('node:stream')
4
4
  const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
5
5
 
6
6
  let DatabaseSync
package/lib/core/connect.js CHANGED
@@ -12,64 +12,34 @@ let tls // include tls conditionally since it is not always available
12
12
  // resolve the same servername multiple times even when
13
13
  // re-use is enabled.
14
14
 
15
- let SessionCache
16
- // FIXME: remove workaround when the Node bug is fixed
17
- // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308
18
- if (global.FinalizationRegistry && !(process.env.NODE_V8_COVERAGE || process.env.UNDICI_NO_FG)) {
19
- SessionCache = class WeakSessionCache {
20
- constructor (maxCachedSessions) {
21
- this._maxCachedSessions = maxCachedSessions
22
- this._sessionCache = new Map()
23
- this._sessionRegistry = new global.FinalizationRegistry((key) => {
24
- if (this._sessionCache.size < this._maxCachedSessions) {
25
- return
26
- }
27
-
28
- const ref = this._sessionCache.get(key)
29
- if (ref !== undefined && ref.deref() === undefined) {
30
- this._sessionCache.delete(key)
31
- }
32
- })
33
- }
34
-
35
- get (sessionKey) {
36
- const ref = this._sessionCache.get(sessionKey)
37
- return ref ? ref.deref() : null
38
- }
39
-
40
- set (sessionKey, session) {
41
- if (this._maxCachedSessions === 0) {
15
+ const SessionCache = class WeakSessionCache {
16
+ constructor (maxCachedSessions) {
17
+ this._maxCachedSessions = maxCachedSessions
18
+ this._sessionCache = new Map()
19
+ this._sessionRegistry = new FinalizationRegistry((key) => {
20
+ if (this._sessionCache.size < this._maxCachedSessions) {
42
21
  return
43
22
  }
44
23
 
45
- this._sessionCache.set(sessionKey, new WeakRef(session))
46
- this._sessionRegistry.register(session, sessionKey)
47
- }
48
- }
49
- } else {
50
- SessionCache = class SimpleSessionCache {
51
- constructor (maxCachedSessions) {
52
- this._maxCachedSessions = maxCachedSessions
53
- this._sessionCache = new Map()
54
- }
55
-
56
- get (sessionKey) {
57
- return this._sessionCache.get(sessionKey)
58
- }
59
-
60
- set (sessionKey, session) {
61
- if (this._maxCachedSessions === 0) {
62
- return
24
+ const ref = this._sessionCache.get(key)
25
+ if (ref !== undefined && ref.deref() === undefined) {
26
+ this._sessionCache.delete(key)
63
27
  }
28
+ })
29
+ }
64
30
 
65
- if (this._sessionCache.size >= this._maxCachedSessions) {
66
- // remove the oldest session
67
- const { value: oldestKey } = this._sessionCache.keys().next()
68
- this._sessionCache.delete(oldestKey)
69
- }
31
+ get (sessionKey) {
32
+ const ref = this._sessionCache.get(sessionKey)
33
+ return ref ? ref.deref() : null
34
+ }
70
35
 
71
- this._sessionCache.set(sessionKey, session)
36
+ set (sessionKey, session) {
37
+ if (this._maxCachedSessions === 0) {
38
+ return
72
39
  }
40
+
41
+ this._sessionCache.set(sessionKey, new WeakRef(session))
42
+ this._sessionRegistry.register(session, sessionKey)
73
43
  }
74
44
  }
75
45
 
package/lib/core/diagnostics.js CHANGED
@@ -16,6 +16,8 @@ const channels = {
16
16
  // Request
17
17
  create: diagnosticsChannel.channel('undici:request:create'),
18
18
  bodySent: diagnosticsChannel.channel('undici:request:bodySent'),
19
+ bodyChunkSent: diagnosticsChannel.channel('undici:request:bodyChunkSent'),
20
+ bodyChunkReceived: diagnosticsChannel.channel('undici:request:bodyChunkReceived'),
19
21
  headers: diagnosticsChannel.channel('undici:request:headers'),
20
22
  trailers: diagnosticsChannel.channel('undici:request:trailers'),
21
23
  error: diagnosticsChannel.channel('undici:request:error'),
@@ -85,7 +87,7 @@ function trackClientEvents (debugLog = undiciDebugLog) {
85
87
  const {
86
88
  request: { method, path, origin }
87
89
  } = evt
88
- debugLog('sending request to %s %s/%s', method, origin, path)
90
+ debugLog('sending request to %s %s%s', method, origin, path)
89
91
  })
90
92
  }
91
93
 
@@ -105,7 +107,7 @@ function trackRequestEvents (debugLog = undiciDebugLog) {
105
107
  response: { statusCode }
106
108
  } = evt
107
109
  debugLog(
108
- 'received response to %s %s/%s - HTTP %d',
110
+ 'received response to %s %s%s - HTTP %d',
109
111
  method,
110
112
  origin,
111
113
  path,
@@ -118,7 +120,7 @@ function trackRequestEvents (debugLog = undiciDebugLog) {
118
120
  const {
119
121
  request: { method, path, origin }
120
122
  } = evt
121
- debugLog('trailers received from %s %s/%s', method, origin, path)
123
+ debugLog('trailers received from %s %s%s', method, origin, path)
122
124
  })
123
125
 
124
126
  diagnosticsChannel.subscribe('undici:request:error',
@@ -128,7 +130,7 @@ function trackRequestEvents (debugLog = undiciDebugLog) {
128
130
  error
129
131
  } = evt
130
132
  debugLog(
131
- 'request to %s %s/%s errored - %s',
133
+ 'request to %s %s%s errored - %s',
132
134
  method,
133
135
  origin,
134
136
  path,
package/lib/core/request.js CHANGED
@@ -194,6 +194,9 @@
194
194
  }
195
195
 
196
196
  onBodySent (chunk) {
197
+ if (channels.bodyChunkSent.hasSubscribers) {
198
+ channels.bodyChunkSent.publish({ request: this, chunk })
199
+ }
197
200
  if (this[kHandler].onBodySent) {
198
201
  try {
199
202
  return this[kHandler].onBodySent(chunk)
@@ -252,6 +255,9 @@ class Request {
252
255
  assert(!this.aborted)
253
256
  assert(!this.completed)
254
257
 
258
+ if (channels.bodyChunkReceived.hasSubscribers) {
259
+ channels.bodyChunkReceived.publish({ request: this, chunk })
260
+ }
255
261
  try {
256
262
  return this[kHandler].onData(chunk)
257
263
  } catch (err) {