@defra-fish/connectors-lib 1.59.0 → 1.60.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@defra-fish/connectors-lib",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.60.0-rc.1",
|
|
4
4
|
"description": "Shared connectors",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"engines": {
|
|
@@ -37,9 +37,10 @@
|
|
|
37
37
|
"@airbrake/node": "^2.1.7",
|
|
38
38
|
"aws-sdk": "^2.1074.0",
|
|
39
39
|
"debug": "^4.3.3",
|
|
40
|
+
"http-status-codes": "^2.3.0",
|
|
40
41
|
"ioredis": "^4.28.5",
|
|
41
42
|
"node-fetch": "^2.6.7",
|
|
42
43
|
"redlock": "^4.2.0"
|
|
43
44
|
},
|
|
44
|
-
"gitHead": "
|
|
45
|
+
"gitHead": "788bfa677a11c509bc44e31f98bd12e98d3f57b4"
|
|
45
46
|
}
|
|
@@ -0,0 +1,249 @@
|
|
|
1
|
+
import HTTPRequestBatcher from '../http-request-batcher.js'
|
|
2
|
+
import fetch from 'node-fetch'
|
|
3
|
+
import db from 'debug'
|
|
4
|
+
const [{ value: debug }] = db.mock.results
|
|
5
|
+
|
|
6
|
+
jest.mock('node-fetch', () => jest.fn(() => ({ status: 200 })))
|
|
7
|
+
jest.mock('debug', () => jest.fn(() => jest.fn()))
|
|
8
|
+
|
|
9
|
+
describe('HTTP Request Batcher', () => {
  beforeEach(jest.clearAllMocks)

  it('initialises debug with the expected namespace', () => {
    jest.isolateModules(() => {
      const debug = require('debug')
      require('../http-request-batcher.js')
      expect(debug).toHaveBeenCalledWith('connectors:http-request-batcher')
    })
  })

  it('initialises with a default batch size of 50', () => {
    const batcher = new HTTPRequestBatcher()
    expect(batcher.batchSize).toBe(50)
  })

  it('initialises with a default delay of 1000ms', () => {
    const batcher = new HTTPRequestBatcher()
    expect(batcher.delay).toBe(1000)
  })

  it('initialises with an empty request queue', () => {
    const batcher = new HTTPRequestBatcher()
    expect(batcher.requestQueue).toEqual([])
  })

  it('initialises with an empty response queue', () => {
    const batcher = new HTTPRequestBatcher()
    expect(batcher.responses).toEqual([])
  })

  it('initialises with a custom batch size', () => {
    const batcher = new HTTPRequestBatcher({ batchSize: 5 })
    expect(batcher.batchSize).toBe(5)
  })

  it('adds a request to the queue', () => {
    const batcher = new HTTPRequestBatcher()

    batcher.addRequest('https://api-one.example.com', { method: 'GET' })
    batcher.addRequest('https://api-b.example.com', { method: 'POST' })
    batcher.addRequest('https://api-three.example.com', { method: 'PUT' })

    expect(batcher.requestQueue).toEqual([
      { url: 'https://api-one.example.com', options: { method: 'GET' } },
      { url: 'https://api-b.example.com', options: { method: 'POST' } },
      { url: 'https://api-three.example.com', options: { method: 'PUT' } }
    ])
  })

  it('throws an error if url is not provided when adding a request', () => {
    const batcher = new HTTPRequestBatcher()
    expect(() => batcher.addRequest()).toThrow('URL is required')
  })

  it('calls fetch for each item in the queue', async () => {
    // FIX: the constructor takes an options object, not a bare number —
    // `new HTTPRequestBatcher(3)` silently fell back to the default batch size of 50
    const batcher = new HTTPRequestBatcher({ batchSize: 3 })

    batcher.addRequest('https://api-one.example.com', { method: 'GET' })
    batcher.addRequest('https://api-b.example.com', { method: 'POST' })
    batcher.addRequest('https://api-three.example.com', { method: 'PUT' })
    await batcher.fetch()

    expect(fetch).toHaveBeenCalledTimes(3)
    expect(fetch).toHaveBeenNthCalledWith(1, 'https://api-one.example.com', { method: 'GET' })
    expect(fetch).toHaveBeenNthCalledWith(2, 'https://api-b.example.com', { method: 'POST' })
    expect(fetch).toHaveBeenNthCalledWith(3, 'https://api-three.example.com', { method: 'PUT' })
  })

  it('makes multiple requests in parallel', () => {
    const batcher = new HTTPRequestBatcher({ batchSize: 2 })
    const url = 'https://api.example.com'
    const options = { method: 'GET' }

    for (let x = 0; x < 2; x++) {
      fetch.mockImplementationOnce(() => new Promise(() => {})) // return unresolved promises so we can check for parallel calls
      batcher.addRequest(url, options)
    }
    batcher.fetch()

    expect(fetch).toHaveBeenCalledTimes(2)
  })

  it('populates responses property after fetch succeeds', async () => {
    const batcher = new HTTPRequestBatcher({ batchSize: 2 })
    const url = 'https://api.example.com'
    const options = { method: 'GET' }

    for (let x = 0; x < 2; x++) {
      batcher.addRequest(url, options)
    }
    await batcher.fetch()

    expect(batcher.responses).toEqual([{ status: 200 }, { status: 200 }])
  })

  describe('multiple batches', () => {
    beforeEach(() => {
      jest.useFakeTimers()
      jest.spyOn(global, 'setTimeout')
    })

    afterEach(() => {
      jest.useRealTimers()
    })

    // batchSize of 1 guarantees the two queued requests span two batches
    const setupBatcherAndAddRequest = (batcherArgs = {}) => {
      const batcher = new HTTPRequestBatcher({ batchSize: 1, ...batcherArgs })
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://alt-api.example.com')
      return batcher
    }

    it.each([1000, 100, 380, 4826])('delays for %ims between batches', async delay => {
      const batcher = setupBatcherAndAddRequest({ delay })
      batcher.fetch()
      await Promise.all(fetch.mock.results.map(r => r.value))
      expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), delay)
    })

    it("second fetch isn't made immediately", async () => {
      const batcher = setupBatcherAndAddRequest()
      batcher.fetch()
      await Promise.all(fetch.mock.results.map(r => r.value))
      expect(fetch).not.toHaveBeenCalledWith('https://alt-api.example.com', expect.any(Object))
    })

    it('second batch is fetched after a one second delay', async () => {
      const batcher = setupBatcherAndAddRequest()
      const fetchPromise = batcher.fetch()
      await Promise.all(fetch.mock.results.map(r => r.value))
      jest.advanceTimersByTime(1000)
      await fetchPromise
      expect(fetch).toHaveBeenCalledTimes(2)
      expect(fetch).toHaveBeenCalledWith('https://alt-api.example.com', undefined)
    })

    it("doesn't pause if it's the last batch", async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 1 })
      batcher.addRequest('https://api.example.com')
      await batcher.fetch()
      expect(setTimeout).not.toHaveBeenCalled()
    })

    it("sends final batch if it doesn't form a full batch", async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 2 })
      for (let x = 0; x < 3; x++) {
        batcher.addRequest('https://api.example.com')
      }
      const batchPromise = batcher.fetch()
      await Promise.all(fetch.mock.results.map(r => r.value))
      jest.runAllTimers()
      await batchPromise
      expect(fetch).toHaveBeenCalledTimes(3)
    })

    it('stores all responses', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 1 })
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://alt-api.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      expect(batcher.responses).toEqual([{ status: 200 }, { status: 200 }])
    })

    it('retries requests that received a 429 response', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 1 })
      fetch.mockImplementationOnce(() => ({ status: 429 }))
      batcher.addRequest('https://api.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      expect(fetch).toHaveBeenCalledTimes(2)
    })

    it('retries requests with the same options as the original request', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 3 })
      fetch.mockResolvedValueOnce({ status: 200 }).mockResolvedValueOnce({ status: 429 })
      batcher.addRequest('https://api.example.com')
      const sampleOptions = { method: 'POST', body: Symbol('body') }
      batcher.addRequest('https://alt-api.example.com', sampleOptions)
      batcher.addRequest('https://api-three.example.com')
      batcher.addRequest('https://api-four.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      // fifth fetch call is the retried request — it must carry the original options
      expect(fetch).toHaveBeenNthCalledWith(5, 'https://alt-api.example.com', sampleOptions)
    })

    it('adjusts batch size if a 429 response is received', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 3 })
      fetch.mockImplementationOnce(() => ({ status: 429 }))
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://alt-api.example.com')
      batcher.addRequest('https://api-three.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      expect(batcher.batchSize).toBe(2)
    })

    it('logs if batch size is reduced', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 3 })
      fetch.mockImplementationOnce(() => ({ status: 429 }))
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://alt-api.example.com')
      batcher.addRequest('https://api-three.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      expect(debug).toHaveBeenCalledWith('429 response received for https://api.example.com, reducing batch size to 2')
    })

    it('logs at start of fetch', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 3 })
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://api.example.com')
      batcher.addRequest('https://api.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      expect(debug).toHaveBeenCalledWith(
        'Beginning batched fetch of 4 requests with initial batch size of 3 and delay between batches of 1000ms'
      )
    })

    it("doesn't reduce batch size below 1", async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 1 })
      fetch.mockImplementationOnce(() => ({ status: 429 }))
      batcher.addRequest('https://api.example.com')
      global.setTimeout.mockImplementationOnce(cb => cb())
      await batcher.fetch()
      expect(batcher.batchSize).toBe(1)
    })

    it('only retry once if a 429 response is received again', async () => {
      const batcher = new HTTPRequestBatcher({ batchSize: 1 })
      fetch.mockResolvedValueOnce({ status: 429 }).mockResolvedValueOnce({ status: 429 })
      batcher.addRequest('https://api.example.com')
      global.setTimeout.mockImplementation(cb => cb())
      await batcher.fetch()
      expect(fetch).toHaveBeenCalledTimes(2)
    })
  })
})
|
package/src/connectors.js
CHANGED
|
@@ -3,4 +3,5 @@ import * as salesApi from './sales-api-connector.js'
|
|
|
3
3
|
import * as airbrake from './airbrake.js'
|
|
4
4
|
import * as govUkPayApi from './govuk-pay-api.js'
|
|
5
5
|
import { DistributedLock } from './distributed-lock.js'
|
|
6
|
-
|
|
6
|
+
import HTTPRequestBatcher from './http-request-batcher.js'
|
|
7
|
+
export { AWS, salesApi, govUkPayApi, DistributedLock, airbrake, HTTPRequestBatcher }
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import fetch from 'node-fetch'
|
|
2
|
+
import db from 'debug'
|
|
3
|
+
import { StatusCodes } from 'http-status-codes'
|
|
4
|
+
|
|
5
|
+
const debug = db('connectors:http-request-batcher')
|
|
6
|
+
/**
 * Dispatches queued HTTP requests in fixed-size batches via node-fetch,
 * pausing between batches and retrying rate-limited (429) requests once
 * while shrinking the batch size.
 */
export default class HTTPRequestBatcher {
  #batchSize
  #delay
  #requests = []
  #responses = []

  /**
   * @param {Object} [options]
   * @param {number} [options.batchSize=50] - maximum number of requests dispatched concurrently per batch
   * @param {number} [options.delay=1000] - pause in milliseconds between batches
   */
  constructor ({ batchSize = 50, delay = 1000 } = {}) {
    this.#batchSize = batchSize
    this.#delay = delay
  }

  get batchSize () {
    return this.#batchSize
  }

  get requestQueue () {
    return this.#requests
  }

  get responses () {
    return this.#responses
  }

  get delay () {
    return this.#delay
  }

  /**
   * Queue a request for dispatch by the next call to fetch().
   *
   * @param {string} url - target URL (required)
   * @param {Object} [options] - options forwarded verbatim to node-fetch
   * @throws {Error} if url is not provided
   */
  addRequest (url, options) {
    if (!url) {
      throw new Error('URL is required')
    }
    this.#requests.push({ url, options })
  }

  /**
   * Await the in-flight batch, record its responses and requeue any
   * rate-limited requests (one retry per request), then pause before the
   * next batch. Mutates all three array arguments in place.
   */
  async _sendBatch (fetchRequests, sentRequests, requestQueue) {
    const batchResponses = await Promise.all(fetchRequests)
    this.#responses.push(...batchResponses)
    for (let x = 0; x < batchResponses.length; x++) {
      const response = batchResponses[x]
      // retry a 429 at most once, easing off by shrinking the batch size (never below 1)
      if (response.status === StatusCodes.TOO_MANY_REQUESTS && sentRequests[x].attempts < 2) {
        requestQueue.push({ ...sentRequests[x], attempts: sentRequests[x].attempts + 1 })
        this.#batchSize = Math.max(this.#batchSize - 1, 1)
        debug(`429 response received for ${sentRequests[x].url}, reducing batch size to ${this.#batchSize}`)
      }
    }
    fetchRequests.length = 0
    sentRequests.length = 0
    if (requestQueue.length) {
      // don't wait if this is the last batch
      await new Promise(resolve => setTimeout(resolve, this.#delay))
    }
  }

  /**
   * Dispatch all queued requests in batches of #batchSize, collecting every
   * response on #responses.
   */
  async fetch () {
    debug(
      `Beginning batched fetch of ${this.#requests.length} requests with initial batch size of ${
        this.#batchSize
      } and delay between batches of ${this.#delay}ms`
    )
    const requestQueue = [...this.#requests]
    const sentRequests = []
    const fetchRequests = []
    while (requestQueue.length) {
      const request = requestQueue.shift()
      fetchRequests.push(fetch(request.url, request.options))
      // spread after the default so a requeued request keeps its incremented attempts count
      sentRequests.push({ attempts: 1, ...request })
      // FIX: also flush when the queue empties — previously a trailing partial
      // batch was dispatched but never awaited, so its responses were never
      // stored on #responses and its 429s were never retried.
      if (fetchRequests.length >= this.#batchSize || !requestQueue.length) {
        await this._sendBatch(fetchRequests, sentRequests, requestQueue)
      }
    }
    debug('Batched fetch complete')
  }
}
|