@defra-fish/fulfilment-job 1.61.0-rc.9 → 1.62.0-rc.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +5 -5
- package/src/__tests__/config.spec.js +10 -6
- package/src/config.js +1 -2
- package/src/transport/__tests__/s3.spec.js +49 -32
- package/src/transport/s3.js +1 -2
package/package.json
CHANGED

@@ -1,10 +1,10 @@
 {
   "name": "@defra-fish/fulfilment-job",
-  "version": "1.61.0-rc.9",
+  "version": "1.62.0-rc.0",
   "description": "Rod Licensing Sales Fulfilment Job",
   "type": "module",
   "engines": {
-    "node": ">=
+    "node": ">=20"
   },
   "keywords": [
     "rod",
@@ -35,8 +35,8 @@
     "test": "echo \"Error: run tests from root\" && exit 1"
   },
   "dependencies": {
-    "@defra-fish/connectors-lib": "1.
-    "@defra-fish/dynamics-lib": "1.
+    "@defra-fish/connectors-lib": "1.62.0-rc.0",
+    "@defra-fish/dynamics-lib": "1.62.0-rc.0",
     "commander": "^7.2.0",
     "debug": "^4.3.3",
     "merge2": "^1.4.1",
@@ -44,5 +44,5 @@
     "openpgp": "^5.0.0-1",
     "pluralize": "^8.0.0"
   },
-  "gitHead": "
+  "gitHead": "d56e1a0d0f4b22f1dded800c0ed511b737cb91a7"
 }

package/src/__tests__/config.spec.js
CHANGED

@@ -1,7 +1,14 @@
-import AwsMock from 'aws-sdk'
 import config from '../config.js'
 import { AWS } from '@defra-fish/connectors-lib'
-const { secretsManager } = AWS
+const { secretsManager } = AWS.mock.results[0].value
+
+jest.mock('@defra-fish/connectors-lib', () => ({
+  AWS: jest.fn(() => ({
+    secretsManager: {
+      getSecretValue: jest.fn(() => ({ SecretString: 'test-ssh-key' }))
+    }
+  }))
+}))
 
 const setEnvVars = () => {
   for (const envVar in envVars) {
@@ -24,9 +31,6 @@ const envVars = Object.freeze({
 
 describe('config', () => {
   beforeAll(async () => {
-    AwsMock.SecretsManager.__setResponse('getSecretValue', {
-      SecretString: 'test-ssh-key'
-    })
     setEnvVars()
     await config.initialise()
   })
@@ -48,7 +52,7 @@ describe('config', () => {
 
   describe('pgp config', () => {
     const init = async (samplePublicKey = 'sample-pgp-key') => {
-
+      secretsManager.getSecretValue.mockResolvedValueOnce({ SecretString: samplePublicKey })
       await config.initialise()
     }
     beforeAll(setEnvVars)

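The updated spec replaces the global aws-sdk mock with a Jest factory mock of the connectors-lib AWS export, then reaches the mocked secretsManager back out through Jest's mock.results. A minimal, self-contained sketch of that pattern follows; the shape of AWS as a factory returning service wrappers is taken from the mock factory in the diff above rather than from the real connectors-lib API, and the explicit AWS() call stands in for the side effect of importing ../config.js.

import { AWS } from '@defra-fish/connectors-lib'

// jest.mock is hoisted above the import, so every consumer of the module sees this factory.
jest.mock('@defra-fish/connectors-lib', () => ({
  AWS: jest.fn(() => ({
    secretsManager: {
      getSecretValue: jest.fn(() => ({ SecretString: 'test-ssh-key' }))
    }
  }))
}))

// In the real spec, importing ../config.js causes AWS() to be called; calling it here
// keeps the sketch self-contained. mock.results[0].value is whatever the first AWS()
// call returned, i.e. exactly the instance the code under test holds.
AWS()
const { secretsManager } = AWS.mock.results[0].value

it('lets a test queue a one-off secret value', async () => {
  secretsManager.getSecretValue.mockResolvedValueOnce({ SecretString: 'sample-pgp-key' })
  await expect(secretsManager.getSecretValue({ SecretId: 'any-id' })).resolves.toEqual({ SecretString: 'sample-pgp-key' })
})
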
package/src/config.js
CHANGED

@@ -32,8 +32,7 @@ class Config {
       bucket: process.env.FULFILMENT_S3_BUCKET
     }
     this._pgp = {
-      publicKey: (await secretsManager.getSecretValue({ SecretId: process.env.FULFILMENT_PGP_PUBLIC_KEY_SECRET_ID }).
-        .SecretString,
+      publicKey: (await secretsManager.getSecretValue({ SecretId: process.env.FULFILMENT_PGP_PUBLIC_KEY_SECRET_ID })).SecretString,
       sendUnencryptedFile: toBoolean(process.env.FULFILMENT_SEND_UNENCRYPTED_FILE)
     }
   }

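The reworked publicKey assignment awaits the wrapped getSecretValue call and reads SecretString directly off the resolved response (the removed continuation line is truncated in this diff, so only the new single-line form is fully visible). A small sketch of the shape that expression relies on, with a hypothetical stub standing in for the connectors-lib secretsManager wrapper:

// Hypothetical stub: the real wrapper comes from @defra-fish/connectors-lib and the
// secret id from FULFILMENT_PGP_PUBLIC_KEY_SECRET_ID.
const secretsManager = {
  getSecretValue: async ({ SecretId }) => ({ SecretString: `public-key-for-${SecretId}` })
}

const loadPgpPublicKey = async secretId =>
  // Mirrors the updated config.js expression: await the call, then read SecretString
  // off the resolved SecretsManager-style response.
  (await secretsManager.getSecretValue({ SecretId: secretId })).SecretString
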
package/src/transport/__tests__/s3.spec.js
CHANGED

@@ -1,28 +1,40 @@
 import { writeS3PartFile, readS3PartFiles, createS3WriteStream } from '../s3.js'
 import stream from 'stream'
-import AwsMock from 'aws-sdk'
 import { FulfilmentRequestFile } from '@defra-fish/dynamics-lib'
 import { fulfilmentDataTransformer } from '../../transform/fulfilment-transform.js'
+import { AWS } from '@defra-fish/connectors-lib'
+const { s3 } = AWS.mock.results[0].value
 
-jest.mock('fs')
 jest.mock('stream')
 jest.mock('../../config.js', () => ({
   s3: {
     bucket: 'testbucket'
   }
 }))
+jest.mock('@defra-fish/connectors-lib', () => ({
+  AWS: jest.fn(() => ({
+    s3: {
+      getObject: jest.fn(() => ({
+        createReadStream: jest.fn(() => ({
+          setEncoding: jest.fn()
+        }))
+      })),
+      listObjectsV2: jest.fn(async () => ({ Contents: [] })),
+      upload: jest.fn(async () => ({ Location: 'The subterranean server room' }))
+    }
+  }))
+}))
 
 describe('s3', () => {
   beforeEach(() => {
     jest.clearAllMocks()
-    AwsMock.__resetAll()
   })
 
   describe('writeS3PartFile', () => {
     it('writes a part file to S3', async () => {
       const testFile = Object.assign(new FulfilmentRequestFile(), { fileName: 'example.json' })
       const mockDataArray = []
-
+      s3.upload.mockResolvedValueOnce({ Location: 'example.json/part0' })
       stream.pipeline.mockImplementation(
         jest.fn((streams, callback) => {
           expect(streams[0]).toStrictEqual(mockDataArray)
@@ -33,7 +45,7 @@ describe('s3', () => {
       )
       await writeS3PartFile(testFile, 0, [])
       expect(stream.pipeline).toHaveBeenCalled()
-      expect(
+      expect(s3.upload).toHaveBeenCalledWith({
         Bucket: 'testbucket',
         Key: 'example.json/part0',
         Body: expect.any(stream.PassThrough)
@@ -42,47 +54,56 @@ describe('s3', () => {
   })
 
   describe('readS3PartFiles', () => {
-
-
-
-
+    describe('reads all part files for a given file and returns a stream for each', () => {
+      beforeEach(() => {
+        s3.listObjectsV2.mockResolvedValueOnce({
+          Contents: [{ Key: '/example.json/part0' }, { Key: '/example.json/part1' }]
+        })
       })
-      AwsMock.S3.mockedMethods.getObject.mockImplementation(() => ({ createReadStream: mockCreateReadStream }))
 
-
-
+      it('first stream matches createReadStream result from s3 object', async () => {
+        const testFile = Object.assign(new FulfilmentRequestFile(), { fileName: 'example.json' })
+        const [stream1] = await readS3PartFiles(testFile)
+        expect(s3.getObject.mock.results[0].value.createReadStream.mock.results[0].value).toBe(stream1)
+      })
+
+      it('second stream matches createReadStream result from s3 object', async () => {
+        const testFile = Object.assign(new FulfilmentRequestFile(), { fileName: 'example.json' })
+        const stream2 = (await readS3PartFiles(testFile))[1]
+        expect(s3.getObject.mock.results[1].value.createReadStream.mock.results[0].value).toBe(stream2)
+      })
 
-
-
-
-
+      it('makes first call to s3.getObject with bucket details and first part file details', async () => {
+        const testFile = Object.assign(new FulfilmentRequestFile(), { fileName: 'example.json' })
+        await readS3PartFiles(testFile)
+        expect(s3.getObject).toHaveBeenNthCalledWith(1, { Bucket: 'testbucket', Key: '/example.json/part0' })
+      })
+
+      it('makes second call to s3.getObject with bucket details and second part file details', async () => {
+        const testFile = Object.assign(new FulfilmentRequestFile(), { fileName: 'example.json' })
+        await readS3PartFiles(testFile)
+        expect(s3.getObject).toHaveBeenNthCalledWith(2, { Bucket: 'testbucket', Key: '/example.json/part1' })
+      })
     })
 
     it('sets encoding on readable stream', async () => {
-
-      AwsMock.S3.__setResponse('listObjectsV2', {
+      s3.listObjectsV2.mockResolvedValueOnce({
        Contents: [{ Key: '/example.json/part0' }]
      })
-      AwsMock.S3.mockedMethods.getObject.mockImplementation(() => ({ createReadStream: mockCreateReadStream }))
      const testFile = Object.assign(new FulfilmentRequestFile(), { fileName: 'example.json' })
      const [readStream] = await readS3PartFiles(testFile)
      expect(readStream.setEncoding).toHaveBeenCalledWith('utf8')
    })
-
-    const createMockReadStream = () =>
-      jest.fn(() => ({
-        setEncoding: jest.fn()
-      }))
   })
 
   describe('createS3WriteStream', () => {
     it('creates a writable stream to an object in S3', async () => {
-
+      s3.upload.mockResolvedValueOnce({ Location: 'example/key' })
       const passThroughEmitSpy = jest.spyOn(stream.PassThrough.prototype, 'emit')
       const { s3WriteStream, managedUpload } = createS3WriteStream('example/key')
       await expect(managedUpload).resolves.toBeUndefined()
 
-      expect(
+      expect(s3.upload).toHaveBeenCalledWith({
        Bucket: 'testbucket',
        Key: 'example/key',
        Body: expect.any(stream.PassThrough)
@@ -92,14 +113,10 @@ describe('s3', () => {
    })
 
    it('rejects the managed upload promise if an error occurs uploading', async () => {
-
-        promise: jest.fn(async () => {
-          throw new Error('Test error')
-        })
-      }))
+      s3.upload.mockRejectedValueOnce(new Error('Test error'))
      const { s3WriteStream, managedUpload } = createS3WriteStream('example/key')
      await expect(managedUpload).rejects.toThrow('Test error')
-      expect(
+      expect(s3.upload).toHaveBeenCalledWith({
        Bucket: 'testbucket',
        Key: 'example/key',
        Body: s3WriteStream

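The rewritten s3 spec drives the mocked wrapper with mockResolvedValueOnce and mockRejectedValueOnce, then asserts on individual calls via mock.results chains and toHaveBeenNthCalledWith. The sketch below shows why the nested jest.fn factory makes those mock.results chains work; the s3 object here is purely illustrative and only mirrors the mock declared in the spec above.

const s3 = {
  getObject: jest.fn(() => ({
    createReadStream: jest.fn(() => ({ setEncoding: jest.fn() }))
  }))
}

it('exposes the exact stream handed out for each getObject call', () => {
  // Stands in for what readS3PartFiles does for two part files.
  const streamA = s3.getObject({ Bucket: 'testbucket', Key: '/example.json/part0' }).createReadStream()
  const streamB = s3.getObject({ Bucket: 'testbucket', Key: '/example.json/part1' }).createReadStream()

  // Every jest.fn records its return values in mock.results, so the test can walk
  // getObject -> first returned object -> its createReadStream -> first returned stream.
  expect(s3.getObject.mock.results[0].value.createReadStream.mock.results[0].value).toBe(streamA)
  expect(s3.getObject.mock.results[1].value.createReadStream.mock.results[0].value).toBe(streamB)

  // toHaveBeenNthCalledWith pins the arguments of one specific call.
  expect(s3.getObject).toHaveBeenNthCalledWith(1, { Bucket: 'testbucket', Key: '/example.json/part0' })
})
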
package/src/transport/s3.js
CHANGED

@@ -30,7 +30,7 @@ export const writeS3PartFile = async (fulfilmentRequestFile, partNumber, data) =
  * @returns {Array<stream.Readable>} Readable streams for each of the part files
  */
 export async function readS3PartFiles (fulfilmentRequestFile) {
-  const { Contents: files } = await s3.listObjectsV2({ Bucket: config.s3.bucket, Prefix: `${fulfilmentRequestFile.fileName}/` })
+  const { Contents: files } = await s3.listObjectsV2({ Bucket: config.s3.bucket, Prefix: `${fulfilmentRequestFile.fileName}/` })
   return files
     .filter(f => /part\d+$/.test(f.Key))
     .map(f => {
@@ -51,7 +51,6 @@ export const createS3WriteStream = key => {
     s3WriteStream: passThrough,
     managedUpload: s3
       .upload({ Bucket: config.s3.bucket, Key: key, Body: passThrough })
-      .promise()
       .then(data => debug(`File successfully uploaded to S3 at ${data.Location}`))
   }
 }

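With the .promise() step gone, createS3WriteStream chains .then(...) straight onto the wrapped s3.upload call, so the wrapper is expected to return a promise directly (which is also how the updated spec stubs it). A minimal sketch of that structure follows; the client and bucket are passed in as parameters purely so the example is self-contained, whereas the real function takes only a key and uses the module-level s3 wrapper and config.

import { PassThrough } from 'stream'

// Sketch only: s3Client stands in for the connectors-lib s3 wrapper and debug for
// the package's debug logger.
const createS3WriteStream = (s3Client, bucket, key, debug = console.debug) => {
  const passThrough = new PassThrough()
  return {
    s3WriteStream: passThrough,
    managedUpload: s3Client
      .upload({ Bucket: bucket, Key: key, Body: passThrough }) // returns a promise directly
      .then(data => debug(`File successfully uploaded to S3 at ${data.Location}`))
  }
}

// Usage with a stubbed client that resolves immediately and ignores the stream body.
const fakeS3 = { upload: async () => ({ Location: 's3://testbucket/example/key' }) }
const { s3WriteStream, managedUpload } = createS3WriteStream(fakeS3, 'testbucket', 'example/key')
s3WriteStream.end('file contents')
await managedUpload // top-level await is valid here because the package is an ES module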