@squiz/db-lib 1.72.0 → 1.74.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/lib/dynamodb/AbstractDynamoDbRepository.d.ts +13 -6
- package/lib/dynamodb/AbstractDynamoDbRepository.d.ts.map +1 -1
- package/lib/dynamodb/AbstractDynamoDbRepository.js +84 -25
- package/lib/dynamodb/AbstractDynamoDbRepository.js.map +1 -1
- package/lib/dynamodb/AbstractDynamoDbRepository.spec.d.ts.map +1 -1
- package/lib/dynamodb/AbstractDynamoDbRepository.spec.js +382 -2
- package/lib/dynamodb/AbstractDynamoDbRepository.spec.js.map +1 -1
- package/package.json +1 -1
- package/src/dynamodb/AbstractDynamoDbRepository.spec.ts +415 -2
- package/src/dynamodb/AbstractDynamoDbRepository.ts +100 -26
- package/tsconfig.tsbuildinfo +1 -1
@@ -16,6 +16,8 @@ import {
   TransactWriteCommandInput,
   BatchGetCommand,
   BatchGetCommandInput,
+  BatchWriteCommand,
+  BatchWriteCommandInput,
 } from '@aws-sdk/lib-dynamodb';
 import { ConditionalCheckFailedException } from '@aws-sdk/client-dynamodb';
 import { DynamoDbManager, Transaction } from './DynamoDbManager';
@@ -230,6 +232,8 @@ describe('AbstractRepository', () => {
         Attributes: {
           name: 'foo',
           age: 99,
+          // country attribute is part of gsi key
+          // hence updating this will also update gsi key value
           country: 'au-updated',
           data: {},
         },
@@ -237,12 +241,14 @@ describe('AbstractRepository', () => {
       const input: UpdateCommandInput = {
         TableName: TABLE_NAME,
         Key: { pk: 'test_item#foo', sk: '#meta' },
-        UpdateExpression: 'SET #country = :country',
+        UpdateExpression: 'SET #country = :country, #gsi1_pk = :gsi1_pk',
         ExpressionAttributeNames: {
           '#country': 'country',
+          '#gsi1_pk': 'gsi1_pk',
         },
         ExpressionAttributeValues: {
           ':country': 'au-updated',
+          ':gsi1_pk': 'country#au-updated',
         },
         ConditionExpression: `attribute_exists(pk)`,
       };
@@ -263,6 +269,60 @@ describe('AbstractRepository', () => {
       );
     });

+    it('should only update the changed attributes', async () => {
+      ddbClientMock.on(GetCommand).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        Item: {
+          name: 'foo',
+          age: 99,
+          country: 'au',
+          data: {},
+        },
+      });
+      ddbClientMock.on(UpdateCommand).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        Attributes: {
+          name: 'foo',
+          age: 99,
+          country: 'au',
+          data: { active: true },
+        },
+      });
+      const input: UpdateCommandInput = {
+        TableName: TABLE_NAME,
+        Key: { pk: 'test_item#foo', sk: '#meta' },
+        UpdateExpression: 'SET #data = :data',
+        ExpressionAttributeNames: {
+          '#data': 'data',
+        },
+        ExpressionAttributeValues: {
+          ':data': { active: true },
+        },
+        ConditionExpression: `attribute_exists(pk)`,
+      };
+
+      const updateItem = {
+        name: 'foo',
+        age: 99,
+        // this is the only change attribute value
+        data: { active: true },
+      };
+      const result = await repository.updateItem(updateItem);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(2, UpdateCommand, input);
+      expect(result).toEqual(
+        new TestItem({
+          name: 'foo',
+          age: 99,
+          country: 'au',
+          data: { active: true },
+        }),
+      );
+    });
+
     it('should not trigger update request if the input attributes are same as in the existing item', async () => {
       ddbClientMock.on(GetCommand).resolves({
         $metadata: {
@@ -557,6 +617,156 @@ describe('AbstractRepository', () => {
       ]);
     });

+    it('should retry if unprocessed keys returned', async () => {
+      const input1: BatchGetCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: {
+            Keys: [
+              { pk: 'test_item#foo', sk: '#meta' },
+              { pk: 'test_item#foo2', sk: '#meta' },
+            ],
+          },
+        },
+      };
+      const input2: BatchGetCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: {
+            Keys: [{ pk: 'test_item#foo2', sk: '#meta' }],
+          },
+        },
+      };
+
+      ddbClientMock.on(BatchGetCommand, input1).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        Responses: {
+          [TABLE_NAME]: [
+            {
+              name: 'foo',
+              age: 99,
+              country: 'au',
+              data: {},
+              data2: '{"foo":"bar","num":123}',
+            },
+          ],
+        },
+        UnprocessedKeys: {
+          [TABLE_NAME]: {
+            Keys: [{ pk: 'test_item#foo2', sk: '#meta' }],
+          },
+        },
+      });
+
+      ddbClientMock.on(BatchGetCommand, input2).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        Responses: {
+          [TABLE_NAME]: [
+            {
+              name: 'foo2',
+              age: 999,
+              country: 'au',
+              data: {},
+              data2: '{"foo":"bar","num":123}',
+            },
+          ],
+        },
+        UnprocessedKeys: {},
+      });
+
+      const requestItems = [{ name: 'foo' }, { name: 'foo2' }];
+      const result = await repository.getItems(requestItems);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(1, BatchGetCommand, input1);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(2, BatchGetCommand, input2);
+      expect(result).toEqual([
+        new TestItem({
+          name: 'foo',
+          age: 99,
+          country: 'au',
+          data: {},
+          data2: {
+            foo: 'bar',
+            num: 123,
+          },
+        }),
+        new TestItem({
+          name: 'foo2',
+          age: 999,
+          country: 'au',
+          data: {},
+          data2: {
+            foo: 'bar',
+            num: 123,
+          },
+        }),
+      ]);
+    });
+
+    it('should fail after max retries for unprocessed keys', async () => {
+      const input1: BatchGetCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: {
+            Keys: [
+              { pk: 'test_item#foo', sk: '#meta' },
+              { pk: 'test_item#foo2', sk: '#meta' },
+            ],
+          },
+        },
+      };
+      const input2: BatchGetCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: {
+            Keys: [{ pk: 'test_item#foo2', sk: '#meta' }],
+          },
+        },
+      };
+
+      ddbClientMock.on(BatchGetCommand, input1).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        Responses: {
+          [TABLE_NAME]: [
+            {
+              name: 'foo',
+              age: 99,
+              country: 'au',
+              data: {},
+              data2: '{"foo":"bar","num":123}',
+            },
+          ],
+        },
+        UnprocessedKeys: {
+          [TABLE_NAME]: {
+            Keys: [{ pk: 'test_item#foo2', sk: '#meta' }],
+          },
+        },
+      });
+
+      ddbClientMock.on(BatchGetCommand, input2).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        Responses: {},
+        UnprocessedKeys: {
+          [TABLE_NAME]: {
+            Keys: [{ pk: 'test_item#foo2', sk: '#meta' }],
+          },
+        },
+      });
+
+      const requestItems = [{ name: 'foo' }, { name: 'foo2' }];
+      await expect(repository.getItems(requestItems)).rejects.toEqual(
+        new Error('Maximum allowed retries exceeded for unprocessed items'),
+      );
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(1, BatchGetCommand, input1);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(2, BatchGetCommand, input2);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(3, BatchGetCommand, input2);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(4, BatchGetCommand, input2);
+    });
+
     it('should request BatchGetItem in batch of 100 items to get result', async () => {
       ddbClientMock.on(BatchGetCommand).resolves({
         $metadata: {
@@ -608,6 +818,207 @@ describe('AbstractRepository', () => {
       });
     });

+  describe('deleteItems()', () => {
+    it('should use batchWrite() to get result', async () => {
+      ddbClientMock.on(BatchWriteCommand).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        ItemCollectionMetrics: {
+          [TABLE_NAME]: [{}],
+        },
+      });
+      const input: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo', sk: '#meta' },
+              },
+            },
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      };
+
+      const requestItems = [{ name: 'foo' }, { name: 'foo2' }];
+      await repository.deleteItems(requestItems);
+      expect(ddbClientMock).toHaveReceivedCommandWith(BatchWriteCommand, input);
+    });
+
+    it('should use re-try if unprocessed items returned', async () => {
+      const input1: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo', sk: '#meta' },
+              },
+            },
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      };
+      const input2: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      };
+
+      ddbClientMock.on(BatchWriteCommand, input1).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        UnprocessedItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      });
+      ddbClientMock.on(BatchWriteCommand, input2).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        UnprocessedItems: {},
+      });
+
+      const requestItems = [{ name: 'foo' }, { name: 'foo2' }];
+      await repository.deleteItems(requestItems);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(1, BatchWriteCommand, input1);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(2, BatchWriteCommand, input2);
+    });
+
+    it('should fail after max number of retries', async () => {
+      const input1: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo', sk: '#meta' },
+              },
+            },
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      };
+      const input2: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      };
+
+      ddbClientMock.on(BatchWriteCommand).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+        UnprocessedItems: {
+          [TABLE_NAME]: [
+            {
+              DeleteRequest: {
+                Key: { pk: 'test_item#foo2', sk: '#meta' },
+              },
+            },
+          ],
+        },
+      });
+
+      const requestItems = [{ name: 'foo' }, { name: 'foo2' }];
+      await expect(repository.deleteItems(requestItems)).rejects.toEqual(
+        new Error('Maximum allowed retries exceeded for unprocessed items'),
+      );
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(1, BatchWriteCommand, input1);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(2, BatchWriteCommand, input2);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(3, BatchWriteCommand, input2);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(4, BatchWriteCommand, input2);
+    });
+
+    it('should request batchWrite in batch of 25 items to get result', async () => {
+      ddbClientMock.on(BatchWriteCommand).resolves({
+        $metadata: {
+          httpStatusCode: 200,
+        },
+      });
+
+      const requestItems = [];
+      for (let i = 0; i < 30; i++) {
+        requestItems.push({ name: `foo${i}` });
+      }
+      // keys for first batch request
+      const keys1 = [];
+      for (let i = 0; i < 25; i++) {
+        keys1.push({ pk: `test_item#foo${i}`, sk: '#meta' });
+      }
+      // keys for second batch request
+      const keys2 = [];
+      for (let i = 25; i < 30; i++) {
+        keys2.push({ pk: `test_item#foo${i}`, sk: '#meta' });
+      }
+
+      const input1: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: keys1.map((key) => {
+            return {
+              DeleteRequest: {
+                Key: key,
+              },
+            };
+          }),
+        },
+      };
+      const input2: BatchWriteCommandInput = {
+        RequestItems: {
+          [TABLE_NAME]: keys2.map((key) => {
+            return {
+              DeleteRequest: {
+                Key: key,
+              },
+            };
+          }),
+        },
+      };
+
+      await repository.deleteItems(requestItems);
+      expect(ddbClientMock).toHaveReceivedCommandTimes(BatchWriteCommand, 2);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(1, BatchWriteCommand, input1);
+      expect(ddbClientMock).toHaveReceivedNthCommandWith(2, BatchWriteCommand, input2);
+    });
+
+    it('should throw error if any input item does not includes key field(s)', async () => {
+      const requestItems = [{ name: 'foo' }, { age: 22 }];
+      await expect(repository.deleteItems(requestItems)).rejects.toEqual(
+        new MissingKeyValuesError('Key field "name" must be specified in the input item in entity test-item-entity'),
+      );
+    });
+  });
+
   describe('queryItems()', () => {
     it('should return the items if found', async () => {
       ddbClientMock.on(QueryCommand).resolves({
@@ -1038,16 +1449,18 @@ describe('AbstractRepository', () => {
             ConditionExpression: 'attribute_exists(pk)',
             ExpressionAttributeNames: {
               '#age': 'age',
+              '#gsi1_sk': 'gsi1_sk',
             },
             ExpressionAttributeValues: {
               ':age': 55,
+              ':gsi1_sk': 'age#55',
            },
             Key: {
               pk: 'test_item#foo2',
               sk: '#meta',
             },
             TableName: 'test-table',
-            UpdateExpression: 'SET #age = :age',
+            UpdateExpression: 'SET #age = :age, #gsi1_sk = :gsi1_sk',
           },
         },
         {
@@ -4,6 +4,10 @@ import {
   UpdateCommandOutput,
   PutCommandInput,
   DeleteCommandInput,
+  BatchWriteCommandInput,
+  BatchWriteCommandOutput,
+  BatchGetCommandInput,
+  BatchGetCommandOutput,
 } from '@aws-sdk/lib-dynamodb';

 import { Transaction, DynamoDbManager, MissingKeyValuesError, InvalidDbSchemaError } from '..';
@@ -30,14 +34,15 @@ export type QueryOptions = {

 interface Reader<T> {
   queryItems(partialItem: Partial<T>, options?: QueryOptions): Promise<T[]>;
-
   getItem(id: string | Partial<T>): Promise<T | undefined>;
+  getItems(partialItem: Partial<T>[]): Promise<T[]>;
 }

 interface Writer<T> {
   createItem(item: Partial<T>): Promise<T>;
   updateItem(partialItem: Partial<T>): Promise<T | undefined>;
   deleteItem(partialItem: Partial<T>): Promise<number>;
+  deleteItems(partialItem: Partial<T>[]): Promise<void>;
 }

 type Repository<T> = Reader<T> & Writer<T>;
@@ -65,6 +70,8 @@ export type EntityDefinition = {
   fieldsAsJsonString: string[];
 };

+const MAX_REATTEMPTS = 3;
+
 export abstract class AbstractDynamoDbRepository<SHAPE extends object, DATA_CLASS extends SHAPE>
   implements Reader<SHAPE>, Writer<SHAPE>
 {
@@ -153,25 +160,84 @@ export abstract class AbstractDynamoDbRepository<SHAPE extends object, DATA_CLAS
    * @returns
    */
   private async getBatchItems(items: Partial<SHAPE>[]): Promise<DATA_CLASS[]> {
-
+    let resultItems: DATA_CLASS[] = [];
+
+    let requestKeys: BatchGetCommandInput['RequestItems'] = {
+      [this.tableName]: {
+        Keys: this.getBatchKeys(items),
+      },
+    };
+    let reattemptsCount = 0;
+    while (requestKeys && Object.keys(requestKeys).length) {
+      if (reattemptsCount++ > MAX_REATTEMPTS) {
+        throw Error('Maximum allowed retries exceeded for unprocessed items');
+      }
+      const output: BatchGetCommandOutput = await this.client.batchGet({
+        RequestItems: requestKeys,
+      });
+      requestKeys = output.UnprocessedKeys;
+      if (output.Responses && output.Responses[this.tableName] && output.Responses[this.tableName].length) {
+        resultItems = resultItems.concat(output.Responses[this.tableName].map((i) => this.hydrateItem(i)));
+      }
+    }
+
+    return resultItems;
+  }
+
+  /**
+   * Delete items in a batch
+   * Uses batchWrite() with 25 items
+   *
+   * @param item
+   *
+   * @throws MissingKeyValuesError
+   */
+  public async deleteItems(items: Partial<SHAPE>[]): Promise<void> {
+    // this is the maximum items allowed by BatchWriteItem()
+    const batchSize = 25;
+
+    for (let i = 0; i < items.length; i += batchSize) {
+      const keys = this.getBatchKeys(items.slice(i, i + batchSize));
+      await this.deleteBatchItems(keys);
+    }
+  }
+
+  private async deleteBatchItems(keys: { [key: string]: string }[]): Promise<void> {
+    let requestItems: BatchWriteCommandInput['RequestItems'] = {
+      [this.tableName]: Object.values(keys).map((key) => {
+        return {
+          DeleteRequest: {
+            Key: key,
+          },
+        };
+      }),
+    };
+    let reattemptsCount = 0;
+    while (requestItems && Object.keys(requestItems).length) {
+      if (reattemptsCount++ > MAX_REATTEMPTS) {
+        throw Error('Maximum allowed retries exceeded for unprocessed items');
+      }
+      const response: BatchWriteCommandOutput = await this.client.batchWrite({
+        RequestItems: requestItems,
+      });
+      requestItems = response.UnprocessedItems;
+    }
+  }
+
+  private getBatchKeys(items: Partial<SHAPE>[]) {
+    const keys: { [key: string]: string }[] = [];
     for (const item of items) {
       keys.push({
         [this.keys.pk.attributeName]: this.getPk(item),
         [this.keys.sk.attributeName]: this.getSk(item),
       });
     }
-
-
-
-
-      },
-      },
-    });
+    // keys.push({
+    //   [this.keys.pk.attributeName]: 'foo1',
+    //   [this.keys.sk.attributeName]: 'foo2',
+    // });

-
-    return output.Responses[this.tableName].map((i) => this.hydrateItem(i));
-    }
-    return [];
+    return keys;
   }

   /**
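The new batch helpers above resubmit whatever DynamoDB reports back as unprocessed and give up after a fixed number of attempts. Below is a standalone sketch of that retry loop for deletes, written against the plain `DynamoDBDocumentClient` from `@aws-sdk/lib-dynamodb` rather than the package's `DynamoDbManager` wrapper; the `batchDeleteWithRetry` name, the client plumbing, and the example key values are illustrative assumptions, while the 25-item cap, the `UnprocessedItems` loop, and the limit of 3 reattempts mirror the diff. Note that the package code retries immediately; callers often add exponential backoff between attempts.

```ts
// Sketch of the retry-on-UnprocessedItems pattern used by the new
// deleteBatchItems()/getBatchItems() methods, expressed against the plain
// DocumentClient instead of the package's DynamoDbManager wrapper.
// batchDeleteWithRetry and its parameters are illustrative assumptions;
// the 25-item batch cap and MAX_REATTEMPTS = 3 mirror the diff.
import {
  DynamoDBDocumentClient,
  BatchWriteCommand,
  BatchWriteCommandInput,
} from '@aws-sdk/lib-dynamodb';

const MAX_REATTEMPTS = 3;

export async function batchDeleteWithRetry(
  docClient: DynamoDBDocumentClient,
  tableName: string,
  keys: Record<string, string>[],
): Promise<void> {
  // Seed the first request with one DeleteRequest per key (DynamoDB allows at most 25 per call).
  let requestItems: BatchWriteCommandInput['RequestItems'] = {
    [tableName]: keys.map((key) => ({ DeleteRequest: { Key: key } })),
  };

  let attempts = 0;
  while (requestItems && Object.keys(requestItems).length) {
    if (attempts++ > MAX_REATTEMPTS) {
      throw new Error('Maximum allowed retries exceeded for unprocessed items');
    }
    // DynamoDB may return UnprocessedItems under throttling; resubmit only those.
    const output = await docClient.send(new BatchWriteCommand({ RequestItems: requestItems }));
    requestItems = output.UnprocessedItems;
  }
}

// Example usage (table name and key values are placeholders):
// const docClient = DynamoDBDocumentClient.from(new DynamoDBClient({}));
// await batchDeleteWithRetry(docClient, 'test-table', [{ pk: 'test_item#foo', sk: '#meta' }]);
```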
@@ -264,12 +330,15 @@ export abstract class AbstractDynamoDbRepository<SHAPE extends object, DATA_CLAS
     this.assertValueMatchesModel(value);

     this.convertSelectedValuesToJsonString(newValue);
+    this.convertSelectedValuesToJsonString(oldValue as Record<string, unknown>);

     const updateExpression = [];
     const expressionAttributeNames: Record<string, string> = {};
     const expressionAttributeValues: Record<string, unknown> = {};
+    const updatedAttributes: string[] = [];
     for (const modelProperty of Object.keys(newValue)) {
       const propValue = newValue[modelProperty as keyof SHAPE] ?? null;
+
       if (propValue === oldValue[modelProperty as keyof SHAPE]) {
         // don't need to update the properties that are unchanged
         continue;
@@ -281,19 +350,35 @@ export abstract class AbstractDynamoDbRepository<SHAPE extends object, DATA_CLAS
       updateExpression.push(`${propName} = ${propValuePlaceHolder}`);
       expressionAttributeNames[propName] = modelProperty;
       expressionAttributeValues[propValuePlaceHolder] = propValue;
+      updatedAttributes.push(modelProperty);
     }
-    if (!
+    if (!updatedAttributes.length) {
       // nothing to update
       return value;
     }

+    // also update the gsi attributes if needed
+    Object.keys(this.indexes).forEach((key) => {
+      const index = this.indexes[key];
+      [index.pk.attributeName, index.sk.attributeName].forEach((keyAttributeName) => {
+        const keyFormat = this.keysFormat[keyAttributeName];
+        if (updatedAttributes.find((attr) => keyFormat.search(`{${attr}}`) !== -1)) {
+          const propName = `#${keyAttributeName}`;
+          const propValuePlaceHolder = `:${keyAttributeName}`;
+          updateExpression.push(`${propName} = ${propValuePlaceHolder}`);
+          expressionAttributeNames[propName] = keyAttributeName;
+          expressionAttributeValues[propValuePlaceHolder] = this.getKey(value, keyAttributeName);
+        }
+      });
+    });
+
     const updateCommandInput = {
       TableName: this.tableName,
       Key: {
         [this.keys.pk.attributeName]: this.getPk(newValue),
         [this.keys.sk.attributeName]: this.getSk(newValue),
       },
-      UpdateExpression: 'SET ' + updateExpression.join(','),
+      UpdateExpression: 'SET ' + updateExpression.join(', '),
       ExpressionAttributeValues: expressionAttributeValues,
       ExpressionAttributeNames: expressionAttributeNames,
       ConditionExpression: `attribute_exists(${this.keys.pk.attributeName})`,
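The GSI-sync block above rebuilds an index key whenever an updated attribute appears in that key's format string. The spec expectations (`':gsi1_pk': 'country#au-updated'`, `':gsi1_sk': 'age#55'`) suggest `keysFormat` holds templates along the lines of `'country#{country}'` and `'age#{age}'`. A minimal sketch of that substitution, assuming the `{field}` placeholder syntax and a hypothetical `buildKeyValue` helper (the package itself resolves keys through `getKey()` and `keysFormat`):

```ts
// Minimal sketch (not the package's implementation) of how a key format such as
// 'country#{country}' appears to be resolved into a GSI key value like
// 'country#au-updated'. The format strings and the buildKeyValue name are
// assumptions for illustration only.
function buildKeyValue(keyFormat: string, item: Record<string, unknown>): string {
  // Replace each "{field}" placeholder with the item's current value for that field.
  return keyFormat.replace(/\{(\w+)\}/g, (_match, field: string) => String(item[field]));
}

// With the values used in the spec, the updated attribute feeds straight into the key:
console.log(buildKeyValue('country#{country}', { country: 'au-updated' })); // country#au-updated
console.log(buildKeyValue('age#{age}', { age: 55 }));                       // age#55
```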
@@ -545,17 +630,6 @@ export abstract class AbstractDynamoDbRepository<SHAPE extends object, DATA_CLAS
     return keyFormat;
   }

-  /**
-   * Whether the given property name is part of the entity's pk/sk string
-   * @param propertyName
-   * @returns boolean
-   */
-  private isPropertyPartOfKeys(propertyName: string) {
-    if (this.keysFormat[this.keys.pk.attributeName].search(`{${propertyName}}`) !== -1) return true;
-    if (this.keysFormat[this.keys.sk.attributeName].search(`{${propertyName}}`) !== -1) return true;
-    return false;
-  }
-
   /**
    * Validate the data matches with "DATA_MODEL"
    * @param value