ag-common 0.0.745 → 0.0.747
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/helpers/dynamo/delete.d.ts +2 -2
- package/dist/api/helpers/dynamo/delete.js +4 -4
- package/dist/api/helpers/dynamo/get.js +4 -2
- package/dist/api/helpers/dynamo/index.d.ts +2 -2
- package/dist/api/helpers/dynamo/set.d.ts +2 -2
- package/dist/api/helpers/dynamo/set.js +1 -1
- package/dist/api/helpers/dynamo/types.d.ts +2 -1
- package/package.json +1 -1
package/dist/api/helpers/dynamo/delete.d.ts

@@ -6,8 +6,8 @@ export declare const batchDelete: (params: {
     opt?: {
         /** default 20 */
         batchSize?: number;
-        /** option to
-
+        /** option to control retry behavior: undefined = 3 retries, null = infinite. default 3 */
+        maxRetries?: number | null;
     };
 }) => Promise<DynamoDBResult<void>>;
 export declare const wipeTable: (tableName: string) => Promise<DynamoDBResult<void>>;
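Not part of the published diff: a minimal usage sketch of the new option declared above. The `tableName` and `keys` fields of the batchDelete params sit outside the changed lines, so their names and shapes here are assumed from the call sites later in this diff; treat the whole call as illustrative.

    // Import path is an assumption; the helper is declared in dist/api/helpers/dynamo/delete.d.ts.
    import { batchDelete } from 'ag-common/dist/api/helpers/dynamo';

    const deleteStaleRows = async () => {
        const result = await batchDelete({
            tableName: 'my-table',      // assumed field name (outside the changed lines)
            keys: ['PK#1', 'PK#2'],     // assumed shape; wipeTable below passes scanned PKs here
            pkName: 'PK',
            opt: {
                batchSize: 20,          // "default 20" per the doc comment
                maxRetries: null,       // null = retry throttled batches indefinitely; undefined = 3 retries
            },
        });
        if ('error' in result) {
            console.error(result.error); // DynamoDBResult<void> error branch, as checked in wipeTable
        }
    };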
package/dist/api/helpers/dynamo/delete.js

@@ -26,7 +26,7 @@ const get_1 = require("./get");
 const batchDelete = (params) => __awaiter(void 0, void 0, void 0, function* () {
     var _a;
     try {
-        const { batchSize = 20,
+        const { batchSize = 20, maxRetries } = (_a = params.opt) !== null && _a !== void 0 ? _a : {};
         const chunked = (0, array_1.chunk)(params.keys, batchSize);
         let processed = 0;
         yield (0, async_1.asyncForEach)(chunked, (chunk) => __awaiter(void 0, void 0, void 0, function* () {
@@ -38,7 +38,7 @@ const batchDelete = (params) => __awaiter(void 0, void 0, void 0, function* () {
                 },
             };
             yield (0, withRetry_1.withRetry)(() => _1.dynamoDb.send(new lib_dynamodb_1.BatchWriteCommand(batchDeleteParams)), `batchdelete ${processed}/${params.keys.length}. size=${batchSize}`, {
-                maxRetries:
+                maxRetries: maxRetries === undefined ? 3 : maxRetries,
             });
             processed += chunk.length;
         }));
@@ -54,7 +54,7 @@ const wipeTable = (tableName) => __awaiter(void 0, void 0, void 0, function* ()
     try {
         const generator = (0, get_1.scanWithGenerator)(tableName, {
             BATCH_SIZE: 100, // Process in chunks of 100 items
-
+            maxRetries: null, // Always retry on 429 since we want to ensure complete deletion
         });
         try {
             // Process each batch of items
@@ -69,7 +69,7 @@ const wipeTable = (tableName) => __awaiter(void 0, void 0, void 0, function* ()
                 keys: pks,
                 pkName: 'PK',
                 opt: {
-
+                    maxRetries: null, // Always retry on 429 since we want to ensure complete deletion
                 },
             });
             if ('error' in result) {
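Every withRetry call site touched in this diff resolves the new option with the same ternary. The sketch below (the resolveMaxRetries name is hypothetical) just restates that resolution and the semantics from the delete.d.ts doc comment: undefined falls back to 3 retries, null is passed through and documented as infinite.

    // Hypothetical helper restating the pattern `x === undefined ? 3 : x`
    // used at each withRetry call site in this diff.
    const resolveMaxRetries = (maxRetries?: number | null): number | null =>
        maxRetries === undefined ? 3 : maxRetries;

    resolveMaxRetries();          // 3    -> default cap of three retries
    resolveMaxRetries(undefined); // 3
    resolveMaxRetries(null);      // null -> documented by the callers as "retry infinitely"
    resolveMaxRetries(7);         // 7    -> an explicit cap is passed through unchanged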
package/dist/api/helpers/dynamo/get.js

@@ -65,7 +65,9 @@ const executeQuery = (params, startKey) => __awaiter(void 0, void 0, void 0, fun
     const queryParams = Object.assign({ TableName: params.tableName, KeyConditionExpression: kce, ExpressionAttributeNames: Object.assign(Object.assign({}, ean), (_b = params.filter) === null || _b === void 0 ? void 0 : _b.attrNames), ExpressionAttributeValues: Object.assign(Object.assign({}, eav), (_c = params.filter) === null || _c === void 0 ? void 0 : _c.attrValues), ScanIndexForward: (_d = params.sortAscending) !== null && _d !== void 0 ? _d : true, Limit: (_e = params.BATCH_SIZE) !== null && _e !== void 0 ? _e : params.limit, IndexName: params.indexName, ExclusiveStartKey: startKey }, (params.filter && Object.assign({ FilterExpression: params.filter.filterExpression }, (params.filter.attrValues && {
         ExpressionAttributeValues: Object.assign(Object.assign({}, eav), params.filter.attrValues),
     }))));
-    return (0, withRetry_1.withRetry)(() => _1.dynamoDb.send(new lib_dynamodb_1.QueryCommand(queryParams)), 'queryDynamo'
+    return (0, withRetry_1.withRetry)(() => _1.dynamoDb.send(new lib_dynamodb_1.QueryCommand(queryParams)), 'queryDynamo', {
+        maxRetries: params.maxRetries === undefined ? 3 : params.maxRetries,
+    });
 });
 /**
  * Helper function that builds the scan parameters and executes the scan
@@ -87,7 +89,7 @@ const executeScan = (tableName, options, exclusiveStartKey) => __awaiter(void 0,
         .join(', '),
     })), { ExclusiveStartKey: exclusiveStartKey });
     return (0, withRetry_1.withRetry)(() => _1.dynamoDb.send(new lib_dynamodb_1.ScanCommand(scanParams)), `scan. already seen=${exclusiveStartKey ? 'some' : '0'} items`, {
-        maxRetries: (options === null || options === void 0 ? void 0 : options.
+        maxRetries: (options === null || options === void 0 ? void 0 : options.maxRetries) === undefined ? 3 : options.maxRetries,
     });
 });
 const getItemsDynamo = (params) => __awaiter(void 0, void 0, void 0, function* () {
package/dist/api/helpers/dynamo/index.d.ts

@@ -23,7 +23,7 @@ export declare const getItemDynamo: <T>(params: {
 export declare const putDynamo: <T extends Record<string, unknown>>(item: T, tableName: string, opt?: {
     pkName?: string;
 }) => Promise<import("./types").DynamoDBResult<void>>, batchWrite: <T extends Record<string, unknown>>(tableName: string, items: T[], opt?: {
-
+    maxRetries?: number | null;
     batchSize?: number;
 }) => Promise<import("./types").DynamoDBResult<void>>, getDynamoUpdates: <T extends Record<string, unknown>>(item: T, options?: {
     excludeKeys?: string[];
@@ -39,7 +39,7 @@ export declare const batchDelete: (params: {
     pkName: string;
     opt?: {
         batchSize?: number;
-
+        maxRetries?: number | null;
     };
 }) => Promise<import("./types").DynamoDBResult<void>>, wipeTable: (tableName: string) => Promise<import("./types").DynamoDBResult<void>>;
 export * from './types';
package/dist/api/helpers/dynamo/set.d.ts

@@ -3,8 +3,8 @@ export declare const putDynamo: <T extends Record<string, unknown>>(item: T, tab
     pkName?: string;
 }) => Promise<DynamoDBResult<void>>;
 export declare const batchWrite: <T extends Record<string, unknown>>(tableName: string, items: T[], opt?: {
-    /** option to
-
+    /** option to control retry behavior: undefined = 3 retries, null = infinite */
+    maxRetries?: number | null;
     /** default 20 */
     batchSize?: number;
 }) => Promise<DynamoDBResult<void>>;
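A usage sketch of batchWrite as declared above; the signature in this hunk is complete, so only the import path, table name, and item shape are illustrative.

    // Import path is an assumption; batchWrite is re-exported through the dynamo helpers index.
    import { batchWrite } from 'ag-common/dist/api/helpers/dynamo';

    const writeUsers = async () => {
        // Table name and item shape are illustrative.
        const items = [{ PK: 'USER#1', name: 'Ada' }, { PK: 'USER#2', name: 'Grace' }];
        // Write in chunks of 10 with at most 5 retries per throttled chunk;
        // maxRetries: null would instead retry each chunk until it succeeds.
        const res = await batchWrite('my-table', items, { batchSize: 10, maxRetries: 5 });
        if ('error' in res) {
            console.error(res.error);
        }
    };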
package/dist/api/helpers/dynamo/set.js

@@ -40,7 +40,7 @@ const batchWrite = (tableName, items, opt) => __awaiter(void 0, void 0, void 0,
                 },
             };
             yield (0, withRetry_1.withRetry)(() => _1.dynamoDb.send(new lib_dynamodb_1.BatchWriteCommand(batchWriteParams)), `batchwrite ${processed}/${items.length}. size=${batchSize}`, {
-                maxRetries: (opt === null || opt === void 0 ? void 0 : opt.
+                maxRetries: (opt === null || opt === void 0 ? void 0 : opt.maxRetries) === undefined ? 3 : opt.maxRetries,
             });
             processed += chunk.length;
         }));
package/dist/api/helpers/dynamo/types.d.ts

@@ -23,7 +23,7 @@ export interface ScanOptions {
     filter?: DynamoFilter;
     requiredAttributeList?: string[];
     indexName?: string;
-
+    maxRetries?: number | null;
 }
 export interface DynamoQueryParams {
     tableName: string;
@@ -37,6 +37,7 @@ export interface DynamoQueryParams {
     limit?: number;
     filter?: DynamoFilter;
     sortAscending?: boolean;
+    maxRetries?: number | null;
 }
 export interface DynamoBatchQueryParams {
     tableName: string;
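Both interfaces now carry the option, so a scan or query can opt into unlimited retries the same way wipeTable does. The literals below are partial sketches: fields declared outside these hunks may be required, and the import path is assumed.

    // Import path is an assumption; both types are re-exported via `export * from './types'`.
    import type { ScanOptions, DynamoQueryParams } from 'ag-common/dist/api/helpers/dynamo';

    // Partial<> because fields declared outside these hunks may be required.
    const scanOpts: Partial<ScanOptions> = {
        indexName: 'GSI1',   // illustrative index name
        maxRetries: null,    // retry throttled pages until the scan completes, as wipeTable does
    };
    const queryParams: Partial<DynamoQueryParams> = {
        tableName: 'my-table',
        limit: 50,
        sortAscending: false,
        maxRetries: 3,       // explicit cap; omitting it also resolves to 3 at the call site
    };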
package/package.json: CHANGED