@bitblit/ratchet-aws 4.0.420-alpha → 4.0.421-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/batch/aws-batch-background-processor.d.ts +8 -0
- package/lib/batch/aws-batch-background-processor.js +46 -0
- package/lib/batch/aws-batch-background-processor.js.map +1 -0
- package/lib/batch/aws-batch-ratchet.d.ts +13 -0
- package/lib/batch/aws-batch-ratchet.js +56 -0
- package/lib/batch/aws-batch-ratchet.js.map +1 -0
- package/lib/batch/index.d.ts +2 -0
- package/lib/batch/index.js +3 -0
- package/lib/batch/index.js.map +1 -0
- package/lib/build/index.d.ts +1 -0
- package/lib/build/index.js +2 -0
- package/lib/build/index.js.map +1 -0
- package/lib/build/ratchet-aws-info.d.ts +5 -0
- package/lib/build/ratchet-aws-info.js +15 -0
- package/lib/build/ratchet-aws-info.js.map +1 -0
- package/lib/cache/index.d.ts +5 -0
- package/lib/cache/index.js +6 -0
- package/lib/cache/index.js.map +1 -0
- package/lib/cache/memory-storage-provider.d.ts +10 -0
- package/lib/cache/memory-storage-provider.js +28 -0
- package/lib/cache/memory-storage-provider.js.map +1 -0
- package/lib/cache/simple-cache-object-wrapper.d.ts +7 -0
- package/lib/cache/simple-cache-object-wrapper.js +2 -0
- package/lib/cache/simple-cache-object-wrapper.js.map +1 -0
- package/lib/cache/simple-cache-read-options.d.ts +5 -0
- package/lib/cache/simple-cache-read-options.js +2 -0
- package/lib/cache/simple-cache-read-options.js.map +1 -0
- package/lib/cache/simple-cache-storage-provider.d.ts +8 -0
- package/lib/cache/simple-cache-storage-provider.js +2 -0
- package/lib/cache/simple-cache-storage-provider.js.map +1 -0
- package/lib/cache/simple-cache.d.ts +14 -0
- package/lib/cache/simple-cache.js +65 -0
- package/lib/cache/simple-cache.js.map +1 -0
- package/lib/cloudwatch/cloud-watch-log-group-ratchet.d.ts +10 -0
- package/lib/cloudwatch/cloud-watch-log-group-ratchet.js +75 -0
- package/lib/cloudwatch/cloud-watch-log-group-ratchet.js.map +1 -0
- package/lib/cloudwatch/cloud-watch-logs-ratchet.d.ts +15 -0
- package/lib/cloudwatch/cloud-watch-logs-ratchet.js +174 -0
- package/lib/cloudwatch/cloud-watch-logs-ratchet.js.map +1 -0
- package/lib/cloudwatch/cloud-watch-metrics-ratchet.d.ts +10 -0
- package/lib/cloudwatch/cloud-watch-metrics-ratchet.js +57 -0
- package/lib/cloudwatch/cloud-watch-metrics-ratchet.js.map +1 -0
- package/lib/cloudwatch/index.d.ts +3 -0
- package/lib/cloudwatch/index.js +4 -0
- package/lib/cloudwatch/index.js.map +1 -0
- package/lib/daemon/daemon-like.d.ts +16 -0
- package/lib/daemon/daemon-like.js +2 -0
- package/lib/daemon/daemon-like.js.map +1 -0
- package/lib/daemon/daemon-process-create-options.d.ts +7 -0
- package/lib/daemon/daemon-process-create-options.js +2 -0
- package/lib/daemon/daemon-process-create-options.js.map +1 -0
- package/lib/daemon/daemon-process-state-public-token.d.ts +4 -0
- package/lib/daemon/daemon-process-state-public-token.js +2 -0
- package/lib/daemon/daemon-process-state-public-token.js.map +1 -0
- package/lib/daemon/daemon-process-state.d.ts +13 -0
- package/lib/daemon/daemon-process-state.js +2 -0
- package/lib/daemon/daemon-process-state.js.map +1 -0
- package/lib/daemon/daemon-stream-data-options.d.ts +5 -0
- package/lib/daemon/daemon-stream-data-options.js +2 -0
- package/lib/daemon/daemon-stream-data-options.js.map +1 -0
- package/lib/daemon/daemon-util.d.ts +17 -0
- package/lib/daemon/daemon-util.js +154 -0
- package/lib/daemon/daemon-util.js.map +1 -0
- package/lib/daemon/daemon.d.ts +32 -0
- package/lib/daemon/daemon.js +129 -0
- package/lib/daemon/daemon.js.map +1 -0
- package/lib/daemon/index.d.ts +7 -0
- package/lib/daemon/index.js +8 -0
- package/lib/daemon/index.js.map +1 -0
- package/lib/dao/example-prototype-dao-item.d.ts +8 -0
- package/lib/dao/example-prototype-dao-item.js +2 -0
- package/lib/dao/example-prototype-dao-item.js.map +1 -0
- package/lib/dao/index.d.ts +8 -0
- package/lib/dao/index.js +9 -0
- package/lib/dao/index.js.map +1 -0
- package/lib/dao/memory-prototype-dao-provider.d.ts +7 -0
- package/lib/dao/memory-prototype-dao-provider.js +11 -0
- package/lib/dao/memory-prototype-dao-provider.js.map +1 -0
- package/lib/dao/prototype-dao-config.d.ts +8 -0
- package/lib/dao/prototype-dao-config.js +2 -0
- package/lib/dao/prototype-dao-config.js.map +1 -0
- package/lib/dao/prototype-dao-db.d.ts +4 -0
- package/lib/dao/prototype-dao-db.js +2 -0
- package/lib/dao/prototype-dao-db.js.map +1 -0
- package/lib/dao/prototype-dao-provider.d.ts +5 -0
- package/lib/dao/prototype-dao-provider.js +2 -0
- package/lib/dao/prototype-dao-provider.js.map +1 -0
- package/lib/dao/prototype-dao.d.ts +15 -0
- package/lib/dao/prototype-dao.js +89 -0
- package/lib/dao/prototype-dao.js.map +1 -0
- package/lib/dao/s3-simple-dao.d.ts +15 -0
- package/lib/dao/s3-simple-dao.js +77 -0
- package/lib/dao/s3-simple-dao.js.map +1 -0
- package/lib/dao/simple-dao-item.d.ts +5 -0
- package/lib/dao/simple-dao-item.js +2 -0
- package/lib/dao/simple-dao-item.js.map +1 -0
- package/lib/dynamodb/dynamo-ratchet-like.d.ts +25 -0
- package/lib/dynamodb/dynamo-ratchet-like.js +2 -0
- package/lib/dynamodb/dynamo-ratchet-like.js.map +1 -0
- package/lib/dynamodb/dynamo-ratchet.d.ts +34 -0
- package/lib/dynamodb/dynamo-ratchet.js +671 -0
- package/lib/dynamodb/dynamo-ratchet.js.map +1 -0
- package/lib/dynamodb/dynamo-table-ratchet.d.ts +13 -0
- package/lib/dynamodb/dynamo-table-ratchet.js +140 -0
- package/lib/dynamodb/dynamo-table-ratchet.js.map +1 -0
- package/lib/dynamodb/hash-spreader.d.ts +15 -0
- package/lib/dynamodb/hash-spreader.js +66 -0
- package/lib/dynamodb/hash-spreader.js.map +1 -0
- package/lib/dynamodb/impl/dynamo-db-storage-provider.d.ts +25 -0
- package/lib/dynamodb/impl/dynamo-db-storage-provider.js +110 -0
- package/lib/dynamodb/impl/dynamo-db-storage-provider.js.map +1 -0
- package/lib/dynamodb/impl/dynamo-db-sync-lock.d.ts +10 -0
- package/lib/dynamodb/impl/dynamo-db-sync-lock.js +71 -0
- package/lib/dynamodb/impl/dynamo-db-sync-lock.js.map +1 -0
- package/lib/dynamodb/impl/dynamo-expiring-code-provider.d.ts +12 -0
- package/lib/dynamodb/impl/dynamo-expiring-code-provider.js +25 -0
- package/lib/dynamodb/impl/dynamo-expiring-code-provider.js.map +1 -0
- package/lib/dynamodb/impl/dynamo-runtime-parameter-provider.d.ts +11 -0
- package/lib/dynamodb/impl/dynamo-runtime-parameter-provider.js +37 -0
- package/lib/dynamodb/impl/dynamo-runtime-parameter-provider.js.map +1 -0
- package/lib/dynamodb/index.d.ts +8 -0
- package/lib/dynamodb/index.js +9 -0
- package/lib/dynamodb/index.js.map +1 -0
- package/lib/ec2/ec2-ratchet.d.ts +16 -0
- package/lib/ec2/ec2-ratchet.js +114 -0
- package/lib/ec2/ec2-ratchet.js.map +1 -0
- package/lib/ec2/index.d.ts +1 -0
- package/lib/ec2/index.js +2 -0
- package/lib/ec2/index.js.map +1 -0
- package/lib/ecr/ecr-unused-image-cleaner-options.d.ts +7 -0
- package/lib/ecr/ecr-unused-image-cleaner-options.js +2 -0
- package/lib/ecr/ecr-unused-image-cleaner-options.js.map +1 -0
- package/lib/ecr/ecr-unused-image-cleaner-output.d.ts +7 -0
- package/lib/ecr/ecr-unused-image-cleaner-output.js +2 -0
- package/lib/ecr/ecr-unused-image-cleaner-output.js.map +1 -0
- package/lib/ecr/ecr-unused-image-cleaner-repository-output.d.ts +8 -0
- package/lib/ecr/ecr-unused-image-cleaner-repository-output.js +2 -0
- package/lib/ecr/ecr-unused-image-cleaner-repository-output.js.map +1 -0
- package/lib/ecr/ecr-unused-image-cleaner.d.ts +18 -0
- package/lib/ecr/ecr-unused-image-cleaner.js +136 -0
- package/lib/ecr/ecr-unused-image-cleaner.js.map +1 -0
- package/lib/ecr/index.d.ts +9 -0
- package/lib/ecr/index.js +10 -0
- package/lib/ecr/index.js.map +1 -0
- package/lib/ecr/retained-image-descriptor.d.ts +6 -0
- package/lib/ecr/retained-image-descriptor.js +2 -0
- package/lib/ecr/retained-image-descriptor.js.map +1 -0
- package/lib/ecr/retained-image-reason.d.ts +4 -0
- package/lib/ecr/retained-image-reason.js +6 -0
- package/lib/ecr/retained-image-reason.js.map +1 -0
- package/lib/ecr/used-image-finder.d.ts +3 -0
- package/lib/ecr/used-image-finder.js +2 -0
- package/lib/ecr/used-image-finder.js.map +1 -0
- package/lib/ecr/used-image-finders/aws-batch-used-image-finder.d.ts +8 -0
- package/lib/ecr/used-image-finders/aws-batch-used-image-finder.js +32 -0
- package/lib/ecr/used-image-finders/aws-batch-used-image-finder.js.map +1 -0
- package/lib/ecr/used-image-finders/lambda-used-image-finder.d.ts +8 -0
- package/lib/ecr/used-image-finders/lambda-used-image-finder.js +40 -0
- package/lib/ecr/used-image-finders/lambda-used-image-finder.js.map +1 -0
- package/lib/environment/cascade-environment-service-provider.d.ts +6 -0
- package/lib/environment/cascade-environment-service-provider.js +25 -0
- package/lib/environment/cascade-environment-service-provider.js.map +1 -0
- package/lib/environment/env-var-environment-service-provider.d.ts +6 -0
- package/lib/environment/env-var-environment-service-provider.js +31 -0
- package/lib/environment/env-var-environment-service-provider.js.map +1 -0
- package/lib/environment/environment-service-config.d.ts +4 -0
- package/lib/environment/environment-service-config.js +2 -0
- package/lib/environment/environment-service-config.js.map +1 -0
- package/lib/environment/environment-service-provider.d.ts +3 -0
- package/lib/environment/environment-service-provider.js +2 -0
- package/lib/environment/environment-service-provider.js.map +1 -0
- package/lib/environment/environment-service.d.ts +11 -0
- package/lib/environment/environment-service.js +51 -0
- package/lib/environment/environment-service.js.map +1 -0
- package/lib/environment/fixed-environment-service-provider.d.ts +7 -0
- package/lib/environment/fixed-environment-service-provider.js +22 -0
- package/lib/environment/fixed-environment-service-provider.js.map +1 -0
- package/lib/environment/index.d.ts +7 -0
- package/lib/environment/index.js +8 -0
- package/lib/environment/index.js.map +1 -0
- package/lib/environment/ssm-environment-service-provider.d.ts +8 -0
- package/lib/environment/ssm-environment-service-provider.js +59 -0
- package/lib/environment/ssm-environment-service-provider.js.map +1 -0
- package/lib/expiring-code/expiring-code-params.d.ts +7 -0
- package/lib/expiring-code/expiring-code-params.js +2 -0
- package/lib/expiring-code/expiring-code-params.js.map +1 -0
- package/lib/expiring-code/expiring-code-provider.d.ts +5 -0
- package/lib/expiring-code/expiring-code-provider.js +2 -0
- package/lib/expiring-code/expiring-code-provider.js.map +1 -0
- package/lib/expiring-code/expiring-code-ratchet.d.ts +10 -0
- package/lib/expiring-code/expiring-code-ratchet.js +35 -0
- package/lib/expiring-code/expiring-code-ratchet.js.map +1 -0
- package/lib/expiring-code/expiring-code.d.ts +6 -0
- package/lib/expiring-code/expiring-code.js +2 -0
- package/lib/expiring-code/expiring-code.js.map +1 -0
- package/lib/expiring-code/index.d.ts +4 -0
- package/lib/expiring-code/index.js +5 -0
- package/lib/expiring-code/index.js.map +1 -0
- package/lib/iam/aws-credentials-ratchet.d.ts +4 -0
- package/lib/iam/aws-credentials-ratchet.js +19 -0
- package/lib/iam/aws-credentials-ratchet.js.map +1 -0
- package/lib/iam/index.d.ts +1 -0
- package/lib/iam/index.js +2 -0
- package/lib/iam/index.js.map +1 -0
- package/lib/lambda/index.d.ts +2 -0
- package/lib/lambda/index.js +3 -0
- package/lib/lambda/index.js.map +1 -0
- package/lib/lambda/lambda-event-detector.d.ts +15 -0
- package/lib/lambda/lambda-event-detector.js +42 -0
- package/lib/lambda/lambda-event-detector.js.map +1 -0
- package/lib/lambda/lambda-event-type-guards.d.ts +11 -0
- package/lib/lambda/lambda-event-type-guards.js +28 -0
- package/lib/lambda/lambda-event-type-guards.js.map +1 -0
- package/lib/model/cloud-watch-metrics-minute-level-dynamo-count-request.d.ts +12 -0
- package/lib/model/cloud-watch-metrics-minute-level-dynamo-count-request.js +2 -0
- package/lib/model/cloud-watch-metrics-minute-level-dynamo-count-request.js.map +1 -0
- package/lib/model/dynamo-count-result.d.ts +5 -0
- package/lib/model/dynamo-count-result.js +2 -0
- package/lib/model/dynamo-count-result.js.map +1 -0
- package/lib/model/index.d.ts +2 -0
- package/lib/model/index.js +3 -0
- package/lib/model/index.js.map +1 -0
- package/lib/route53/index.d.ts +1 -0
- package/lib/route53/index.js +2 -0
- package/lib/route53/index.js.map +1 -0
- package/lib/route53/route-53-ratchet.d.ts +8 -0
- package/lib/route53/route-53-ratchet.js +59 -0
- package/lib/route53/route-53-ratchet.js.map +1 -0
- package/lib/runtime-parameter/cached-stored-runtime-parameter.d.ts +4 -0
- package/lib/runtime-parameter/cached-stored-runtime-parameter.js +2 -0
- package/lib/runtime-parameter/cached-stored-runtime-parameter.js.map +1 -0
- package/lib/runtime-parameter/global-variable-override-runtime-parameter-provider.d.ts +17 -0
- package/lib/runtime-parameter/global-variable-override-runtime-parameter-provider.js +52 -0
- package/lib/runtime-parameter/global-variable-override-runtime-parameter-provider.js.map +1 -0
- package/lib/runtime-parameter/index.d.ts +6 -0
- package/lib/runtime-parameter/index.js +7 -0
- package/lib/runtime-parameter/index.js.map +1 -0
- package/lib/runtime-parameter/memory-runtime-parameter-provider.d.ts +10 -0
- package/lib/runtime-parameter/memory-runtime-parameter-provider.js +35 -0
- package/lib/runtime-parameter/memory-runtime-parameter-provider.js.map +1 -0
- package/lib/runtime-parameter/runtime-parameter-provider.d.ts +6 -0
- package/lib/runtime-parameter/runtime-parameter-provider.js +2 -0
- package/lib/runtime-parameter/runtime-parameter-provider.js.map +1 -0
- package/lib/runtime-parameter/runtime-parameter-ratchet.d.ts +15 -0
- package/lib/runtime-parameter/runtime-parameter-ratchet.js +72 -0
- package/lib/runtime-parameter/runtime-parameter-ratchet.js.map +1 -0
- package/lib/runtime-parameter/stored-runtime-parameter.d.ts +6 -0
- package/lib/runtime-parameter/stored-runtime-parameter.js +2 -0
- package/lib/runtime-parameter/stored-runtime-parameter.js.map +1 -0
- package/lib/s3/expanded-file-children.d.ts +5 -0
- package/lib/s3/expanded-file-children.js +2 -0
- package/lib/s3/expanded-file-children.js.map +1 -0
- package/lib/s3/impl/s3-environment-service-provider.d.ts +15 -0
- package/lib/s3/impl/s3-environment-service-provider.js +28 -0
- package/lib/s3/impl/s3-environment-service-provider.js.map +1 -0
- package/lib/s3/impl/s3-expiring-code-provider.d.ts +17 -0
- package/lib/s3/impl/s3-expiring-code-provider.js +49 -0
- package/lib/s3/impl/s3-expiring-code-provider.js.map +1 -0
- package/lib/s3/impl/s3-prototype-dao-provider.d.ts +10 -0
- package/lib/s3/impl/s3-prototype-dao-provider.js +27 -0
- package/lib/s3/impl/s3-prototype-dao-provider.js.map +1 -0
- package/lib/s3/impl/s3-storage-provider.d.ts +14 -0
- package/lib/s3/impl/s3-storage-provider.js +44 -0
- package/lib/s3/impl/s3-storage-provider.js.map +1 -0
- package/lib/s3/index.d.ts +9 -0
- package/lib/s3/index.js +10 -0
- package/lib/s3/index.js.map +1 -0
- package/lib/s3/s3-cache-ratchet-like.d.ts +25 -0
- package/lib/s3/s3-cache-ratchet-like.js +2 -0
- package/lib/s3/s3-cache-ratchet-like.js.map +1 -0
- package/lib/s3/s3-cache-ratchet.d.ts +35 -0
- package/lib/s3/s3-cache-ratchet.js +360 -0
- package/lib/s3/s3-cache-ratchet.js.map +1 -0
- package/lib/s3/s3-location-sync-ratchet.d.ts +21 -0
- package/lib/s3/s3-location-sync-ratchet.js +140 -0
- package/lib/s3/s3-location-sync-ratchet.js.map +1 -0
- package/lib/s3/s3-ratchet.d.ts +5 -0
- package/lib/s3/s3-ratchet.js +23 -0
- package/lib/s3/s3-ratchet.js.map +1 -0
- package/lib/ses/index.d.ts +1 -0
- package/lib/ses/index.js +2 -0
- package/lib/ses/index.js.map +1 -0
- package/lib/ses/ses-mail-sending-provider.d.ts +15 -0
- package/lib/ses/ses-mail-sending-provider.js +68 -0
- package/lib/ses/ses-mail-sending-provider.js.map +1 -0
- package/lib/sns/index.d.ts +1 -0
- package/lib/sns/index.js +2 -0
- package/lib/sns/index.js.map +1 -0
- package/lib/sns/sns-ratchet.d.ts +9 -0
- package/lib/sns/sns-ratchet.js +49 -0
- package/lib/sns/sns-ratchet.js.map +1 -0
- package/lib/sync-lock/index.d.ts +2 -0
- package/lib/sync-lock/index.js +3 -0
- package/lib/sync-lock/index.js.map +1 -0
- package/lib/sync-lock/memory-sync-lock.d.ts +8 -0
- package/lib/sync-lock/memory-sync-lock.js +36 -0
- package/lib/sync-lock/memory-sync-lock.js.map +1 -0
- package/lib/sync-lock/sync-lock-provider.d.ts +5 -0
- package/lib/sync-lock/sync-lock-provider.js +2 -0
- package/lib/sync-lock/sync-lock-provider.js.map +1 -0
- package/package.json +55 -56
- package/lib/index.mjs +0 -2
- package/lib/types.d.ts +0 -818
|
@@ -0,0 +1,671 @@
|
|
|
1
|
+
import { BatchGetCommand, BatchWriteCommand, DeleteCommand, GetCommand, PutCommand, QueryCommand, ScanCommand, UpdateCommand, } from '@aws-sdk/lib-dynamodb';
|
|
2
|
+
import { ConditionalCheckFailedException, ProvisionedThroughputExceededException } from '@aws-sdk/client-dynamodb';
|
|
3
|
+
import { Logger } from '@bitblit/ratchet-common/logger/logger';
|
|
4
|
+
import { PromiseRatchet } from '@bitblit/ratchet-common/lang/promise-ratchet';
|
|
5
|
+
import { ErrorRatchet } from '@bitblit/ratchet-common/lang/error-ratchet';
|
|
6
|
+
import { DurationRatchet } from '@bitblit/ratchet-common/lang/duration-ratchet';
|
|
7
|
+
import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
|
|
8
|
+
import { NumberRatchet } from '@bitblit/ratchet-common/lang/number-ratchet';
|
|
9
|
+
export class DynamoRatchet {
|
|
10
|
+
awsDDB;
|
|
11
|
+
constructor(awsDDB) {
|
|
12
|
+
this.awsDDB = awsDDB;
|
|
13
|
+
if (!awsDDB) {
|
|
14
|
+
throw 'awsDDB may not be null';
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
  /** Read-only accessor for the wrapped DynamoDBDocumentClient. */
  get dynamoDBDocumentClient() {
    return this.awsDDB;
  }
|
|
20
|
+
  /** Returns the wrapped client (same value as the dynamoDBDocumentClient getter). */
  getDDB() {
    return this.awsDDB;
  }
|
|
23
|
+
async tableIsEmpty(tableName) {
|
|
24
|
+
const scan = {
|
|
25
|
+
TableName: tableName,
|
|
26
|
+
Limit: 1,
|
|
27
|
+
};
|
|
28
|
+
const ScanCommandOutput = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
|
|
29
|
+
return ScanCommandOutput.Items.length === 0;
|
|
30
|
+
}
|
|
31
|
+
async scanPromise(input) {
|
|
32
|
+
return this.awsDDB.send(new ScanCommand(input));
|
|
33
|
+
}
|
|
34
|
+
async queryPromise(input) {
|
|
35
|
+
return this.awsDDB.send(new QueryCommand(input));
|
|
36
|
+
}
|
|
37
|
+
async throughputSafeScanOrQuery(proc, input, maxTries, inCurrentTry) {
|
|
38
|
+
let rval = null;
|
|
39
|
+
if (input) {
|
|
40
|
+
let currentTry = inCurrentTry ?? 0;
|
|
41
|
+
do {
|
|
42
|
+
currentTry++;
|
|
43
|
+
try {
|
|
44
|
+
rval = await proc(input);
|
|
45
|
+
}
|
|
46
|
+
catch (err) {
|
|
47
|
+
if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
|
|
48
|
+
const wait = Math.pow(2, currentTry) * 1000;
|
|
49
|
+
Logger.debug('Exceeded scan throughput for %j : Try %d of %d (Waiting %d ms)', input, currentTry, maxTries, wait);
|
|
50
|
+
await PromiseRatchet.wait(wait);
|
|
51
|
+
currentTry++;
|
|
52
|
+
}
|
|
53
|
+
else {
|
|
54
|
+
throw err;
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
} while (!rval && (!maxTries || currentTry < maxTries));
|
|
58
|
+
if (!rval) {
|
|
59
|
+
ErrorRatchet.throwFormattedErr('throughputSafeScan failed - tried %d times, kept running into throughput exceeded : %j', maxTries, input);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
return rval;
|
|
63
|
+
}
|
|
64
|
+
  /**
   * Runs a COUNT query across every result page and totals the counts.
   * NOTE: mutates the passed query object (sets Select='COUNT', clears Limit,
   * writes ExclusiveStartKey while paging) - callers should not reuse it afterward.
   * @param qry - the query input; a pre-set Limit acts as a soft cap on the count
   * @param delayMS - pause between pages (throttling), default 0
   * @returns {count, scannedCount, pages}, or null when any error occurred
   *          (the error is logged, not rethrown)
   */
  async fullyExecuteQueryCount(qry, delayMS = 0) {
    try {
      qry.Select = 'COUNT';
      Logger.debug('Executing count query : %j', qry);
      const rval = {
        count: 0,
        scannedCount: 0,
        pages: 0,
      };
      const start = new Date().getTime();
      let qryResults = null;
      // Remember the caller's Limit (used below as a soft cap), then clear it so
      // Dynamo does not return artificially small pages.
      const myLimit = qry.Limit;
      qry.Limit = null;
      do {
        qryResults = await this.throughputSafeScanOrQuery((o) => this.queryPromise(o), qry);
        rval.count += qryResults['Count'];
        rval.scannedCount += qryResults['ScannedCount'];
        rval.pages++;
        qry['ExclusiveStartKey'] = qryResults.LastEvaluatedKey;
        await PromiseRatchet.wait(delayMS);
        Logger.silly('Rval is now %j', rval);
        if (myLimit && rval.count >= myLimit && qry['ExclusiveStartKey']) {
          // Soft cap reached - stop paging early by clearing the continuation key.
          Logger.info('Aborting query since hit limit of %d', myLimit);
          qry['ExclusiveStartKey'] = null;
        }
      } while (qry['ExclusiveStartKey']);
      const end = new Date().getTime();
      Logger.debug('Finished, returned %j in %s for %j', rval, DurationRatchet.formatMsDuration(end - start, true), qry);
      return rval;
    }
    catch (err) {
      // Best-effort: log the failure and return null rather than propagate.
      Logger.error('Failed with %s, q: %j', err, qry, err);
      return null;
    }
  }
|
|
99
|
+
async fullyExecuteQuery(qry, delayMS = 0, softLimit = null) {
|
|
100
|
+
const rval = [];
|
|
101
|
+
await this.fullyExecuteProcessOverQuery(qry, async (v) => {
|
|
102
|
+
rval.push(v);
|
|
103
|
+
}, delayMS, softLimit);
|
|
104
|
+
return rval;
|
|
105
|
+
}
|
|
106
|
+
  /**
   * Runs a query across all result pages, awaiting proc serially for each item.
   * NOTE: mutates qry (writes ExclusiveStartKey while paging). Errors are logged
   * and swallowed; the count processed up to that point is still returned.
   * @param qry - the query input; when qry.Limit is set, only the first page is processed
   * @param proc - async callback invoked once per item
   * @param delayMS - pause between pages, default 0
   * @param softLimit - stop fetching further pages once at least this many items
   *                    were processed (null = no cap); not an exact cutoff - the
   *                    current page is always fully processed
   * @returns number of items processed
   */
  async fullyExecuteProcessOverQuery(qry, proc, delayMS = 0, softLimit = null) {
    let cnt = 0;
    try {
      Logger.debug('Executing query : %j', qry);
      const start = new Date().getTime();
      Logger.debug('Pulling %j', qry);
      let qryResults = await this.throughputSafeScanOrQuery((o) => this.queryPromise(o), qry);
      for (let i = 0; i < qryResults.Items.length; i++) {
        await proc(qryResults.Items[i]);
        cnt++;
      }
      let pages = 0;
      let blankPages = 0;
      // Keep paging while Dynamo reports more data, unless the caller set an
      // explicit Limit (single-page mode) or the soft limit has been reached.
      while (qryResults.LastEvaluatedKey && (softLimit === null || cnt < softLimit) && !qry.Limit) {
        Logger.silly('Found more rows - requery with key %j', qryResults.LastEvaluatedKey);
        qry['ExclusiveStartKey'] = qryResults.LastEvaluatedKey;
        qryResults = await this.throughputSafeScanOrQuery((o) => this.queryPromise(o), qry);
        for (let i = 0; i < qryResults.Items.length; i++) {
          await proc(qryResults.Items[i]);
          cnt++;
        }
        Logger.silly('Have processed %d items', cnt);
        pages++;
        // Dynamo can legitimately return 0-item pages when filters apply; tracked for the log line.
        blankPages += qryResults.Count === 0 ? 1 : 0;
        await PromiseRatchet.wait(delayMS);
      }
      const end = new Date().getTime();
      Logger.debug('Finished, processed %d rows in %s for %j (%d blank pages, %d total pages)', cnt, DurationRatchet.formatMsDuration(end - start, true), qry, blankPages, pages);
    }
    catch (err) {
      // Best-effort: log and fall through, returning the partial count.
      Logger.error('Failed with %s, q: %j', err, qry, err);
    }
    return cnt;
  }
|
|
140
|
+
  /**
   * Runs a COUNT scan across every result page and totals the counts.
   * NOTE: mutates the passed scan object (sets Select='COUNT', clears Limit,
   * writes ExclusiveStartKey while paging) - callers should not reuse it afterward.
   * @param scan - the scan input; a pre-set Limit acts as a soft cap on the count
   * @param delayMS - pause between pages (throttling), default 0
   * @returns {count, scannedCount, pages}, or null when any error occurred
   *          (the error is logged, not rethrown)
   */
  async fullyExecuteScanCount(scan, delayMS = 0) {
    try {
      scan.Select = 'COUNT';
      const rval = {
        count: 0,
        scannedCount: 0,
        pages: 0,
      };
      Logger.debug('Executing scan count : %j', scan);
      const start = new Date().getTime();
      let qryResults = null;
      // Remember the caller's Limit (used below as a soft cap), then clear it so
      // Dynamo does not return artificially small pages.
      const myLimit = scan.Limit;
      scan.Limit = null;
      do {
        qryResults = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
        rval.count += qryResults['Count'];
        rval.scannedCount += qryResults['ScannedCount'];
        rval.pages++;
        scan['ExclusiveStartKey'] = qryResults?.LastEvaluatedKey;
        await PromiseRatchet.wait(delayMS);
        Logger.silly('Rval is now %j', rval);
        if (myLimit && rval.count >= myLimit && scan['ExclusiveStartKey']) {
          // Soft cap reached - stop paging early by clearing the continuation key.
          Logger.info('Aborting scan since hit limit of %d', myLimit);
          scan['ExclusiveStartKey'] = null;
        }
      } while (scan['ExclusiveStartKey']);
      const end = new Date().getTime();
      Logger.debug('Finished, returned %j in %s for %j', rval, DurationRatchet.formatMsDuration(end - start, true), scan);
      return rval;
    }
    catch (err) {
      // Best-effort: log the failure and return null rather than propagate.
      Logger.error('Failed with %s, q: %j', err, scan, err);
      return null;
    }
  }
|
|
175
|
+
async fullyExecuteScan(scan, delayMS = 0, softLimit = null) {
|
|
176
|
+
const rval = [];
|
|
177
|
+
await this.fullyExecuteProcessOverScan(scan, async (v) => {
|
|
178
|
+
rval.push(v);
|
|
179
|
+
}, delayMS, softLimit);
|
|
180
|
+
return rval;
|
|
181
|
+
}
|
|
182
|
+
  /**
   * Runs a scan across all result pages, awaiting proc serially for each item.
   * NOTE: mutates scan (writes ExclusiveStartKey while paging). Errors are logged
   * and swallowed; the count processed up to that point is still returned.
   * @param scan - the scan input; when scan.Limit is set, only the first page is processed
   * @param proc - async callback invoked once per item
   * @param delayMS - pause between pages, default 0
   * @param softLimit - stop fetching further pages once at least this many items
   *                    were processed (null = no cap); the current page is always
   *                    fully processed
   * @returns number of items processed
   */
  async fullyExecuteProcessOverScan(scan, proc, delayMS = 0, softLimit = null) {
    let cnt = 0;
    try {
      Logger.debug('Executing scan : %j', scan);
      const start = new Date().getTime();
      Logger.debug('Pulling %j', scan);
      let qryResults = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
      for (let i = 0; i < qryResults.Items.length; i++) {
        await proc(qryResults.Items[i]);
        cnt++;
      }
      // Keep paging while Dynamo reports more data, unless the caller set an
      // explicit Limit (single-page mode) or the soft limit has been reached.
      while (qryResults.LastEvaluatedKey && (softLimit === null || cnt < softLimit) && !scan.Limit) {
        Logger.silly('Found more rows - requery with key %j', qryResults.LastEvaluatedKey);
        scan['ExclusiveStartKey'] = qryResults.LastEvaluatedKey;
        qryResults = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
        for (let i = 0; i < qryResults.Items.length; i++) {
          await proc(qryResults.Items[i]);
          cnt++;
        }
        Logger.silly('Rval is now %d items', cnt);
        await PromiseRatchet.wait(delayMS);
      }
      const end = new Date().getTime();
      Logger.debug('Finished, processed %d results in %s for %j', cnt, DurationRatchet.formatMsDuration(end - start, true), scan);
    }
    catch (err) {
      // Best-effort: log and fall through, returning the partial count.
      Logger.error('Failed with %s, q: %j', err, scan, err);
    }
    return cnt;
  }
|
|
212
|
+
  /**
   * Writes all elements to a table via BatchWrite, in batches of batchSize,
   * retrying unprocessed items up to 6 times with exponential backoff
   * (2^try seconds). Throughput-exceeded errors are converted into "everything
   * unprocessed" and retried the same way; other errors propagate.
   * @param tableName - table to write to
   * @param elements - items to put; null/empty writes nothing and returns 0
   * @param batchSize - must be at least 2 (BatchWrite accepts up to 25 per call - TODO confirm caller-enforced)
   * @returns the number of items actually written (unprocessed leftovers after
   *          6 tries are logged and excluded from the count)
   * @throws Error when batchSize < 2, or any non-throughput error from the client
   */
  async writeAllInBatches(tableName, elements, batchSize) {
    if (!batchSize || batchSize < 2) {
      throw new Error('Batch size needs to be at least 2, was ' + batchSize);
    }
    let rval = 0;
    if (!!elements && elements.length > 0) {
      let batchItems = [];
      elements.forEach((el) => {
        batchItems.push({
          PutRequest: {
            Item: el,
            // NOTE(review): ReturnConsumedCapacity/TableName here sit inside the
            // PutRequest entry; the BatchWrite API expects them at request level -
            // presumably ignored by the SDK. Verify against @aws-sdk/lib-dynamodb.
            ReturnConsumedCapacity: 'TOTAL',
            TableName: tableName,
          },
        });
      });
      Logger.debug('Processing %d batch items to %s', batchItems.length, tableName);
      while (batchItems.length > 0) {
        // Carve the next batch off the front of the pending list.
        const curBatch = batchItems.slice(0, Math.min(batchItems.length, batchSize));
        batchItems = batchItems.slice(curBatch.length);
        const params = {
          RequestItems: {},
          ReturnConsumedCapacity: 'TOTAL',
          ReturnItemCollectionMetrics: 'SIZE',
        };
        params.RequestItems[tableName] = curBatch;
        let tryCount = 1;
        let done = false;
        let batchResults = null;
        // Retry loop: at most 6 attempts (tryCount 1..6) per batch.
        while (!done && tryCount < 7) {
          try {
            batchResults = await this.awsDDB.send(new BatchWriteCommand(params));
          }
          catch (err) {
            if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
              // Treat a throughput error as "nothing processed" so the whole
              // batch goes through the unprocessed-retry path below.
              Logger.info('Caught ProvisionedThroughputExceededException - retrying delete');
              batchResults = { UnprocessedItems: params.RequestItems };
            }
            else {
              throw err;
            }
          }
          if (!!batchResults &&
            !!batchResults.UnprocessedItems &&
            !!batchResults.UnprocessedItems[tableName] &&
            batchResults.UnprocessedItems[tableName].length > 0) {
            // Some items were not written - back off and retry just those.
            const backoff = Math.pow(2, tryCount);
            Logger.warn('Found %d unprocessed items. Backing off %d seconds and trying again', batchResults.UnprocessedItems[tableName].length, backoff);
            await PromiseRatchet.wait(backoff * 1000);
            tryCount++;
            params.RequestItems[tableName] = batchResults.UnprocessedItems[tableName];
          }
          else {
            done = true;
          }
        }
        // After the retry loop, count only the items that actually made it in.
        if (!!batchResults &&
          !!batchResults.UnprocessedItems &&
          !!batchResults.UnprocessedItems[tableName] &&
          batchResults.UnprocessedItems[tableName].length > 0) {
          Logger.error('After 6 tries there were still %d unprocessed items', batchResults.UnprocessedItems[tableName].length);
          rval += curBatch.length - batchResults.UnprocessedItems[tableName].length;
          Logger.warn('FIX Unprocessed : %j', batchResults.UnprocessedItems);
        }
        else {
          rval += curBatch.length;
        }
      }
    }
    return rval;
  }
|
|
283
|
+
async fetchFullObjectsMatchingKeysOnlyIndexQuery(qry, keyNames, batchSize = 25) {
|
|
284
|
+
RequireRatchet.notNullOrUndefined(qry);
|
|
285
|
+
RequireRatchet.notNullOrUndefined(qry.TableName);
|
|
286
|
+
RequireRatchet.notNullOrUndefined(keyNames);
|
|
287
|
+
RequireRatchet.true(keyNames.length > 0);
|
|
288
|
+
const keyDataSrc = await this.fullyExecuteQuery(qry);
|
|
289
|
+
const keysOnly = DynamoRatchet.stripAllToKeysOnly(keyDataSrc, keyNames);
|
|
290
|
+
const rval = await this.fetchAllInBatches(qry.TableName, keysOnly, batchSize);
|
|
291
|
+
return rval;
|
|
292
|
+
}
|
|
293
|
+
async fetchAllInBatches(tableName, inKeys, batchSize) {
|
|
294
|
+
if (!batchSize || batchSize < 2 || batchSize > 100) {
|
|
295
|
+
throw new Error('Batch size needs to be at least 2 and no more than 100, was ' + batchSize);
|
|
296
|
+
}
|
|
297
|
+
let rval = [];
|
|
298
|
+
const batches = [];
|
|
299
|
+
let remain = Object.assign([], inKeys);
|
|
300
|
+
while (remain.length > 0) {
|
|
301
|
+
const curBatch = remain.slice(0, Math.min(remain.length, batchSize));
|
|
302
|
+
remain = remain.slice(curBatch.length);
|
|
303
|
+
const tableEntry = {};
|
|
304
|
+
tableEntry[tableName] = {
|
|
305
|
+
Keys: curBatch,
|
|
306
|
+
};
|
|
307
|
+
const nextBatch = {
|
|
308
|
+
RequestItems: tableEntry,
|
|
309
|
+
ReturnConsumedCapacity: 'TOTAL',
|
|
310
|
+
};
|
|
311
|
+
batches.push(nextBatch);
|
|
312
|
+
}
|
|
313
|
+
Logger.debug('Created %d batches', batches.length);
|
|
314
|
+
for (let i = 0; i < batches.length; i++) {
|
|
315
|
+
if (batches.length > 1) {
|
|
316
|
+
Logger.info('Processing batch %d of %d', i + 1, batches.length);
|
|
317
|
+
}
|
|
318
|
+
const input = batches[i];
|
|
319
|
+
let tryCount = 1;
|
|
320
|
+
do {
|
|
321
|
+
Logger.silly('Pulling %j', input);
|
|
322
|
+
const res = await this.awsDDB.send(new BatchGetCommand(input));
|
|
323
|
+
rval = rval.concat(res.Responses[tableName]);
|
|
324
|
+
if (!!res.UnprocessedKeys && !!res.UnprocessedKeys[tableName] && res.UnprocessedKeys[tableName].Keys.length > 0 && tryCount < 15) {
|
|
325
|
+
Logger.silly('Found %d unprocessed, waiting', res.UnprocessedKeys[tableName].Keys);
|
|
326
|
+
await PromiseRatchet.wait(Math.pow(2, tryCount) * 1000);
|
|
327
|
+
tryCount++;
|
|
328
|
+
}
|
|
329
|
+
input.RequestItems = res.UnprocessedKeys;
|
|
330
|
+
} while (!input.RequestItems && input.RequestItems[tableName].Keys.length > 0);
|
|
331
|
+
}
|
|
332
|
+
return rval;
|
|
333
|
+
}
|
|
334
|
+
/**
 * Deletes every supplied key from a DynamoDB table, chunking the work into
 * BatchWriteCommand calls of at most batchSize DeleteRequest entries.
 *
 * Unprocessed items from each batch are retried with exponential backoff
 * (2^tryCount seconds, at most 6 attempts per batch) before being abandoned.
 *
 * @param tableName DynamoDB table to delete from
 * @param keys array of key objects, one per row to remove (null/empty is a no-op)
 * @param batchSize number of deletes per BatchWriteCommand; must be >= 2
 *        (NOTE(review): DynamoDB caps BatchWriteItem at 25 requests — presumably
 *        callers pass <= 25; confirm, since this method does not enforce it)
 * @returns number of rows successfully deleted
 * @throws Error if batchSize is missing or < 2
 */
async deleteAllInBatches(tableName, keys, batchSize) {
    if (!batchSize || batchSize < 2) {
        throw new Error('Batch size needs to be at least 2, was ' + batchSize);
    }
    let rval = 0;
    if (!!keys && keys.length > 0) {
        // Wrap every key in a DeleteRequest entry.
        // NOTE(review): ReturnConsumedCapacity/TableName are not fields of the
        // BatchWriteItem DeleteRequest shape — presumably ignored downstream; verify.
        let batchItems = [];
        keys.forEach((el) => {
            batchItems.push({
                DeleteRequest: {
                    Key: el,
                    ReturnConsumedCapacity: 'TOTAL',
                    TableName: tableName,
                },
            });
        });
        Logger.debug('Processing %d DeleteBatch items to %s', batchItems.length, tableName);
        while (batchItems.length > 0) {
            // Carve off the next batch and shrink the remainder
            const curBatch = batchItems.slice(0, Math.min(batchItems.length, batchSize));
            batchItems = batchItems.slice(curBatch.length);
            const params = {
                RequestItems: {},
                ReturnConsumedCapacity: 'TOTAL',
                ReturnItemCollectionMetrics: 'SIZE',
            };
            params.RequestItems[tableName] = curBatch;
            let tryCount = 1;
            let done = false;
            let batchResults = null;
            // Retry loop: re-send whatever DynamoDB reports as unprocessed,
            // backing off exponentially; gives up after 6 tries.
            while (!done && tryCount < 7) {
                try {
                    batchResults = await this.awsDDB.send(new BatchWriteCommand(params));
                }
                catch (err) {
                    if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
                        // Treat throughput exhaustion as "nothing processed" so the
                        // entire current request is retried on the next pass
                        Logger.info('Caught ProvisionedThroughputExceededException - retrying delete');
                        batchResults = { UnprocessedItems: params.RequestItems };
                    }
                    else {
                        throw err;
                    }
                }
                if (!!batchResults &&
                    !!batchResults.UnprocessedItems &&
                    !!batchResults.UnprocessedItems[tableName] &&
                    batchResults.UnprocessedItems[tableName].length > 0) {
                    const backoff = Math.pow(2, tryCount);
                    Logger.warn('Found %d unprocessed items. Backing off %d seconds and trying again', batchResults.UnprocessedItems[tableName].length, backoff);
                    await PromiseRatchet.wait(backoff * 1000);
                    tryCount++;
                    // Only re-send the leftovers, not the whole original batch
                    params.RequestItems[tableName] = batchResults.UnprocessedItems[tableName];
                }
                else {
                    done = true;
                }
            }
            // After exhausting retries, count only what actually got deleted
            if (!!batchResults &&
                !!batchResults.UnprocessedItems &&
                !!batchResults.UnprocessedItems[tableName] &&
                batchResults.UnprocessedItems[tableName].length > 0) {
                Logger.error('After 6 tries there were still %d unprocessed items', batchResults.UnprocessedItems[tableName].length);
                rval += curBatch.length - batchResults.UnprocessedItems[tableName].length;
                Logger.warn('FIX Unprocessed : %j', batchResults.UnprocessedItems);
            }
            else {
                rval += curBatch.length;
            }
            Logger.debug('%d Remain, DeleteBatch Results : %j', batchItems.length, batchResults);
        }
    }
    return rval;
}
|
|
406
|
+
async simplePut(tableName, value, autoRetryCount = 3) {
|
|
407
|
+
let rval = null;
|
|
408
|
+
let currentTry = 0;
|
|
409
|
+
const params = {
|
|
410
|
+
Item: value,
|
|
411
|
+
ReturnConsumedCapacity: 'TOTAL',
|
|
412
|
+
TableName: tableName,
|
|
413
|
+
};
|
|
414
|
+
while (!rval && currentTry < autoRetryCount) {
|
|
415
|
+
try {
|
|
416
|
+
rval = await this.awsDDB.send(new PutCommand(params));
|
|
417
|
+
}
|
|
418
|
+
catch (err) {
|
|
419
|
+
if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
|
|
420
|
+
const wait = Math.pow(2, currentTry) * 1000;
|
|
421
|
+
Logger.debug('Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
|
|
422
|
+
await PromiseRatchet.wait(wait);
|
|
423
|
+
currentTry++;
|
|
424
|
+
}
|
|
425
|
+
else {
|
|
426
|
+
throw err;
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
}
|
|
430
|
+
if (!rval) {
|
|
431
|
+
Logger.warn('Unable to write %j to DDB after %d tries, giving up', params, autoRetryCount);
|
|
432
|
+
}
|
|
433
|
+
return rval;
|
|
434
|
+
}
|
|
435
|
+
async simplePutOnlyIfFieldIsNullOrUndefined(tableName, value, fieldName) {
|
|
436
|
+
let rval = false;
|
|
437
|
+
const params = {
|
|
438
|
+
Item: value,
|
|
439
|
+
ReturnConsumedCapacity: 'TOTAL',
|
|
440
|
+
ConditionExpression: 'attribute_not_exists(#fieldName) OR #fieldName = :null ',
|
|
441
|
+
ExpressionAttributeNames: {
|
|
442
|
+
'#fieldName': fieldName,
|
|
443
|
+
},
|
|
444
|
+
ExpressionAttributeValues: {
|
|
445
|
+
':null': null,
|
|
446
|
+
},
|
|
447
|
+
TableName: tableName,
|
|
448
|
+
};
|
|
449
|
+
try {
|
|
450
|
+
const wrote = await this.awsDDB.send(new PutCommand(params));
|
|
451
|
+
Logger.silly('Wrote : %j', wrote);
|
|
452
|
+
rval = true;
|
|
453
|
+
}
|
|
454
|
+
catch (err) {
|
|
455
|
+
if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
|
|
456
|
+
Logger.debug('Exceeded write throughput for %j : (Waiting 2000 ms)', params);
|
|
457
|
+
await PromiseRatchet.wait(2000);
|
|
458
|
+
rval = await this.simplePutOnlyIfFieldIsNullOrUndefined(tableName, value, fieldName);
|
|
459
|
+
}
|
|
460
|
+
else if (err && err instanceof ConditionalCheckFailedException) {
|
|
461
|
+
Logger.debug('Failed to write %j due to null field failure');
|
|
462
|
+
rval = false;
|
|
463
|
+
}
|
|
464
|
+
else {
|
|
465
|
+
throw err;
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
return rval;
|
|
469
|
+
}
|
|
470
|
+
/**
 * Puts an item whose key(s) must not collide with an existing row; on a
 * collision (ConditionalCheckFailedException) the caller-supplied
 * adjustFunction mutates a copy of the item (e.g. generates a new key) and the
 * put is retried. Throughput exhaustion is retried separately with
 * exponential backoff.
 *
 * @param tableName DynamoDB table to write to
 * @param value item to put
 * @param keyNames 1 or 2 attribute names that form the uniqueness constraint
 * @param adjustFunction (item) => item, called on each collision to produce a new candidate
 * @param maxAdjusts maximum collision adjustments allowed (null = unlimited)
 * @param autoRetryCount maximum throughput-throttle retries (default 3)
 * @returns the item as finally written (possibly adjusted), or null if it gave up
 * @throws any error other than throttling or a conditional-check failure
 */
async simplePutWithCollisionAvoidance(tableName, value, keyNames, adjustFunction, maxAdjusts = null, autoRetryCount = 3) {
    RequireRatchet.true(keyNames && keyNames.length > 0 && keyNames.length < 3, 'You must pass 1 or 2 key names');
    let pio = null;
    let currentTry = 0;
    // Condition: succeed only when the stored key attrs differ from this item's
    // (i.e. no row with the same key values already exists)
    const attrNames = {
        '#key0': keyNames[0],
    };
    const attrValues = {
        ':key0': value[keyNames[0]],
    };
    let condExp = '#key0 <> :key0';
    if (keyNames.length > 1) {
        condExp += ' AND #key1 <> :key1';
        attrNames['#key1'] = keyNames[1];
        attrValues[':key1'] = value[keyNames[1]];
    }
    const params = {
        Item: value,
        ReturnConsumedCapacity: 'TOTAL',
        ConditionExpression: condExp,
        ExpressionAttributeNames: attrNames,
        ExpressionAttributeValues: attrValues,
        TableName: tableName,
    };
    let adjustCount = 0;
    // Loop until written, out of throttle retries, or out of adjustment budget
    while (!pio && currentTry < autoRetryCount && (!maxAdjusts || adjustCount < maxAdjusts)) {
        try {
            pio = await this.awsDDB.send(new PutCommand(params));
        }
        catch (err) {
            if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
                currentTry++;
                const wait = Math.pow(2, currentTry) * 1000;
                Logger.debug('Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
                await PromiseRatchet.wait(wait);
            }
            else if (err && err instanceof ConditionalCheckFailedException) {
                // Collision: let the caller's function pick a new candidate item,
                // then refresh the condition values to match it
                let newValue = Object.assign({}, params.Item);
                Logger.info('Failed to write %j due to collision - adjusting and retrying', newValue);
                newValue = adjustFunction(newValue);
                params.Item = newValue;
                params.ExpressionAttributeValues[':key0'] = newValue[keyNames[0]];
                if (keyNames.length > 1) {
                    params.ExpressionAttributeValues[':key1'] = newValue[keyNames[1]];
                }
                adjustCount++;
            }
            else {
                throw err;
            }
        }
    }
    if (pio && adjustCount > 0) {
        Logger.info('After adjustment, wrote %j as %j', value, params.Item);
    }
    if (!pio) {
        Logger.warn('Unable to write %j to DDB after %d provision tries and %d adjusts, giving up', params, currentTry, adjustCount);
    }
    return pio ? params.Item : null;
}
|
|
530
|
+
async simpleGet(tableName, keys, autoRetryCount = 3) {
|
|
531
|
+
let holder = null;
|
|
532
|
+
let currentTry = 0;
|
|
533
|
+
const params = {
|
|
534
|
+
TableName: tableName,
|
|
535
|
+
Key: keys,
|
|
536
|
+
};
|
|
537
|
+
while (!holder && currentTry < autoRetryCount) {
|
|
538
|
+
try {
|
|
539
|
+
holder = await this.awsDDB.send(new GetCommand(params));
|
|
540
|
+
}
|
|
541
|
+
catch (err) {
|
|
542
|
+
if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
|
|
543
|
+
const wait = Math.pow(2, currentTry) * 1000;
|
|
544
|
+
Logger.debug('Exceeded read throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
|
|
545
|
+
await PromiseRatchet.wait(wait);
|
|
546
|
+
currentTry++;
|
|
547
|
+
}
|
|
548
|
+
else {
|
|
549
|
+
throw err;
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
}
|
|
553
|
+
if (!holder) {
|
|
554
|
+
Logger.warn('Unable to read %j from DDB after %d tries, giving up', params, autoRetryCount);
|
|
555
|
+
}
|
|
556
|
+
const rval = !!holder && !!holder.Item ? Object.assign({}, holder.Item) : null;
|
|
557
|
+
return rval;
|
|
558
|
+
}
|
|
559
|
+
static objectIsErrorWithProvisionedThroughputExceededExceptionCode(err) {
|
|
560
|
+
return !!err && err instanceof ProvisionedThroughputExceededException;
|
|
561
|
+
}
|
|
562
|
+
/**
 * Atomically decrements a counter attribute on a row and returns the updated
 * row — a "take a token" pattern. The update only succeeds while the counter
 * is > 0; when the conditional check fails the row is treated as unavailable
 * and null is returned. Optionally deletes the row once the counter hits 0.
 *
 * @param tableName DynamoDB table to update
 * @param keys key object identifying the row
 * @param counterAttributeName numeric attribute to decrement by 1
 * @param deleteOnZero when true, the row is deleted after the counter reaches exactly 0
 * @param autoRetryCount maximum throttle retries (default 3)
 * @returns a shallow copy of the post-update attributes, or null on failure/exhaustion
 */
async simpleGetWithCounterDecrement(tableName, keys, counterAttributeName, deleteOnZero, autoRetryCount = 3) {
    let holder = null;
    let currentTry = 0;
    const params = {
        TableName: tableName,
        Key: keys,
        // NOTE(review): no whitespace around '-' in '#counter-:decVal' — presumably
        // accepted by DynamoDB's expression parser since this shipped; verify
        UpdateExpression: 'set #counter = #counter-:decVal',
        ExpressionAttributeNames: {
            '#counter': counterAttributeName,
        },
        ExpressionAttributeValues: {
            ':decVal': 1,
            ':minVal': 0,
        },
        // Refuse the decrement once the counter is exhausted
        ConditionExpression: '#counter > :minVal',
        ReturnValues: 'ALL_NEW',
    };
    let updateFailed = false;
    while (!holder && currentTry < autoRetryCount && !updateFailed) {
        try {
            holder = await this.awsDDB.send(new UpdateCommand(params));
        }
        catch (err) {
            if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
                const wait = Math.pow(2, currentTry) * 1000;
                Logger.debug('Exceeded update throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
                await PromiseRatchet.wait(wait);
                currentTry++;
            }
            else if (!!err && err instanceof ConditionalCheckFailedException) {
                // Counter already at/below 0 (or row missing) — not retryable
                Logger.info('Cannot fetch requested row (%j) - the update check failed', keys);
                updateFailed = true;
            }
            else {
                throw err;
            }
        }
    }
    if (!holder && !updateFailed) {
        Logger.warn('Unable to update %j from DDB after %d tries, giving up', params, autoRetryCount);
    }
    const rval = !!holder && !!holder.Attributes ? Object.assign({}, holder.Attributes) : null;
    if (deleteOnZero && rval && rval[counterAttributeName] === 0) {
        Logger.info('Delete on 0 specified, removing');
        await this.simpleDelete(tableName, keys);
    }
    return rval;
}
|
|
610
|
+
async simpleDelete(tableName, keys) {
|
|
611
|
+
const params = {
|
|
612
|
+
TableName: tableName,
|
|
613
|
+
Key: keys,
|
|
614
|
+
};
|
|
615
|
+
const holder = await this.awsDDB.send(new DeleteCommand(params));
|
|
616
|
+
return holder;
|
|
617
|
+
}
|
|
618
|
+
async atomicCounter(tableName, keys, counterFieldName, increment = 1) {
|
|
619
|
+
const update = {
|
|
620
|
+
TableName: tableName,
|
|
621
|
+
Key: keys,
|
|
622
|
+
UpdateExpression: 'SET #counterFieldName = #counterFieldName + :inc',
|
|
623
|
+
ExpressionAttributeNames: {
|
|
624
|
+
'#counterFieldName': counterFieldName,
|
|
625
|
+
},
|
|
626
|
+
ExpressionAttributeValues: {
|
|
627
|
+
':inc': increment,
|
|
628
|
+
},
|
|
629
|
+
ReturnValues: 'UPDATED_NEW',
|
|
630
|
+
};
|
|
631
|
+
const ui = await this.awsDDB.send(new UpdateCommand(update));
|
|
632
|
+
const rval = NumberRatchet.safeNumber(ui.Attributes[counterFieldName]);
|
|
633
|
+
return rval;
|
|
634
|
+
}
|
|
635
|
+
static cleanObject(ob) {
|
|
636
|
+
if (ob) {
|
|
637
|
+
const rem = [];
|
|
638
|
+
Object.keys(ob).forEach((k) => {
|
|
639
|
+
const v = ob[k];
|
|
640
|
+
if (v === '') {
|
|
641
|
+
rem.push(k);
|
|
642
|
+
}
|
|
643
|
+
else if (v instanceof Object) {
|
|
644
|
+
DynamoRatchet.cleanObject(v);
|
|
645
|
+
}
|
|
646
|
+
});
|
|
647
|
+
Logger.silly('Removing keys : %j', rem);
|
|
648
|
+
rem.forEach((k) => {
|
|
649
|
+
delete ob[k];
|
|
650
|
+
});
|
|
651
|
+
}
|
|
652
|
+
}
|
|
653
|
+
static stripToKeysOnly(input, keysNames) {
|
|
654
|
+
let rval = null;
|
|
655
|
+
if (!!input && !!keysNames && keysNames.length > 0) {
|
|
656
|
+
rval = {};
|
|
657
|
+
keysNames.forEach((k) => {
|
|
658
|
+
if (!input[k]) {
|
|
659
|
+
ErrorRatchet.throwFormattedErr('Failed key extraction on %j - missing %s', input, k);
|
|
660
|
+
}
|
|
661
|
+
rval[k] = input[k];
|
|
662
|
+
});
|
|
663
|
+
}
|
|
664
|
+
return rval;
|
|
665
|
+
}
|
|
666
|
+
static stripAllToKeysOnly(input, keys) {
|
|
667
|
+
const rval = input.map((i) => DynamoRatchet.stripToKeysOnly(i, keys));
|
|
668
|
+
return rval;
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
//# sourceMappingURL=dynamo-ratchet.js.map
|