qstd 0.2.27 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +52 -0
- package/dist/client/index.cjs +22 -5
- package/dist/client/index.js +22 -5
- package/dist/server/aws/ddb/domain.d.ts +257 -0
- package/dist/server/aws/ddb/domain.d.ts.map +1 -0
- package/dist/server/aws/ddb/fns.d.ts +25 -0
- package/dist/server/aws/ddb/fns.d.ts.map +1 -0
- package/dist/server/aws/ddb/index.d.ts +4 -0
- package/dist/server/aws/ddb/index.d.ts.map +1 -0
- package/dist/server/aws/ddb/literals.d.ts +30 -0
- package/dist/server/aws/ddb/literals.d.ts.map +1 -0
- package/dist/server/aws/ddb/types.d.ts +216 -0
- package/dist/server/aws/ddb/types.d.ts.map +1 -0
- package/dist/server/index.cjs +623 -7
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +623 -8
- package/dist/shared/log.d.ts +0 -10
- package/dist/shared/log.d.ts.map +1 -1
- package/dist/shared/time.d.ts +36 -0
- package/dist/shared/time.d.ts.map +1 -1
- package/package.json +3 -1
package/dist/server/index.cjs
CHANGED
|
@@ -2,6 +2,8 @@
|
|
|
2
2
|
|
|
3
3
|
var dateFns = require('date-fns');
|
|
4
4
|
var fs = require('fs');
|
|
5
|
+
var libDynamodb = require('@aws-sdk/lib-dynamodb');
|
|
6
|
+
var clientDynamodb = require('@aws-sdk/client-dynamodb');
|
|
5
7
|
|
|
6
8
|
var __defProp = Object.defineProperty;
|
|
7
9
|
var __export = (target, all) => {
|
|
@@ -261,6 +263,7 @@ __export(time_exports, {
|
|
|
261
263
|
formatThreadDateRange: () => formatThreadDateRange,
|
|
262
264
|
now: () => now,
|
|
263
265
|
sleep: () => sleep,
|
|
266
|
+
startTimer: () => startTimer,
|
|
264
267
|
toMs: () => toMs
|
|
265
268
|
});
|
|
266
269
|
var formatDuration = (ms, options = {}) => {
|
|
@@ -455,6 +458,27 @@ var toMs = (value, unit = "seconds") => {
|
|
|
455
458
|
};
|
|
456
459
|
return value * multipliers[unit];
|
|
457
460
|
};
|
|
461
|
+
// Start a wall-clock timer. Returns { stop, elapsed } (aliases of the same
// reader). With format "ms" (default) the reader returns a number of
// milliseconds; with format "compact" it returns a string like "1h2m3s4ms"
// (zero components omitted; "0ms" when nothing has elapsed).
function startTimer(options = {}) {
  const startedAt = Date.now();
  const format = options.format ?? "ms";
  const read = () => {
    const delta = Date.now() - startedAt;
    if (format !== "compact") return delta;
    const hours = Math.floor(delta / 36e5);
    const minutes = Math.floor(delta % 36e5 / 6e4);
    const seconds = Math.floor(delta % 6e4 / 1e3);
    const millis = delta % 1e3;
    const parts = [];
    if (hours > 0) parts.push(`${hours}h`);
    if (minutes > 0) parts.push(`${minutes}m`);
    if (seconds > 0) parts.push(`${seconds}s`);
    // Always emit at least the millisecond component so the result is non-empty.
    if (millis > 0 || parts.length === 0) parts.push(`${millis}ms`);
    return parts.join("");
  };
  return { stop: read, elapsed: read };
}
|
|
458
482
|
|
|
459
483
|
// src/shared/flow.ts
|
|
460
484
|
var flow_exports = {};
|
|
@@ -552,14 +576,9 @@ __export(log_exports, {
|
|
|
552
576
|
info: () => info,
|
|
553
577
|
label: () => label,
|
|
554
578
|
log: () => log,
|
|
555
|
-
startTimer: () => startTimer,
|
|
556
579
|
warn: () => warn
|
|
557
580
|
});
|
|
558
581
|
// Pretty-print any JSON-serializable value with 2-space indentation.
var stringify = (value) => {
  return JSON.stringify(value, null, 2);
};
|
|
559
|
-
var startTimer = () => {
|
|
560
|
-
const start = Date.now();
|
|
561
|
-
return () => Date.now() - start;
|
|
562
|
-
};
|
|
563
582
|
// console.log every argument rendered as 2-space-indented JSON.
var log = (...values) => {
  const rendered = values.map((value) => JSON.stringify(value, null, 2));
  console.log(...rendered);
};
|
|
@@ -607,8 +626,8 @@ var readFile = async (filePath, encoding = "utf-8") => {
|
|
|
607
626
|
// Drain an async-iterable of Buffer chunks and write the concatenated
// bytes to `filepath`. A nullish `buffer` is a silent no-op.
var writeBufferToFile = async (filepath, buffer) => {
  if (!buffer) return;
  const collected = [];
  for await (const piece of buffer) {
    collected.push(piece);
  }
  await fs.promises.writeFile(filepath, Buffer.concat(collected));
};
|
|
@@ -616,6 +635,603 @@ var writeFile = async (filepath, content) => {
|
|
|
616
635
|
return fs.promises.writeFile(filepath, content);
|
|
617
636
|
};
|
|
618
637
|
|
|
638
|
+
// src/server/aws/ddb/index.ts
// Public surface of the DDB helper namespace; exposed below as `exports.DDB`.
// `__export` installs lazy getters, so each name resolves to the function
// defined later in this bundle at call time.
var ddb_exports = {};
__export(ddb_exports, {
  batchDelete: () => batchDelete,
  batchGet: () => batchGet,
  batchWrite: () => batchWrite,
  create: () => create2,
  deleteTable: () => deleteTable,
  find: () => find,
  lsi: () => lsi,
  lsi2: () => lsi2,
  lsiNormalized: () => lsiNormalized,
  lsiPhash: () => lsiPhash,
  lsiUsername: () => lsiUsername,
  remove: () => remove,
  save: () => save,
  tableExists: () => tableExists
});
|
|
656
|
+
|
|
657
|
+
// src/server/aws/ddb/fns.ts
|
|
658
|
+
// src/server/aws/ddb/fns.ts
// Validate props passed to `find`, throwing descriptive errors for
// unsupported combinations. Query mode (the default) requires `pk`;
// pass `scan: true` to run a Scan without one.
var validateFindProps = (props) => {
  if (!props.tableName) {
    throw new Error(`[ddb] "tableName" is required`);
  }
  if (props.limit && props.recursive) {
    throw new Error(`[ddb] "limit" and "recursive" cannot be used together`);
  }
  const scanning = "scan" in props && props.scan === true;
  if (scanning) return;
  const query = props;
  if (!query.pk) {
    throw new Error(`[ddb] [find] "pk" is required for Query mode. Use scan: true to scan without pk.`);
  }
  // A non-default sk attribute only makes sense when querying an index.
  const hasCustomSk = query.sk && "key" in query.sk && query.sk.key && query.sk.key !== "sk";
  if (hasCustomSk && !query.indexName) {
    throw new Error(
      `[ddb] [find] you provided a custom sk but no indexName. If this is a mistake, change this error to a warn.`
    );
  }
};
|
|
678
|
+
// Build a DynamoDB KeyConditionExpression for `pk` plus an optional `sk`,
// registering placeholder names/values into the shared `names`/`values`
// maps. Supports operator-style sk ({ op, value }) and the legacy
// { value } / { valueBeginsWith } shapes.
var buildKeyConditionExpression = (pk, sk, names, values) => {
  names["#pk"] = pk.key ?? "pk";
  values[":pk"] = pk.value;
  if (!sk) return "#pk = :pk";
  names["#sk"] = sk.key ?? "sk";
  if ("op" in sk && sk.op) {
    if (sk.op === "between") {
      const [from, to] = sk.value;
      values[":skFrom"] = from;
      values[":skTo"] = to;
      return `#pk = :pk AND #sk BETWEEN :skFrom AND :skTo`;
    }
    if (sk.op === "begins_with") {
      values[":sk"] = sk.value;
      return `#pk = :pk AND begins_with(#sk, :sk)`;
    }
    if (["=", ">=", ">", "<=", "<"].includes(sk.op)) {
      values[":sk"] = sk.value;
      return `#pk = :pk AND #sk ${sk.op} :sk`;
    }
    throw new Error(`Unsupported SK op: ${JSON.stringify(sk, null, 2)}`);
  }
  // Legacy shapes without an explicit operator.
  if ("value" in sk && sk.value !== void 0) {
    values[":sk"] = sk.value;
    return `#pk = :pk AND #sk = :sk`;
  }
  if ("valueBeginsWith" in sk && sk.valueBeginsWith !== void 0) {
    values[":sk"] = sk.valueBeginsWith;
    return `#pk = :pk AND begins_with(#sk, :sk)`;
  }
  throw new Error(
    `Invalid SK condition: expected 'op' or legacy 'value'/'valueBeginsWith'`
  );
};
|
|
719
|
+
// Build a FilterExpression string from a list of { key, op, value }
// filters, registering #fN / :fN placeholders into `names`/`values`.
// Returns undefined when there is nothing to filter. Filters with an
// unrecognized op register their attribute name but contribute no
// fragment (matching the original switch fall-through).
var buildFilterExpression = (filters, names, values) => {
  if (!filters || filters.length === 0) return void 0;
  const comparisonOps = ["=", "<>", ">", ">=", "<", "<="];
  const fragments = [];
  filters.forEach((filter, i) => {
    const nameToken = `#f${i}`;
    names[nameToken] = filter.key;
    if (filter.op === "attribute_exists" || filter.op === "attribute_not_exists") {
      fragments.push(`${filter.op}(${nameToken})`);
      return;
    }
    if (filter.op === "between") {
      const [lo, hi] = filter.value;
      values[`:f${i}_lo`] = lo;
      values[`:f${i}_hi`] = hi;
      fragments.push(`${nameToken} BETWEEN :f${i}_lo AND :f${i}_hi`);
      return;
    }
    if (filter.op === "in") {
      const candidates = filter.value;
      if (!candidates.length) {
        throw new Error(
          `'in' filter for ${filter.key} requires at least one value`
        );
      }
      const placeholders = candidates.map((candidate, j) => {
        const ph = `:f${i}_${j}`;
        values[ph] = candidate;
        return ph;
      });
      fragments.push(`${nameToken} IN (${placeholders.join(", ")})`);
      return;
    }
    if (filter.op === "contains" || filter.op === "begins_with") {
      values[`:f${i}`] = filter.value;
      fragments.push(`${filter.op}(${nameToken}, :f${i})`);
      return;
    }
    if (comparisonOps.includes(filter.op)) {
      values[`:f${i}`] = filter.value;
      fragments.push(`${nameToken} ${filter.op} :f${i}`);
    }
  });
  return fragments.join(" AND ");
};
|
|
778
|
+
// Register each projected attribute as a #projN name placeholder and
// return the comma-separated ProjectionExpression string.
var buildProjectionExpression = (projection, names) => {
  const tokens = [];
  projection.forEach((key, idx) => {
    const token = `#proj${idx}`;
    names[token] = key;
    tokens.push(token);
  });
  return tokens.join(", ");
};
|
|
786
|
+
// Resolve after `ms` milliseconds (bundle-local delay helper).
var sleep3 = (ms) => {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
};
|
|
787
|
+
// Exponential backoff in ms: 1s, 2s, 4s, ... capped at 10s.
var backoffDelay = (attempt) => Math.min(1e3 * 2 ** attempt, 1e4);
|
|
790
|
+
// Split `arr` into consecutive slices of at most `size` elements.
var chunk2 = (arr, size) => {
  const out = [];
  for (let i = 0; i < arr.length; i += size) {
    out.push(arr.slice(i, i + size));
  }
  return out;
};
|
|
796
|
+
|
|
797
|
+
// src/server/aws/ddb/domain.ts
|
|
798
|
+
// src/server/aws/ddb/domain.ts
// Create a DynamoDBDocumentClient, optionally bound to a default table.
// Generalized: `props.region` may now override the AWS region; it defaults
// to "us-east-1", matching the previously hard-coded value, so existing
// callers are unaffected. Explicit `credentials` are forwarded when given;
// otherwise the SDK's default provider chain applies.
var create2 = (props) => {
  const tableName = props?.tableName;
  const credentials = props?.credentials;
  const region = props?.region ?? "us-east-1";
  const client = libDynamodb.DynamoDBDocumentClient.from(
    new clientDynamodb.DynamoDBClient({
      ...credentials && { credentials },
      region
    }),
    {
      marshallOptions: {
        // Whether to automatically convert empty strings, blobs, and sets to `null`. false, by default.
        convertEmptyValues: false,
        // Whether to remove undefined values while marshalling. false, by default.
        removeUndefinedValues: false,
        // Whether to convert typeof object to map attribute. false, by default.
        convertClassInstanceToMap: false
      },
      unmarshallOptions: {
        // Whether to return numbers as a string instead of converting them to native JavaScript numbers. false, by default.
        wrapNumbers: false
      }
    }
  );
  return { client, tableName };
};
|
|
827
|
+
// Query (default) or Scan (scan: true) a table with automatic placeholder
// wiring and optional pagination.
// - `recursive`: follow LastEvaluatedKey across pages; may be a callback
//   (page, pageCount, totalItems) => boolean to decide whether to continue.
// - `maxPages` / `maxItems`: pagination caps.
// - `raw`: return { items, count, scannedCount, lastEvaluatedKey };
//   `first`: return only the first item; both: a { item, ... } summary.
// BUGFIX: the aggregate `scannedCount` previously duplicated the returned
// item count; it now sums DynamoDB's per-page ScannedCount (falling back
// to the page's item count when the service omits it).
async function find(ddb, props) {
  try {
    const TableName = props.tableName ?? ddb.tableName;
    const { filters, projection, debug } = props;
    const isScan = "scan" in props && props.scan === true;
    validateFindProps(props);
    const names = {};
    const values = {};
    const KeyConditionExpression = isScan ? void 0 : buildKeyConditionExpression(
      props.pk,
      props.sk,
      names,
      values
    );
    const FilterExpression = filters ? buildFilterExpression(filters, names, values) : void 0;
    if (debug) {
      console.log(`[debug] [ddb] [find] input:`, {
        isScan,
        filters,
        FilterExpression,
        names,
        values
      });
    }
    const ProjectionExpression = projection ? buildProjectionExpression(projection, names) : void 0;
    const all = [];
    let startKey = props.startKey;
    let pageCount = 0;
    let totalItems = 0;
    let totalScanned = 0;
    do {
      if (props.maxPages && pageCount >= props.maxPages) break;
      // Scan omits empty name/value maps (the service rejects them);
      // Query always has at least the pk placeholders.
      const command = isScan ? new libDynamodb.ScanCommand({
        TableName,
        IndexName: props.indexName,
        FilterExpression,
        Limit: props.limit,
        ProjectionExpression,
        ExclusiveStartKey: startKey,
        ConsistentRead: props.strong,
        ...Object.keys(names).length > 0 && {
          ExpressionAttributeNames: names
        },
        ...Object.keys(values).length > 0 && {
          ExpressionAttributeValues: values
        }
      }) : new libDynamodb.QueryCommand({
        TableName,
        IndexName: props.indexName,
        KeyConditionExpression,
        FilterExpression,
        Limit: props.limit,
        ProjectionExpression,
        ExclusiveStartKey: startKey,
        ConsistentRead: props.strong,
        ExpressionAttributeNames: names,
        ExpressionAttributeValues: values,
        ScanIndexForward: props.sort === "desc" ? false : true
      });
      const result = await ddb.client.send(command);
      const pageItems = result.Items ?? [];
      all.push(...pageItems);
      totalItems += pageItems.length;
      totalScanned += result.ScannedCount ?? pageItems.length;
      pageCount++;
      startKey = result.LastEvaluatedKey;
      if (props.recursive) {
        if (props.maxItems && totalItems >= props.maxItems) break;
        if (!startKey) break;
        if (typeof props.recursive === "function") {
          const page = {
            lastEvaluatedKey: result.LastEvaluatedKey,
            scannedCount: result.ScannedCount ?? 0,
            count: result.Count ?? 0,
            items: pageItems
          };
          const shouldContinue = props.recursive(page, pageCount, totalItems);
          if (!shouldContinue) break;
        }
      } else {
        break;
      }
    } while (true);
    const rawResponse = {
      lastEvaluatedKey: startKey,
      scannedCount: totalScanned,
      count: totalItems,
      items: all
    };
    if (props.raw && props.first) {
      return {
        count: rawResponse.count,
        item: rawResponse.items[0],
        scannedCount: rawResponse.scannedCount
      };
    }
    if (props.raw) return rawResponse;
    if (props.first) return rawResponse.items[0];
    return rawResponse.items;
  } catch (error2) {
    const err = error2;
    console.log(`[error] [ddb] [find] failed with ${err.message}. Input:`);
    console.dir(props, { depth: 100 });
    throw err;
  }
}
|
|
931
|
+
// Delete one item by its primary key; resolves with the raw SDK response.
var remove = async (ddb, props) => {
  const command = new libDynamodb.DeleteCommand({
    Key: props.key,
    TableName: props.tableName ?? ddb.tableName
  });
  return ddb.client.send(command);
};
|
|
936
|
+
// Put (create or overwrite) one item; resolves with the raw SDK response.
var save = async (ddb, props) => {
  const command = new libDynamodb.PutCommand({
    Item: props.item,
    TableName: props.tableName ?? ddb.tableName
  });
  return ddb.client.send(command);
};
|
|
944
|
+
// Fetch many items by key with automatic chunking, retry of
// UnprocessedKeys, and exponential backoff.
// Returns { items, count, missing, consumedCapacity } where `missing` is
// how many requested keys produced no item (not found OR exhausted
// retries) and `consumedCapacity` is undefined when the service reported
// none.
async function batchGet(ddb, props) {
  const { keys, maxRetries = 3, strong = false } = props;
  const TableName = props.tableName ?? ddb.tableName;
  // 100 = BatchGetItem's per-request key limit.
  const chunks = chunk2(keys, 100);
  const allItems = [];
  const requestedCount = keys.length;
  let totalConsumedCapacity = 0;
  for (const chunk3 of chunks) {
    let attempt = 0;
    let keysToFetch = chunk3;
    // attempt 0 is the initial request; retries back off exponentially.
    while (keysToFetch.length > 0 && attempt <= maxRetries) {
      if (attempt > 0) {
        const delay = backoffDelay(attempt);
        console.log(
          `[info] [ddb] [batchGet]: Retrying ${keysToFetch.length} keys (attempt ${attempt}/${maxRetries}) after ${delay}ms`
        );
        await sleep3(delay);
      }
      try {
        const command = new libDynamodb.BatchGetCommand({
          RequestItems: {
            [TableName]: { Keys: keysToFetch, ConsistentRead: strong }
          }
        });
        const result = await ddb.client.send(command);
        const items = result.Responses?.[TableName] ?? [];
        allItems.push(...items);
        if (result.ConsumedCapacity) {
          totalConsumedCapacity += result.ConsumedCapacity.reduce(
            (sum, cap) => sum + (cap.CapacityUnits ?? 0),
            0
          );
        }
        // Keys the service throttled out of this response; retry only those.
        const unprocessed = result.UnprocessedKeys?.[TableName]?.Keys;
        if (unprocessed && unprocessed.length > 0) {
          keysToFetch = unprocessed;
          attempt++;
        } else {
          keysToFetch = [];
        }
      } catch (error2) {
        console.log(
          `[error] [ddb] [batchGet]: Error fetching chunk (attempt ${attempt}/${maxRetries}):`,
          error2
        );
        // Thrown errors (unlike UnprocessedKeys) are fatal once retries run out.
        if (attempt >= maxRetries) throw error2;
        attempt++;
      }
    }
    if (keysToFetch.length > 0) {
      // UnprocessedKeys persisted past maxRetries: logged, reflected in `missing`.
      console.log(
        `[error] [ddb] [batchGet]: Failed to fetch ${keysToFetch.length} keys after ${maxRetries} retries`
      );
    }
  }
  const missing = requestedCount - allItems.length;
  return {
    missing,
    items: allItems,
    count: allItems.length,
    consumedCapacity: totalConsumedCapacity || void 0
  };
}
|
|
1007
|
+
// Write many items with automatic chunking, retry of UnprocessedItems,
// and exponential backoff. Items carrying a `cond` are written via
// TransactWriteItems (chunks of 100, its action limit); otherwise
// BatchWriteItem is used (chunks of 25, its request limit).
// Returns { processed, failed, consumedCapacity }.
// BUGFIX: when a retry was needed, `processedCount` was incremented by
// `chunk3.length - itemsToWrite.length` — re-counting items already
// credited by earlier attempts, so `processed` could exceed the number of
// items supplied. It now credits only the items that succeeded in the
// current attempt.
async function batchWrite(ddb, props) {
  const { maxRetries = 3 } = props;
  const TableName = props.tableName ?? ddb.tableName;
  const items = props.transform ? props.items.map(props.transform) : props.items;
  const hasConditions = items.some((x) => x.cond);
  const chunkSize = hasConditions ? 100 : 25;
  const chunks = chunk2(items, chunkSize);
  let processedCount = 0;
  let failedCount = 0;
  let totalConsumedCapacity = 0;
  for (const chunk3 of chunks) {
    let attempt = 0;
    let itemsToWrite = chunk3;
    while (itemsToWrite.length > 0 && attempt <= maxRetries) {
      if (attempt > 0) {
        const delay = backoffDelay(attempt);
        console.log(
          `[info] [ddb] [batchWrite]: Retrying ${itemsToWrite.length} items (attempt ${attempt}/${maxRetries}) after ${delay}ms`
        );
        await sleep3(delay);
      }
      try {
        if (hasConditions) {
          // Transactional path: all-or-nothing per chunk, supports ConditionExpression.
          const transactItems = itemsToWrite.map((x) => ({
            Put: {
              TableName,
              Item: x.item,
              ...x.cond && { ConditionExpression: x.cond }
            }
          }));
          const command = new libDynamodb.TransactWriteCommand({
            TransactItems: transactItems
          });
          const result = await ddb.client.send(command);
          if (result.ConsumedCapacity) {
            totalConsumedCapacity += result.ConsumedCapacity.reduce(
              (sum, cap) => sum + (cap.CapacityUnits ?? 0),
              0
            );
          }
          processedCount += itemsToWrite.length;
          itemsToWrite = [];
        } else {
          const writeRequests = itemsToWrite.map((x) => ({
            PutRequest: { Item: x.item }
          }));
          const command = new libDynamodb.BatchWriteCommand({
            RequestItems: { [TableName]: writeRequests }
          });
          const result = await ddb.client.send(command);
          if (result.ConsumedCapacity) {
            totalConsumedCapacity += result.ConsumedCapacity.reduce(
              (sum, cap) => sum + (cap.CapacityUnits ?? 0),
              0
            );
          }
          const unprocessed = result.UnprocessedItems?.[TableName];
          if (unprocessed && unprocessed.length > 0) {
            const remaining = unprocessed.map((req) => ({
              item: req.PutRequest.Item
            }));
            // Credit only this attempt's successes (fixes double counting).
            processedCount += itemsToWrite.length - remaining.length;
            itemsToWrite = remaining;
            attempt++;
          } else {
            processedCount += itemsToWrite.length;
            itemsToWrite = [];
          }
        }
      } catch (error2) {
        console.log(
          `[error] [ddb] [batchWrite]: Error writing chunk (attempt ${attempt}/${maxRetries}):`,
          error2
        );
        if (attempt >= maxRetries) {
          failedCount += itemsToWrite.length;
          itemsToWrite = [];
        } else {
          attempt++;
        }
      }
    }
    if (itemsToWrite.length > 0) {
      console.log(
        `[error] [ddb] [batchWrite]: Failed to write ${itemsToWrite.length} items after ${maxRetries} retries`
      );
      failedCount += itemsToWrite.length;
    }
  }
  return {
    failed: failedCount,
    processed: processedCount,
    consumedCapacity: totalConsumedCapacity || void 0
  };
}
|
|
1101
|
+
// Delete many items by key with automatic chunking, retry of
// UnprocessedItems, and exponential backoff. Keys carrying a `cond` go
// through TransactWriteItems (chunks of 100); otherwise BatchWriteItem
// DeleteRequests (chunks of 25). Returns { processed, failed,
// consumedCapacity }.
// BUGFIX: retry accounting mirrored batchWrite's bug — `processedCount`
// was incremented by `chunk3.length - itemsToDelete.length`, re-counting
// keys already credited by earlier attempts. It now credits only the
// keys that succeeded in the current attempt.
async function batchDelete(ddb, props) {
  const { maxRetries = 3 } = props;
  const TableName = props.tableName ?? ddb.tableName;
  const keys = props.transform ? props.keys.map(props.transform) : props.keys;
  const hasConditions = keys.some((x) => x.cond);
  const chunkSize = hasConditions ? 100 : 25;
  const chunks = chunk2(keys, chunkSize);
  let processedCount = 0;
  let failedCount = 0;
  let totalConsumedCapacity = 0;
  for (const chunk3 of chunks) {
    let attempt = 0;
    let itemsToDelete = chunk3;
    while (itemsToDelete.length > 0 && attempt <= maxRetries) {
      if (attempt > 0) {
        const delay = backoffDelay(attempt);
        console.log(
          `[info] [ddb] [batchDelete]: Retrying ${itemsToDelete.length} items (attempt ${attempt}/${maxRetries}) after ${delay}ms`
        );
        await sleep3(delay);
      }
      try {
        if (hasConditions) {
          // Transactional path: supports per-key ConditionExpression.
          const transactItems = itemsToDelete.map((x) => ({
            Delete: {
              TableName,
              Key: x.key,
              ...x.cond && { ConditionExpression: x.cond }
            }
          }));
          const command = new libDynamodb.TransactWriteCommand({
            TransactItems: transactItems
          });
          const result = await ddb.client.send(command);
          if (result.ConsumedCapacity) {
            totalConsumedCapacity += result.ConsumedCapacity.reduce(
              (sum, cap) => sum + (cap.CapacityUnits ?? 0),
              0
            );
          }
          processedCount += itemsToDelete.length;
          itemsToDelete = [];
        } else {
          const writeRequests = itemsToDelete.map((x) => ({
            DeleteRequest: { Key: x.key }
          }));
          const command = new libDynamodb.BatchWriteCommand({
            RequestItems: { [TableName]: writeRequests }
          });
          const result = await ddb.client.send(command);
          if (result.ConsumedCapacity) {
            totalConsumedCapacity += result.ConsumedCapacity.reduce(
              (sum, cap) => sum + (cap.CapacityUnits ?? 0),
              0
            );
          }
          const unprocessed = result.UnprocessedItems?.[TableName];
          if (unprocessed && unprocessed.length > 0) {
            const remaining = unprocessed.map((req) => ({
              key: req.DeleteRequest.Key
            }));
            // Credit only this attempt's successes (fixes double counting).
            processedCount += itemsToDelete.length - remaining.length;
            itemsToDelete = remaining;
            attempt++;
          } else {
            processedCount += itemsToDelete.length;
            itemsToDelete = [];
          }
        }
      } catch (error2) {
        console.log(
          `[error] [ddb] [batchDelete]: Error deleting chunk (attempt ${attempt}/${maxRetries}):`,
          error2
        );
        if (attempt >= maxRetries) {
          failedCount += itemsToDelete.length;
          itemsToDelete = [];
        } else {
          attempt++;
        }
      }
    }
    if (itemsToDelete.length > 0) {
      console.log(
        `[error] [ddb] [batchDelete]: Failed to delete ${itemsToDelete.length} items after ${maxRetries} retries`
      );
      failedCount += itemsToDelete.length;
    }
  }
  return {
    failed: failedCount,
    processed: processedCount,
    consumedCapacity: totalConsumedCapacity || void 0
  };
}
|
|
1195
|
+
// Drop a table entirely (irreversible); resolves with the raw SDK response.
var deleteTable = (ddb, tableName) => {
  const command = new clientDynamodb.DeleteTableCommand({ TableName: tableName });
  return ddb.client.send(command);
};
|
|
1200
|
+
// Probe for a table's existence via DescribeTable: true on success,
// false when the service says it is not there, rethrow anything else.
// NOTE(review): any DynamoDBServiceException with HTTP status 400 is
// treated as "not found", which also matches other 400-class errors —
// confirm this breadth is intended.
var tableExists = async (ddb, props) => {
  try {
    const command = new clientDynamodb.DescribeTableCommand({
      TableName: props.tableName
    });
    await ddb.client.send(command);
    return true;
  } catch (err) {
    const looksMissing = err instanceof clientDynamodb.DynamoDBServiceException && (err.name === "ResourceNotFoundException" || err.$metadata.httpStatusCode === 400);
    if (looksMissing) return false;
    throw err;
  }
};
|
|
1215
|
+
|
|
1216
|
+
// src/server/aws/ddb/literals.ts
// Local secondary index descriptors: `name` is the IndexName to pass to
// queries, `sk` is the sort-key attribute the index is built on.
var lsi = { name: "lsi", sk: "lsi" };
var lsi2 = { name: "lsi2", sk: "lsi2" };
var lsiUsername = { name: "username-lsi", sk: "username" };
/** use in index_name */
var lsiNormalized = { name: "normalized-lsi", sk: "normalized" };
/** use in index_name */
var lsiPhash = { name: "phash-lsi", sk: "phash" };
|
|
1233
|
+
|
|
1234
|
+
exports.DDB = ddb_exports;
|
|
619
1235
|
exports.Dict = dict_exports;
|
|
620
1236
|
exports.File = file_exports;
|
|
621
1237
|
exports.Flow = flow_exports;
|
package/dist/server/index.d.ts
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,GAAG,MAAM,eAAe,CAAC;AACrC,OAAO,KAAK,GAAG,MAAM,eAAe,CAAC;AACrC,OAAO,KAAK,KAAK,MAAM,iBAAiB,CAAC;AACzC,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,MAAM,MAAM,kBAAkB,CAAC;AAC3C,OAAO,KAAK,GAAG,MAAM,eAAe,CAAC;AAGrC,OAAO,KAAK,IAAI,MAAM,QAAQ,CAAC"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/server/index.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,GAAG,MAAM,eAAe,CAAC;AACrC,OAAO,KAAK,GAAG,MAAM,eAAe,CAAC;AACrC,OAAO,KAAK,KAAK,MAAM,iBAAiB,CAAC;AACzC,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,IAAI,MAAM,gBAAgB,CAAC;AACvC,OAAO,KAAK,MAAM,MAAM,kBAAkB,CAAC;AAC3C,OAAO,KAAK,GAAG,MAAM,eAAe,CAAC;AAGrC,OAAO,KAAK,IAAI,MAAM,QAAQ,CAAC;AAC/B,OAAO,KAAK,GAAG,MAAM,WAAW,CAAC"}
|