ag-common 0.0.730 → 0.0.732

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -46,17 +46,70 @@ interface DynamoQueryParams {
46
46
  sortAscending?: boolean;
47
47
  }
48
48
  export declare let dynamoDb: DynamoDBDocument;
49
+ /**
50
+ * Sets up the DynamoDB client with the specified region and credentials.
51
+ * @param region - AWS region to connect to
52
+ * @param credentials - Optional AWS credentials
53
+ * @returns Configured DynamoDBDocument client
54
+ */
49
55
  export declare const setDynamo: (region: string, credentials?: AwsCredentialIdentity) => DynamoDBDocument;
56
+ /**
57
+ * Puts a single item into a DynamoDB table.
58
+ * @param item - The item to put into the table
59
+ * @param tableName - Name of the DynamoDB table
60
+ * @param opt - Optional parameters including primary key name for conditional put
61
+ * @returns Promise resolving to void on success or error message on failure
62
+ */
50
63
  export declare const putDynamo: <T extends Record<string, unknown>>(item: T, tableName: string, opt?: {
51
64
  pkName?: string;
52
65
  }) => Promise<DynamoDBResult<void>>;
53
- export declare const batchWrite: <T extends Record<string, unknown>>(tableName: string, items: T[]) => Promise<DynamoDBResult<void>>;
66
+ /**
67
+ * Writes multiple items to a DynamoDB table in batches.
68
+ * Automatically chunks items into batches of 20 by default (or the specified size) to stay within DynamoDB's 25-request BatchWriteItem limit.
69
+ * @param tableName - Name of the DynamoDB table
70
+ * @param items - Array of items to write
71
+ * @param opt - Optional parameters including batch size and retry behavior
72
+ * @returns Promise resolving to void on success or error message on failure
73
+ */
74
+ export declare const batchWrite: <T extends Record<string, unknown>>(tableName: string, items: T[], opt?: {
75
+ /** option to always retry on 429 until done. default false */
76
+ alwaysRetry?: boolean;
77
+ /** default 20 */
78
+ batchSize?: number;
79
+ }) => Promise<DynamoDBResult<void>>;
80
+ /**
81
+ * Deletes multiple items from a DynamoDB table in batches.
82
+ * Automatically chunks keys into batches of 20 by default (or the specified size) to stay within DynamoDB's 25-request BatchWriteItem limit.
83
+ * @param params - Parameters including table name, keys to delete, and options
84
+ * @returns Promise resolving to void on success or error message on failure
85
+ */
54
86
  export declare const batchDelete: (params: {
55
87
  tableName: string;
56
88
  keys: string[];
57
89
  pkName: string;
90
+ opt?: {
91
+ /** default 20 */
92
+ batchSize?: number;
93
+ /** option to always retry on 429 until done. default false */
94
+ alwaysRetry?: boolean;
95
+ };
58
96
  }) => Promise<DynamoDBResult<void>>;
97
+ /**
98
+ * Scans a DynamoDB table and returns all matching items.
99
+ * Handles pagination automatically and supports filtering and projection.
100
+ * @param tableName - Name of the DynamoDB table
101
+ * @param options - Optional parameters for filtering, projection, and index usage
102
+ * @returns Promise resolving to array of items on success or error message on failure
103
+ */
59
104
  export declare const scan: <T>(tableName: string, options?: ScanOptions) => Promise<DynamoDBResult<T[]>>;
105
+ /**
106
+ * Scans a DynamoDB table and yields items in batches.
107
+ * Useful for processing large tables without loading all items into memory.
108
+ * @param tableName - Name of the DynamoDB table
109
+ * @param options - Optional parameters including batch size, filtering, and projection
110
+ * @returns AsyncGenerator yielding batches of items
111
+ * @throws Error if the scan operation fails
112
+ */
60
113
  export declare function scanWithGenerator<T>(tableName: string, options?: ScanOptions & {
61
114
  /** how many to return in scan generator. default 100 */
62
115
  BATCH_SIZE?: number;
@@ -28,39 +28,14 @@ const client_dynamodb_1 = require("@aws-sdk/client-dynamodb");
28
28
  const lib_dynamodb_1 = require("@aws-sdk/lib-dynamodb");
29
29
  const array_1 = require("../../common/helpers/array");
30
30
  const async_1 = require("../../common/helpers/async");
31
- const log_1 = require("../../common/helpers/log");
32
- const sleep_1 = require("../../common/helpers/sleep");
33
- const RETRY_CONFIG = {
34
- maxRetries: 3,
35
- baseDelay: 2000,
36
- };
31
+ const withRetry_1 = require("./withRetry");
37
32
  const isError = (result) => 'error' in result;
38
- const withRetry = (operation, operationName) => __awaiter(void 0, void 0, void 0, function* () {
39
- let retryCount = 0;
40
- // eslint-disable-next-line
41
- while (true) {
42
- try {
43
- return yield operation();
44
- }
45
- catch (e) {
46
- const error = e;
47
- const errorString = error.toString();
48
- if (errorString.includes('429') ||
49
- errorString.includes('ProvisionedThroughputExceeded')) {
50
- retryCount++;
51
- if (retryCount >= RETRY_CONFIG.maxRetries) {
52
- (0, log_1.warn)(`${operationName}: Max retries exceeded`);
53
- throw error;
54
- }
55
- const delay = RETRY_CONFIG.baseDelay * Math.pow(2, retryCount - 1);
56
- (0, log_1.warn)(`${operationName}: Throttled. Retry ${retryCount}`);
57
- yield (0, sleep_1.sleep)(delay);
58
- continue;
59
- }
60
- throw error;
61
- }
62
- }
63
- });
33
+ /**
34
+ * Sets up the DynamoDB client with the specified region and credentials.
35
+ * @param region - AWS region to connect to
36
+ * @param credentials - Optional AWS credentials
37
+ * @returns Configured DynamoDBDocument client
38
+ */
64
39
  const setDynamo = (region, credentials) => {
65
40
  const client = new client_dynamodb_1.DynamoDBClient({ region, credentials });
66
41
  exports.dynamoDb = lib_dynamodb_1.DynamoDBDocument.from(client, {
@@ -70,12 +45,19 @@ const setDynamo = (region, credentials) => {
70
45
  };
71
46
  exports.setDynamo = setDynamo;
72
47
  exports.dynamoDb = (0, exports.setDynamo)('ap-southeast-2');
48
+ /**
49
+ * Puts a single item into a DynamoDB table.
50
+ * @param item - The item to put into the table
51
+ * @param tableName - Name of the DynamoDB table
52
+ * @param opt - Optional parameters including primary key name for conditional put
53
+ * @returns Promise resolving to void on success or error message on failure
54
+ */
73
55
  const putDynamo = (item, tableName, opt) => __awaiter(void 0, void 0, void 0, function* () {
74
56
  const params = new lib_dynamodb_1.PutCommand(Object.assign({ TableName: tableName, Item: item }, ((opt === null || opt === void 0 ? void 0 : opt.pkName) && {
75
57
  ConditionExpression: `attribute_not_exists(${opt.pkName})`,
76
58
  })));
77
59
  try {
78
- yield withRetry(() => exports.dynamoDb.send(params), 'putDynamo');
60
+ yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(params), 'putDynamo');
79
61
  return { data: undefined };
80
62
  }
81
63
  catch (e) {
@@ -83,16 +65,27 @@ const putDynamo = (item, tableName, opt) => __awaiter(void 0, void 0, void 0, fu
83
65
  }
84
66
  });
85
67
  exports.putDynamo = putDynamo;
86
- const batchWrite = (tableName, items) => __awaiter(void 0, void 0, void 0, function* () {
68
+ /**
69
+ * Writes multiple items to a DynamoDB table in batches.
70
+ * Automatically chunks items into batches of 20 (or specified size) to comply with DynamoDB limits.
71
+ * @param tableName - Name of the DynamoDB table
72
+ * @param items - Array of items to write
73
+ * @param opt - Optional parameters including batch size and retry behavior
74
+ * @returns Promise resolving to void on success or error message on failure
75
+ */
76
+ const batchWrite = (tableName, items, opt) => __awaiter(void 0, void 0, void 0, function* () {
87
77
  try {
88
- const chunked = (0, array_1.chunk)(items, 20);
78
+ const { batchSize = 20 } = opt !== null && opt !== void 0 ? opt : {};
79
+ const chunked = (0, array_1.chunk)(items, batchSize);
89
80
  yield (0, async_1.asyncForEach)(chunked, (chunk) => __awaiter(void 0, void 0, void 0, function* () {
90
81
  const params = new lib_dynamodb_1.BatchWriteCommand({
91
82
  RequestItems: {
92
83
  [tableName]: chunk.map((Item) => ({ PutRequest: { Item } })),
93
84
  },
94
85
  });
95
- yield withRetry(() => exports.dynamoDb.send(params), 'batchWrite');
86
+ yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(params), 'batchWrite', {
87
+ maxRetries: (opt === null || opt === void 0 ? void 0 : opt.alwaysRetry) ? null : undefined,
88
+ });
96
89
  }));
97
90
  return { data: undefined };
98
91
  }
@@ -101,9 +94,17 @@ const batchWrite = (tableName, items) => __awaiter(void 0, void 0, void 0, funct
101
94
  }
102
95
  });
103
96
  exports.batchWrite = batchWrite;
97
+ /**
98
+ * Deletes multiple items from a DynamoDB table in batches.
99
+ * Automatically chunks keys into batches of 20 (or specified size) to comply with DynamoDB limits.
100
+ * @param params - Parameters including table name, keys to delete, and options
101
+ * @returns Promise resolving to void on success or error message on failure
102
+ */
104
103
  const batchDelete = (params) => __awaiter(void 0, void 0, void 0, function* () {
104
+ var _a;
105
105
  try {
106
- const chunked = (0, array_1.chunk)(params.keys, 20);
106
+ const { batchSize = 20, alwaysRetry = false } = (_a = params.opt) !== null && _a !== void 0 ? _a : {};
107
+ const chunked = (0, array_1.chunk)(params.keys, batchSize);
107
108
  yield (0, async_1.asyncForEach)(chunked, (chunk) => __awaiter(void 0, void 0, void 0, function* () {
108
109
  const command = new lib_dynamodb_1.BatchWriteCommand({
109
110
  RequestItems: {
@@ -112,7 +113,9 @@ const batchDelete = (params) => __awaiter(void 0, void 0, void 0, function* () {
112
113
  })),
113
114
  },
114
115
  });
115
- yield withRetry(() => exports.dynamoDb.send(command), 'batchDelete');
116
+ yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(command), 'batchDelete', {
117
+ maxRetries: alwaysRetry ? null : undefined,
118
+ });
116
119
  }));
117
120
  return { data: undefined };
118
121
  }
@@ -121,6 +124,13 @@ const batchDelete = (params) => __awaiter(void 0, void 0, void 0, function* () {
121
124
  }
122
125
  });
123
126
  exports.batchDelete = batchDelete;
127
+ /**
128
+ * Scans a DynamoDB table and returns all matching items.
129
+ * Handles pagination automatically and supports filtering and projection.
130
+ * @param tableName - Name of the DynamoDB table
131
+ * @param options - Optional parameters for filtering, projection, and index usage
132
+ * @returns Promise resolving to array of items on success or error message on failure
133
+ */
124
134
  const scan = (tableName, options) => __awaiter(void 0, void 0, void 0, function* () {
125
135
  var _a, _b;
126
136
  try {
@@ -141,7 +151,7 @@ const scan = (tableName, options) => __awaiter(void 0, void 0, void 0, function*
141
151
  .map((_, index) => `#proj${index}`)
142
152
  .join(', '),
143
153
  })), { ExclusiveStartKey }));
144
- const result = yield withRetry(() => exports.dynamoDb.send(params), 'scan');
154
+ const result = yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(params), 'scan');
145
155
  if (result.Items) {
146
156
  Items.push(...result.Items);
147
157
  }
@@ -154,6 +164,14 @@ const scan = (tableName, options) => __awaiter(void 0, void 0, void 0, function*
154
164
  }
155
165
  });
156
166
  exports.scan = scan;
167
+ /**
168
+ * Scans a DynamoDB table and yields items in batches.
169
+ * Useful for processing large tables without loading all items into memory.
170
+ * @param tableName - Name of the DynamoDB table
171
+ * @param options - Optional parameters including batch size, filtering, and projection
172
+ * @returns AsyncGenerator yielding batches of items
173
+ * @throws Error if the scan operation fails
174
+ */
157
175
  function scanWithGenerator(tableName, options) {
158
176
  return __asyncGenerator(this, arguments, function* scanWithGenerator_1() {
159
177
  var _a, _b, _c;
@@ -176,7 +194,7 @@ function scanWithGenerator(tableName, options) {
176
194
  .map((_, index) => `#proj${index}`)
177
195
  .join(', '),
178
196
  })), { ExclusiveStartKey: exclusiveStartKey }));
179
- const result = yield __await(withRetry(() => exports.dynamoDb.send(params), 'scanWithGenerator'));
197
+ const result = yield __await((0, withRetry_1.withRetry)(() => exports.dynamoDb.send(params), 'scanWithGenerator'));
180
198
  if (result.Items) {
181
199
  items.push(...result.Items);
182
200
  // Process items in chunks of BATCH_SIZE
@@ -210,7 +228,7 @@ const getItemsDynamo = (params) => __awaiter(void 0, void 0, void 0, function* (
210
228
  },
211
229
  },
212
230
  });
213
- const result = yield withRetry(() => exports.dynamoDb.send(command), 'getItemsDynamo');
231
+ const result = yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(command), 'getItemsDynamo');
214
232
  return {
215
233
  data: (_b = (_a = result.Responses) === null || _a === void 0 ? void 0 : _a[params.tableName]) !== null && _b !== void 0 ? _b : [],
216
234
  };
@@ -284,7 +302,7 @@ const queryDynamo = (params) => __awaiter(void 0, void 0, void 0, function* () {
284
302
  ExclusiveStartKey: startKey,
285
303
  FilterExpression,
286
304
  });
287
- const result = yield withRetry(() => exports.dynamoDb.send(queryParams), 'queryDynamo');
305
+ const result = yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(queryParams), 'queryDynamo');
288
306
  if (result.Items) {
289
307
  items.push(...result.Items);
290
308
  }
@@ -310,7 +328,7 @@ exports.getDynamoTtlMinutes = getDynamoTtlMinutes;
310
328
  const wipeTable = (tableName) => __awaiter(void 0, void 0, void 0, function* () {
311
329
  var _a, _b, _c;
312
330
  try {
313
- const info = yield withRetry(() => exports.dynamoDb.send(new client_dynamodb_1.DescribeTableCommand({ TableName: tableName })), 'wipeTable-describe');
331
+ const info = yield (0, withRetry_1.withRetry)(() => exports.dynamoDb.send(new client_dynamodb_1.DescribeTableCommand({ TableName: tableName })), 'wipeTable-describe');
314
332
  const keyHash = (_c = (_b = (_a = info.Table) === null || _a === void 0 ? void 0 : _a.KeySchema) === null || _b === void 0 ? void 0 : _b.find((k) => k.KeyType === 'HASH')) === null || _c === void 0 ? void 0 : _c.AttributeName;
315
333
  if (!keyHash) {
316
334
  throw new Error('Could not find hash key');
@@ -0,0 +1,4 @@
1
+ export declare const withRetry: <T>(operation: () => Promise<T>, operationName: string, opt?: {
2
+ /** default 3. null for infinite */
3
+ maxRetries?: number | null;
4
+ }) => Promise<T>;
@@ -0,0 +1,44 @@
1
+ "use strict";
2
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
+ return new (P || (P = Promise))(function (resolve, reject) {
5
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
9
+ });
10
+ };
11
+ Object.defineProperty(exports, "__esModule", { value: true });
12
+ exports.withRetry = void 0;
13
+ const log_1 = require("../../common/helpers/log");
14
+ const sleep_1 = require("../../common/helpers/sleep");
15
+ const withRetry = (operation, operationName, opt) => __awaiter(void 0, void 0, void 0, function* () {
16
+ let retryCount = 0;
17
+ var baseDelay = 1000;
18
+ let { maxRetries = 3 } = opt !== null && opt !== void 0 ? opt : {};
19
+ // eslint-disable-next-line
20
+ while (true) {
21
+ try {
22
+ return yield operation();
23
+ }
24
+ catch (e) {
25
+ const error = e;
26
+ const errorString = error.toString().toLowerCase();
27
+ if (errorString.includes('429') ||
28
+ errorString.includes('provisionedthroughputexceeded') ||
29
+ errorString.includes('too large')) {
30
+ retryCount++;
31
+ if (maxRetries !== null && retryCount >= maxRetries) {
32
+ (0, log_1.warn)(`${operationName}: Max retries exceeded`);
33
+ throw error;
34
+ }
35
+ const delay = baseDelay * retryCount;
36
+ (0, log_1.warn)(`${operationName}: Throttled. Retry ${retryCount}. Sleeping for ${delay}ms`);
37
+ yield (0, sleep_1.sleep)(delay);
38
+ continue;
39
+ }
40
+ throw error;
41
+ }
42
+ }
43
+ });
44
+ exports.withRetry = withRetry;
@@ -17,7 +17,7 @@ function getStringFromStream(stream) {
17
17
  const reader = stream.getReader();
18
18
  let result = '';
19
19
  try {
20
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
20
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition,no-constant-condition
21
21
  while (true) {
22
22
  const { done, value } = yield reader.read();
23
23
  if (done)
@@ -23,7 +23,7 @@ function getCookieRawWrapper({ name, cookieDocument, defaultValue, parse: parseR
23
23
  };
24
24
  let raw = '';
25
25
  let currentCount = 0;
26
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
26
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition,no-constant-condition
27
27
  while (true) {
28
28
  const newv = (0, raw_1.getCookie)({
29
29
  name: name + currentCount,
@@ -48,7 +48,7 @@ function wipeCookies(name) {
48
48
  return;
49
49
  }
50
50
  let currentCount = 0;
51
- // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
51
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition,no-constant-condition
52
52
  while (true) {
53
53
  if ((0, exports.getCookie)({
54
54
  name: name + currentCount,
package/package.json CHANGED
@@ -1,5 +1,5 @@
1
1
  {
2
- "version": "0.0.730",
2
+ "version": "0.0.732",
3
3
  "name": "ag-common",
4
4
  "main": "./dist/index.js",
5
5
  "types": "./dist/index.d.ts",