@baadal-sdk/dapi 0.31.5 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/README.md +62 -2
  2. package/dist/index.d.ts +674 -0
  3. package/dist/index.js +1118 -0
  4. package/dist/index.js.map +1 -0
  5. package/package.json +44 -106
  6. package/LICENSE.txt +0 -21
  7. package/dist/cjs/index.js +0 -3
  8. package/dist/cjs/index.js.LICENSE.txt +0 -1
  9. package/dist/cjs/index.js.map +0 -1
  10. package/dist/cjs/package.json +0 -3
  11. package/dist/esm/index.js +0 -3
  12. package/dist/esm/index.js.LICENSE.txt +0 -1
  13. package/dist/esm/index.js.map +0 -1
  14. package/dist/esm/package.json +0 -3
  15. package/dist/types/aws/client.d.ts +0 -13
  16. package/dist/types/aws/client.d.ts.map +0 -1
  17. package/dist/types/aws/db.d.ts +0 -291
  18. package/dist/types/aws/db.d.ts.map +0 -1
  19. package/dist/types/aws/index.d.ts +0 -12
  20. package/dist/types/aws/index.d.ts.map +0 -1
  21. package/dist/types/aws/s3.d.ts +0 -90
  22. package/dist/types/aws/s3.d.ts.map +0 -1
  23. package/dist/types/common/common.model.d.ts +0 -4
  24. package/dist/types/common/common.model.d.ts.map +0 -1
  25. package/dist/types/common/const.d.ts +0 -4
  26. package/dist/types/common/const.d.ts.map +0 -1
  27. package/dist/types/common/error.d.ts +0 -4
  28. package/dist/types/common/error.d.ts.map +0 -1
  29. package/dist/types/common/logger.d.ts +0 -29
  30. package/dist/types/common/logger.d.ts.map +0 -1
  31. package/dist/types/fs/index.d.ts +0 -102
  32. package/dist/types/fs/index.d.ts.map +0 -1
  33. package/dist/types/gh/index.d.ts +0 -22
  34. package/dist/types/gh/index.d.ts.map +0 -1
  35. package/dist/types/index.d.ts +0 -13
  36. package/dist/types/index.d.ts.map +0 -1
  37. package/dist/types/utils/index.d.ts +0 -6
  38. package/dist/types/utils/index.d.ts.map +0 -1
  39. package/src/aws/client.ts +0 -18
  40. package/src/aws/db.ts +0 -764
  41. package/src/aws/index.ts +0 -33
  42. package/src/aws/s3.ts +0 -476
  43. package/src/common/common.model.ts +0 -3
  44. package/src/common/const.ts +0 -3
  45. package/src/common/error.ts +0 -12
  46. package/src/common/logger.ts +0 -18
  47. package/src/fs/index.ts +0 -316
  48. package/src/gh/index.ts +0 -60
  49. package/src/index.ts +0 -8
  50. package/src/typings/index.d.ts +0 -0
  51. package/src/utils/index.ts +0 -39
package/dist/index.js ADDED
@@ -0,0 +1,1118 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
// src/index.ts
// Package entry point: exposes the aws / fs / gh / utils namespaces.
// __export installs lazy getters, so referencing e.g. `aws_exports`
// before its definition further down the bundle is safe — the value is
// only read when a consumer accesses the property.
var index_exports = {};
__export(index_exports, {
  aws: () => aws_exports,
  default: () => index_default,
  fs: () => fs_exports,
  gh: () => gh_exports,
  utils: () => utils_exports
});
module.exports = __toCommonJS(index_exports);
40
+
41
// src/fs/index.ts
// Filesystem helper namespace (re-exported as `fs` from the package root).
var fs_exports = {};
__export(fs_exports, {
  appendToFile: () => appendToFile,
  createDir: () => createDir,
  deleteDir: () => deleteDir,
  deleteFile: () => deleteFile,
  existsDirSync: () => existsDirSync,
  existsFileSync: () => existsFileSync,
  readDir: () => readDir,
  readDirDirs: () => readDirDirs,
  readDirFiles: () => readDirFiles,
  readDirFilesRec: () => readDirFilesRec,
  readFile: () => readFile,
  readFileSync: () => readFileSync,
  renameFile: () => renameFile,
  writeFile: () => writeFile
});
var import_node_util = __toESM(require("util"));
var import_node_fs = __toESM(require("fs"));
var import_promises2 = __toESM(require("fs/promises"));
62
+
63
// src/utils/index.ts
// General-purpose utilities namespace (re-exported as `utils`).
var utils_exports = {};
__export(utils_exports, {
  arrayToMap: () => arrayToMap,
  assertPath: () => assertPath,
  base64Decode: () => base64Decode,
  base64Encode: () => base64Encode,
  chunkifyArray: () => chunkifyArray,
  fileHash: () => fileHash,
  sha1Hash: () => sha1Hash,
  sha256Hash: () => sha256Hash
});
75
+
76
// src/utils/crypto.ts
var import_node_crypto = __toESM(require("crypto"));
/**
 * SHA-1 hex digest of `input`, treated as UTF-8 text.
 * NOTE(review): unlike sha256Hash there is no falsy guard here — a
 * null/undefined input throws from crypto; confirm that asymmetry is
 * intentional.
 */
function sha1Hash(input) {
  const hash = import_node_crypto.default.createHash("sha1").update(input, "utf8").digest("hex");
  return hash;
}
/**
 * SHA-256 hex digest of `input` (string or Buffer), or null for a
 * falsy input.
 */
function sha256Hash(input) {
  if (!input) return null;
  const hashSum = import_node_crypto.default.createHash("sha256");
  hashSum.update(input);
  return hashSum.digest("hex");
}
88
+
89
+ // src/utils/utils.ts
90
+ var import_node_path = __toESM(require("path"));
91
+ var import_promises = __toESM(require("fs/promises"));
92
+ var import_js_base64 = require("js-base64");
93
+
94
// src/common/logger.ts
// Thin logging aliases: keeps call sites terse and gives a single place
// to swap in a real logger later without touching callers.
const warn = console.warn;
const error = console.error;
97
+
98
// src/utils/utils.ts
/**
 * Normalize a path to an absolute one. Falsy values and paths that
 * already begin with "/" are returned untouched; everything else is
 * resolved against the current working directory.
 */
var assertPath = (p) => {
  if (!p) return p;
  if (p.startsWith("/")) return p;
  return import_node_path.default.resolve(process.cwd(), p);
};
103
/**
 * SHA-256 hex digest of a file's raw contents. Returns null when the
 * path is falsy or the file cannot be read (the read error is logged).
 */
var fileHash = async (file) => {
  if (!file) return null;
  try {
    const contents = await import_promises.default.readFile(file);
    return sha256Hash(contents);
  } catch (e) {
    error(e);
    return null;
  }
};
114
/** Encode a (Unicode-safe) string to Base64 via js-base64. */
var base64Encode = (data) => (0, import_js_base64.encode)(data);
/** Decode a Base64 string back to text via js-base64. */
var base64Decode = (base64) => (0, import_js_base64.decode)(base64);
120
/**
 * Build a Map from an array.
 * - Without `key`: items must be strings/numbers; maps `${item}` -> true.
 * - With `key`: items are objects; maps item[key] -> item, keeping only
 *   truthy string-valued keys.
 * Returns an empty Map for an empty array, and null for invalid input
 * or when nothing qualified for insertion.
 */
var arrayToMap = (arr, key) => {
  if (!Array.isArray(arr)) return null;
  const map = /* @__PURE__ */ new Map();
  if (arr.length === 0) return map;
  const scalarMode = typeof arr[0] === "string" || typeof arr[0] === "number";
  if (!key && !scalarMode) return null;
  for (const item of arr) {
    if (!key) {
      map.set(`${item}`, true);
    } else if (typeof item === "object") {
      const itemKey = item[key];
      if (itemKey && typeof itemKey === "string") {
        map.set(itemKey, item);
      }
    }
  }
  return map.size > 0 ? map : null;
};
137
/**
 * Split `arr` into consecutive chunks of at most `chunkSize` elements.
 * Returns null for a non-array input or a non-positive / non-integer
 * chunk size — the previous version looped forever when chunkSize <= 0
 * and threw on non-array input.
 */
var chunkifyArray = (arr, chunkSize) => {
  if (!Array.isArray(arr)) return null;
  if (!Number.isInteger(chunkSize) || chunkSize <= 0) return null;
  const chunkedItems = [];
  for (let i = 0; i < arr.length; i += chunkSize) {
    chunkedItems.push(arr.slice(i, i + chunkSize));
  }
  return chunkedItems;
};
145
+
146
// src/common/error.ts
/**
 * Error subclass with a configurable `name` and pass-through `cause`.
 * options: { name?: string, cause?: unknown }
 */
var CustomError = class CustomErrorImpl extends Error {
  constructor(message, options = {}) {
    super(message, { cause: options.cause });
    // Trim this constructor frame from the captured stack (V8 only).
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, CustomErrorImpl);
    }
    this.name = options.name || "CustomError";
  }
};
156
+
157
// src/fs/index.ts
// Shared guts of existsFileSync / existsDirSync (they were verbatim
// duplicates). Returns true when the path exists; false when it does
// not (or throws a CustomError when `loud`); null when fs.existsSync
// itself fails (or throws when `loud`). Falsy input -> null.
var existsPathSyncHelper = (target, loud, kindLabel) => {
  if (!target) return null;
  target = assertPath(target);
  try {
    if (!import_node_fs.default.existsSync(target)) {
      if (!loud) return false;
      throw new CustomError(`${kindLabel} does not exist: ${target}`);
    }
  } catch (e) {
    if (!loud) return null;
    if (e instanceof CustomError) {
      throw e;
    }
    throw new CustomError(`Error while accessing ${kindLabel.toLowerCase()}: ${target}`);
  }
  return true;
};
/**
 * True if `file` exists; false when absent (throws when `loud`); null
 * on fs error (throws when `loud`) or falsy input.
 * Note: only existence is checked, not that the path is a regular file.
 */
var existsFileSync = (file, loud = false) => existsPathSyncHelper(file, loud, "File");
/** Directory counterpart of existsFileSync (same return convention). */
var existsDirSync = (dir, loud = false) => existsPathSyncHelper(dir, loud, "Directory");
194
/**
 * Read a UTF-8 text file. Resolves to the contents, or null when the
 * path is falsy or the read fails (optionally warns on failure).
 */
var readFile = async (file, warn2 = false) => {
  if (!file) return null;
  const target = assertPath(file);
  try {
    return await import_promises2.default.readFile(target, "utf8");
  } catch (e) {
    if (warn2) warn(`Cannot read file: ${target}`);
    return null;
  }
};
/** Synchronous variant of readFile (same return convention). */
var readFileSync = (file, warn2 = false) => {
  if (!file) return null;
  const target = assertPath(file);
  try {
    return import_node_fs.default.readFileSync(target, "utf8");
  } catch (e) {
    if (warn2) warn(`Cannot read file: ${target}`);
    return null;
  }
};
216
/**
 * Non-recursive directory listing split into { dirs, files }. Entries
 * that are neither regular file nor directory (sockets, symlinks, ...)
 * are dropped. Dot-entries are filtered unless `hiddenItems` is true.
 * Returns null for a falsy path or when the directory cannot be read.
 */
var readDir = async (dir, warn2 = false, hiddenItems = false) => {
  if (!dir) return null;
  const target = assertPath(dir);
  let entries;
  try {
    entries = await import_promises2.default.readdir(target, { withFileTypes: true });
  } catch (e) {
    if (warn2) warn(`Cannot read dir: ${target}`);
    return null;
  }
  const keep = (name) => hiddenItems || !name.startsWith(".");
  const dirs = entries.filter((it) => it.isDirectory()).map((it) => it.name).filter(keep);
  const files = entries.filter((it) => it.isFile()).map((it) => it.name).filter(keep);
  return { dirs, files };
};
/** Only the files of `dir`, or null when the listing failed. */
var readDirFiles = async (dir, warn2 = false, hiddenItems = false) => {
  if (!dir) return null;
  return (await readDir(assertPath(dir), warn2, hiddenItems))?.files || null;
};
/** Only the sub-directories of `dir`, or null when the listing failed. */
var readDirDirs = async (dir, warn2 = false, hiddenItems = false) => {
  if (!dir) return null;
  return (await readDir(assertPath(dir), warn2, hiddenItems))?.dirs || null;
};
250
/**
 * Recursive helper for readDirFilesRec: all file paths under
 * `dir`/`basePath`, expressed relative to `dir`; null when the root
 * listing fails. `hiddenItems` is now forwarded to readDir — before
 * this fix the helper always used readDir's default filtering, so
 * readDirFilesRec(dir, true) could never actually return hidden files
 * or descend into dot-directories.
 */
var readDirFilesRecHelper = async (dir, basePath = "", hiddenItems = false) => {
  if (!dir) return null;
  dir = assertPath(dir);
  const dirPath = basePath ? `${dir}/${basePath}` : dir;
  const readDirObj = await readDir(dirPath, false, hiddenItems);
  if (!readDirObj) return null;
  const { dirs, files } = readDirObj;
  let allFiles = (files || []).map((file) => basePath ? `${basePath}/${file}` : file);
  const subDirs = (dirs || []).map((d) => basePath ? `${basePath}/${d}` : d);
  const nested = await Promise.all(subDirs.map((sub) => readDirFilesRecHelper(dir, sub, hiddenItems)));
  for (const filesx of nested) {
    if (filesx) allFiles = allFiles.concat(filesx);
  }
  return allFiles;
};
/**
 * All file paths under `dir` (relative to `dir`), recursively. Hidden
 * entries are excluded unless `hiddenItems` is true.
 */
var readDirFilesRec = async (dir, hiddenItems = false) => {
  let allFiles = await readDirFilesRecHelper(dir, "", hiddenItems);
  if (!hiddenItems && allFiles) {
    // Check every path segment: nested hidden files look like "a/.b",
    // which the old top-level startsWith(".") filter never matched.
    allFiles = allFiles.filter((f) => !f.split("/").some((seg) => seg.startsWith(".")));
  }
  return allFiles;
};
276
/**
 * Write `contents` to `file`, creating parent directories as needed.
 * Returns true on success; null on failure, falsy `file`, or
 * null/undefined `contents`. The old `!contents` truthiness guard
 * silently refused to create empty files — empty strings are now
 * accepted (`== null` matches only null/undefined).
 */
var writeFile = async (file, contents) => {
  if (!file || contents == null) return null;
  file = assertPath(file);
  try {
    // assertPath always yields a path containing "/", so this is safe.
    const dir = file.substring(0, file.lastIndexOf("/"));
    await import_promises2.default.mkdir(dir, { recursive: true });
    await import_promises2.default.writeFile(file, contents);
  } catch (e) {
    console.error(`Error while writing to ${file}`, e);
    return null;
  }
  return true;
};
/**
 * Append `contents` plus a trailing newline to `file`, creating parent
 * directories as needed. Same return convention and empty-content fix
 * as writeFile.
 */
var appendToFile = async (file, contents) => {
  if (!file || contents == null) return null;
  file = assertPath(file);
  try {
    const dir = file.substring(0, file.lastIndexOf("/"));
    await import_promises2.default.mkdir(dir, { recursive: true });
    await import_promises2.default.appendFile(file, contents + "\n");
  } catch (e) {
    console.error(`Error while appending to ${file}`, e);
    return null;
  }
  return true;
};
302
/**
 * Move/rename a file. Returns true on success, null on failure or when
 * either path is falsy.
 */
var renameFile = async (oldpath, newpath) => {
  if (!oldpath || !newpath) return null;
  const src = assertPath(oldpath);
  const dst = assertPath(newpath);
  try {
    await import_promises2.default.rename(src, dst);
    return true;
  } catch (e) {
    console.error(`Error while renaming file ${src} to ${dst}`, e);
    return null;
  }
};
314
/**
 * Ensure `dir` exists (recursively). mkdir with { recursive: true } is
 * already idempotent, so the previous existsDirSync pre-check was a
 * redundant TOCTOU race and has been removed — behavior is unchanged.
 * Returns true on success, null on failure or falsy input.
 */
var createDir = async (dir) => {
  if (!dir) return null;
  dir = assertPath(dir);
  try {
    await import_promises2.default.mkdir(dir, { recursive: true });
  } catch (e) {
    console.error(`Error while creating directory: ${dir}`, e);
    return null;
  }
  return true;
};
327
/**
 * Delete a single file. Returns true on success, null on failure or
 * falsy input.
 */
var deleteFile = async (file) => {
  if (!file) return null;
  const target = assertPath(file);
  try {
    await import_promises2.default.unlink(target);
    return true;
  } catch (e) {
    console.error(`Error while deleting file ${target}`, e);
    return null;
  }
};
338
/**
 * Recursively delete a directory tree. Uses fs.rm (Node >= 14.14) with
 * `force: true`, so a missing path is not an error — same contract as
 * the old rimraf call, without the runtime `require("rimraf")`
 * dependency (which was not a declared import of this bundle).
 * Returns true on success, null on failure or falsy input.
 */
var deleteDir = async (dir) => {
  if (!dir) return null;
  dir = assertPath(dir);
  try {
    await import_promises2.default.rm(dir, { recursive: true, force: true });
  } catch (e) {
    console.error(`Error while deleting dir ${dir}`, e);
    return null;
  }
  return true;
};
351
+
352
// src/aws/index.ts
// AWS namespace: DynamoDB (db) and S3 (s3) helpers plus a combined
// init/status (init3/status3 are defined later in the bundle).
var aws_exports = {};
__export(aws_exports, {
  db: () => db_exports,
  init: () => init3,
  s3: () => s3_exports,
  status: () => status3
});
360
+
361
// src/aws/db.ts
// DynamoDB helper namespace (exported as `aws.db`).
var db_exports = {};
__export(db_exports, {
  deleteItem: () => deleteItem,
  deleteItemsAll: () => deleteItemsAll,
  init: () => init,
  queryItems: () => queryItems,
  readItem: () => readItem,
  readItemsAll: () => readItemsAll,
  scanItems: () => scanItems,
  status: () => status,
  updateItem: () => updateItem,
  writeItem: () => writeItem,
  writeItemForce: () => writeItemForce,
  writeItemUnique: () => writeItemUnique,
  writeItemsAll: () => writeItemsAll
});
378
+ var import_client_dynamodb = require("@aws-sdk/client-dynamodb");
379
+ var import_lib_dynamodb = require("@aws-sdk/lib-dynamodb");
380
+ var short = __toESM(require("short-uuid"));
381
+
382
// src/aws/client.ts
// Lazily-initialized singleton holders for the shared AWS clients.
// `client` is filled in by db.init / s3.init; `id` is a short-uuid tag
// set at initialization time (null = not yet initialized).
var dbDocClient = null;
var awsS3Client = null;
var dbClient = { client: dbDocClient, id: null };
var s3Client = { client: awsS3Client, id: null };
387
+
388
// src/common/const.ts
var BATCH_SIZE = 20;          // keys/items per DynamoDB batch command
var CHUNK_SIZE = 10;          // batches (or S3 uploads) run concurrently per wave
var MAX_RETRY_ATTEMPTS = 3;   // default retry cap (e.g. key collisions in writeItemForce)
392
+
393
// src/aws/db.ts
var DynamoDBError = (msg) => new CustomError(msg, { name: "DynamoDBError" });
/**
 * Initialize the shared DynamoDB document client (no-op when already
 * initialized). Region comes from the argument, then AWS_REGION, then
 * the SDK's own resolution chain. Returns the resulting initialized
 * state as a boolean.
 */
var init = async (_region) => {
  let region = _region ?? process.env.AWS_REGION;
  if (!region) {
    // Let the SDK resolve the region from its config/credentials chain.
    const client = new import_client_dynamodb.DynamoDBClient({});
    region = await client.config.region();
  }
  if (region && !dbClient.client) {
    const dydbClient = new import_client_dynamodb.DynamoDBClient({ region });
    dbClient.client = import_lib_dynamodb.DynamoDBDocumentClient.from(dydbClient);
    dbClient.id = short.generate();
    return true;
  }
  return !!dbClient.client;
};
/** Short-uuid id of the active client, or null when uninitialized. */
var status = () => dbClient.id;
/** Best-effort init; throws DynamoDBError on failure unless `silent`. */
var tryInit = async (silent = false) => {
  if (dbClient.client) return;
  if (await init()) {
    return;
  }
  if (!silent) {
    throw DynamoDBError("Could not auto-initialize DynamoDB!");
  }
};
// Fire-and-forget auto-init at module load. Silent: a failure here is
// deferred until first use, when tryInit() throws loudly.
(async () => {
  await tryInit(true);
})();
422
/**
 * Put `item` into `table`, generating a fresh short-uuid for item[key]
 * when absent and retrying with new ids on key collision, up to
 * `imax` ?? MAX_RETRY_ATTEMPTS attempts. Returns the stored item
 * (with its final key) on success; null on failure, missing client,
 * or bad arguments. Note: mutates the caller's `item` in place.
 */
var writeItemForceHelper = async (table, item, key, i, imax) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!table || !item) return null;
  if (!item[key]) {
    item[key] = short.generate();
  }
  const cmdParams = { TableName: table, Item: item, ConditionExpression: `attribute_not_exists(${key})` };
  const command = new import_lib_dynamodb.PutCommand(cmdParams);
  const numberOfAttempts = imax ?? MAX_RETRY_ATTEMPTS;
  try {
    await dbClient.client.send(command);
  } catch (err) {
    if (err.name === "ConditionalCheckFailedException") {
      if (i < numberOfAttempts - 1) {
        // Key collision: retry with a newly generated key.
        item[key] = short.generate();
        return writeItemForceHelper(table, item, key, i + 1, imax);
      }
      console.error("PutCommandInput:", cmdParams);
      if (numberOfAttempts === 1) {
        error(`[ERROR] An item with the same key(${item[key]}) already exists!`);
      } else {
        error("[ERROR] Maximum attempts overflow!");
      }
    } else {
      // Fix: unexpected (non-collision) errors used to be swallowed
      // silently here; log them like every other DB helper does.
      console.error("PutCommandInput:", cmdParams);
      console.error(err);
    }
    return null;
  }
  return item;
};
452
/** Put with key-collision retry (fresh ids, up to MAX_RETRY_ATTEMPTS). */
var writeItemForce = async (input) => writeItemForceHelper(input.table, input.item, input.key || "id", 0);
/** Put that fails when the key already exists (exactly one attempt). */
var writeItemUnique = async (input) => writeItemForceHelper(input.table, input.item, input.key || "id", 0, 1);
460
/**
 * Unconditional put of input.item into input.table.
 * Returns true on success; null on failure, bad args, or no client.
 */
var writeItem = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  const { table, item } = input;
  if (!table || !item) return null;
  const cmdParams = { TableName: table, Item: item };
  try {
    await dbClient.client.send(new import_lib_dynamodb.PutCommand(cmdParams));
    return true;
  } catch (err) {
    console.error("PutCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
};
475
/**
 * Write one batch of items via BatchWriteCommand. DynamoDB may return
 * UnprocessedItems when throttled; the previous version ignored them
 * and reported success, silently dropping writes. Those entries are now
 * retried up to MAX_RETRY_ATTEMPTS waves.
 * Returns true on full success; null on failure or bad args.
 */
var batchWriteItems = async (table, items) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!table || !items || !Array.isArray(items)) return null;
  if (!items.length) return true;
  let requests = items.map((item) => ({ PutRequest: { Item: item } }));
  for (let attempt = 0; attempt < MAX_RETRY_ATTEMPTS; attempt += 1) {
    const cmdParams = { RequestItems: { [table]: requests } };
    try {
      const res = await dbClient.client.send(new import_lib_dynamodb.BatchWriteCommand(cmdParams));
      const unprocessed = res.UnprocessedItems?.[table];
      if (!unprocessed || !unprocessed.length) return true;
      requests = unprocessed;
    } catch (err) {
      console.error("BatchWriteCommandInput:", cmdParams);
      console.error(err);
      return null;
    }
  }
  error("[ERROR] BatchWrite: unprocessed items remain after retries!");
  return null;
};
496
/**
 * Write many items: groups into batches of BATCH_SIZE and sends
 * CHUNK_SIZE batches concurrently per wave.
 * Returns true when every batch succeeded, else null.
 */
var writeItemsAll = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  const { table, items } = input;
  if (!table || !items || !Array.isArray(items)) return null;
  if (!items.length) return true;
  const waves = chunkifyArray(chunkifyArray(items, BATCH_SIZE), CHUNK_SIZE);
  let ok = true;
  for (const wave of waves) {
    const results = await Promise.all((wave ?? []).map((batch) => batchWriteItems(table, batch)));
    if (!results.every((r) => r === true)) ok = false;
  }
  return ok ? true : null;
};
513
/**
 * UpdateCommand wrapper. Requires table, key, update (UpdateExpression)
 * and attr (ExpressionAttributeValues); attrNames is optional.
 * Returns true on success; null on failure or bad args.
 */
var updateItem = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!input.table || !input.key || !input.update || !input.attr) return null;
  const cmdParams = {
    TableName: input.table,
    Key: input.key,
    UpdateExpression: input.update,
    ExpressionAttributeValues: input.attr,
    ...input.attrNames ? { ExpressionAttributeNames: input.attrNames } : {}
  };
  try {
    await dbClient.client.send(new import_lib_dynamodb.UpdateCommand(cmdParams));
    return true;
  } catch (err) {
    console.error("UpdateCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
};
534
/**
 * Strongly-consistent GetCommand read. Optional projection / attrNames.
 * Returns the item, or null when missing or on error; when input.loud
 * is set the caught error is rethrown instead.
 */
var readItem = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!input.table || !input.key) return null;
  const cmdParams = {
    TableName: input.table,
    Key: input.key,
    ConsistentRead: true,
    ...input.projection ? { ProjectionExpression: input.projection } : {},
    ...input.attrNames ? { ExpressionAttributeNames: input.attrNames } : {}
  };
  try {
    const results = await dbClient.client.send(new import_lib_dynamodb.GetCommand(cmdParams));
    return results.Item ?? null;
  } catch (err) {
    console.error("GetCommandInput:", cmdParams);
    console.error(err);
    if (input.loud) throw err;
    return null;
  }
};
560
// Read one batch of keys via BatchGetCommand.
// Returns the matched items ([] when none / empty keys), or null on
// invalid args, missing client, or send failure.
// NOTE(review): results.UnprocessedKeys is not retried here — a
// throttled batch silently yields partial results; confirm whether a
// retry loop is wanted.
var batchReadItems = async (table, keys, projection, attrNames) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!table || !keys || !Array.isArray(keys)) return null;
  if (!keys.length) return [];
  let contents = [];
  let reqParams = { Keys: keys };
  if (projection) reqParams = { ...reqParams, ProjectionExpression: projection };
  if (attrNames) reqParams = { ...reqParams, ExpressionAttributeNames: attrNames };
  const cmdParams = {
    RequestItems: {
      [table]: reqParams
    }
  };
  const command = new import_lib_dynamodb.BatchGetCommand(cmdParams);
  try {
    const results = await dbClient.client.send(command);
    const items = results.Responses;
    if (items && items[table]) {
      contents = items[table];
    }
  } catch (err) {
    console.error("BatchGetCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
  return contents;
};
588
/**
 * Read many keys: BATCH_SIZE keys per BatchGet, CHUNK_SIZE batches in
 * parallel per wave. Returns every item found, or null as soon as any
 * batch fails. This rewrite removes the previous dead `errFlag` branch
 * (it was set and immediately returned, so the later check could never
 * trigger) and the obscure `.find(e => e === null) === null` probe.
 */
var readItemsAll = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!input.table || !input.keys || !Array.isArray(input.keys)) return null;
  if (!input.keys.length) return [];
  let contents = [];
  const batchedKeys = chunkifyArray(input.keys, BATCH_SIZE);
  const chunkedKeys = chunkifyArray(batchedKeys, CHUNK_SIZE);
  for (const wave of chunkedKeys) {
    const bslist = await Promise.all(
      (wave ?? []).map((ikeys) => batchReadItems(input.table, ikeys, input.projection, input.attrNames))
    );
    // batchReadItems signals failure with null; abort on the first one.
    if (bslist.includes(null)) return null;
    contents = contents.concat(bslist.flat());
  }
  return contents;
};
613
/**
 * QueryCommand wrapper. Requires table, cond (KeyConditionExpression)
 * and attr (ExpressionAttributeValues); optional indexName, attrNames,
 * projection, and desc (reverse sort order). Returns the matched items
 * (single page) or null on error. Now warns when results were
 * truncated, matching scanItems — previously a query past the response
 * size limit dropped the remainder with no signal.
 */
var queryItems = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!input.table || !input.cond || !input.attr) return null;
  let contents = [];
  const desc = input.desc || false;
  let cmdParams = {
    TableName: input.table,
    KeyConditionExpression: input.cond,
    ExpressionAttributeValues: input.attr
    // FilterExpression: "contains (category_id, :cid)",
  };
  if (input.indexName) cmdParams = { ...cmdParams, IndexName: input.indexName };
  if (input.attrNames) cmdParams = { ...cmdParams, ExpressionAttributeNames: input.attrNames };
  if (input.projection) cmdParams = { ...cmdParams, ProjectionExpression: input.projection };
  if (desc) cmdParams = { ...cmdParams, ScanIndexForward: false };
  const command = new import_lib_dynamodb.QueryCommand(cmdParams);
  try {
    const results = await dbClient.client.send(command);
    if (results.LastEvaluatedKey) {
      warn("[queryItems] Partial results obtained! Consider pagination.");
    }
    if (results.Items) {
      contents = results.Items;
    }
  } catch (err) {
    console.error("QueryCommandInput:", command.input);
    console.error(err);
    return null;
  }
  return contents;
};
643
/**
 * Single-page full-table ScanCommand. Optional projection. Warns when
 * the scan was truncated (LastEvaluatedKey present).
 * Returns the items, or null on error / bad args.
 */
var scanItems = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!input.table) return null;
  const cmdParams = {
    TableName: input.table,
    ...input.projection ? { ProjectionExpression: input.projection } : {}
  };
  try {
    const results = await dbClient.client.send(new import_lib_dynamodb.ScanCommand(cmdParams));
    if (results.LastEvaluatedKey) {
      warn("[scanItems] Partial results obtained! Consider pagination.");
    }
    return results.Items ?? [];
  } catch (err) {
    console.error("ScanCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
};
669
/**
 * DeleteCommand wrapper for a single key.
 * Returns true on success; null on failure or bad args.
 */
var deleteItem = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  const { table, key } = input;
  if (!table || !key) return null;
  const cmdParams = { TableName: table, Key: key };
  try {
    await dbClient.client.send(new import_lib_dynamodb.DeleteCommand(cmdParams));
    return true;
  } catch (err) {
    console.error("DeleteCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
};
684
/**
 * Delete one batch of keys via BatchWriteCommand (DeleteRequest).
 * UnprocessedItems (throttled entries) were previously dropped while
 * still reporting success; they are now retried up to
 * MAX_RETRY_ATTEMPTS waves, mirroring batchWriteItems.
 * Returns true on full success; null on failure or bad args.
 */
var batchDeleteItems = async (table, keys) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  if (!table || !keys || !Array.isArray(keys)) return null;
  if (!keys.length) return true;
  let requests = keys.map((key) => ({ DeleteRequest: { Key: key } }));
  for (let attempt = 0; attempt < MAX_RETRY_ATTEMPTS; attempt += 1) {
    const cmdParams = { RequestItems: { [table]: requests } };
    try {
      const res = await dbClient.client.send(new import_lib_dynamodb.BatchWriteCommand(cmdParams));
      const unprocessed = res.UnprocessedItems?.[table];
      if (!unprocessed || !unprocessed.length) return true;
      requests = unprocessed;
    } catch (err) {
      console.error("BatchWriteCommandInput:", cmdParams);
      console.error(err);
      return null;
    }
  }
  error("[ERROR] BatchDelete: unprocessed keys remain after retries!");
  return null;
};
705
/**
 * Delete many keys: BATCH_SIZE keys per batch, CHUNK_SIZE batches
 * concurrently per wave. True when every batch succeeded, else null.
 */
var deleteItemsAll = async (input) => {
  if (!dbClient.client) await tryInit();
  if (!dbClient.client) return null;
  const { table, keys } = input;
  if (!table || !keys || !Array.isArray(keys)) return null;
  if (!keys.length) return true;
  const waves = chunkifyArray(chunkifyArray(keys, BATCH_SIZE), CHUNK_SIZE);
  let ok = true;
  for (const wave of waves) {
    const results = await Promise.all((wave ?? []).map((ikeys) => batchDeleteItems(table, ikeys)));
    if (!results.every((r) => r === true)) ok = false;
  }
  return ok ? true : null;
};
722
+
723
// src/aws/s3.ts
// S3 helper namespace (exported as `aws.s3`).
var s3_exports = {};
__export(s3_exports, {
  deleteObject: () => deleteObject,
  deleteObjectsAll: () => deleteObjectsAll,
  downloadFile: () => downloadFile,
  getObject: () => getObject,
  getObjectHead: () => getObjectHead,
  getObjectHeadsAll: () => getObjectHeadsAll,
  init: () => init2,
  listObjects: () => listObjects,
  putObject: () => putObject,
  status: () => status2,
  uploadFile: () => uploadFile,
  uploadFilesAll: () => uploadFilesAll
});
739
+ var import_node_path2 = __toESM(require("path"));
740
+ var import_node_fs2 = __toESM(require("fs"));
741
+ var import_client_s3 = require("@aws-sdk/client-s3");
742
+ var short2 = __toESM(require("short-uuid"));
743
+ var import_mime_types = __toESM(require("mime-types"));
744
// Error factory + lazy init for the shared S3 client (mirrors db.ts).
var AWSS3Error = (msg) => new CustomError(msg, { name: "AWSS3Error" });
/**
 * Initialize the shared S3 client (no-op when already initialized).
 * Region: argument -> AWS_REGION -> the SDK's own resolution chain.
 * Returns the resulting initialized state as a boolean.
 */
var init2 = async (_region) => {
  let region = _region ?? process.env.AWS_REGION;
  if (!region) {
    // Let the SDK resolve the region from its config/credentials chain.
    const client = new import_client_s3.S3Client({});
    region = await client.config.region();
  }
  if (region && !s3Client.client) {
    const awsS3Client2 = new import_client_s3.S3Client({ region });
    s3Client.client = awsS3Client2;
    s3Client.id = short2.generate();
    return true;
  }
  return !!s3Client.client;
};
/** Short-uuid id of the active S3 client, or null when uninitialized. */
var status2 = () => s3Client.id;
/** Best-effort init; throws AWSS3Error on failure unless `silent`. */
var tryInit2 = async (silent = false) => {
  if (s3Client.client) return;
  if (await init2()) {
    return;
  }
  if (!silent) {
    throw AWSS3Error("Could not auto-initialize AWS S3!");
  }
};
// Silent auto-init at module load; failures surface on first use.
(async () => {
  await tryInit2(true);
})();
772
/**
 * Write string `contents` to s3://bucket/s3path with content type
 * "text/plain; charset=utf-8". Returns true on success; null on
 * failure or any falsy argument (note: empty-string contents is
 * refused by the truthiness guard).
 */
var putObject = async (bucket, s3path, contents) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3path || !contents) return null;
  let baseParams = null;
  try {
    // Body carries a placeholder so the catch below can log the request
    // shape without dumping the payload; the real Body is set on
    // cmdParams just before sending.
    baseParams = {
      Bucket: bucket,
      Key: s3path,
      Body: "<contents>",
      ContentType: "text/plain; charset=utf-8"
      // CacheControl: 'max-age=86400,public',
    };
    const cmdParams = { ...baseParams, Body: contents };
    const command = new import_client_s3.PutObjectCommand(cmdParams);
    await s3Client.client.send(command);
  } catch (err) {
    console.error("PutObjectCommandInput:", baseParams);
    console.error(err);
    return null;
  }
  return true;
};
795
/**
 * Upload a local file to S3. The content type is detected from the file
 * name and a SHA-256 of the contents is stored as Metadata.hash.
 * `s3path` defaults to the relative path that was passed in (or the
 * basename when the input was already absolute).
 * Returns true on success; null on failure or bad args.
 */
var uploadFile = async (bucket, file, s3path) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !file) return null;
  const filepath = assertPath(file);
  const basename = import_node_path2.default.basename(filepath);
  // Fix: look up the MIME type from the full basename. The old code
  // sliced from lastIndexOf(".") with the deprecated substr(), which
  // for extensionless names (lastIndexOf === -1) produced the file's
  // last character as a bogus "extension".
  const contentType = import_mime_types.default.lookup(basename);
  if (!contentType) {
    error(`Could not detect file type for: ${basename} [${filepath}]`);
    return null;
  }
  if (!s3path) {
    // A relative input keeps its relative path as the key; an absolute
    // input falls back to just the basename.
    s3path = file !== filepath ? file : basename;
  }
  let baseParams = null;
  try {
    const hash = await fileHash(filepath) || "";
    const fileStream = import_node_fs2.default.createReadStream(filepath);
    // Body placeholder keeps the error log below from dumping the stream.
    baseParams = {
      Bucket: bucket,
      Key: s3path,
      Body: "<fileStream>",
      ContentType: contentType,
      // CacheControl: 'max-age=86400,public',
      Metadata: { hash }
    };
    const command = new import_client_s3.PutObjectCommand({ ...baseParams, Body: fileStream });
    await s3Client.client.send(command);
  } catch (err) {
    console.error("PutObjectCommandInput:", baseParams);
    console.error(err);
    return null;
  }
  return true;
};
836
/**
 * Upload many files, CHUNK_SIZE concurrently per wave. When `s3paths`
 * is given it must parallel `files` in length; otherwise each file's
 * own path is used as its key. True when all uploads succeeded, else
 * null.
 */
var uploadFilesAll = async (bucket, files, s3paths) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !files || !Array.isArray(files)) return null;
  if (!files.length) return true;
  if (s3paths && (!Array.isArray(s3paths) || !s3paths.length || files.length !== s3paths.length)) return null;
  const chunkedFiles = chunkifyArray(files, CHUNK_SIZE);
  const chunkedPaths = s3paths ? chunkifyArray(s3paths, CHUNK_SIZE) : chunkedFiles;
  let ok = true;
  for (let i = 0; i < chunkedFiles.length; i += 1) {
    const filesChunk = chunkedFiles[i] ?? [];
    const pathsChunk = chunkedPaths[i] ?? [];
    const results = await Promise.all(filesChunk.map((f, j) => uploadFile(bucket, f, pathsChunk[j])));
    if (!results.every((r) => r === true)) ok = false;
  }
  return ok ? true : null;
};
855
// Fetch an S3 object and return its body decoded as a UTF-8 string.
// Returns null when the client is unavailable, arguments are missing,
// the fetch fails, or the object body is empty.
var getObject = async (bucket, s3path) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3path) return null;
  const cmdParams = { Bucket: bucket, Key: s3path };
  const command = new import_client_s3.GetObjectCommand(cmdParams);
  let contents = null;
  try {
    const data = await s3Client.client.send(command);
    // Accumulate the response body stream chunk by chunk.
    const chunks = [];
    for await (const chunk of data.Body) {
      chunks.push(chunk);
    }
    const bodyContents = Buffer.concat(chunks).toString("utf8");
    if (bodyContents) {
      contents = bodyContents;
    }
  } catch (err) {
    console.error("GetObjectCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
  return contents;
};
881
// Download an S3 object to a local file (outPath defaults to the object key).
// Returns true on success, null on failure; a missing key (NoSuchKey) fails
// silently by design, any other error is logged.
var downloadFile = async (bucket, s3path, outPath) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3path) return null;
  if (!outPath) outPath = s3path;
  outPath = assertPath(outPath);
  const cmdParams = { Bucket: bucket, Key: s3path };
  const command = new import_client_s3.GetObjectCommand(cmdParams);
  try {
    // Pipe the body stream into the file and resolve only on the write
    // stream's 'finish' event. The previous version wrote chunks manually,
    // resolved on the readable's 'end' (before data was flushed to disk,
    // risking truncated files), and never handled write-stream errors
    // (which would surface as unhandled 'error' events).
    const writeStreamToFile = (stream) => new Promise((resolve, reject) => {
      const writeStream = import_node_fs2.default.createWriteStream(outPath);
      stream.on("error", reject);
      writeStream.on("error", reject);
      writeStream.on("finish", resolve);
      stream.pipe(writeStream);
    });
    const data = await s3Client.client.send(command);
    await writeStreamToFile(data.Body);
  } catch (err) {
    if (err.name !== "NoSuchKey") {
      console.error("GetObjectCommandInput:", cmdParams);
      console.error(err);
    }
    return null;
  }
  return true;
};
907
// List all object keys in a bucket, optionally restricted to a key prefix.
// Follows ListObjectsV2 pagination (NextContinuationToken) so the complete
// listing is returned — the previous version fetched a single page and only
// warned when results were truncated. Returns an array of keys, or null on
// failure.
var listObjects = async (bucket, prefix) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket) return null;
  let filesList = [];
  let cmdParams = { Bucket: bucket };
  if (prefix) cmdParams = { ...cmdParams, Prefix: prefix };
  try {
    let continuationToken;
    do {
      const pageParams = continuationToken ? { ...cmdParams, ContinuationToken: continuationToken } : cmdParams;
      const command = new import_client_s3.ListObjectsV2Command(pageParams);
      const results = await s3Client.client.send(command);
      const items = results.Contents;
      if (items) {
        filesList = filesList.concat(items.map((t) => t.Key).filter((e) => !!e));
      }
      // IsTruncated signals more pages; NextContinuationToken feeds the next request.
      continuationToken = results.IsTruncated ? results.NextContinuationToken : void 0;
    } while (continuationToken);
  } catch (err) {
    console.error("ListObjectsV2CommandInput:", cmdParams);
    console.error(err);
    return null;
  }
  return filesList;
};
931
// Fetch S3 object metadata (HEAD request) for a single key.
// Returns a summary object including the key itself, or null when the
// object is missing (NotFound is silent) or on any other failure.
var getObjectHead = async (bucket, s3path) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3path) return null;
  const cmdParams = { Bucket: bucket, Key: s3path };
  const command = new import_client_s3.HeadObjectCommand(cmdParams);
  try {
    const data = await s3Client.client.send(command);
    if (!data) return null;
    const { ContentLength, ContentType, ETag, CacheControl, Expires, LastModified, Metadata } = data;
    return { Key: s3path, ContentLength, ContentType, ETag, CacheControl, Expires, LastModified, Metadata };
  } catch (err) {
    // A missing object is an expected outcome; only log unexpected errors.
    if (err.name !== "NotFound") {
      console.error("HeadObjectCommandInput:", cmdParams);
      console.error(err);
    }
    return null;
  }
};
953
// Fetch object metadata for many keys, CHUNK_SIZE HEAD requests in parallel
// per round. Failed/missing lookups (null results) are dropped from the
// returned array. Returns null on bad arguments, [] for an empty key list.
var getObjectHeadsAll = async (bucket, s3paths) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3paths || !Array.isArray(s3paths)) return null;
  if (!s3paths.length) return [];
  const collected = [];
  for (const batch of chunkifyArray(s3paths, CHUNK_SIZE)) {
    const heads = await Promise.all((batch ?? []).map((key) => getObjectHead(bucket, key)));
    collected.push(...heads);
  }
  return collected.filter((head) => !!head);
};
971
// Delete a single S3 object. Returns true on success, null on failure
// (missing client, missing arguments, or send error).
var deleteObject = async (bucket, s3path) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3path) return null;
  const cmdParams = { Bucket: bucket, Key: s3path };
  const command = new import_client_s3.DeleteObjectCommand(cmdParams);
  try {
    await s3Client.client.send(command);
    return true;
  } catch (err) {
    console.error("DeleteObjectCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
};
986
// Delete a batch of S3 objects with a single DeleteObjects request.
// NOTE: the S3 API caps one request at 1000 keys — callers (deleteObjectsAll)
// chunk by BATCH_SIZE before calling this.
// Returns true on full success, null on bad arguments or any failure.
var batchDeleteObjects = async (bucket, s3paths) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3paths || !Array.isArray(s3paths)) return null;
  if (!s3paths.length) return true;
  const keys = s3paths.map((key) => ({ Key: key }));
  const cmdParams = { Bucket: bucket, Delete: { Objects: keys } };
  const command = new import_client_s3.DeleteObjectsCommand(cmdParams);
  try {
    const result = await s3Client.client.send(command);
    // DeleteObjects resolves successfully even when some keys fail to
    // delete, reporting them in result.Errors. Previously such partial
    // failures were silently treated as full success.
    if (result && Array.isArray(result.Errors) && result.Errors.length) {
      console.error("DeleteObjectsCommand partial failure:", result.Errors);
      return null;
    }
  } catch (err) {
    console.error("DeleteObjectsCommandInput:", cmdParams);
    console.error(err);
    return null;
  }
  return true;
};
1003
// Delete an arbitrary number of S3 objects: keys are first split into
// BATCH_SIZE batches (one DeleteObjects request each), then CHUNK_SIZE
// batches are issued in parallel per round. Returns true only if every
// batch succeeded; null on bad arguments or any batch failure.
var deleteObjectsAll = async (bucket, s3paths) => {
  if (!s3Client.client) await tryInit2();
  if (!s3Client.client) return null;
  if (!bucket || !s3paths || !Array.isArray(s3paths)) return null;
  if (!s3paths.length) return true;
  const batches = chunkifyArray(s3paths, BATCH_SIZE);
  let failed = false;
  for (const group of chunkifyArray(batches, CHUNK_SIZE)) {
    const outcomes = await Promise.all(
      (group ?? []).map((keys) => batchDeleteObjects(bucket, keys))
    );
    if (!outcomes.every((ok) => ok === true)) failed = true;
  }
  return failed ? null : true;
};
1020
+
1021
// src/aws/index.ts
var { init: dbInit, status: dbStatus } = db_exports;
var { init: s3Init, status: s3Status } = s3_exports;
// Deprecated explicit AWS initializer: initializes both the DB and S3
// clients for the given region. Returns true only if both succeed.
var init3 = async (region) => {
  warn("[@baadal-sdk/dapi] aws explicit init deprecated!");
  if (!region) {
    error(`AWS initialization error! Missing region: ${region}`);
    return false;
  }
  // Both inits must run (no short-circuit), sequentially and in this order.
  const dbReady = await dbInit(region);
  const s3Ready = await s3Init(region);
  return dbReady && s3Ready;
};
1034
// Report initialization status of both AWS sub-clients.
var status3 = () => ({
  db: dbStatus(),
  s3: s3Status()
});
1040
+
1041
// src/gh/index.ts
// Export map for the GitHub helper module (bundler-generated wiring —
// __export registers lazy getters for the module's public names).
var gh_exports = {};
__export(gh_exports, {
  GitHubClient: () => GitHubClient,
  getInstance: () => getInstance
});
var import_octokit = require("octokit");
// Factory for this module's errors: wraps CustomError with a fixed
// name ("GitHubError") and an optional underlying cause.
var GitHubError = (msg, cause) => new CustomError(msg, { name: "GitHubError", cause });
// Convenience factory returning a ready-to-use GitHubClient.
function getInstance(authToken, owner) {
  return new GitHubClient(authToken, owner);
}
1052
// Thin wrapper over Octokit scoped to a single repository owner.
var GitHubClient = class {
  client;
  owner;
  /**
   * @param authToken GitHub auth token (required)
   * @param owner repository owner all requests are scoped to (required)
   * @throws {GitHubError} when either argument is missing
   */
  constructor(authToken, owner) {
    if (!authToken) {
      throw GitHubError("Missing GitHub auth token");
    }
    if (!owner) {
      throw GitHubError("Missing GitHub owner");
    }
    this.client = new import_octokit.Octokit({ auth: authToken });
    this.owner = owner;
  }
  /**
   * Get contents of a particular file in a repo
   * @param repo name of the repo
   * @param path path of a file in the repo
   * @returns an object {data, headers} containing normalized file content and response headers
   * @throws {GitHubError} in case of failure or invalid response
   */
  async getContent(repo, path3) {
    if (!repo || !path3) {
      throw GitHubError("Missing required parameters: repo or path");
    }
    try {
      const response = await this.client.rest.repos.getContent({
        owner: this.owner,
        repo,
        path: path3
      });
      if (Array.isArray(response.data)) {
        // getContent returns an array when the path is a directory.
        throw GitHubError("Expected a file but received a directory");
      }
      if (!("content" in response.data)) {
        throw GitHubError("Invalid response: missing file content");
      }
      // File content arrives base64-encoded.
      const decodedContent = Buffer.from(
        response.data.content,
        "base64"
      ).toString("utf-8");
      return {
        data: {
          path: response.data.path,
          content: decodedContent,
          sha: response.data.sha
        },
        headers: response.headers
      };
    } catch (err) {
      // Rethrow our own validation errors unchanged. Previously they were
      // caught here and re-wrapped in a generic "Failed to fetch" error,
      // hiding the specific cause (directory vs. missing content).
      if (err instanceof Error && err.name === "GitHubError") throw err;
      throw GitHubError(
        `[GitHub:getContent] Failed to fetch ${this.owner}/${repo}/${path3}`,
        err
      );
    }
  }
};
1108
+
1109
// src/index.ts
// Default export: every sub-module grouped under one namespace object.
var index_default = { fs: fs_exports, aws: aws_exports, gh: gh_exports, utils: utils_exports };
// Annotate the CommonJS export names for ESM import in node:
// (dead code — `0 &&` never executes; the literal exists only so Node's
// CJS named-export detection can surface these names to ESM importers)
0 && (module.exports = {
  aws,
  fs,
  gh,
  utils
});
1118
+ //# sourceMappingURL=index.js.map