@izara_project/izara-core-library-dynamodb 1.0.11 → 1.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js CHANGED
@@ -18,4 +18,6 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
18
18
  'use strict';
19
19
 
20
20
  // Re-export everything from DynamoDBSharedLib
21
- module.exports = require('./src/DynamoDBSharedLib');
21
+ import dynamoDBSharedLib from './src/DynamoDBSharedLib.js';
22
+
23
+ export default dynamoDBSharedLib;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@izara_project/izara-core-library-dynamodb",
3
- "version": "1.0.11",
3
+ "version": "1.0.13",
4
4
  "description": "Connecting with AWS DynamoDB Resource",
5
5
  "main": "index.js",
6
6
  "scripts": {
@@ -14,16 +14,17 @@
14
14
  "license": "AGPL-3.0-or-later",
15
15
  "homepage": "https://bitbucket.org/izara-core-libraries/izara-core-library-dynamodb/src/master/README.md",
16
16
  "devDependencies": {
17
- "jest": "^30.0.5"
17
+ "jest": "^30.2.0"
18
18
  },
19
19
  "jest": {
20
20
  "testEnvironment": "node"
21
21
  },
22
+ "type": "module",
22
23
  "dependencies": {
23
- "@aws-sdk/client-dynamodb": "^3.864.0",
24
- "@aws-sdk/lib-dynamodb": "^3.864.0",
25
- "@aws-sdk/util-dynamodb": "^3.864.0",
26
- "@izara_project/izara-core-library-core": "^1.0.20",
24
+ "@aws-sdk/client-dynamodb": "^3.932.0",
25
+ "@aws-sdk/lib-dynamodb": "^3.932.0",
26
+ "@aws-sdk/util-dynamodb": "^3.932.0",
27
+ "@izara_project/izara-core-library-core": "^1.0.28",
27
28
  "lodash.clonedeep": "^4.5.0"
28
29
  }
29
30
  }
@@ -17,15 +17,15 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
17
17
 
18
18
  'use strict';
19
19
 
20
- const { DynamoDBDocument } = require('@aws-sdk/lib-dynamodb');
21
- const { DynamoDB } = require('@aws-sdk/client-dynamodb');
22
- const { marshall, unmarshall } = require('@aws-sdk/util-dynamodb');
20
+ import { DynamoDBDocument } from '@aws-sdk/lib-dynamodb';
21
+ import { DynamoDB } from '@aws-sdk/client-dynamodb';
22
+ import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
23
23
 
24
- const cloneDeep = require('lodash.clonedeep');
24
+ import cloneDeep from 'lodash.clonedeep';
25
25
 
26
26
  // External Izara project imports
27
- const NoRetryError = require('@izara_project/izara-core-library-core').NoRetryError;
28
- // const { getServiceNameWithCache } = require('@izara_project/izara-core-library-service-schemas').serviceConfig;
27
+ import { NoRetryError } from '@izara_project/izara-core-library-core';
28
+ // import { getServiceNameWithCache } from '@izara_project/izara-core-library-service-schemas'; // then use .serviceConfig
29
29
 
30
30
  // Constants
31
31
  const MAX_RECURSION_LEVEL = 20;
@@ -1547,46 +1547,48 @@ async function batchDeleteItems(
1547
1547
  writeItems,
1548
1548
  keyFieldName
1549
1549
  ) {
1550
+ try {
1550
1551
 
1551
- let size = 25; // maybe should check size < 400 KB
1552
- let chunked = [];
1552
+ let size = 25; // maybe should check size < 400 KB
1553
+ let chunked = [];
1553
1554
 
1554
- for (let i = 0; i < writeItems.length; i += size) {
1555
- chunked.push(writeItems.slice(i, i + size)
1556
- );
1557
- }
1558
-
1559
- // ---- perform each chuck < 25 ----
1560
- for (let chunkIdx = 0; chunkIdx < chunked.length; chunkIdx++) {
1561
- let chunkSet = chunked[chunkIdx];
1562
- let requestItems = new Array();
1563
- // console.log('chunkSet', chunkSet);
1564
- // check sortKeyFieldName
1565
- for (let i = 0; i < chunkSet.length; i++) {
1566
- requestItems.push({
1567
- DeleteRequest: {
1568
- Key: {
1569
- [keyFieldName.partitionKeyFieldName]: chunkSet[i][keyFieldName.partitionKeyFieldName],
1570
- [keyFieldName.sortKeyFieldName]: chunkSet[i][keyFieldName.sortKeyFieldName]
1571
- }
1572
- }
1573
- });
1555
+ for (let i = 0; i < writeItems.length; i += size) {
1556
+ chunked.push(writeItems.slice(i, i + size)
1557
+ );
1574
1558
  }
1575
1559
 
1576
- let payload = {
1577
- RequestItems: {
1578
- [tableName]: requestItems
1579
- },
1580
- ReturnConsumedCapacity: "TOTAL"
1581
- };
1582
- _izContext.logger.debug('payload writeItems... ', payload);
1583
-
1584
- const returnValue = await dynamodb.batchWrite(payload);
1585
- _izContext.logger.debug("batchDeleteItems status: ", returnValue['$metadata']);
1586
- await captureCapacityUsed(_izContext, returnValue.ConsumedCapacity, 'write', 'delete');
1560
+ // ---- perform each chunk < 25 ----
1561
+ for (let chunkIdx = 0; chunkIdx < chunked.length; chunkIdx++) {
1562
+ let chunkSet = chunked[chunkIdx];
1563
+ let requestItems = new Array();
1564
+ // console.log('chunkSet', chunkSet);
1565
+ // check sortKeyFieldName
1566
+ for (let i = 0; i < chunkSet.length; i++) {
1567
+ requestItems.push({
1568
+ DeleteRequest: {
1569
+ Key: {
1570
+ [keyFieldName.partitionKeyFieldName]: chunkSet[i][keyFieldName.partitionKeyFieldName],
1571
+ [keyFieldName.sortKeyFieldName]: chunkSet[i][keyFieldName.sortKeyFieldName]
1572
+ }
1573
+ }
1574
+ });
1575
+ }
1587
1576
 
1588
- } // end loop
1577
+ let payload = {
1578
+ RequestItems: {
1579
+ [tableName]: requestItems
1580
+ },
1581
+ ReturnConsumedCapacity: "TOTAL"
1582
+ };
1583
+ _izContext.logger.debug('payload writeItems... ', payload);
1589
1584
 
1585
+ const returnValue = await dynamodb.batchWrite(payload);
1586
+ _izContext.logger.debug("batchDeleteItems status: ", returnValue['$metadata']);
1587
+ await captureCapacityUsed(_izContext, returnValue.ConsumedCapacity, 'write', 'delete');
1588
+ } // end loop
1589
+ } catch (err) {
1590
+ throw new Error('Error: batchDeleteItems', { cause: err });
1591
+ };
1590
1592
  _izContext.logger.info('----- finish all batchDeleteItems -----');
1591
1593
 
1592
1594
  };
@@ -1603,84 +1605,79 @@ async function batchPutItems(
1603
1605
  writeItems,
1604
1606
  settings // for batchWriteItems command,e.g., ReturnConsumedCapacity, ConsumedCapacity
1605
1607
  ) {
1608
+ try {
1609
+ let size = 25; // maybe should check size < 400 KB
1610
+ let chunked = [];
1611
+
1612
+ // sample usage --------------------
1613
+ // writeItems = [
1614
+ // {
1615
+ // attributes: {
1616
+ // configKey: 'configKey_1',
1617
+ // configTag: 'configTag_2',
1618
+ // configValue: 'configValue_3',
1619
+ // configValue2: 'configValue_3',
1620
+ // configValue3: 'configValue_3',
1621
+ // },
1622
+ // queryElements: {},
1623
+ // settings: {} // will not work in this command
1624
+ // },
1625
+ // ]
1626
+ // ---------------------------------
1627
+
1628
+
1629
+ for (let i = 0; i < writeItems.length; i += size) {
1630
+ chunked.push(writeItems.slice(i, i + size)
1631
+ );
1632
+ }
1606
1633
 
1607
- let size = 25; // maybe should check size < 400 KB
1608
- let chunked = [];
1609
-
1610
- // sample usage --------------------
1611
- // writeItems = [
1612
- // {
1613
- // attributes: {
1614
- // configKey: 'configKey_1',
1615
- // configTag: 'configTag_2',
1616
- // configValue: 'configValue_3',
1617
- // configValue2: 'configValue_3',
1618
- // configValue3: 'configValue_3',
1619
- // },
1620
- // queryElements: {},
1621
- // settings: {} // will not work in this command
1622
- // },
1623
- // ]
1624
- // ---------------------------------
1625
-
1626
-
1627
- for (let i = 0; i < writeItems.length; i += size) {
1628
- chunked.push(writeItems.slice(i, i + size)
1629
- );
1630
- }
1631
-
1632
- // ---- perform each chuck < 25 ----
1633
- for (let chunkIdx = 0; chunkIdx < chunked.length; chunkIdx++) {
1634
- let chunkSet = chunked[chunkIdx];
1635
- let requestItems = new Array();
1636
- // console.log('chunkSet', chunkSet);
1634
+ // ---- perform each chunk < 25 ----
1635
+ for (let chunkIdx = 0; chunkIdx < chunked.length; chunkIdx++) {
1636
+ let chunkSet = chunked[chunkIdx];
1637
+ let requestItems = new Array();
1638
+ // console.log('chunkSet', chunkSet);
1637
1639
 
1638
- for (let i = 0; i < chunkSet.length; i++) {
1640
+ for (let i = 0; i < chunkSet.length; i++) {
1639
1641
 
1640
- // hard code set to return just query
1641
- chunkSet[i].settings.returnQuery = true;
1642
+ // hard code set to return just query
1643
+ chunkSet[i].settings.returnQuery = true;
1642
1644
 
1643
- // NOTE: not help, cannot use complex in this batchWrite command just validata attributes and add context.
1644
- let putItemPayload = await module.exports.putItem(
1645
- _izContext,
1646
- await tableName(_izContext, tableNameData),
1647
- chunkSet[i].attributes,
1648
- chunkSet[i].queryElements,
1649
- chunkSet[i].settings
1650
- );
1645
+ // NOTE: does not help much — cannot use complex options in this batchWrite command; just validate attributes and add context.
1646
+ let putItemPayload = await putItem(
1647
+ _izContext,
1648
+ await tableName(_izContext, tableNameData),
1649
+ chunkSet[i].attributes,
1650
+ chunkSet[i].queryElements,
1651
+ chunkSet[i].settings
1652
+ );
1651
1653
 
1652
- requestItems.push({
1653
- PutRequest: {
1654
- // Item: chunkSet[i] // just basic using
1655
- Item: putItemPayload.Item
1656
- }
1657
- });
1658
- }
1654
+ requestItems.push({
1655
+ PutRequest: {
1656
+ // Item: chunkSet[i] // just basic using
1657
+ Item: putItemPayload.Item
1658
+ }
1659
+ });
1660
+ }
1659
1661
 
1660
- let payload = {
1661
- RequestItems: {
1662
- [tableNameData]: requestItems
1663
- },
1664
- ReturnConsumedCapacity: "TOTAL"
1665
- };
1666
- _izContext.logger.debug('payload writeItems... ', payload);
1667
1662
 
1668
- const returnValue = await dynamodb.batchWrite(payload);
1669
- _izContext.logger.debug("batchPutItems status: ", returnValue['$metadata']);
1670
- await captureCapacityUsed(_izContext, returnValue.ConsumedCapacity, 'write', 'put');
1663
+ let payload = {
1664
+ RequestItems: {
1665
+ [tableNameData]: requestItems
1666
+ },
1667
+ ReturnConsumedCapacity: "TOTAL"
1668
+ };
1669
+ _izContext.logger.debug('payload writeItems... ', payload);
1671
1670
 
1672
- } // end loop
1671
+ const returnValue = await dynamodb.batchWrite(payload);
1672
+ _izContext.logger.debug("batchPutItems status: ", returnValue['$metadata']);
1673
+ await captureCapacityUsed(_izContext, returnValue.ConsumedCapacity, 'write', 'put');
1674
+ } // end loop
1675
+ } catch (err) {
1676
+ throw new Error('Error: batchPutItems', { cause: err });
1677
+ }
1673
1678
 
1674
1679
  };
1675
1680
 
1676
- // this.batchPutItems(
1677
- // {
1678
- // correlationIds: require('@izara_project/izara-core-library-correlation-ids'),
1679
- // logger: require('@izara_project/izara-core-library-logger'),
1680
- // // integrationTestDetail: require('@izara_project/izara-core-library-integration-tests')
1681
- // }
1682
- // )
1683
-
1684
1681
  /**
1685
1682
  *
1686
1683
  * @param {object} _izContext
@@ -1718,7 +1715,7 @@ async function captureCapacityUsed(_izContext, capacityUsed, capacityStatus, que
1718
1715
  }
1719
1716
 
1720
1717
  // Consolidated exports
1721
- module.exports = {
1718
+ export default {
1722
1719
  // Table name functions
1723
1720
  tableName,
1724
1721