@webiny/api-headless-cms-ddb 5.15.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/LICENSE +21 -0
  2. package/README.md +25 -0
  3. package/configurations.d.ts +18 -0
  4. package/configurations.js +24 -0
  5. package/configurations.js.map +1 -0
  6. package/definitions/entry.d.ts +6 -0
  7. package/definitions/entry.js +86 -0
  8. package/definitions/entry.js.map +1 -0
  9. package/definitions/table.d.ts +14 -0
  10. package/definitions/table.js +30 -0
  11. package/definitions/table.js.map +1 -0
  12. package/dynamoDb/index.d.ts +2 -0
  13. package/dynamoDb/index.js +23 -0
  14. package/dynamoDb/index.js.map +1 -0
  15. package/dynamoDb/path/plainObject.d.ts +3 -0
  16. package/dynamoDb/path/plainObject.js +34 -0
  17. package/dynamoDb/path/plainObject.js.map +1 -0
  18. package/dynamoDb/path/ref.d.ts +3 -0
  19. package/dynamoDb/path/ref.js +28 -0
  20. package/dynamoDb/path/ref.js.map +1 -0
  21. package/dynamoDb/storage/date.d.ts +3 -0
  22. package/dynamoDb/storage/date.js +69 -0
  23. package/dynamoDb/storage/date.js.map +1 -0
  24. package/dynamoDb/storage/richText.d.ts +8 -0
  25. package/dynamoDb/storage/richText.js +108 -0
  26. package/dynamoDb/storage/richText.js.map +1 -0
  27. package/dynamoDb/transformValue/datetime.d.ts +3 -0
  28. package/dynamoDb/transformValue/datetime.js +48 -0
  29. package/dynamoDb/transformValue/datetime.js.map +1 -0
  30. package/index.d.ts +6 -0
  31. package/index.js +30 -0
  32. package/index.js.map +1 -0
  33. package/operations/entry/CmsContentEntryDynamo.d.ts +86 -0
  34. package/operations/entry/CmsContentEntryDynamo.js +972 -0
  35. package/operations/entry/CmsContentEntryDynamo.js.map +1 -0
  36. package/operations/entry/dataLoaders.d.ts +23 -0
  37. package/operations/entry/dataLoaders.js +283 -0
  38. package/operations/entry/dataLoaders.js.map +1 -0
  39. package/operations/entry/index.d.ts +4 -0
  40. package/operations/entry/index.js +25 -0
  41. package/operations/entry/index.js.map +1 -0
  42. package/operations/entry/systemFields.d.ts +2 -0
  43. package/operations/entry/systemFields.js +51 -0
  44. package/operations/entry/systemFields.js.map +1 -0
  45. package/operations/entry/utils.d.ts +26 -0
  46. package/operations/entry/utils.js +362 -0
  47. package/operations/entry/utils.js.map +1 -0
  48. package/operations/helpers.d.ts +5 -0
  49. package/operations/helpers.js +96 -0
  50. package/operations/helpers.js.map +1 -0
  51. package/operations/model/CmsContentModelDynamo.d.ts +18 -0
  52. package/operations/model/CmsContentModelDynamo.js +234 -0
  53. package/operations/model/CmsContentModelDynamo.js.map +1 -0
  54. package/operations/model/index.d.ts +3 -0
  55. package/operations/model/index.js +26 -0
  56. package/operations/model/index.js.map +1 -0
  57. package/operations/modelGroup/CmsContentModelGroupDynamo.d.ts +42 -0
  58. package/operations/modelGroup/CmsContentModelGroupDynamo.js +230 -0
  59. package/operations/modelGroup/CmsContentModelGroupDynamo.js.map +1 -0
  60. package/operations/modelGroup/index.d.ts +3 -0
  61. package/operations/modelGroup/index.js +26 -0
  62. package/operations/modelGroup/index.js.map +1 -0
  63. package/operations/settings/CmsSettingsDynamo.d.ts +16 -0
  64. package/operations/settings/CmsSettingsDynamo.js +145 -0
  65. package/operations/settings/CmsSettingsDynamo.js.map +1 -0
  66. package/operations/settings/index.d.ts +3 -0
  67. package/operations/settings/index.js +25 -0
  68. package/operations/settings/index.js.map +1 -0
  69. package/operations/system/CmsSystemDynamo.d.ts +16 -0
  70. package/operations/system/CmsSystemDynamo.js +126 -0
  71. package/operations/system/CmsSystemDynamo.js.map +1 -0
  72. package/operations/system/index.d.ts +3 -0
  73. package/operations/system/index.js +25 -0
  74. package/operations/system/index.js.map +1 -0
  75. package/package.json +61 -0
  76. package/types.d.ts +54 -0
  77. package/types.js +6 -0
  78. package/types.js.map +1 -0
  79. package/utils.d.ts +5 -0
  80. package/utils.js +62 -0
  81. package/utils.js.map +1 -0
@@ -0,0 +1,108 @@
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");

Object.defineProperty(exports, "__esModule", {
    value: true
});
exports.default = void 0;

var _jsonpack = _interopRequireDefault(require("jsonpack"));

var _error = _interopRequireDefault(require("@webiny/error"));

/**
 * Own-property check that does not rely on the value's prototype chain.
 * Values coming back from storage may have a null prototype or may shadow
 * "hasOwnProperty", in which case `value.hasOwnProperty(...)` could throw
 * or lie — `Object.prototype.hasOwnProperty.call` is always safe.
 */
const hasOwn = (target, key) => Object.prototype.hasOwnProperty.call(target, key);

/**
 * Recursively turns array-like values produced by jsonpack back into real
 * Array instances.
 *
 * Remove when jsonpack gets PR with a fix merged
 * https://github.com/rgcl/jsonpack/pull/25/files
 * NOTE 2021-07-28: it seems PR is not going to be merged so keep this.
 */
const transformArray = value => {
    let isArray = Array.isArray(value);
    // Array.isArray() true while `instanceof Array` false means the value is
    // an array from another realm — normalize it into a local Array.
    const shouldBeArray = value instanceof Array === false && isArray;

    if (shouldBeArray) {
        value = Array.from(value);
        isArray = true;
    }

    if (typeof value === "object" || isArray) {
        for (const k in value) {
            value[k] = transformArray(value[k]);
        }
    }

    return value;
};

/**
 * Storage plugin for the "rich-text" field type: compresses the value with
 * jsonpack on write (toStorage) and decompresses it on read (fromStorage).
 */
var _default = () => {
    return {
        type: "cms-model-field-to-storage",
        name: "cms-model-field-to-storage-rich-text",
        fieldType: "rich-text",

        async fromStorage({
            field,
            value: storageValue
        }) {
            if (!storageValue) {
                return storageValue;
            } else if (typeof storageValue !== "object") {
                throw new _error.default(`Value received in "fromStorage" function is not an object in field "${field.fieldId}".`);
            }
            /**
             * This is to circumvent a bug introduced with 5.8.0 storage operations.
             * Do not remove.
             */
            if (hasOwn(storageValue, "compression") === false) {
                return storageValue;
            }

            const {
                compression,
                value
            } = storageValue;

            if (!compression) {
                throw new _error.default(`Missing compression in "fromStorage" function in field "${field.fieldId}": ${JSON.stringify(storageValue)}.`, "MISSING_COMPRESSION", {
                    value: storageValue
                });
            }

            if (compression !== "jsonpack") {
                throw new _error.default(`This plugin cannot transform something not packed with "jsonpack".`, "WRONG_COMPRESSION", {
                    compression
                });
            }

            try {
                return _jsonpack.default.unpack(value);
            } catch {
                // Deliberate best-effort: a corrupt packed value yields null
                // instead of failing the whole read.
                return null;
            }
        },

        async toStorage({
            value
        }) {
            /**
             * There is a possibility that we are trying to compress already compressed value.
             * Introduced a bug with 5.8.0 storage operations, so just return the value to correct it.
             */
            if (value && hasOwn(value, "compression") === true) {
                return value;
            }

            value = transformArray(value);
            return {
                compression: "jsonpack",
                value: value ? _jsonpack.default.pack(value) : value
            };
        }

    };
};

exports.default = _default;
//# sourceMappingURL=richText.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/dynamoDb/storage/richText.ts"],"names":["transformArray","value","isArray","Array","shouldBeArray","from","k","type","name","fieldType","fromStorage","field","storageValue","WebinyError","fieldId","hasOwnProperty","compression","JSON","stringify","jsonpack","unpack","toStorage","pack"],"mappings":";;;;;;;;;AAAA;;AACA;;AAUA;AACA;AACA;AACA;AACA;AACA,MAAMA,cAAc,GAAIC,KAAD,IAAwC;AAC3D,MAAIC,OAAO,GAAGC,KAAK,CAACD,OAAN,CAAcD,KAAd,CAAd;AACA,QAAMG,aAAa,GAAGH,KAAK,YAAYE,KAAjB,KAA2B,KAA3B,IAAoCD,OAA1D;;AACA,MAAIE,aAAJ,EAAmB;AACfH,IAAAA,KAAK,GAAGE,KAAK,CAACE,IAAN,CAAWJ,KAAX,CAAR;AACAC,IAAAA,OAAO,GAAG,IAAV;AACH;;AACD,MAAI,OAAOD,KAAP,KAAiB,QAAjB,IAA6BC,OAAjC,EAA0C;AACtC,SAAK,MAAMI,CAAX,IAAgBL,KAAhB,EAAuB;AACnBA,MAAAA,KAAK,CAACK,CAAD,CAAL,GAAWN,cAAc,CAACC,KAAK,CAACK,CAAD,CAAN,CAAzB;AACH;AACJ;;AACD,SAAOL,KAAP;AACH,CAbD;;eAee,MAAiE;AAC5E,SAAO;AACHM,IAAAA,IAAI,EAAE,4BADH;AAEHC,IAAAA,IAAI,EAAE,sCAFH;AAGHC,IAAAA,SAAS,EAAE,WAHR;;AAIH,UAAMC,WAAN,CAAkB;AAAEC,MAAAA,KAAF;AAASV,MAAAA,KAAK,EAAEW;AAAhB,KAAlB,EAAkD;AAC9C,UAAI,CAACA,YAAL,EAAmB;AACf,eAAOA,YAAP;AACH,OAFD,MAEO,IAAI,OAAOA,YAAP,KAAwB,QAA5B,EAAsC;AACzC,cAAM,IAAIC,cAAJ,CACD,uEAAsEF,KAAK,CAACG,OAAQ,IADnF,CAAN;AAGH;AACD;AACZ;AACA;AACA;;;AACY,UAAIF,YAAY,CAACG,cAAb,CAA4B,aAA5B,MAA+C,KAAnD,EAA0D;AACtD,eAAOH,YAAP;AACH;;AACD,YAAM;AAAEI,QAAAA,WAAF;AAAef,QAAAA;AAAf,UAAyBW,YAA/B;;AACA,UAAI,CAACI,WAAL,EAAkB;AACd,cAAM,IAAIH,cAAJ,CACD,2DACGF,KAAK,CAACG,OACT,MAAKG,IAAI,CAACC,SAAL,CAAeN,YAAf,CAA6B,GAHjC,EAIF,qBAJE,EAKF;AACIX,UAAAA,KAAK,EAAEW;AADX,SALE,CAAN;AASH;;AACD,UAAII,WAAW,KAAK,UAApB,EAAgC;AAC5B,cAAM,IAAIH,cAAJ,CACD,oEADC,EAEF,mBAFE,EAGF;AACIG,UAAAA;AADJ,SAHE,CAAN;AAOH;;AACD,UAAI;AACA,eAAOG,kBAASC,MAAT,CAAgBnB,KAAhB,CAAP;AACH,OAFD,CAEE,MAAM;AACJ,eAAO,IAAP;AACH;AACJ,KA7CE;;AA8CH,UAAMoB,SAAN,CAAgB;AAAEpB,MAAAA;AAAF,KAAhB,EAA2B;AACvB;AACZ;AACA;AACA;AACY,UAAIA,KAAK,IAAIA,KAAK,CAACc,cAAN,CAAqB,aAArB,MAAwC,IAArD,EAA2D;AACvD,eAAOd,KAAP;AACH;;AACDA,MAAAA,KAAK,GAAGD,cAAc,CAACC,KAAD,CAAtB;AACA,aAAO;AACHe,QAAAA,WAAW
,EAAE,UADV;AAEHf,QAAAA,KAAK,EAAEA,KAAK,GAAGkB,kBAASG,IAAT,CAAcrB,KAAd,CAAH,GAA0BA;AAFnC,OAAP;AAIH;;AA3DE,GAAP;AA6DH,C","sourcesContent":["import jsonpack from \"jsonpack\";\nimport WebinyError from \"@webiny/error\";\nimport { CmsModelFieldToStoragePlugin } from \"@webiny/api-headless-cms/types\";\n\nexport type OriginalValue = Record<string, any> | any[];\n\nexport interface StorageValue {\n compression: string;\n value: any;\n}\n\n/**\n * Remove when jsonpack gets PR with a fix merged\n * https://github.com/rgcl/jsonpack/pull/25/files\n * NOTE 2021-07-28: it seems PR is not going to be merged so keep this.\n */\nconst transformArray = (value: Record<string, any> | any[]) => {\n let isArray = Array.isArray(value);\n const shouldBeArray = value instanceof Array === false && isArray;\n if (shouldBeArray) {\n value = Array.from(value as any);\n isArray = true;\n }\n if (typeof value === \"object\" || isArray) {\n for (const k in value) {\n value[k] = transformArray(value[k]);\n }\n }\n return value;\n};\n\nexport default (): CmsModelFieldToStoragePlugin<OriginalValue, StorageValue> => {\n return {\n type: \"cms-model-field-to-storage\",\n name: \"cms-model-field-to-storage-rich-text\",\n fieldType: \"rich-text\",\n async fromStorage({ field, value: storageValue }) {\n if (!storageValue) {\n return storageValue;\n } else if (typeof storageValue !== \"object\") {\n throw new WebinyError(\n `Value received in \"fromStorage\" function is not an object in field \"${field.fieldId}\".`\n );\n }\n /**\n * This is to circumvent a bug introduced with 5.8.0 storage operations.\n * Do not remove.\n */\n if (storageValue.hasOwnProperty(\"compression\") === false) {\n return storageValue;\n }\n const { compression, value } = storageValue;\n if (!compression) {\n throw new WebinyError(\n `Missing compression in \"fromStorage\" function in field \"${\n field.fieldId\n }\": ${JSON.stringify(storageValue)}.`,\n \"MISSING_COMPRESSION\",\n {\n value: storageValue\n }\n );\n }\n if 
(compression !== \"jsonpack\") {\n throw new WebinyError(\n `This plugin cannot transform something not packed with \"jsonpack\".`,\n \"WRONG_COMPRESSION\",\n {\n compression\n }\n );\n }\n try {\n return jsonpack.unpack(value);\n } catch {\n return null;\n }\n },\n async toStorage({ value }) {\n /**\n * There is a possibility that we are trying to compress already compressed value.\n * Introduced a bug with 5.8.0 storage operations, so just return the value to correct it.\n */\n if (value && value.hasOwnProperty(\"compression\") === true) {\n return value as any;\n }\n value = transformArray(value);\n return {\n compression: \"jsonpack\",\n value: value ? jsonpack.pack(value) : value\n };\n }\n };\n};\n"],"file":"richText.js"}
@@ -0,0 +1,3 @@
import { CmsFieldFilterValueTransformPlugin } from "../../types";
/**
 * Factory for the "datetime" filter-value transform plugin. Accepts
 * Date | string | number filter values and produces a numeric form
 * (presumably milliseconds — see the implementation's comment).
 */
declare const _default: () => CmsFieldFilterValueTransformPlugin<Date | string | number, number>;
export default _default;
@@ -0,0 +1,48 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
    value: true
});
exports.default = void 0;

var _TimeTransformPlugin = require("@webiny/db-dynamodb/plugins/definitions/TimeTransformPlugin");

var _DateTimeTransformPlugin = require("@webiny/db-dynamodb/plugins/definitions/DateTimeTransformPlugin");

// Wildcard transformers: apply to every field name.
const timeTransformer = new _TimeTransformPlugin.TimeTransformPlugin({
    fields: ["*"]
});
const dateTimeTransformer = new _DateTimeTransformPlugin.DateTimeTransformPlugin({
    fields: ["*"]
});

/**
 * Filter-value transform plugin for "datetime" fields.
 * Always transform into the milliseconds.
 */
var _default = () => ({
    type: "cms-field-filter-value-transform",
    name: "cms-field-value-filter-transform-datetime",
    fieldType: "datetime",
    transform: ({
        field,
        value
    }) => {
        const settings = field.settings || {};
        // A field configured as plain "time" uses the time transformer;
        // everything else is treated as a full date-time.
        const transformer = settings.type === "time" ? timeTransformer : dateTimeTransformer;
        return transformer.transform({
            value
        });
    }
});

exports.default = _default;
//# sourceMappingURL=datetime.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/dynamoDb/transformValue/datetime.ts"],"names":["timeTransformer","TimeTransformPlugin","fields","dateTimeTransformer","DateTimeTransformPlugin","type","name","fieldType","transform","field","value","settings"],"mappings":";;;;;;;AACA;;AACA;;AAEA,MAAMA,eAAe,GAAG,IAAIC,wCAAJ,CAAwB;AAC5CC,EAAAA,MAAM,EAAE,CAAC,GAAD;AADoC,CAAxB,CAAxB;AAGA,MAAMC,mBAAmB,GAAG,IAAIC,gDAAJ,CAA4B;AACpDF,EAAAA,MAAM,EAAE,CAAC,GAAD;AAD4C,CAA5B,CAA5B;;eAIe,OAA2E;AACtFG,EAAAA,IAAI,EAAE,kCADgF;AAEtFC,EAAAA,IAAI,EAAE,2CAFgF;AAGtFC,EAAAA,SAAS,EAAE,UAH2E;;AAItF;AACJ;AACA;AACIC,EAAAA,SAAS,EAAE,CAAC;AAAEC,IAAAA,KAAF;AAASC,IAAAA;AAAT,GAAD,KAAsB;AAC7B,UAAM;AAAEL,MAAAA;AAAF,QAAWI,KAAK,CAACE,QAAN,IAAkB,EAAnC;;AACA,QAAIN,IAAI,KAAK,MAAb,EAAqB;AACjB,aAAOL,eAAe,CAACQ,SAAhB,CAA0B;AAC7BE,QAAAA;AAD6B,OAA1B,CAAP;AAGH;;AACD,WAAOP,mBAAmB,CAACK,SAApB,CAA8B;AACjCE,MAAAA;AADiC,KAA9B,CAAP;AAGH;AAjBqF,CAA3E,C","sourcesContent":["import { CmsFieldFilterValueTransformPlugin } from \"~/types\";\nimport { TimeTransformPlugin } from \"@webiny/db-dynamodb/plugins/definitions/TimeTransformPlugin\";\nimport { DateTimeTransformPlugin } from \"@webiny/db-dynamodb/plugins/definitions/DateTimeTransformPlugin\";\n\nconst timeTransformer = new TimeTransformPlugin({\n fields: [\"*\"]\n});\nconst dateTimeTransformer = new DateTimeTransformPlugin({\n fields: [\"*\"]\n});\n\nexport default (): CmsFieldFilterValueTransformPlugin<Date | string | number, number> => ({\n type: \"cms-field-filter-value-transform\",\n name: \"cms-field-value-filter-transform-datetime\",\n fieldType: \"datetime\",\n /**\n * Always transform into the milliseconds.\n */\n transform: ({ field, value }) => {\n const { type } = field.settings || {};\n if (type === \"time\") {\n return timeTransformer.transform({\n value\n });\n }\n return dateTimeTransformer.transform({\n value\n });\n }\n});\n"],"file":"datetime.js"}
package/index.d.ts ADDED
@@ -0,0 +1,6 @@
import { CmsContentEntryConfiguration } from "./operations/entry/CmsContentEntryDynamo";
/** Optional configuration for the DynamoDB storage-operations bundle. */
interface Configuration {
    entry?: CmsContentEntryConfiguration;
}
/**
 * Factory returning every storage-operations provider (settings, system,
 * model group, model, entry) plus the DynamoDB field plugins; only the
 * entry provider is configurable.
 */
declare const _default: (configuration?: Configuration) => (import("@webiny/api-headless-cms/types").CmsContentModelGroupStorageOperationsProvider | import("@webiny/api-headless-cms/types").CmsContentModelStorageOperationsProvider | import("@webiny/api-headless-cms/types").CmsContentEntryStorageOperationsProvider | import("@webiny/api-headless-cms/types").CmsSettingsStorageOperationsProviderPlugin | import("@webiny/api-headless-cms/types").CmsSystemStorageOperationsProviderPlugin | (import("./types").CmsFieldFilterPathPlugin | import("@webiny/api-headless-cms/types").CmsModelFieldToStoragePlugin<import("./dynamoDb/storage/richText").OriginalValue, import("./dynamoDb/storage/richText").StorageValue> | import("@webiny/api-headless-cms/types").CmsModelFieldToStoragePlugin<string | Date, string> | import("./types").CmsFieldFilterValueTransformPlugin<string | number | Date, number>)[])[];
export default _default;
package/index.js ADDED
@@ -0,0 +1,30 @@
"use strict";

var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");

Object.defineProperty(exports, "__esModule", {
    value: true
});
exports.default = void 0;

var _settings = _interopRequireDefault(require("./operations/settings"));

var _system = _interopRequireDefault(require("./operations/system"));

var _modelGroup = _interopRequireDefault(require("./operations/modelGroup"));

var _model = _interopRequireDefault(require("./operations/model"));

var _entry = _interopRequireDefault(require("./operations/entry"));

var _dynamoDb = _interopRequireDefault(require("./dynamoDb"));

/**
 * Builds the full list of DynamoDB storage-operations providers for the
 * Headless CMS. Only the entry provider accepts configuration.
 */
var _default = configuration => {
    const entry = (configuration || {}).entry;

    const providers = [
        (0, _settings.default)(),
        (0, _system.default)(),
        (0, _modelGroup.default)(),
        (0, _model.default)(),
        (0, _entry.default)(entry),
        (0, _dynamoDb.default)()
    ];

    return providers;
};

exports.default = _default;
//# sourceMappingURL=index.js.map
package/index.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts"],"names":["configuration","entry"],"mappings":";;;;;;;;;AAAA;;AACA;;AACA;;AACA;;AACA;;AACA;;eAMgBA,aAAD,IAAmC;AAC9C,QAAM;AAAEC,IAAAA;AAAF,MAAYD,aAAa,IAAI,EAAnC;AACA,SAAO,CACH,wBADG,EAEH,sBAFG,EAGH,0BAHG,EAIH,qBAJG,EAKH,oBAAsCC,KAAtC,CALG,EAMH,wBANG,CAAP;AAQH,C","sourcesContent":["import settingsOperationsProvider from \"./operations/settings\";\nimport systemOperationsProvider from \"./operations/system\";\nimport contentModelGroupStorageOperationsProvider from \"./operations/modelGroup\";\nimport contentModelStorageOperationsProvider from \"./operations/model\";\nimport contentEntryStorageOperationsProvider from \"./operations/entry\";\nimport dynamoDbPlugins from \"./dynamoDb\";\nimport { CmsContentEntryConfiguration } from \"./operations/entry/CmsContentEntryDynamo\";\n\ninterface Configuration {\n entry?: CmsContentEntryConfiguration;\n}\nexport default (configuration?: Configuration) => {\n const { entry } = configuration || {};\n return [\n settingsOperationsProvider(),\n systemOperationsProvider(),\n contentModelGroupStorageOperationsProvider(),\n contentModelStorageOperationsProvider(),\n contentEntryStorageOperationsProvider(entry),\n dynamoDbPlugins()\n ];\n};\n"],"file":"index.js"}
@@ -0,0 +1,86 @@
import { CmsContentEntry, CmsContentEntryStorageOperations, CmsContentEntryStorageOperationsCreateArgs, CmsContentEntryStorageOperationsCreateRevisionFromArgs, CmsContentEntryStorageOperationsDeleteArgs, CmsContentEntryStorageOperationsDeleteRevisionArgs, CmsContentEntryStorageOperationsGetArgs, CmsContentEntryStorageOperationsListArgs, CmsContentEntryStorageOperationsListResponse, CmsContentEntryStorageOperationsPublishArgs, CmsContentEntryStorageOperationsRequestChangesArgs, CmsContentEntryStorageOperationsRequestReviewArgs, CmsContentEntryStorageOperationsUnpublishArgs, CmsContentEntryStorageOperationsUpdateArgs, CmsContentModel, CmsContext } from "@webiny/api-headless-cms/types";
import { Entity, Table } from "dynamodb-toolbox";
import { queryOptions as DynamoDBToolboxQueryOptions } from "dynamodb-toolbox/dist/classes/Table";
/** Record TYPE marker for a plain entry revision. */
export declare const TYPE_ENTRY = "cms.entry";
/** Record TYPE marker for the "latest revision" copy of an entry. */
export declare const TYPE_ENTRY_LATEST: string;
/** Record TYPE marker for the "published revision" copy of an entry. */
export declare const TYPE_ENTRY_PUBLISHED: string;
/** User-tunable listing limits for the entry storage operations. */
export interface CmsContentEntryConfiguration {
    defaultLimit?: number;
    maxLimit?: number;
}
/** Constructor arguments for CmsContentEntryDynamo. */
interface ConstructorArgs {
    context: CmsContext;
    configuration: CmsContentEntryConfiguration;
}
/** Arguments for runQuery(); omitting `options` targets the primary index. */
interface RunQueryArgs {
    options?: DynamoDBToolboxQueryOptions;
    partitionKey: string;
}
/**
 * We do not use transactions in this storage operations implementation due to their cost.
 */
export declare class CmsContentEntryDynamo implements CmsContentEntryStorageOperations {
    private readonly _context;
    private readonly _configuration;
    private readonly _modelPartitionKey;
    private readonly _dataLoaders;
    private readonly _table;
    private readonly _entity;
    private get context();
    private get configuration();
    /** dynamodb-toolbox Table this storage implementation writes to. */
    get table(): Table;
    /** dynamodb-toolbox Entity describing a CMS entry item. */
    get entity(): Entity<any>;
    constructor({ context, configuration }: ConstructorArgs);
    create(model: CmsContentModel, args: CmsContentEntryStorageOperationsCreateArgs): Promise<CmsContentEntry>;
    createRevisionFrom(model: CmsContentModel, args: CmsContentEntryStorageOperationsCreateRevisionFromArgs): Promise<import("@webiny/api-headless-cms/types").CmsStorageContentEntry>;
    delete(model: CmsContentModel, args: CmsContentEntryStorageOperationsDeleteArgs): Promise<void>;
    deleteRevision(model: CmsContentModel, args: CmsContentEntryStorageOperationsDeleteRevisionArgs): Promise<void>;
    get(model: CmsContentModel, args: CmsContentEntryStorageOperationsGetArgs): Promise<CmsContentEntry | null>;
    list(model: CmsContentModel, args: CmsContentEntryStorageOperationsListArgs): Promise<CmsContentEntryStorageOperationsListResponse>;
    update(model: CmsContentModel, args: CmsContentEntryStorageOperationsUpdateArgs): Promise<CmsContentEntry>;
    publish(model: CmsContentModel, args: CmsContentEntryStorageOperationsPublishArgs): Promise<CmsContentEntry>;
    unpublish(model: CmsContentModel, args: CmsContentEntryStorageOperationsUnpublishArgs): Promise<CmsContentEntry>;
    requestChanges(model: CmsContentModel, args: CmsContentEntryStorageOperationsRequestChangesArgs): Promise<CmsContentEntry>;
    requestReview(model: CmsContentModel, args: CmsContentEntryStorageOperationsRequestReviewArgs): Promise<CmsContentEntry>;
    getAllRevisionsByIds(model: CmsContentModel, ids: readonly string[]): Promise<CmsContentEntry[]>;
    getByIds(model: CmsContentModel, ids: readonly string[]): Promise<CmsContentEntry[]>;
    getPublishedByIds(model: CmsContentModel, ids: readonly string[]): Promise<CmsContentEntry[]>;
    getLatestByIds(model: CmsContentModel, ids: readonly string[]): Promise<CmsContentEntry[]>;
    getRevisions(model: CmsContentModel, id: string): Promise<CmsContentEntry[]>;
    getRevisionById(model: CmsContentModel, id: string): Promise<CmsContentEntry | null>;
    getPublishedRevisionByEntryId(model: CmsContentModel, entryId: string): Promise<CmsContentEntry | null>;
    getLatestRevisionByEntryId(model: CmsContentModel, entryId: string): Promise<CmsContentEntry | null>;
    getPreviousRevision(model: CmsContentModel, entryId: string, version: number): Promise<CmsContentEntry | null>;
    private getSingleDynamoDbItem;
    getPartitionKey(id: string): string;
    private get partitionKey();
    private getGSIPartitionKey;
    private getGSIEntryPartitionKey;
    private getGSILatestPartitionKey;
    private getGSIPublishedPartitionKey;
    private getGSISortKey;
    /**
     * Gets a secondary key in form of REV#version from:
     * id#0003
     * 0003
     * 3
     */
    getSortKeyRevision(version: string | number): string;
    getSortKeyLatest(): string;
    getSortKeyPublished(): string;
    /**
     * Method to build the query partition keys, always an array, and create the target index:
     * - if undefined then it is primary
     * - if populated then it is that given one (and partition keys are reflecting that)
     */
    private createQueryOptions;
    /**
     * A method to query the database at the given partition key with the built query options.
     * Method runs in the loop until it reads everything it needs to.
     * We could impose the limit on the records read but there is no point since we MUST read everything to be able
     * to filter and sort the data.
     */
    runQuery(args: RunQueryArgs): Promise<CmsContentEntry[]>;
    private query;
}
export {};
@@ -0,0 +1,972 @@
1
+ "use strict";
2
+
3
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
4
+
5
+ Object.defineProperty(exports, "__esModule", {
6
+ value: true
7
+ });
8
+ exports.CmsContentEntryDynamo = exports.TYPE_ENTRY_PUBLISHED = exports.TYPE_ENTRY_LATEST = exports.TYPE_ENTRY = void 0;
9
+
10
+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
11
+
12
+ var _error = _interopRequireDefault(require("@webiny/error"));
13
+
14
+ var _dataLoaders = require("./dataLoaders");
15
+
16
+ var _types = require("@webiny/api-headless-cms/types");
17
+
18
+ var _utils = require("@webiny/api-headless-cms/utils");
19
+
20
+ var _utils2 = require("../../utils");
21
+
22
+ var _utils3 = require("./utils");
23
+
24
+ var _lodash = _interopRequireDefault(require("lodash.clonedeep"));
25
+
26
+ var _entry = require("../../definitions/entry");
27
+
28
+ var _table = require("../../definitions/table");
29
+
/**
 * Collects the own enumerable string keys of `object`, followed by its own
 * symbol keys. When `enumerableOnly` is truthy, non-enumerable symbols are
 * filtered out. (Babel object-spread helper.)
 */
function ownKeys(object, enumerableOnly) {
    const keys = Object.keys(object);
    if (Object.getOwnPropertySymbols) {
        let symbols = Object.getOwnPropertySymbols(object);
        if (enumerableOnly) {
            symbols = symbols.filter(
                (sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable
            );
        }
        keys.push(...symbols);
    }
    return keys;
}
31
+
/**
 * Babel object-spread helper: merges each source object's own properties
 * into `target`. Odd-positioned sources copy enumerable values through
 * _defineProperty2; even-positioned ones copy full property descriptors
 * when Object.getOwnPropertyDescriptors is available.
 */
function _objectSpread(target) {
    for (let i = 1; i < arguments.length; i++) {
        const source = arguments[i] != null ? arguments[i] : {};
        if (i % 2) {
            ownKeys(Object(source), true).forEach((key) => {
                (0, _defineProperty2.default)(target, key, source[key]);
            });
        } else if (Object.getOwnPropertyDescriptors) {
            Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
        } else {
            ownKeys(Object(source)).forEach((key) => {
                Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
            });
        }
    }
    return target;
}
33
+
// TYPE attribute markers stored on each DynamoDB item.
const TYPE_ENTRY = "cms.entry";
exports.TYPE_ENTRY = TYPE_ENTRY;
// Marks the "latest revision" copy of an entry.
const TYPE_ENTRY_LATEST = TYPE_ENTRY + ".l";
exports.TYPE_ENTRY_LATEST = TYPE_ENTRY_LATEST;
// Marks the "published revision" copy of an entry.
const TYPE_ENTRY_PUBLISHED = TYPE_ENTRY + ".p";
exports.TYPE_ENTRY_PUBLISHED = TYPE_ENTRY_PUBLISHED;
// Name of the global secondary index used for per-model entry queries.
const GSI1_INDEX = "GSI1";
// Listing limits used when the caller configures none; maxLimit is
// intentionally unbounded by default (see list() in the class below).
const configurationDefaults = {
    defaultLimit: 100,
    maxLimit: undefined
};
45
+ /**
46
+ * We do not use transactions in this storage operations implementation due to their cost.
47
+ */
48
+
49
+ class CmsContentEntryDynamo {
50
+ get context() {
51
+ return this._context;
52
+ }
53
+
54
+ get configuration() {
55
+ return this._configuration;
56
+ }
57
+
58
+ get table() {
59
+ return this._table;
60
+ }
61
+
62
+ get entity() {
63
+ return this._entity;
64
+ }
65
+
66
+ constructor({
67
+ context,
68
+ configuration
69
+ }) {
70
+ (0, _defineProperty2.default)(this, "_context", void 0);
71
+ (0, _defineProperty2.default)(this, "_configuration", void 0);
72
+ (0, _defineProperty2.default)(this, "_modelPartitionKey", void 0);
73
+ (0, _defineProperty2.default)(this, "_dataLoaders", void 0);
74
+ (0, _defineProperty2.default)(this, "_table", void 0);
75
+ (0, _defineProperty2.default)(this, "_entity", void 0);
76
+ this._context = context;
77
+ this._configuration = _objectSpread(_objectSpread({}, configurationDefaults), configuration || {});
78
+ this._modelPartitionKey = `${this.partitionKey}#M`;
79
+ this._dataLoaders = new _dataLoaders.DataLoadersHandler(context, this);
80
+ this._table = (0, _table.createTable)({
81
+ context,
82
+ indexes: {
83
+ [GSI1_INDEX]: {
84
+ partitionKey: "GSI1_PK",
85
+ sortKey: "GSI1_SK"
86
+ }
87
+ }
88
+ });
89
+ this._entity = (0, _entry.createEntryEntity)({
90
+ table: this._table
91
+ });
92
+ }
93
+
94
+ async create(model, args) {
95
+ const {
96
+ entry,
97
+ storageEntry
98
+ } = args;
99
+ const partitionKey = this.getPartitionKey(entry.id);
100
+ /**
101
+ * We need to:
102
+ * - create new main entry item
103
+ * - create new or update latest entry item
104
+ */
105
+
106
+ const items = [this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
107
+ PK: partitionKey,
108
+ SK: this.getSortKeyRevision(entry.version),
109
+ TYPE: TYPE_ENTRY,
110
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
111
+ GSI1_SK: this.getGSISortKey(storageEntry)
112
+ })), this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
113
+ PK: partitionKey,
114
+ SK: this.getSortKeyLatest(),
115
+ TYPE: TYPE_ENTRY_LATEST,
116
+ GSI1_PK: this.getGSILatestPartitionKey(model),
117
+ GSI1_SK: this.getGSISortKey(storageEntry)
118
+ }))];
119
+
120
+ try {
121
+ await this._table.batchWrite(items);
122
+ } catch (ex) {
123
+ throw new _error.default(ex.message || "Could not insert data into the DynamoDB.", ex.code || "CREATE_ENTRY_ERROR", {
124
+ error: ex,
125
+ entry
126
+ });
127
+ }
128
+
129
+ return storageEntry;
130
+ }
131
+
132
+ async createRevisionFrom(model, args) {
133
+ const {
134
+ originalEntry,
135
+ entry,
136
+ storageEntry,
137
+ latestEntry
138
+ } = args;
139
+ const partitionKey = this.getPartitionKey(storageEntry.id);
140
+ /**
141
+ * We need to:
142
+ * - create the main entry item
143
+ * - update the last entry item to a current one
144
+ */
145
+
146
+ const items = [this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
147
+ PK: partitionKey,
148
+ SK: this.getSortKeyRevision(storageEntry.version),
149
+ TYPE: TYPE_ENTRY,
150
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
151
+ GSI1_SK: this.getGSISortKey(storageEntry)
152
+ })), this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
153
+ PK: partitionKey,
154
+ SK: this.getSortKeyLatest(),
155
+ TYPE: TYPE_ENTRY_LATEST,
156
+ GSI1_PK: this.getGSILatestPartitionKey(model),
157
+ GSI1_SK: this.getGSISortKey(storageEntry)
158
+ }))];
159
+
160
+ try {
161
+ await this._table.batchWrite(items);
162
+ } catch (ex) {
163
+ throw new _error.default(ex.message || "Could not create revision from given entry.", ex.code || "CREATE_REVISION_ERROR", {
164
+ error: ex,
165
+ originalEntry,
166
+ latestEntry,
167
+ entry,
168
+ storageEntry
169
+ });
170
+ }
171
+ /**
172
+ * There are no modifications on the entry created so just return the data.
173
+ */
174
+
175
+
176
+ return storageEntry;
177
+ }
178
+
179
+ async delete(model, args) {
180
+ const {
181
+ entry
182
+ } = args;
183
+ const partitionKey = this.getPartitionKey(entry.id);
184
+ const results = await this._entity.query(partitionKey, {
185
+ gte: " "
186
+ });
187
+ const keys = results.Items.map(item => ({
188
+ PK: partitionKey,
189
+ SK: item.SK
190
+ }));
191
+
192
+ try {
193
+ await this._table.batchWrite(keys.map(key => this._entity.deleteBatch(key)));
194
+ } catch (ex) {
195
+ throw new _error.default(ex.message || "Could not delete the entry.", ex.code || "DELETE_ENTRY_ERROR", {
196
+ error: ex,
197
+ partitionKey,
198
+ keys
199
+ });
200
+ }
201
+ }
202
+
203
+ async deleteRevision(model, args) {
204
+ const {
205
+ entryToDelete,
206
+ entryToSetAsLatest,
207
+ storageEntryToSetAsLatest
208
+ } = args;
209
+ const partitionKey = this.getPartitionKey(entryToDelete.id);
210
+ const items = [this._entity.deleteBatch({
211
+ PK: partitionKey,
212
+ SK: this.getSortKeyRevision(entryToDelete.id)
213
+ })];
214
+ const publishedStorageEntry = await this.getPublishedRevisionByEntryId(model, entryToDelete.id);
215
+ /**
216
+ * If revision we are deleting is the published one as well, we need to delete those records as well.
217
+ */
218
+
219
+ if (publishedStorageEntry && entryToDelete.id === publishedStorageEntry.id) {
220
+ items.push(this._entity.deleteBatch({
221
+ PK: partitionKey,
222
+ SK: this.getSortKeyPublished()
223
+ }));
224
+ }
225
+
226
+ if (storageEntryToSetAsLatest) {
227
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntryToSetAsLatest), {}, {
228
+ PK: partitionKey,
229
+ SK: this.getSortKeyLatest(),
230
+ TYPE: TYPE_ENTRY_LATEST,
231
+ GSI1_PK: this.getGSILatestPartitionKey(model),
232
+ GSI1_SK: this.getGSISortKey(storageEntryToSetAsLatest)
233
+ })));
234
+ }
235
+
236
+ try {
237
+ await this._table.batchWrite(items);
238
+ } catch (ex) {
239
+ throw new _error.default(ex.message, ex.code, {
240
+ error: ex,
241
+ entryToDelete,
242
+ entryToSetAsLatest
243
+ });
244
+ }
245
+ }
246
+
247
+ async get(model, args) {
248
+ const {
249
+ items
250
+ } = await this.list(model, _objectSpread(_objectSpread({}, args || {}), {}, {
251
+ limit: 1
252
+ }));
253
+
254
+ if (items.length === 0) {
255
+ return null;
256
+ }
257
+
258
+ return items.shift();
259
+ }
260
+
261
+ async list(model, args) {
262
+ const {
263
+ limit: initialLimit,
264
+ where: originalWhere,
265
+ after,
266
+ sort
267
+ } = args;
268
+ /**
269
+ * There is no max limit imposed because that is up to the devs using this.
270
+ * Default is some reasonable number for us but users can set their own when initializing the plugin.
271
+ */
272
+
273
+ const defaultLimit = this.configuration.defaultLimit || configurationDefaults.defaultLimit;
274
+ const maxLimit = this.configuration.maxLimit || defaultLimit;
275
+ const limit = !initialLimit || initialLimit <= 0 ? initialLimit > maxLimit ? maxLimit : defaultLimit : initialLimit;
276
+ const items = [];
277
+ const queryOptions = this.createQueryOptions({
278
+ where: originalWhere,
279
+ model
280
+ });
281
+
282
+ try {
283
+ /**
284
+ * We run the query method on all the partition keys that were built in the createQueryOptions() method.
285
+ * Partition keys are always built as array because of the possibility that we might need to read from different partitions
286
+ * which is the case if where condition is something like id_in or entryId_in.
287
+ * If we are reading from the GSI1_PK it is a single partition but we keep it as an array
288
+ * just to make it easier to read in all of the cases.
289
+ */
290
+ for (const partitionKey of queryOptions.queryPartitionKeys) {
291
+ const results = await this.runQuery({
292
+ partitionKey,
293
+ options: queryOptions.options
294
+ });
295
+ items.push(...results);
296
+ }
297
+ } catch (ex) {
298
+ throw new _error.default(ex.message, "SCAN_ERROR", {
299
+ error: ex
300
+ });
301
+ }
302
+ /**
303
+ * We need a object containing field, transformers and paths.
304
+ * Just build it here and pass on into other methods that require it to avoid mapping multiple times.
305
+ */
306
+
307
+
308
+ const modelFields = (0, _utils3.buildModelFields)({
309
+ context: this.context,
310
+ model
311
+ });
312
+ /**
313
+ * Filter the read items via the code.
314
+ * It will build the filters out of the where input and transform the values it is using.
315
+ */
316
+
317
+ const filteredItems = (0, _utils3.filterItems)({
318
+ items,
319
+ where: queryOptions.where,
320
+ context: this.context,
321
+ fields: modelFields
322
+ });
323
+ const totalCount = filteredItems.length;
324
+ /**
325
+ * Sorting is also done via the code.
326
+ * It takes the sort input and sorts by it via the lodash sortBy method.
327
+ */
328
+
329
+ const sortedItems = (0, _utils3.sortEntryItems)({
330
+ items: filteredItems,
331
+ sort,
332
+ fields: modelFields
333
+ });
334
+ const start = (0, _utils2.decodePaginationCursor)(after) || 0;
335
+ const hasMoreItems = totalCount > start + limit;
336
+ const end = limit > totalCount + start + limit ? undefined : start + limit;
337
+ const slicedItems = sortedItems.slice(start, end);
338
+ /**
339
+ * Although we do not need a cursor here, we will use it as such to keep it standardized.
340
+ * Number is simply encoded.
341
+ */
342
+
343
+ const cursor = totalCount > start + limit ? (0, _utils2.encodePaginationCursor)(start + limit) : null;
344
+ return {
345
+ hasMoreItems,
346
+ totalCount,
347
+ cursor,
348
+ items: slicedItems
349
+ };
350
+ }
351
+
352
+ async update(model, args) {
353
+ const {
354
+ originalEntry,
355
+ entry,
356
+ storageEntry
357
+ } = args;
358
+ const partitionKey = this.getPartitionKey(originalEntry.id);
359
+ const items = [];
360
+ /**
361
+ * We need to:
362
+ * - update the current entry
363
+ * - update the latest entry if the current entry is the latest one
364
+ */
365
+
366
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
367
+ PK: partitionKey,
368
+ SK: this.getSortKeyRevision(storageEntry.version),
369
+ TYPE: TYPE_ENTRY,
370
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
371
+ GSI1_SK: this.getGSISortKey(storageEntry)
372
+ })));
373
+ /**
374
+ * We need the latest entry to update it as well if neccessary.
375
+ */
376
+
377
+ const latestStorageEntry = await this.getLatestRevisionByEntryId(model, entry.id);
378
+
379
+ if (latestStorageEntry && latestStorageEntry.id === entry.id) {
380
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
381
+ PK: partitionKey,
382
+ SK: this.getSortKeyLatest(),
383
+ TYPE: TYPE_ENTRY_LATEST,
384
+ GSI1_PK: this.getGSILatestPartitionKey(model),
385
+ GSI1_SK: this.getGSISortKey(entry)
386
+ })));
387
+ }
388
+
389
+ try {
390
+ await this._table.batchWrite(items);
391
+ return storageEntry;
392
+ } catch (ex) {
393
+ throw new _error.default(ex.message || "Could not update entry.", ex.code || "UPDATE_ERROR", {
394
+ error: ex,
395
+ originalEntry,
396
+ entry,
397
+ latestStorageEntry
398
+ });
399
+ }
400
+ }
401
+
402
+ async publish(model, args) {
403
+ const {
404
+ entry,
405
+ storageEntry
406
+ } = args;
407
+ const partitionKey = this.getPartitionKey(entry.id);
408
+ /**
409
+ * We need the latest and published entries to see if something needs to be updated along side the publishing one.
410
+ */
411
+
412
+ const latestStorageEntry = await this.getLatestRevisionByEntryId(model, entry.id);
413
+ const publishedStorageEntry = await this.getPublishedRevisionByEntryId(model, entry.id);
414
+ /**
415
+ * We need to update:
416
+ * - current entry revision sort key
417
+ * - published sort key
418
+ * - latest sort key - if entry updated is actually latest
419
+ * - previous published entry to unpublished status - if any previously published entry
420
+ */
421
+
422
+ const items = [this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
423
+ PK: partitionKey,
424
+ SK: this.getSortKeyRevision(entry.version),
425
+ TYPE: TYPE_ENTRY,
426
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
427
+ GSI1_SK: this.getGSISortKey(entry)
428
+ })), this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
429
+ PK: partitionKey,
430
+ SK: this.getSortKeyPublished(),
431
+ TYPE: TYPE_ENTRY_PUBLISHED,
432
+ GSI1_PK: this.getGSIPublishedPartitionKey(model),
433
+ GSI1_SK: this.getGSISortKey(entry)
434
+ }))];
435
+
436
+ if (entry.id === latestStorageEntry.id) {
437
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
438
+ PK: partitionKey,
439
+ SK: this.getSortKeyLatest(),
440
+ TYPE: TYPE_ENTRY_LATEST,
441
+ GSI1_PK: this.getGSILatestPartitionKey(model),
442
+ GSI1_SK: this.getGSISortKey(entry)
443
+ })));
444
+ }
445
+
446
+ if (publishedStorageEntry) {
447
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, publishedStorageEntry), {}, {
448
+ PK: partitionKey,
449
+ SK: this.getSortKeyRevision(publishedStorageEntry.version),
450
+ TYPE: TYPE_ENTRY,
451
+ status: _types.CONTENT_ENTRY_STATUS.UNPUBLISHED,
452
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
453
+ GSI1_SK: this.getGSISortKey(publishedStorageEntry)
454
+ })));
455
+ }
456
+
457
+ try {
458
+ await this._table.batchWrite(items);
459
+
460
+ this._dataLoaders.clearAllEntryRevisions(model, entry);
461
+
462
+ return entry;
463
+ } catch (ex) {
464
+ throw new _error.default(ex.message || "Could not execute the publishing batch.", ex.code || "PUBLISH_ERROR", {
465
+ entry,
466
+ latestStorageEntry,
467
+ publishedStorageEntry
468
+ });
469
+ }
470
+ }
471
+
472
+ async unpublish(model, args) {
473
+ const {
474
+ entry,
475
+ storageEntry
476
+ } = args;
477
+ const partitionKey = this.getPartitionKey(entry.id);
478
+ /**
479
+ * We need to:
480
+ * - delete currently published entry
481
+ * - update current entry revision with new data
482
+ * - update latest entry status - if entry being unpublished is latest
483
+ */
484
+
485
+ const items = [this._entity.deleteBatch({
486
+ PK: partitionKey,
487
+ SK: this.getSortKeyPublished()
488
+ }), this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
489
+ PK: partitionKey,
490
+ SK: this.getSortKeyRevision(entry.version),
491
+ TYPE: TYPE_ENTRY,
492
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
493
+ GSI1_SK: this.getGSISortKey(entry)
494
+ }))];
495
+ /**
496
+ * We need the latest entry to see if something needs to be updated along side the unpublishing one.
497
+ */
498
+
499
+ const latestStorageEntry = await this.getLatestRevisionByEntryId(model, entry.id);
500
+
501
+ if (entry.id === latestStorageEntry.id) {
502
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
503
+ PK: partitionKey,
504
+ SK: this.getSortKeyLatest(),
505
+ TYPE: TYPE_ENTRY_LATEST,
506
+ GSI1_PK: this.getGSILatestPartitionKey(model),
507
+ GSI1_SK: this.getGSISortKey(entry)
508
+ })));
509
+ }
510
+
511
+ try {
512
+ await this._table.batchWrite(items);
513
+ return storageEntry;
514
+ } catch (ex) {
515
+ throw new _error.default(ex.message || "Could not execute unpublish batch.", ex.code || "UNPUBLISH_ERROR", {
516
+ entry,
517
+ storageEntry
518
+ });
519
+ }
520
+ }
521
+
522
+ async requestChanges(model, args) {
523
+ const {
524
+ entry,
525
+ storageEntry,
526
+ originalEntry
527
+ } = args;
528
+ const partitionKey = this.getPartitionKey(entry.id);
529
+ /**
530
+ * We need to:
531
+ * - update the existing entry
532
+ * - update latest version - if existing entry is the latest version
533
+ */
534
+
535
+ const items = [this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
536
+ PK: partitionKey,
537
+ SK: this.getSortKeyRevision(entry.version),
538
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
539
+ GSI1_SK: this.getGSISortKey(entry)
540
+ }))];
541
+ /**
542
+ * We need the latest entry to see if something needs to be updated along side the request changes one.
543
+ */
544
+
545
+ const latestStorageEntry = await this.getLatestRevisionByEntryId(model, entry.id);
546
+
547
+ if (latestStorageEntry.id === entry.id) {
548
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
549
+ PK: partitionKey,
550
+ SK: this.getSortKeyLatest(),
551
+ TYPE: TYPE_ENTRY_LATEST,
552
+ GSI1_PK: this.getGSILatestPartitionKey(model),
553
+ GSI1_SK: this.getGSISortKey(entry)
554
+ })));
555
+ }
556
+
557
+ try {
558
+ await this._table.batchWrite(items);
559
+ } catch (ex) {
560
+ throw new _error.default(ex.message || "Could not execute the request changes batch.", ex.code || "REQUEST_CHANGES_ERROR", {
561
+ entry,
562
+ originalEntry
563
+ });
564
+ }
565
+
566
+ return entry;
567
+ }
568
+
569
+ async requestReview(model, args) {
570
+ const {
571
+ entry,
572
+ storageEntry,
573
+ originalEntry
574
+ } = args;
575
+ const partitionKey = this.getPartitionKey(entry.id);
576
+ /**
577
+ * We need to:
578
+ * - update existing entry
579
+ * - update latest entry - if existing entry is the latest entry
580
+ */
581
+
582
+ const items = [this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
583
+ PK: partitionKey,
584
+ SK: this.getSortKeyRevision(entry.version),
585
+ GSI1_PK: this.getGSIEntryPartitionKey(model),
586
+ GSI1_SK: this.getGSISortKey(entry)
587
+ }))];
588
+ /**
589
+ * We need the latest entry to see if something needs to be updated along side the request review one.
590
+ */
591
+
592
+ const latestStorageEntry = await this.getLatestRevisionByEntryId(model, entry.id);
593
+
594
+ if (latestStorageEntry.id === entry.id) {
595
+ items.push(this._entity.putBatch(_objectSpread(_objectSpread({}, storageEntry), {}, {
596
+ PK: partitionKey,
597
+ SK: this.getSortKeyLatest(),
598
+ TYPE: TYPE_ENTRY_LATEST,
599
+ GSI1_PK: this.getGSILatestPartitionKey(model),
600
+ GSI1_SK: this.getGSISortKey(entry)
601
+ })));
602
+ }
603
+
604
+ try {
605
+ await this._table.batchWrite(items);
606
+ return entry;
607
+ } catch (ex) {
608
+ throw new _error.default(ex.message || "Could not execute request review batch.", ex.code || "REQUEST_REVIEW_ERROR", {
609
+ entry,
610
+ storageEntry,
611
+ originalEntry
612
+ });
613
+ }
614
+ }
615
+
616
+ async getAllRevisionsByIds(model, ids) {
617
+ if (ids.length === 0) {
618
+ return [];
619
+ }
620
+
621
+ try {
622
+ return await this._dataLoaders.getAllEntryRevisions(model, ids);
623
+ } catch (ex) {
624
+ throw new _error.default(ex.message || "Could not read multiple entries.", ex.code || "GET_ALL_REVISIONS_BY_IDS_ERROR", {
625
+ ids
626
+ });
627
+ }
628
+ }
629
+
630
+ async getByIds(model, ids) {
631
+ if (ids.length === 0) {
632
+ return [];
633
+ }
634
+
635
+ try {
636
+ return await this._dataLoaders.getRevisionById(model, ids);
637
+ } catch (ex) {
638
+ throw new _error.default(ex.message || "Could not read multiple entries.", ex.code || "GET_BY_IDS_ERROR", {
639
+ ids
640
+ });
641
+ }
642
+ }
643
+
644
+ async getPublishedByIds(model, ids) {
645
+ if (ids.length === 0) {
646
+ return [];
647
+ }
648
+
649
+ try {
650
+ return await this._dataLoaders.getPublishedRevisionByEntryId(model, ids);
651
+ } catch (ex) {
652
+ throw new _error.default(ex.message || "Could not read multiple entries.", ex.code || "GET_BY_IDS_ERROR", {
653
+ ids
654
+ });
655
+ }
656
+ }
657
+
658
+ async getLatestByIds(model, ids) {
659
+ if (ids.length === 0) {
660
+ return [];
661
+ }
662
+
663
+ try {
664
+ return await this._dataLoaders.getLatestRevisionByEntryId(model, ids);
665
+ } catch (ex) {
666
+ throw new _error.default(ex.message || "Could not read multiple entries.", ex.code || "GET_BY_IDS_ERROR", {
667
+ ids
668
+ });
669
+ }
670
+ }
671
+
672
+ async getRevisions(model, id) {
673
+ try {
674
+ return await this._dataLoaders.getAllEntryRevisions(model, [id]);
675
+ } catch (ex) {
676
+ throw new _error.default(ex.message || "Could not read multiple entries.", ex.code || "GET_ALL_REVISIONS_BY_IDS_ERROR", {
677
+ id
678
+ });
679
+ }
680
+ }
681
+
682
+ async getRevisionById(model, id) {
683
+ return this.getSingleDynamoDbItem({
684
+ partitionKey: this.getPartitionKey(id),
685
+ value: this.getSortKeyRevision(id)
686
+ });
687
+ }
688
+
689
+ async getPublishedRevisionByEntryId(model, entryId) {
690
+ return this.getSingleDynamoDbItem({
691
+ partitionKey: this.getPartitionKey(entryId),
692
+ value: this.getSortKeyPublished()
693
+ });
694
+ }
695
+
696
+ async getLatestRevisionByEntryId(model, entryId) {
697
+ return this.getSingleDynamoDbItem({
698
+ partitionKey: this.getPartitionKey(entryId),
699
+ value: this.getSortKeyLatest()
700
+ });
701
+ }
702
+
703
+ async getPreviousRevision(model, entryId, version) {
704
+ const entry = await this.getSingleDynamoDbItem({
705
+ partitionKey: this.getPartitionKey(entryId),
706
+ op: "lt",
707
+ value: this.getSortKeyRevision(version),
708
+ order: "DESC"
709
+ });
710
+
711
+ if (entry.TYPE !== TYPE_ENTRY) {
712
+ return null;
713
+ }
714
+
715
+ return entry;
716
+ }
717
+
718
+ async getSingleDynamoDbItem(args) {
719
+ const {
720
+ partitionKey,
721
+ op = "eq",
722
+ value,
723
+ order = "ASC"
724
+ } = args;
725
+ const queryOptions = {
726
+ [op]: value,
727
+ reverse: order === "DESC",
728
+ limit: 1
729
+ };
730
+
731
+ try {
732
+ const result = await this._entity.query(partitionKey, queryOptions);
733
+
734
+ if (!result || Array.isArray(result.Items) === false) {
735
+ throw new _error.default("Error when querying for content entries - no result.", "QUERY_ERROR", {
736
+ partitionKey,
737
+ queryOptions
738
+ });
739
+ }
740
+
741
+ if (result.Items.length === 0) {
742
+ return null;
743
+ }
744
+
745
+ return result.Items.shift();
746
+ } catch (ex) {
747
+ throw new _error.default(ex.message || "Could not read from the DynamoDB.", ex.code || "DDB_READ_ERROR", {
748
+ partitionKey,
749
+ queryOptions
750
+ });
751
+ }
752
+ }
753
+
754
+ getPartitionKey(id) {
755
+ /**
756
+ * If ID includes # it means it is composed of ID and VERSION.
757
+ * We need ID only so extract it.
758
+ */
759
+ if (id.match("#") !== null) {
760
+ id = id.split("#").shift();
761
+ }
762
+
763
+ return `${this.partitionKey}#${id}`;
764
+ }
765
+
766
+ get partitionKey() {
767
+ return `${(0, _utils2.createBasePartitionKey)(this.context)}#CME`;
768
+ }
769
+
770
+ getGSIPartitionKey(type, model) {
771
+ return `${this.partitionKey}#M#${model.modelId}#${type}`;
772
+ }
773
+
774
+ getGSIEntryPartitionKey(model) {
775
+ return this.getGSIPartitionKey("A", model);
776
+ }
777
+
778
+ getGSILatestPartitionKey(model) {
779
+ return this.getGSIPartitionKey("L", model);
780
+ }
781
+
782
+ getGSIPublishedPartitionKey(model) {
783
+ return this.getGSIPartitionKey("P", model);
784
+ }
785
+
786
+ getGSISortKey(entry) {
787
+ return entry.id;
788
+ }
789
+ /**
790
+ * Gets a secondary key in form of REV#version from:
791
+ * id#0003
792
+ * 0003
793
+ * 3
794
+ */
795
+
796
+
797
+ getSortKeyRevision(version) {
798
+ if (typeof version === "string" && version.includes("#") === true) {
799
+ version = version.split("#").pop();
800
+ }
801
+
802
+ return `REV#${(0, _utils.zeroPad)(version)}`;
803
+ }
804
+
805
+ getSortKeyLatest() {
806
+ return "L";
807
+ }
808
+
809
+ getSortKeyPublished() {
810
+ return "P";
811
+ }
812
+ /**
813
+ * Method to build the query partition keys, always an array, and create the target index:
814
+ * - if undefined then it is primary
815
+ * - if populated then it is that given one (and partition keys are reflecting that)
816
+ */
817
+
818
+
819
+ createQueryOptions({
820
+ where: originalWhere,
821
+ model
822
+ }) {
823
+ const options = {
824
+ filters: [],
825
+ index: undefined
826
+ };
827
+ const where = (0, _lodash.default)(originalWhere);
828
+ /**
829
+ * if we have id or entry ID, we will query via the primary key
830
+ * just add all the possible IDs to find
831
+ */
832
+
833
+ const queryPartitionKeys = [];
834
+
835
+ if (where.id) {
836
+ queryPartitionKeys.push(this.getPartitionKey(where.id));
837
+ }
838
+
839
+ if (where.entryId) {
840
+ queryPartitionKeys.push(this.getPartitionKey(where.entryId));
841
+ }
842
+
843
+ if (where.id_in) {
844
+ queryPartitionKeys.push(...where.id_in.map(id => this.getPartitionKey(id)));
845
+ }
846
+
847
+ if (where.entryId_in) {
848
+ queryPartitionKeys.push(...where.entryId_in.map(id => this.getPartitionKey(id)));
849
+ }
850
+ /**
851
+ * If we do not have any of the IDs, we will query via the GSI1_PK just depending on the entry type
852
+ * At this point there will probably be a lot of results
853
+ * but we will apply some basic dynamodb filters so we dont get much data from the db
854
+ * NOTE: It is still going to get charged tho
855
+ */
856
+
857
+
858
+ if (queryPartitionKeys.length === 0) {
859
+ options.index = GSI1_INDEX;
860
+
861
+ if (where.published) {
862
+ queryPartitionKeys.push(this.getGSIPartitionKey("P", model));
863
+ } else if (where.latest) {
864
+ queryPartitionKeys.push(this.getGSIPartitionKey("L", model));
865
+ } else {
866
+ queryPartitionKeys.push(this.getGSIPartitionKey("A", model));
867
+ }
868
+ }
869
+ /**
870
+ * If index is the primary one, we can filter records by type (latest, published or regular)
871
+ * so we do not need to filter in the code
872
+ */
873
+
874
+
875
+ if (!options.index) {
876
+ if (where.published) {
877
+ options.eq = this.getSortKeyPublished();
878
+ } else if (where.latest) {
879
+ options.eq = this.getSortKeyLatest();
880
+ } else {
881
+ options.beginsWith = "REV#";
882
+ }
883
+ }
884
+ /**
885
+ * we remove all the used where conditions
886
+ */
887
+
888
+
889
+ delete where["id"];
890
+ delete where["id_in"];
891
+ delete where["entryId"];
892
+ delete where["entryId_in"];
893
+ delete where["published"];
894
+ delete where["latest"];
895
+ return {
896
+ options,
897
+ queryPartitionKeys,
898
+ where
899
+ };
900
+ }
901
+ /**
902
+ * A method to query the database at the given partition key with the built query options.
903
+ * Method runs in the loop until it reads everything it needs to.
904
+ * We could impose the limit on the records read but there is no point since we MUST read everything to be able
905
+ * to filter and sort the data.
906
+ */
907
+
908
+
909
+ async runQuery(args) {
910
+ let previousResult = undefined;
911
+ let results;
912
+ const items = [];
913
+
914
+ while (results = await this.query(previousResult, args)) {
915
+ items.push(...results.Items);
916
+ previousResult = results;
917
+ }
918
+
919
+ return items;
920
+ }
921
+
922
+ async query(previousResult, args) {
923
+ const {
924
+ partitionKey,
925
+ options
926
+ } = args;
927
+ let result;
928
+ /**
929
+ * In case there is no previous result we must make a new query.
930
+ * This is the first query on the given partition key.
931
+ */
932
+
933
+ if (!previousResult) {
934
+ result = await this._entity.query(partitionKey, options);
935
+ } else if (typeof previousResult.next === "function") {
936
+ /**
937
+ * In case we have a previous result and it has a next method, we run it.
938
+ * In case result of the next method is false, it means it has nothing else to read
939
+ * and we return a null to keep the query from repeating.
940
+ */
941
+ result = await previousResult.next();
942
+
943
+ if (result === false) {
944
+ return null;
945
+ }
946
+ } else {
947
+ /**
948
+ * This could probably never happen but keep it here just in case to break the query loop.
949
+ * Basically, either previousResult does not exist or it exists and has a next method
950
+ * and at that point a result returned will be null and loop should not start again.
951
+ */
952
+ return null;
953
+ }
954
+ /**
955
+ * We expect the result to contain an Items array and if not, something went wrong, very wrong.
956
+ */
957
+
958
+
959
+ if (!result || !result.Items || !Array.isArray(result.Items)) {
960
+ throw new _error.default("Error when querying for content entries - no result.", "QUERY_ERROR", {
961
+ partitionKey,
962
+ options
963
+ });
964
+ }
965
+
966
+ return result;
967
+ }
968
+
969
+ }
970
+
971
+ exports.CmsContentEntryDynamo = CmsContentEntryDynamo;
972
+ //# sourceMappingURL=CmsContentEntryDynamo.js.map