@webiny/api-headless-cms-ddb 5.34.8 → 5.35.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. package/definitions/entry.js +3 -5
  2. package/definitions/entry.js.map +1 -1
  3. package/definitions/group.js +0 -5
  4. package/definitions/group.js.map +1 -1
  5. package/definitions/model.js +17 -5
  6. package/definitions/model.js.map +1 -1
  7. package/definitions/settings.js +0 -5
  8. package/definitions/settings.js.map +1 -1
  9. package/definitions/system.js +0 -5
  10. package/definitions/system.js.map +1 -1
  11. package/definitions/table.js +0 -3
  12. package/definitions/table.js.map +1 -1
  13. package/dynamoDb/index.js +0 -7
  14. package/dynamoDb/index.js.map +1 -1
  15. package/dynamoDb/path/plainObject.js +1 -8
  16. package/dynamoDb/path/plainObject.js.map +1 -1
  17. package/dynamoDb/storage/date.js +1 -19
  18. package/dynamoDb/storage/date.js.map +1 -1
  19. package/dynamoDb/storage/longText.js +1 -18
  20. package/dynamoDb/storage/longText.js.map +1 -1
  21. package/dynamoDb/storage/richText.js +0 -18
  22. package/dynamoDb/storage/richText.js.map +1 -1
  23. package/dynamoDb/transformValue/datetime.js +1 -8
  24. package/dynamoDb/transformValue/datetime.js.map +1 -1
  25. package/index.js +6 -30
  26. package/index.js.map +1 -1
  27. package/operations/entry/dataLoaders.js +1 -44
  28. package/operations/entry/dataLoaders.js.map +1 -1
  29. package/operations/entry/filtering/createExpressions.js +2 -39
  30. package/operations/entry/filtering/createExpressions.js.map +1 -1
  31. package/operations/entry/filtering/createFields.js +0 -18
  32. package/operations/entry/filtering/createFields.js.map +1 -1
  33. package/operations/entry/filtering/extractSort.js +0 -10
  34. package/operations/entry/filtering/extractSort.js.map +1 -1
  35. package/operations/entry/filtering/filter.js +1 -36
  36. package/operations/entry/filtering/filter.js.map +1 -1
  37. package/operations/entry/filtering/fullTextSearch.js +0 -12
  38. package/operations/entry/filtering/fullTextSearch.js.map +1 -1
  39. package/operations/entry/filtering/getValue.js +0 -18
  40. package/operations/entry/filtering/getValue.js.map +1 -1
  41. package/operations/entry/filtering/index.js +0 -2
  42. package/operations/entry/filtering/index.js.map +1 -1
  43. package/operations/entry/filtering/mapPlugins.js +0 -8
  44. package/operations/entry/filtering/mapPlugins.js.map +1 -1
  45. package/operations/entry/filtering/plugins/defaultFilterCreate.js +0 -7
  46. package/operations/entry/filtering/plugins/defaultFilterCreate.js.map +1 -1
  47. package/operations/entry/filtering/plugins/index.js +0 -5
  48. package/operations/entry/filtering/plugins/index.js.map +1 -1
  49. package/operations/entry/filtering/plugins/objectFilterCreate.js +0 -21
  50. package/operations/entry/filtering/plugins/objectFilterCreate.js.map +1 -1
  51. package/operations/entry/filtering/plugins/refFilterCreate.js +0 -18
  52. package/operations/entry/filtering/plugins/refFilterCreate.js.map +1 -1
  53. package/operations/entry/filtering/sort.js +0 -15
  54. package/operations/entry/filtering/sort.js.map +1 -1
  55. package/operations/entry/filtering/systemFields.js +0 -2
  56. package/operations/entry/filtering/systemFields.js.map +1 -1
  57. package/operations/entry/filtering/transform.js +0 -3
  58. package/operations/entry/filtering/transform.js.map +1 -1
  59. package/operations/entry/filtering/values.js +0 -6
  60. package/operations/entry/filtering/values.js.map +1 -1
  61. package/operations/entry/filtering/where.js +0 -6
  62. package/operations/entry/filtering/where.js.map +1 -1
  63. package/operations/entry/index.js +58 -116
  64. package/operations/entry/index.js.map +1 -1
  65. package/operations/entry/keys.js +0 -18
  66. package/operations/entry/keys.js.map +1 -1
  67. package/operations/entry/systemFields.js +0 -2
  68. package/operations/entry/systemFields.js.map +1 -1
  69. package/operations/group/index.js +2 -32
  70. package/operations/group/index.js.map +1 -1
  71. package/operations/model/index.js +0 -25
  72. package/operations/model/index.js.map +1 -1
  73. package/operations/settings/index.js +0 -24
  74. package/operations/settings/index.js.map +1 -1
  75. package/operations/system/index.js +0 -17
  76. package/operations/system/index.js.map +1 -1
  77. package/package.json +13 -13
  78. package/plugins/CmsEntryFieldFilterPathPlugin.js +0 -14
  79. package/plugins/CmsEntryFieldFilterPathPlugin.js.map +1 -1
  80. package/plugins/CmsEntryFieldFilterPlugin.js +0 -7
  81. package/plugins/CmsEntryFieldFilterPlugin.js.map +1 -1
  82. package/plugins/CmsEntryFieldSortingPlugin.js +0 -10
  83. package/plugins/CmsEntryFieldSortingPlugin.js.map +1 -1
  84. package/plugins/CmsFieldFilterValueTransformPlugin.d.ts +11 -0
  85. package/plugins/CmsFieldFilterValueTransformPlugin.js +24 -0
  86. package/plugins/CmsFieldFilterValueTransformPlugin.js.map +1 -0
  87. package/plugins/index.d.ts +1 -0
  88. package/plugins/index.js +11 -6
  89. package/plugins/index.js.map +1 -1
  90. package/types.js +0 -1
  91. package/types.js.map +1 -1
@@ -1,50 +1,32 @@
1
1
  "use strict";
2
2
 
3
3
  var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault").default;
4
-
5
4
  Object.defineProperty(exports, "__esModule", {
6
5
  value: true
7
6
  });
8
7
  exports.createEntriesStorageOperations = void 0;
9
-
10
8
  var _objectSpread2 = _interopRequireDefault(require("@babel/runtime/helpers/objectSpread2"));
11
-
12
9
  var _error = _interopRequireDefault(require("@webiny/error"));
13
-
14
10
  var _dataLoaders = require("./dataLoaders");
15
-
16
11
  var _types = require("@webiny/api-headless-cms/types");
17
-
18
12
  var _keys = require("./keys");
19
-
20
13
  var _batchWrite = require("@webiny/db-dynamodb/utils/batchWrite");
21
-
22
14
  var _query = require("@webiny/db-dynamodb/utils/query");
23
-
24
15
  var _cleanup = require("@webiny/db-dynamodb/utils/cleanup");
25
-
26
16
  var _cursor = require("@webiny/utils/cursor");
27
-
28
17
  var _zeroPad = require("@webiny/utils/zeroPad");
29
-
30
18
  var _apiHeadlessCms = require("@webiny/api-headless-cms");
31
-
32
19
  var _createFields = require("./filtering/createFields");
33
-
34
20
  var _filtering = require("./filtering");
35
-
36
21
  const createType = () => {
37
22
  return "cms.entry";
38
23
  };
39
-
40
24
  const createLatestType = () => {
41
25
  return `${createType()}.l`;
42
26
  };
43
-
44
27
  const createPublishedType = () => {
45
28
  return `${createType()}.p`;
46
29
  };
47
-
48
30
  const convertToStorageEntry = params => {
49
31
  const {
50
32
  model,
@@ -58,7 +40,6 @@ const convertToStorageEntry = params => {
58
40
  values
59
41
  });
60
42
  };
61
-
62
43
  const convertFromStorageEntry = params => {
63
44
  const {
64
45
  model,
@@ -72,12 +53,23 @@ const convertFromStorageEntry = params => {
72
53
  values
73
54
  });
74
55
  };
75
-
76
56
  const createEntriesStorageOperations = params => {
77
57
  const {
78
58
  entity,
79
59
  plugins
80
60
  } = params;
61
+ let storageOperationsCmsModelPlugin;
62
+ const getStorageOperationsCmsModelPlugin = () => {
63
+ if (storageOperationsCmsModelPlugin) {
64
+ return storageOperationsCmsModelPlugin;
65
+ }
66
+ storageOperationsCmsModelPlugin = plugins.oneByType(_apiHeadlessCms.StorageOperationsCmsModelPlugin.type);
67
+ return storageOperationsCmsModelPlugin;
68
+ };
69
+ const getStorageOperationsModel = model => {
70
+ const plugin = getStorageOperationsCmsModelPlugin();
71
+ return plugin.getModel(model);
72
+ };
81
73
  const dataLoaders = new _dataLoaders.DataLoadersHandler({
82
74
  entity
83
75
  });
@@ -85,34 +77,29 @@ const createEntriesStorageOperations = params => {
85
77
  collection[plugin.fieldType] = plugin;
86
78
  return collection;
87
79
  }, {});
88
-
89
80
  const createStorageTransformCallable = model => {
90
81
  return (field, value) => {
91
82
  const plugin = storageTransformPlugins[field.type];
92
-
93
83
  if (!plugin) {
94
84
  return value;
95
85
  }
96
-
97
86
  return plugin.fromStorage({
98
87
  model,
99
88
  field,
100
89
  value,
101
-
102
90
  getStoragePlugin(fieldType) {
103
91
  return storageTransformPlugins[fieldType] || storageTransformPlugins["*"];
104
92
  },
105
-
106
93
  plugins
107
94
  });
108
95
  };
109
96
  };
110
-
111
- const create = async (model, params) => {
97
+ const create = async (initialModel, params) => {
112
98
  const {
113
99
  entry,
114
100
  storageEntry: initialStorageEntry
115
101
  } = params;
102
+ const model = getStorageOperationsModel(initialModel);
116
103
  const partitionKey = (0, _keys.createPartitionKey)({
117
104
  id: entry.id,
118
105
  locale: model.locale,
@@ -127,9 +114,8 @@ const createEntriesStorageOperations = params => {
127
114
  /**
128
115
  * We need to:
129
116
  * - create new main entry item
130
- * - create new or update latest entry item
117
+ * - create new or update the latest entry item
131
118
  */
132
-
133
119
  const items = [entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
134
120
  locked,
135
121
  PK: partitionKey,
@@ -145,10 +131,10 @@ const createEntriesStorageOperations = params => {
145
131
  GSI1_PK: (0, _keys.createGSIPartitionKey)(model, "L"),
146
132
  GSI1_SK: (0, _keys.createGSISortKey)(storageEntry)
147
133
  }))];
134
+
148
135
  /**
149
136
  * We need to create published entry if
150
137
  */
151
-
152
138
  if (isPublished) {
153
139
  items.push(entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
154
140
  locked,
@@ -159,7 +145,6 @@ const createEntriesStorageOperations = params => {
159
145
  GSI1_SK: (0, _keys.createGSISortKey)(storageEntry)
160
146
  })));
161
147
  }
162
-
163
148
  try {
164
149
  await (0, _batchWrite.batchWriteAll)({
165
150
  table: entity.table,
@@ -174,15 +159,14 @@ const createEntriesStorageOperations = params => {
174
159
  entry
175
160
  });
176
161
  }
177
-
178
162
  return initialStorageEntry;
179
163
  };
180
-
181
- const createRevisionFrom = async (model, params) => {
164
+ const createRevisionFrom = async (initialModel, params) => {
182
165
  const {
183
166
  entry,
184
167
  storageEntry: initialStorageEntry
185
168
  } = params;
169
+ const model = getStorageOperationsModel(initialModel);
186
170
  const partitionKey = (0, _keys.createPartitionKey)({
187
171
  id: entry.id,
188
172
  locale: model.locale,
@@ -197,7 +181,6 @@ const createEntriesStorageOperations = params => {
197
181
  * - create the main entry item
198
182
  * - update the last entry item to a current one
199
183
  */
200
-
201
184
  const items = [entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
202
185
  PK: partitionKey,
203
186
  SK: (0, _keys.createRevisionSortKey)(storageEntry),
@@ -211,7 +194,6 @@ const createEntriesStorageOperations = params => {
211
194
  GSI1_PK: (0, _keys.createGSIPartitionKey)(model, "L"),
212
195
  GSI1_SK: (0, _keys.createGSISortKey)(storageEntry)
213
196
  }))];
214
-
215
197
  try {
216
198
  await (0, _batchWrite.batchWriteAll)({
217
199
  table: entity.table,
@@ -230,16 +212,14 @@ const createEntriesStorageOperations = params => {
230
212
  /**
231
213
  * There are no modifications on the entry created so just return the data.
232
214
  */
233
-
234
-
235
215
  return initialStorageEntry;
236
216
  };
237
-
238
- const update = async (model, params) => {
217
+ const update = async (initialModel, params) => {
239
218
  const {
240
219
  entry,
241
220
  storageEntry: initialStorageEntry
242
221
  } = params;
222
+ const model = getStorageOperationsModel(initialModel);
243
223
  const partitionKey = (0, _keys.createPartitionKey)({
244
224
  id: entry.id,
245
225
  locale: model.locale,
@@ -257,7 +237,6 @@ const createEntriesStorageOperations = params => {
257
237
  * - update the current entry
258
238
  * - update the latest entry if the current entry is the latest one
259
239
  */
260
-
261
240
  items.push(entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
262
241
  locked,
263
242
  PK: partitionKey,
@@ -266,7 +245,6 @@ const createEntriesStorageOperations = params => {
266
245
  GSI1_PK: (0, _keys.createGSIPartitionKey)(model, "A"),
267
246
  GSI1_SK: (0, _keys.createGSISortKey)(storageEntry)
268
247
  })));
269
-
270
248
  if (isPublished) {
271
249
  items.push(entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
272
250
  locked,
@@ -277,13 +255,11 @@ const createEntriesStorageOperations = params => {
277
255
  GSI1_SK: (0, _keys.createGSISortKey)(storageEntry)
278
256
  })));
279
257
  }
258
+
280
259
  /**
281
- * We need the latest entry to update it as well if neccessary.
260
+ * We need the latest entry to update it as well if necessary.
282
261
  */
283
-
284
-
285
262
  const latestStorageEntry = await getLatestRevisionByEntryId(model, entry);
286
-
287
263
  if (latestStorageEntry && latestStorageEntry.id === entry.id) {
288
264
  items.push(entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
289
265
  locked,
@@ -294,7 +270,6 @@ const createEntriesStorageOperations = params => {
294
270
  GSI1_SK: (0, _keys.createGSISortKey)(entry)
295
271
  })));
296
272
  }
297
-
298
273
  try {
299
274
  await (0, _batchWrite.batchWriteAll)({
300
275
  table: entity.table,
@@ -312,11 +287,11 @@ const createEntriesStorageOperations = params => {
312
287
  });
313
288
  }
314
289
  };
315
-
316
- const deleteEntry = async (model, params) => {
290
+ const deleteEntry = async (initialModel, params) => {
317
291
  const {
318
292
  entry
319
293
  } = params;
294
+ const model = getStorageOperationsModel(initialModel);
320
295
  const queryAllParams = {
321
296
  entity,
322
297
  partitionKey: (0, _keys.createPartitionKey)({
@@ -329,7 +304,6 @@ const createEntriesStorageOperations = params => {
329
304
  }
330
305
  };
331
306
  let records = [];
332
-
333
307
  try {
334
308
  records = await (0, _query.queryAll)(queryAllParams);
335
309
  } catch (ex) {
@@ -338,14 +312,12 @@ const createEntriesStorageOperations = params => {
338
312
  entry
339
313
  });
340
314
  }
341
-
342
315
  const items = records.map(item => {
343
316
  return entity.deleteBatch({
344
317
  PK: item.PK,
345
318
  SK: item.SK
346
319
  });
347
320
  });
348
-
349
321
  try {
350
322
  await (0, _batchWrite.batchWriteAll)({
351
323
  table: entity.table,
@@ -362,13 +334,13 @@ const createEntriesStorageOperations = params => {
362
334
  });
363
335
  }
364
336
  };
365
-
366
- const deleteRevision = async (model, params) => {
337
+ const deleteRevision = async (initialModel, params) => {
367
338
  const {
368
339
  entry,
369
340
  latestEntry,
370
341
  latestStorageEntry: initialLatestStorageEntry
371
342
  } = params;
343
+ const model = getStorageOperationsModel(initialModel);
372
344
  const partitionKey = (0, _keys.createPartitionKey)({
373
345
  id: entry.id,
374
346
  locale: model.locale,
@@ -379,17 +351,16 @@ const createEntriesStorageOperations = params => {
379
351
  SK: (0, _keys.createRevisionSortKey)(entry)
380
352
  })];
381
353
  const publishedStorageEntry = await getPublishedRevisionByEntryId(model, entry);
354
+
382
355
  /**
383
356
  * If revision we are deleting is the published one as well, we need to delete those records as well.
384
357
  */
385
-
386
358
  if (publishedStorageEntry && entry.id === publishedStorageEntry.id) {
387
359
  items.push(entity.deleteBatch({
388
360
  PK: partitionKey,
389
361
  SK: (0, _keys.createPublishedSortKey)()
390
362
  }));
391
363
  }
392
-
393
364
  if (initialLatestStorageEntry) {
394
365
  const latestStorageEntry = convertToStorageEntry({
395
366
  storageEntry: initialLatestStorageEntry,
@@ -403,7 +374,6 @@ const createEntriesStorageOperations = params => {
403
374
  GSI1_SK: (0, _keys.createGSISortKey)(latestStorageEntry)
404
375
  })));
405
376
  }
406
-
407
377
  try {
408
378
  await (0, _batchWrite.batchWriteAll)({
409
379
  table: entity.table,
@@ -420,59 +390,53 @@ const createEntriesStorageOperations = params => {
420
390
  });
421
391
  }
422
392
  };
423
-
424
- const getLatestRevisionByEntryId = async (model, params) => {
393
+ const getLatestRevisionByEntryId = async (initialModel, params) => {
394
+ const model = getStorageOperationsModel(initialModel);
425
395
  const items = await dataLoaders.getLatestRevisionByEntryId({
426
396
  model,
427
397
  ids: [params.id]
428
398
  });
429
399
  const item = items.shift() || null;
430
-
431
400
  if (!item) {
432
401
  return null;
433
402
  }
434
-
435
403
  return convertFromStorageEntry({
436
404
  storageEntry: item,
437
405
  model
438
406
  });
439
407
  };
440
-
441
- const getPublishedRevisionByEntryId = async (model, params) => {
408
+ const getPublishedRevisionByEntryId = async (initialModel, params) => {
409
+ const model = getStorageOperationsModel(initialModel);
442
410
  const items = await dataLoaders.getPublishedRevisionByEntryId({
443
411
  model,
444
412
  ids: [params.id]
445
413
  });
446
414
  const item = items.shift() || null;
447
-
448
415
  if (!item) {
449
416
  return null;
450
417
  }
451
-
452
418
  return convertFromStorageEntry({
453
419
  storageEntry: item,
454
420
  model
455
421
  });
456
422
  };
457
-
458
- const getRevisionById = async (model, params) => {
423
+ const getRevisionById = async (initialModel, params) => {
424
+ const model = getStorageOperationsModel(initialModel);
459
425
  const items = await dataLoaders.getRevisionById({
460
426
  model,
461
427
  ids: [params.id]
462
428
  });
463
429
  const item = items.shift() || null;
464
-
465
430
  if (!item) {
466
431
  return null;
467
432
  }
468
-
469
433
  return convertFromStorageEntry({
470
434
  storageEntry: item,
471
435
  model
472
436
  });
473
437
  };
474
-
475
- const getRevisions = async (model, params) => {
438
+ const getRevisions = async (initialModel, params) => {
439
+ const model = getStorageOperationsModel(initialModel);
476
440
  const items = await dataLoaders.getAllEntryRevisions({
477
441
  model,
478
442
  ids: [params.id]
@@ -484,8 +448,8 @@ const createEntriesStorageOperations = params => {
484
448
  });
485
449
  });
486
450
  };
487
-
488
- const getByIds = async (model, params) => {
451
+ const getByIds = async (initialModel, params) => {
452
+ const model = getStorageOperationsModel(initialModel);
489
453
  const items = await dataLoaders.getRevisionById({
490
454
  model,
491
455
  ids: params.ids
@@ -497,8 +461,8 @@ const createEntriesStorageOperations = params => {
497
461
  });
498
462
  });
499
463
  };
500
-
501
- const getLatestByIds = async (model, params) => {
464
+ const getLatestByIds = async (initialModel, params) => {
465
+ const model = getStorageOperationsModel(initialModel);
502
466
  const items = await dataLoaders.getLatestRevisionByEntryId({
503
467
  model,
504
468
  ids: params.ids
@@ -510,8 +474,8 @@ const createEntriesStorageOperations = params => {
510
474
  });
511
475
  });
512
476
  };
513
-
514
- const getPublishedByIds = async (model, params) => {
477
+ const getPublishedByIds = async (initialModel, params) => {
478
+ const model = getStorageOperationsModel(initialModel);
515
479
  const items = await dataLoaders.getPublishedRevisionByEntryId({
516
480
  model,
517
481
  ids: params.ids
@@ -523,8 +487,8 @@ const createEntriesStorageOperations = params => {
523
487
  });
524
488
  });
525
489
  };
526
-
527
- const getPreviousRevision = async (model, params) => {
490
+ const getPreviousRevision = async (initialModel, params) => {
491
+ const model = getStorageOperationsModel(initialModel);
528
492
  const {
529
493
  entryId,
530
494
  version
@@ -538,7 +502,6 @@ const createEntriesStorageOperations = params => {
538
502
  }),
539
503
  options: {
540
504
  lt: `REV#${(0, _zeroPad.zeroPad)(version)}`,
541
-
542
505
  /**
543
506
  * We need to have extra checks because DynamoDB will return published or latest record if there is no REV# record.
544
507
  */
@@ -552,15 +515,12 @@ const createEntriesStorageOperations = params => {
552
515
  reverse: true
553
516
  }
554
517
  };
555
-
556
518
  try {
557
519
  const result = await (0, _query.queryOne)(queryParams);
558
520
  const storageEntry = (0, _cleanup.cleanupItem)(entity, result);
559
-
560
521
  if (!storageEntry) {
561
522
  return null;
562
523
  }
563
-
564
524
  return convertFromStorageEntry({
565
525
  storageEntry,
566
526
  model
@@ -574,8 +534,8 @@ const createEntriesStorageOperations = params => {
574
534
  }));
575
535
  }
576
536
  };
577
-
578
- const list = async (model, params) => {
537
+ const list = async (initialModel, params) => {
538
+ const model = getStorageOperationsModel(initialModel);
579
539
  const {
580
540
  limit: initialLimit = 10,
581
541
  where: initialWhere,
@@ -595,7 +555,6 @@ const createEntriesStorageOperations = params => {
595
555
  }
596
556
  };
597
557
  let storageEntries = [];
598
-
599
558
  try {
600
559
  storageEntries = await (0, _query.queryAll)(queryAllParams);
601
560
  } catch (ex) {
@@ -605,7 +564,6 @@ const createEntriesStorageOperations = params => {
605
564
  options: queryAllParams.options
606
565
  });
607
566
  }
608
-
609
567
  if (storageEntries.length === 0) {
610
568
  return {
611
569
  hasMoreItems: false,
@@ -614,7 +572,6 @@ const createEntriesStorageOperations = params => {
614
572
  items: []
615
573
  };
616
574
  }
617
-
618
575
  const where = (0, _objectSpread2.default)({}, initialWhere);
619
576
  delete where["published"];
620
577
  delete where["latest"];
@@ -622,7 +579,6 @@ const createEntriesStorageOperations = params => {
622
579
  * We need an object containing field, transformers and paths.
623
580
  * Just build it here and pass on into other methods that require it to avoid mapping multiple times.
624
581
  */
625
-
626
582
  const modelFields = (0, _createFields.createFields)({
627
583
  plugins,
628
584
  fields: model.fields
@@ -633,24 +589,20 @@ const createEntriesStorageOperations = params => {
633
589
  *
634
590
  * This is always being done, but at least its in parallel.
635
591
  */
636
-
637
592
  const records = await Promise.all(storageEntries.map(async storageEntry => {
638
593
  const entry = convertFromStorageEntry({
639
594
  storageEntry,
640
595
  model
641
596
  });
642
-
643
597
  for (const field of model.fields) {
644
598
  entry.values[field.fieldId] = await fromStorage(field, entry.values[field.fieldId]);
645
599
  }
646
-
647
600
  return entry;
648
601
  }));
649
602
  /**
650
603
  * Filter the read items via the code.
651
604
  * It will build the filters out of the where input and transform the values it is using.
652
605
  */
653
-
654
606
  const filteredItems = (0, _filtering.filter)({
655
607
  items: records,
656
608
  where,
@@ -662,11 +614,11 @@ const createEntriesStorageOperations = params => {
662
614
  }
663
615
  });
664
616
  const totalCount = filteredItems.length;
617
+
665
618
  /**
666
619
  * Sorting is also done via the code.
667
620
  * It takes the sort input and sorts by it via the lodash sortBy method.
668
621
  */
669
-
670
622
  const sortedItems = (0, _filtering.sort)({
671
623
  model,
672
624
  plugins,
@@ -682,7 +634,6 @@ const createEntriesStorageOperations = params => {
682
634
  * Although we do not need a cursor here, we will use it as such to keep it standardized.
683
635
  * Number is simply encoded.
684
636
  */
685
-
686
637
  const cursor = totalCount > start + limit ? (0, _cursor.encodeCursor)(`${start + limit}`) : null;
687
638
  return {
688
639
  hasMoreItems,
@@ -691,8 +642,8 @@ const createEntriesStorageOperations = params => {
691
642
  items: (0, _cleanup.cleanupItems)(entity, slicedItems)
692
643
  };
693
644
  };
694
-
695
- const get = async (model, params) => {
645
+ const get = async (initialModel, params) => {
646
+ const model = getStorageOperationsModel(initialModel);
696
647
  const {
697
648
  items
698
649
  } = await list(model, (0, _objectSpread2.default)((0, _objectSpread2.default)({}, params), {}, {
@@ -700,21 +651,21 @@ const createEntriesStorageOperations = params => {
700
651
  }));
701
652
  return items.shift() || null;
702
653
  };
703
-
704
- const publish = async (model, params) => {
654
+ const publish = async (initialModel, params) => {
705
655
  const {
706
656
  entry,
707
657
  storageEntry: initialStorageEntry
708
658
  } = params;
659
+ const model = getStorageOperationsModel(initialModel);
709
660
  const partitionKey = (0, _keys.createPartitionKey)({
710
661
  id: entry.id,
711
662
  locale: model.locale,
712
663
  tenant: model.tenant
713
664
  });
665
+
714
666
  /**
715
- * We need the latest and published entries to see if something needs to be updated along side the publishing one.
667
+ * We need the latest and published entries to see if something needs to be updated alongside the publishing one.
716
668
  */
717
-
718
669
  const initialLatestStorageEntry = await getLatestRevisionByEntryId(model, entry);
719
670
  const initialPublishedStorageEntry = await getPublishedRevisionByEntryId(model, entry);
720
671
  const storageEntry = convertToStorageEntry({
@@ -725,10 +676,9 @@ const createEntriesStorageOperations = params => {
725
676
  * We need to update:
726
677
  * - current entry revision sort key
727
678
  * - published sort key
728
- * - latest sort key - if entry updated is actually latest
679
+ * - the latest sort key - if entry updated is actually latest
729
680
  * - previous published entry to unpublished status - if any previously published entry
730
681
  */
731
-
732
682
  const items = [entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
733
683
  PK: partitionKey,
734
684
  SK: (0, _keys.createRevisionSortKey)(entry),
@@ -742,7 +692,6 @@ const createEntriesStorageOperations = params => {
742
692
  GSI1_PK: (0, _keys.createGSIPartitionKey)(model, "P"),
743
693
  GSI1_SK: (0, _keys.createGSISortKey)(entry)
744
694
  }))];
745
-
746
695
  if (initialLatestStorageEntry && entry.id === initialLatestStorageEntry.id) {
747
696
  items.push(entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
748
697
  PK: partitionKey,
@@ -752,7 +701,6 @@ const createEntriesStorageOperations = params => {
752
701
  GSI1_SK: (0, _keys.createGSISortKey)(entry)
753
702
  })));
754
703
  }
755
-
756
704
  if (initialPublishedStorageEntry && initialPublishedStorageEntry.id !== entry.id) {
757
705
  const publishedStorageEntry = convertToStorageEntry({
758
706
  storageEntry: initialPublishedStorageEntry,
@@ -767,7 +715,6 @@ const createEntriesStorageOperations = params => {
767
715
  GSI1_SK: (0, _keys.createGSISortKey)(publishedStorageEntry)
768
716
  })));
769
717
  }
770
-
771
718
  try {
772
719
  await (0, _batchWrite.batchWriteAll)({
773
720
  table: entity.table,
@@ -785,12 +732,12 @@ const createEntriesStorageOperations = params => {
785
732
  });
786
733
  }
787
734
  };
788
-
789
- const unpublish = async (model, params) => {
735
+ const unpublish = async (initialModel, params) => {
790
736
  const {
791
737
  entry,
792
738
  storageEntry: initialStorageEntry
793
739
  } = params;
740
+ const model = getStorageOperationsModel(initialModel);
794
741
  const partitionKey = (0, _keys.createPartitionKey)({
795
742
  id: entry.id,
796
743
  locale: model.locale,
@@ -804,9 +751,8 @@ const createEntriesStorageOperations = params => {
804
751
  * We need to:
805
752
  * - delete currently published entry
806
753
  * - update current entry revision with new data
807
- * - update latest entry status - if entry being unpublished is latest
754
+ * - update the latest entry status - if entry being unpublished is latest
808
755
  */
809
-
810
756
  const items = [entity.deleteBatch({
811
757
  PK: partitionKey,
812
758
  SK: (0, _keys.createPublishedSortKey)()
@@ -817,12 +763,11 @@ const createEntriesStorageOperations = params => {
817
763
  GSI1_PK: (0, _keys.createGSIPartitionKey)(model, "A"),
818
764
  GSI1_SK: (0, _keys.createGSISortKey)(entry)
819
765
  }))];
766
+
820
767
  /**
821
- * We need the latest entry to see if something needs to be updated along side the unpublishing one.
768
+ * We need the latest entry to see if something needs to be updated alongside the unpublishing one.
822
769
  */
823
-
824
770
  const latestStorageEntry = await getLatestRevisionByEntryId(model, entry);
825
-
826
771
  if (latestStorageEntry && entry.id === latestStorageEntry.id) {
827
772
  items.push(entity.putBatch((0, _objectSpread2.default)((0, _objectSpread2.default)({}, storageEntry), {}, {
828
773
  PK: partitionKey,
@@ -832,7 +777,6 @@ const createEntriesStorageOperations = params => {
832
777
  GSI1_SK: (0, _keys.createGSISortKey)(entry)
833
778
  })));
834
779
  }
835
-
836
780
  try {
837
781
  await (0, _batchWrite.batchWriteAll)({
838
782
  table: entity.table,
@@ -849,7 +793,6 @@ const createEntriesStorageOperations = params => {
849
793
  });
850
794
  }
851
795
  };
852
-
853
796
  return {
854
797
  create,
855
798
  createRevisionFrom,
@@ -870,5 +813,4 @@ const createEntriesStorageOperations = params => {
870
813
  unpublish
871
814
  };
872
815
  };
873
-
874
816
  exports.createEntriesStorageOperations = createEntriesStorageOperations;