better-convex 0.7.2 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. package/dist/aggregate/index.d.ts +1 -1
  2. package/dist/aggregate/index.js +1 -1
  3. package/dist/auth/http/index.d.ts +1 -1
  4. package/dist/auth/index.d.ts +10 -10
  5. package/dist/auth/index.js +5 -4
  6. package/dist/auth/nextjs/index.d.ts +2 -2
  7. package/dist/auth/nextjs/index.js +2 -2
  8. package/dist/{caller-factory-D3OuR1eI.js → caller-factory-CCsm4Dut.js} +2 -2
  9. package/dist/cli.mjs +414 -5
  10. package/dist/{codegen-Cz1idI3-.mjs → codegen-BS36cYTH.mjs} +88 -5
  11. package/dist/{create-schema-orm-69VF4CFV.js → create-schema-orm-OcyA0apQ.js} +10 -13
  12. package/dist/crpc/index.d.ts +2 -2
  13. package/dist/crpc/index.js +3 -3
  14. package/dist/customFunctions-RnzME_cJ.js +167 -0
  15. package/dist/{http-types-BCf2wCgp.d.ts → http-types-BK7FuIcR.d.ts} +1 -1
  16. package/dist/id-BcBb900m.js +121 -0
  17. package/dist/orm/index.d.ts +4 -3
  18. package/dist/orm/index.js +706 -165
  19. package/dist/plugins/index.d.ts +9 -0
  20. package/dist/plugins/index.js +3 -0
  21. package/dist/plugins/ratelimit/index.d.ts +222 -0
  22. package/dist/plugins/ratelimit/index.js +846 -0
  23. package/dist/plugins/ratelimit/react/index.d.ts +76 -0
  24. package/dist/plugins/ratelimit/react/index.js +294 -0
  25. package/dist/{procedure-caller-CcjtUFvL.d.ts → procedure-caller-DYjpq7rG.d.ts} +4 -19
  26. package/dist/rsc/index.d.ts +3 -3
  27. package/dist/rsc/index.js +4 -4
  28. package/dist/runtime-C0WcYGY0.js +1028 -0
  29. package/dist/schema-Bx6j2doh.js +204 -0
  30. package/dist/server/index.d.ts +2 -2
  31. package/dist/server/index.js +4 -3
  32. package/dist/{runtime-B9xQFY8W.js → table-B7yzBihE.js} +3 -1088
  33. package/dist/text-enum-CFdcLUuw.js +30 -0
  34. package/dist/{types-CIBGEYXq.d.ts → types-f53SgpBL.d.ts} +1 -1
  35. package/dist/validators-BcQFm1oY.d.ts +88 -0
  36. package/dist/{customFunctions-CZnCwoR3.js → validators-D_i3BK7v.js} +67 -165
  37. package/dist/watcher.mjs +1 -1
  38. package/dist/{where-clause-compiler-CRP-i1Qa.d.ts → where-clause-compiler-BIjTkVVJ.d.ts} +138 -2
  39. package/package.json +4 -1
  40. /package/dist/{create-schema-BdZOL6ns.js → create-schema-BsN0jL5S.js} +0 -0
  41. /package/dist/{error-Be4OcwwD.js → error-CAGGSN5H.js} +0 -0
  42. /package/dist/{meta-utils-DDVYp9Xf.js → meta-utils-NRyocOSc.js} +0 -0
  43. /package/dist/{query-context-BDSis9rT.js → query-context-DEUFBhXS.js} +0 -0
  44. /package/dist/{query-context-DGExXZIV.d.ts → query-context-ji7By8u0.d.ts} +0 -0
  45. /package/dist/{query-options-B0c1b6pZ.js → query-options-CSCmKYdJ.js} +0 -0
  46. /package/dist/{transformer-Dh0w2py0.js → transformer-ogg-4d78.js} +0 -0
  47. /package/dist/{types-DwGkkq2s.d.ts → types-BTb_4BaU.d.ts} +0 -0
  48. /package/dist/{types-DgwvxKbT.d.ts → types-CM67ko7K.d.ts} +0 -0
@@ -0,0 +1,1028 @@
1
+ import { C as integer, g as index, t as convexTable, x as text } from "./table-B7yzBihE.js";
2
+ import { r as custom, t as id } from "./id-BcBb900m.js";
3
+ import { ConvexError, convexToJson, jsonToConvex, v } from "convex/values";
4
+
5
//#region src/aggregate-core/compare.ts
/**
 * Total order over Convex values. Each value is first mapped to a
 * [typeRank, payload] tuple and the tuples are compared lexicographically,
 * so values of different types order by type rank alone.
 */
function compareValues$1(k1, k2) {
	const t1 = makeComparable(k1);
	const t2 = makeComparable(k2);
	return compareAsTuples(t1, t2);
}
// Compare two [rank, payload] tuples: rank decides, payload breaks ties.
function compareAsTuples(a, b) {
	if (a[0] < b[0]) return -1;
	if (a[0] > b[0]) return 1;
	return compareSameTypeValues(a[1], b[1]);
}
// Compare two payloads known to share the same type rank.
function compareSameTypeValues(v1, v2) {
	// undefined/null payloads carry no ordering information.
	if (v1 === void 0 || v1 === null) return 0;
	const kind = typeof v1;
	if (kind === "bigint" || kind === "number" || kind === "boolean" || kind === "string") {
		if (v1 < v2) return -1;
		return v1 === v2 ? 0 : 1;
	}
	if (!Array.isArray(v1) || !Array.isArray(v2)) throw new Error(`Unexpected type ${v1}`);
	// Element-wise comparison; a shorter array that is a prefix sorts first.
	const shared = Math.min(v1.length, v2.length);
	for (let i = 0; i < shared; i++) {
		const cmp = compareAsTuples(v1[i], v2[i]);
		if (cmp !== 0) return cmp;
	}
	if (v1.length === v2.length) return 0;
	return v1.length < v2.length ? -1 : 1;
}
/**
 * Map a value to its [typeRank, payload] form. Ranks follow Convex's
 * value ordering; NaN gets rank 3.5 so it sorts after all other numbers.
 */
function makeComparable(v) {
	if (v === void 0) return [0, void 0];
	if (v === null) return [1, null];
	switch (typeof v) {
		case "bigint": return [2, v];
		case "number": return Number.isNaN(v) ? [3.5, 0] : [3, v];
		case "boolean": return [4, v];
		case "string": return [5, v];
	}
	// Binary data compares byte-wise.
	if (v instanceof ArrayBuffer) return [6, Array.from(new Uint8Array(v)).map(makeComparable)];
	if (Array.isArray(v)) return [7, v.map(makeComparable)];
	// Objects compare as their entries sorted by key.
	const entries = Object.keys(v).sort().map((key) => [key, v[key]]);
	return [8, entries.map(makeComparable)];
}

//#endregion
40
+
41
+ //#endregion
42
//#region src/aggregate-core/schema.ts
// Storage tables for the persisted B-tree: one document per tree and one
// document per tree node.
const AGGREGATE_TREE_TABLE = "aggregate_rank_tree";
const AGGREGATE_NODE_TABLE = "aggregate_rank_node";
// Aggregate carried by a node or subtree: item count plus sum of summands.
const aggregateCounterValidator = v.object({
	count: v.number(),
	sum: v.number()
});
// One stored item: key `k`, value `v`, summand `s`.
const aggregateItemValidator = v.object({
	k: v.any(),
	v: v.any(),
	s: v.number()
});
// Tree root document: names the aggregate, records the node-size setting,
// and points at the root node. Indexed for lookup by namespace and by
// aggregate name.
const aggregateTreeTable = convexTable(AGGREGATE_TREE_TABLE, {
	aggregateName: text().notNull(),
	maxNodeSize: integer().notNull(),
	namespace: custom(v.any()),
	root: id(AGGREGATE_NODE_TABLE).notNull()
}, (tree) => [index("by_namespace").on(tree.namespace), index("by_aggregate_name").on(tree.aggregateName)]);
// B-tree node: sorted items, child node ids, and an optional cached
// aggregate (left unset on "lazy" nodes; see getOrCreateTree/isRootLazy).
const aggregateNodeTable = convexTable(AGGREGATE_NODE_TABLE, {
	aggregate: custom(aggregateCounterValidator),
	items: custom(v.array(aggregateItemValidator)).notNull(),
	subtrees: custom(v.array(v.string())).notNull()
});
// Both tables keyed by name, for inclusion in the app schema.
const aggregateStorageTables = {
	[AGGREGATE_NODE_TABLE]: aggregateNodeTable,
	[AGGREGATE_TREE_TABLE]: aggregateTreeTable
};
69
+
70
+ //#endregion
71
+ //#region src/aggregate-core/btree.ts
72
const DEFAULT_MAX_NODE_SIZE = 16;
// Name recorded for trees created before namespaces carried an aggregate name.
const LEGACY_AGGREGATE_NAME = "__legacy__";
/** Best-effort pretty-printer for log and error messages. */
function p(v) {
	try {
		return JSON.stringify(v);
	} catch {
		// Circular or otherwise non-serializable values fall back to String().
		return String(v);
	}
}
/** Debug logging hook; intentionally a no-op. */
function log(s) {}
/**
 * Recover the aggregate name from an encoded namespace triple
 * [name, namespace, marker]; anything else belongs to a legacy tree.
 */
function aggregateNameFromNamespace(namespace) {
	const looksEncoded = Array.isArray(namespace)
		&& namespace.length === 3
		&& typeof namespace[0] === "string"
		&& (namespace[2] === 0 || namespace[2] === 1);
	return looksEncoded ? namespace[0] : LEGACY_AGGREGATE_NAME;
}
86
/**
 * Insert a (key, value, summand) item into the namespace's B-tree,
 * creating the tree on first use. If the root overflows and splits, a new
 * root node is allocated holding the single pushed-up separator item.
 */
async function insertHandler(ctx, args) {
	const tree = await getOrCreateTree(ctx.db, args.namespace, DEFAULT_MAX_NODE_SIZE, true);
	const summand = args.summand ?? 0;
	const pushUp = await insertIntoNode(ctx, args.namespace, tree.root, {
		k: args.key,
		v: args.value,
		s: summand
	});
	if (pushUp) {
		// New root aggregate = left half + right half + separator item.
		// For a lazy root the subtree counts are undefined and the &&
		// chain leaves `total` undefined, keeping the new root lazy.
		const total = pushUp.leftSubtreeCount && pushUp.rightSubtreeCount && add(add(pushUp.leftSubtreeCount, pushUp.rightSubtreeCount), itemAggregate(pushUp.item));
		const newRoot = await ctx.db.insert(AGGREGATE_NODE_TABLE, {
			items: [pushUp.item],
			subtrees: [pushUp.leftSubtree, pushUp.rightSubtree],
			aggregate: total
		});
		await ctx.db.patch(tree._id, { root: newRoot });
	}
}
104
/**
 * Delete the item at `args.key` from the namespace's tree. If the root is
 * left with no items and a single child, the root is collapsed into that
 * child and the old root document deleted.
 */
async function deleteHandler(ctx, args) {
	const tree = await getOrCreateTree(ctx.db, args.namespace, DEFAULT_MAX_NODE_SIZE, true);
	await deleteFromNode(ctx, args.namespace, tree.root, args.key);
	const root = await ctx.db.get(tree.root);
	if (root.items.length === 0 && root.subtrees.length === 1) {
		log(`collapsing root ${root._id} because its only child is ${root.subtrees[0]}`);
		await ctx.db.patch(tree._id, { root: root.subtrees[0] });
		// A lazy root (aggregate === undefined) stays lazy after collapse:
		// laziness is propagated onto the promoted child.
		if (root.aggregate === void 0) await ctx.db.patch(root.subtrees[0], { aggregate: void 0 });
		await ctx.db.delete(root._id);
	}
}
115
/** Configured maximum items per node, read from the tree document. */
async function MAX_NODE_SIZE(ctx, namespace) {
	return (await mustGetTree(ctx.db, namespace)).maxNodeSize;
}
/**
 * Minimum items per non-root node: half the maximum. The maximum must be
 * even and at least 4 so splits and merges stay balanced.
 */
async function MIN_NODE_SIZE(ctx, namespace) {
	const max = await MAX_NODE_SIZE(ctx, namespace);
	if (max % 2 !== 0 || max < 4) throw new Error("MAX_NODE_SIZE must be even and at least 4");
	return max / 2;
}
123
/**
 * Aggregate (count, sum) of all items between positions k1 and k2.
 * A missing tree is treated as empty rather than an error.
 */
async function aggregateBetweenHandler(ctx, args) {
	const tree = await getTree(ctx.db, args.namespace);
	if (tree === null) return {
		count: 0,
		sum: 0
	};
	return await aggregateBetweenInNode(ctx.db, tree.root, args.k1, args.k2);
}
131
/**
 * Walk one node and return, in key order, everything between k1 and k2 as
 * a flat list of { type: "item", item } and { type: "subtree", subtree }
 * entries. Subtrees wholly inside the range are returned unexpanded;
 * subtrees straddling a bound are filtered recursively. An undefined
 * bound means unbounded on that side.
 */
async function filterBetween(db, node, k1, k2) {
	const n = await db.get(node);
	// Holds plain entry objects and promises of entry arrays (from the
	// recursive calls); Promise.all below resolves both shapes.
	const included = [];
	function includeSubtree(i, unboundedRight) {
		// Once anything has been included, everything further right is
		// already past k1.
		const unboundedLeft = k1 === void 0 || included.length > 0;
		if (unboundedLeft && unboundedRight) included.push({
			type: "subtree",
			subtree: n.subtrees[i]
		});
		else included.push(filterBetween(db, n.subtrees[i], unboundedLeft ? void 0 : k1, unboundedRight ? void 0 : k2));
	}
	let done = false;
	for (let i = 0; i < n.items.length; i++) {
		const k1IsLeft = k1 === void 0 || compareKeys(k1, n.items[i].k) === -1;
		const k2IsRight = k2 === void 0 || compareKeys(k2, n.items[i].k) === 1;
		// Subtree i sits immediately left of item i.
		if (k1IsLeft && n.subtrees.length > 0) includeSubtree(i, k2IsRight);
		if (!k2IsRight) {
			done = true;
			break;
		}
		if (k1IsLeft) included.push({
			type: "item",
			item: n.items[i]
		});
	}
	// Rightmost subtree, unless we already stopped at an item past k2.
	if (!done && n.subtrees.length > 0) includeSubtree(n.subtrees.length - 1, k2 === void 0);
	return (await Promise.all(included)).flat(1);
}
159
/**
 * Sum the aggregates of everything in the subtree at `node` that falls
 * between positions k1 and k2 (undefined bound = unbounded on that side).
 */
async function aggregateBetweenInNode(db, node, k1, k2) {
	const included = await filterBetween(db, node, k1, k2);
	// Items contribute their own aggregate; untouched subtrees contribute
	// their (possibly recomputed) node aggregate.
	const partials = await Promise.all(included.map(async (entry) => {
		if (entry.type === "item") return itemAggregate(entry.item);
		return await nodeAggregate(db, await db.get(entry.subtree));
	}));
	return accumulate(partials);
}
172
/** Item at `offset` (0-based, ascending) within the optional [k1, k2] range. */
async function atOffsetHandler(ctx, args) {
	if (args.offset < 0) throw new Error("offset must be non-negative");
	if (!Number.isInteger(args.offset)) throw new Error("offset must be an integer");
	const tree = await getTree(ctx.db, args.namespace);
	if (tree === null) throw new ConvexError("tree is empty");
	return await atOffsetInNode(ctx.db, tree.root, args.offset, args.k1, args.k2);
}
/** Item at `offset` counting backwards from the end of the range (0 = last). */
async function atNegativeOffsetHandler(ctx, args) {
	if (args.offset < 0) throw new Error("offset must be non-negative");
	if (!Number.isInteger(args.offset)) throw new Error("offset must be an integer");
	const tree = await getTree(ctx.db, args.namespace);
	if (tree === null) throw new ConvexError("tree is empty");
	return await negativeOffsetInNode(ctx.db, tree.root, args.offset, args.k1, args.k2);
}
/** Rank of `args.key` within the range: number of items in [k1, key). */
async function offsetHandler(ctx, args) {
	return (await aggregateBetweenHandler(ctx, {
		k1: args.k1,
		k2: args.key,
		namespace: args.namespace
	})).count;
}
/** Reverse rank of `args.key`: number of items in (key, k2]. */
async function offsetUntilHandler(ctx, args) {
	return (await aggregateBetweenHandler(ctx, {
		k1: args.key,
		k2: args.k2,
		namespace: args.namespace
	})).count;
}
200
/**
 * Recursively delete `key` from the subtree rooted at `node`, rebalancing
 * child nodes on the way back up (borrow from a richer sibling when
 * possible, otherwise merge siblings). Returns the removed item; a null
 * result from the recursive call propagates unchanged.
 * Throws ConvexError({ code: "DELETE_MISSING_KEY" }) when the key is
 * absent from a leaf.
 */
async function deleteFromNode(ctx, namespace, node, key) {
	let n = await ctx.db.get(node);
	let foundItem = null;
	let i = 0;
	for (; i < n.items.length; i++) {
		const compare = compareKeys(key, n.items[i].k);
		// key < items[i].k: the key, if present, is in subtree i.
		if (compare === -1) break;
		if (compare === 0) {
			log(`found key ${p(key)} in node ${n._id}`);
			if (n.subtrees.length === 0) {
				// Leaf: remove the item and shrink the cached aggregate
				// (skipped when this node is lazy, aggregate undefined).
				await ctx.db.patch(node, {
					items: [...n.items.slice(0, i), ...n.items.slice(i + 1)],
					aggregate: n.aggregate && sub(n.aggregate, itemAggregate(n.items[i]))
				});
				return n.items[i];
			}
			// Internal node: overwrite the item with its in-order
			// predecessor (largest item of subtree i), then continue below
			// to delete that predecessor from the subtree.
			const predecessor = await negativeOffsetInNode(ctx.db, n.subtrees[i], 0);
			log(`replacing ${p(key)} with predecessor ${p(predecessor.k)}`);
			foundItem = n.items[i];
			await ctx.db.patch(node, {
				items: [
					...n.items.slice(0, i),
					predecessor,
					...n.items.slice(i + 1)
				],
				aggregate: n.aggregate && sub(add(n.aggregate, itemAggregate(predecessor)), itemAggregate(n.items[i]))
			});
			n = await ctx.db.get(node);
			key = predecessor.k;
			break;
		}
	}
	if (n.subtrees.length === 0) throw new ConvexError({
		code: "DELETE_MISSING_KEY",
		message: `key ${p(key)} not found in node ${n._id}`
	});
	const deleted = await deleteFromNode(ctx, namespace, n.subtrees[i], key);
	if (!deleted) return null;
	if (!foundItem) foundItem = deleted;
	const newAggregate = n.aggregate && sub(n.aggregate, itemAggregate(deleted));
	if (newAggregate) await ctx.db.patch(node, { aggregate: newAggregate });
	// Rebalance child i if the deletion left it below minimum occupancy.
	const deficientSubtree = await ctx.db.get(n.subtrees[i]);
	const minNodeSize = await MIN_NODE_SIZE(ctx, namespace);
	if (deficientSubtree.items.length < minNodeSize) {
		log(`deficient subtree ${deficientSubtree._id}`);
		if (i > 0) {
			// Borrow from the left sibling (rotate right) when it can
			// spare an item.
			const leftSibling = await ctx.db.get(n.subtrees[i - 1]);
			if (leftSibling.items.length > minNodeSize) {
				log(`rotating right with left sibling ${leftSibling._id}`);
				// The sibling's rightmost child moves with the rotation.
				const grandchild = leftSibling.subtrees.length ? await ctx.db.get(leftSibling.subtrees[leftSibling.subtrees.length - 1]) : null;
				const grandchildCount = grandchild ? grandchild.aggregate : {
					count: 0,
					sum: 0
				};
				await ctx.db.patch(deficientSubtree._id, {
					items: [n.items[i - 1], ...deficientSubtree.items],
					subtrees: grandchild ? [grandchild._id, ...deficientSubtree.subtrees] : [],
					aggregate: deficientSubtree.aggregate && grandchildCount && add(add(deficientSubtree.aggregate, grandchildCount), itemAggregate(n.items[i - 1]))
				});
				await ctx.db.patch(leftSibling._id, {
					items: leftSibling.items.slice(0, leftSibling.items.length - 1),
					subtrees: grandchild ? leftSibling.subtrees.slice(0, leftSibling.subtrees.length - 1) : [],
					aggregate: leftSibling.aggregate && grandchildCount && sub(sub(leftSibling.aggregate, grandchildCount), itemAggregate(leftSibling.items[leftSibling.items.length - 1]))
				});
				await ctx.db.patch(node, { items: [
					...n.items.slice(0, i - 1),
					leftSibling.items[leftSibling.items.length - 1],
					...n.items.slice(i)
				] });
				return foundItem;
			}
		}
		if (i < n.subtrees.length - 1) {
			// Otherwise borrow from the right sibling (rotate left).
			const rightSibling = await ctx.db.get(n.subtrees[i + 1]);
			if (rightSibling.items.length > minNodeSize) {
				log(`rotating left with right sibling ${rightSibling._id}`);
				const grandchild = rightSibling.subtrees.length ? await ctx.db.get(rightSibling.subtrees[0]) : null;
				const grandchildCount = grandchild ? grandchild.aggregate : {
					count: 0,
					sum: 0
				};
				await ctx.db.patch(deficientSubtree._id, {
					items: [...deficientSubtree.items, n.items[i]],
					subtrees: grandchild ? [...deficientSubtree.subtrees, grandchild._id] : [],
					aggregate: deficientSubtree.aggregate && grandchildCount && add(add(deficientSubtree.aggregate, grandchildCount), itemAggregate(n.items[i]))
				});
				await ctx.db.patch(rightSibling._id, {
					items: rightSibling.items.slice(1),
					subtrees: grandchild ? rightSibling.subtrees.slice(1) : [],
					aggregate: rightSibling.aggregate && grandchildCount && sub(sub(rightSibling.aggregate, grandchildCount), itemAggregate(rightSibling.items[0]))
				});
				await ctx.db.patch(node, { items: [
					...n.items.slice(0, i),
					rightSibling.items[0],
					...n.items.slice(i + 1)
				] });
				return foundItem;
			}
		}
		// Neither sibling can spare an item: merge with one of them.
		if (i > 0) {
			log("merging with left sibling");
			await mergeNodes(ctx.db, n, i - 1);
		} else {
			log("merging with right sibling");
			await mergeNodes(ctx.db, n, i);
		}
	}
	return foundItem;
}
309
/**
 * Merge parent.subtrees[leftIndex + 1] into parent.subtrees[leftIndex],
 * pulling the separator item down from the parent, then delete the
 * emptied right node. The parent's stored aggregate field is not
 * modified (only its items/subtrees arrays shrink).
 */
async function mergeNodes(db, parent, leftIndex) {
	const left = await db.get(parent.subtrees[leftIndex]);
	const right = await db.get(parent.subtrees[leftIndex + 1]);
	log(`merging ${right._id} into ${left._id}`);
	await db.patch(left._id, {
		items: [
			...left.items,
			parent.items[leftIndex],
			...right.items
		],
		subtrees: [...left.subtrees, ...right.subtrees],
		// Left aggregate absorbs the right subtree plus the separator;
		// stays undefined (lazy) if either side was lazy.
		aggregate: left.aggregate && right.aggregate && add(add(left.aggregate, right.aggregate), itemAggregate(parent.items[leftIndex]))
	});
	await db.patch(parent._id, {
		items: [...parent.items.slice(0, leftIndex), ...parent.items.slice(leftIndex + 1)],
		subtrees: [...parent.subtrees.slice(0, leftIndex + 1), ...parent.subtrees.slice(leftIndex + 2)]
	});
	await db.delete(right._id);
}
328
/**
 * Item at `index` counting from the END of the [k1, k2] range (0 = last).
 * Throws a ConvexError when the range holds fewer than index + 1 items.
 */
async function negativeOffsetInNode(db, node, index, k1, k2) {
	const filtered = await filterBetween(db, node, k1, k2);
	// Walk entries right-to-left, skipping whole subtrees by their counts.
	for (const included of filtered.reverse()) if (included.type === "item") {
		if (index === 0) return included.item;
		index -= 1;
	} else {
		const subtreeCount = (await nodeAggregate(db, await db.get(included.subtree))).count;
		if (index < subtreeCount) return await negativeOffsetInNode(db, included.subtree, index);
		index -= subtreeCount;
	}
	throw new ConvexError(`negative offset exceeded count by ${index} (in node ${node})`);
}
/**
 * Item at `index` counting from the START of the [k1, k2] range.
 * Throws a ConvexError when the range holds fewer than index + 1 items.
 */
async function atOffsetInNode(db, node, index, k1, k2) {
	const filtered = await filterBetween(db, node, k1, k2);
	for (const included of filtered) if (included.type === "item") {
		if (index === 0) return included.item;
		index -= 1;
	} else {
		const subtreeCount = (await nodeAggregate(db, await db.get(included.subtree))).count;
		if (index < subtreeCount) return await atOffsetInNode(db, included.subtree, index);
		index -= subtreeCount;
	}
	throw new ConvexError(`offset exceeded count by ${index} (in node ${node})`);
}
352
/** Aggregate contribution of a single stored item. */
function itemAggregate(item) {
	return {
		count: 1,
		sum: item.s
	};
}
/** Per-item aggregates for every item stored directly in `node`. */
function nodeCounts(node) {
	return node.items.map((item) => itemAggregate(item));
}
/** Aggregates of each direct child of `node`, fetched in parallel. */
async function subtreeCounts(db, node) {
	const pending = node.subtrees.map(async (subtree) => nodeAggregate(db, await db.get(subtree)));
	return await Promise.all(pending);
}
/**
 * Aggregate of the whole subtree rooted at `node`. Uses the cached value
 * when present; a lazy node (aggregate === undefined) is recomputed from
 * its own items plus its children.
 */
async function nodeAggregate(db, node) {
	if (node.aggregate !== void 0) return node.aggregate;
	const fromChildren = await subtreeCounts(db, node);
	return add(accumulate(nodeCounts(node)), accumulate(fromChildren));
}
/** Component-wise sum of two aggregates. */
function add(a, b) {
	return {
		count: a.count + b.count,
		sum: a.sum + b.sum
	};
}
/** Component-wise difference of two aggregates. */
function sub(a, b) {
	return {
		count: a.count - b.count,
		sum: a.sum - b.sum
	};
}
/** Sum a list of aggregates, starting from the zero aggregate. */
function accumulate(nums) {
	let total = {
		count: 0,
		sum: 0
	};
	for (const part of nums) total = add(total, part);
	return total;
}
389
/**
 * Recursively insert `item` into the subtree rooted at `node`, keeping
 * items sorted by key. If the node overflows (more than maxNodeSize
 * items) it is split around its median and
 * { item, leftSubtree, rightSubtree, leftSubtreeCount, rightSubtreeCount }
 * is returned for the caller to absorb; otherwise returns null.
 * Throws ConvexError when the key already exists.
 */
async function insertIntoNode(ctx, namespace, node, item) {
	const n = await ctx.db.get(node);
	let i = 0;
	for (; i < n.items.length; i++) {
		const compare = compareKeys(item.k, n.items[i].k);
		// Stop at the first existing item that sorts after the new key.
		if (compare === -1) break;
		if (compare === 0) throw new ConvexError(`key ${p(item.k)} already exists in node ${n._id}`);
	}
	if (n.subtrees.length > 0) {
		// Internal node: insert into child i; if the child split, replace
		// it with the two halves and hoist the pushed-up median here.
		const pushUp = await insertIntoNode(ctx, namespace, n.subtrees[i], item);
		if (pushUp) await ctx.db.patch(node, {
			items: [
				...n.items.slice(0, i),
				pushUp.item,
				...n.items.slice(i)
			],
			subtrees: [
				...n.subtrees.slice(0, i),
				pushUp.leftSubtree,
				pushUp.rightSubtree,
				...n.subtrees.slice(i + 1)
			]
		});
	} else await ctx.db.patch(node, { items: [
		...n.items.slice(0, i),
		item,
		...n.items.slice(i)
	] });
	// Lazy nodes (aggregate undefined) skip aggregate maintenance.
	const newAggregate = n.aggregate && add(n.aggregate, itemAggregate(item));
	if (newAggregate) await ctx.db.patch(node, { aggregate: newAggregate });
	const newN = await ctx.db.get(node);
	const maxNodeSize = await MAX_NODE_SIZE(ctx, namespace);
	const minNodeSize = await MIN_NODE_SIZE(ctx, namespace);
	if (newN.items.length > maxNodeSize) {
		// NOTE(review): since MIN_NODE_SIZE enforces an even maxNodeSize,
		// maxNodeSize + 1 === 2 * minNodeSize + 1 and the two sides of this
		// || are the same check; `&&` looks like the intended operator —
		// confirm against upstream before changing.
		if (newN.items.length !== maxNodeSize + 1 || newN.items.length !== 2 * minNodeSize + 1) throw new Error(`bad ${newN.items.length}`);
		log(`splitting node ${newN._id} at ${newN.items[minNodeSize].k}`);
		const topLevel = nodeCounts(newN);
		const subCounts = await subtreeCounts(ctx.db, newN);
		// Each half's aggregate: its items plus its share of the children.
		const leftCount = add(accumulate(topLevel.slice(0, minNodeSize)), accumulate(subCounts.length ? subCounts.slice(0, minNodeSize + 1) : []));
		const rightCount = add(accumulate(topLevel.slice(minNodeSize + 1)), accumulate(subCounts.length ? subCounts.slice(minNodeSize + 1) : []));
		// Sanity checks: halves plus the median must reproduce the old
		// totals (the sum compared with a small float tolerance).
		if (newN.aggregate && leftCount.count + rightCount.count + 1 !== newN.aggregate.count) throw new Error(`bad count split ${leftCount.count} ${rightCount.count} ${newN.aggregate.count}`);
		if (newN.aggregate && Math.abs(leftCount.sum + rightCount.sum + newN.items[minNodeSize].s - newN.aggregate.sum) > 1e-5) throw new Error(`bad sum split ${leftCount.sum} ${rightCount.sum} ${newN.items[minNodeSize].s} ${newN.aggregate.sum}`);
		// Keep the left half in place; allocate a new node for the right.
		await ctx.db.patch(node, {
			items: newN.items.slice(0, minNodeSize),
			subtrees: newN.subtrees.length ? newN.subtrees.slice(0, minNodeSize + 1) : [],
			aggregate: leftCount
		});
		const splitN = await ctx.db.insert(AGGREGATE_NODE_TABLE, {
			items: newN.items.slice(minNodeSize + 1),
			subtrees: newN.subtrees.length ? newN.subtrees.slice(minNodeSize + 1) : [],
			aggregate: rightCount
		});
		return {
			item: newN.items[minNodeSize],
			leftSubtree: node,
			rightSubtree: splitN,
			// Undefined when the node was lazy, so the parent stays lazy.
			leftSubtreeCount: newN.aggregate && leftCount,
			rightSubtreeCount: newN.aggregate && rightCount
		};
	}
	return null;
}
451
/** Compare two tree positions using the Convex value ordering. */
function compareKeys(k1, k2) {
	return compareValues$1(k1, k2);
}
/** Fetch the tree document for `namespace`, or null if none exists. */
async function getTree(db, namespace) {
	return await db.query(AGGREGATE_TREE_TABLE).withIndex("by_namespace", (q) => q.eq("namespace", namespace)).unique();
}
/** Like getTree, but throws when the tree has not been initialized. */
async function mustGetTree(db, namespace) {
	const tree = await getTree(db, namespace);
	if (!tree) throw new Error("btree not initialized");
	return tree;
}
462
/**
 * Fetch the tree document for `namespace`, creating an empty tree on
 * first use. Existing trees get their `aggregateName` backfilled when it
 * differs from the name encoded in the namespace.
 */
async function getOrCreateTree(db, namespace, maxNodeSize, rootLazy) {
	const originalTree = await getTree(db, namespace);
	const aggregateName = aggregateNameFromNamespace(namespace);
	if (originalTree) {
		if (originalTree.aggregateName !== aggregateName) {
			await db.patch(originalTree._id, { aggregateName });
			return {
				...originalTree,
				aggregateName
			};
		}
		return originalTree;
	}
	// New tree: start with an empty, eagerly-aggregated root node.
	const root = await db.insert(AGGREGATE_NODE_TABLE, {
		items: [],
		subtrees: [],
		aggregate: {
			count: 0,
			sum: 0
		}
	});
	// Fall back to the default-namespace tree's settings, then defaults.
	// NOTE(review): MAX_NODE_SIZE throws when no default-namespace tree
	// exists; this fallback only runs when maxNodeSize is nullish —
	// confirm all callers pass a size.
	const effectiveMaxNodeSize = maxNodeSize ?? await MAX_NODE_SIZE({ db }, void 0) ?? DEFAULT_MAX_NODE_SIZE;
	const effectiveRootLazy = rootLazy ?? await isRootLazy(db, void 0) ?? true;
	const id = await db.insert(AGGREGATE_TREE_TABLE, {
		aggregateName,
		root,
		maxNodeSize: effectiveMaxNodeSize,
		namespace
	});
	const newTree = await db.get(id);
	// Validates the chosen node size (throws if odd or < 4).
	await MIN_NODE_SIZE({ db }, namespace);
	// A lazy root stores no aggregate; it is recomputed on demand.
	if (effectiveRootLazy) await db.patch(root, { aggregate: void 0 });
	return newTree;
}
496
/**
 * Whether the tree's root skips aggregate maintenance (lazy). Missing
 * trees default to lazy.
 */
async function isRootLazy(db, namespace) {
	const tree = await getTree(db, namespace);
	if (!tree) return true;
	return (await db.get(tree.root))?.aggregate === void 0;
}
/** Depth-first delete of `node` and every node beneath it. */
async function deleteTreeNodes(db, node) {
	const current = await db.get(node);
	if (!current) return;
	for (const subtree of current.subtrees) await deleteTreeNodes(db, subtree);
	await db.delete(node);
}
/**
 * Drop and recreate the namespace's tree. maxNodeSize and rootLazy
 * default to the deleted tree's settings, or to the package defaults
 * when no tree existed.
 */
async function clearTree(db, args) {
	const tree = await getTree(db, args.namespace);
	let existingRootLazy = true;
	let existingMaxNodeSize = DEFAULT_MAX_NODE_SIZE;
	if (tree) {
		await db.delete(tree._id);
		const root = await db.get(tree.root);
		if (root) {
			// Remember laziness before the nodes are destroyed.
			existingRootLazy = root.aggregate === void 0;
			await deleteTreeNodes(db, tree.root);
		}
		existingMaxNodeSize = tree.maxNodeSize;
	}
	await getOrCreateTree(db, args.namespace, args.maxNodeSize ?? existingMaxNodeSize, args.rootLazy ?? existingRootLazy);
}
522
/**
 * Page through the namespace's items in key order. An absent tree yields
 * a single empty, done page.
 */
async function paginateHandler(ctx, args) {
	const tree = await getTree(ctx.db, args.namespace);
	if (tree === null) return {
		page: [],
		cursor: "",
		isDone: true
	};
	return await paginateInNode(ctx.db, tree.root, args.limit, args.order, args.cursor, args.k1, args.k2);
}
/**
 * Collect up to `limit` items from the subtree at `node`, between k1 and
 * k2, in `order` ("asc" | "desc"). The returned cursor is the
 * JSON-serialized position of the last item of the page; "" marks a
 * finished pagination, and an empty-string input cursor short-circuits
 * to a done page.
 */
async function paginateInNode(db, node, limit, order, cursor, k1, k2) {
	if (limit <= 0) throw new ConvexError("limit must be positive");
	if (cursor !== void 0 && cursor.length === 0) return {
		page: [],
		cursor: "",
		isDone: true
	};
	const items = [];
	// Resume from the cursor by tightening the bound on the side we read
	// from: the lower bound for "asc", the upper bound for "desc".
	const filtered = await filterBetween(db, node, cursor === void 0 || order === "desc" ? k1 : jsonToConvex(JSON.parse(cursor)), cursor === void 0 || order === "asc" ? k2 : jsonToConvex(JSON.parse(cursor)));
	if (order === "desc") filtered.reverse();
	for (const included of filtered) {
		if (items.length >= limit) return {
			page: items,
			cursor: JSON.stringify(convexToJson(items[items.length - 1].k)),
			isDone: false
		};
		if (included.type === "item") items.push(included.item);
		else {
			// Descend into the subtree for the remainder of this page.
			const { page, cursor: newCursor, isDone } = await paginateInNode(db, included.subtree, limit - items.length, order);
			items.push(...page);
			if (!isDone) return {
				page: items,
				cursor: newCursor,
				isDone: false
			};
		}
	}
	return {
		page: items,
		cursor: "",
		isDone: true
	};
}
564
/**
 * Page through the namespaces of all trees (optionally filtered by
 * aggregate name) using Convex's native pagination; "endcursor" is the
 * terminal cursor sentinel.
 */
async function paginateNamespacesHandler(ctx, args) {
	if (args.cursor === "endcursor") return {
		page: [],
		cursor: "endcursor",
		isDone: true
	};
	const { page: trees, continueCursor, isDone } = await (args.aggregateName === void 0 ? ctx.db.query(AGGREGATE_TREE_TABLE) : ctx.db.query(AGGREGATE_TREE_TABLE).withIndex("by_aggregate_name", (q) => q.eq("aggregateName", args.aggregateName))).paginate({
		cursor: args.cursor ?? null,
		numItems: args.limit
	});
	return {
		page: trees.map((t) => t.namespace ?? null),
		cursor: isDone ? "endcursor" : continueCursor ?? "endcursor",
		isDone
	};
}
/** Run several aggregateBetween queries in parallel. */
async function aggregateBetweenBatchHandler(ctx, args) {
	return await Promise.all(args.queries.map((query) => aggregateBetweenHandler(ctx, query)));
}
/**
 * Run several at-offset lookups in parallel; a negative offset addresses
 * from the end (-1 = last item).
 */
async function atOffsetBatchHandler(ctx, args) {
	return await Promise.all(args.queries.map((query) => query.offset >= 0 ? atOffsetHandler(ctx, query) : atNegativeOffsetHandler(ctx, {
		...query,
		offset: -query.offset - 1
	})));
}
589
+
590
+ //#endregion
591
//#region src/aggregate-core/positions.ts
// Sentinels that sort before / after every document id at a given key.
const BEFORE_ALL_IDS = null;
const AFTER_ALL_IDS = [];
/**
 * Interleave array keys with empty strings ("", k0, "", k1, "", ...) so
 * bounds can be positioned between any two components. Non-array keys
 * pass through unchanged.
 */
function explodeKey(key) {
	if (!Array.isArray(key)) return key;
	const exploded = [""];
	for (const part of key) exploded.push(part, "");
	return exploded;
}
/** Inverse of explodeKey: keep the odd positions of an exploded array. */
function implodeKey(k) {
	if (!Array.isArray(k)) return k;
	const parts = [];
	for (let i = 1; i < k.length; i += 2) parts.push(k[i]);
	return parts;
}
/** Tree position [explodedKey, id, ""] addressing an exact item. */
function keyToPosition(key, id) {
	return [explodeKey(key), id, ""];
}
/** Recover { key, id } from a position produced by keyToPosition. */
function positionToKey(position) {
	return {
		key: implodeKey(position[0]),
		id: position[1]
	};
}
/**
 * Translate user-facing bounds ({ eq }, { prefix }, or { lower, upper })
 * into the pair of tree positions (k1, k2) delimiting the range.
 */
function boundsToPositions(bounds) {
	if (bounds === void 0) return {};
	if ("eq" in bounds) {
		const lower = boundToPosition("lower", {
			key: bounds.eq,
			inclusive: true
		});
		const upper = boundToPosition("upper", {
			key: bounds.eq,
			inclusive: true
		});
		return { k1: lower, k2: upper };
	}
	if ("prefix" in bounds) {
		// Exploded prefix without the trailing "" separator, so appending
		// a sentinel spans exactly the keys sharing this prefix.
		const exploded = [];
		for (const part of bounds.prefix) exploded.push("", part);
		return {
			k1: [exploded.concat([BEFORE_ALL_IDS]), BEFORE_ALL_IDS, BEFORE_ALL_IDS],
			k2: [exploded.concat([AFTER_ALL_IDS]), AFTER_ALL_IDS, AFTER_ALL_IDS]
		};
	}
	return {
		k1: boundToPosition("lower", bounds.lower),
		k2: boundToPosition("upper", bounds.upper)
	};
}
/**
 * Turn one user bound into a tree position; undefined for an absent
 * bound. An inclusive lower bound sorts before all ids at its key, an
 * inclusive upper bound after them (and vice versa when exclusive); an
 * explicit bound.id pins the id component instead.
 */
function boundToPosition(direction, bound) {
	if (bound === void 0) return;
	const exploded = explodeKey(bound.key);
	if (direction === "lower") {
		const edge = bound.inclusive ? BEFORE_ALL_IDS : AFTER_ALL_IDS;
		return [exploded, bound.id ?? edge, edge];
	}
	const edge = bound.inclusive ? AFTER_ALL_IDS : BEFORE_ALL_IDS;
	return [exploded, bound.id ?? edge, edge];
}

//#endregion
676
+
677
+ //#endregion
678
//#region src/aggregate-core/runtime.ts
// The third tuple element distinguishes "no namespace supplied" from an
// explicit namespace, since both may be stored as null.
const INTERNAL_NAMESPACE_MARKER_MISSING = 0;
const INTERNAL_NAMESPACE_MARKER_PRESENT = 1;
/** Pack an aggregate name + optional user namespace into the stored triple. */
const encodeNamespace = (aggregateName, namespace) => {
	const missing = namespace === void 0;
	return [
		aggregateName,
		missing ? null : namespace,
		missing ? INTERNAL_NAMESPACE_MARKER_MISSING : INTERNAL_NAMESPACE_MARKER_PRESENT
	];
};
/** True when `value` has the encoded [name, namespace, marker] shape. */
const isInternalNamespace = (value) => {
	if (!Array.isArray(value) || value.length !== 3) return false;
	if (typeof value[0] !== "string") return false;
	return value[2] === INTERNAL_NAMESPACE_MARKER_MISSING || value[2] === INTERNAL_NAMESPACE_MARKER_PRESENT;
};
/** Unpack the user namespace; undefined when none was ever supplied. */
const decodeNamespace = (namespace) => namespace[2] === INTERNAL_NAMESPACE_MARKER_MISSING ? void 0 : namespace[1];
// Convenience encoders for option-array and single-argument call shapes.
const namespaceForOpts = (aggregateName, opts) => encodeNamespace(aggregateName, namespaceFromOpts(opts));
const namespaceForArg = (aggregateName, args) => encodeNamespace(aggregateName, namespaceFromArg(args));
693
/**
 * Write data to be aggregated, and read aggregated data.
 */
var Aggregate = class {
	/** @param aggregateName - Name used to scope every B-tree namespace for this aggregate. */
	constructor(aggregateName) {
		this.aggregateName = aggregateName;
	}
	/** Count of items within the optional `bounds`/`namespace` carried by opts[0]. */
	async count(ctx, ...opts) {
		return (await aggregateBetweenHandler({ db: ctx.db }, {
			...boundsToPositions(opts[0]?.bounds),
			namespace: namespaceForOpts(this.aggregateName, opts)
		})).count;
	}
	/**
	 * Batched count: one result per query, in input order.
	 * Each query must be truthy (carrying bounds and/or namespace).
	 */
	async countBatch(ctx, queries) {
		return (await aggregateBetweenBatchHandler({ db: ctx.db }, { queries: queries.map((query) => {
			if (!query) throw new Error("You must pass bounds and/or namespace");
			return {
				...boundsToPositions(query.bounds),
				namespace: namespaceForArg(this.aggregateName, query)
			};
		}) })).map((result) => result.count);
	}
	/** Sum of the items' summands within the optional `bounds`/`namespace`. */
	async sum(ctx, ...opts) {
		return (await aggregateBetweenHandler({ db: ctx.db }, {
			...boundsToPositions(opts[0]?.bounds),
			namespace: namespaceForOpts(this.aggregateName, opts)
		})).sum;
	}
	/** Batched sum; same query shape and validation as countBatch. */
	async sumBatch(ctx, queries) {
		return (await aggregateBetweenBatchHandler({ db: ctx.db }, { queries: queries.map((query) => {
			if (!query) throw new Error("You must pass bounds and/or namespace");
			return {
				...boundsToPositions(query.bounds),
				namespace: namespaceForArg(this.aggregateName, query)
			};
		}) })).map((result) => result.sum);
	}
	/**
	 * Item at `offset` within the bounds. A negative offset is routed to
	 * atNegativeOffsetHandler as (-offset - 1); non-negative offsets go to
	 * atOffsetHandler unchanged.
	 */
	async at(ctx, offset, ...opts) {
		const encodedNamespace = namespaceForOpts(this.aggregateName, opts);
		return btreeItemToAggregateItem(offset < 0 ? await atNegativeOffsetHandler({ db: ctx.db }, {
			...boundsToPositions(opts[0]?.bounds),
			namespace: encodedNamespace,
			offset: -offset - 1
		}) : await atOffsetHandler({ db: ctx.db }, {
			...boundsToPositions(opts[0]?.bounds),
			namespace: encodedNamespace,
			offset
		}));
	}
	/** Batched at(): each query supplies bounds, optional namespace, and an offset. */
	async atBatch(ctx, queries) {
		return (await atOffsetBatchHandler({ db: ctx.db }, { queries: queries.map((query) => ({
			...boundsToPositions(query.bounds),
			namespace: namespaceForArg(this.aggregateName, query),
			offset: query.offset
		})) })).map(btreeItemToAggregateItem);
	}
	/**
	 * Rank of `key` within the bounds. With order "desc" it delegates to
	 * offsetUntilHandler using an inclusive upper-bound position; otherwise it
	 * delegates to offsetHandler using an inclusive lower-bound position.
	 */
	async indexOf(ctx, key, ...opts) {
		const { k1, k2 } = boundsToPositions(opts[0]?.bounds);
		const namespace = namespaceForOpts(this.aggregateName, opts);
		if (opts[0]?.order === "desc") return offsetUntilHandler({ db: ctx.db }, {
			k2,
			key: boundToPosition("upper", {
				id: opts[0]?.id,
				inclusive: true,
				key
			}),
			namespace
		});
		return offsetHandler({ db: ctx.db }, {
			k1,
			key: boundToPosition("lower", {
				id: opts[0]?.id,
				inclusive: true,
				key
			}),
			namespace
		});
	}
	/** Positional-argument wrapper around indexOf with ascending order. */
	async offsetOf(ctx, key, namespace, id, bounds) {
		return this.indexOf(ctx, key, {
			bounds,
			id,
			namespace,
			order: "asc"
		});
	}
	/** Positional-argument wrapper around indexOf with descending order. */
	async offsetUntil(ctx, key, namespace, id, bounds) {
		return this.indexOf(ctx, key, {
			bounds,
			id,
			namespace,
			order: "desc"
		});
	}
	/** First item of a 1-item ascending page, or null when the range is empty. */
	async min(ctx, ...opts) {
		const { page } = await this.paginate(ctx, {
			bounds: opts[0]?.bounds,
			// paginate() encodes the namespace itself, so pass the raw value here.
			namespace: namespaceFromOpts(opts),
			order: "asc",
			pageSize: 1
		});
		return page[0] ?? null;
	}
	/** First item of a 1-item descending page, or null when the range is empty. */
	async max(ctx, ...opts) {
		const { page } = await this.paginate(ctx, {
			bounds: opts[0]?.bounds,
			namespace: namespaceFromOpts(opts),
			order: "desc",
			pageSize: 1
		});
		return page[0] ?? null;
	}
	/** Item at a Math.random()-chosen offset within the bounds, or null if empty. */
	async random(ctx, ...opts) {
		const count = await this.count(ctx, ...opts);
		if (count === 0) return null;
		return this.at(ctx, Math.floor(Math.random() * count), ...opts);
	}
	/**
	 * One page of items. opts[0] may carry bounds, cursor, pageSize
	 * (default 100), namespace, and order (default "asc").
	 * Returns { cursor, isDone, page } with decoded aggregate items.
	 */
	async paginate(ctx, ...opts) {
		const result = await paginateHandler({ db: ctx.db }, {
			...boundsToPositions(opts[0]?.bounds),
			cursor: opts[0]?.cursor,
			limit: opts[0]?.pageSize ?? 100,
			namespace: namespaceForOpts(this.aggregateName, opts),
			order: opts[0]?.order ?? "asc"
		});
		return {
			cursor: result.cursor,
			isDone: result.isDone,
			page: result.page.map(btreeItemToAggregateItem)
		};
	}
	/** Async generator over all items, repeatedly calling paginate() until isDone. */
	async *iter(ctx, ...opts) {
		const bounds = opts[0]?.bounds;
		const namespace = namespaceFromOpts(opts);
		const order = opts[0]?.order ?? "asc";
		const pageSize = opts[0]?.pageSize ?? 100;
		let cursor;
		let isDone = false;
		while (!isDone) {
			const page = await this.paginate(ctx, {
				bounds,
				cursor,
				namespace,
				order,
				pageSize
			});
			for (const item of page.page) yield item;
			cursor = page.cursor;
			isDone = page.isDone;
		}
	}
	// Low-level write: insert (key, id) with an optional summand into the tree.
	async _insert(ctx, namespace, key, id, summand) {
		await insertHandler({ db: ctx.db }, {
			key: keyToPosition(key, id),
			namespace: namespaceForArg(this.aggregateName, { namespace }),
			summand,
			value: id
		});
	}
	// Low-level write: delete (key, id) from the tree.
	async _delete(ctx, namespace, key, id) {
		await deleteHandler({ db: ctx.db }, {
			key: keyToPosition(key, id),
			namespace: namespaceForArg(this.aggregateName, { namespace })
		});
	}
	// Low-level write: delete the current (namespace, key) entry, then insert the new one.
	async _replace(ctx, currentNamespace, currentKey, newNamespace, newKey, id, summand) {
		await deleteHandler({ db: ctx.db }, {
			key: keyToPosition(currentKey, id),
			namespace: namespaceForArg(this.aggregateName, { namespace: currentNamespace })
		});
		await insertHandler({ db: ctx.db }, {
			key: keyToPosition(newKey, id),
			namespace: namespaceForArg(this.aggregateName, { namespace: newNamespace }),
			summand,
			value: id
		});
	}
	// Idempotent insert: a replaceOrInsert from the slot onto itself.
	async _insertIfDoesNotExist(ctx, namespace, key, id, summand) {
		await this._replaceOrInsert(ctx, namespace, key, namespace, key, id, summand);
	}
	// Idempotent delete: swallows only the DELETE_MISSING_KEY ConvexError, rethrows anything else.
	async _deleteIfExists(ctx, namespace, key, id) {
		try {
			await this._delete(ctx, namespace, key, id);
		} catch (error) {
			if (error instanceof ConvexError && error.data?.code === "DELETE_MISSING_KEY") return;
			throw error;
		}
	}
	// Delete the current entry if present (tolerating DELETE_MISSING_KEY), then insert the new one.
	async _replaceOrInsert(ctx, currentNamespace, currentKey, newNamespace, newKey, id, summand) {
		try {
			await this._delete(ctx, currentNamespace, currentKey, id);
		} catch (error) {
			if (!(error instanceof ConvexError && error.data?.code === "DELETE_MISSING_KEY")) throw error;
		}
		await this._insert(ctx, newNamespace, newKey, id, summand);
	}
	/** Clear the tree for one namespace; optional maxNodeSize/rootLazy tuning. */
	async clear(ctx, ...opts) {
		await clearTree(ctx.db, {
			maxNodeSize: opts[0]?.maxNodeSize,
			namespace: namespaceForOpts(this.aggregateName, opts),
			rootLazy: opts[0]?.rootLazy
		});
	}
	/** Unset the root node's cached `aggregate` field for one namespace. */
	async makeRootLazy(ctx, namespace) {
		const tree = await getOrCreateTree(ctx.db, namespaceForArg(this.aggregateName, { namespace }));
		await ctx.db.patch(tree.root, { aggregate: void 0 });
	}
	/**
	 * One page of this aggregate's namespaces. Entries that are not
	 * internally-encoded tuples, or that belong to a different aggregate name,
	 * are skipped; the rest are decoded back to caller-supplied values.
	 */
	async paginateNamespaces(ctx, cursor, pageSize = 100) {
		const result = await paginateNamespacesHandler({ db: ctx.db }, {
			aggregateName: this.aggregateName,
			cursor,
			limit: pageSize
		});
		const page = [];
		for (const namespace of result.page) {
			if (!isInternalNamespace(namespace)) continue;
			if (namespace[0] !== this.aggregateName) continue;
			page.push(decodeNamespace(namespace));
		}
		return {
			cursor: result.cursor,
			isDone: result.isDone,
			page
		};
	}
	/** Async generator over all namespaces, paging until done. */
	async *iterNamespaces(ctx, pageSize = 100) {
		let cursor;
		let isDone = false;
		while (!isDone) {
			const page = await this.paginateNamespaces(ctx, cursor, pageSize);
			for (const namespace of page.page) yield namespace;
			cursor = page.cursor;
			isDone = page.isDone;
		}
	}
	/** Clear every discovered namespace, then clear the undefined (default) namespace. */
	async clearAll(ctx, opts) {
		for await (const namespace of this.iterNamespaces(ctx)) await this.clear(ctx, {
			...opts,
			namespace
		});
		await this.clear(ctx, {
			...opts,
			namespace: void 0
		});
	}
	/** Apply makeRootLazy to every discovered namespace. */
	async makeAllRootsLazy(ctx) {
		for await (const namespace of this.iterNamespaces(ctx)) await this.makeRootLazy(ctx, namespace);
	}
};
942
/**
 * Aggregate whose entries are supplied directly by the caller as
 * { key, id, sumValue, namespace? } argument objects rather than being derived
 * from table documents. Every method delegates to the corresponding
 * underscore helper on Aggregate.
 */
var DirectAggregate = class extends Aggregate {
	constructor(config) {
		super(config.name);
	}
	/** Insert one entry. */
	async insert(ctx, args) {
		const namespace = namespaceFromArg(args);
		await this._insert(ctx, namespace, args.key, args.id, args.sumValue);
	}
	/** Delete one entry. */
	async delete(ctx, args) {
		const namespace = namespaceFromArg(args);
		await this._delete(ctx, namespace, args.key, args.id);
	}
	/** Move an entry: remove currentItem's slot, then insert newItem's. */
	async replace(ctx, currentItem, newItem) {
		const currentNamespace = namespaceFromArg(currentItem);
		const newNamespace = namespaceFromArg(newItem);
		await this._replace(ctx, currentNamespace, currentItem.key, newNamespace, newItem.key, currentItem.id, newItem.sumValue);
	}
	/** Insert variant tolerant of the entry already existing. */
	async insertIfDoesNotExist(ctx, args) {
		const namespace = namespaceFromArg(args);
		await this._insertIfDoesNotExist(ctx, namespace, args.key, args.id, args.sumValue);
	}
	/** Delete variant tolerant of the entry being absent. */
	async deleteIfExists(ctx, args) {
		const namespace = namespaceFromArg(args);
		await this._deleteIfExists(ctx, namespace, args.key, args.id);
	}
	/** Replace variant tolerant of the current entry being absent. */
	async replaceOrInsert(ctx, currentItem, newItem) {
		const currentNamespace = namespaceFromArg(currentItem);
		const newNamespace = namespaceFromArg(newItem);
		await this._replaceOrInsert(ctx, currentNamespace, currentItem.key, newNamespace, newItem.key, currentItem.id, newItem.sumValue);
	}
};
965
/**
 * Aggregate derived from table documents: `options.sortKey(doc)` produces the
 * key, `options.sumValue?.(doc)` the optional summand, and
 * `options.namespace?.(doc)` the optional namespace for each document.
 */
var TableAggregate = class extends Aggregate {
	options;
	constructor(options) {
		super(options.name);
		this.options = options;
	}
	/** Insert a document's derived (namespace, key, id, sumValue) entry. */
	async insert(ctx, doc) {
		const namespace = this.options.namespace?.(doc);
		const key = this.options.sortKey(doc);
		const id = doc._id;
		const sumValue = this.options.sumValue?.(doc);
		await this._insert(ctx, namespace, key, id, sumValue);
	}
	/** Delete a document's derived entry. */
	async delete(ctx, doc) {
		const namespace = this.options.namespace?.(doc);
		const key = this.options.sortKey(doc);
		await this._delete(ctx, namespace, key, doc._id);
	}
	/** Move a document's entry from oldDoc's derived slot to newDoc's. */
	async replace(ctx, oldDoc, newDoc) {
		const oldNamespace = this.options.namespace?.(oldDoc);
		const oldKey = this.options.sortKey(oldDoc);
		const newNamespace = this.options.namespace?.(newDoc);
		const newKey = this.options.sortKey(newDoc);
		await this._replace(ctx, oldNamespace, oldKey, newNamespace, newKey, newDoc._id, this.options.sumValue?.(newDoc));
	}
	/** Insert variant tolerant of the entry already existing. */
	async insertIfDoesNotExist(ctx, doc) {
		const namespace = this.options.namespace?.(doc);
		const key = this.options.sortKey(doc);
		const id = doc._id;
		const sumValue = this.options.sumValue?.(doc);
		await this._insertIfDoesNotExist(ctx, namespace, key, id, sumValue);
	}
	/** Delete variant tolerant of the entry being absent. */
	async deleteIfExists(ctx, doc) {
		const namespace = this.options.namespace?.(doc);
		const key = this.options.sortKey(doc);
		await this._deleteIfExists(ctx, namespace, key, doc._id);
	}
	/** Replace variant tolerant of the old entry being absent. */
	async replaceOrInsert(ctx, oldDoc, newDoc) {
		const oldNamespace = this.options.namespace?.(oldDoc);
		const oldKey = this.options.sortKey(oldDoc);
		const newNamespace = this.options.namespace?.(newDoc);
		const newKey = this.options.sortKey(newDoc);
		await this._replaceOrInsert(ctx, oldNamespace, oldKey, newNamespace, newKey, newDoc._id, this.options.sumValue?.(newDoc));
	}
	/** indexOf using the document's derived key and namespace; opts may override. */
	async indexOfDoc(ctx, doc, opts) {
		const key = this.options.sortKey(doc);
		const namespace = this.options.namespace?.(doc);
		return this.indexOf(ctx, key, {
			namespace,
			...opts
		});
	}
	/** Change handler that mirrors insert/update/delete operations into the aggregate. */
	trigger() {
		return async (ctx, change) => {
			switch (change.operation) {
				case "insert":
					await this.insert(ctx, change.newDoc);
					break;
				case "update":
					await this.replace(ctx, change.oldDoc, change.newDoc);
					break;
				case "delete":
					await this.delete(ctx, change.oldDoc);
					break;
			}
		};
	}
	/** Like trigger(), but uses the absence-tolerant variants of each write. */
	idempotentTrigger() {
		return async (ctx, change) => {
			switch (change.operation) {
				case "insert":
					await this.insertIfDoesNotExist(ctx, change.newDoc);
					break;
				case "update":
					await this.replaceOrInsert(ctx, change.oldDoc, change.newDoc);
					break;
				case "delete":
					await this.deleteIfExists(ctx, change.oldDoc);
					break;
			}
		};
	}
};
1010
/**
 * Convert a raw B-tree item { k, s } into the public aggregate item shape
 * { id, key, sumValue } by decoding the position `k` via positionToKey.
 */
function btreeItemToAggregateItem(item) {
	const decoded = positionToKey(item.k);
	return {
		id: decoded.id,
		key: decoded.key,
		sumValue: item.s
	};
}
1018
/**
 * Extract the namespace from an argument object.
 * Returns undefined when the object carries no `namespace` property at all,
 * so that "no namespace" and "namespace: undefined" collapse to the same value.
 */
function namespaceFromArg(args) {
	return "namespace" in args ? args.namespace : void 0;
}
1021
/**
 * Extract the namespace from a rest-parameter `opts` array.
 * Returns undefined when no options object was passed at all; otherwise reads
 * `namespace` from the first options object.
 */
function namespaceFromOpts(opts) {
	return opts.length === 0 ? void 0 : opts[0].namespace;
}
1026
+
1027
+ //#endregion
1028
+ export { TableAggregate as n, aggregateStorageTables as r, DirectAggregate as t };