@instantdb/core 0.22.164 → 0.22.165

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. package/__tests__/src/infiniteQuery.e2e.test.ts +384 -0
  2. package/__tests__/src/simple.e2e.test.ts +0 -1
  3. package/__tests__/src/utils/e2e.ts +1 -1
  4. package/dist/commonjs/Reactor.d.ts +1 -1
  5. package/dist/commonjs/Reactor.js +3 -3
  6. package/dist/commonjs/Reactor.js.map +1 -1
  7. package/dist/commonjs/index.d.ts +23 -2
  8. package/dist/commonjs/index.d.ts.map +1 -1
  9. package/dist/commonjs/index.js +25 -1
  10. package/dist/commonjs/index.js.map +1 -1
  11. package/dist/commonjs/infiniteQuery.d.ts +26 -0
  12. package/dist/commonjs/infiniteQuery.d.ts.map +1 -0
  13. package/dist/commonjs/infiniteQuery.js +422 -0
  14. package/dist/commonjs/infiniteQuery.js.map +1 -0
  15. package/dist/commonjs/instaql.d.ts.map +1 -1
  16. package/dist/commonjs/instaql.js +18 -5
  17. package/dist/commonjs/instaql.js.map +1 -1
  18. package/dist/commonjs/queryTypes.d.ts +2 -2
  19. package/dist/commonjs/queryTypes.d.ts.map +1 -1
  20. package/dist/commonjs/queryTypes.js.map +1 -1
  21. package/dist/commonjs/utils/Deferred.d.ts +5 -4
  22. package/dist/commonjs/utils/Deferred.d.ts.map +1 -1
  23. package/dist/commonjs/utils/Deferred.js.map +1 -1
  24. package/dist/commonjs/utils/weakHash.d.ts.map +1 -1
  25. package/dist/commonjs/utils/weakHash.js +4 -0
  26. package/dist/commonjs/utils/weakHash.js.map +1 -1
  27. package/dist/esm/Reactor.d.ts +1 -1
  28. package/dist/esm/Reactor.js +1 -1
  29. package/dist/esm/Reactor.js.map +1 -1
  30. package/dist/esm/index.d.ts +23 -2
  31. package/dist/esm/index.d.ts.map +1 -1
  32. package/dist/esm/index.js +25 -0
  33. package/dist/esm/index.js.map +1 -1
  34. package/dist/esm/infiniteQuery.d.ts +26 -0
  35. package/dist/esm/infiniteQuery.d.ts.map +1 -0
  36. package/dist/esm/infiniteQuery.js +417 -0
  37. package/dist/esm/infiniteQuery.js.map +1 -0
  38. package/dist/esm/instaql.d.ts.map +1 -1
  39. package/dist/esm/instaql.js +18 -5
  40. package/dist/esm/instaql.js.map +1 -1
  41. package/dist/esm/queryTypes.d.ts +2 -2
  42. package/dist/esm/queryTypes.d.ts.map +1 -1
  43. package/dist/esm/queryTypes.js.map +1 -1
  44. package/dist/esm/utils/Deferred.d.ts +5 -4
  45. package/dist/esm/utils/Deferred.d.ts.map +1 -1
  46. package/dist/esm/utils/Deferred.js.map +1 -1
  47. package/dist/esm/utils/weakHash.d.ts.map +1 -1
  48. package/dist/esm/utils/weakHash.js +4 -0
  49. package/dist/esm/utils/weakHash.js.map +1 -1
  50. package/dist/standalone/index.js +1731 -1432
  51. package/dist/standalone/index.umd.cjs +3 -3
  52. package/package.json +2 -2
  53. package/src/Reactor.js +1 -1
  54. package/src/index.ts +49 -0
  55. package/src/infiniteQuery.ts +573 -0
  56. package/src/instaql.ts +25 -7
  57. package/src/queryTypes.ts +1 -2
  58. package/src/utils/{Deferred.js → Deferred.ts} +4 -4
  59. package/src/utils/weakHash.ts +4 -0
  60. package/vitest.config.ts +6 -0
@@ -0,0 +1,417 @@
1
+ import { coerceQuery, QueryValidationError, } from "./index.js";
2
+ import { assert } from "./utils/error.js";
3
+ // Example for {order: {value: "asc"}}
4
+ //
5
+ // 0
6
+ // <------------------|------------------------------------------------------>
7
+ // <- starter sub ->
8
+ //
9
+ // Bootstrap phase: until the limit (4 in this example) items are reached, the
10
+ // starter subscription is the only subscription and it writes to the forwardChunks map with the key PRE_BOOTSTRAP_CURSOR.
11
+ //
12
+ // When the limit is reached it automatically becomes a real forward chunk and has a definite start and end.
13
+ // A new reverse chunk gets added to watch for any new items at the start of the list.
14
+ //
15
+ // 0 1 2 3
16
+ // <------------------|------------------------------------------------------>
17
+ // <- starter sub ->
18
+ //
19
+ // ↓ BECOMES ↓
20
+ //
21
+ // 0 1 2 3
22
+ // <------------------|------------------------------------------------------>
23
+ // <-reverse chunk][forward chunk ]
24
+ //
25
+ // 0 1 2 3 4
26
+ // <------------------|------------------------------------------------------>
27
+ // <-reverse chunk][forward chunk ]
28
+ // When item 4 is added, the forward chunk subscription gets updated so that
29
+ // hasNextPage is `true`. This tells the user that a new page can be loaded.
30
+ //
31
+ // User clicks: loadNextPage
32
+ // 0 1 2 3 4
33
+ // <------------------|------------------------------------------------------>
34
+ // <-reverse chunk][ frozen forward chunk ][ new forward chunk ]
35
+ //
36
+ // More numbers get added
37
+ // 0 1 2 3 4 5 6 7 8
38
+ // <------------------|------------------------------------------------------>
39
+ // <-reverse chunk][ frozen forward chunk ][ forward chunk ] ^
40
+ // hasNextPage=true^
41
+ //
42
+ //
43
+ // User clicks: loadNextPage
44
+ //
45
+ // 0 1 2 3 4 5 6 7 8
46
+ // <------------------|------------------------------------------------------>
47
+ // <-reverse chunk][ frozen forward chunk ][ frozen forward chunk ][ new chunk
48
+ //
49
+ // The reverse chunks work in the same way as the forward chunks but the order in the query is reversed.
50
+ // When a reverse chunk receives an update it will check to see if more can be loaded and it will
51
+ // automatically freeze the chunk and add a new one. i.e. : works the same as if
52
+ // loadNextPage was automatically clicked when hasNextPage became true.
53
+ //
54
+ // Chunks are indexed by their starting point cursor, for forward chunks this is the "[" point.
55
+ // Their starting point cursor is inclusive in the query and exclusive from the following query
56
// Serialize a cursor so it can be used as a Map key.
const makeCursorKey = (cursor) => JSON.stringify(cursor);
// Inverse of makeCursorKey: recover the cursor from a Map key.
const parseCursorKey = (cursorKey) => JSON.parse(cursorKey);
// True once the chunk knows where it ends (the server reported an end cursor).
const chunkHasEndCursor = (chunk) => Boolean(chunk.endCursor);
61
// A next page can be loaded only when the newest (last) forward chunk
// reports that more rows exist beyond it.
const readCanLoadNextPage = (forwardChunks) => {
  if (forwardChunks.size === 0) {
    return false;
  }
  const lastChunk = Array.from(forwardChunks.values()).at(-1);
  return lastChunk?.hasMore || false;
};
67
// Key for the subscription map. The chunk maps are keyed by cursor alone,
// but the first forward and first reverse chunk share a starting cursor,
// so subscription keys also carry the direction to distinguish them.
const chunkSubKey = (direction, cursor) =>
  [direction, JSON.stringify(cursor)].join(':');
72
// Flip the direction of a single-key order clause,
// e.g. {value: "asc"} -> {value: "desc"}.
// Falls back to {serverCreatedAt: "asc"} when no order key is present.
// NOTE(review): the fallback direction is 'asc' even though this helper is
// only used for reversed (chunk-walking) queries — confirm this matches the
// server's default ordering semantics.
const reverseOrder = (order) => {
  const key = order ? Object.keys(order).at(0) : undefined;
  if (!key) {
    return { serverCreatedAt: 'asc' };
  }
  const flipped = order[key] === 'asc' ? 'desc' : 'asc';
  return { [key]: flipped };
};
88
// Flatten the two chunk maps into display order. Reverse chunks are stored
// newest-first relative to display order, so both the chunk list and each
// reverse chunk's rows are reversed before being prepended to the forward
// chunks' rows.
const normalizeChunks = (forwardChunks, reverseChunks) => {
  const reverseList = Array.from(reverseChunks.values()).slice().reverse();
  const forwardList = Array.from(forwardChunks.values());

  const chunks = [...reverseList, ...forwardList];
  const data = [
    ...reverseList.flatMap((chunk) => chunk.data.slice().reverse()),
    ...forwardList.flatMap((chunk) => chunk.data),
  ];
  return { chunks, data };
};
// Sentinel cursor under which the starter subscription's rows are stored
// before any real chunks exist.
const PRE_BOOTSTRAP_CURSOR = ['bootstrap', 'bootstrap', 'bootstrap', 1];
103
/**
 * Subscribe to an "infinite scroll" view over a single-entity query.
 *
 * The result list is maintained as an ordered series of chunks:
 *  - reverse chunks watch for rows sorting before the first loaded row,
 *  - forward chunks cover consecutive pages in query order.
 * Each chunk is backed by its own db.subscribeQuery subscription; once a
 * chunk's boundaries are known it is "frozen" into a cursor-bounded
 * subscription so later pages cannot shift it.
 *
 * @param db        InstantCoreDatabase instance.
 * @param fullQuery a ValidQuery containing exactly one entity.
 * @param cb        invoked with { data, canLoadNextPage } (or { error })
 *                  on every change.
 * @param opts      optional InstaQL options forwarded to each subscription.
 * @returns { unsubscribe, loadNextPage }
 */
export const subscribeInfiniteQuery = (db, fullQuery, cb, opts) => {
  const { entityName, entityQuery: query } = splitAndValidateQuery(fullQuery);
  const pageSize = query.$?.limit || 10;
  const entity = entityName;

  // Chunks keyed by their serialized start cursor.
  const forwardChunks = new Map();
  const reverseChunks = new Map();
  // Unsubscribe fns for every chunk subscription (the starter sub is
  // tracked separately in `starterUnsub`).
  const allUnsubs = new Map();

  let hasKickstarted = false;
  let isActive = true;
  let lastReverseAdvancedChunkKey = null;
  let starterUnsub = null;

  const sendError = (err) => {
    cb({ error: err, data: undefined, canLoadNextPage: false });
  };

  // Flatten all chunks into display order and notify the caller.
  const pushUpdate = () => {
    if (!isActive) return;

    const { chunks, data } = normalizeChunks(forwardChunks, reverseChunks);
    cb({
      data: { [entity]: data },
      // @ts-expect-error hidden debug variable
      chunks,
      canLoadNextPage: readCanLoadNextPage(forwardChunks),
    });
  };

  const setForwardChunk = (startCursor, chunk) => {
    forwardChunks.set(makeCursorKey(startCursor), chunk);
    pushUpdate();
  };

  const setReverseChunk = (startCursor, chunk) => {
    reverseChunks.set(makeCursorKey(startCursor), chunk);
    maybeAdvanceReverse();
    pushUpdate();
  };

  // Replace a reverse chunk's open-ended subscription with one bounded by
  // [startCursor, endCursor] so it no longer grows.
  const freezeReverse = (chunkKey, chunk) => {
    const startCursor = parseCursorKey(chunkKey);
    allUnsubs.get(chunkSubKey('reverse', startCursor))?.();

    const frozenSub = db.subscribeQuery(
      {
        [entity]: {
          ...query,
          $: {
            after: startCursor,
            before: chunk.endCursor,
            beforeInclusive: true,
            where: query.$?.where,
            fields: query.$?.fields,
            order: reverseOrder(query.$?.order),
          },
        },
      },
      (resp) => {
        if (resp.error) {
          return sendError(resp.error);
        }
        const rows = resp.data[entity];
        const pageInfo = resp.pageInfo[entity];
        assert(
          rows && pageInfo,
          'Expected query subscription to contain rows and pageInfo',
        );
        setReverseChunk(startCursor, {
          data: rows,
          status: 'frozen',
          hasMore: pageInfo.hasNextPage,
          endCursor: pageInfo.endCursor,
        });
      },
      opts,
    );
    allUnsubs.set(chunkSubKey('reverse', startCursor), frozenSub);
  };

  // Open a new reverse chunk watching (in reversed order, one page at a
  // time) for rows before `startCursor`.
  const pushNewReverse = (startCursor) => {
    const unsub = db.subscribeQuery(
      {
        [entity]: {
          ...query,
          $: {
            limit: pageSize,
            after: startCursor,
            where: query.$?.where,
            fields: query.$?.fields,
            order: reverseOrder(query.$?.order),
          },
        },
      },
      (resp) => {
        if (resp.error) {
          return sendError(resp.error);
        }
        const rows = resp.data[entity];
        const pageInfo = resp.pageInfo[entity];
        assert(rows && pageInfo, 'Expected rows and pageInfo');
        setReverseChunk(startCursor, {
          data: rows,
          status: 'bootstrapping',
          hasMore: pageInfo.hasNextPage,
          endCursor: pageInfo.endCursor,
        });
      },
      opts,
    );
    allUnsubs.set(chunkSubKey('reverse', startCursor), unsub);
  };

  // Open a new forward chunk starting at `startCursor`.
  const pushNewForward = (startCursor, afterInclusive = false) => {
    const unsub = db.subscribeQuery(
      {
        [entity]: {
          ...query,
          $: {
            limit: pageSize,
            after: startCursor,
            afterInclusive,
            where: query.$?.where,
            fields: query.$?.fields,
            order: query.$?.order,
          },
        },
      },
      (resp) => {
        if (resp.error) {
          return sendError(resp.error);
        }
        const rows = resp.data[entity];
        const pageInfo = resp.pageInfo[entity];
        assert(rows && pageInfo, 'Page info and rows');
        setForwardChunk(startCursor, {
          data: rows,
          status: 'bootstrapping',
          hasMore: pageInfo.hasNextPage,
          endCursor: pageInfo.endCursor,
          afterInclusive,
        });
      },
      opts,
    );
    allUnsubs.set(chunkSubKey('forward', startCursor), unsub);
  };

  // Pin a forward chunk to its known [startCursor, endCursor] range.
  const freezeForward = (startCursor) => {
    const key = makeCursorKey(startCursor);
    allUnsubs.get(chunkSubKey('forward', startCursor))?.();

    const chunk = forwardChunks.get(key);
    if (!chunk?.endCursor) return;

    const frozenSub = db.subscribeQuery(
      {
        [entity]: {
          ...query,
          $: {
            after: startCursor,
            afterInclusive: chunk.afterInclusive,
            before: chunk.endCursor,
            beforeInclusive: true,
            where: query.$?.where,
            fields: query.$?.fields,
            order: query.$?.order,
          },
        },
      },
      (resp) => {
        if (resp.error) {
          return sendError(resp.error);
        }
        const rows = resp.data[entity];
        const pageInfo = resp.pageInfo[entity];
        assert(rows && pageInfo, 'Expected rows and pageInfo');
        setForwardChunk(startCursor, {
          data: rows,
          status: 'frozen',
          hasMore: pageInfo.hasNextPage,
          endCursor: pageInfo.endCursor,
          afterInclusive: chunk.afterInclusive,
        });
      },
      opts,
    );
    allUnsubs.set(chunkSubKey('forward', startCursor), frozenSub);
  };

  // When the newest reverse chunk fills up (hasMore), freeze it and start
  // a fresh reverse chunk — like loadNextPage, but automatic.
  const maybeAdvanceReverse = () => {
    const newestEntry = Array.from(reverseChunks.entries()).at(-1);
    if (!newestEntry) return;

    const [chunkKey, chunk] = newestEntry;
    // A chunk with more rows beyond it must also have an end cursor.
    if (!chunk?.hasMore) return;
    if (!chunkHasEndCursor(chunk)) return;

    // maybeAdvanceReverse can run repeatedly for updates to the same
    // chunk; this guard prevents adding the same new reverse frame twice.
    const advanceKey = `${chunkKey}:${makeCursorKey(chunk.endCursor)}`;
    if (advanceKey === lastReverseAdvancedChunkKey) return;
    lastReverseAdvancedChunkKey = advanceKey;

    freezeReverse(chunkKey, chunk);
    pushNewReverse(chunk.endCursor);
  };

  const loadNextPage = () => {
    const newestEntry = Array.from(forwardChunks.entries()).at(-1);
    if (!newestEntry) return;

    const [chunkKey, chunk] = newestEntry;
    // Without an end cursor there is nowhere to continue from.
    if (!chunk.endCursor) return;

    freezeForward(parseCursorKey(chunkKey));
    pushNewForward(chunk.endCursor);
  };

  // Starter subscription: serves data during bootstrap, then hands off to
  // real forward/reverse chunks once a full page exists.
  starterUnsub = db.subscribeQuery(
    {
      [entity]: {
        ...query,
        $: {
          limit: pageSize,
          where: query.$?.where,
          fields: query.$?.fields,
          order: query.$?.order,
        },
      },
    },
    async (starterData) => {
      if (hasKickstarted) return;
      if (starterData.error) {
        return sendError(starterData.error);
      }
      const pageInfo = starterData.pageInfo[entity];
      const rows = starterData?.data?.[entity];
      assert(rows && pageInfo, 'Expected rows and pageInfo');

      if (rows.length < pageSize) {
        // Less than a full page: no forward/reverse chunks needed yet —
        // keep serving straight from the starter sub, stored under the
        // sentinel pre-bootstrap key.
        setForwardChunk(PRE_BOOTSTRAP_CURSOR, {
          data: rows,
          status: 'pre-bootstrap',
        });
        return;
      }

      // An optimistic update can fill a page before the server has sent a
      // start cursor; in that case skip and wait for the server result.
      const initialForwardCursor = pageInfo.startCursor;
      if (!initialForwardCursor) {
        return;
      }
      forwardChunks.delete(makeCursorKey(PRE_BOOTSTRAP_CURSOR));

      pushNewForward(initialForwardCursor, true);
      pushNewReverse(pageInfo.startCursor);
      hasKickstarted = true;

      // Flush the initial bootstrap querysub data, because immediately
      // unsubscribing would never persist it for offline use in idb.
      await db._reactor.querySubs.flush();

      // The chunk subscriptions have taken over; retire the starter sub.
      starterUnsub?.();
      starterUnsub = null;
    },
    opts,
  );

  const unsubscribe = () => {
    if (!isActive) return;
    isActive = false;
    starterUnsub?.();
    starterUnsub = null;
    for (const unsub of allUnsubs.values()) {
      unsub?.();
    }
    allUnsubs.clear();
  };

  return {
    unsubscribe,
    loadNextPage,
  };
};
366
/**
 * Synchronously read whatever result is already cached for the first page
 * of an infinite query (e.g. to render before the subscription fires).
 *
 * @param db        InstantCoreDatabase instance.
 * @param fullQuery a ValidQuery with one entity, or a falsy value.
 * @param opts      optional InstaQL options; `ruleParams` is honored.
 * @returns { canLoadNextPage, data, error } — canLoadNextPage is always
 *   false here; chunk state only exists inside subscribeInfiniteQuery.
 * @throws QueryValidationError (via splitAndValidateQuery) for
 *   multi-entity queries.
 */
export const getInfiniteQueryInitialSnapshot = (db, fullQuery, opts) => {
  if (!fullQuery) {
    return {
      canLoadNextPage: false,
      data: undefined,
      error: undefined,
    };
  }
  const { entityName, entityQuery } = splitAndValidateQuery(fullQuery);
  const pageSize = entityQuery.$?.limit || 10;

  // Mirror the starter subscription's query exactly so the cache lookup
  // below resolves to the same stored query. (fullQuery is guaranteed
  // truthy here, so no null branch is needed.)
  let coercedQuery = coerceQuery({
    [entityName]: {
      ...entityQuery,
      $: {
        limit: pageSize,
        where: entityQuery.$?.where,
        fields: entityQuery.$?.fields,
        order: entityQuery.$?.order,
      },
    },
  });

  if (opts && 'ruleParams' in opts) {
    // Bug fix: previously this spread the raw `fullQuery`, discarding the
    // coerced, limit-normalized query built above — so snapshot lookups
    // for ruleParams queries used a different query shape than the one
    // subscribeInfiniteQuery's starter subscription registers.
    coercedQuery = {
      $$ruleParams: opts.ruleParams,
      ...coercedQuery,
    };
  }
  const queryResult = db._reactor.getPreviousResult(coercedQuery);

  return {
    canLoadNextPage: false,
    data: queryResult?.data || undefined,
    error: undefined,
  };
};
402
/**
 * Split a single-entity query into its entity name and per-entity query.
 * @throws QueryValidationError when the query has more or fewer than one
 *   entity, or the entity's query object is missing.
 * @param fullQuery a ValidQuery with one key (entity)
 */
const splitAndValidateQuery = (fullQuery) => {
  const entries = Object.entries(fullQuery);
  if (entries.length !== 1) {
    throw new QueryValidationError('subscribeInfiniteQuery expects exactly one entity');
  }

  const [entityName, entityQuery] = entries[0];
  if (!entityName || !entityQuery) {
    throw new QueryValidationError('No query provided for infinite query');
  }
  return { entityName, entityQuery };
};
417
+ //# sourceMappingURL=infiniteQuery.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"infiniteQuery.js","sourceRoot":"","sources":["../../src/infiniteQuery.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,WAAW,EACX,oBAAoB,GAGrB,MAAM,YAAY,CAAC;AAQpB,OAAO,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAE1C,wCAAwC;AACxC,EAAE;AACF,yBAAyB;AACzB,gFAAgF;AAChF,wCAAwC;AACxC,EAAE;AACF,gFAAgF;AAChF,4HAA4H;AAC5H,EAAE;AACF,8GAA8G;AAC9G,wFAAwF;AACxF,EAAE;AACF,wCAAwC;AACxC,gFAAgF;AAChF,yCAAyC;AACzC,EAAE;AACF,mCAAmC;AACnC,EAAE;AACF,wCAAwC;AACxC,gFAAgF;AAChF,0CAA0C;AAC1C,EAAE;AACF,6CAA6C;AAC7C,gFAAgF;AAChF,wCAAwC;AACxC,8EAA8E;AAC9E,8EAA8E;AAC9E,EAAE;AACF,8BAA8B;AAC9B,qDAAqD;AACrD,gFAAgF;AAChF,uEAAuE;AACvE,EAAE;AACF,2BAA2B;AAC3B,0EAA0E;AAC1E,gFAAgF;AAChF,0EAA0E;AAC1E,0EAA0E;AAC1E,EAAE;AACF,EAAE;AACF,8BAA8B;AAC9B,EAAE;AACF,6EAA6E;AAC7E,gFAAgF;AAChF,oFAAoF;AACpF,EAAE;AACF,0GAA0G;AAC1G,oGAAoG;AACpG,kFAAkF;AAClF,yEAAyE;AACzE,EAAE;AACF,iGAAiG;AACjG,iGAAiG;AAEjG,MAAM,aAAa,GAAG,CAAC,MAAc,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;AACjE,MAAM,cAAc,GAAG,CAAC,SAAiB,EAAE,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,SAAS,CAAW,CAAC;AAa9E,MAAM,iBAAiB,GAAG,CAAC,KAAY,EAA+B,EAAE;IACtE,OAAO,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;AAC3B,CAAC,CAAC;AAOF,MAAM,mBAAmB,GAAG,CAAC,aAAiC,EAAE,EAAE;IAChE,MAAM,aAAa,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC;IACzD,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAC7C,OAAO,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,CAAC,EAAE,OAAO,IAAI,KAAK,CAAC;AACnE,CAAC,CAAC;AAEF,0EAA0E;AAC1E,2EAA2E;AAC3E,6EAA6E;AAC7E,UAAU;AACV,MAAM,WAAW,GAAG,CAAC,SAAgC,EAAE,MAAc,EAAE,EAAE,CACvE,GAAG,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,CAAC;AAE3C,MAAM,YAAY,GAAG,CAInB,KAA6B,EACN,EAAE;IACzB,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,OAAO;YACL,eAAe,EAAE,KAAK;SACS,CAAC;IACpC,CAAC;IACD,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IACrC,IAAI,CAAC,GAAG,EAAE,CAAC;QACT,OAAO;YACL,eAAe,EAAE,KAAK;SACS,CAAC;IACpC,CAAC;IACD,OAAO;QACL,CAAC,GAAG,CAAC,EAAE,KAAK,CAAC,GAAyB,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK;KAC1C,CAAC;AAC7B,CAAC,CAAC;AAEF,MAAM,eAAe,GAAG,CACtB,aAA
iC,EACjC,aAAiC,EACC,EAAE;IACpC,MAAM,MAAM,GAAG;QACb,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE;QACvD,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC;KACtC,CAAC;IAEF,MAAM,IAAI,GAAG;QACX,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC;aAClC,KAAK,EAAE;aACP,OAAO,EAAE;aACT,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC,OAAO,EAAE,CAAC;QACnD,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC;KACrE,CAAC;IACF,OAAO,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;AAC1B,CAAC,CAAC;AAEF,MAAM,oBAAoB,GAAW,CAAC,WAAW,EAAE,WAAW,EAAE,WAAW,EAAE,CAAC,CAAC,CAAC;AAkBhF,MAAM,CAAC,MAAM,sBAAsB,GAAG,CAKpC,EAAyC,EACzC,SAAY,EACZ,EAAsE,EACtE,IAAqB,EACM,EAAE;IAC7B,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,KAAK,EAAE,GAAG,qBAAqB,CAAC,SAAS,CAAC,CAAC;IAE5E,MAAM,QAAQ,GAAG,KAAK,CAAC,CAAC,EAAE,KAAK,IAAI,EAAE,CAAC;IACtC,MAAM,MAAM,GAAG,UAAU,CAAC;IAE1B,MAAM,aAAa,GAAG,IAAI,GAAG,EAAiB,CAAC;IAC/C,MAAM,aAAa,GAAG,IAAI,GAAG,EAAiB,CAAC;IAC/C,yDAAyD;IACzD,MAAM,SAAS,GAAG,IAAI,GAAG,EAAsB,CAAC;IAEhD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,IAAI,QAAQ,GAAG,IAAI,CAAC;IACpB,IAAI,2BAA2B,GAAkB,IAAI,CAAC;IACtD,IAAI,YAAY,GAAwB,IAAI,CAAC;IAE7C,MAAM,SAAS,GAAG,CAAC,GAAwB,EAAE,EAAE;QAC7C,EAAE,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,SAAS,EAAE,eAAe,EAAE,KAAK,EAAE,CAAC,CAAC;IAC9D,CAAC,CAAC;IAEF,MAAM,UAAU,GAAG,GAAG,EAAE;QACtB,IAAI,CAAC,QAAQ;YAAE,OAAO;QAEtB,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,eAAe,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;QACvE,EAAE,CAAC;YACD,IAAI,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,IAAI,EAIrB;YACD,yCAAyC;YACzC,MAAM;YACN,eAAe,EAAE,mBAAmB,CAAC,aAAa,CAAC;SACpD,CAAC,CAAC;IACL,CAAC,CAAC;IAEF,MAAM,eAAe,GAAG,CAAC,WAAmB,EAAE,KAAY,EAAE,EAAE;QAC5D,aAAa,CAAC,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE,KAAK,CAAC,CAAC;QACrD,UAAU,EAAE,CAAC;IACf,CAAC,CAAC;IAEF,MAAM,eAAe,GAAG,CAAC,WAAmB,EAAE,KAAY,EAAE,EAAE;QAC5D,aAAa,CAAC,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE,KAAK,CAAC,CAAC;QACrD,mBAAmB,EAAE,CAAC;QACtB,UAAU,EAAE,CAAC;IACf,CAAC,CAAC;IAEF,MAAM,aAAa,GAAG,CAAC,QAAgB,EAAE,KAAyB,EAAE,EAAE
;QACpE,MAAM,WAAW,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAC;QAC7C,MAAM,UAAU,GAAG,SAAS,CAAC,GAAG,CAAC,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC,CAAC;QACtE,UAAU,EAAE,EAAE,CAAC;QAEf,MAAM,OAAO,GAAG,EAAE,CAAC,cAAc,CAC/B;YACE,CAAC,MAAM,CAAC,EAAE;gBACR,GAAG,KAAK;gBACR,CAAC,EAAE;oBACD,KAAK,EAAE,WAAW;oBAClB,MAAM,EAAE,KAAK,CAAC,SAAS;oBACvB,eAAe,EAAE,IAAI;oBACrB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;oBACrB,MAAM,EAAE,KAAK,CAAC,CAAC,EAAE,MAAM;oBACvB,KAAK,EAAE,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC;iBACpC;aACF;SACc,EACjB,CAAC,UAAU,EAAE,EAAE;YACb,IAAI,UAAU,CAAC,KAAK,EAAE,CAAC;gBACrB,OAAO,SAAS,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;YACrC,CAAC;YAED,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACrC,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YAC7C,MAAM,CACJ,IAAI,IAAI,QAAQ,EAChB,0DAA0D,CAC3D,CAAC;YAEF,eAAe,CAAC,WAAW,EAAE;gBAC3B,IAAI,EAAE,IAAI;gBACV,MAAM,EAAE,QAAQ;gBAChB,OAAO,EAAE,QAAQ,CAAC,WAAW;gBAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS;aAC9B,CAAC,CAAC;QACL,CAAC,EACD,IAAI,CACL,CAAC;QAEF,SAAS,CAAC,GAAG,CAAC,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,CAAC;IAC9D,CAAC,CAAC;IAEF,MAAM,cAAc,GAAG,CAAC,WAAmB,EAAE,EAAE;QAC7C,MAAM,QAAQ,GAAG,EAAE,CAAC,cAAc,CAChC;YACE,CAAC,MAAM,CAAC,EAAE;gBACR,GAAG,KAAK;gBACR,CAAC,EAAE;oBACD,KAAK,EAAE,QAAQ;oBACf,KAAK,EAAE,WAAW;oBAClB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;oBACrB,MAAM,EAAE,KAAK,CAAC,CAAC,EAAE,MAAM;oBACvB,KAAK,EAAE,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC;iBACpC;aACF;SACc,EACjB,CAAC,UAAU,EAAE,EAAE;YACb,IAAI,UAAU,CAAC,KAAK,EAAE,CAAC;gBACrB,OAAO,SAAS,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;YACrC,CAAC;YAED,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACrC,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YAC7C,MAAM,CAAC,IAAI,IAAI,QAAQ,EAAE,4BAA4B,CAAC,CAAC;YAEvD,eAAe,CAAC,WAAW,EAAE;gBAC3B,IAAI,EAAE,IAAI;gBACV,MAAM,EAAE,eAAe;gBACvB,OAAO,EAAE,QAAQ,CAAC,WAAW;gBAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS;aAC9B,CAAC,CAAC;QACL,CAAC,EACD,IAAI,CACL,CAAC;QAEF,SAAS,CAAC,GAAG,CAAC,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,EAAE,QAAQ,CAAC,CAAC;IAC/D,CAAC,CAAC;IAEF,MAAM,cAAc,GAAG,CAAC,WAAmB,EAAE,cAAc,GAAG,KAAK,EAAE,EAAE;QACrE,M
AAM,QAAQ,GAAG,EAAE,CAAC,cAAc,CAChC;YACE,CAAC,MAAM,CAAC,EAAE;gBACR,GAAG,KAAK;gBACR,CAAC,EAAE;oBACD,KAAK,EAAE,QAAQ;oBACf,KAAK,EAAE,WAAW;oBAClB,cAAc;oBACd,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;oBACrB,MAAM,EAAE,KAAK,CAAC,CAAC,EAAE,MAAM;oBACvB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;iBACtB;aACF;SACc,EACjB,CAAC,UAAU,EAAE,EAAE;YACb,IAAI,UAAU,CAAC,KAAK,EAAE,CAAC;gBACrB,OAAO,SAAS,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;YACrC,CAAC;YAED,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACrC,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YAC7C,MAAM,CAAC,IAAI,IAAI,QAAQ,EAAE,oBAAoB,CAAC,CAAC;YAE/C,eAAe,CAAC,WAAW,EAAE;gBAC3B,IAAI,EAAE,IAAI;gBACV,MAAM,EAAE,eAAe;gBACvB,OAAO,EAAE,QAAQ,CAAC,WAAW;gBAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,cAAc;aACf,CAAC,CAAC;QACL,CAAC,EACD,IAAI,CACL,CAAC;QAEF,SAAS,CAAC,GAAG,CAAC,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,EAAE,QAAQ,CAAC,CAAC;IAC/D,CAAC,CAAC;IAEF,MAAM,aAAa,GAAG,CAAC,WAAmB,EAAE,EAAE;QAC5C,MAAM,GAAG,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;QACvC,MAAM,UAAU,GAAG,SAAS,CAAC,GAAG,CAAC,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC,CAAC;QACtE,UAAU,EAAE,EAAE,CAAC;QAEf,MAAM,KAAK,GAAG,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE,SAAS;YAAE,OAAO;QAE9B,MAAM,OAAO,GAAG,EAAE,CAAC,cAAc,CAC/B;YACE,CAAC,MAAM,CAAC,EAAE;gBACR,GAAG,KAAK;gBACR,CAAC,EAAE;oBACD,KAAK,EAAE,WAAW;oBAClB,cAAc,EAAE,KAAK,CAAC,cAAc;oBACpC,MAAM,EAAE,KAAK,CAAC,SAAS;oBACvB,eAAe,EAAE,IAAI;oBACrB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;oBACrB,MAAM,EAAE,KAAK,CAAC,CAAC,EAAE,MAAM;oBACvB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;iBACtB;aACF;SACc,EACjB,CAAC,UAAU,EAAE,EAAE;YACb,IAAI,UAAU,CAAC,KAAK,EAAE,CAAC;gBACrB,OAAO,SAAS,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;YACrC,CAAC;YAED,MAAM,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACrC,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;YAC7C,MAAM,CAAC,IAAI,IAAI,QAAQ,EAAE,4BAA4B,CAAC,CAAC;YAEvD,eAAe,CAAC,WAAW,EAAE;gBAC3B,IAAI,EAAE,IAAI;gBACV,MAAM,EAAE,QAAQ;gBAChB,OAAO,EAAE,QAAQ,CAAC,WAAW;gBAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,cAAc,EAAE,KAAK,CAAC,cAAc;aACrC,CAAC,CAAC;QACL,CAAC,EACD,IAAI,CACL,CAAC;QAEF,SAAS,CAAC,
GAAG,CAAC,WAAW,CAAC,SAAS,EAAE,WAAW,CAAC,EAAE,OAAO,CAAC,CAAC;IAC9D,CAAC,CAAC;IAEF,kDAAkD;IAClD,kEAAkE;IAClE,8EAA8E;IAC9E,MAAM,mBAAmB,GAAG,GAAG,EAAE;QAC/B,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7D,IAAI,CAAC,SAAS;YAAE,OAAO;QAEvB,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAG,SAAS,CAAC;QAEpC,sDAAsD;QACtD,IAAI,CAAC,KAAK,EAAE,OAAO;YAAE,OAAO;QAC5B,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC;YAAE,OAAO;QAEtC,0EAA0E;QAC1E,uBAAuB;QACvB,wDAAwD;QACxD,MAAM,UAAU,GAAG,GAAG,QAAQ,IAAI,aAAa,CAAC,KAAK,CAAC,SAAS,CAAC,EAAE,CAAC;QACnE,IAAI,UAAU,IAAI,2BAA2B;YAAE,OAAO;QACtD,2BAA2B,GAAG,UAAU,CAAC;QAEzC,aAAa,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;QAC/B,cAAc,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IAClC,CAAC,CAAC;IAEF,MAAM,YAAY,GAAG,GAAG,EAAE;QACxB,MAAM,SAAS,GAAG,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;QAC7D,IAAI,CAAC,SAAS;YAAE,OAAO;QAEvB,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAG,SAAS,CAAC;QAEpC,+EAA+E;QAC/E,kBAAkB;QAClB,+BAA+B;QAC/B,IAAI,CAAC,KAAK,CAAC,SAAS;YAAE,OAAO;QAE7B,aAAa,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAC;QACxC,cAAc,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IAClC,CAAC,CAAC;IAEF,YAAY,GAAG,EAAE,CAAC,cAAc,CAC9B;QACE,CAAC,MAAM,CAAC,EAAE;YACR,GAAG,KAAK;YACR,CAAC,EAAE;gBACD,KAAK,EAAE,QAAQ;gBACf,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;gBACrB,MAAM,EAAE,KAAK,CAAC,CAAC,EAAE,MAAM;gBACvB,KAAK,EAAE,KAAK,CAAC,CAAC,EAAE,KAAK;aACtB;SACF;KACc,EACjB,KAAK,EAAE,WAAW,EAAE,EAAE;QACpB,IAAI,cAAc;YAAE,OAAO;QAC3B,IAAI,WAAW,CAAC,KAAK,EAAE,CAAC;YACtB,OAAO,SAAS,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;QACtC,CAAC;QACD,MAAM,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAE9C,MAAM,IAAI,GAAG,WAAW,EAAE,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC;QACzC,MAAM,CAAC,IAAI,IAAI,QAAQ,EAAE,4BAA4B,CAAC,CAAC;QAEvD,IAAI,IAAI,CAAC,MAAM,GAAG,QAAQ,EAAE,CAAC;YAC3B,iEAAiE;YACjE,qCAAqC;YACrC,qDAAqD;YACrD,eAAe,CAAC,oBAAoB,EAAE;gBACpC,IAAI,EAAE,IAAI;gBACV,MAAM,EAAE,eAAe;aACxB,CAAC,CAAC;YACH,OAAO;QACT,CAAC;QAED,uEAAuE;QACvE,kEAAkE;QAClE,uEAAuE;QACvE,iEAAiE;QACjE,UAAU;QACV,MAAM,oBAAoB,GAAG,QAAQ,CAAC,WAAW,CAAC;QAClD,IAAI,CAAC,oBAAoB,EA
AE,CAAC;YAC1B,OAAO;QACT,CAAC;QACD,aAAa,CAAC,MAAM,CAAC,aAAa,CAAC,oBAAoB,CAAC,CAAC,CAAC;QAE1D,cAAc,CAAC,oBAAoB,EAAE,IAAI,CAAC,CAAC;QAC3C,cAAc,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;QACrC,cAAc,GAAG,IAAI,CAAC;QAEtB,2CAA2C;QAC3C,0EAA0E;QAC1E,MAAM,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC;QAEpC,uCAAuC;QACvC,YAAY,EAAE,EAAE,CAAC;QACjB,YAAY,GAAG,IAAI,CAAC;IACtB,CAAC,EACD,IAAI,CACL,CAAC;IAEF,MAAM,WAAW,GAAG,GAAG,EAAE;QACvB,IAAI,CAAC,QAAQ;YAAE,OAAO;QACtB,QAAQ,GAAG,KAAK,CAAC;QACjB,YAAY,EAAE,EAAE,CAAC;QACjB,YAAY,GAAG,IAAI,CAAC;QACpB,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC;YACvC,KAAK,EAAE,EAAE,CAAC;QACZ,CAAC;QACD,SAAS,CAAC,KAAK,EAAE,CAAC;IACpB,CAAC,CAAC;IAEF,OAAO;QACL,WAAW;QACX,YAAY;KACb,CAAC;AACJ,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAAG,CAK7C,EAAyC,EACzC,SAAmB,EACnB,IAAqB,EAOjB,EAAE;IACN,IAAI,CAAC,SAAS,EAAE,CAAC;QACf,OAAO;YACL,eAAe,EAAE,KAAK;YACtB,IAAI,EAAE,SAAS;YACf,KAAK,EAAE,SAAS;SACjB,CAAC;IACJ,CAAC;IACD,MAAM,EAAE,UAAU,EAAE,WAAW,EAAE,GAAG,qBAAqB,CAAC,SAAS,CAAC,CAAC;IAErE,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,EAAE,KAAK,IAAI,EAAE,CAAC;IAE5C,IAAI,YAAY,GAAG,SAAS;QAC1B,CAAC,CAAC,WAAW,CAAC;YACV,CAAC,UAAU,CAAC,EAAE;gBACZ,GAAG,WAAW;gBACd,CAAC,EAAE;oBACD,KAAK,EAAE,QAAQ;oBACf,KAAK,EAAE,WAAW,CAAC,CAAC,EAAE,KAAK;oBAC3B,MAAM,EAAE,WAAW,CAAC,CAAC,EAAE,MAAM;oBAC7B,KAAK,EAAE,WAAW,CAAC,CAAC,EAAE,KAAK;iBAC5B;aACF;SACF,CAAC;QACJ,CAAC,CAAC,IAAI,CAAC;IAET,IAAI,IAAI,IAAI,YAAY,IAAI,IAAI,EAAE,CAAC;QACjC,YAAY,GAAG;YACb,YAAY,EAAE,IAAI,CAAC,UAAU;YAC7B,GAAG,SAAS;SACb,CAAC;IACJ,CAAC;IACD,MAAM,WAAW,GAAG,EAAE,CAAC,QAAQ,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEhE,OAAO;QACL,eAAe,EAAE,KAAK;QACtB,IAAI,EAAE,WAAW,EAAE,IAAI,IAAI,SAAS;QACpC,KAAK,EAAE,SAAS;KACjB,CAAC;AACJ,CAAC,CAAC;AAEF;;;GAGG;AACH,MAAM,qBAAqB,GAAG,CAAC,SAA8B,EAAE,EAAE;IAC/D,MAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;IAC3C,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC7B,MAAM,IAAI,oBAAoB,CAC5B,mDAAmD,CACpD,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,UAAU,EAAE,WAAW,CAAC,GAAG,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;IAE/D,IAAI,CAAC,UAAU,IAAI,CAAC,WAAW,EAAE,CAAC;
QAChC,MAAM,IAAI,oBAAoB,CAAC,sCAAsC,CAAC,CAAC;IACzE,CAAC;IACD,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,CAAC;AACrC,CAAC,CAAC","sourcesContent":["import {\n coerceQuery,\n QueryValidationError,\n type InstantCoreDatabase,\n type ValidQuery,\n} from './index.ts';\nimport {\n InstaQLResponse,\n InstaQLOptions,\n Cursor,\n Order,\n} from './queryTypes.ts';\nimport { InstantSchemaDef } from './schemaTypes.ts';\nimport { assert } from './utils/error.ts';\n\n// Example for {order: {value: \"asc\"}}\n//\n// 0\n// <------------------|------------------------------------------------------>\n// <- starter sub ->\n//\n// Bootstrap phase: until the limit (4 in this example) items are reached, the\n// starter subscription is the only subscription and it writes to the forwardChunks map with the key PRE_BOOTSTRAP_CURSOR.\n//\n// When the limit is reached it automatically becomes a real forward chunk and has a definite start and end.\n// A new reverse chunk gets added to watch for any new items at the start of the list.\n//\n// 0 1 2 3\n// <------------------|------------------------------------------------------>\n// <- starter sub ->\n//\n// ↓ BECOMES ↓\n//\n// 0 1 2 3\n// <------------------|------------------------------------------------------>\n// <-reverse chunk][forward chunk ]\n//\n// 0 1 2 3 4\n// <------------------|------------------------------------------------------>\n// <-reverse chunk][forward chunk ]\n// When item 4 is added, the forward chunk subscription gets updated so that\n// hasNextPage is `true`. 
This tells the user that a new page can be loaded.\n//\n// User clicks: loadNextPage\n// 0 1 2 3 4\n// <------------------|------------------------------------------------------>\n// <-reverse chunk][ frozen forward chunk ][ new forward chunk ]\n//\n// More numbers get added\n// 0 1 2 3 4 5 6 7 8\n// <------------------|------------------------------------------------------>\n// <-reverse chunk][ frozen forward chunk ][ forward chunk ] ^\n// hasNextPage=true^\n//\n//\n// User clicks: loadNextPage\n//\n// 0 1 2 3 4 5 6 7 8\n// <------------------|------------------------------------------------------>\n// <-reverse chunk][ frozen forward chunk ][ frozen forward chunk ][ new chunk\n//\n// The reverse chunks work in the same way as the forward chunks but the order in the query is reversed.\n// When a reverse chunks recieves an update it will check to see if more can be loaded and it will\n// automatically freeze the chunk and add a new one. i.e. : works the same as if\n// loadNextPage was automatically clicked when hasNextPage became true.\n//\n// Chunks are indexed by their starting point cursor, for forward chunks this is the \"[\" point.\n// Their starting point cursor is inclusive in the query and exclusive from the following query\n\nconst makeCursorKey = (cursor: Cursor) => JSON.stringify(cursor);\nconst parseCursorKey = (cursorKey: string) => JSON.parse(cursorKey) as Cursor;\n\nexport type ChunkStatus = 'pre-bootstrap' | 'bootstrapping' | 'frozen';\ntype Chunk = {\n status: ChunkStatus;\n data: any[];\n hasMore?: boolean;\n endCursor?: Cursor;\n afterInclusive?: boolean;\n};\n\ntype ChunkWithEndCursor = Chunk & { endCursor: Cursor };\n\nconst chunkHasEndCursor = (chunk: Chunk): chunk is ChunkWithEndCursor => {\n return !!chunk.endCursor;\n};\n\nexport interface InfiniteQuerySubscription {\n unsubscribe: () => void;\n loadNextPage: () => void;\n}\n\nconst readCanLoadNextPage = (forwardChunks: Map<string, Chunk>) => {\n const chunksInOrder = 
Array.from(forwardChunks.values());\n if (chunksInOrder.length === 0) return false;\n return chunksInOrder[chunksInOrder.length - 1]?.hasMore || false;\n};\n\n// Chunk sub key is used to create keys to keep track of the subscriptions\n// while the chunk maps are keyed by the cursor, here we disinguish between\n// forward and reverse because the first 2 chunks will have the same starting\n// cursor.\nconst chunkSubKey = (direction: 'forward' | 'reverse', cursor: Cursor) =>\n `${direction}:${JSON.stringify(cursor)}`;\n\nconst reverseOrder = <\n Schema extends InstantSchemaDef<any, any, any>,\n Entity extends keyof Schema['entities'],\n>(\n order?: Order<Schema, Entity>,\n): Order<Schema, Entity> => {\n if (!order) {\n return {\n serverCreatedAt: 'asc',\n } satisfies Order<Schema, Entity>;\n }\n const key = Object.keys(order).at(0);\n if (!key) {\n return {\n serverCreatedAt: 'asc',\n } satisfies Order<Schema, Entity>;\n }\n return {\n [key]: order[key as keyof typeof order] === 'asc' ? 'desc' : 'asc',\n } as Order<Schema, Entity>;\n};\n\nconst normalizeChunks = (\n forwardChunks: Map<string, Chunk>,\n reverseChunks: Map<string, Chunk>,\n): { chunks: Chunk[]; data: any[] } => {\n const chunks = [\n ...Array.from(reverseChunks.values()).slice().reverse(),\n ...Array.from(forwardChunks.values()),\n ];\n\n const data = [\n ...Array.from(reverseChunks.values())\n .slice()\n .reverse()\n .flatMap((chunk) => chunk.data.slice().reverse()),\n ...Array.from(forwardChunks.values()).flatMap((chunk) => chunk.data),\n ];\n return { chunks, data };\n};\n\nconst PRE_BOOTSTRAP_CURSOR: Cursor = ['bootstrap', 'bootstrap', 'bootstrap', 1];\n\nexport type InfiniteQueryCallbackResponse<\n Schema extends InstantSchemaDef<any, any, any>,\n Query extends Record<string, any>,\n UseDatesLocal extends boolean,\n> =\n | {\n error: { message: string };\n data: undefined;\n canLoadNextPage: boolean;\n }\n | {\n error: undefined;\n data: InstaQLResponse<Schema, Query, UseDatesLocal>;\n 
canLoadNextPage: boolean;\n };\n\nexport const subscribeInfiniteQuery = <\n Schema extends InstantSchemaDef<any, any, any>,\n Q extends ValidQuery<Q, Schema>,\n UseDates extends boolean,\n>(\n db: InstantCoreDatabase<Schema, UseDates>,\n fullQuery: Q,\n cb: (resp: InfiniteQueryCallbackResponse<Schema, Q, UseDates>) => void,\n opts?: InstaQLOptions,\n): InfiniteQuerySubscription => {\n const { entityName, entityQuery: query } = splitAndValidateQuery(fullQuery);\n\n const pageSize = query.$?.limit || 10;\n const entity = entityName;\n\n const forwardChunks = new Map<string, Chunk>();\n const reverseChunks = new Map<string, Chunk>();\n // Keeps track of all subscriptions (besides starter sub)\n const allUnsubs = new Map<string, () => void>();\n\n let hasKickstarted = false;\n let isActive = true;\n let lastReverseAdvancedChunkKey: string | null = null;\n let starterUnsub: (() => void) | null = null;\n\n const sendError = (err: { message: string }) => {\n cb({ error: err, data: undefined, canLoadNextPage: false });\n };\n\n const pushUpdate = () => {\n if (!isActive) return;\n\n const { chunks, data } = normalizeChunks(forwardChunks, reverseChunks);\n cb({\n data: { [entity]: data } as InstaQLResponse<\n Schema,\n typeof query,\n UseDates\n >,\n // @ts-expect-error hidden debug variable\n chunks,\n canLoadNextPage: readCanLoadNextPage(forwardChunks),\n });\n };\n\n const setForwardChunk = (startCursor: Cursor, chunk: Chunk) => {\n forwardChunks.set(makeCursorKey(startCursor), chunk);\n pushUpdate();\n };\n\n const setReverseChunk = (startCursor: Cursor, chunk: Chunk) => {\n reverseChunks.set(makeCursorKey(startCursor), chunk);\n maybeAdvanceReverse();\n pushUpdate();\n };\n\n const freezeReverse = (chunkKey: string, chunk: ChunkWithEndCursor) => {\n const startCursor = parseCursorKey(chunkKey);\n const currentSub = allUnsubs.get(chunkSubKey('reverse', startCursor));\n currentSub?.();\n\n const nextSub = db.subscribeQuery(\n {\n [entity]: {\n ...query,\n $: {\n after: 
startCursor,\n before: chunk.endCursor,\n beforeInclusive: true,\n where: query.$?.where,\n fields: query.$?.fields,\n order: reverseOrder(query.$?.order),\n },\n },\n } as unknown as Q,\n (frozenData) => {\n if (frozenData.error) {\n return sendError(frozenData.error);\n }\n\n const rows = frozenData.data[entity];\n const pageInfo = frozenData.pageInfo[entity];\n assert(\n rows && pageInfo,\n 'Expected query subscription to contain rows and pageInfo',\n );\n\n setReverseChunk(startCursor, {\n data: rows,\n status: 'frozen',\n hasMore: pageInfo.hasNextPage,\n endCursor: pageInfo.endCursor,\n });\n },\n opts,\n );\n\n allUnsubs.set(chunkSubKey('reverse', startCursor), nextSub);\n };\n\n const pushNewReverse = (startCursor: Cursor) => {\n const querySub = db.subscribeQuery(\n {\n [entity]: {\n ...query,\n $: {\n limit: pageSize,\n after: startCursor,\n where: query.$?.where,\n fields: query.$?.fields,\n order: reverseOrder(query.$?.order),\n },\n },\n } as unknown as Q,\n (windowData) => {\n if (windowData.error) {\n return sendError(windowData.error);\n }\n\n const rows = windowData.data[entity];\n const pageInfo = windowData.pageInfo[entity];\n assert(rows && pageInfo, 'Expected rows and pageInfo');\n\n setReverseChunk(startCursor, {\n data: rows,\n status: 'bootstrapping',\n hasMore: pageInfo.hasNextPage,\n endCursor: pageInfo.endCursor,\n });\n },\n opts,\n );\n\n allUnsubs.set(chunkSubKey('reverse', startCursor), querySub);\n };\n\n const pushNewForward = (startCursor: Cursor, afterInclusive = false) => {\n const querySub = db.subscribeQuery(\n {\n [entity]: {\n ...query,\n $: {\n limit: pageSize,\n after: startCursor,\n afterInclusive,\n where: query.$?.where,\n fields: query.$?.fields,\n order: query.$?.order,\n },\n },\n } as unknown as Q,\n (windowData) => {\n if (windowData.error) {\n return sendError(windowData.error);\n }\n\n const rows = windowData.data[entity];\n const pageInfo = windowData.pageInfo[entity];\n assert(rows && pageInfo, 'Page info and 
rows');\n\n setForwardChunk(startCursor, {\n data: rows,\n status: 'bootstrapping',\n hasMore: pageInfo.hasNextPage,\n endCursor: pageInfo.endCursor,\n afterInclusive,\n });\n },\n opts,\n );\n\n allUnsubs.set(chunkSubKey('forward', startCursor), querySub);\n };\n\n const freezeForward = (startCursor: Cursor) => {\n const key = makeCursorKey(startCursor);\n const currentSub = allUnsubs.get(chunkSubKey('forward', startCursor));\n currentSub?.();\n\n const chunk = forwardChunks.get(key);\n if (!chunk?.endCursor) return;\n\n const nextSub = db.subscribeQuery(\n {\n [entity]: {\n ...query,\n $: {\n after: startCursor,\n afterInclusive: chunk.afterInclusive,\n before: chunk.endCursor,\n beforeInclusive: true,\n where: query.$?.where,\n fields: query.$?.fields,\n order: query.$?.order,\n },\n },\n } as unknown as Q,\n (frozenData) => {\n if (frozenData.error) {\n return sendError(frozenData.error);\n }\n\n const rows = frozenData.data[entity];\n const pageInfo = frozenData.pageInfo[entity];\n assert(rows && pageInfo, 'Expected rows and pageInfo');\n\n setForwardChunk(startCursor, {\n data: rows,\n status: 'frozen',\n hasMore: pageInfo.hasNextPage,\n endCursor: pageInfo.endCursor,\n afterInclusive: chunk.afterInclusive,\n });\n },\n opts,\n );\n\n allUnsubs.set(chunkSubKey('forward', startCursor), nextSub);\n };\n\n // Consider order: {val: \"asc\"} with pageItems = 4\n // A reverse chunk captures all the new items coming in before us.\n // If we hit 4 then we freeze the current chunk and create a new reverse chunk\n const maybeAdvanceReverse = () => {\n const tailEntry = Array.from(reverseChunks.entries()).at(-1);\n if (!tailEntry) return;\n\n const [chunkKey, chunk] = tailEntry;\n\n // If a chunk has more, then it must have an endCursor\n if (!chunk?.hasMore) return;\n if (!chunkHasEndCursor(chunk)) return;\n\n // maybeAdvanceReverse can run multiple times if multiple changes are made\n // to the reverse chunk\n // This prevents adding the same new reverse frame twice\n 
const advanceKey = `${chunkKey}:${makeCursorKey(chunk.endCursor)}`;\n if (advanceKey == lastReverseAdvancedChunkKey) return;\n lastReverseAdvancedChunkKey = advanceKey;\n\n freezeReverse(chunkKey, chunk);\n pushNewReverse(chunk.endCursor);\n };\n\n const loadNextPage = () => {\n const tailEntry = Array.from(forwardChunks.entries()).at(-1);\n if (!tailEntry) return;\n\n const [chunkKey, chunk] = tailEntry;\n\n // If the chunk has more items after it, it must have an end cursor, and we can\n // load more items\n // if (!chunk?.hasMore) return;\n if (!chunk.endCursor) return;\n\n freezeForward(parseCursorKey(chunkKey));\n pushNewForward(chunk.endCursor);\n };\n\n starterUnsub = db.subscribeQuery(\n {\n [entity]: {\n ...query,\n $: {\n limit: pageSize,\n where: query.$?.where,\n fields: query.$?.fields,\n order: query.$?.order,\n },\n },\n } as unknown as Q,\n async (starterData) => {\n if (hasKickstarted) return;\n if (starterData.error) {\n return sendError(starterData.error);\n }\n const pageInfo = starterData.pageInfo[entity];\n\n const rows = starterData?.data?.[entity];\n assert(rows && pageInfo, 'Expected rows and pageInfo');\n\n if (rows.length < pageSize) {\n // If the rows are less than the page size, then we don't need to\n // create forward and reverse chunks.\n // We just treat the starter query as a forward chunk\n setForwardChunk(PRE_BOOTSTRAP_CURSOR, {\n data: rows,\n status: 'pre-bootstrap',\n });\n return;\n }\n\n // Consider a query with no items; the server will return a result with\n // no start cursor. If we add {pageSize} optimistic updates we can\n // get here and still have no startCursor. 
By returning we are skipping\n // the optimistic update and just waiting for the result from the\n // server.\n const initialForwardCursor = pageInfo.startCursor;\n if (!initialForwardCursor) {\n return;\n }\n forwardChunks.delete(makeCursorKey(PRE_BOOTSTRAP_CURSOR));\n\n pushNewForward(initialForwardCursor, true);\n pushNewReverse(pageInfo.startCursor);\n hasKickstarted = true;\n\n // Flush the initial boostrap querysub data\n // because immediately unsubscribing will never save it for offline in idb\n await db._reactor.querySubs.flush();\n\n // Unsubscribe the starter subscription\n starterUnsub?.();\n starterUnsub = null;\n },\n opts,\n );\n\n const unsubscribe = () => {\n if (!isActive) return;\n isActive = false;\n starterUnsub?.();\n starterUnsub = null;\n for (const unsub of allUnsubs.values()) {\n unsub?.();\n }\n allUnsubs.clear();\n };\n\n return {\n unsubscribe,\n loadNextPage,\n };\n};\n\nexport const getInfiniteQueryInitialSnapshot = <\n Schema extends InstantSchemaDef<any, any, any>,\n Q extends ValidQuery<Q, Schema>,\n UseDates extends boolean,\n>(\n db: InstantCoreDatabase<Schema, UseDates>,\n fullQuery: Q | null,\n opts?: InstaQLOptions,\n):\n | InfiniteQueryCallbackResponse<Schema, Q, UseDates>\n | {\n canLoadNextPage: false;\n data: undefined;\n error: undefined;\n } => {\n if (!fullQuery) {\n return {\n canLoadNextPage: false,\n data: undefined,\n error: undefined,\n };\n }\n const { entityName, entityQuery } = splitAndValidateQuery(fullQuery);\n\n const pageSize = entityQuery.$?.limit || 10;\n\n let coercedQuery = fullQuery\n ? 
coerceQuery({\n [entityName]: {\n ...entityQuery,\n $: {\n limit: pageSize,\n where: entityQuery.$?.where,\n fields: entityQuery.$?.fields,\n order: entityQuery.$?.order,\n },\n },\n })\n : null;\n\n if (opts && 'ruleParams' in opts) {\n coercedQuery = {\n $$ruleParams: opts.ruleParams,\n ...fullQuery,\n };\n }\n const queryResult = db._reactor.getPreviousResult(coercedQuery);\n\n return {\n canLoadNextPage: false,\n data: queryResult?.data || undefined,\n error: undefined,\n };\n};\n\n/**\n * @throws QueryValidationError\n * @param fullQuery a ValidQuery with one key (entity)\n */\nconst splitAndValidateQuery = (fullQuery: Record<string, any>) => {\n const entityNames = Object.keys(fullQuery);\n if (entityNames.length !== 1) {\n throw new QueryValidationError(\n 'subscribeInfiniteQuery expects exactly one entity',\n );\n }\n\n const [entityName, entityQuery] = Object.entries(fullQuery)[0];\n\n if (!entityName || !entityQuery) {\n throw new QueryValidationError('No query provided for infinite query');\n }\n return { entityName, entityQuery };\n};\n"]}
@@ -1 +1 @@
1
- {"version":3,"file":"instaql.d.ts","sourceRoot":"","sources":["../../src/instaql.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,CAAC,MAAM,YAAY,CAAC;AAqlBhC,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,QAAQ,KAAA,UAa1E;AAyRD,MAAM,CAAC,OAAO,UAAU,KAAK,CAC3B,EACE,KAAK,EACL,UAAU,EACV,QAAQ,EACR,SAAS,GACV,EAAE;IACD,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC;IACf,UAAU,EAAE,CAAC,CAAC,UAAU,CAAC;IACzB,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,SAAS,CAAC,EAAE,GAAG,CAAC;CACjB,EACD,CAAC,KAAA;UAiBqB,GAAG;eAAa,GAAG;gBAAc,GAAG;EAU3D"}
1
+ {"version":3,"file":"instaql.d.ts","sourceRoot":"","sources":["../../src/instaql.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,CAAC,MAAM,YAAY,CAAC;AAqlBhC,wBAAgB,YAAY,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,IAAI,EAAE,MAAM,EAAE,GAAG,KAAA,EAAE,QAAQ,KAAA,UAa1E;AA2SD,MAAM,CAAC,OAAO,UAAU,KAAK,CAC3B,EACE,KAAK,EACL,UAAU,EACV,QAAQ,EACR,SAAS,GACV,EAAE;IACD,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC;IACf,UAAU,EAAE,CAAC,CAAC,UAAU,CAAC;IACzB,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,SAAS,CAAC,EAAE,GAAG,CAAC;CACjB,EACD,CAAC,KAAA;UAiBqB,GAAG;eAAa,GAAG;gBAAc,GAAG;EAU3D"}
@@ -386,17 +386,24 @@ function comparableDate(x) {
386
386
  }
387
387
  return new Date(x).getTime();
388
388
  }
389
- function isBefore(startCursor, orderAttr, direction, idVec) {
390
- const [c_e, _c_a, c_v, c_t] = startCursor;
391
- const compareVal = direction === 'desc' ? 1 : -1;
389
+ function compareToCursor(cursor, orderAttr, idVec) {
390
+ const [c_e, _c_a, c_v, c_t] = cursor;
392
391
  if (orderAttr['forward-identity']?.[2] === 'id') {
393
- return compareOrderTriples(idVec, [c_e, c_t], null) === compareVal;
392
+ return compareOrderTriples(idVec, [c_e, c_t], null);
394
393
  }
395
394
  const [e, v] = idVec;
396
395
  const dataType = orderAttr['checked-data-type'];
397
396
  const v_new = dataType === 'date' ? comparableDate(v) : v;
398
397
  const c_v_new = dataType === 'date' ? comparableDate(c_v) : c_v;
399
- return (compareOrderTriples([e, v_new], [c_e, c_v_new], dataType) === compareVal);
398
+ return compareOrderTriples([e, v_new], [c_e, c_v_new], dataType);
399
+ }
400
+ function isBefore(startCursor, orderAttr, direction, idVec) {
401
+ const cmp = compareToCursor(startCursor, orderAttr, idVec);
402
+ return direction === 'desc' ? cmp > 0 : cmp < 0;
403
+ }
404
+ function isAfter(endCursor, orderAttr, direction, idVec) {
405
+ const cmp = compareToCursor(endCursor, orderAttr, idVec);
406
+ return direction === 'desc' ? cmp < 0 : cmp > 0;
400
407
  }
401
408
  function orderAttrFromCursor(attrsStore, cursor) {
402
409
  const cursorAttrId = cursor[1];
@@ -442,6 +449,7 @@ function runDataloadAndReturnObjects(store, attrsStore, { etype, pageInfo, dq, f
442
449
  const direction = determineDirection(form);
443
450
  let idVecs = datalogQuery(store, dq);
444
451
  const startCursor = pageInfo?.['start-cursor'];
452
+ const endCursor = pageInfo?.['end-cursor'];
445
453
  const orderAttr = getOrderAttr(attrsStore, etype, startCursor, order);
446
454
  if (orderAttr && orderAttr?.['forward-identity']?.[2] !== 'id') {
447
455
  const isDate = orderAttr['checked-data-type'] === 'date';
@@ -476,6 +484,11 @@ function runDataloadAndReturnObjects(store, attrsStore, { etype, pageInfo, dq, f
476
484
  isBefore(startCursor, orderAttr, direction, idVec)) {
477
485
  continue;
478
486
  }
487
+ if (endCursor &&
488
+ orderAttr &&
489
+ isAfter(endCursor, orderAttr, direction, idVec)) {
490
+ continue;
491
+ }
479
492
  const obj = s.getAsObject(store, attrs, id);
480
493
  if (obj) {
481
494
  objects[id] = obj;