@tanstack/db 0.4.5 → 0.4.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. package/dist/cjs/collection/change-events.cjs +1 -1
  2. package/dist/cjs/collection/change-events.cjs.map +1 -1
  3. package/dist/cjs/collection/change-events.d.cts +1 -1
  4. package/dist/cjs/collection/index.cjs +11 -0
  5. package/dist/cjs/collection/index.cjs.map +1 -1
  6. package/dist/cjs/collection/index.d.cts +8 -1
  7. package/dist/cjs/collection/lifecycle.cjs +4 -1
  8. package/dist/cjs/collection/lifecycle.cjs.map +1 -1
  9. package/dist/cjs/collection/mutations.cjs +4 -4
  10. package/dist/cjs/collection/mutations.cjs.map +1 -1
  11. package/dist/cjs/collection/subscription.cjs +21 -1
  12. package/dist/cjs/collection/subscription.cjs.map +1 -1
  13. package/dist/cjs/collection/subscription.d.cts +4 -3
  14. package/dist/cjs/collection/sync.cjs +94 -71
  15. package/dist/cjs/collection/sync.cjs.map +1 -1
  16. package/dist/cjs/collection/sync.d.cts +9 -1
  17. package/dist/cjs/index.cjs +2 -0
  18. package/dist/cjs/index.cjs.map +1 -1
  19. package/dist/cjs/index.d.cts +2 -0
  20. package/dist/cjs/indexes/auto-index.cjs +4 -1
  21. package/dist/cjs/indexes/auto-index.cjs.map +1 -1
  22. package/dist/cjs/local-only.cjs +21 -2
  23. package/dist/cjs/local-only.cjs.map +1 -1
  24. package/dist/cjs/local-only.d.cts +64 -7
  25. package/dist/cjs/local-storage.cjs +71 -3
  26. package/dist/cjs/local-storage.cjs.map +1 -1
  27. package/dist/cjs/local-storage.d.cts +55 -2
  28. package/dist/cjs/query/compiler/expressions.cjs +19 -0
  29. package/dist/cjs/query/compiler/expressions.cjs.map +1 -1
  30. package/dist/cjs/query/compiler/expressions.d.cts +2 -1
  31. package/dist/cjs/query/compiler/order-by.cjs +2 -1
  32. package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
  33. package/dist/cjs/query/compiler/order-by.d.cts +2 -1
  34. package/dist/cjs/query/live/collection-subscriber.cjs +18 -8
  35. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
  36. package/dist/cjs/query/live/collection-subscriber.d.cts +1 -0
  37. package/dist/cjs/types.d.cts +11 -1
  38. package/dist/esm/collection/change-events.d.ts +1 -1
  39. package/dist/esm/collection/change-events.js +1 -1
  40. package/dist/esm/collection/change-events.js.map +1 -1
  41. package/dist/esm/collection/index.d.ts +8 -1
  42. package/dist/esm/collection/index.js +11 -0
  43. package/dist/esm/collection/index.js.map +1 -1
  44. package/dist/esm/collection/lifecycle.js +4 -1
  45. package/dist/esm/collection/lifecycle.js.map +1 -1
  46. package/dist/esm/collection/mutations.js +4 -4
  47. package/dist/esm/collection/mutations.js.map +1 -1
  48. package/dist/esm/collection/subscription.d.ts +4 -3
  49. package/dist/esm/collection/subscription.js +22 -2
  50. package/dist/esm/collection/subscription.js.map +1 -1
  51. package/dist/esm/collection/sync.d.ts +9 -1
  52. package/dist/esm/collection/sync.js +94 -71
  53. package/dist/esm/collection/sync.js.map +1 -1
  54. package/dist/esm/index.d.ts +2 -0
  55. package/dist/esm/index.js +2 -0
  56. package/dist/esm/index.js.map +1 -1
  57. package/dist/esm/indexes/auto-index.js +4 -1
  58. package/dist/esm/indexes/auto-index.js.map +1 -1
  59. package/dist/esm/local-only.d.ts +64 -7
  60. package/dist/esm/local-only.js +21 -2
  61. package/dist/esm/local-only.js.map +1 -1
  62. package/dist/esm/local-storage.d.ts +55 -2
  63. package/dist/esm/local-storage.js +72 -4
  64. package/dist/esm/local-storage.js.map +1 -1
  65. package/dist/esm/query/compiler/expressions.d.ts +2 -1
  66. package/dist/esm/query/compiler/expressions.js +19 -0
  67. package/dist/esm/query/compiler/expressions.js.map +1 -1
  68. package/dist/esm/query/compiler/order-by.d.ts +2 -1
  69. package/dist/esm/query/compiler/order-by.js +2 -1
  70. package/dist/esm/query/compiler/order-by.js.map +1 -1
  71. package/dist/esm/query/live/collection-subscriber.d.ts +1 -0
  72. package/dist/esm/query/live/collection-subscriber.js +19 -9
  73. package/dist/esm/query/live/collection-subscriber.js.map +1 -1
  74. package/dist/esm/types.d.ts +11 -1
  75. package/package.json +1 -1
  76. package/src/collection/change-events.ts +5 -2
  77. package/src/collection/index.ts +13 -0
  78. package/src/collection/lifecycle.ts +4 -1
  79. package/src/collection/mutations.ts +8 -4
  80. package/src/collection/subscription.ts +34 -4
  81. package/src/collection/sync.ts +147 -110
  82. package/src/index.ts +5 -0
  83. package/src/indexes/auto-index.ts +4 -1
  84. package/src/local-only.ts +119 -30
  85. package/src/local-storage.ts +170 -5
  86. package/src/query/compiler/expressions.ts +26 -1
  87. package/src/query/compiler/order-by.ts +3 -1
  88. package/src/query/live/collection-subscriber.ts +31 -10
  89. package/src/types.ts +13 -1
@@ -1 +1 @@
- [previous single-line minified source map for subscription.cjs]
+ [regenerated single-line minified source map for subscription.cjs; its embedded sourcesContent now matches the updated src/collection/subscription.ts]
@@ -1,4 +1,4 @@
- import { BasicExpression } from '../query/ir.js';
+ import { BasicExpression, OrderBy } from '../query/ir.js';
  import { IndexInterface } from '../indexes/base-index.js';
  import { ChangeMessage } from '../types.js';
  import { CollectionImpl } from './index.js';
@@ -7,8 +7,9 @@ type RequestSnapshotOptions = {
  optimizedOnly?: boolean;
  };
  type RequestLimitedSnapshotOptions = {
- minValue?: any;
+ orderBy: OrderBy;
  limit: number;
+ minValue?: any;
  };
  type CollectionSubscriptionOptions = {
  /** Pre-compiled expression for filtering changes */
@@ -44,7 +45,7 @@ export declare class CollectionSubscription {
  * It uses that range index to load the items in the order of the index.
  * Note: it does not send keys that have already been sent before.
  */
- requestLimitedSnapshot({ limit, minValue }: RequestLimitedSnapshotOptions): void;
+ requestLimitedSnapshot({ orderBy, limit, minValue, }: RequestLimitedSnapshotOptions): void;
  /**
  * Filters and flips changes for keys that have not been sent yet.
  * Deletes are filtered out for keys that have not been sent yet.
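The declaration change above makes `orderBy` a required field of the internal `RequestLimitedSnapshotOptions`, so callers of `requestLimitedSnapshot` now supply the ordering alongside `limit` and the optional `minValue` cursor. A minimal sketch of the new shape follows; the `OrderBy` stub is approximated from how `orderBy[0].expression` and `compareOptions.direction` are used elsewhere in this diff, and the concrete values are hypothetical:

// Illustrative only: mirrors the 0.4.7 declaration above, not a verbatim copy of the package types.
type OrderBy = Array<{ expression: unknown; compareOptions: { direction: "asc" | "desc" } }>

type RequestLimitedSnapshotOptions = {
  orderBy: OrderBy // newly required in 0.4.7; also forwarded to the sync layer when more rows are needed
  limit: number
  minValue?: any // cursor: only rows after this value are requested
}

// 0.4.5: subscription.requestLimitedSnapshot({ limit: 50, minValue: lastSeen })
// 0.4.7: subscription.requestLimitedSnapshot({ orderBy, limit: 50, minValue: lastSeen })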
@@ -9,6 +9,7 @@ class CollectionSyncManager {
  constructor(config, id) {
  this.preloadPromise = null;
  this.syncCleanupFn = null;
+ this.syncOnLoadMoreFn = null;
  this.config = config;
  this.id = id;
  }
@@ -22,85 +23,87 @@ class CollectionSyncManager {
  * This is called when the collection is first accessed or preloaded
  */
  startSync() {
- const state = this.state;
  if (this.lifecycle.status !== `idle` && this.lifecycle.status !== `cleaned-up`) {
  return;
  }
  this.lifecycle.setStatus(`loading`);
  try {
- const cleanupFn = this.config.sync.sync({
- collection: this.collection,
- begin: () => {
- state.pendingSyncedTransactions.push({
- committed: false,
- operations: [],
- deletedKeys: /* @__PURE__ */ new Set()
- });
- },
- write: (messageWithoutKey) => {
- const pendingTransaction = state.pendingSyncedTransactions[state.pendingSyncedTransactions.length - 1];
- if (!pendingTransaction) {
- throw new errors.NoPendingSyncTransactionWriteError();
- }
- if (pendingTransaction.committed) {
- throw new errors.SyncTransactionAlreadyCommittedWriteError();
- }
- const key = this.config.getKey(messageWithoutKey.value);
- let messageType = messageWithoutKey.type;
- if (messageWithoutKey.type === `insert`) {
- const insertingIntoExistingSynced = state.syncedData.has(key);
- const hasPendingDeleteForKey = pendingTransaction.deletedKeys.has(key);
- const isTruncateTransaction = pendingTransaction.truncate === true;
- if (insertingIntoExistingSynced && !hasPendingDeleteForKey && !isTruncateTransaction) {
- const existingValue = state.syncedData.get(key);
- if (existingValue !== void 0 && utils.deepEquals(existingValue, messageWithoutKey.value)) {
- messageType = `update`;
- } else {
- throw new errors.DuplicateKeySyncError(key, this.id);
+ const syncRes = normalizeSyncFnResult(
+ this.config.sync.sync({
+ collection: this.collection,
+ begin: () => {
+ this.state.pendingSyncedTransactions.push({
+ committed: false,
+ operations: [],
+ deletedKeys: /* @__PURE__ */ new Set()
+ });
+ },
+ write: (messageWithoutKey) => {
+ const pendingTransaction = this.state.pendingSyncedTransactions[this.state.pendingSyncedTransactions.length - 1];
+ if (!pendingTransaction) {
+ throw new errors.NoPendingSyncTransactionWriteError();
+ }
+ if (pendingTransaction.committed) {
+ throw new errors.SyncTransactionAlreadyCommittedWriteError();
+ }
+ const key = this.config.getKey(messageWithoutKey.value);
+ let messageType = messageWithoutKey.type;
+ if (messageWithoutKey.type === `insert`) {
+ const insertingIntoExistingSynced = this.state.syncedData.has(key);
+ const hasPendingDeleteForKey = pendingTransaction.deletedKeys.has(key);
+ const isTruncateTransaction = pendingTransaction.truncate === true;
+ if (insertingIntoExistingSynced && !hasPendingDeleteForKey && !isTruncateTransaction) {
+ const existingValue = this.state.syncedData.get(key);
+ if (existingValue !== void 0 && utils.deepEquals(existingValue, messageWithoutKey.value)) {
+ messageType = `update`;
+ } else {
+ throw new errors.DuplicateKeySyncError(key, this.id);
+ }
  }
  }
+ const message = {
+ ...messageWithoutKey,
+ type: messageType,
+ key
+ };
+ pendingTransaction.operations.push(message);
+ if (messageType === `delete`) {
+ pendingTransaction.deletedKeys.add(key);
+ }
+ },
+ commit: () => {
+ const pendingTransaction = this.state.pendingSyncedTransactions[this.state.pendingSyncedTransactions.length - 1];
+ if (!pendingTransaction) {
+ throw new errors.NoPendingSyncTransactionCommitError();
+ }
+ if (pendingTransaction.committed) {
+ throw new errors.SyncTransactionAlreadyCommittedError();
+ }
+ pendingTransaction.committed = true;
+ if (this.lifecycle.status === `loading`) {
+ this.lifecycle.setStatus(`initialCommit`);
+ }
+ this.state.commitPendingTransactions();
+ },
+ markReady: () => {
+ this.lifecycle.markReady();
+ },
+ truncate: () => {
+ const pendingTransaction = this.state.pendingSyncedTransactions[this.state.pendingSyncedTransactions.length - 1];
+ if (!pendingTransaction) {
+ throw new errors.NoPendingSyncTransactionWriteError();
+ }
+ if (pendingTransaction.committed) {
+ throw new errors.SyncTransactionAlreadyCommittedWriteError();
+ }
+ pendingTransaction.operations = [];
+ pendingTransaction.deletedKeys.clear();
+ pendingTransaction.truncate = true;
  }
- const message = {
- ...messageWithoutKey,
- type: messageType,
- key
- };
- pendingTransaction.operations.push(message);
- if (messageType === `delete`) {
- pendingTransaction.deletedKeys.add(key);
- }
- },
- commit: () => {
- const pendingTransaction = state.pendingSyncedTransactions[state.pendingSyncedTransactions.length - 1];
- if (!pendingTransaction) {
- throw new errors.NoPendingSyncTransactionCommitError();
- }
- if (pendingTransaction.committed) {
- throw new errors.SyncTransactionAlreadyCommittedError();
- }
- pendingTransaction.committed = true;
- if (this.lifecycle.status === `loading`) {
- this.lifecycle.setStatus(`initialCommit`);
- }
- state.commitPendingTransactions();
- },
- markReady: () => {
- this.lifecycle.markReady();
- },
- truncate: () => {
- const pendingTransaction = state.pendingSyncedTransactions[state.pendingSyncedTransactions.length - 1];
- if (!pendingTransaction) {
- throw new errors.NoPendingSyncTransactionWriteError();
- }
- if (pendingTransaction.committed) {
- throw new errors.SyncTransactionAlreadyCommittedWriteError();
- }
- pendingTransaction.operations = [];
- pendingTransaction.deletedKeys.clear();
- pendingTransaction.truncate = true;
- }
- });
- this.syncCleanupFn = typeof cleanupFn === `function` ? cleanupFn : null;
+ })
+ );
+ this.syncCleanupFn = syncRes?.cleanup ?? null;
+ this.syncOnLoadMoreFn = syncRes?.onLoadMore ?? null;
  } catch (error) {
  this.lifecycle.setStatus(`error`);
  throw error;
@@ -137,6 +140,17 @@ class CollectionSyncManager {
  });
  return this.preloadPromise;
  }
+ /**
+ * Requests the sync layer to load more data.
+ * @param options Options to control what data is being loaded
+ * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.
+ * If data loading is synchronous, the data is loaded when the method returns.
+ */
+ syncMore(options) {
+ if (this.syncOnLoadMoreFn) {
+ return this.syncOnLoadMoreFn(options);
+ }
+ }
  cleanup() {
  try {
  if (this.syncCleanupFn) {
@@ -158,5 +172,14 @@ class CollectionSyncManager {
  this.preloadPromise = null;
  }
  }
+ function normalizeSyncFnResult(result) {
+ if (typeof result === `function`) {
+ return { cleanup: result };
+ }
+ if (typeof result === `object`) {
+ return result;
+ }
+ return void 0;
+ }
  exports.CollectionSyncManager = CollectionSyncManager;
  //# sourceMappingURL=sync.cjs.map
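The rewritten `startSync` above no longer expects the sync function to return only a cleanup callback: its return value is passed through `normalizeSyncFnResult`, which accepts either a bare function (treated as `cleanup`) or an object carrying `cleanup` and `onLoadMore`, and the new `syncMore(options)` method simply delegates to that `onLoadMore` hook. A hedged sketch of a collection config using the new shape; `Todo`, `fetchAllRows`, and `fetchPage` are hypothetical stand-ins for an application's own data source, and only the option names visible in this diff are assumed:

import { createCollection } from "@tanstack/db"

type Todo = { id: number; text: string }

// Hypothetical data-source helpers so the sketch stands alone.
const fetchAllRows = (): Array<Todo> => []
const fetchPage = async (_opts: unknown): Promise<Array<Todo>> => []

export const todos = createCollection({
  getKey: (todo: Todo) => todo.id,
  sync: {
    sync: ({ begin, write, commit, markReady }) => {
      begin()
      for (const row of fetchAllRows()) write({ type: `insert`, value: row })
      commit()
      markReady()

      // 0.4.5 accepted `void` or a cleanup function here; 0.4.7 also accepts
      // an object, which normalizeSyncFnResult splits into cleanup/onLoadMore.
      return {
        cleanup: () => {
          // tear down any listeners opened above
        },
        // Called via CollectionSyncManager.syncMore({ where, limit, orderBy })
        // when a subscription asks the sync layer for more rows.
        onLoadMore: async (options) => {
          begin()
          for (const row of await fetchPage(options)) {
            write({ type: `insert`, value: row })
          }
          commit()
        },
      }
    },
  },
})

Per the comments in the new subscription source, the collection does not await `onLoadMore`; whatever rows it writes flow in through the usual begin/write/commit path when they arrive.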
@@ -1 +1 @@
- [previous single-line minified source map for sync.cjs]
+ [regenerated single-line minified source map for sync.cjs; its embedded sourcesContent now matches the updated src/collection/sync.ts]
@@ -1,5 +1,5 @@
  import { StandardSchemaV1 } from '@standard-schema/spec';
- import { CollectionConfig } from '../types.cjs';
+ import { CollectionConfig, OnLoadMoreOptions } from '../types.cjs';
  import { CollectionImpl } from './index.js';
  import { CollectionStateManager } from './state.cjs';
  import { CollectionLifecycleManager } from './lifecycle.cjs';
@@ -11,6 +11,7 @@ export declare class CollectionSyncManager<TOutput extends object = Record<strin
  private id;
  preloadPromise: Promise<void> | null;
  syncCleanupFn: (() => void) | null;
+ syncOnLoadMoreFn: ((options: OnLoadMoreOptions) => void | Promise<void>) | null;
  /**
  * Creates a new CollectionSyncManager instance
  */
@@ -30,5 +31,12 @@ export declare class CollectionSyncManager<TOutput extends object = Record<strin
  * Multiple concurrent calls will share the same promise
  */
  preload(): Promise<void>;
+ /**
+ * Requests the sync layer to load more data.
+ * @param options Options to control what data is being loaded
+ * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.
+ * If data loading is synchronous, the data is loaded when the method returns.
+ */
+ syncMore(options: OnLoadMoreOptions): void | Promise<void>;
  cleanup(): void;
  }
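The new `syncMore` declaration means the sync manager can be asked to fetch additional data on demand, with `syncOnLoadMoreFn` holding the sync layer's load-more handler. A minimal, illustrative sketch of a caller; how the manager is surfaced to applications is not shown in this diff, and the shape of `OnLoadMoreOptions` is stubbed out:

```ts
// Sketch only: OnLoadMoreOptions lives in the package's types, but its exact
// shape is not part of this diff, so a placeholder type is used here.
type OnLoadMoreOptions = Record<string, unknown>

declare const syncManager: {
  // Mirrors the new declaration: the call may complete synchronously (void)
  // or asynchronously (Promise<void>).
  syncMore: (options: OnLoadMoreOptions) => void | Promise<void>
}

async function loadMore(options: OnLoadMoreOptions): Promise<void> {
  // Awaiting covers both the synchronous and the asynchronous case.
  await syncManager.syncMore(options)
}
```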
@@ -1,5 +1,6 @@
  "use strict";
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+ const ir = require("./query/ir.cjs");
  const index = require("./collection/index.cjs");
  const SortedMap = require("./SortedMap.cjs");
  const transactions = require("./transactions.cjs");
@@ -15,6 +16,7 @@ const index$1 = require("./query/builder/index.cjs");
  const functions = require("./query/builder/functions.cjs");
  const index$2 = require("./query/compiler/index.cjs");
  const liveQueryCollection = require("./query/live-query-collection.cjs");
+ exports.IR = ir;
  exports.CollectionImpl = index.CollectionImpl;
  exports.createCollection = index.createCollection;
  exports.SortedMap = SortedMap.SortedMap;
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
+ {"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
@@ -1,3 +1,4 @@
+ import * as IR from "./query/ir.js";
  export * from './collection/index.js';
  export * from './SortedMap.cjs';
  export * from './transactions.cjs';
@@ -13,3 +14,4 @@ export * from './indexes/btree-index.js';
  export * from './indexes/lazy-index.js';
  export { type IndexOptions } from './indexes/index-options.js';
  export type { Collection } from './collection/index.js';
+ export { IR };
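With the query IR re-exported as a namespace from the package entry points (`exports.IR = ir` in CJS, `export { IR }` in the typings), consumers can reference IR types without deep imports. An illustrative sketch; only `BasicExpression` and its `type` discriminant are visible in this diff (via the auto-index source further down), so anything else in the namespace is assumed:

```ts
import { IR } from "@tanstack/db"

// Illustrative: BasicExpression is the IR type used by auto-index, and `func`
// is one of its `type` values per the auto-index source below.
function isFunctionExpression(expr: IR.BasicExpression): boolean {
  return expr.type === `func`
}
```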
@@ -28,7 +28,10 @@ function ensureIndexForField(fieldName, fieldPath, collection, compareOptions =
  options: compareFn ? { compareFn, compareOptions } : {}
  });
  } catch (error) {
- console.warn(`Failed to create auto-index for field "${fieldName}":`, error);
+ console.warn(
+ `${collection.id ? `[${collection.id}] ` : ``}Failed to create auto-index for field "${fieldName}":`,
+ error
+ );
  }
  }
  function ensureIndexForExpression(expression, collection) {
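The only behavioural change here is that the auto-index warning is now prefixed with the collection id when one is set, which makes the log attributable when several collections use eager auto-indexing. A hedged sketch of a collection that would produce the prefixed warning, assuming `id` and `autoIndex` are regular base-config options passed through by `localOnlyCollectionOptions`:

```ts
import { createCollection, localOnlyCollectionOptions } from "@tanstack/db"

// Sketch: if creating the auto-index for `title` failed, the warning would now
// read `[todos] Failed to create auto-index for field "title": ...` instead of
// the previous unprefixed message. `id` and `autoIndex` are assumed to be
// accepted by the base collection config here.
const todos = createCollection(
  localOnlyCollectionOptions({
    id: `todos`,
    getKey: (item: { id: number; title: string }) => item.id,
    autoIndex: `eager`,
  })
)
```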
@@ -1 +1 @@
- {"version":3,"file":"auto-index.cjs","sources":["../../../src/indexes/auto-index.ts"],"sourcesContent":["import { DEFAULT_COMPARE_OPTIONS } from \"../utils\"\nimport { BTreeIndex } from \"./btree-index\"\nimport type { CompareOptions } from \"../query/builder/types\"\nimport type { BasicExpression } from \"../query/ir\"\nimport type { CollectionImpl } from \"../collection/index.js\"\n\nexport interface AutoIndexConfig {\n autoIndex?: `off` | `eager`\n}\n\nfunction shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {\n // Only proceed if auto-indexing is enabled\n if (collection.config.autoIndex !== `eager`) {\n return false\n }\n\n // Don't auto-index during sync operations\n if (\n collection.status === `loading` ||\n collection.status === `initialCommit`\n ) {\n return false\n }\n\n return true\n}\n\nexport function ensureIndexForField<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n fieldName: string,\n fieldPath: Array<string>,\n collection: CollectionImpl<T, TKey, any, any, any>,\n compareOptions: CompareOptions = DEFAULT_COMPARE_OPTIONS,\n compareFn?: (a: any, b: any) => number\n) {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Check if we already have an index for this field\n const existingIndex = Array.from(collection.indexes.values()).find(\n (index) =>\n index.matchesField(fieldPath) &&\n index.matchesCompareOptions(compareOptions)\n )\n\n if (existingIndex) {\n return // Index already exists\n }\n\n // Create a new index for this field using the collection's createIndex method\n try {\n collection.createIndex((row) => (row as any)[fieldName], {\n name: `auto_${fieldName}`,\n indexType: BTreeIndex,\n options: compareFn ? { compareFn, compareOptions } : {},\n })\n } catch (error) {\n console.warn(`Failed to create auto-index for field \"${fieldName}\":`, error)\n }\n}\n\n/**\n * Analyzes a where expression and creates indexes for all simple operations on single fields\n */\nexport function ensureIndexForExpression<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n expression: BasicExpression,\n collection: CollectionImpl<T, TKey, any, any, any>\n): void {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Extract all indexable expressions and create indexes for them\n const indexableExpressions = extractIndexableExpressions(expression)\n\n for (const { fieldName, fieldPath } of indexableExpressions) {\n ensureIndexForField(fieldName, fieldPath, collection)\n }\n}\n\n/**\n * Extracts all indexable expressions from a where expression\n */\nfunction extractIndexableExpressions(\n expression: BasicExpression\n): Array<{ fieldName: string; fieldPath: Array<string> }> {\n const results: Array<{ fieldName: string; fieldPath: Array<string> }> = []\n\n function extractFromExpression(expr: BasicExpression): void {\n if (expr.type !== `func`) {\n return\n }\n\n const func = expr as any\n\n // Handle 'and' expressions by recursively processing all arguments\n if (func.name === `and`) {\n for (const arg of func.args) {\n extractFromExpression(arg)\n }\n return\n }\n\n // Check if this is a supported operation\n const supportedOperations = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`]\n if (!supportedOperations.includes(func.name)) {\n return\n }\n\n // Check if the first argument is a property reference (single field)\n if (func.args.length < 1 || func.args[0].type !== `ref`) {\n return\n }\n\n const fieldRef = func.args[0]\n const fieldPath = fieldRef.path\n\n // Skip if it's not a simple field (e.g., nested 
properties or array access)\n if (fieldPath.length !== 1) {\n return\n }\n\n const fieldName = fieldPath[0]\n results.push({ fieldName, fieldPath })\n }\n\n extractFromExpression(expression)\n return results\n}\n"],"names":["DEFAULT_COMPARE_OPTIONS","BTreeIndex"],"mappings":";;;;AAUA,SAAS,gBAAgB,YAAqD;AAE5E,MAAI,WAAW,OAAO,cAAc,SAAS;AAC3C,WAAO;AAAA,EACT;AAGA,MACE,WAAW,WAAW,aACtB,WAAW,WAAW,iBACtB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,SAAS,oBAId,WACA,WACA,YACA,iBAAiCA,MAAAA,yBACjC,WACA;AACA,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,WAAW,QAAQ,OAAA,CAAQ,EAAE;AAAA,IAC5D,CAAC,UACC,MAAM,aAAa,SAAS,KAC5B,MAAM,sBAAsB,cAAc;AAAA,EAAA;AAG9C,MAAI,eAAe;AACjB;AAAA,EACF;AAGA,MAAI;AACF,eAAW,YAAY,CAAC,QAAS,IAAY,SAAS,GAAG;AAAA,MACvD,MAAM,QAAQ,SAAS;AAAA,MACvB,WAAWC,WAAAA;AAAAA,MACX,SAAS,YAAY,EAAE,WAAW,eAAA,IAAmB,CAAA;AAAA,IAAC,CACvD;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,KAAK,0CAA0C,SAAS,MAAM,KAAK;AAAA,EAC7E;AACF;AAKO,SAAS,yBAId,YACA,YACM;AACN,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,uBAAuB,4BAA4B,UAAU;AAEnE,aAAW,EAAE,WAAW,UAAA,KAAe,sBAAsB;AAC3D,wBAAoB,WAAW,WAAW,UAAU;AAAA,EACtD;AACF;AAKA,SAAS,4BACP,YACwD;AACxD,QAAM,UAAkE,CAAA;AAExE,WAAS,sBAAsB,MAA6B;AAC1D,QAAI,KAAK,SAAS,QAAQ;AACxB;AAAA,IACF;AAEA,UAAM,OAAO;AAGb,QAAI,KAAK,SAAS,OAAO;AACvB,iBAAW,OAAO,KAAK,MAAM;AAC3B,8BAAsB,GAAG;AAAA,MAC3B;AACA;AAAA,IACF;AAGA,UAAM,sBAAsB,CAAC,MAAM,MAAM,OAAO,MAAM,OAAO,IAAI;AACjE,QAAI,CAAC,oBAAoB,SAAS,KAAK,IAAI,GAAG;AAC5C;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,KAAK,CAAC,EAAE,SAAS,OAAO;AACvD;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,KAAK,CAAC;AAC5B,UAAM,YAAY,SAAS;AAG3B,QAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,IACF;AAEA,UAAM,YAAY,UAAU,CAAC;AAC7B,YAAQ,KAAK,EAAE,WAAW,UAAA,CAAW;AAAA,EACvC;AAEA,wBAAsB,UAAU;AAChC,SAAO;AACT;;;"}
+ {"version":3,"file":"auto-index.cjs","sources":["../../../src/indexes/auto-index.ts"],"sourcesContent":["import { DEFAULT_COMPARE_OPTIONS } from \"../utils\"\nimport { BTreeIndex } from \"./btree-index\"\nimport type { CompareOptions } from \"../query/builder/types\"\nimport type { BasicExpression } from \"../query/ir\"\nimport type { CollectionImpl } from \"../collection/index.js\"\n\nexport interface AutoIndexConfig {\n autoIndex?: `off` | `eager`\n}\n\nfunction shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {\n // Only proceed if auto-indexing is enabled\n if (collection.config.autoIndex !== `eager`) {\n return false\n }\n\n // Don't auto-index during sync operations\n if (\n collection.status === `loading` ||\n collection.status === `initialCommit`\n ) {\n return false\n }\n\n return true\n}\n\nexport function ensureIndexForField<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n fieldName: string,\n fieldPath: Array<string>,\n collection: CollectionImpl<T, TKey, any, any, any>,\n compareOptions: CompareOptions = DEFAULT_COMPARE_OPTIONS,\n compareFn?: (a: any, b: any) => number\n) {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Check if we already have an index for this field\n const existingIndex = Array.from(collection.indexes.values()).find(\n (index) =>\n index.matchesField(fieldPath) &&\n index.matchesCompareOptions(compareOptions)\n )\n\n if (existingIndex) {\n return // Index already exists\n }\n\n // Create a new index for this field using the collection's createIndex method\n try {\n collection.createIndex((row) => (row as any)[fieldName], {\n name: `auto_${fieldName}`,\n indexType: BTreeIndex,\n options: compareFn ? { compareFn, compareOptions } : {},\n })\n } catch (error) {\n console.warn(\n `${collection.id ? 
`[${collection.id}] ` : ``}Failed to create auto-index for field \"${fieldName}\":`,\n error\n )\n }\n}\n\n/**\n * Analyzes a where expression and creates indexes for all simple operations on single fields\n */\nexport function ensureIndexForExpression<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n expression: BasicExpression,\n collection: CollectionImpl<T, TKey, any, any, any>\n): void {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Extract all indexable expressions and create indexes for them\n const indexableExpressions = extractIndexableExpressions(expression)\n\n for (const { fieldName, fieldPath } of indexableExpressions) {\n ensureIndexForField(fieldName, fieldPath, collection)\n }\n}\n\n/**\n * Extracts all indexable expressions from a where expression\n */\nfunction extractIndexableExpressions(\n expression: BasicExpression\n): Array<{ fieldName: string; fieldPath: Array<string> }> {\n const results: Array<{ fieldName: string; fieldPath: Array<string> }> = []\n\n function extractFromExpression(expr: BasicExpression): void {\n if (expr.type !== `func`) {\n return\n }\n\n const func = expr as any\n\n // Handle 'and' expressions by recursively processing all arguments\n if (func.name === `and`) {\n for (const arg of func.args) {\n extractFromExpression(arg)\n }\n return\n }\n\n // Check if this is a supported operation\n const supportedOperations = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`]\n if (!supportedOperations.includes(func.name)) {\n return\n }\n\n // Check if the first argument is a property reference (single field)\n if (func.args.length < 1 || func.args[0].type !== `ref`) {\n return\n }\n\n const fieldRef = func.args[0]\n const fieldPath = fieldRef.path\n\n // Skip if it's not a simple field (e.g., nested properties or array access)\n if (fieldPath.length !== 1) {\n return\n }\n\n const fieldName = fieldPath[0]\n results.push({ fieldName, fieldPath })\n }\n\n extractFromExpression(expression)\n return 
results\n}\n"],"names":["DEFAULT_COMPARE_OPTIONS","BTreeIndex"],"mappings":";;;;AAUA,SAAS,gBAAgB,YAAqD;AAE5E,MAAI,WAAW,OAAO,cAAc,SAAS;AAC3C,WAAO;AAAA,EACT;AAGA,MACE,WAAW,WAAW,aACtB,WAAW,WAAW,iBACtB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,SAAS,oBAId,WACA,WACA,YACA,iBAAiCA,MAAAA,yBACjC,WACA;AACA,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,WAAW,QAAQ,OAAA,CAAQ,EAAE;AAAA,IAC5D,CAAC,UACC,MAAM,aAAa,SAAS,KAC5B,MAAM,sBAAsB,cAAc;AAAA,EAAA;AAG9C,MAAI,eAAe;AACjB;AAAA,EACF;AAGA,MAAI;AACF,eAAW,YAAY,CAAC,QAAS,IAAY,SAAS,GAAG;AAAA,MACvD,MAAM,QAAQ,SAAS;AAAA,MACvB,WAAWC,WAAAA;AAAAA,MACX,SAAS,YAAY,EAAE,WAAW,eAAA,IAAmB,CAAA;AAAA,IAAC,CACvD;AAAA,EACH,SAAS,OAAO;AACd,YAAQ;AAAA,MACN,GAAG,WAAW,KAAK,IAAI,WAAW,EAAE,OAAO,EAAE,0CAA0C,SAAS;AAAA,MAChG;AAAA,IAAA;AAAA,EAEJ;AACF;AAKO,SAAS,yBAId,YACA,YACM;AACN,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,uBAAuB,4BAA4B,UAAU;AAEnE,aAAW,EAAE,WAAW,UAAA,KAAe,sBAAsB;AAC3D,wBAAoB,WAAW,WAAW,UAAU;AAAA,EACtD;AACF;AAKA,SAAS,4BACP,YACwD;AACxD,QAAM,UAAkE,CAAA;AAExE,WAAS,sBAAsB,MAA6B;AAC1D,QAAI,KAAK,SAAS,QAAQ;AACxB;AAAA,IACF;AAEA,UAAM,OAAO;AAGb,QAAI,KAAK,SAAS,OAAO;AACvB,iBAAW,OAAO,KAAK,MAAM;AAC3B,8BAAsB,GAAG;AAAA,MAC3B;AACA;AAAA,IACF;AAGA,UAAM,sBAAsB,CAAC,MAAM,MAAM,OAAO,MAAM,OAAO,IAAI;AACjE,QAAI,CAAC,oBAAoB,SAAS,KAAK,IAAI,GAAG;AAC5C;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,KAAK,CAAC,EAAE,SAAS,OAAO;AACvD;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,KAAK,CAAC;AAC5B,UAAM,YAAY,SAAS;AAG3B,QAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,IACF;AAEA,UAAM,YAAY,UAAU,CAAC;AAC7B,YAAQ,KAAK,EAAE,WAAW,UAAA,CAAW;AAAA,EACvC;AAEA,wBAAsB,UAAU;AAChC,SAAO;AACT;;;"}
@@ -27,13 +27,29 @@ function localOnlyCollectionOptions(config) {
  syncResult.confirmOperationsSync(params.transaction.mutations);
  return handlerResult;
  };
+ const acceptMutations = (transaction) => {
+ const collectionMutations = transaction.mutations.filter(
+ (m) => (
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
+ m.collection === syncResult.collection
+ )
+ );
+ if (collectionMutations.length === 0) {
+ return;
+ }
+ syncResult.confirmOperationsSync(
+ collectionMutations
+ );
+ };
  return {
  ...restConfig,
  sync: syncResult.sync,
  onInsert: wrappedOnInsert,
  onUpdate: wrappedOnUpdate,
  onDelete: wrappedOnDelete,
- utils: {},
+ utils: {
+ acceptMutations
+ },
  startSync: true,
  gcTime: 0
  };
@@ -42,6 +58,7 @@ function createLocalOnlySync(initialData) {
  let syncBegin = null;
  let syncWrite = null;
  let syncCommit = null;
+ let collection = null;
  const sync = {
  /**
  * Sync function that captures sync parameters and applies initial data
@@ -53,6 +70,7 @@ function createLocalOnlySync(initialData) {
  syncBegin = begin;
  syncWrite = write;
  syncCommit = commit;
+ collection = params.collection;
  if (initialData && initialData.length > 0) {
  begin();
  initialData.forEach((item) => {
@@ -90,7 +108,8 @@ function createLocalOnlySync(initialData) {
  };
  return {
  sync,
- confirmOperationsSync
+ confirmOperationsSync,
+ collection
  };
  }
  exports.localOnlyCollectionOptions = localOnlyCollectionOptions;
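Taken together, these hunks capture the collection instance during sync and wire it into `acceptMutations`, which filters a transaction's mutations down to this collection and replays them through the loopback sync. It exists for manual transactions, where local-only writes made inside `tx.mutate()` are not persisted by the usual mutation handlers. A condensed sketch based on the JSDoc example in the new source (`api.save` is a placeholder):

```ts
import {
  createCollection,
  createTransaction,
  localOnlyCollectionOptions,
} from "@tanstack/db"

// Placeholder API client used only for illustration.
declare const api: { save: (payload: unknown) => Promise<void> }

const localData = createCollection(
  localOnlyCollectionOptions({
    getKey: (item: { id: number; data: string }) => item.id,
  })
)

const tx = createTransaction({
  mutationFn: async ({ transaction }) => {
    // Make the real API call first...
    await api.save(transaction.mutations)
    // ...then persist the local-only mutations once it succeeds.
    localData.utils.acceptMutations(transaction)
  },
})

tx.mutate(() => {
  localData.insert({ id: 1, data: `metadata` })
})

await tx.commit()
```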
@@ -1 +1 @@
- {"version":3,"file":"local-only.cjs","sources":["../../src/local-only.ts"],"sourcesContent":["import type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFnParams,\n OperationType,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Configuration interface for Local-only collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalOnlyCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends Omit<\n BaseCollectionConfig<T, TKey, TSchema, LocalOnlyCollectionUtils>,\n `gcTime` | `startSync`\n > {\n /**\n * Optional initial data to populate the collection with on creation\n * This data will be applied during the initial sync process\n */\n initialData?: Array<T>\n}\n\n/**\n * Local-only collection utilities type (currently empty but matches the pattern)\n */\nexport interface LocalOnlyCollectionUtils extends UtilsRecord {}\n\n/**\n * Creates Local-only collection options for use with a standard Collection\n *\n * This is an in-memory collection that doesn't sync with external sources but uses a loopback sync config\n * that immediately \"syncs\" all optimistic changes to the collection, making them permanent.\n * Perfect for local-only data that doesn't need persistence or external synchronization.\n *\n * @template T - The schema type if a schema is provided, otherwise the type of items in the collection\n * @template TKey - The type of the key returned by getKey\n * @param config - Configuration options for the Local-only collection\n * @returns Collection options with utilities (currently empty but follows the pattern)\n *\n * @example\n * // Basic local-only collection\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Local-only collection with initial data\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * initialData: [\n * { id: 1, name: 'Item 1' },\n * { id: 2, name: 'Item 2' },\n * ],\n * })\n * )\n *\n * @example\n * // Local-only collection with mutation handlers\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * // Custom logic after insert\n * },\n * })\n * )\n */\n\n// Overload for when schema is provided\nexport function localOnlyCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n utils: LocalOnlyCollectionUtils\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localOnlyCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n utils: LocalOnlyCollectionUtils\n 
schema?: never // no schema in the result\n}\n\nexport function localOnlyCollectionOptions(\n config: LocalOnlyCollectionConfig<any, any, string | number>\n): CollectionConfig<any, string | number, any> & {\n utils: LocalOnlyCollectionUtils\n schema?: StandardSchemaV1\n} {\n const { initialData, onInsert, onUpdate, onDelete, ...restConfig } = config\n\n // Create the sync configuration with transaction confirmation capability\n const syncResult = createLocalOnlySync(initialData)\n\n /**\n * Create wrapper handlers that call user handlers first, then confirm transactions\n * Wraps the user's onInsert handler to also confirm the transaction immediately\n */\n const wrappedOnInsert = async (\n params: InsertMutationFnParams<\n any,\n string | number,\n LocalOnlyCollectionUtils\n >\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onInsert) {\n handlerResult = (await onInsert(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onUpdate handler that also confirms the transaction immediately\n */\n const wrappedOnUpdate = async (\n params: UpdateMutationFnParams<\n any,\n string | number,\n LocalOnlyCollectionUtils\n >\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onUpdate) {\n handlerResult = (await onUpdate(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onDelete handler that also confirms the transaction immediately\n */\n const wrappedOnDelete = async (\n params: DeleteMutationFnParams<\n any,\n string | number,\n LocalOnlyCollectionUtils\n >\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onDelete) {\n handlerResult = (await onDelete(params)) ?? 
{}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n return {\n ...restConfig,\n sync: syncResult.sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {} as LocalOnlyCollectionUtils,\n startSync: true,\n gcTime: 0,\n }\n}\n\n/**\n * Internal function to create Local-only sync configuration with transaction confirmation\n *\n * This captures the sync functions and provides synchronous confirmation of operations.\n * It creates a loopback sync that immediately confirms all optimistic operations,\n * making them permanent in the collection.\n *\n * @param initialData - Optional array of initial items to populate the collection\n * @returns Object with sync configuration and confirmOperationsSync function\n */\nfunction createLocalOnlySync<T extends object, TKey extends string | number>(\n initialData?: Array<T>\n) {\n // Capture sync functions for transaction confirmation\n let syncBegin: (() => void) | null = null\n let syncWrite: ((message: { type: OperationType; value: T }) => void) | null =\n null\n let syncCommit: (() => void) | null = null\n\n const sync: SyncConfig<T, TKey> = {\n /**\n * Sync function that captures sync parameters and applies initial data\n * @param params - Sync parameters containing begin, write, and commit functions\n * @returns Unsubscribe function (empty since no ongoing sync is needed)\n */\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n\n // Capture sync functions for later use by confirmOperationsSync\n syncBegin = begin\n syncWrite = write\n syncCommit = commit\n\n // Apply initial data if provided\n if (initialData && initialData.length > 0) {\n begin()\n initialData.forEach((item) => {\n write({\n type: `insert`,\n value: item,\n })\n })\n commit()\n }\n\n // Mark collection as ready since local-only collections are immediately ready\n markReady()\n\n // Return empty unsubscribe function - no ongoing sync needed\n return () => {}\n },\n /**\n * Get sync metadata - returns empty object for local-only collections\n * @returns Empty metadata object\n */\n getSyncMetadata: () => ({}),\n }\n\n /**\n * Synchronously confirms optimistic operations by immediately writing through sync\n *\n * This loops through transaction mutations and applies them to move from optimistic to synced state.\n * It's called after user handlers to make optimistic changes permanent.\n *\n * @param mutations - Array of mutation objects from the transaction\n */\n const confirmOperationsSync = (mutations: Array<any>) => {\n if (!syncBegin || !syncWrite || !syncCommit) {\n return // Sync not initialized yet, which is fine\n }\n\n // Immediately write back through sync interface\n syncBegin()\n mutations.forEach((mutation) => {\n if (syncWrite) {\n syncWrite({\n type: mutation.type,\n value: mutation.modified,\n })\n }\n })\n syncCommit()\n }\n\n return {\n sync,\n confirmOperationsSync,\n 
}\n}\n"],"names":[],"mappings":";;AA+GO,SAAS,2BACd,QAIA;AACA,QAAM,EAAE,aAAa,UAAU,UAAU,UAAU,GAAG,eAAe;AAGrE,QAAM,aAAa,oBAAoB,WAAW;AAMlD,QAAM,kBAAkB,OACtB,WAKG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WAKG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WAKG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,WAAW;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO,CAAA;AAAA,IACP,WAAW;AAAA,IACX,QAAQ;AAAA,EAAA;AAEZ;AAYA,SAAS,oBACP,aACA;AAEA,MAAI,YAAiC;AACrC,MAAI,YACF;AACF,MAAI,aAAkC;AAEtC,QAAM,OAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMhC,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AAGb,UAAI,eAAe,YAAY,SAAS,GAAG;AACzC,cAAA;AACA,oBAAY,QAAQ,CAAC,SAAS;AAC5B,gBAAM;AAAA,YACJ,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,CAAC;AACD,eAAA;AAAA,MACF;AAGA,gBAAA;AAGA,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,iBAAiB,OAAO,CAAA;AAAA,EAAC;AAW3B,QAAM,wBAAwB,CAAC,cAA0B;AACvD,QAAI,CAAC,aAAa,CAAC,aAAa,CAAC,YAAY;AAC3C;AAAA,IACF;AAGA,cAAA;AACA,cAAU,QAAQ,CAAC,aAAa;AAC9B,UAAI,WAAW;AACb,kBAAU;AAAA,UACR,MAAM,SAAS;AAAA,UACf,OAAO,SAAS;AAAA,QAAA,CACjB;AAAA,MACH;AAAA,IACF,CAAC;AACD,eAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EAAA;AAEJ;;"}
+ {"version":3,"file":"local-only.cjs","sources":["../../src/local-only.ts"],"sourcesContent":["import type {\n BaseCollectionConfig,\n CollectionConfig,\n DeleteMutationFn,\n DeleteMutationFnParams,\n InferSchemaOutput,\n InsertMutationFn,\n InsertMutationFnParams,\n OperationType,\n PendingMutation,\n SyncConfig,\n UpdateMutationFn,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"./types\"\nimport type { Collection } from \"./collection/index\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n/**\n * Configuration interface for Local-only collection options\n * @template T - The type of items in the collection\n * @template TSchema - The schema type for validation\n * @template TKey - The type of the key returned by `getKey`\n */\nexport interface LocalOnlyCollectionConfig<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n> extends Omit<\n BaseCollectionConfig<T, TKey, TSchema, LocalOnlyCollectionUtils>,\n `gcTime` | `startSync`\n > {\n /**\n * Optional initial data to populate the collection with on creation\n * This data will be applied during the initial sync process\n */\n initialData?: Array<T>\n}\n\n/**\n * Local-only collection utilities type\n */\nexport interface LocalOnlyCollectionUtils extends UtilsRecord {\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them.\n * This should be called in your transaction's mutationFn to persist local-only data.\n *\n * @param transaction - The transaction containing mutations to accept\n * @example\n * const localData = createCollection(localOnlyCollectionOptions({...}))\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Make API call first\n * await api.save(...)\n * // Then persist local-only mutations after success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n */\n acceptMutations: (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => void\n}\n\ntype LocalOnlyCollectionOptionsResult<\n T extends object,\n TKey extends string | number,\n TSchema extends StandardSchemaV1 | never = never,\n> = Omit<\n CollectionConfig<T, TKey, TSchema>,\n `onInsert` | `onUpdate` | `onDelete`\n> & {\n onInsert?: InsertMutationFn<T, TKey, LocalOnlyCollectionUtils>\n onUpdate?: UpdateMutationFn<T, TKey, LocalOnlyCollectionUtils>\n onDelete?: DeleteMutationFn<T, TKey, LocalOnlyCollectionUtils>\n utils: LocalOnlyCollectionUtils\n}\n\n/**\n * Creates Local-only collection options for use with a standard Collection\n *\n * This is an in-memory collection that doesn't sync with external sources but uses a loopback sync config\n * that immediately \"syncs\" all optimistic changes to the collection, making them permanent.\n * Perfect for local-only data that doesn't need persistence or external synchronization.\n *\n * **Using with Manual Transactions:**\n *\n * For manual transactions, you must call `utils.acceptMutations()` in your transaction's `mutationFn`\n * to persist changes made during `tx.mutate()`. 
This is necessary because local-only collections\n * don't participate in the standard mutation handler flow for manual transactions.\n *\n * @template T - The schema type if a schema is provided, otherwise the type of items in the collection\n * @template TKey - The type of the key returned by getKey\n * @param config - Configuration options for the Local-only collection\n * @returns Collection options with utilities including acceptMutations\n *\n * @example\n * // Basic local-only collection\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Local-only collection with initial data\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * initialData: [\n * { id: 1, name: 'Item 1' },\n * { id: 2, name: 'Item 2' },\n * ],\n * })\n * )\n *\n * @example\n * // Local-only collection with mutation handlers\n * const collection = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * console.log('Item inserted:', transaction.mutations[0].modified)\n * // Custom logic after insert\n * },\n * })\n * )\n *\n * @example\n * // Using with manual transactions\n * const localData = createCollection(\n * localOnlyCollectionOptions({\n * getKey: (item) => item.id,\n * })\n * )\n *\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Use local data in API call\n * const localMutations = transaction.mutations.filter(m => m.collection === localData)\n * await api.save({ metadata: localMutations[0]?.modified })\n *\n * // Persist local-only mutations after API success\n * localData.utils.acceptMutations(transaction)\n * }\n * })\n *\n * tx.mutate(() => {\n * localData.insert({ id: 1, data: 'metadata' })\n * apiCollection.insert({ id: 2, data: 'main data' })\n * })\n *\n * await tx.commit()\n */\n\n// Overload for when schema is provided\nexport function localOnlyCollectionOptions<\n T extends StandardSchemaV1,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<InferSchemaOutput<T>, T, TKey> & {\n schema: T\n }\n): LocalOnlyCollectionOptionsResult<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n}\n\n// Overload for when no schema is provided\n// the type T needs to be passed explicitly unless it can be inferred from the getKey function in the config\nexport function localOnlyCollectionOptions<\n T extends object,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, never, TKey> & {\n schema?: never // prohibit schema\n }\n): LocalOnlyCollectionOptionsResult<T, TKey> & {\n schema?: never // no schema in the result\n}\n\nexport function localOnlyCollectionOptions<\n T extends object = object,\n TSchema extends StandardSchemaV1 = never,\n TKey extends string | number = string | number,\n>(\n config: LocalOnlyCollectionConfig<T, TSchema, TKey>\n): LocalOnlyCollectionOptionsResult<T, TKey, TSchema> & {\n schema?: StandardSchemaV1\n} {\n const { initialData, onInsert, onUpdate, onDelete, ...restConfig } = config\n\n // Create the sync configuration with transaction confirmation capability\n const syncResult = createLocalOnlySync(initialData)\n\n /**\n * Create wrapper handlers that call user handlers first, then confirm transactions\n * Wraps the user's onInsert handler to also confirm the transaction immediately\n */\n const wrappedOnInsert = async (\n params: InsertMutationFnParams<T, 
TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onInsert) {\n handlerResult = (await onInsert(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onUpdate handler that also confirms the transaction immediately\n */\n const wrappedOnUpdate = async (\n params: UpdateMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onUpdate) {\n handlerResult = (await onUpdate(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Wrapper for onDelete handler that also confirms the transaction immediately\n */\n const wrappedOnDelete = async (\n params: DeleteMutationFnParams<T, TKey, LocalOnlyCollectionUtils>\n ) => {\n // Call user handler first if provided\n let handlerResult\n if (onDelete) {\n handlerResult = (await onDelete(params)) ?? {}\n }\n\n // Then synchronously confirm the transaction by looping through mutations\n syncResult.confirmOperationsSync(params.transaction.mutations)\n\n return handlerResult\n }\n\n /**\n * Accepts mutations from a transaction that belong to this collection and persists them\n */\n const acceptMutations = (transaction: {\n mutations: Array<PendingMutation<Record<string, unknown>>>\n }) => {\n // Filter mutations that belong to this collection\n const collectionMutations = transaction.mutations.filter(\n (m) =>\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n m.collection === syncResult.collection\n )\n\n if (collectionMutations.length === 0) {\n return\n }\n\n // Persist the mutations through sync\n syncResult.confirmOperationsSync(\n collectionMutations as Array<PendingMutation<T>>\n )\n }\n\n return {\n ...restConfig,\n sync: syncResult.sync,\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n acceptMutations,\n } as LocalOnlyCollectionUtils,\n startSync: true,\n gcTime: 0,\n }\n}\n\n/**\n * Internal function to create Local-only sync configuration with transaction confirmation\n *\n * This captures the sync functions and provides synchronous confirmation of operations.\n * It creates a loopback sync that immediately confirms all optimistic operations,\n * making them permanent in the collection.\n *\n * @param initialData - Optional array of initial items to populate the collection\n * @returns Object with sync configuration and confirmOperationsSync function\n */\nfunction createLocalOnlySync<T extends object, TKey extends string | number>(\n initialData?: Array<T>\n) {\n // Capture sync functions and collection for transaction confirmation\n let syncBegin: (() => void) | null = null\n let syncWrite: ((message: { type: OperationType; value: T }) => void) | null =\n null\n let syncCommit: (() => void) | null = null\n let collection: Collection<T, TKey, LocalOnlyCollectionUtils> | null = null\n\n const sync: SyncConfig<T, TKey> = {\n /**\n * Sync function that captures sync parameters and applies initial data\n * @param params - Sync parameters containing begin, write, and commit functions\n * @returns Unsubscribe function (empty since no ongoing sync is needed)\n */\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n\n // 
Capture sync functions and collection for later use\n syncBegin = begin\n syncWrite = write\n syncCommit = commit\n collection = params.collection\n\n // Apply initial data if provided\n if (initialData && initialData.length > 0) {\n begin()\n initialData.forEach((item) => {\n write({\n type: `insert`,\n value: item,\n })\n })\n commit()\n }\n\n // Mark collection as ready since local-only collections are immediately ready\n markReady()\n\n // Return empty unsubscribe function - no ongoing sync needed\n return () => {}\n },\n /**\n * Get sync metadata - returns empty object for local-only collections\n * @returns Empty metadata object\n */\n getSyncMetadata: () => ({}),\n }\n\n /**\n * Synchronously confirms optimistic operations by immediately writing through sync\n *\n * This loops through transaction mutations and applies them to move from optimistic to synced state.\n * It's called after user handlers to make optimistic changes permanent.\n *\n * @param mutations - Array of mutation objects from the transaction\n */\n const confirmOperationsSync = (mutations: Array<PendingMutation<T>>) => {\n if (!syncBegin || !syncWrite || !syncCommit) {\n return // Sync not initialized yet, which is fine\n }\n\n // Immediately write back through sync interface\n syncBegin()\n mutations.forEach((mutation) => {\n if (syncWrite) {\n syncWrite({\n type: mutation.type,\n value: mutation.modified,\n })\n }\n })\n syncCommit()\n }\n\n return {\n sync,\n confirmOperationsSync,\n collection,\n }\n}\n"],"names":[],"mappings":";;AAqLO,SAAS,2BAKd,QAGA;AACA,QAAM,EAAE,aAAa,UAAU,UAAU,UAAU,GAAG,eAAe;AAGrE,QAAM,aAAa,oBAAoB,WAAW;AAMlD,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,OACtB,WACG;AAEH,QAAI;AACJ,QAAI,UAAU;AACZ,sBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAAA,IAC9C;AAGA,eAAW,sBAAsB,OAAO,YAAY,SAAS;AAE7D,WAAO;AAAA,EACT;AAKA,QAAM,kBAAkB,CAAC,gBAEnB;AAEJ,UAAM,sBAAsB,YAAY,UAAU;AAAA,MAChD,CAAC;AAAA;AAAA,QAEC,EAAE,eAAe,WAAW;AAAA;AAAA,IAAA;AAGhC,QAAI,oBAAoB,WAAW,GAAG;AACpC;AAAA,IACF;AAGA,eAAW;AAAA,MACT;AAAA,IAAA;AAAA,EAEJ;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,WAAW;AAAA,IACjB,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,IAAA;AAAA,IAEF,WAAW;AAAA,IACX,QAAQ;AAAA,EAAA;AAEZ;AAYA,SAAS,oBACP,aACA;AAEA,MAAI,YAAiC;AACrC,MAAI,YACF;AACF,MAAI,aAAkC;AACtC,MAAI,aAAmE;AAEvE,QAAM,OAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMhC,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAG5C,kBAAY;AACZ,kBAAY;AACZ,mBAAa;AACb,mBAAa,OAAO;AAGpB,UAAI,eAAe,YAAY,SAAS,GAAG;AACzC,cAAA;AACA,oBAAY,QAAQ,CAAC,SAAS;AAC5B,gBAAM;AAAA,YACJ,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,CAAC;AACD,eAAA;AAAA,MACF;AAGA,gBAAA;AAGA,aAAO,MAAM;AAAA,MAAC;AAAA,IAChB;AAAA;AAAA;AAAA;AAAA;AAAA,IAKA,iBAAiB,OAAO,CAAA;AAAA,EAAC;AAW3B,QAAM,wBAAwB,CAAC,cAAyC;AACtE,QAAI,CAAC,aAAa,CAAC,aAAa,CAAC,YAAY;AAC3C;AAAA,IACF;AAGA,cAAA;AACA,cAAU,QAAQ,CAAC,aAAa;AAC9B,UAAI,WAAW;AACb,kBAAU;AAAA,UACR,MAAM,SAAS;AAAA,UACf,OAAO,SAAS;AAAA,QAAA,CACjB;AAAA,MACH;AAAA,IACF,CAAC;AACD,eAAA;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;;"}