@tanstack/db 0.3.2 → 0.4.1

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (171)
  1. package/dist/cjs/{change-events.cjs → collection/change-events.cjs} +13 -42
  2. package/dist/cjs/collection/change-events.cjs.map +1 -0
  3. package/dist/{esm/change-events.d.ts → cjs/collection/change-events.d.cts} +6 -6
  4. package/dist/cjs/collection/changes.cjs +108 -0
  5. package/dist/cjs/collection/changes.cjs.map +1 -0
  6. package/dist/cjs/collection/changes.d.cts +53 -0
  7. package/dist/cjs/{collection-events.cjs → collection/events.cjs} +7 -5
  8. package/dist/cjs/collection/events.cjs.map +1 -0
  9. package/dist/cjs/{collection-events.d.cts → collection/events.d.cts} +7 -4
  10. package/dist/cjs/collection/index.cjs +417 -0
  11. package/dist/cjs/collection/index.cjs.map +1 -0
  12. package/dist/{esm/collection.d.ts → cjs/collection/index.d.cts} +46 -184
  13. package/dist/cjs/collection/indexes.cjs +124 -0
  14. package/dist/cjs/collection/indexes.cjs.map +1 -0
  15. package/dist/cjs/collection/indexes.d.cts +47 -0
  16. package/dist/cjs/collection/lifecycle.cjs +196 -0
  17. package/dist/cjs/collection/lifecycle.cjs.map +1 -0
  18. package/dist/cjs/collection/lifecycle.d.cts +81 -0
  19. package/dist/cjs/collection/mutations.cjs +315 -0
  20. package/dist/cjs/collection/mutations.cjs.map +1 -0
  21. package/dist/cjs/collection/mutations.d.cts +33 -0
  22. package/dist/cjs/collection/state.cjs +597 -0
  23. package/dist/cjs/collection/state.cjs.map +1 -0
  24. package/dist/cjs/collection/state.d.cts +122 -0
  25. package/dist/cjs/collection/subscription.cjs +160 -0
  26. package/dist/cjs/collection/subscription.cjs.map +1 -0
  27. package/dist/cjs/collection/subscription.d.cts +57 -0
  28. package/dist/cjs/collection/sync.cjs +154 -0
  29. package/dist/cjs/collection/sync.cjs.map +1 -0
  30. package/dist/cjs/collection/sync.d.cts +34 -0
  31. package/dist/cjs/index.cjs +8 -8
  32. package/dist/cjs/index.d.cts +2 -2
  33. package/dist/cjs/indexes/auto-index.cjs.map +1 -1
  34. package/dist/cjs/indexes/auto-index.d.cts +1 -1
  35. package/dist/cjs/indexes/base-index.cjs.map +1 -1
  36. package/dist/cjs/indexes/base-index.d.cts +2 -2
  37. package/dist/cjs/indexes/btree-index.cjs +2 -2
  38. package/dist/cjs/indexes/btree-index.cjs.map +1 -1
  39. package/dist/cjs/indexes/btree-index.d.cts +1 -1
  40. package/dist/cjs/query/builder/index.cjs +2 -2
  41. package/dist/cjs/query/builder/index.cjs.map +1 -1
  42. package/dist/cjs/query/builder/types.d.cts +1 -1
  43. package/dist/cjs/query/compiler/index.cjs +5 -2
  44. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  45. package/dist/cjs/query/compiler/index.d.cts +3 -2
  46. package/dist/cjs/query/compiler/joins.cjs +22 -24
  47. package/dist/cjs/query/compiler/joins.cjs.map +1 -1
  48. package/dist/cjs/query/compiler/joins.d.cts +3 -2
  49. package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
  50. package/dist/cjs/query/compiler/order-by.d.cts +1 -1
  51. package/dist/cjs/query/ir.cjs.map +1 -1
  52. package/dist/cjs/query/ir.d.cts +1 -1
  53. package/dist/cjs/query/live/collection-config-builder.cjs +29 -12
  54. package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
  55. package/dist/cjs/query/live/collection-config-builder.d.cts +3 -0
  56. package/dist/cjs/query/live/collection-subscriber.cjs +43 -104
  57. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
  58. package/dist/cjs/query/live/collection-subscriber.d.cts +4 -7
  59. package/dist/cjs/query/live-query-collection.cjs +2 -2
  60. package/dist/cjs/query/live-query-collection.cjs.map +1 -1
  61. package/dist/cjs/query/live-query-collection.d.cts +1 -1
  62. package/dist/cjs/transactions.cjs +3 -3
  63. package/dist/cjs/transactions.cjs.map +1 -1
  64. package/dist/cjs/types.d.cts +12 -10
  65. package/dist/cjs/utils/browser-polyfills.cjs +22 -0
  66. package/dist/cjs/utils/browser-polyfills.cjs.map +1 -0
  67. package/dist/cjs/utils/browser-polyfills.d.cts +9 -0
  68. package/dist/{cjs/change-events.d.cts → esm/collection/change-events.d.ts} +6 -6
  69. package/dist/esm/{change-events.js → collection/change-events.js} +13 -42
  70. package/dist/esm/collection/change-events.js.map +1 -0
  71. package/dist/esm/collection/changes.d.ts +53 -0
  72. package/dist/esm/collection/changes.js +108 -0
  73. package/dist/esm/collection/changes.js.map +1 -0
  74. package/dist/esm/{collection-events.d.ts → collection/events.d.ts} +7 -4
  75. package/dist/esm/{collection-events.js → collection/events.js} +7 -5
  76. package/dist/esm/collection/events.js.map +1 -0
  77. package/dist/{cjs/collection.d.cts → esm/collection/index.d.ts} +46 -184
  78. package/dist/esm/collection/index.js +417 -0
  79. package/dist/esm/collection/index.js.map +1 -0
  80. package/dist/esm/collection/indexes.d.ts +47 -0
  81. package/dist/esm/collection/indexes.js +124 -0
  82. package/dist/esm/collection/indexes.js.map +1 -0
  83. package/dist/esm/collection/lifecycle.d.ts +81 -0
  84. package/dist/esm/collection/lifecycle.js +196 -0
  85. package/dist/esm/collection/lifecycle.js.map +1 -0
  86. package/dist/esm/collection/mutations.d.ts +33 -0
  87. package/dist/esm/collection/mutations.js +315 -0
  88. package/dist/esm/collection/mutations.js.map +1 -0
  89. package/dist/esm/collection/state.d.ts +122 -0
  90. package/dist/esm/collection/state.js +597 -0
  91. package/dist/esm/collection/state.js.map +1 -0
  92. package/dist/esm/collection/subscription.d.ts +57 -0
  93. package/dist/esm/collection/subscription.js +160 -0
  94. package/dist/esm/collection/subscription.js.map +1 -0
  95. package/dist/esm/collection/sync.d.ts +34 -0
  96. package/dist/esm/collection/sync.js +154 -0
  97. package/dist/esm/collection/sync.js.map +1 -0
  98. package/dist/esm/index.d.ts +2 -2
  99. package/dist/esm/index.js +1 -1
  100. package/dist/esm/indexes/auto-index.d.ts +1 -1
  101. package/dist/esm/indexes/auto-index.js.map +1 -1
  102. package/dist/esm/indexes/base-index.d.ts +2 -2
  103. package/dist/esm/indexes/base-index.js.map +1 -1
  104. package/dist/esm/indexes/btree-index.d.ts +1 -1
  105. package/dist/esm/indexes/btree-index.js +2 -2
  106. package/dist/esm/indexes/btree-index.js.map +1 -1
  107. package/dist/esm/proxy.js +1 -1
  108. package/dist/esm/query/builder/index.js +1 -1
  109. package/dist/esm/query/builder/index.js.map +1 -1
  110. package/dist/esm/query/builder/types.d.ts +1 -1
  111. package/dist/esm/query/compiler/index.d.ts +3 -2
  112. package/dist/esm/query/compiler/index.js +5 -2
  113. package/dist/esm/query/compiler/index.js.map +1 -1
  114. package/dist/esm/query/compiler/joins.d.ts +3 -2
  115. package/dist/esm/query/compiler/joins.js +22 -24
  116. package/dist/esm/query/compiler/joins.js.map +1 -1
  117. package/dist/esm/query/compiler/order-by.d.ts +1 -1
  118. package/dist/esm/query/compiler/order-by.js.map +1 -1
  119. package/dist/esm/query/ir.d.ts +1 -1
  120. package/dist/esm/query/ir.js.map +1 -1
  121. package/dist/esm/query/live/collection-config-builder.d.ts +3 -0
  122. package/dist/esm/query/live/collection-config-builder.js +29 -12
  123. package/dist/esm/query/live/collection-config-builder.js.map +1 -1
  124. package/dist/esm/query/live/collection-subscriber.d.ts +4 -7
  125. package/dist/esm/query/live/collection-subscriber.js +43 -104
  126. package/dist/esm/query/live/collection-subscriber.js.map +1 -1
  127. package/dist/esm/query/live-query-collection.d.ts +1 -1
  128. package/dist/esm/query/live-query-collection.js +1 -1
  129. package/dist/esm/query/live-query-collection.js.map +1 -1
  130. package/dist/esm/transactions.js +3 -3
  131. package/dist/esm/transactions.js.map +1 -1
  132. package/dist/esm/types.d.ts +12 -10
  133. package/dist/esm/utils/browser-polyfills.d.ts +9 -0
  134. package/dist/esm/utils/browser-polyfills.js +22 -0
  135. package/dist/esm/utils/browser-polyfills.js.map +1 -0
  136. package/package.json +2 -2
  137. package/src/{change-events.ts → collection/change-events.ts} +25 -39
  138. package/src/collection/changes.ts +163 -0
  139. package/src/{collection-events.ts → collection/events.ts} +8 -6
  140. package/src/collection/index.ts +808 -0
  141. package/src/collection/indexes.ts +172 -0
  142. package/src/collection/lifecycle.ts +289 -0
  143. package/src/collection/mutations.ts +535 -0
  144. package/src/collection/state.ts +866 -0
  145. package/src/collection/subscription.ts +239 -0
  146. package/src/collection/sync.ts +235 -0
  147. package/src/index.ts +2 -2
  148. package/src/indexes/auto-index.ts +1 -1
  149. package/src/indexes/base-index.ts +3 -3
  150. package/src/indexes/btree-index.ts +2 -2
  151. package/src/query/builder/index.ts +1 -1
  152. package/src/query/builder/types.ts +1 -1
  153. package/src/query/compiler/index.ts +7 -1
  154. package/src/query/compiler/joins.ts +28 -41
  155. package/src/query/compiler/order-by.ts +1 -1
  156. package/src/query/ir.ts +1 -1
  157. package/src/query/live/collection-config-builder.ts +48 -22
  158. package/src/query/live/collection-subscriber.ts +63 -168
  159. package/src/query/live-query-collection.ts +2 -2
  160. package/src/transactions.ts +3 -3
  161. package/src/types.ts +14 -15
  162. package/src/utils/browser-polyfills.ts +39 -0
  163. package/dist/cjs/change-events.cjs.map +0 -1
  164. package/dist/cjs/collection-events.cjs.map +0 -1
  165. package/dist/cjs/collection.cjs +0 -1625
  166. package/dist/cjs/collection.cjs.map +0 -1
  167. package/dist/esm/change-events.js.map +0 -1
  168. package/dist/esm/collection-events.js.map +0 -1
  169. package/dist/esm/collection.js +0 -1625
  170. package/dist/esm/collection.js.map +0 -1
  171. package/src/collection.ts +0 -2564
@@ -0,0 +1 @@
+ {"version":3,"file":"state.js","sources":["../../../src/collection/state.ts"],"sourcesContent":[…],"names":[],"mappings":"…"}
@@ -0,0 +1,57 @@
+ import { BasicExpression } from '../query/ir.js';
+ import { BaseIndex } from '../indexes/base-index.js';
+ import { ChangeMessage } from '../types.js';
+ import { CollectionImpl } from './index.js';
+ type RequestSnapshotOptions = {
+ where?: BasicExpression<boolean>;
+ optimizedOnly?: boolean;
+ };
+ type RequestLimitedSnapshotOptions = {
+ minValue?: any;
+ limit: number;
+ };
+ type CollectionSubscriptionOptions = {
+ /** Pre-compiled expression for filtering changes */
+ whereExpression?: BasicExpression<boolean>;
+ /** Callback to call when the subscription is unsubscribed */
+ onUnsubscribe?: () => void;
+ };
+ export declare class CollectionSubscription {
+ private collection;
+ private callback;
+ private options;
+ private loadedInitialState;
+ private snapshotSent;
+ private sentKeys;
+ private filteredCallback;
+ private orderByIndex;
+ constructor(collection: CollectionImpl<any, any, any, any, any>, callback: (changes: Array<ChangeMessage<any, any>>) => void, options: CollectionSubscriptionOptions);
+ setOrderByIndex(index: BaseIndex<any>): void;
+ hasLoadedInitialState(): boolean;
+ hasSentAtLeastOneSnapshot(): boolean;
+ emitEvents(changes: Array<ChangeMessage<any, any>>): void;
+ /**
+ * Sends the snapshot to the callback.
+ * Returns a boolean indicating if it succeeded.
+ * It can only fail if there is no index to fulfill the request
+ * and the optimizedOnly option is set to true,
+ * or, the entire state was already loaded.
+ */
+ requestSnapshot(opts?: RequestSnapshotOptions): boolean;
+ /**
+ * Sends a snapshot that is limited to the first `limit` rows that fulfill the `where` clause and are bigger than `minValue`.
+ * Requires a range index to be set with `setOrderByIndex` prior to calling this method.
+ * It uses that range index to load the items in the order of the index.
+ * Note: it does not send keys that have already been sent before.
+ */
+ requestLimitedSnapshot({ limit, minValue }: RequestLimitedSnapshotOptions): void;
+ /**
+ * Filters and flips changes for keys that have not been sent yet.
+ * Deletes are filtered out for keys that have not been sent yet.
+ * Updates are flipped into inserts for keys that have not been sent yet.
+ */
+ private filterAndFlipChanges;
+ private trackSentKeys;
+ unsubscribe(): void;
+ }
+ export {};
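
For orientation, here is a minimal consumer-side sketch of the API declared above. It is not part of the diff: `collection` (a CollectionImpl instance) and `ageIndex` (a range index usable for ordered reads) are assumed to exist and are only declared, and the import path into the emitted module is likewise an assumption rather than a documented entry point.

// Hypothetical usage sketch — `collection`, `ageIndex`, and the import path are assumptions.
import { CollectionSubscription } from "./subscription.js"

declare const collection: any // an existing CollectionImpl<any, any, any, any, any>
declare const ageIndex: any // a BaseIndex<any> suitable for ordered (range) reads

const subscription = new CollectionSubscription(
  collection,
  (changes) => {
    for (const change of changes) console.log(change.type, change.key)
  },
  { onUnsubscribe: () => console.log(`unsubscribed`) }
)

// Try an index-backed snapshot first; fall back to loading the full initial state.
if (!subscription.requestSnapshot({ optimizedOnly: true })) {
  subscription.requestSnapshot()
}

// Ordered, incremental loading requires a range index to be registered first.
subscription.setOrderByIndex(ageIndex)
subscription.requestLimitedSnapshot({ limit: 50 })

subscription.unsubscribe()

Per the declaration comments, requestSnapshot returns false rather than throwing when it cannot be served (for example, optimizedOnly with no usable index, or when the full state was already loaded), which is why the sketch falls back to an unrestricted call.
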
@@ -0,0 +1,160 @@
+ import { ensureIndexForExpression } from "../indexes/auto-index.js";
+ import { createFilteredCallback, createFilterFunctionFromExpression } from "./change-events.js";
+ import { and } from "../query/builder/functions.js";
+ class CollectionSubscription {
+ constructor(collection, callback, options) {
+ this.collection = collection;
+ this.callback = callback;
+ this.options = options;
+ this.loadedInitialState = false;
+ this.snapshotSent = false;
+ this.sentKeys = /* @__PURE__ */ new Set();
+ if (options.whereExpression) {
+ ensureIndexForExpression(options.whereExpression, this.collection);
+ }
+ const callbackWithSentKeysTracking = (changes) => {
+ callback(changes);
+ this.trackSentKeys(changes);
+ };
+ this.callback = callbackWithSentKeysTracking;
+ this.filteredCallback = options.whereExpression ? createFilteredCallback(this.callback, options) : this.callback;
+ }
+ setOrderByIndex(index) {
+ this.orderByIndex = index;
+ }
+ hasLoadedInitialState() {
+ return this.loadedInitialState;
+ }
+ hasSentAtLeastOneSnapshot() {
+ return this.snapshotSent;
+ }
+ emitEvents(changes) {
+ const newChanges = this.filterAndFlipChanges(changes);
+ this.filteredCallback(newChanges);
+ }
+ /**
+ * Sends the snapshot to the callback.
+ * Returns a boolean indicating if it succeeded.
+ * It can only fail if there is no index to fulfill the request
+ * and the optimizedOnly option is set to true,
+ * or, the entire state was already loaded.
+ */
+ requestSnapshot(opts) {
+ if (this.loadedInitialState) {
+ return false;
+ }
+ const stateOpts = {
+ where: this.options.whereExpression,
+ optimizedOnly: (opts == null ? void 0 : opts.optimizedOnly) ?? false
+ };
+ if (opts) {
+ if (`where` in opts) {
+ const snapshotWhereExp = opts.where;
+ if (stateOpts.where) {
+ const subWhereExp = stateOpts.where;
+ const combinedWhereExp = and(subWhereExp, snapshotWhereExp);
+ stateOpts.where = combinedWhereExp;
+ } else {
+ stateOpts.where = snapshotWhereExp;
+ }
+ }
+ } else {
+ this.loadedInitialState = true;
+ }
+ const snapshot = this.collection.currentStateAsChanges(stateOpts);
+ if (snapshot === void 0) {
+ return false;
+ }
+ const filteredSnapshot = snapshot.filter(
+ (change) => !this.sentKeys.has(change.key)
+ );
+ this.snapshotSent = true;
+ this.callback(filteredSnapshot);
+ return true;
+ }
+ /**
+ * Sends a snapshot that is limited to the first `limit` rows that fulfill the `where` clause and are bigger than `minValue`.
+ * Requires a range index to be set with `setOrderByIndex` prior to calling this method.
+ * It uses that range index to load the items in the order of the index.
+ * Note: it does not send keys that have already been sent before.
+ */
+ requestLimitedSnapshot({ limit, minValue }) {
+ if (!limit) throw new Error(`limit is required`);
+ if (!this.orderByIndex) {
+ throw new Error(
+ `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`
+ );
+ }
+ const index = this.orderByIndex;
+ const where = this.options.whereExpression;
+ const whereFilterFn = where ? createFilterFunctionFromExpression(where) : void 0;
+ const filterFn = (key) => {
+ if (this.sentKeys.has(key)) {
+ return false;
+ }
+ const value = this.collection.get(key);
+ if (value === void 0) {
+ return false;
+ }
+ return (whereFilterFn == null ? void 0 : whereFilterFn(value)) ?? true;
+ };
+ let biggestObservedValue = minValue;
+ const changes = [];
+ let keys = index.take(limit, minValue, filterFn);
+ const valuesNeeded = () => Math.max(limit - changes.length, 0);
+ const collectionExhausted = () => keys.length === 0;
+ while (valuesNeeded() > 0 && !collectionExhausted()) {
+ for (const key of keys) {
+ const value = this.collection.get(key);
+ changes.push({
+ type: `insert`,
+ key,
+ value
+ });
+ biggestObservedValue = value;
+ }
+ keys = index.take(valuesNeeded(), biggestObservedValue, filterFn);
+ }
+ this.callback(changes);
+ }
+ /**
+ * Filters and flips changes for keys that have not been sent yet.
+ * Deletes are filtered out for keys that have not been sent yet.
+ * Updates are flipped into inserts for keys that have not been sent yet.
+ */
+ filterAndFlipChanges(changes) {
+ if (this.loadedInitialState) {
+ return changes;
+ }
+ const newChanges = [];
+ for (const change of changes) {
+ let newChange = change;
+ if (!this.sentKeys.has(change.key)) {
+ if (change.type === `update`) {
+ newChange = { ...change, type: `insert`, previousValue: void 0 };
+ } else if (change.type === `delete`) {
+ continue;
+ }
+ this.sentKeys.add(change.key);
+ }
+ newChanges.push(newChange);
+ }
+ return newChanges;
+ }
+ trackSentKeys(changes) {
+ if (this.loadedInitialState) {
+ return;
+ }
+ for (const change of changes) {
+ this.sentKeys.add(change.key);
+ }
+ }
+ unsubscribe() {
+ var _a, _b;
+ (_b = (_a = this.options).onUnsubscribe) == null ? void 0 : _b.call(_a);
+ }
+ }
+ export {
+ CollectionSubscription
+ };
+ //# sourceMappingURL=subscription.js.map
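
The filter-and-flip rule documented above is easiest to see with concrete data. The following standalone TypeScript sketch restates the rule outside the class (it is not the package's code) and shows the expected output for a subscriber that has so far only been sent key `a`.

// Standalone restatement of the filter-and-flip rule, for illustration only.
type Change = {
  type: `insert` | `update` | `delete`
  key: string
  value?: unknown
  previousValue?: unknown
}

function filterAndFlip(changes: Array<Change>, sentKeys: Set<string>): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    let next = change
    if (!sentKeys.has(change.key)) {
      if (change.type === `update`) {
        // The subscriber never saw this key, so surface the change as a fresh insert.
        next = { ...change, type: `insert`, previousValue: undefined }
      } else if (change.type === `delete`) {
        continue // never sent downstream, so there is nothing to delete
      }
      sentKeys.add(change.key)
    }
    out.push(next)
  }
  return out
}

// With sentKeys = {"a"}:
//   update a -> update a (already sent, passed through unchanged)
//   update b -> insert b (flipped: b was never sent)
//   delete c -> dropped  (c was never sent)
console.log(
  filterAndFlip(
    [
      { type: `update`, key: `a`, value: 1, previousValue: 0 },
      { type: `update`, key: `b`, value: 2, previousValue: 1 },
      { type: `delete`, key: `c`, value: 3 },
    ],
    new Set([`a`])
  )
)
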
@@ -0,0 +1 @@
+ {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":[…
}\n}\n"],"names":[],"mappings":";;;AA4BO,MAAM,uBAAuB;AAAA,EAclC,YACU,YACA,UACA,SACR;AAHQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAhBV,SAAQ,qBAAqB;AAI7B,SAAQ,eAAe;AAGvB,SAAQ,+BAAe,IAAA;AAYrB,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAEA,gBAAgB,OAAuB;AACrC,SAAK,eAAe;AAAA,EACtB;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,gBAAe,6BAAM,kBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAEA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAG3C,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,uBAAuB,EAAE,OAAO,YAA2C;AACzE,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,cAAO,+CAAgB,WAAU;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAC5D,QAAI,OAA+B,MAAM,KAAK,OAAO,UAAU,QAAQ;AAEvE,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AAAA,MACzB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAEA,SAAK,SAAS,OAAO;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,oBAAoB;AAG3B,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,UAAI,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG,GAAG;AAClC,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,oBAAoB;AAG3B;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,cAAc;;AACZ,qBAAK,SAAQ,kBAAb;AAAA,EACF;AACF;"}
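For orientation, the subscription source embedded in the map above centres on sent-key bookkeeping: updates for keys the subscriber has never seen are flipped into inserts, and deletes for unseen keys are dropped. The following is a minimal illustrative sketch of that logic only (not part of the package; names are invented):

    // Illustrative-only restatement of filterAndFlipChanges from subscription.ts above.
    type Change<T> = {
      type: `insert` | `update` | `delete`
      key: string | number
      value: T
      previousValue?: T
    }

    function filterAndFlip<T>(
      changes: Array<Change<T>>,
      sentKeys: Set<string | number>
    ): Array<Change<T>> {
      const out: Array<Change<T>> = []
      for (const change of changes) {
        let next = change
        if (!sentKeys.has(change.key)) {
          if (change.type === `update`) {
            // The subscriber never saw this row, so present the update as an insert.
            next = { ...change, type: `insert`, previousValue: undefined }
          } else if (change.type === `delete`) {
            // Deleting a row the subscriber never saw is a no-op for it.
            continue
          }
          sentKeys.add(change.key)
        }
        out.push(next)
      }
      return out
    }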
@@ -0,0 +1,34 @@
+ import { StandardSchemaV1 } from '@standard-schema/spec';
+ import { CollectionConfig } from '../types.js';
+ import { CollectionImpl } from './index.js';
+ import { CollectionStateManager } from './state.js';
+ import { CollectionLifecycleManager } from './lifecycle.js';
+ export declare class CollectionSyncManager<TOutput extends object = Record<string, unknown>, TKey extends string | number = string | number, TSchema extends StandardSchemaV1 = StandardSchemaV1, TInput extends object = TOutput> {
+ private collection;
+ private state;
+ private lifecycle;
+ private config;
+ private id;
+ preloadPromise: Promise<void> | null;
+ syncCleanupFn: (() => void) | null;
+ /**
+ * Creates a new CollectionSyncManager instance
+ */
+ constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string);
+ setDeps(deps: {
+ collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>;
+ state: CollectionStateManager<TOutput, TKey, TSchema, TInput>;
+ lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>;
+ }): void;
+ /**
+ * Start the sync process for this collection
+ * This is called when the collection is first accessed or preloaded
+ */
+ startSync(): void;
+ /**
+ * Preload the collection data by starting sync if not already started
+ * Multiple concurrent calls will share the same promise
+ */
+ preload(): Promise<void>;
+ cleanup(): void;
+ }
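Per the declaration above, preload() is idempotent and concurrent callers share one in-flight promise. A hedged usage sketch (how the collection instance is created is elided/assumed):

    // Sketch only: assumes `collection` exposes preload() as declared above.
    const p1 = collection.preload()
    const p2 = collection.preload() // returns the same in-flight promise
    await Promise.all([p1, p2])     // resolves once the collection is first marked ready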
@@ -0,0 +1,154 @@
+ import { NoPendingSyncTransactionWriteError, SyncTransactionAlreadyCommittedWriteError, NoPendingSyncTransactionCommitError, SyncTransactionAlreadyCommittedError, DuplicateKeySyncError, CollectionIsInErrorStateError, SyncCleanupError } from "../errors.js";
+ class CollectionSyncManager {
+ /**
+ * Creates a new CollectionSyncManager instance
+ */
+ constructor(config, id) {
+ this.preloadPromise = null;
+ this.syncCleanupFn = null;
+ this.config = config;
+ this.id = id;
+ }
+ setDeps(deps) {
+ this.collection = deps.collection;
+ this.state = deps.state;
+ this.lifecycle = deps.lifecycle;
+ }
+ /**
+ * Start the sync process for this collection
+ * This is called when the collection is first accessed or preloaded
+ */
+ startSync() {
+ const state = this.state;
+ if (this.lifecycle.status !== `idle` && this.lifecycle.status !== `cleaned-up`) {
+ return;
+ }
+ this.lifecycle.setStatus(`loading`);
+ try {
+ const cleanupFn = this.config.sync.sync({
+ collection: this.collection,
+ begin: () => {
+ state.pendingSyncedTransactions.push({
+ committed: false,
+ operations: [],
+ deletedKeys: /* @__PURE__ */ new Set()
+ });
+ },
+ write: (messageWithoutKey) => {
+ const pendingTransaction = state.pendingSyncedTransactions[state.pendingSyncedTransactions.length - 1];
+ if (!pendingTransaction) {
+ throw new NoPendingSyncTransactionWriteError();
+ }
+ if (pendingTransaction.committed) {
+ throw new SyncTransactionAlreadyCommittedWriteError();
+ }
+ const key = this.config.getKey(messageWithoutKey.value);
+ if (messageWithoutKey.type === `insert`) {
+ const insertingIntoExistingSynced = state.syncedData.has(key);
+ const hasPendingDeleteForKey = pendingTransaction.deletedKeys.has(key);
+ const isTruncateTransaction = pendingTransaction.truncate === true;
+ if (insertingIntoExistingSynced && !hasPendingDeleteForKey && !isTruncateTransaction) {
+ throw new DuplicateKeySyncError(key, this.id);
+ }
+ }
+ const message = {
+ ...messageWithoutKey,
+ key
+ };
+ pendingTransaction.operations.push(message);
+ if (messageWithoutKey.type === `delete`) {
+ pendingTransaction.deletedKeys.add(key);
+ }
+ },
+ commit: () => {
+ const pendingTransaction = state.pendingSyncedTransactions[state.pendingSyncedTransactions.length - 1];
+ if (!pendingTransaction) {
+ throw new NoPendingSyncTransactionCommitError();
+ }
+ if (pendingTransaction.committed) {
+ throw new SyncTransactionAlreadyCommittedError();
+ }
+ pendingTransaction.committed = true;
+ if (this.lifecycle.status === `loading`) {
+ this.lifecycle.setStatus(`initialCommit`);
+ }
+ state.commitPendingTransactions();
+ },
+ markReady: () => {
+ this.lifecycle.markReady();
+ },
+ truncate: () => {
+ const pendingTransaction = state.pendingSyncedTransactions[state.pendingSyncedTransactions.length - 1];
+ if (!pendingTransaction) {
+ throw new NoPendingSyncTransactionWriteError();
+ }
+ if (pendingTransaction.committed) {
+ throw new SyncTransactionAlreadyCommittedWriteError();
+ }
+ pendingTransaction.operations = [];
+ pendingTransaction.deletedKeys.clear();
+ pendingTransaction.truncate = true;
+ }
+ });
+ this.syncCleanupFn = typeof cleanupFn === `function` ? cleanupFn : null;
+ } catch (error) {
+ this.lifecycle.setStatus(`error`);
+ throw error;
+ }
+ }
+ /**
+ * Preload the collection data by starting sync if not already started
+ * Multiple concurrent calls will share the same promise
+ */
+ preload() {
+ if (this.preloadPromise) {
+ return this.preloadPromise;
+ }
+ this.preloadPromise = new Promise((resolve, reject) => {
+ if (this.lifecycle.status === `ready`) {
+ resolve();
+ return;
+ }
+ if (this.lifecycle.status === `error`) {
+ reject(new CollectionIsInErrorStateError());
+ return;
+ }
+ this.lifecycle.onFirstReady(() => {
+ resolve();
+ });
+ if (this.lifecycle.status === `idle` || this.lifecycle.status === `cleaned-up`) {
+ try {
+ this.startSync();
+ } catch (error) {
+ reject(error);
+ return;
+ }
+ }
+ });
+ return this.preloadPromise;
+ }
+ cleanup() {
+ try {
+ if (this.syncCleanupFn) {
+ this.syncCleanupFn();
+ this.syncCleanupFn = null;
+ }
+ } catch (error) {
+ queueMicrotask(() => {
+ if (error instanceof Error) {
+ const wrappedError = new SyncCleanupError(this.id, error);
+ wrappedError.cause = error;
+ wrappedError.stack = error.stack;
+ throw wrappedError;
+ } else {
+ throw new SyncCleanupError(this.id, error);
+ }
+ });
+ }
+ this.preloadPromise = null;
+ }
+ }
+ export {
+ CollectionSyncManager
+ };
+ //# sourceMappingURL=sync.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"sync.js","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from \"../errors\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type { ChangeMessage, CollectionConfig } from \"../types\"\nimport type { CollectionImpl } from \"./index.js\"\nimport type { CollectionStateManager } from \"./state\"\nimport type { CollectionLifecycleManager } from \"./lifecycle\"\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n const state = this.state\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const cleanupFn = this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (messageWithoutKey: Omit<ChangeMessage<TOutput>, `key`>) => {\n const pendingTransaction =\n state.pendingSyncedTransactions[\n state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n const key = this.config.getKey(messageWithoutKey.value)\n\n // Check if an item with this key already exists when inserting\n if (messageWithoutKey.type === `insert`) {\n const insertingIntoExistingSynced = state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n throw new DuplicateKeySyncError(key, this.id)\n }\n }\n\n const message: ChangeMessage<TOutput> = {\n ...messageWithoutKey,\n key,\n }\n pendingTransaction.operations.push(message)\n\n if 
(messageWithoutKey.type === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n state.pendingSyncedTransactions[\n state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n // Update status to initialCommit when transitioning from loading\n // This indicates we're in the process of committing the first transaction\n if (this.lifecycle.status === `loading`) {\n this.lifecycle.setStatus(`initialCommit`)\n }\n\n state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n state.pendingSyncedTransactions[\n state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n },\n })\n\n // Store cleanup function if provided\n this.syncCleanupFn = typeof cleanupFn === `function` ? cleanupFn : null\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n 
}\n}\n"],"names":[],"mappings":";AAeO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAaA,YAAY,QAAkD,IAAY;AAN1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAM1C,SAAK,SAAS;AACd,SAAK,KAAK;AAAA,EACZ;AAAA,EAEA,QAAQ,MAIL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,UAAM,QAAQ,KAAK;AACnB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,YAAY,KAAK,OAAO,KAAK,KAAK;AAAA,QACtC,YAAY,KAAK;AAAA,QACjB,OAAO,MAAM;AACX,gBAAM,0BAA0B,KAAK;AAAA,YACnC,WAAW;AAAA,YACX,YAAY,CAAA;AAAA,YACZ,iCAAiB,IAAA;AAAA,UAAI,CACtB;AAAA,QACH;AAAA,QACA,OAAO,CAAC,sBAA2D;AACjE,gBAAM,qBACJ,MAAM,0BACJ,MAAM,0BAA0B,SAAS,CAC3C;AACF,cAAI,CAAC,oBAAoB;AACvB,kBAAM,IAAI,mCAAA;AAAA,UACZ;AACA,cAAI,mBAAmB,WAAW;AAChC,kBAAM,IAAI,0CAAA;AAAA,UACZ;AACA,gBAAM,MAAM,KAAK,OAAO,OAAO,kBAAkB,KAAK;AAGtD,cAAI,kBAAkB,SAAS,UAAU;AACvC,kBAAM,8BAA8B,MAAM,WAAW,IAAI,GAAG;AAC5D,kBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,kBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,gBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,oBAAM,IAAI,sBAAsB,KAAK,KAAK,EAAE;AAAA,YAC9C;AAAA,UACF;AAEA,gBAAM,UAAkC;AAAA,YACtC,GAAG;AAAA,YACH;AAAA,UAAA;AAEF,6BAAmB,WAAW,KAAK,OAAO;AAE1C,cAAI,kBAAkB,SAAS,UAAU;AACvC,+BAAmB,YAAY,IAAI,GAAG;AAAA,UACxC;AAAA,QACF;AAAA,QACA,QAAQ,MAAM;AACZ,gBAAM,qBACJ,MAAM,0BACJ,MAAM,0BAA0B,SAAS,CAC3C;AACF,cAAI,CAAC,oBAAoB;AACvB,kBAAM,IAAI,oCAAA;AAAA,UACZ;AACA,cAAI,mBAAmB,WAAW;AAChC,kBAAM,IAAI,qCAAA;AAAA,UACZ;AAEA,6BAAmB,YAAY;AAI/B,cAAI,KAAK,UAAU,WAAW,WAAW;AACvC,iBAAK,UAAU,UAAU,eAAe;AAAA,UAC1C;AAEA,gBAAM,0BAAA;AAAA,QACR;AAAA,QACA,WAAW,MAAM;AACf,eAAK,UAAU,UAAA;AAAA,QACjB;AAAA,QACA,UAAU,MAAM;AACd,gBAAM,qBACJ,MAAM,0BACJ,MAAM,0BAA0B,SAAS,CAC3C;AACF,cAAI,CAAC,oBAAoB;AACvB,kBAAM,IAAI,mCAAA;AAAA,UACZ;AACA,cAAI,mBAAmB,WAAW;AAChC,kBAAM,IAAI,0CAAA;AAAA,UACZ;AAGA,6BAAmB,aAAa,CAAA;AAChC,6BAAmB,YAAY,MAAA;AAO/B,6BAAmB,WAAW;AAAA,QAChC;AAAA,MAAA,CACD;AAGD,WAAK,gBAAgB,OAAO,cAAc,aAAa,YAAY;AAAA,IACrE,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,IAAI,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAI,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAI,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;"}
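As the startSync wiring above shows, the user-supplied sync function receives begin/write/commit/markReady/truncate callbacks and may return a cleanup function. A minimal, hypothetical sync config illustrating the call order (the endpoint, row shape, and config name are invented for the example):

    // Hypothetical sketch; callback shapes follow the object passed by
    // CollectionSyncManager.startSync above.
    type TodoRow = { id: string; text: string }

    const todosSyncConfig = {
      sync: ({ begin, write, commit, markReady }: {
        begin: () => void
        write: (msg: { type: `insert` | `update` | `delete`; value: TodoRow }) => void
        commit: () => void
        markReady: () => void
      }) => {
        void (async () => {
          const rows: Array<TodoRow> = await fetch(`/api/todos`).then((r) => r.json())
          begin()                                             // open a pending synced transaction
          for (const row of rows) write({ type: `insert`, value: row }) // key derived via config.getKey
          commit()                                            // commitPendingTransactions(); status -> initialCommit on first commit
          markReady()                                         // lifecycle.markReady()
        })()
        // Returning a function registers it as syncCleanupFn for cleanup().
        return () => {}
      },
    }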
@@ -1,4 +1,4 @@
- export * from './collection.js';
+ export * from './collection/index.js';
  export * from './SortedMap.js';
  export * from './transactions.js';
  export * from './types.js';
@@ -12,4 +12,4 @@ export * from './indexes/base-index.js';
  export * from './indexes/btree-index.js';
  export * from './indexes/lazy-index.js';
  export { type IndexOptions } from './indexes/index-options.js';
- export type { Collection } from './collection.js';
+ export type { Collection } from './collection/index.js';
package/dist/esm/index.js CHANGED
@@ -1,4 +1,4 @@
- import { CollectionImpl, createCollection } from "./collection.js";
+ import { CollectionImpl, createCollection } from "./collection/index.js";
  import { SortedMap } from "./SortedMap.js";
  import { createTransaction, getActiveTransaction } from "./transactions.js";
  import { createArrayChangeProxy, createChangeProxy, withArrayChangeTracking, withChangeTracking } from "./proxy.js";
@@ -1,5 +1,5 @@
  import { BasicExpression } from '../query/ir.js';
- import { CollectionImpl } from '../collection.js';
+ import { CollectionImpl } from '../collection/index.js';
  export interface AutoIndexConfig {
  autoIndex?: `off` | `eager`;
  }
@@ -1 +1 @@
- {"version":3,"file":"auto-index.js","sources":["../../../src/indexes/auto-index.ts"],"sourcesContent":["import { BTreeIndex } from \"./btree-index\"\nimport type { BasicExpression } from \"../query/ir\"\nimport type { CollectionImpl } from \"../collection\"\n\nexport interface AutoIndexConfig {\n autoIndex?: `off` | `eager`\n}\n\nfunction shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {\n // Only proceed if auto-indexing is enabled\n if (collection.config.autoIndex !== `eager`) {\n return false\n }\n\n // Don't auto-index during sync operations\n if (\n collection.status === `loading` ||\n collection.status === `initialCommit`\n ) {\n return false\n }\n\n return true\n}\n\nexport function ensureIndexForField<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n fieldName: string,\n fieldPath: Array<string>,\n collection: CollectionImpl<T, TKey, any, any, any>,\n compareFn?: (a: any, b: any) => number\n) {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Check if we already have an index for this field\n const existingIndex = Array.from(collection.indexes.values()).find((index) =>\n index.matchesField(fieldPath)\n )\n\n if (existingIndex) {\n return // Index already exists\n }\n\n // Create a new index for this field using the collection's createIndex method\n try {\n collection.createIndex((row) => (row as any)[fieldName], {\n name: `auto_${fieldName}`,\n indexType: BTreeIndex,\n options: compareFn ? { compareFn } : {},\n })\n } catch (error) {\n console.warn(`Failed to create auto-index for field \"${fieldName}\":`, error)\n }\n}\n\n/**\n * Analyzes a where expression and creates indexes for all simple operations on single fields\n */\nexport function ensureIndexForExpression<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n expression: BasicExpression,\n collection: CollectionImpl<T, TKey, any, any, any>\n): void {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Extract all indexable expressions and create indexes for them\n const indexableExpressions = extractIndexableExpressions(expression)\n\n for (const { fieldName, fieldPath } of indexableExpressions) {\n ensureIndexForField(fieldName, fieldPath, collection)\n }\n}\n\n/**\n * Extracts all indexable expressions from a where expression\n */\nfunction extractIndexableExpressions(\n expression: BasicExpression\n): Array<{ fieldName: string; fieldPath: Array<string> }> {\n const results: Array<{ fieldName: string; fieldPath: Array<string> }> = []\n\n function extractFromExpression(expr: BasicExpression): void {\n if (expr.type !== `func`) {\n return\n }\n\n const func = expr as any\n\n // Handle 'and' expressions by recursively processing all arguments\n if (func.name === `and`) {\n for (const arg of func.args) {\n extractFromExpression(arg)\n }\n return\n }\n\n // Check if this is a supported operation\n const supportedOperations = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`]\n if (!supportedOperations.includes(func.name)) {\n return\n }\n\n // Check if the first argument is a property reference (single field)\n if (func.args.length < 1 || func.args[0].type !== `ref`) {\n return\n }\n\n const fieldRef = func.args[0]\n const fieldPath = fieldRef.path\n\n // Skip if it's not a simple field (e.g., nested properties or array access)\n if (fieldPath.length !== 1) {\n return\n }\n\n const fieldName = fieldPath[0]\n results.push({ fieldName, fieldPath })\n }\n\n extractFromExpression(expression)\n return 
results\n}\n"],"names":[],"mappings":";AAQA,SAAS,gBAAgB,YAAqD;AAE5E,MAAI,WAAW,OAAO,cAAc,SAAS;AAC3C,WAAO;AAAA,EACT;AAGA,MACE,WAAW,WAAW,aACtB,WAAW,WAAW,iBACtB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,SAAS,oBAId,WACA,WACA,YACA,WACA;AACA,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,WAAW,QAAQ,OAAA,CAAQ,EAAE;AAAA,IAAK,CAAC,UAClE,MAAM,aAAa,SAAS;AAAA,EAAA;AAG9B,MAAI,eAAe;AACjB;AAAA,EACF;AAGA,MAAI;AACF,eAAW,YAAY,CAAC,QAAS,IAAY,SAAS,GAAG;AAAA,MACvD,MAAM,QAAQ,SAAS;AAAA,MACvB,WAAW;AAAA,MACX,SAAS,YAAY,EAAE,cAAc,CAAA;AAAA,IAAC,CACvC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,KAAK,0CAA0C,SAAS,MAAM,KAAK;AAAA,EAC7E;AACF;AAKO,SAAS,yBAId,YACA,YACM;AACN,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,uBAAuB,4BAA4B,UAAU;AAEnE,aAAW,EAAE,WAAW,UAAA,KAAe,sBAAsB;AAC3D,wBAAoB,WAAW,WAAW,UAAU;AAAA,EACtD;AACF;AAKA,SAAS,4BACP,YACwD;AACxD,QAAM,UAAkE,CAAA;AAExE,WAAS,sBAAsB,MAA6B;AAC1D,QAAI,KAAK,SAAS,QAAQ;AACxB;AAAA,IACF;AAEA,UAAM,OAAO;AAGb,QAAI,KAAK,SAAS,OAAO;AACvB,iBAAW,OAAO,KAAK,MAAM;AAC3B,8BAAsB,GAAG;AAAA,MAC3B;AACA;AAAA,IACF;AAGA,UAAM,sBAAsB,CAAC,MAAM,MAAM,OAAO,MAAM,OAAO,IAAI;AACjE,QAAI,CAAC,oBAAoB,SAAS,KAAK,IAAI,GAAG;AAC5C;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,KAAK,CAAC,EAAE,SAAS,OAAO;AACvD;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,KAAK,CAAC;AAC5B,UAAM,YAAY,SAAS;AAG3B,QAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,IACF;AAEA,UAAM,YAAY,UAAU,CAAC;AAC7B,YAAQ,KAAK,EAAE,WAAW,UAAA,CAAW;AAAA,EACvC;AAEA,wBAAsB,UAAU;AAChC,SAAO;AACT;"}
+ {"version":3,"file":"auto-index.js","sources":["../../../src/indexes/auto-index.ts"],"sourcesContent":["import { BTreeIndex } from \"./btree-index\"\nimport type { BasicExpression } from \"../query/ir\"\nimport type { CollectionImpl } from \"../collection/index.js\"\n\nexport interface AutoIndexConfig {\n autoIndex?: `off` | `eager`\n}\n\nfunction shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {\n // Only proceed if auto-indexing is enabled\n if (collection.config.autoIndex !== `eager`) {\n return false\n }\n\n // Don't auto-index during sync operations\n if (\n collection.status === `loading` ||\n collection.status === `initialCommit`\n ) {\n return false\n }\n\n return true\n}\n\nexport function ensureIndexForField<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n fieldName: string,\n fieldPath: Array<string>,\n collection: CollectionImpl<T, TKey, any, any, any>,\n compareFn?: (a: any, b: any) => number\n) {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Check if we already have an index for this field\n const existingIndex = Array.from(collection.indexes.values()).find((index) =>\n index.matchesField(fieldPath)\n )\n\n if (existingIndex) {\n return // Index already exists\n }\n\n // Create a new index for this field using the collection's createIndex method\n try {\n collection.createIndex((row) => (row as any)[fieldName], {\n name: `auto_${fieldName}`,\n indexType: BTreeIndex,\n options: compareFn ? { compareFn } : {},\n })\n } catch (error) {\n console.warn(`Failed to create auto-index for field \"${fieldName}\":`, error)\n }\n}\n\n/**\n * Analyzes a where expression and creates indexes for all simple operations on single fields\n */\nexport function ensureIndexForExpression<\n T extends Record<string, any>,\n TKey extends string | number,\n>(\n expression: BasicExpression,\n collection: CollectionImpl<T, TKey, any, any, any>\n): void {\n if (!shouldAutoIndex(collection)) {\n return\n }\n\n // Extract all indexable expressions and create indexes for them\n const indexableExpressions = extractIndexableExpressions(expression)\n\n for (const { fieldName, fieldPath } of indexableExpressions) {\n ensureIndexForField(fieldName, fieldPath, collection)\n }\n}\n\n/**\n * Extracts all indexable expressions from a where expression\n */\nfunction extractIndexableExpressions(\n expression: BasicExpression\n): Array<{ fieldName: string; fieldPath: Array<string> }> {\n const results: Array<{ fieldName: string; fieldPath: Array<string> }> = []\n\n function extractFromExpression(expr: BasicExpression): void {\n if (expr.type !== `func`) {\n return\n }\n\n const func = expr as any\n\n // Handle 'and' expressions by recursively processing all arguments\n if (func.name === `and`) {\n for (const arg of func.args) {\n extractFromExpression(arg)\n }\n return\n }\n\n // Check if this is a supported operation\n const supportedOperations = [`eq`, `gt`, `gte`, `lt`, `lte`, `in`]\n if (!supportedOperations.includes(func.name)) {\n return\n }\n\n // Check if the first argument is a property reference (single field)\n if (func.args.length < 1 || func.args[0].type !== `ref`) {\n return\n }\n\n const fieldRef = func.args[0]\n const fieldPath = fieldRef.path\n\n // Skip if it's not a simple field (e.g., nested properties or array access)\n if (fieldPath.length !== 1) {\n return\n }\n\n const fieldName = fieldPath[0]\n results.push({ fieldName, fieldPath })\n }\n\n extractFromExpression(expression)\n return 
results\n}\n"],"names":[],"mappings":";AAQA,SAAS,gBAAgB,YAAqD;AAE5E,MAAI,WAAW,OAAO,cAAc,SAAS;AAC3C,WAAO;AAAA,EACT;AAGA,MACE,WAAW,WAAW,aACtB,WAAW,WAAW,iBACtB;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,SAAS,oBAId,WACA,WACA,YACA,WACA;AACA,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,WAAW,QAAQ,OAAA,CAAQ,EAAE;AAAA,IAAK,CAAC,UAClE,MAAM,aAAa,SAAS;AAAA,EAAA;AAG9B,MAAI,eAAe;AACjB;AAAA,EACF;AAGA,MAAI;AACF,eAAW,YAAY,CAAC,QAAS,IAAY,SAAS,GAAG;AAAA,MACvD,MAAM,QAAQ,SAAS;AAAA,MACvB,WAAW;AAAA,MACX,SAAS,YAAY,EAAE,cAAc,CAAA;AAAA,IAAC,CACvC;AAAA,EACH,SAAS,OAAO;AACd,YAAQ,KAAK,0CAA0C,SAAS,MAAM,KAAK;AAAA,EAC7E;AACF;AAKO,SAAS,yBAId,YACA,YACM;AACN,MAAI,CAAC,gBAAgB,UAAU,GAAG;AAChC;AAAA,EACF;AAGA,QAAM,uBAAuB,4BAA4B,UAAU;AAEnE,aAAW,EAAE,WAAW,UAAA,KAAe,sBAAsB;AAC3D,wBAAoB,WAAW,WAAW,UAAU;AAAA,EACtD;AACF;AAKA,SAAS,4BACP,YACwD;AACxD,QAAM,UAAkE,CAAA;AAExE,WAAS,sBAAsB,MAA6B;AAC1D,QAAI,KAAK,SAAS,QAAQ;AACxB;AAAA,IACF;AAEA,UAAM,OAAO;AAGb,QAAI,KAAK,SAAS,OAAO;AACvB,iBAAW,OAAO,KAAK,MAAM;AAC3B,8BAAsB,GAAG;AAAA,MAC3B;AACA;AAAA,IACF;AAGA,UAAM,sBAAsB,CAAC,MAAM,MAAM,OAAO,MAAM,OAAO,IAAI;AACjE,QAAI,CAAC,oBAAoB,SAAS,KAAK,IAAI,GAAG;AAC5C;AAAA,IACF;AAGA,QAAI,KAAK,KAAK,SAAS,KAAK,KAAK,KAAK,CAAC,EAAE,SAAS,OAAO;AACvD;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,KAAK,CAAC;AAC5B,UAAM,YAAY,SAAS;AAG3B,QAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,IACF;AAEA,UAAM,YAAY,UAAU,CAAC;AAC7B,YAAQ,KAAK,EAAE,WAAW,UAAA,CAAW;AAAA,EACvC;AAEA,wBAAsB,UAAU;AAChC,SAAO;AACT;"}
@@ -1,5 +1,5 @@
  import { comparisonFunctions } from '../query/builder/functions.js';
- import { BasicExpression, OrderByDirection } from '../query/ir.js';
+ import { BasicExpression } from '../query/ir.js';
  /**
  * Operations that indexes can support, imported from available comparison functions
  */
@@ -35,7 +35,7 @@ export declare abstract class BaseIndex<TKey extends string | number = string |
  abstract build(entries: Iterable<[TKey, any]>): void;
  abstract clear(): void;
  abstract lookup(operation: IndexOperation, value: any): Set<TKey>;
- abstract take(n: number, direction?: OrderByDirection, from?: TKey): Array<TKey>;
+ abstract take(n: number, from?: TKey, filterFn?: (key: TKey) => boolean): Array<TKey>;
  abstract get keyCount(): number;
  supports(operation: IndexOperation): boolean;
  matchesField(fieldPath: Array<string>): boolean;
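The take() signature change above (direction dropped, an optional filter added) lines up with how requestLimitedSnapshot pages through an ordered index. A simplified paging sketch, with the index interface reduced to what the diff shows and all other names invented:

    // Simplified paging loop in the spirit of requestLimitedSnapshot; not the library's code.
    interface OrderedIndex<K extends string | number> {
      take: (n: number, from?: K, filterFn?: (key: K) => boolean) => Array<K>
    }

    function takePage<K extends string | number>(
      index: OrderedIndex<K>,
      limit: number,
      alreadySent: Set<K>,
      getValue: (key: K) => unknown
    ): Array<K> {
      const collected: Array<K> = []
      // Skip keys the subscriber has already seen or that no longer exist.
      const filterFn = (key: K) => !alreadySent.has(key) && getValue(key) !== undefined
      let from: K | undefined
      let keys = index.take(limit, from, filterFn)
      while (collected.length < limit && keys.length > 0) {
        collected.push(...keys)
        from = keys[keys.length - 1]                       // resume after the last key returned
        keys = index.take(limit - collected.length, from, filterFn)
      }
      return collected
    }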