@tanstack/db 0.5.14 → 0.5.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,7 @@ const autoIndex = require("../indexes/auto-index.cjs");
  const functions = require("../query/builder/functions.cjs");
  const ir = require("../query/ir.cjs");
  const eventEmitter = require("../event-emitter.cjs");
+ const evaluators = require("../query/compiler/evaluators.cjs");
  const cursor = require("../utils/cursor.cjs");
  const changeEvents = require("./change-events.cjs");
  class CollectionSubscription extends eventEmitter.EventEmitter {
@@ -287,6 +288,8 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  }
  const valuesNeeded = () => Math.max(limit - changes.length, 0);
  const collectionExhausted = () => keys.length === 0;
+ const orderByExpression = orderBy[0].expression;
+ const valueExtractor = orderByExpression.type === `ref` ? evaluators.compileExpression(new ir.PropRef(orderByExpression.path), true) : null;
  while (valuesNeeded() > 0 && !collectionExhausted()) {
  const insertedKeys = /* @__PURE__ */ new Set();
  for (const key of keys) {
@@ -296,7 +299,7 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  key,
  value
  });
- biggestObservedValue = value;
+ biggestObservedValue = valueExtractor ? valueExtractor(value) : value;
  insertedKeys.add(key);
  }
  keys = index.take(valuesNeeded(), biggestObservedValue, filterFn);
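The functional change in this release is in CollectionSubscription.requestLimitedSnapshot: the pagination loop previously fed the whole row back into index.take() as the cursor (biggestObservedValue = value), while the ordered index compares indexed column values. 0.5.16 compiles a value extractor from the first orderBy expression (evaluators.compileExpression(new ir.PropRef(...), true) when the expression is a `ref`) and tracks the extracted column value instead, falling back to the raw value for non-ref expressions. A minimal TypeScript sketch of the idea, using a hypothetical getPath helper in place of the library's compiled evaluator:

// Sketch only: shows why the cursor passed back to the ordered index must be
// the orderBy column value, not the whole row. `getPath` is a hypothetical
// stand-in for the compiled `ref` evaluator used in the real diff.
type Row = Record<string, unknown>

const getPath =
  (path: Array<string>) =>
  (row: Row): unknown =>
    path.reduce<unknown>((acc, key) => (acc as Row | undefined)?.[key], row)

// Assume an orderBy on `salary`, i.e. a `ref` expression with path ["salary"].
const valueExtractor = getPath([`salary`])

const rows: Array<Row> = [
  { id: 1, salary: 50_000 },
  { id: 2, salary: 60_000 },
]

let biggestObservedValue: unknown
for (const row of rows) {
  // Before 0.5.16: biggestObservedValue = row (a full object that the index's
  // comparator cannot order against indexed column values).
  // After 0.5.16: only the indexed value is tracked.
  biggestObservedValue = valueExtractor(row)
}
// biggestObservedValue === 60_000, a usable cursor for index.take(n, cursor, filterFn)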
@@ -1 +1 @@
- {"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches complete\n // This prevents a flash of missing content between deletes and 
new inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested commits.\n // This ensures correct event ordering: deletes first, then 
inserts.\n queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if (newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else 
{\n this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n 
throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n 
this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. 
We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["EventEmitter","ensureIndexForExpression","createFilteredCallback","and","createFilterFunctionFromExpression","eq","Value","buildCursor","minValue","gte","lt"],"mappings":";;;;;;;;AAgDO,MAAM,+BACHA,aAAAA,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3BC,gBAAAA,yBAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5BC,aAAAA,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmBC,UAAAA,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;
AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClBC,gDAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAOC,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkBC,OAAAA,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMC,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqBL,UAAAA;AAAAA,YACnBM,UAAAA,IAAI,YAAY,IAAIH,GAAAA,MAAME,SAAQ,CAAC;AAAA,YACnCE,UAAAA,GAAG,YAAY,IAAIJ,GAAAA,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqBD,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAME,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO
,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
+ {"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { PropRef, Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { compileExpression } from '../query/compiler/evaluators.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches 
complete\n // This prevents a flash of missing content between deletes and new inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested 
commits.\n // This ensures correct event ordering: deletes first, then inserts.\n queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if 
(newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else {\n this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if 
(!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n // Create a value extractor for the orderBy field to properly track the biggest indexed value\n const orderByExpression = orderBy[0]!.expression\n const valueExtractor =\n orderByExpression.type === `ref`\n ? compileExpression(new PropRef(orderByExpression.path), true)\n : null\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n // Extract the indexed value (e.g., salary) from the row, not the full row\n // This is needed for index.take() to work correctly with the BTree comparator\n biggestObservedValue = valueExtractor ? 
valueExtractor(value) : value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n 
this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n }\n}\n"],"names":["EventEmitter","ensureIndexForExpression","createFilteredCallback","and","createFilterFunctionFromExpression","eq","Value","compileExpression","PropRef","buildCursor","minValue","gte","lt"],"mappings":";;;;;;;;;AAiDO,MAAM,+BACHA,aAAAA,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3BC,gBAAAA,yBAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5BC,aAAAA,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,
UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmBC,UAAAA,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClBC,gDAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAOC,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAGlD,UAAM,oBAAoB,QAAQ,CAAC,EAAG;AACtC,UAAM,iBACJ,kBAAkB,SAAS,QACvBC,WAAAA,kBAAkB,IAAIC,GAAAA,QAAQ,kBAAkB,IAAI,GAAG,IAAI,IAC3D;AAEN,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AAGD,+BAAuB,iBAAiB,eAAe,KAAK,IAAI;AAChE,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkBC,OAAAA,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMC,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqBP,UAAAA;AAAAA,YACnBQ,UAAAA,IAAI,YAAY,IAAIL,GAAAA,MAAMI,SAAQ,CAAC;AAAA,YACnCE,UAAAA,GAAG,YAAY,IAAIN,GAAAA,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqBD,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAMI,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qB
AAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
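The TypeScript source embedded in the subscription sourcemap above spells out the `filterAndFlipChanges` rules: for keys that have not been sent yet, updates are flipped into inserts and deletes are dropped; for keys that have been sent, duplicate inserts are dropped and a delete clears the key from `sentKeys` so a later re-insert is allowed. A compact sketch of that rule, ignoring the truncate-buffering special case and using a simplified change shape (not the library's real `ChangeMessage` type):

```ts
// Sketch of the filter-and-flip rule described in the embedded source above.
type Change = {
  type: 'insert' | 'update' | 'delete'
  key: string | number
  value: unknown
  previousValue?: unknown
}

function filterAndFlip(changes: Array<Change>, sentKeys: Set<string | number>): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    let next: Change = change
    if (!sentKeys.has(change.key)) {
      if (change.type === 'delete') continue // never sent: nothing to delete downstream
      if (change.type === 'update') {
        // Flip the update into an insert, since the subscriber has never seen this key
        next = { ...change, type: 'insert', previousValue: undefined }
      }
      sentKeys.add(change.key)
    } else {
      if (change.type === 'insert') continue // duplicate insert: keep multiplicity at 1
      if (change.type === 'delete') sentKeys.delete(change.key) // allow a later re-insert
    }
    out.push(next)
  }
  return out
}
```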
@@ -13,6 +13,7 @@ class CollectionSubscriber {
13
13
  this.collectionConfigBuilder = collectionConfigBuilder;
14
14
  this.biggest = void 0;
15
15
  this.subscriptionLoadingPromises = /* @__PURE__ */ new Map();
16
+ this.sentToD2Keys = /* @__PURE__ */ new Set();
16
17
  }
17
18
  subscribe() {
18
19
  const whereClause = this.getWhereClauseForAlias();
@@ -82,10 +83,23 @@ class CollectionSubscriber {
82
83
  return subscription;
83
84
  }
84
85
  sendChangesToPipeline(changes, callback) {
86
+ const changesArray = Array.isArray(changes) ? changes : [...changes];
87
+ const filteredChanges = [];
88
+ for (const change of changesArray) {
89
+ if (change.type === `insert`) {
90
+ if (this.sentToD2Keys.has(change.key)) {
91
+ continue;
92
+ }
93
+ this.sentToD2Keys.add(change.key);
94
+ } else if (change.type === `delete`) {
95
+ this.sentToD2Keys.delete(change.key);
96
+ }
97
+ filteredChanges.push(change);
98
+ }
85
99
  const input = this.collectionConfigBuilder.currentSyncState.inputs[this.alias];
86
100
  const sentChanges = sendChangesToInput(
87
101
  input,
88
- changes,
102
+ filteredChanges,
89
103
  this.collection.config.getKey
90
104
  );
91
105
  const dataLoader = sentChanges > 0 ? callback : void 0;
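The hunks above add a `sentToD2Keys` set to `CollectionSubscriber` and filter each change batch before it reaches `sendChangesToInput`: a second insert for a key that is already live is dropped, and a delete removes the key so it can be inserted again later. A minimal standalone sketch of that pass, with a simplified change shape standing in for the library's `ChangeMessage`:

```ts
// Minimal sketch of the insert-dedup pass now performed in sendChangesToPipeline.
type Change = { type: 'insert' | 'update' | 'delete'; key: string | number; value: unknown }

const sentToD2Keys = new Set<string | number>()

function filterForD2(changes: Iterable<Change>): Array<Change> {
  const filtered: Array<Change> = []
  for (const change of changes) {
    if (change.type === 'insert') {
      if (sentToD2Keys.has(change.key)) continue // this key is already live in the pipeline
      sentToD2Keys.add(change.key)
    } else if (change.type === 'delete') {
      sentToD2Keys.delete(change.key) // a later insert of the same key is allowed again
    }
    filtered.push(change)
  }
  return filtered
}
```

Per the comments in the new embedded source below, the point is to keep the D2 multiplicity for a visible key at 1: a duplicate insert would push it to 2, so a subsequent delete would only bring it back to 1 and the row would never leave the live result.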
@@ -98,7 +112,7 @@ class CollectionSubscriber {
98
112
  this.sendChangesToPipeline(changes);
99
113
  };
100
114
  const subscription = this.collection.subscribeChanges(sendChanges, {
101
- includeInitialState,
115
+ ...includeInitialState && { includeInitialState },
102
116
  whereExpression
103
117
  });
104
118
  return subscription;
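This hunk switches to a conditional object spread so `includeInitialState` is only present on the options object when it is true; an explicit `false` is never passed. A small sketch of the pattern, using a simplified stand-in options type rather than the library's real `subscribeChanges` signature:

```ts
// The flag is spread in only when truthy; when false, the key is simply absent.
type Opts = { includeInitialState?: boolean; whereExpression?: unknown }

function buildOpts(includeInitialState: boolean, whereExpression?: unknown): Opts {
  return {
    ...(includeInitialState && { includeInitialState }),
    whereExpression,
  }
}

// buildOpts(true)  -> { includeInitialState: true, whereExpression: undefined }
// buildOpts(false) -> { whereExpression: undefined }
```

The rationale given in the embedded source below is that passing an explicit `false` would mark all state as seen and disable change filtering, which internal lazy-loading subscriptions still depend on.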
@@ -114,6 +128,7 @@ class CollectionSubscriber {
114
128
  });
115
129
  const truncateUnsubscribe = this.collection.on(`truncate`, () => {
116
130
  this.biggest = void 0;
131
+ this.sentToD2Keys.clear();
117
132
  });
118
133
  subscription.on(`unsubscribed`, () => {
119
134
  truncateUnsubscribe();
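Here the `truncate` handler also clears the new `sentToD2Keys` set alongside resetting `biggest`, so rows re-synced after a must-refetch are not dropped as duplicates. A sketch of that reset wiring, assuming a simplified emitter signature (the real collection's `on` returns an unsubscribe function, as the diff shows):

```ts
// Sketch of the truncate reset with a simplified emitter shape.
type Off = () => void
type On = (event: 'truncate', handler: () => void) => Off

function resetCursorStateOnTruncate(
  on: On,
  state: { biggest: unknown; sentToD2Keys: Set<string | number> },
): Off {
  return on('truncate', () => {
    state.biggest = undefined // forget the orderBy cursor
    state.sentToD2Keys.clear() // previously sent keys may be re-synced after the refetch
  })
}
```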
@@ -1 +1 @@
1
- {"version":3,"file":"collection-subscriber.cjs","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? 
callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":["normalizeExpressionPaths","normalizeOrderByPaths","MultiSet"],"mappings":";;;;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAUA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAZV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAAA,EAUvC;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkBA,YAAAA,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAGA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAEA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE;AAAA,MACA;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAID,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AAAA,IACjB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoBC,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoBA,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,
uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAIC,MAAAA,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;"}
1
+ {"version":3,"file":"collection-subscriber.cjs","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n // Track keys that have been sent to the D2 pipeline to prevent duplicate inserts\n // This is necessary because different code paths (initial load, change events)\n // can potentially send the same item to D2 multiple times.\n private sentToD2Keys = new Set<string | number>()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // Filter changes to prevent duplicate inserts to D2 pipeline.\n // This ensures D2 multiplicity stays at 1 for visible items, so deletes\n // properly reduce multiplicity to 0 (triggering DELETE output).\n const changesArray = Array.isArray(changes) ? changes : [...changes]\n const filteredChanges: Array<ChangeMessage<any, string | number>> = []\n for (const change of changesArray) {\n if (change.type === `insert`) {\n if (this.sentToD2Keys.has(change.key)) {\n // Skip duplicate insert - already sent to D2\n continue\n }\n this.sentToD2Keys.add(change.key)\n } else if (change.type === `delete`) {\n // Remove from tracking so future re-inserts are allowed\n this.sentToD2Keys.delete(change.key)\n }\n // Updates are handled as delete+insert by splitUpdates, so no special handling needed\n filteredChanges.push(change)\n }\n\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n filteredChanges,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n // Only pass includeInitialState when true. 
When it's false, we leave it\n // undefined so that user subscriptions with explicit `includeInitialState: false`\n // can be distinguished from internal lazy-loading subscriptions.\n // If we pass `false`, changes.ts would call markAllStateAsSeen() which\n // disables filtering - but internal subscriptions still need filtering.\n const subscription = this.collection.subscribeChanges(sendChanges, {\n ...(includeInitialState && { includeInitialState }),\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state and sentToD2Keys\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n // and allow re-inserts of previously sent keys\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n this.sentToD2Keys.clear()\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":["normalizeExpressionPaths","normalizeOrderByPaths","MultiSet"],"mappings":";;;;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAeA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAjBV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAQ1C,SAAQ,mCAAmB,IAAA;AAAA,EAOxB;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkBA,YAAAA,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAIA,UAAM,eAAe,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,GAAG,OAAO;AACnE,UAAM,kBAA8D,CAAA;AACpE,eAAW,UAAU,cAAc;AACjC,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,KAAK,aAAa,IAAI,OAAO,GAAG,GAAG;AAErC;AAAA,QACF;AACA,aAAK,aAAa,IAAI,OAAO,GAAG;AAAA,MAClC,WAAW,OAAO,SAAS,UAAU;AAEnC,aAAK,aAAa,OAAO,OAAO,GAAG;AAAA,MACrC;AAEA,sBAAgB,KAAK,MAAM;AAAA,IAC7B;AAIA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAOA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE,GAAI,uBAAuB,EAAE,oBAAA;AAAA,MAC7B;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAKD,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AACf,WAAK,aAAa,MAAA;AAAA,IACpB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoBC,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;
AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoBA,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAIC,MAAAA,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;"}
@@ -9,6 +9,7 @@ export declare class CollectionSubscriber<TContext extends Context, TResult exte
9
9
  private collectionConfigBuilder;
10
10
  private biggest;
11
11
  private subscriptionLoadingPromises;
12
+ private sentToD2Keys;
12
13
  constructor(alias: string, collectionId: string, collection: Collection, collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>);
13
14
  subscribe(): CollectionSubscription;
14
15
  private subscribeToChanges;
@@ -1,7 +1,8 @@
1
1
  import { ensureIndexForExpression } from "../indexes/auto-index.js";
2
2
  import { and, eq, lt, gte } from "../query/builder/functions.js";
3
- import { Value } from "../query/ir.js";
3
+ import { Value, PropRef } from "../query/ir.js";
4
4
  import { EventEmitter } from "../event-emitter.js";
5
+ import { compileExpression } from "../query/compiler/evaluators.js";
5
6
  import { buildCursor } from "../utils/cursor.js";
6
7
  import { createFilteredCallback, createFilterFunctionFromExpression } from "./change-events.js";
7
8
  class CollectionSubscription extends EventEmitter {
@@ -285,6 +286,8 @@ class CollectionSubscription extends EventEmitter {
285
286
  }
286
287
  const valuesNeeded = () => Math.max(limit - changes.length, 0);
287
288
  const collectionExhausted = () => keys.length === 0;
289
+ const orderByExpression = orderBy[0].expression;
290
+ const valueExtractor = orderByExpression.type === `ref` ? compileExpression(new PropRef(orderByExpression.path), true) : null;
288
291
  while (valuesNeeded() > 0 && !collectionExhausted()) {
289
292
  const insertedKeys = /* @__PURE__ */ new Set();
290
293
  for (const key of keys) {
@@ -294,7 +297,7 @@ class CollectionSubscription extends EventEmitter {
294
297
  key,
295
298
  value
296
299
  });
297
- biggestObservedValue = value;
300
+ biggestObservedValue = valueExtractor ? valueExtractor(value) : value;
298
301
  insertedKeys.add(key);
299
302
  }
300
303
  keys = index.take(valuesNeeded(), biggestObservedValue, filterFn);
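The last two hunks change how `requestLimitedSnapshot` advances its cursor: when the first orderBy expression is a `ref`, a value extractor is compiled once via `compileExpression(new PropRef(path), true)` and `biggestObservedValue` becomes the extracted column value instead of the whole row before being handed back to `index.take`. A minimal sketch of the idea, where `getValueAtPath` is a hypothetical helper standing in for the compiled expression:

```ts
// Sketch: advance the local index cursor by the orderBy column value, not by the row.
type Row = Record<string, unknown>

// Hypothetical stand-in for the extractor compiled from a PropRef.
function getValueAtPath(row: Row, path: Array<string>): unknown {
  return path.reduce<unknown>((acc, key) => (acc == null ? undefined : (acc as Row)[key]), row)
}

function nextCursor(rows: Array<Row>, orderByPath: Array<string> | null): unknown {
  let biggestObservedValue: unknown
  for (const row of rows) {
    // With a plain ref orderBy, track the extracted column value; otherwise keep the old behaviour.
    biggestObservedValue = orderByPath ? getValueAtPath(row, orderByPath) : row
  }
  return biggestObservedValue // then: index.take(valuesNeeded(), biggestObservedValue, filterFn)
}
```

This suggests the index cursor is keyed on the indexed orderBy column, so storing the raw row (the previous behaviour) could make `index.take` resume from the wrong position; orderBy expressions that are not simple refs fall back to the old behaviour.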
@@ -1 +1 @@
1
- {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches complete\n // This prevents a flash of missing content between deletes and new 
inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested commits.\n // This ensures correct event ordering: deletes first, then inserts.\n 
queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if (newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else {\n 
this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw 
new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n 
this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. 
We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,w
BAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI
,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
1
+ {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { PropRef, Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { compileExpression } from '../query/compiler/evaluators.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches 
complete\n // This prevents a flash of missing content between deletes and new inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested 
commits.\n // This ensures correct event ordering: deletes first, then inserts.\n queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if 
(newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else {\n this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if 
(!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n // Create a value extractor for the orderBy field to properly track the biggest indexed value\n const orderByExpression = orderBy[0]!.expression\n const valueExtractor =\n orderByExpression.type === `ref`\n ? compileExpression(new PropRef(orderByExpression.path), true)\n : null\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n // Extract the indexed value (e.g., salary) from the row, not the full row\n // This is needed for index.take() to work correctly with the BTree comparator\n biggestObservedValue = valueExtractor ? 
valueExtractor(value) : value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n 
this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n }\n}\n"],"names":["minValue"],"mappings":";;;;;;;AAiDO,MAAM,+BACH,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;
AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAGlD,UAAM,oBAAoB,QAAQ,CAAC,EAAG;AACtC,UAAM,iBACJ,kBAAkB,SAAS,QACvB,kBAAkB,IAAI,QAAQ,kBAAkB,IAAI,GAAG,IAAI,IAC3D;AAEN,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AAGD,+BAAuB,iBAAiB,eAAe,KAAK,IAAI;AAChE,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WA
AW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
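The updated source map above embeds the new `subscription.ts` source, whose functional change is in `requestLimitedSnapshot`: the pagination cursor `biggestObservedValue` is now the extracted orderBy column value (via `compileExpression(new PropRef(orderByExpression.path), true)`) rather than the full row returned by `collection.get(key)`. Below is a minimal TypeScript sketch of why the cursor has to be the indexed value; `Row`, `takeGreaterThan`, and the inline `valueExtractor` are illustrative stand-ins, not the package's index API.

```ts
// Sketch only: rows ordered by `salary`, standing in for the collection's range index.
type Row = { id: number; salary: number }

const rowsBySalary: Array<Row> = [
  { id: 1, salary: 100 },
  { id: 2, salary: 200 },
  { id: 3, salary: 300 },
]

// Hypothetical stand-in for index.take(n, biggestObservedValue, filterFn):
// returns up to n rows whose indexed value is strictly greater than `after`.
function takeGreaterThan(n: number, after: number | undefined): Array<Row> {
  return rowsBySalary
    .filter((row) => after === undefined || row.salary > after)
    .slice(0, n)
}

// Hypothetical stand-in for compileExpression(new PropRef([`salary`]), true).
const valueExtractor = (row: Row): number => row.salary

let biggestObservedValue: number | undefined
for (const row of takeGreaterThan(2, biggestObservedValue)) {
  // Previously the whole row object was stored as the cursor, which a comparator
  // over the indexed column cannot order against; extracting `salary` keeps the
  // cursor comparable so the next take() resumes from the right position.
  biggestObservedValue = valueExtractor(row)
}

console.log(takeGreaterThan(2, biggestObservedValue)) // -> [{ id: 3, salary: 300 }]
```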
@@ -9,6 +9,7 @@ export declare class CollectionSubscriber<TContext extends Context, TResult exte
  private collectionConfigBuilder;
  private biggest;
  private subscriptionLoadingPromises;
+ private sentToD2Keys;
  constructor(alias: string, collectionId: string, collection: Collection, collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>);
  subscribe(): CollectionSubscription;
  private subscribeToChanges;
@@ -11,6 +11,7 @@ class CollectionSubscriber {
  this.collectionConfigBuilder = collectionConfigBuilder;
  this.biggest = void 0;
  this.subscriptionLoadingPromises = /* @__PURE__ */ new Map();
+ this.sentToD2Keys = /* @__PURE__ */ new Set();
  }
  subscribe() {
  const whereClause = this.getWhereClauseForAlias();
@@ -80,10 +81,23 @@ class CollectionSubscriber {
  return subscription;
  }
  sendChangesToPipeline(changes, callback) {
+ const changesArray = Array.isArray(changes) ? changes : [...changes];
+ const filteredChanges = [];
+ for (const change of changesArray) {
+ if (change.type === `insert`) {
+ if (this.sentToD2Keys.has(change.key)) {
+ continue;
+ }
+ this.sentToD2Keys.add(change.key);
+ } else if (change.type === `delete`) {
+ this.sentToD2Keys.delete(change.key);
+ }
+ filteredChanges.push(change);
+ }
  const input = this.collectionConfigBuilder.currentSyncState.inputs[this.alias];
  const sentChanges = sendChangesToInput(
  input,
- changes,
+ filteredChanges,
  this.collection.config.getKey
  );
  const dataLoader = sentChanges > 0 ? callback : void 0;
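The compiled change above adds key-based deduplication before changes are handed to the D2 input. A minimal standalone sketch of that filtering step, with a simplified `Change` shape rather than the package's real `ChangeMessage` type:

```ts
// Simplified sketch of the dedup step (illustration only; the real
// ChangeMessage type lives in the package's types.ts).
type Change = { type: 'insert' | 'update' | 'delete'; key: string | number; value: unknown }

function dedupeInserts(changes: Iterable<Change>, sentKeys: Set<string | number>): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    if (change.type === `insert`) {
      if (sentKeys.has(change.key)) continue // this key was already forwarded once
      sentKeys.add(change.key)
    } else if (change.type === `delete`) {
      sentKeys.delete(change.key) // a later re-insert of this key is allowed again
    }
    out.push(change)
  }
  return out
}

const sent = new Set<string | number>()
console.log(dedupeInserts([{ type: `insert`, key: 1, value: `a` }], sent).length) // 1 – forwarded
console.log(dedupeInserts([{ type: `insert`, key: 1, value: `a` }], sent).length) // 0 – duplicate dropped
console.log(dedupeInserts([{ type: `delete`, key: 1, value: `a` }], sent).length) // 1 – delete forwarded, key cleared
```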
@@ -96,7 +110,7 @@ class CollectionSubscriber {
  this.sendChangesToPipeline(changes);
  };
  const subscription = this.collection.subscribeChanges(sendChanges, {
- includeInitialState,
+ ...includeInitialState && { includeInitialState },
  whereExpression
  });
  return subscription;
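The `...includeInitialState && { includeInitialState }` rewrite relies on the fact that spreading a falsy value into an object literal contributes nothing, so the option key only exists when the flag is true. A small illustration with a hypothetical `buildOptions` helper:

```ts
// Hypothetical helper showing the conditional-spread idiom: spreading a falsy
// value into an object literal is a no-op, so the key only appears when true.
function buildOptions(includeInitialState: boolean, whereExpression?: unknown) {
  return {
    ...(includeInitialState && { includeInitialState }),
    whereExpression,
  }
}

console.log(buildOptions(true))  // { includeInitialState: true, whereExpression: undefined }
console.log(buildOptions(false)) // { whereExpression: undefined } – no includeInitialState key at all
```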
@@ -112,6 +126,7 @@ class CollectionSubscriber {
  });
  const truncateUnsubscribe = this.collection.on(`truncate`, () => {
  this.biggest = void 0;
+ this.sentToD2Keys.clear();
  });
  subscription.on(`unsubscribed`, () => {
  truncateUnsubscribe();
@@ -1 +1 @@
- {"version":3,"file":"collection-subscriber.js","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? 
callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":[],"mappings":";;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAUA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAZV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAAA,EAUvC;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkB,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAGA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAEA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE;AAAA,MACA;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAID,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AAAA,IACjB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;A
AAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;"}
+ {"version":3,"file":"collection-subscriber.js","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n // Track keys that have been sent to the D2 pipeline to prevent duplicate inserts\n // This is necessary because different code paths (initial load, change events)\n // can potentially send the same item to D2 multiple times.\n private sentToD2Keys = new Set<string | number>()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // Filter changes to prevent duplicate inserts to D2 pipeline.\n // This ensures D2 multiplicity stays at 1 for visible items, so deletes\n // properly reduce multiplicity to 0 (triggering DELETE output).\n const changesArray = Array.isArray(changes) ? changes : [...changes]\n const filteredChanges: Array<ChangeMessage<any, string | number>> = []\n for (const change of changesArray) {\n if (change.type === `insert`) {\n if (this.sentToD2Keys.has(change.key)) {\n // Skip duplicate insert - already sent to D2\n continue\n }\n this.sentToD2Keys.add(change.key)\n } else if (change.type === `delete`) {\n // Remove from tracking so future re-inserts are allowed\n this.sentToD2Keys.delete(change.key)\n }\n // Updates are handled as delete+insert by splitUpdates, so no special handling needed\n filteredChanges.push(change)\n }\n\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n filteredChanges,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n // Only pass includeInitialState when true. 
When it's false, we leave it\n // undefined so that user subscriptions with explicit `includeInitialState: false`\n // can be distinguished from internal lazy-loading subscriptions.\n // If we pass `false`, changes.ts would call markAllStateAsSeen() which\n // disables filtering - but internal subscriptions still need filtering.\n const subscription = this.collection.subscribeChanges(sendChanges, {\n ...(includeInitialState && { includeInitialState }),\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state and sentToD2Keys\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n // and allow re-inserts of previously sent keys\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n this.sentToD2Keys.clear()\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":[],"mappings":";;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAeA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAjBV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAQ1C,SAAQ,mCAAmB,IAAA;AAAA,EAOxB;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkB,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAIA,UAAM,eAAe,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,GAAG,OAAO;AACnE,UAAM,kBAA8D,CAAA;AACpE,eAAW,UAAU,cAAc;AACjC,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,KAAK,aAAa,IAAI,OAAO,GAAG,GAAG;AAErC;AAAA,QACF;AACA,aAAK,aAAa,IAAI,OAAO,GAAG;AAAA,MAClC,WAAW,OAAO,SAAS,UAAU;AAEnC,aAAK,aAAa,OAAO,OAAO,GAAG;AAAA,MACrC;AAEA,sBAAgB,KAAK,MAAM;AAAA,IAC7B;AAIA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAOA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE,GAAI,uBAAuB,EAAE,oBAAA;AAAA,MAC7B;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAKD,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AACf,WAAK,aAAa,MAAA;AAAA,IACpB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cA
Ac,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tanstack/db",
- "version": "0.5.14",
+ "version": "0.5.16",
  "description": "A reactive client store for building super fast apps on sync",
  "author": "Kyle Mathews",
  "license": "MIT",
@@ -1,7 +1,8 @@
  import { ensureIndexForExpression } from '../indexes/auto-index.js'
  import { and, eq, gte, lt } from '../query/builder/functions.js'
- import { Value } from '../query/ir.js'
+ import { PropRef, Value } from '../query/ir.js'
  import { EventEmitter } from '../event-emitter.js'
+ import { compileExpression } from '../query/compiler/evaluators.js'
  import { buildCursor } from '../utils/cursor.js'
  import {
  createFilterFunctionFromExpression,
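The new `PropRef` and `compileExpression` imports are used below to turn the orderBy column's property path into a function that extracts that column from a row. As a rough mental model only (not the real `compileExpression` API), such an extractor behaves like this hypothetical helper:

```ts
// Hypothetical stand-in for a compiled property-path extractor: it walks a
// path like ['salary'] on a row. The real compileExpression in
// query/compiler/evaluators.ts is more general; this only illustrates the idea.
function makePathExtractor(path: Array<string>) {
  return (row: unknown): unknown =>
    path.reduce<unknown>(
      (acc, segment) =>
        acc != null ? (acc as Record<string, unknown>)[segment] : undefined,
      row,
    )
}

const extractSalary = makePathExtractor([`salary`])
console.log(extractSalary({ id: 1, salary: 52_000 })) // 52000
```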
@@ -494,6 +495,13 @@ export class CollectionSubscription
  const valuesNeeded = () => Math.max(limit - changes.length, 0)
  const collectionExhausted = () => keys.length === 0

+ // Create a value extractor for the orderBy field to properly track the biggest indexed value
+ const orderByExpression = orderBy[0]!.expression
+ const valueExtractor =
+ orderByExpression.type === `ref`
+ ? compileExpression(new PropRef(orderByExpression.path), true)
+ : null
+
  while (valuesNeeded() > 0 && !collectionExhausted()) {
  const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration

@@ -504,7 +512,9 @@ export class CollectionSubscription
  key,
  value,
  })
- biggestObservedValue = value
+ // Extract the indexed value (e.g., salary) from the row, not the full row
+ // This is needed for index.take() to work correctly with the BTree comparator
+ biggestObservedValue = valueExtractor ? valueExtractor(value) : value
  insertedKeys.add(key) // Track this key
  }

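The motivation for this fix, per the added comments: the index is ordered by the extracted column, so the cursor handed back to `index.take()` must be a column value rather than the whole row. A toy model of cursor-based paging over a column-sorted array (hypothetical `take` helper, not the package's BTree index) shows why the cursor has to live in the column's value space:

```ts
// Toy model of cursor-based paging over a column-ordered list (hypothetical
// `take` helper, not the package's BTree index).
type Row = { id: number; salary: number }

function take(rowsSortedBySalary: Array<Row>, n: number, after?: number): Array<Row> {
  const start =
    after === undefined ? 0 : rowsSortedBySalary.findIndex((row) => row.salary > after)
  return start === -1 ? [] : rowsSortedBySalary.slice(start, start + n)
}

const rows: Array<Row> = [
  { id: 3, salary: 40_000 },
  { id: 1, salary: 52_000 },
  { id: 2, salary: 61_000 },
]

const firstPage = take(rows, 2)
// The cursor must be the extracted salary, because that is what the index orders by.
const cursor = firstPage[firstPage.length - 1]!.salary
console.log(take(rows, 2, cursor)) // [ { id: 2, salary: 61000 } ]
```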
@@ -29,6 +29,11 @@ export class CollectionSubscriber<
  { resolve: () => void }
  >()

+ // Track keys that have been sent to the D2 pipeline to prevent duplicate inserts
+ // This is necessary because different code paths (initial load, change events)
+ // can potentially send the same item to D2 multiple times.
+ private sentToD2Keys = new Set<string | number>()
+
  constructor(
  private alias: string,
  private collectionId: string,
@@ -129,13 +134,33 @@ export class CollectionSubscriber<
  changes: Iterable<ChangeMessage<any, string | number>>,
  callback?: () => boolean,
  ) {
+ // Filter changes to prevent duplicate inserts to D2 pipeline.
+ // This ensures D2 multiplicity stays at 1 for visible items, so deletes
+ // properly reduce multiplicity to 0 (triggering DELETE output).
+ const changesArray = Array.isArray(changes) ? changes : [...changes]
+ const filteredChanges: Array<ChangeMessage<any, string | number>> = []
+ for (const change of changesArray) {
+ if (change.type === `insert`) {
+ if (this.sentToD2Keys.has(change.key)) {
+ // Skip duplicate insert - already sent to D2
+ continue
+ }
+ this.sentToD2Keys.add(change.key)
+ } else if (change.type === `delete`) {
+ // Remove from tracking so future re-inserts are allowed
+ this.sentToD2Keys.delete(change.key)
+ }
+ // Updates are handled as delete+insert by splitUpdates, so no special handling needed
+ filteredChanges.push(change)
+ }
+
  // currentSyncState and input are always defined when this method is called
  // (only called from active subscriptions during a sync session)
  const input =
  this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!
  const sentChanges = sendChangesToInput(
  input,
- changes,
+ filteredChanges,
  this.collection.config.getKey,
  )

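The comments in this hunk describe the underlying bookkeeping: the pipeline counts multiplicities per key, so a row inserted twice but deleted once keeps a count of 1 and never drops out of the result. A toy counter, assuming that model (not the actual `MultiSet` from @tanstack/db-ivm), makes the failure mode concrete:

```ts
// Toy multiplicity counter illustrating the comment above; this is not the
// MultiSet from @tanstack/db-ivm, just a model of the same bookkeeping.
class Multiplicities<K> {
  private counts = new Map<K, number>()

  apply(key: K, delta: 1 | -1): void {
    const next = (this.counts.get(key) ?? 0) + delta
    if (next === 0) {
      this.counts.delete(key)
    } else {
      this.counts.set(key, next)
    }
  }

  isVisible(key: K): boolean {
    return (this.counts.get(key) ?? 0) > 0
  }
}

const m = new Multiplicities<number>()
m.apply(1, 1)  // initial load sends an insert for key 1
m.apply(1, 1)  // a second code path re-sends the same insert (the bug being fixed)
m.apply(1, -1) // the row is deleted once
console.log(m.isVisible(1)) // true – the count is still 1, so the row never disappears
```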
@@ -162,8 +187,13 @@ export class CollectionSubscriber<
  this.sendChangesToPipeline(changes)
  }

+ // Only pass includeInitialState when true. When it's false, we leave it
+ // undefined so that user subscriptions with explicit `includeInitialState: false`
+ // can be distinguished from internal lazy-loading subscriptions.
+ // If we pass `false`, changes.ts would call markAllStateAsSeen() which
+ // disables filtering - but internal subscriptions still need filtering.
  const subscription = this.collection.subscribeChanges(sendChanges, {
- includeInitialState,
+ ...(includeInitialState && { includeInitialState }),
  whereExpression,
  })

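The added comment explains why `false` is never forwarded: only a property that is genuinely absent lets downstream code distinguish "not specified" from "explicitly disabled". A generic illustration of that distinction (hypothetical `describeFlag` receiver, not the actual changes.ts logic):

```ts
// Generic illustration: a receiver can only tell "not specified" apart from
// "explicitly false" if the property is truly absent in the first case.
type Options = { includeInitialState?: boolean }

function describeFlag(options: Options): string {
  if (!(`includeInitialState` in options)) return `omitted`
  return options.includeInitialState ? `explicitly true` : `explicitly false`
}

console.log(describeFlag({}))                             // "omitted"
console.log(describeFlag({ includeInitialState: true }))  // "explicitly true"
console.log(describeFlag({ includeInitialState: false })) // "explicitly false" – what passing `false` would look like
```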
@@ -190,10 +220,12 @@ export class CollectionSubscriber<
  whereExpression,
  })

- // Listen for truncate events to reset cursor tracking state
+ // Listen for truncate events to reset cursor tracking state and sentToD2Keys
  // This ensures that after a must-refetch/truncate, we don't use stale cursor data
+ // and allow re-inserts of previously sent keys
  const truncateUnsubscribe = this.collection.on(`truncate`, () => {
  this.biggest = undefined
+ this.sentToD2Keys.clear()
  })

  // Clean up truncate listener when subscription is unsubscribed
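The truncate handler now resets both pieces of per-subscription state: the orderBy cursor (`biggest`) and the dedup set (`sentToD2Keys`). A minimal sketch of the same listen/reset/clean-up pattern, using a hand-rolled emitter instead of the package's EventEmitter:

```ts
// Minimal sketch of the listen/reset/clean-up pattern, with a hand-rolled
// emitter standing in for the package's EventEmitter.
class TinyEmitter {
  private listeners = new Set<() => void>()

  on(listener: () => void): () => void {
    this.listeners.add(listener)
    return () => this.listeners.delete(listener) // returns the unsubscribe function
  }

  emit(): void {
    for (const listener of this.listeners) listener()
  }
}

let biggest: unknown = undefined
const sentKeys = new Set<string | number>([1, 2, 3])

const truncateEvents = new TinyEmitter()
const stopListening = truncateEvents.on(() => {
  // After a truncate, both the orderBy cursor and the dedup set are stale.
  biggest = undefined
  sentKeys.clear()
})

truncateEvents.emit()
console.log(sentKeys.size) // 0 – a refetch may now re-insert every key
stopListening()            // mirrors calling truncateUnsubscribe() on `unsubscribed`
```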