@tanstack/db 0.5.13 → 0.5.15

This diff compares the publicly released contents of the two package versions as published to their public registry. It is provided for informational purposes only.
@@ -20,6 +20,9 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  this.limitedSnapshotRowCount = 0;
  this._status = `ready`;
  this.pendingLoadSubsetPromises = /* @__PURE__ */ new Set();
+ this.isBufferingForTruncate = false;
+ this.truncateBuffer = [];
+ this.pendingTruncateRefetches = /* @__PURE__ */ new Set();
  if (options.onUnsubscribe) {
  this.on(`unsubscribed`, (event) => options.onUnsubscribe(event));
  }
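The three new fields carry the truncate-buffering state. The sketch below restates their shape, with types taken from the updated source embedded in the new source map at the bottom of this diff; `ChangeMessage` here is a simplified stand-in for the library's change-event type.

```ts
// Hedged sketch of the new buffering state on CollectionSubscription.
type ChangeMessage<T = unknown> = {
  type: 'insert' | 'update' | 'delete'
  key: string | number
  value?: T
  previousValue?: T
}

interface TruncateBufferingState {
  /** True between a truncate and the completion of its refetches; changes are buffered while set. */
  isBufferingForTruncate: boolean
  /** Batches captured while buffering; flushed to the callback as one flattened array. */
  truncateBuffer: Array<Array<ChangeMessage>>
  /** loadSubset promises issued by the truncate refetch; the buffer flushes once this set empties. */
  pendingTruncateRefetches: Set<Promise<void>>
}
```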
@@ -32,10 +35,80 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  };
  this.callback = callbackWithSentKeysTracking;
  this.filteredCallback = options.whereExpression ? changeEvents.createFilteredCallback(this.callback, options) : this.callback;
+ this.truncateCleanup = this.collection.on(`truncate`, () => {
+ this.handleTruncate();
+ });
  }
  get status() {
  return this._status;
  }
+ /**
+ * Handle collection truncate event by resetting state and re-requesting subsets.
+ * This is called when the sync layer receives a must-refetch and clears all data.
+ *
+ * To prevent a flash of missing content, we buffer all changes (deletes from truncate
+ * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
+ */
+ handleTruncate() {
+ const subsetsToReload = [...this.loadedSubsets];
+ const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null;
+ if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {
+ this.snapshotSent = false;
+ this.loadedInitialState = false;
+ this.limitedSnapshotRowCount = 0;
+ this.lastSentKey = void 0;
+ this.loadedSubsets = [];
+ return;
+ }
+ this.isBufferingForTruncate = true;
+ this.truncateBuffer = [];
+ this.pendingTruncateRefetches.clear();
+ this.snapshotSent = false;
+ this.loadedInitialState = false;
+ this.limitedSnapshotRowCount = 0;
+ this.lastSentKey = void 0;
+ this.loadedSubsets = [];
+ queueMicrotask(() => {
+ if (!this.isBufferingForTruncate) {
+ return;
+ }
+ for (const options of subsetsToReload) {
+ const syncResult = this.collection._sync.loadSubset(options);
+ this.loadedSubsets.push(options);
+ this.trackLoadSubsetPromise(syncResult);
+ if (syncResult instanceof Promise) {
+ this.pendingTruncateRefetches.add(syncResult);
+ syncResult.catch(() => {
+ }).finally(() => {
+ this.pendingTruncateRefetches.delete(syncResult);
+ this.checkTruncateRefetchComplete();
+ });
+ }
+ }
+ if (this.pendingTruncateRefetches.size === 0) {
+ this.flushTruncateBuffer();
+ }
+ });
+ }
+ /**
+ * Check if all truncate refetch promises have completed and flush buffer if so
+ */
+ checkTruncateRefetchComplete() {
+ if (this.pendingTruncateRefetches.size === 0 && this.isBufferingForTruncate) {
+ this.flushTruncateBuffer();
+ }
+ }
+ /**
+ * Flush the truncate buffer, emitting all buffered changes to the callback
+ */
+ flushTruncateBuffer() {
+ this.isBufferingForTruncate = false;
+ const merged = this.truncateBuffer.flat();
+ if (merged.length > 0) {
+ this.filteredCallback(merged);
+ }
+ this.truncateBuffer = [];
+ }
  setOrderByIndex(index) {
  this.orderByIndex = index;
  }
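The added methods implement a generic buffer-until-settled discipline: flip a flag before the truncate's delete events arrive, re-issue the previously loaded subsets, track whichever loadSubset calls return promises, and flush the accumulated batches as a single callback once the pending set drains (or immediately if every call was synchronous). A minimal standalone sketch of that pattern follows; the names are illustrative, not the library's API.

```ts
// Sketch of the buffer-until-refetch-settles pattern used by handleTruncate.
type Batch<T> = Array<T>

class TruncateBuffer<T> {
  private buffering = false
  private batches: Array<Batch<T>> = []
  private pending = new Set<Promise<void>>()

  constructor(private emit: (changes: Batch<T>) => void) {}

  // Begin buffering and issue the refetches; each async refetch is tracked so the
  // buffer flushes only after the last one settles (including rejections).
  start(refetches: Array<Promise<void> | true>) {
    this.buffering = true
    this.batches = []
    for (const result of refetches) {
      if (result instanceof Promise) {
        this.pending.add(result)
        result
          .catch(() => {}) // still flush if a refetch fails
          .finally(() => {
            this.pending.delete(result)
            this.maybeFlush()
          })
      }
    }
    // All refetches were synchronous: flush right away.
    this.maybeFlush()
  }

  // Route a batch of changes: parked while a truncate refetch is in flight,
  // emitted straight through otherwise.
  push(changes: Batch<T>) {
    if (this.buffering) {
      if (changes.length > 0) this.batches.push(changes)
    } else {
      this.emit(changes)
    }
  }

  private maybeFlush() {
    if (!this.buffering || this.pending.size > 0) return
    this.buffering = false
    const merged = this.batches.flat()
    if (merged.length > 0) this.emit(merged)
    this.batches = []
  }
}
```

The `push` routing corresponds to the `emitEvents` change in the next hunk: while buffering, batches are parked instead of emitted, so the subscriber later sees the truncate's deletes and the refetched inserts in one callback invocation.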
@@ -85,7 +158,13 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  }
  emitEvents(changes) {
  const newChanges = this.filterAndFlipChanges(changes);
- this.filteredCallback(newChanges);
+ if (this.isBufferingForTruncate) {
+ if (newChanges.length > 0) {
+ this.truncateBuffer.push(newChanges);
+ }
+ } else {
+ this.filteredCallback(newChanges);
+ }
  }
  /**
  * Sends the snapshot to the callback.
@@ -281,6 +360,7 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  if (this.loadedInitialState || this.skipFiltering) {
  return changes;
  }
+ const skipDeleteFilter = this.isBufferingForTruncate;
  const newChanges = [];
  for (const change of changes) {
  let newChange = change;
@@ -289,7 +369,9 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  if (change.type === `update`) {
  newChange = { ...change, type: `insert`, previousValue: void 0 };
  } else if (change.type === `delete`) {
- continue;
+ if (!skipDeleteFilter) {
+ continue;
+ }
  }
  this.sentKeys.add(change.key);
  } else {
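The delete pass-through is the subtle part of this hunk. Normally a delete for a key the subscriber never saw is dropped, but while buffering for a truncate those deletes must survive so the flushed batch can retract rows that were sent before the tracking state was reset. A simplified sketch of the resulting rule (not the shipped code, which lives on CollectionSubscription):

```ts
type Change = {
  type: 'insert' | 'update' | 'delete'
  key: string | number
  value?: unknown
  previousValue?: unknown
}

// Updates for unseen keys flip to inserts, duplicate inserts are dropped, and deletes
// for unseen keys are dropped unless we are buffering for a truncate.
function filterAndFlip(
  changes: Array<Change>,
  sentKeys: Set<string | number>,
  bufferingForTruncate: boolean,
): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    let next = change
    if (!sentKeys.has(change.key)) {
      if (change.type === 'update') {
        next = { ...change, type: 'insert', previousValue: undefined }
      } else if (change.type === 'delete' && !bufferingForTruncate) {
        continue // unseen key, nothing to retract
      }
      sentKeys.add(change.key)
    } else if (change.type === 'insert') {
      continue // duplicate insert, key already sent
    } else if (change.type === 'delete') {
      sentKeys.delete(change.key) // allow a later re-insert
    }
    out.push(next)
  }
  return out
}
```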
@@ -324,6 +406,11 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
  this.skipFiltering = true;
  }
  unsubscribe() {
+ this.truncateCleanup?.();
+ this.truncateCleanup = void 0;
+ this.isBufferingForTruncate = false;
+ this.truncateBuffer = [];
+ this.pendingTruncateRefetches.clear();
  for (const options of this.loadedSubsets) {
  this.collection._sync.unloadSubset(options);
  }
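The unsubscribe additions follow the usual cleanup-function pattern: the truncate listener registered in the constructor returned a disposer, which is stored and invoked at most once on teardown, and the buffering state is reset so a pending flush cannot fire after the subscription is gone. A hedged sketch of that pattern with a stand-in emitter (not @tanstack/db's event API):

```ts
type Cleanup = () => void

// Stand-in emitter whose `on` returns a disposer, mirroring how truncateCleanup is used.
class TruncateEmitter {
  private listeners = new Set<() => void>()

  on(listener: () => void): Cleanup {
    this.listeners.add(listener)
    return () => {
      this.listeners.delete(listener)
    }
  }

  emit(): void {
    for (const listener of this.listeners) listener()
  }
}

class SubscriptionSketch {
  private truncateCleanup: Cleanup | undefined
  private isBufferingForTruncate = false
  private truncateBuffer: Array<unknown> = []

  constructor(truncateEvents: TruncateEmitter) {
    // Register in the constructor, keep the disposer for teardown.
    this.truncateCleanup = truncateEvents.on(() => this.handleTruncate())
  }

  private handleTruncate(): void {
    this.isBufferingForTruncate = true // buffering and refetch elided in this sketch
  }

  unsubscribe(): void {
    this.truncateCleanup?.() // detach the truncate listener (safe if already undefined)
    this.truncateCleanup = undefined
    this.isBufferingForTruncate = false // drop any in-flight buffering state
    this.truncateBuffer = []
  }
}
```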
@@ -1 +1 @@
- {"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n 
if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? 
false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? 
createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n 
whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["EventEmitter","ensureIndexForExpression","createFilteredCallback","and","createFilterFunctionFromExpression","eq","Value","buildCursor","minValue","gte","lt"],"mappings":";;;;;;;;AAgDO,MAAM,+BACHA,aAAAA,aAEV;AAAA,EAuCE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAzCV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3BC,gBAAAA,yBAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5BC,aAAAA,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmBC,UAAAA,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClBC,gDAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAOC,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,Q
AAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkBC,OAAAA,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMC,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqBL,UAAAA;AAAAA,YACnBM,UAAAA,IAAI,YAAY,IAAIH,GAAAA,MAAME,SAAQ,CAAC;AAAA,YACnCE,UAAAA,GAAG,YAAY,IAAIJ,GAAAA,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqBD,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAME,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
+ {"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches complete\n // This prevents a flash of missing content between deletes and 
new inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested commits.\n // This ensures correct event ordering: deletes first, then 
inserts.\n queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if (newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else 
{\n this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n 
throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n 
this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. 
We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["EventEmitter","ensureIndexForExpression","createFilteredCallback","and","createFilterFunctionFromExpression","eq","Value","buildCursor","minValue","gte","lt"],"mappings":";;;;;;;;AAgDO,MAAM,+BACHA,aAAAA,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3BC,gBAAAA,yBAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5BC,aAAAA,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmBC,UAAAA,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;
AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClBC,gDAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAOC,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkBC,OAAAA,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMC,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqBL,UAAAA;AAAAA,YACnBM,UAAAA,IAAI,YAAY,IAAIH,GAAAA,MAAME,SAAQ,CAAC;AAAA,YACnCE,UAAAA,GAAG,YAAY,IAAIJ,GAAAA,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqBD,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAME,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO
,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
@@ -46,8 +46,28 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
46
46
  private orderByIndex;
47
47
  private _status;
48
48
  private pendingLoadSubsetPromises;
49
+ private truncateCleanup;
50
+ private isBufferingForTruncate;
51
+ private truncateBuffer;
52
+ private pendingTruncateRefetches;
49
53
  get status(): SubscriptionStatus;
50
54
  constructor(collection: CollectionImpl<any, any, any, any, any>, callback: (changes: Array<ChangeMessage<any, any>>) => void, options: CollectionSubscriptionOptions);
55
+ /**
56
+ * Handle collection truncate event by resetting state and re-requesting subsets.
57
+ * This is called when the sync layer receives a must-refetch and clears all data.
58
+ *
59
+ * To prevent a flash of missing content, we buffer all changes (deletes from truncate
60
+ * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
61
+ */
62
+ private handleTruncate;
63
+ /**
64
+ * Check if all truncate refetch promises have completed and flush buffer if so
65
+ */
66
+ private checkTruncateRefetchComplete;
67
+ /**
68
+ * Flush the truncate buffer, emitting all buffered changes to the callback
69
+ */
70
+ private flushTruncateBuffer;
51
71
  setOrderByIndex(index: IndexInterface<any>): void;
52
72
  /**
53
73
  * Set subscription status and emit events if changed
@@ -13,6 +13,7 @@ class CollectionSubscriber {
13
13
  this.collectionConfigBuilder = collectionConfigBuilder;
14
14
  this.biggest = void 0;
15
15
  this.subscriptionLoadingPromises = /* @__PURE__ */ new Map();
16
+ this.sentToD2Keys = /* @__PURE__ */ new Set();
16
17
  }
17
18
  subscribe() {
18
19
  const whereClause = this.getWhereClauseForAlias();
@@ -82,10 +83,23 @@ class CollectionSubscriber {
82
83
  return subscription;
83
84
  }
84
85
  sendChangesToPipeline(changes, callback) {
86
+ const changesArray = Array.isArray(changes) ? changes : [...changes];
87
+ const filteredChanges = [];
88
+ for (const change of changesArray) {
89
+ if (change.type === `insert`) {
90
+ if (this.sentToD2Keys.has(change.key)) {
91
+ continue;
92
+ }
93
+ this.sentToD2Keys.add(change.key);
94
+ } else if (change.type === `delete`) {
95
+ this.sentToD2Keys.delete(change.key);
96
+ }
97
+ filteredChanges.push(change);
98
+ }
85
99
  const input = this.collectionConfigBuilder.currentSyncState.inputs[this.alias];
86
100
  const sentChanges = sendChangesToInput(
87
101
  input,
88
- changes,
102
+ filteredChanges,
89
103
  this.collection.config.getKey
90
104
  );
91
105
  const dataLoader = sentChanges > 0 ? callback : void 0;
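The `sentToD2Keys` bookkeeping added above keeps each visible key at multiplicity 1 in the D2 multiset (per the source comment in the map below): a repeated insert for a key that was already forwarded is dropped, and a delete clears the key so a later re-insert passes through again. A minimal standalone sketch of the same dedup idea — the `Change` type and `SentKeyFilter` class are simplified stand-ins, not the package's actual types:

```typescript
// Illustrative sketch only: key-based dedup in the spirit of the
// sentToD2Keys logic above. `Change` is a simplified stand-in type.
type Change<T = unknown> = {
  type: `insert` | `update` | `delete`
  key: string | number
  value: T
}

class SentKeyFilter {
  private sentKeys = new Set<string | number>()

  // Drop inserts for keys that were already forwarded; forget keys on delete
  // so a future re-insert of the same key is allowed through again.
  filter(changes: Iterable<Change>): Array<Change> {
    const out: Array<Change> = []
    for (const change of changes) {
      if (change.type === `insert`) {
        if (this.sentKeys.has(change.key)) continue
        this.sentKeys.add(change.key)
      } else if (change.type === `delete`) {
        this.sentKeys.delete(change.key)
      }
      out.push(change)
    }
    return out
  }

  // After a truncate/full refetch, previously sent keys must be allowed
  // to re-enter the pipeline.
  reset(): void {
    this.sentKeys.clear()
  }
}
```

The package's own comment (visible in the sourcemap further down) notes that updates need no special handling on this path because the ordered-changes path splits them into a delete plus an insert before they reach the filter.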
@@ -98,7 +112,7 @@ class CollectionSubscriber {
98
112
  this.sendChangesToPipeline(changes);
99
113
  };
100
114
  const subscription = this.collection.subscribeChanges(sendChanges, {
101
- includeInitialState,
115
+ ...includeInitialState && { includeInitialState },
102
116
  whereExpression
103
117
  });
104
118
  return subscription;
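Replacing `includeInitialState,` with `...includeInitialState && { includeInitialState },` means the property is only present when the flag is true; spreading `false` contributes no properties, so downstream code can distinguish "not specified" from an explicit `false` (the accompanying source comment ties this to telling user subscriptions apart from internal lazy-loading ones). A small self-contained illustration of the idiom — the `SubscribeOptions` shape below is hypothetical:

```typescript
// Hypothetical options shape, used only to illustrate the conditional-spread idiom.
type SubscribeOptions = {
  includeInitialState?: boolean
}

function buildOptions(includeInitialState: boolean): SubscribeOptions {
  // Spreading `false` adds nothing, so the key is omitted entirely
  // instead of being set to an explicit `false`.
  return { ...(includeInitialState && { includeInitialState }) }
}

console.log(Object.keys(buildOptions(true)))  // ["includeInitialState"]
console.log(Object.keys(buildOptions(false))) // []
```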
@@ -114,6 +128,7 @@ class CollectionSubscriber {
114
128
  });
115
129
  const truncateUnsubscribe = this.collection.on(`truncate`, () => {
116
130
  this.biggest = void 0;
131
+ this.sentToD2Keys.clear();
117
132
  });
118
133
  subscription.on(`unsubscribed`, () => {
119
134
  truncateUnsubscribe();
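On `truncate` the subscriber now clears both pieces of tracking state: `biggest` (the orderBy cursor) and `sentToD2Keys`, so rows re-synced after a must-refetch are not treated as stale cursor data or duplicate inserts, and the listener is removed together with the subscription. A standalone sketch of that listener lifecycle, with a tiny illustrative emitter rather than the package's own:

```typescript
// Standalone sketch: a truncate handler resets per-subscription tracking
// state, and is detached again when the subscription is unsubscribed.
type Handler = () => void
type Unsubscribe = () => void

class TinyEmitter {
  private handlers = new Set<Handler>()
  on(handler: Handler): Unsubscribe {
    this.handlers.add(handler)
    return () => this.handlers.delete(handler)
  }
  emit(): void {
    for (const handler of this.handlers) handler()
  }
}

// Per-subscription tracking state that must not survive a truncate.
let biggestSeen: unknown = undefined
const sentKeys = new Set<string | number>()

const truncateEvents = new TinyEmitter()
const stopTruncateListener = truncateEvents.on(() => {
  biggestSeen = undefined
  sentKeys.clear()
})

// Later, when the subscription is torn down, the listener goes with it.
stopTruncateListener()
```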
@@ -1 +1 @@
1
- {"version":3,"file":"collection-subscriber.cjs","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? 
callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":["normalizeExpressionPaths","normalizeOrderByPaths","MultiSet"],"mappings":";;;;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAUA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAZV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAAA,EAUvC;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkBA,YAAAA,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAGA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAEA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE;AAAA,MACA;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAID,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AAAA,IACjB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoBC,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoBA,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,
uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAIC,MAAAA,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;"}
1
+ {"version":3,"file":"collection-subscriber.cjs","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n // Track keys that have been sent to the D2 pipeline to prevent duplicate inserts\n // This is necessary because different code paths (initial load, change events)\n // can potentially send the same item to D2 multiple times.\n private sentToD2Keys = new Set<string | number>()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // Filter changes to prevent duplicate inserts to D2 pipeline.\n // This ensures D2 multiplicity stays at 1 for visible items, so deletes\n // properly reduce multiplicity to 0 (triggering DELETE output).\n const changesArray = Array.isArray(changes) ? changes : [...changes]\n const filteredChanges: Array<ChangeMessage<any, string | number>> = []\n for (const change of changesArray) {\n if (change.type === `insert`) {\n if (this.sentToD2Keys.has(change.key)) {\n // Skip duplicate insert - already sent to D2\n continue\n }\n this.sentToD2Keys.add(change.key)\n } else if (change.type === `delete`) {\n // Remove from tracking so future re-inserts are allowed\n this.sentToD2Keys.delete(change.key)\n }\n // Updates are handled as delete+insert by splitUpdates, so no special handling needed\n filteredChanges.push(change)\n }\n\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n filteredChanges,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n // Only pass includeInitialState when true. 
When it's false, we leave it\n // undefined so that user subscriptions with explicit `includeInitialState: false`\n // can be distinguished from internal lazy-loading subscriptions.\n // If we pass `false`, changes.ts would call markAllStateAsSeen() which\n // disables filtering - but internal subscriptions still need filtering.\n const subscription = this.collection.subscribeChanges(sendChanges, {\n ...(includeInitialState && { includeInitialState }),\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state and sentToD2Keys\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n // and allow re-inserts of previously sent keys\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n this.sentToD2Keys.clear()\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":["normalizeExpressionPaths","normalizeOrderByPaths","MultiSet"],"mappings":";;;;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAeA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAjBV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAQ1C,SAAQ,mCAAmB,IAAA;AAAA,EAOxB;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkBA,YAAAA,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAIA,UAAM,eAAe,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,GAAG,OAAO;AACnE,UAAM,kBAA8D,CAAA;AACpE,eAAW,UAAU,cAAc;AACjC,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,KAAK,aAAa,IAAI,OAAO,GAAG,GAAG;AAErC;AAAA,QACF;AACA,aAAK,aAAa,IAAI,OAAO,GAAG;AAAA,MAClC,WAAW,OAAO,SAAS,UAAU;AAEnC,aAAK,aAAa,OAAO,OAAO,GAAG;AAAA,MACrC;AAEA,sBAAgB,KAAK,MAAM;AAAA,IAC7B;AAIA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAOA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE,GAAI,uBAAuB,EAAE,oBAAA;AAAA,MAC7B;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAKD,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AACf,WAAK,aAAa,MAAA;AAAA,IACpB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoBC,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;
AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoBA,YAAAA,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAIC,MAAAA,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;;"}
@@ -9,6 +9,7 @@ export declare class CollectionSubscriber<TContext extends Context, TResult exte
9
9
  private collectionConfigBuilder;
10
10
  private biggest;
11
11
  private subscriptionLoadingPromises;
12
+ private sentToD2Keys;
12
13
  constructor(alias: string, collectionId: string, collection: Collection, collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>);
13
14
  subscribe(): CollectionSubscription;
14
15
  private subscribeToChanges;
@@ -46,8 +46,28 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
46
46
  private orderByIndex;
47
47
  private _status;
48
48
  private pendingLoadSubsetPromises;
49
+ private truncateCleanup;
50
+ private isBufferingForTruncate;
51
+ private truncateBuffer;
52
+ private pendingTruncateRefetches;
49
53
  get status(): SubscriptionStatus;
50
54
  constructor(collection: CollectionImpl<any, any, any, any, any>, callback: (changes: Array<ChangeMessage<any, any>>) => void, options: CollectionSubscriptionOptions);
55
+ /**
56
+ * Handle collection truncate event by resetting state and re-requesting subsets.
57
+ * This is called when the sync layer receives a must-refetch and clears all data.
58
+ *
59
+ * To prevent a flash of missing content, we buffer all changes (deletes from truncate
60
+ * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
61
+ */
62
+ private handleTruncate;
63
+ /**
64
+ * Check if all truncate refetch promises have completed and flush buffer if so
65
+ */
66
+ private checkTruncateRefetchComplete;
67
+ /**
68
+ * Flush the truncate buffer, emitting all buffered changes to the callback
69
+ */
70
+ private flushTruncateBuffer;
51
71
  setOrderByIndex(index: IndexInterface<any>): void;
52
72
  /**
53
73
  * Set subscription status and emit events if changed
@@ -18,6 +18,9 @@ class CollectionSubscription extends EventEmitter {
18
18
  this.limitedSnapshotRowCount = 0;
19
19
  this._status = `ready`;
20
20
  this.pendingLoadSubsetPromises = /* @__PURE__ */ new Set();
21
+ this.isBufferingForTruncate = false;
22
+ this.truncateBuffer = [];
23
+ this.pendingTruncateRefetches = /* @__PURE__ */ new Set();
21
24
  if (options.onUnsubscribe) {
22
25
  this.on(`unsubscribed`, (event) => options.onUnsubscribe(event));
23
26
  }
@@ -30,10 +33,80 @@ class CollectionSubscription extends EventEmitter {
30
33
  };
31
34
  this.callback = callbackWithSentKeysTracking;
32
35
  this.filteredCallback = options.whereExpression ? createFilteredCallback(this.callback, options) : this.callback;
36
+ this.truncateCleanup = this.collection.on(`truncate`, () => {
37
+ this.handleTruncate();
38
+ });
33
39
  }
34
40
  get status() {
35
41
  return this._status;
36
42
  }
43
+ /**
44
+ * Handle collection truncate event by resetting state and re-requesting subsets.
45
+ * This is called when the sync layer receives a must-refetch and clears all data.
46
+ *
47
+ * To prevent a flash of missing content, we buffer all changes (deletes from truncate
48
+ * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
49
+ */
50
+ handleTruncate() {
51
+ const subsetsToReload = [...this.loadedSubsets];
52
+ const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null;
53
+ if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {
54
+ this.snapshotSent = false;
55
+ this.loadedInitialState = false;
56
+ this.limitedSnapshotRowCount = 0;
57
+ this.lastSentKey = void 0;
58
+ this.loadedSubsets = [];
59
+ return;
60
+ }
61
+ this.isBufferingForTruncate = true;
62
+ this.truncateBuffer = [];
63
+ this.pendingTruncateRefetches.clear();
64
+ this.snapshotSent = false;
65
+ this.loadedInitialState = false;
66
+ this.limitedSnapshotRowCount = 0;
67
+ this.lastSentKey = void 0;
68
+ this.loadedSubsets = [];
69
+ queueMicrotask(() => {
70
+ if (!this.isBufferingForTruncate) {
71
+ return;
72
+ }
73
+ for (const options of subsetsToReload) {
74
+ const syncResult = this.collection._sync.loadSubset(options);
75
+ this.loadedSubsets.push(options);
76
+ this.trackLoadSubsetPromise(syncResult);
77
+ if (syncResult instanceof Promise) {
78
+ this.pendingTruncateRefetches.add(syncResult);
79
+ syncResult.catch(() => {
80
+ }).finally(() => {
81
+ this.pendingTruncateRefetches.delete(syncResult);
82
+ this.checkTruncateRefetchComplete();
83
+ });
84
+ }
85
+ }
86
+ if (this.pendingTruncateRefetches.size === 0) {
87
+ this.flushTruncateBuffer();
88
+ }
89
+ });
90
+ }
91
+ /**
92
+ * Check if all truncate refetch promises have completed and flush buffer if so
93
+ */
94
+ checkTruncateRefetchComplete() {
95
+ if (this.pendingTruncateRefetches.size === 0 && this.isBufferingForTruncate) {
96
+ this.flushTruncateBuffer();
97
+ }
98
+ }
99
+ /**
100
+ * Flush the truncate buffer, emitting all buffered changes to the callback
101
+ */
102
+ flushTruncateBuffer() {
103
+ this.isBufferingForTruncate = false;
104
+ const merged = this.truncateBuffer.flat();
105
+ if (merged.length > 0) {
106
+ this.filteredCallback(merged);
107
+ }
108
+ this.truncateBuffer = [];
109
+ }
37
110
  setOrderByIndex(index) {
38
111
  this.orderByIndex = index;
39
112
  }
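Taken together, `handleTruncate`, `checkTruncateRefetchComplete` and `flushTruncateBuffer` implement a buffer-until-refetch-settles pattern: per-subscription state is reset, every previously loaded subset is re-requested on a microtask, the returned promises are tracked, and any changes produced in the meantime are held back and emitted as one batch once the last refetch settles. The following standalone sketch shows the same control flow with simplified names — `TruncateBuffer`, `begin`, `push` and `emit` are illustrative, not the package's API:

```typescript
// Sketch of the buffer-until-refetch-settles pattern, under simplified types.
type Batch<T> = Array<T>

class TruncateBuffer<T> {
  private buffering = false
  private batches: Array<Batch<T>> = []
  private pending = new Set<Promise<void>>()

  constructor(private emit: (changes: Batch<T>) => void) {}

  // Called when the source is truncated: start buffering and re-request data.
  begin(refetches: Array<() => Promise<void> | void>): void {
    this.buffering = true
    this.batches = []
    this.pending.clear()

    // Defer the refetches by one microtask so deletes still being delivered
    // from the truncate land in the buffer first.
    queueMicrotask(() => {
      if (!this.buffering) return
      for (const refetch of refetches) {
        const result = refetch()
        if (result instanceof Promise) {
          this.pending.add(result)
          result
            .catch(() => {})
            .finally(() => {
              this.pending.delete(result)
              if (this.pending.size === 0) this.flush()
            })
        }
      }
      // All refetches were synchronous: flush immediately.
      if (this.pending.size === 0) this.flush()
    })
  }

  // While buffering, batches are held back instead of being emitted.
  push(changes: Batch<T>): void {
    if (this.buffering) {
      if (changes.length > 0) this.batches.push(changes)
    } else {
      this.emit(changes)
    }
  }

  private flush(): void {
    this.buffering = false
    const merged = this.batches.flat()
    if (merged.length > 0) this.emit(merged)
    this.batches = []
  }
}
```

Because the deletes produced by the truncate and the inserts produced by the refetch reach the consumer in a single callback, the subscriber never observes an intermediate frame where the data is simply missing — the "flash of missing content" the JSDoc above refers to.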
@@ -83,7 +156,13 @@ class CollectionSubscription extends EventEmitter {
83
156
  }
84
157
  emitEvents(changes) {
85
158
  const newChanges = this.filterAndFlipChanges(changes);
86
- this.filteredCallback(newChanges);
159
+ if (this.isBufferingForTruncate) {
160
+ if (newChanges.length > 0) {
161
+ this.truncateBuffer.push(newChanges);
162
+ }
163
+ } else {
164
+ this.filteredCallback(newChanges);
165
+ }
87
166
  }
88
167
  /**
89
168
  * Sends the snapshot to the callback.
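The change to `emitEvents` is the routing half of that pattern: while `isBufferingForTruncate` is set, non-empty batches are appended to `truncateBuffer` instead of being passed to `filteredCallback`, and `flushTruncateBuffer` later concatenates them with `.flat()`. A tiny self-contained illustration of that buffer-then-flatten behaviour (the string payloads are placeholders):

```typescript
// Buffered batches are stored as an array of arrays and merged on flush,
// preserving the per-batch ordering of changes.
const truncateBuffer: Array<Array<string>> = []

function emitOrBuffer(
  batch: Array<string>,
  buffering: boolean,
  emit: (batch: Array<string>) => void,
): void {
  if (buffering) {
    if (batch.length > 0) truncateBuffer.push(batch)
  } else {
    emit(batch)
  }
}

emitOrBuffer([`delete:a`, `delete:b`], true, console.log) // buffered
emitOrBuffer([`insert:a`], true, console.log)             // buffered
console.log(truncateBuffer.flat()) // ["delete:a", "delete:b", "insert:a"]
```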
@@ -279,6 +358,7 @@ class CollectionSubscription extends EventEmitter {
279
358
  if (this.loadedInitialState || this.skipFiltering) {
280
359
  return changes;
281
360
  }
361
+ const skipDeleteFilter = this.isBufferingForTruncate;
282
362
  const newChanges = [];
283
363
  for (const change of changes) {
284
364
  let newChange = change;
@@ -287,7 +367,9 @@ class CollectionSubscription extends EventEmitter {
287
367
  if (change.type === `update`) {
288
368
  newChange = { ...change, type: `insert`, previousValue: void 0 };
289
369
  } else if (change.type === `delete`) {
290
- continue;
370
+ if (!skipDeleteFilter) {
371
+ continue;
372
+ }
291
373
  }
292
374
  this.sentKeys.add(change.key);
293
375
  } else {
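The `skipDeleteFilter` flag relaxes the usual rule in `filterAndFlipChanges`: normally a delete for a key this subscription has never sent is dropped, but while a truncate refetch is being buffered those deletes are passed through as well. A simplified standalone version of that branch — types and the surrounding bookkeeping (such as `previousValue` handling and the already-sent-key path) are reduced for illustration:

```typescript
// Simplified sketch: flip unseen updates to inserts, and drop unseen deletes
// unless a truncate refetch is currently being buffered.
type Change = { type: `insert` | `update` | `delete`; key: string; value?: unknown }

function filterAndFlip(
  changes: Array<Change>,
  sentKeys: Set<string>,
  bufferingForTruncate: boolean,
): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    let next: Change = change
    if (!sentKeys.has(change.key)) {
      if (change.type === `update`) {
        // The subscriber never saw this row, so the update becomes an insert.
        next = { ...change, type: `insert` }
      } else if (change.type === `delete` && !bufferingForTruncate) {
        // Deletes for rows the subscriber never saw are normally skipped.
        continue
      }
      sentKeys.add(change.key)
    }
    out.push(next)
  }
  return out
}
```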
@@ -322,6 +404,11 @@ class CollectionSubscription extends EventEmitter {
322
404
  this.skipFiltering = true;
323
405
  }
324
406
  unsubscribe() {
407
+ this.truncateCleanup?.();
408
+ this.truncateCleanup = void 0;
409
+ this.isBufferingForTruncate = false;
410
+ this.truncateBuffer = [];
411
+ this.pendingTruncateRefetches.clear();
325
412
  for (const options of this.loadedSubsets) {
326
413
  this.collection._sync.unloadSubset(options);
327
414
  }
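`unsubscribe` now also detaches the truncate listener and discards any in-flight buffering state, so a refetch promise that settles after teardown finds no buffering flag set and nothing left to flush. A compact sketch of that defensive teardown order, with illustrative names:

```typescript
// Illustrative teardown: remove the listener first, then drop buffered state,
// so late-resolving refetches cannot emit into a dead subscription.
type Unsubscribe = () => void

class BufferedSubscription {
  private truncateCleanup: Unsubscribe | undefined
  private buffering = false
  private buffer: Array<Array<unknown>> = []
  private pending = new Set<Promise<void>>()

  constructor(onTruncate: (handler: () => void) => Unsubscribe) {
    this.truncateCleanup = onTruncate(() => {
      this.buffering = true
    })
  }

  unsubscribe(): void {
    this.truncateCleanup?.()
    this.truncateCleanup = undefined
    this.buffering = false
    this.buffer = []
    this.pending.clear()
  }
}
```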
@@ -1 +1 @@
1
- {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n 
if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? 
false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? 
createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n 
whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAuCE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAzCV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;
AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
1
+ {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches complete\n // This prevents a flash of missing content between deletes and new 
inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested commits.\n // This ensures correct event ordering: deletes first, then inserts.\n 
queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if (newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else {\n 
this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw 
new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n 
this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. 
We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,w
BAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI
,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
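Note on the rebuilt subscription source above: it adds truncate buffering. When the collection emits `truncate` (e.g. after a must-refetch), the subscription resets its snapshot and pagination state, re-requests every previously loaded subset on a microtask, and buffers both the truncate deletes and the refetched inserts until all pending loadSubset promises settle, then emits everything in a single callback so consumers never see an empty window. A minimal sketch of that flow, using simplified stand-in types instead of the real ChangeMessage/LoadSubsetOptions and omitting the status and sentKeys tracking:

type Change = { type: 'insert' | 'update' | 'delete'; key: string | number; value?: unknown }
type LoadSubset = (options: unknown) => Promise<void> | true

class TruncateBufferSketch {
  private buffering = false
  private buffer: Array<Array<Change>> = []
  private pending = new Set<Promise<void>>()

  constructor(
    private loadSubset: LoadSubset,
    private emit: (changes: Array<Change>) => void,
  ) {}

  // Every change batch coming from the collection goes through here.
  onChanges(changes: Array<Change>) {
    if (this.buffering) {
      if (changes.length > 0) this.buffer.push(changes) // hold deletes + refetched inserts
    } else {
      this.emit(changes)
    }
  }

  // Called on the collection's `truncate` event with the subsets loaded so far.
  onTruncate(subsetsToReload: Array<unknown>) {
    this.buffering = true
    this.buffer = []
    // Defer the refetches so the truncate's delete events are buffered first.
    queueMicrotask(() => {
      for (const options of subsetsToReload) {
        const result = this.loadSubset(options)
        if (result instanceof Promise) {
          this.pending.add(result)
          result
            .catch(() => {}) // still flush if a refetch fails
            .finally(() => {
              this.pending.delete(result)
              if (this.pending.size === 0) this.flush()
            })
        }
      }
      if (this.pending.size === 0) this.flush() // every load was synchronous
    })
  }

  private flush() {
    this.buffering = false
    const merged = this.buffer.flat()
    this.buffer = []
    if (merged.length > 0) this.emit(merged) // deletes and inserts arrive together
  }
}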
@@ -9,6 +9,7 @@ export declare class CollectionSubscriber<TContext extends Context, TResult exte
9
9
  private collectionConfigBuilder;
10
10
  private biggest;
11
11
  private subscriptionLoadingPromises;
12
+ private sentToD2Keys;
12
13
  constructor(alias: string, collectionId: string, collection: Collection, collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>);
13
14
  subscribe(): CollectionSubscription;
14
15
  private subscribeToChanges;
@@ -11,6 +11,7 @@ class CollectionSubscriber {
11
11
  this.collectionConfigBuilder = collectionConfigBuilder;
12
12
  this.biggest = void 0;
13
13
  this.subscriptionLoadingPromises = /* @__PURE__ */ new Map();
14
+ this.sentToD2Keys = /* @__PURE__ */ new Set();
14
15
  }
15
16
  subscribe() {
16
17
  const whereClause = this.getWhereClauseForAlias();
@@ -80,10 +81,23 @@ class CollectionSubscriber {
80
81
  return subscription;
81
82
  }
82
83
  sendChangesToPipeline(changes, callback) {
84
+ const changesArray = Array.isArray(changes) ? changes : [...changes];
85
+ const filteredChanges = [];
86
+ for (const change of changesArray) {
87
+ if (change.type === `insert`) {
88
+ if (this.sentToD2Keys.has(change.key)) {
89
+ continue;
90
+ }
91
+ this.sentToD2Keys.add(change.key);
92
+ } else if (change.type === `delete`) {
93
+ this.sentToD2Keys.delete(change.key);
94
+ }
95
+ filteredChanges.push(change);
96
+ }
83
97
  const input = this.collectionConfigBuilder.currentSyncState.inputs[this.alias];
84
98
  const sentChanges = sendChangesToInput(
85
99
  input,
86
- changes,
100
+ filteredChanges,
87
101
  this.collection.config.getKey
88
102
  );
89
103
  const dataLoader = sentChanges > 0 ? callback : void 0;
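For context, the sentToD2Keys guard added to sendChangesToPipeline keeps each key's multiplicity in the D2 multiset at most 1: a duplicate insert is dropped, and a delete removes the key so a later re-insert is allowed again. In isolation the filter behaves like this sketch (hypothetical Change type; the real method then hands the filtered array to sendChangesToInput):

type Change = { type: 'insert' | 'update' | 'delete'; key: string | number; value: unknown }

function filterForD2(
  changes: Iterable<Change>,
  sentToD2Keys: Set<string | number>,
): Array<Change> {
  const filtered: Array<Change> = []
  for (const change of changes) {
    if (change.type === 'insert') {
      if (sentToD2Keys.has(change.key)) continue // already inserted: keep multiplicity at 1
      sentToD2Keys.add(change.key)
    } else if (change.type === 'delete') {
      sentToD2Keys.delete(change.key) // allow this key to be inserted again later
    }
    filtered.push(change) // updates and surviving inserts/deletes are forwarded
  }
  return filtered
}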
@@ -96,7 +110,7 @@ class CollectionSubscriber {
96
110
  this.sendChangesToPipeline(changes);
97
111
  };
98
112
  const subscription = this.collection.subscribeChanges(sendChanges, {
99
- includeInitialState,
113
+ ...includeInitialState && { includeInitialState },
100
114
  whereExpression
101
115
  });
102
116
  return subscription;
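One behavioural detail in the hunk above: `...includeInitialState && { includeInitialState }` only passes the option to subscribeChanges when it is truthy; when it is false the key is omitted entirely rather than sent as an explicit `false`, which the subscription layer appears to treat specially (compare markAllStateAsSeen in the subscription source earlier in this diff). Roughly:

const includeInitialState = false
const whereExpression = undefined
const options = { ...(includeInitialState && { includeInitialState }), whereExpression }
// options is { whereExpression: undefined }, with no includeInitialState key at all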
@@ -112,6 +126,7 @@ class CollectionSubscriber {
112
126
  });
113
127
  const truncateUnsubscribe = this.collection.on(`truncate`, () => {
114
128
  this.biggest = void 0;
129
+ this.sentToD2Keys.clear();
115
130
  });
116
131
  subscription.on(`unsubscribed`, () => {
117
132
  truncateUnsubscribe();
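Clearing sentToD2Keys in the truncate listener (alongside resetting `biggest`) matters for the dedup filter above: after a must-refetch the refetch re-inserts the same keys, and without the clear those inserts would be dropped as duplicates and never reach the D2 pipeline.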
@@ -1 +1 @@
1
- {"version":3,"file":"collection-subscriber.js","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? 
callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":[],"mappings":";;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAUA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAZV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAAA,EAUvC;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkB,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAGA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAEA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE;AAAA,MACA;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAID,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AAAA,IACjB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cAAc,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;A
AAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;"}
1
+ {"version":3,"file":"collection-subscriber.js","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from '@tanstack/db-ivm'\nimport {\n normalizeExpressionPaths,\n normalizeOrderByPaths,\n} from '../compiler/expressions.js'\nimport type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'\nimport type { Collection } from '../../collection/index.js'\nimport type { ChangeMessage } from '../../types.js'\nimport type { Context, GetResult } from '../builder/types.js'\nimport type { BasicExpression } from '../ir.js'\nimport type { OrderByOptimizationInfo } from '../compiler/order-by.js'\nimport type { CollectionConfigBuilder } from './collection-config-builder.js'\nimport type { CollectionSubscription } from '../../collection/subscription.js'\n\nconst loadMoreCallbackSymbol = Symbol.for(\n `@tanstack/db.collection-config-builder`,\n)\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n // Track deferred promises for subscription loading states\n private subscriptionLoadingPromises = new Map<\n CollectionSubscription,\n { resolve: () => void }\n >()\n\n // Track keys that have been sent to the D2 pipeline to prevent duplicate inserts\n // This is necessary because different code paths (initial load, change events)\n // can potentially send the same item to D2 multiple times.\n private sentToD2Keys = new Set<string | number>()\n\n constructor(\n private alias: string,\n private collectionId: string,\n private collection: Collection,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>,\n ) {}\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseForAlias()\n\n if (whereClause) {\n const whereExpression = normalizeExpressionPaths(whereClause, this.alias)\n return this.subscribeToChanges(whereExpression)\n }\n\n return this.subscribeToChanges()\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n const orderByInfo = this.getOrderByInfo()\n if (orderByInfo) {\n subscription = this.subscribeToOrderedChanges(\n whereExpression,\n orderByInfo,\n )\n } else {\n // If the source alias is lazy then we should not include the initial state\n const includeInitialState = !this.collectionConfigBuilder.isLazyAlias(\n this.alias,\n )\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState,\n )\n }\n\n const trackLoadPromise = () => {\n // Guard against duplicate transitions\n if (!this.subscriptionLoadingPromises.has(subscription)) {\n let resolve: () => void\n const promise = new Promise<void>((res) => {\n resolve = res\n })\n\n this.subscriptionLoadingPromises.set(subscription, {\n resolve: resolve!,\n })\n this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(\n promise,\n )\n }\n }\n\n // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. 
the initial query).\n // So we also check the status here and if it's `loadingSubset` then we track the load promise\n if (subscription.status === `loadingSubset`) {\n trackLoadPromise()\n }\n\n // Subscribe to subscription status changes to propagate loading state\n const statusUnsubscribe = subscription.on(`status:change`, (event) => {\n if (event.status === `loadingSubset`) {\n trackLoadPromise()\n } else {\n // status is 'ready'\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n }\n })\n\n const unsubscribe = () => {\n // If subscription has a pending promise, resolve it before unsubscribing\n const deferred = this.subscriptionLoadingPromises.get(subscription)\n if (deferred) {\n // Clear the map entry FIRST (before resolving)\n this.subscriptionLoadingPromises.delete(subscription)\n deferred.resolve()\n }\n\n statusUnsubscribe()\n subscription.unsubscribe()\n }\n // currentSyncState is always defined when subscribe() is called\n // (called during sync session setup)\n this.collectionConfigBuilder.currentSyncState!.unsubscribeCallbacks.add(\n unsubscribe,\n )\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean,\n ) {\n // Filter changes to prevent duplicate inserts to D2 pipeline.\n // This ensures D2 multiplicity stays at 1 for visible items, so deletes\n // properly reduce multiplicity to 0 (triggering DELETE output).\n const changesArray = Array.isArray(changes) ? changes : [...changes]\n const filteredChanges: Array<ChangeMessage<any, string | number>> = []\n for (const change of changesArray) {\n if (change.type === `insert`) {\n if (this.sentToD2Keys.has(change.key)) {\n // Skip duplicate insert - already sent to D2\n continue\n }\n this.sentToD2Keys.add(change.key)\n } else if (change.type === `delete`) {\n // Remove from tracking so future re-inserts are allowed\n this.sentToD2Keys.delete(change.key)\n }\n // Updates are handled as delete+insert by splitUpdates, so no special handling needed\n filteredChanges.push(change)\n }\n\n // currentSyncState and input are always defined when this method is called\n // (only called from active subscriptions during a sync session)\n const input =\n this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!\n const sentChanges = sendChangesToInput(\n input,\n filteredChanges,\n this.collection.config.getKey,\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? callback : undefined\n\n // We need to schedule a graph run even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `scheduleGraphRun`\n this.collectionConfigBuilder.scheduleGraphRun(dataLoader, {\n alias: this.alias,\n })\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false,\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>,\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n // Only pass includeInitialState when true. 
When it's false, we leave it\n // undefined so that user subscriptions with explicit `includeInitialState: false`\n // can be distinguished from internal lazy-loading subscriptions.\n // If we pass `false`, changes.ts would call markAllStateAsSeen() which\n // disables filtering - but internal subscriptions still need filtering.\n const subscription = this.collection.subscribeChanges(sendChanges, {\n ...(includeInitialState && { includeInitialState }),\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n orderByInfo: OrderByOptimizationInfo,\n ) {\n const { orderBy, offset, limit, index } = orderByInfo\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n const splittedChanges = splitUpdates(changes)\n this.sendChangesToPipelineWithTracking(splittedChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n // Listen for truncate events to reset cursor tracking state and sentToD2Keys\n // This ensures that after a must-refetch/truncate, we don't use stale cursor data\n // and allow re-inserts of previously sent keys\n const truncateUnsubscribe = this.collection.on(`truncate`, () => {\n this.biggest = undefined\n this.sentToD2Keys.clear()\n })\n\n // Clean up truncate listener when subscription is unsubscribed\n subscription.on(`unsubscribed`, () => {\n truncateUnsubscribe()\n })\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n if (index) {\n // We have an index on the first orderBy column - use lazy loading optimization\n // This works for both single-column and multi-column orderBy:\n // - Single-column: index provides exact ordering\n // - Multi-column: index provides ordering on first column, secondary sort in memory\n subscription.setOrderByIndex(index)\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n } else {\n // No index available (e.g., non-ref expression): pass orderBy/limit to loadSubset\n // so the sync layer can optimize if the backend supports it\n subscription.requestSnapshot({\n orderBy: normalizedOrderBy,\n limit: offset + limit,\n })\n }\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // dataNeeded is not set when there's no index (e.g., non-ref expression).\n // In this case, we've already loaded all data via requestSnapshot\n // and don't need to lazily load more.\n return true\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription,\n ) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n this.sendChangesToPipeline(changes)\n return\n }\n\n const trackedChanges = this.trackSentValues(changes, orderByInfo.comparator)\n\n // Cache the loadMoreIfNeeded callback on the subscription using a symbol property.\n // This ensures we pass the same function instance to the scheduler each time,\n // allowing it to deduplicate callbacks when multiple changes arrive during a transaction.\n type SubscriptionWithLoader = CollectionSubscription & {\n [loadMoreCallbackSymbol]?: () => boolean\n }\n\n const subscriptionWithLoader = subscription as SubscriptionWithLoader\n\n subscriptionWithLoader[loadMoreCallbackSymbol] ??=\n this.loadMoreIfNeeded.bind(this, subscription)\n\n this.sendChangesToPipeline(\n trackedChanges,\n subscriptionWithLoader[loadMoreCallbackSymbol],\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const orderByInfo = this.getOrderByInfo()\n if (!orderByInfo) {\n return\n }\n const { orderBy, valueExtractorForRawRow, offset } = orderByInfo\n const biggestSentRow = this.biggest\n\n // Extract all orderBy column values from the biggest sent row\n // For single-column: returns single value, for multi-column: returns array\n const extractedValues = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : undefined\n\n // Normalize to array format for minValues\n const minValues =\n extractedValues !== undefined\n ? Array.isArray(extractedValues)\n ? 
extractedValues\n : [extractedValues]\n : undefined\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = normalizeOrderByPaths(orderBy, this.alias)\n\n // Take the `n` items after the biggest sent value\n // Pass the current window offset to ensure proper deduplication\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValues,\n offset,\n })\n }\n\n private getWhereClauseForAlias(): BasicExpression<boolean> | undefined {\n const sourceWhereClausesCache =\n this.collectionConfigBuilder.sourceWhereClausesCache\n if (!sourceWhereClausesCache) {\n return undefined\n }\n return sourceWhereClausesCache.get(this.alias)\n }\n\n private getOrderByInfo(): OrderByOptimizationInfo | undefined {\n const info =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n if (info && info.alias === this.alias) {\n return info\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n ) {\n for (const change of changes) {\n // Only track inserts/updates for cursor positioning, not deletes\n if (change.type !== `delete`) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n }\n\n yield change\n }\n }\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any,\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
}\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n"],"names":[],"mappings":";;AAcA,MAAM,yBAAyB,OAAO;AAAA,EACpC;AACF;AAEO,MAAM,qBAGX;AAAA,EAeA,YACU,OACA,cACA,YACA,yBACR;AAJQ,SAAA,QAAA;AACA,SAAA,eAAA;AACA,SAAA,aAAA;AACA,SAAA,0BAAA;AAjBV,SAAQ,UAAe;AAGvB,SAAQ,kDAAkC,IAAA;AAQ1C,SAAQ,mCAAmB,IAAA;AAAA,EAOxB;AAAA,EAEH,YAAoC;AAClC,UAAM,cAAc,KAAK,uBAAA;AAEzB,QAAI,aAAa;AACf,YAAM,kBAAkB,yBAAyB,aAAa,KAAK,KAAK;AACxE,aAAO,KAAK,mBAAmB,eAAe;AAAA,IAChD;AAEA,WAAO,KAAK,mBAAA;AAAA,EACd;AAAA,EAEQ,mBAAmB,iBAA4C;AACrE,QAAI;AACJ,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,aAAa;AACf,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ,OAAO;AAEL,YAAM,sBAAsB,CAAC,KAAK,wBAAwB;AAAA,QACxD,KAAK;AAAA,MAAA;AAGP,qBAAe,KAAK;AAAA,QAClB;AAAA,QACA;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,mBAAmB,MAAM;AAE7B,UAAI,CAAC,KAAK,4BAA4B,IAAI,YAAY,GAAG;AACvD,YAAI;AACJ,cAAM,UAAU,IAAI,QAAc,CAAC,QAAQ;AACzC,oBAAU;AAAA,QACZ,CAAC;AAED,aAAK,4BAA4B,IAAI,cAAc;AAAA,UACjD;AAAA,QAAA,CACD;AACD,aAAK,wBAAwB,oBAAqB,MAAM;AAAA,UACtD;AAAA,QAAA;AAAA,MAEJ;AAAA,IACF;AAIA,QAAI,aAAa,WAAW,iBAAiB;AAC3C,uBAAA;AAAA,IACF;AAGA,UAAM,oBAAoB,aAAa,GAAG,iBAAiB,CAAC,UAAU;AACpE,UAAI,MAAM,WAAW,iBAAiB;AACpC,yBAAA;AAAA,MACF,OAAO;AAEL,cAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,YAAI,UAAU;AAEZ,eAAK,4BAA4B,OAAO,YAAY;AACpD,mBAAS,QAAA;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,cAAc,MAAM;AAExB,YAAM,WAAW,KAAK,4BAA4B,IAAI,YAAY;AAClE,UAAI,UAAU;AAEZ,aAAK,4BAA4B,OAAO,YAAY;AACpD,iBAAS,QAAA;AAAA,MACX;AAEA,wBAAA;AACA,mBAAa,YAAA;AAAA,IACf;AAGA,SAAK,wBAAwB,iBAAkB,qBAAqB;AAAA,MAClE;AAAA,IAAA;AAEF,WAAO;AAAA,EACT;AAAA,EAEQ,sBACN,SACA,UACA;AAIA,UAAM,eAAe,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,GAAG,OAAO;AACnE,UAAM,kBAA8D,CAAA;AACpE,eAAW,UAAU,cAAc;AACjC,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,KAAK,aAAa,IAAI,OAAO,GAAG,GAAG;AAErC;AAAA,QACF;AACA,aAAK,aAAa,IAAI,OAAO,GAAG;AAAA,MAClC,WAAW,OAAO,SAAS,UAAU;AAEnC,aAAK,aAAa,OAAO,OAAO,GAAG;AAAA,MACrC;AAEA,sBAAgB,KAAK,MAAM;AAAA,IAC7B;AAIA,UAAM,QACJ,KAAK,wBAAwB,iBAAkB,OAAO,KAAK,KAAK;AAClE,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA,KAAK,WAAW,OAAO;AAAA,IAAA;AAMzB,UAAM,aAAa,cAAc,IAAI,WAAW;AAKhD,SAAK,wBAAwB,iBAAiB,YAAY;AAAA,MACxD,OAAO,KAAK;AAAA,IAAA,CACb;AAAA,EACH;AAAA,EAEQ,2BACN,iBACA,sBAA+B,OAC/B;AACA,UAAM,cAAc,CAClB,YACG;AACH,WAAK,sBAAsB,OAAO;AAAA,IACpC;AAOA,UAAM,eAAe,KAAK,WAAW,iBAAiB,aAAa;AAAA,MACjE,GAAI,uBAAuB,EAAE,oBAAA;AAAA,MAC7B;AAAA,IAAA,CACD;AAED,WAAO;AAAA,EACT;AAAA,EAEQ,0BACN,iBACA,aACA;AACA,UAAM,EAAE,SAAS,QAAQ,OAAO,UAAU;AAE1C,UAAM,qBAAqB,CACzB,YACG;AAEH,YAAM,kBAAkB,aAAa,OAAO;AAC5C,WAAK,kCAAkC,iBAAiB,YAAY;AAAA,IACtE;AAIA,UAAM,eAAe,KAAK,WAAW,iBAAiB,oBAAoB;AAAA,MACxE;AAAA,IAAA,CACD;AAKD,UAAM,sBAAsB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC/D,WAAK,UAAU;AACf,WAAK,aAAa,MAAA;AAAA,IACpB,CAAC;AAGD,iBAAa,GAAG,gBAAgB,MAAM;AACpC,0BAAA;AAAA,IACF,CAAC;AAGD,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAEnE,QAAI,OAAO;AAKT,mBAAa,gBAAgB,KAAK;AAIlC,mBAAa,uBAAuB;AAAA,QAClC,OAAO,SAAS;AAAA,QAChB,SAAS;AAAA,MAAA,CACV;AAAA,IACH,OAAO;AAGL,mBAAa,gBAAgB;AAAA,QAC3B,SAAS;AAAA,QACT,OAAO,SAAS;AAAA,MAAA,CACjB;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,cAAsC;AACrD,UAAM,cAAc,KAAK,eAAA;AAEzB,QAAI,CAAC,aAAa;AAGhB,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,eAAe;AAEvB,QAAI,CAAC,YAAY;AAIf,aAAO;AAAA,IACT;AAIA,UAAM,IAAI,WAAA;AACV,QAAI,IAAI,GAAG;AACT,WAAK,cAAc,GAAG,YAAY;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,kCACN,SACA,cACA;AACA,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB,WAAK,sBAAsB,OAAO;AAClC;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK,gBAAgB,SAAS,YAAY,UAAU;AAS3E,UAAM,yBAAyB;AAE/B,2BAAuB,sBAAsB,MAC3C,KAAK,iBAAiB,KAAK,MAAM,YAAY;AAE/C,SAAK;AAAA,MACH;AAAA,MACA,uBAAuB,sBAAsB;AAAA,IAAA;AAAA,EAEjD;AAAA;AAAA;AAAA,EAIQ,cA
Ac,GAAW,cAAsC;AACrE,UAAM,cAAc,KAAK,eAAA;AACzB,QAAI,CAAC,aAAa;AAChB;AAAA,IACF;AACA,UAAM,EAAE,SAAS,yBAAyB,OAAA,IAAW;AACrD,UAAM,iBAAiB,KAAK;AAI5B,UAAM,kBAAkB,iBACpB,wBAAwB,cAAc,IACtC;AAGJ,UAAM,YACJ,oBAAoB,SAChB,MAAM,QAAQ,eAAe,IAC3B,kBACA,CAAC,eAAe,IAClB;AAGN,UAAM,oBAAoB,sBAAsB,SAAS,KAAK,KAAK;AAInE,iBAAa,uBAAuB;AAAA,MAClC,SAAS;AAAA,MACT,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAAA,EAEQ,yBAA+D;AACrE,UAAM,0BACJ,KAAK,wBAAwB;AAC/B,QAAI,CAAC,yBAAyB;AAC5B,aAAO;AAAA,IACT;AACA,WAAO,wBAAwB,IAAI,KAAK,KAAK;AAAA,EAC/C;AAAA,EAEQ,iBAAsD;AAC5D,UAAM,OACJ,KAAK,wBAAwB,8BAC3B,KAAK,YACP;AACF,QAAI,QAAQ,KAAK,UAAU,KAAK,OAAO;AACrC,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAAA,EAEA,CAAS,gBACP,SACA,YACA;AACA,eAAW,UAAU,SAAS;AAE5B,UAAI,OAAO,SAAS,UAAU;AAC5B,YAAI,CAAC,KAAK,SAAS;AACjB,eAAK,UAAU,OAAO;AAAA,QACxB,WAAW,WAAW,KAAK,SAAS,OAAO,KAAK,IAAI,GAAG;AACrD,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAKA,SAAS,mBACP,OACA,SACA,QACQ;AACR,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AAEA,MAAI,cAAc,WAAW,GAAG;AAC9B,UAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAAA,EAC5C;AAEA,SAAO,cAAc;AACvB;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tanstack/db",
3
- "version": "0.5.13",
3
+ "version": "0.5.15",
4
4
  "description": "A reactive client store for building super fast apps on sync",
5
5
  "author": "Kyle Mathews",
6
6
  "license": "MIT",
@@ -84,6 +84,16 @@ export class CollectionSubscription
84
84
  private _status: SubscriptionStatus = `ready`
85
85
  private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()
86
86
 
87
+ // Cleanup function for truncate event listener
88
+ private truncateCleanup: (() => void) | undefined
89
+
90
+ // Truncate buffering state
91
+ // When a truncate occurs, we buffer changes until all loadSubset refetches complete
92
+ // This prevents a flash of missing content between deletes and new inserts
93
+ private isBufferingForTruncate = false
94
+ private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []
95
+ private pendingTruncateRefetches: Set<Promise<void>> = new Set()
96
+
87
97
  public get status(): SubscriptionStatus {
88
98
  return this._status
89
99
  }
@@ -116,6 +126,123 @@ export class CollectionSubscription
116
126
  this.filteredCallback = options.whereExpression
117
127
  ? createFilteredCallback(this.callback, options)
118
128
  : this.callback
129
+
130
+ // Listen for truncate events to re-request data after must-refetch
131
+ // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.
132
+ // We need to re-request all previously loaded subsets to repopulate the data.
133
+ this.truncateCleanup = this.collection.on(`truncate`, () => {
134
+ this.handleTruncate()
135
+ })
136
+ }
137
+
138
+ /**
139
+ * Handle collection truncate event by resetting state and re-requesting subsets.
140
+ * This is called when the sync layer receives a must-refetch and clears all data.
141
+ *
142
+ * To prevent a flash of missing content, we buffer all changes (deletes from truncate
143
+ * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
144
+ */
145
+ private handleTruncate() {
146
+ // Copy the loaded subsets before clearing (we'll re-request them)
147
+ const subsetsToReload = [...this.loadedSubsets]
148
+
149
+ // Only buffer if there's an actual loadSubset handler that can do async work.
150
+ // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.
151
+ // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.
152
+ const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null
153
+
154
+ // If there are no subsets to reload OR no loadSubset handler, just reset state
155
+ if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {
156
+ this.snapshotSent = false
157
+ this.loadedInitialState = false
158
+ this.limitedSnapshotRowCount = 0
159
+ this.lastSentKey = undefined
160
+ this.loadedSubsets = []
161
+ return
162
+ }
163
+
164
+ // Start buffering BEFORE we receive the delete events from the truncate commit
165
+ // This ensures we capture both the deletes and subsequent inserts
166
+ this.isBufferingForTruncate = true
167
+ this.truncateBuffer = []
168
+ this.pendingTruncateRefetches.clear()
169
+
170
+ // Reset snapshot/pagination tracking state
171
+ // Note: We don't need to populate sentKeys here because filterAndFlipChanges
172
+ // will skip the delete filter when isBufferingForTruncate is true
173
+ this.snapshotSent = false
174
+ this.loadedInitialState = false
175
+ this.limitedSnapshotRowCount = 0
176
+ this.lastSentKey = undefined
177
+
178
+ // Clear the loadedSubsets array since we're re-requesting everything from scratch
179
+ this.loadedSubsets = []
180
+
181
+ // Defer the loadSubset calls to a microtask so the truncate commit's delete events
182
+ // are buffered BEFORE the loadSubset calls potentially trigger nested commits.
183
+ // This ensures correct event ordering: deletes first, then inserts.
184
+ queueMicrotask(() => {
185
+ // Check if we were unsubscribed while waiting
186
+ if (!this.isBufferingForTruncate) {
187
+ return
188
+ }
189
+
190
+ // Re-request all previously loaded subsets and track their promises
191
+ for (const options of subsetsToReload) {
192
+ const syncResult = this.collection._sync.loadSubset(options)
193
+
194
+ // Track this loadSubset call so we can unload it later
195
+ this.loadedSubsets.push(options)
196
+ this.trackLoadSubsetPromise(syncResult)
197
+
198
+ // Track the promise for buffer flushing
199
+ if (syncResult instanceof Promise) {
200
+ this.pendingTruncateRefetches.add(syncResult)
201
+ syncResult
202
+ .catch(() => {
203
+ // Ignore errors - we still want to flush the buffer even if some requests fail
204
+ })
205
+ .finally(() => {
206
+ this.pendingTruncateRefetches.delete(syncResult)
207
+ this.checkTruncateRefetchComplete()
208
+ })
209
+ }
210
+ }
211
+
212
+ // If all loadSubset calls were synchronous (returned true), flush now
213
+ // At this point, delete events have already been buffered from the truncate commit
214
+ if (this.pendingTruncateRefetches.size === 0) {
215
+ this.flushTruncateBuffer()
216
+ }
217
+ })
218
+ }
219
+
220
+ /**
221
+ * Check if all truncate refetch promises have completed and flush buffer if so
222
+ */
223
+ private checkTruncateRefetchComplete() {
224
+ if (
225
+ this.pendingTruncateRefetches.size === 0 &&
226
+ this.isBufferingForTruncate
227
+ ) {
228
+ this.flushTruncateBuffer()
229
+ }
230
+ }
231
+
232
+ /**
233
+ * Flush the truncate buffer, emitting all buffered changes to the callback
234
+ */
235
+ private flushTruncateBuffer() {
236
+ this.isBufferingForTruncate = false
237
+
238
+ // Flatten all buffered changes into a single array for atomic emission
239
+ // This ensures consumers see all truncate changes (deletes + inserts) in one callback
240
+ const merged = this.truncateBuffer.flat()
241
+ if (merged.length > 0) {
242
+ this.filteredCallback(merged)
243
+ }
244
+
245
+ this.truncateBuffer = []
119
246
  }
120
247
 
121
248
  setOrderByIndex(index: IndexInterface<any>) {
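The handleTruncate / checkTruncateRefetchComplete / flushTruncateBuffer additions above amount to a buffer-until-settled pattern: change batches are collected while refetch promises are in flight and emitted as one merged batch. A minimal standalone sketch of that pattern, assuming an illustrative `Change` shape and `TruncateBuffer` class that are not part of @tanstack/db's API:

// Illustrative sketch only: buffer change batches while refetches triggered by
// a truncate are pending, then flush everything in a single callback.
type Change = { type: `insert` | `delete`; key: string | number; value: unknown }

class TruncateBuffer {
  private buffering = false
  private buffer: Array<Array<Change>> = []
  private pending = new Set<Promise<void>>()

  constructor(private emit: (changes: Array<Change>) => void) {}

  // Called when a truncate starts; the caller kicks off the refetches and
  // passes whatever they returned (promises or synchronous results).
  begin(refetches: Array<Promise<void> | void>) {
    this.buffering = true
    this.buffer = []
    for (const result of refetches) {
      if (result instanceof Promise) {
        this.pending.add(result)
        result
          .catch(() => {
            // ignore errors: the buffer is flushed even if a refetch fails
          })
          .finally(() => {
            this.pending.delete(result)
            this.maybeFlush()
          })
      }
    }
    // If every refetch was synchronous there is nothing to wait for.
    this.maybeFlush()
  }

  // Called for every change batch; buffers while a truncate refetch is in flight.
  push(changes: Array<Change>) {
    if (this.buffering) {
      if (changes.length > 0) this.buffer.push(changes)
    } else {
      this.emit(changes)
    }
  }

  private maybeFlush() {
    if (this.pending.size === 0 && this.buffering) {
      this.buffering = false
      const merged = this.buffer.flat()
      if (merged.length > 0) this.emit(merged)
      this.buffer = []
    }
  }
}

Flushing one merged array mirrors the diff's stated goal: consumers see the truncate's deletes and the refetch's inserts together, never an empty intermediate state.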
@@ -179,7 +306,16 @@ export class CollectionSubscription
179
306
 
180
307
  emitEvents(changes: Array<ChangeMessage<any, any>>) {
181
308
  const newChanges = this.filterAndFlipChanges(changes)
182
- this.filteredCallback(newChanges)
309
+
310
+ if (this.isBufferingForTruncate) {
311
+ // Buffer the changes instead of emitting immediately
312
+ // This prevents a flash of missing content during truncate/refetch
313
+ if (newChanges.length > 0) {
314
+ this.truncateBuffer.push(newChanges)
315
+ }
316
+ } else {
317
+ this.filteredCallback(newChanges)
318
+ }
183
319
  }
184
320
 
185
321
  /**
@@ -469,6 +605,14 @@ export class CollectionSubscription
469
605
  return changes
470
606
  }
471
607
 
608
+ // When buffering for truncate, we need all changes (including deletes) to pass through.
609
+ // This is important because:
610
+ // 1. If loadedInitialState was previously true, sentKeys will be empty
611
+ // (trackSentKeys early-returns when loadedInitialState is true)
612
+ // 2. The truncate deletes are for keys that WERE sent to the subscriber
613
+ // 3. We're collecting all changes atomically, so filtering doesn't make sense
614
+ const skipDeleteFilter = this.isBufferingForTruncate
615
+
472
616
  const newChanges = []
473
617
  for (const change of changes) {
474
618
  let newChange = change
@@ -478,8 +622,11 @@ export class CollectionSubscription
478
622
  if (change.type === `update`) {
479
623
  newChange = { ...change, type: `insert`, previousValue: undefined }
480
624
  } else if (change.type === `delete`) {
481
- // filter out deletes for keys that have not been sent
482
- continue
625
+ // Filter out deletes for keys that have not been sent,
626
+ // UNLESS we're buffering for truncate (where all deletes should pass through)
627
+ if (!skipDeleteFilter) {
628
+ continue
629
+ }
483
630
  }
484
631
  this.sentKeys.add(change.key)
485
632
  } else {
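The skipDeleteFilter change above relaxes the usual rule that deletes for keys the subscriber never received are dropped. A hedged sketch of that filter in isolation (the `Msg` type, `sentKeys` set, and `filterDeletes` helper are illustrative names, not the package's internals):

// Illustrative sketch: deletes for unseen keys are normally noise and get
// dropped, but while buffering for a truncate every delete must pass through.
type Msg = { type: `insert` | `delete`; key: string | number }

function filterDeletes(
  changes: Array<Msg>,
  sentKeys: Set<string | number>,
  bufferingForTruncate: boolean,
): Array<Msg> {
  const out: Array<Msg> = []
  for (const change of changes) {
    const unseen = !sentKeys.has(change.key)
    if (change.type === `delete` && unseen && !bufferingForTruncate) {
      continue // the subscriber never saw this key, so the delete is dropped
    }
    if (change.type === `insert`) {
      sentKeys.add(change.key)
    }
    out.push(change)
  }
  return out
}

As the diff's comment notes, sentKeys can be empty after loadedInitialState was true even though rows were previously delivered, which is exactly why the bypass is needed during truncate buffering.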
@@ -529,6 +676,15 @@ export class CollectionSubscription
529
676
  }
530
677
 
531
678
  unsubscribe() {
679
+ // Clean up truncate event listener
680
+ this.truncateCleanup?.()
681
+ this.truncateCleanup = undefined
682
+
683
+ // Clean up truncate buffer state
684
+ this.isBufferingForTruncate = false
685
+ this.truncateBuffer = []
686
+ this.pendingTruncateRefetches.clear()
687
+
532
688
  // Unload all subsets that this subscription loaded
533
689
  // We pass the exact same LoadSubsetOptions we used for loadSubset
534
690
  for (const options of this.loadedSubsets) {
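Resetting isBufferingForTruncate in unsubscribe also doubles as a cancellation signal for the microtask queued in handleTruncate, which returns early once the flag is cleared. A tiny sketch of that flag-based cancellation, using hypothetical names (DeferredRefetch is not a @tanstack/db type):

// Illustrative sketch: queueMicrotask offers no cancellation handle, so a flag
// checked at the top of the callback acts as the cancel.
class DeferredRefetch {
  private active = false

  start(refetch: () => void) {
    this.active = true
    queueMicrotask(() => {
      if (!this.active) {
        return // cancelled (e.g. unsubscribed) before the microtask ran
      }
      refetch()
    })
  }

  cancel() {
    this.active = false
  }
}

// Cancelling before the microtask runs prevents the refetch entirely:
const deferred = new DeferredRefetch()
deferred.start(() => console.log(`refetching`))
deferred.cancel() // nothing is logged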
@@ -29,6 +29,11 @@ export class CollectionSubscriber<
29
29
  { resolve: () => void }
30
30
  >()
31
31
 
32
+ // Track keys that have been sent to the D2 pipeline to prevent duplicate inserts
33
+ // This is necessary because different code paths (initial load, change events)
34
+ // can potentially send the same item to D2 multiple times.
35
+ private sentToD2Keys = new Set<string | number>()
36
+
32
37
  constructor(
33
38
  private alias: string,
34
39
  private collectionId: string,
@@ -129,13 +134,33 @@ export class CollectionSubscriber<
129
134
  changes: Iterable<ChangeMessage<any, string | number>>,
130
135
  callback?: () => boolean,
131
136
  ) {
137
+ // Filter changes to prevent duplicate inserts to D2 pipeline.
138
+ // This ensures D2 multiplicity stays at 1 for visible items, so deletes
139
+ // properly reduce multiplicity to 0 (triggering DELETE output).
140
+ const changesArray = Array.isArray(changes) ? changes : [...changes]
141
+ const filteredChanges: Array<ChangeMessage<any, string | number>> = []
142
+ for (const change of changesArray) {
143
+ if (change.type === `insert`) {
144
+ if (this.sentToD2Keys.has(change.key)) {
145
+ // Skip duplicate insert - already sent to D2
146
+ continue
147
+ }
148
+ this.sentToD2Keys.add(change.key)
149
+ } else if (change.type === `delete`) {
150
+ // Remove from tracking so future re-inserts are allowed
151
+ this.sentToD2Keys.delete(change.key)
152
+ }
153
+ // Updates are handled as delete+insert by splitUpdates, so no special handling needed
154
+ filteredChanges.push(change)
155
+ }
156
+
132
157
  // currentSyncState and input are always defined when this method is called
133
158
  // (only called from active subscriptions during a sync session)
134
159
  const input =
135
160
  this.collectionConfigBuilder.currentSyncState!.inputs[this.alias]!
136
161
  const sentChanges = sendChangesToInput(
137
162
  input,
138
- changes,
163
+ filteredChanges,
139
164
  this.collection.config.getKey,
140
165
  )
141
166
 
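The new sentToD2Keys filter keeps every visible key at multiplicity 1 in the multiset the pipeline consumes, so a later delete (weight -1) really brings it back to 0. A compact sketch of the failure mode and the fix, using a plain Map of multiplicities as a stand-in for the D2 multiset (Delta, applyDeltas, and dedupeInserts are illustrative):

// Illustrative sketch: a duplicated insert pushes a key's multiplicity to 2,
// so one delete only drops it to 1 and the row never disappears downstream.
type Delta = { type: `insert` | `delete`; key: string }

function applyDeltas(deltas: Array<Delta>): Map<string, number> {
  const multiplicity = new Map<string, number>()
  for (const d of deltas) {
    const current = multiplicity.get(d.key) ?? 0
    multiplicity.set(d.key, current + (d.type === `insert` ? 1 : -1))
  }
  return multiplicity
}

function dedupeInserts(deltas: Array<Delta>): Array<Delta> {
  const seen = new Set<string>()
  return deltas.filter((d) => {
    if (d.type === `insert`) {
      if (seen.has(d.key)) return false // duplicate insert, skip
      seen.add(d.key)
    } else {
      seen.delete(d.key) // a delete allows future re-inserts
    }
    return true
  })
}

const raw: Array<Delta> = [
  { type: `insert`, key: `a` }, // initial load
  { type: `insert`, key: `a` }, // same row arriving again via a change event
  { type: `delete`, key: `a` },
]

console.log(applyDeltas(raw).get(`a`)) // 1: the row would still appear visible
console.log(applyDeltas(dedupeInserts(raw)).get(`a`)) // 0: the delete takes effect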
@@ -162,8 +187,13 @@ export class CollectionSubscriber<
162
187
  this.sendChangesToPipeline(changes)
163
188
  }
164
189
 
190
+ // Only pass includeInitialState when true. When it's false, we leave it
191
+ // undefined so that user subscriptions with explicit `includeInitialState: false`
192
+ // can be distinguished from internal lazy-loading subscriptions.
193
+ // If we pass `false`, changes.ts would call markAllStateAsSeen() which
194
+ // disables filtering - but internal subscriptions still need filtering.
165
195
  const subscription = this.collection.subscribeChanges(sendChanges, {
166
- includeInitialState,
196
+ ...(includeInitialState && { includeInitialState }),
167
197
  whereExpression,
168
198
  })
169
199
 
@@ -190,10 +220,12 @@ export class CollectionSubscriber<
190
220
  whereExpression,
191
221
  })
192
222
 
193
- // Listen for truncate events to reset cursor tracking state
223
+ // Listen for truncate events to reset cursor tracking state and sentToD2Keys
194
224
  // This ensures that after a must-refetch/truncate, we don't use stale cursor data
225
+ // and allow re-inserts of previously sent keys
195
226
  const truncateUnsubscribe = this.collection.on(`truncate`, () => {
196
227
  this.biggest = undefined
228
+ this.sentToD2Keys.clear()
197
229
  })
198
230
 
199
231
  // Clean up truncate listener when subscription is unsubscribed