@tanstack/db 0.5.13 → 0.5.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/collection/subscription.cjs +89 -2
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +20 -0
- package/dist/esm/collection/subscription.d.ts +20 -0
- package/dist/esm/collection/subscription.js +89 -2
- package/dist/esm/collection/subscription.js.map +1 -1
- package/package.json +1 -1
- package/src/collection/subscription.ts +159 -3
@@ -20,6 +20,9 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
     this.limitedSnapshotRowCount = 0;
     this._status = `ready`;
     this.pendingLoadSubsetPromises = /* @__PURE__ */ new Set();
+    this.isBufferingForTruncate = false;
+    this.truncateBuffer = [];
+    this.pendingTruncateRefetches = /* @__PURE__ */ new Set();
     if (options.onUnsubscribe) {
       this.on(`unsubscribed`, (event) => options.onUnsubscribe(event));
     }
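
The constructor now initializes the truncate-buffering state (the listener itself is wired up a few lines later). In the TypeScript source (`src/collection/subscription.ts`, as embedded in the updated source map), the corresponding field declarations are:

```ts
// Cleanup function for truncate event listener
private truncateCleanup: (() => void) | undefined

// Truncate buffering state
// When a truncate occurs, we buffer changes until all loadSubset refetches complete
// This prevents a flash of missing content between deletes and new inserts
private isBufferingForTruncate = false
private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []
private pendingTruncateRefetches: Set<Promise<void>> = new Set()
```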
@@ -32,10 +35,80 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
     };
     this.callback = callbackWithSentKeysTracking;
     this.filteredCallback = options.whereExpression ? changeEvents.createFilteredCallback(this.callback, options) : this.callback;
+    this.truncateCleanup = this.collection.on(`truncate`, () => {
+      this.handleTruncate();
+    });
   }
   get status() {
     return this._status;
   }
+  /**
+   * Handle collection truncate event by resetting state and re-requesting subsets.
+   * This is called when the sync layer receives a must-refetch and clears all data.
+   *
+   * To prevent a flash of missing content, we buffer all changes (deletes from truncate
+   * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
+   */
+  handleTruncate() {
+    const subsetsToReload = [...this.loadedSubsets];
+    const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null;
+    if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {
+      this.snapshotSent = false;
+      this.loadedInitialState = false;
+      this.limitedSnapshotRowCount = 0;
+      this.lastSentKey = void 0;
+      this.loadedSubsets = [];
+      return;
+    }
+    this.isBufferingForTruncate = true;
+    this.truncateBuffer = [];
+    this.pendingTruncateRefetches.clear();
+    this.snapshotSent = false;
+    this.loadedInitialState = false;
+    this.limitedSnapshotRowCount = 0;
+    this.lastSentKey = void 0;
+    this.loadedSubsets = [];
+    queueMicrotask(() => {
+      if (!this.isBufferingForTruncate) {
+        return;
+      }
+      for (const options of subsetsToReload) {
+        const syncResult = this.collection._sync.loadSubset(options);
+        this.loadedSubsets.push(options);
+        this.trackLoadSubsetPromise(syncResult);
+        if (syncResult instanceof Promise) {
+          this.pendingTruncateRefetches.add(syncResult);
+          syncResult.catch(() => {
+          }).finally(() => {
+            this.pendingTruncateRefetches.delete(syncResult);
+            this.checkTruncateRefetchComplete();
+          });
+        }
+      }
+      if (this.pendingTruncateRefetches.size === 0) {
+        this.flushTruncateBuffer();
+      }
+    });
+  }
+  /**
+   * Check if all truncate refetch promises have completed and flush buffer if so
+   */
+  checkTruncateRefetchComplete() {
+    if (this.pendingTruncateRefetches.size === 0 && this.isBufferingForTruncate) {
+      this.flushTruncateBuffer();
+    }
+  }
+  /**
+   * Flush the truncate buffer, emitting all buffered changes to the callback
+   */
+  flushTruncateBuffer() {
+    this.isBufferingForTruncate = false;
+    const merged = this.truncateBuffer.flat();
+    if (merged.length > 0) {
+      this.filteredCallback(merged);
+    }
+    this.truncateBuffer = [];
+  }
   setOrderByIndex(index) {
     this.orderByIndex = index;
   }
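
The buffer-then-flush choreography is easier to see outside the compiled output. Below is a minimal standalone sketch of the same pattern (illustrative names, not the package's API): changes are collected while any refetch promise is pending and emitted as one batch once the last promise settles.

```ts
// Minimal sketch of the buffer-then-flush pattern used by handleTruncate /
// checkTruncateRefetchComplete / flushTruncateBuffer (illustrative only).
type Change = { type: "insert" | "delete"; key: string };

class TruncateBufferModel {
  private buffering = false;
  private buffer: Array<Array<Change>> = [];
  private pending = new Set<Promise<void>>();

  constructor(private emit: (changes: Array<Change>) => void) {}

  // Mirrors emitEvents: buffer while a truncate is in flight, otherwise emit.
  push(changes: Array<Change>): void {
    if (this.buffering) {
      if (changes.length > 0) this.buffer.push(changes);
    } else {
      this.emit(changes);
    }
  }

  // Mirrors handleTruncate: track each refetch promise, flush once all settle.
  truncate(refetches: Array<Promise<void>>): void {
    this.buffering = true;
    this.buffer = [];
    for (const refetch of refetches) {
      this.pending.add(refetch);
      refetch
        .catch(() => {}) // a failed refetch must still let the buffer flush
        .finally(() => {
          this.pending.delete(refetch);
          if (this.pending.size === 0) this.flush();
        });
    }
    if (this.pending.size === 0) this.flush();
  }

  // Mirrors flushTruncateBuffer: one atomic emission of deletes + inserts.
  private flush(): void {
    this.buffering = false;
    const merged = this.buffer.flat();
    if (merged.length > 0) this.emit(merged);
    this.buffer = [];
  }
}

// The truncate's deletes and the refetched inserts reach the callback together:
const model = new TruncateBufferModel((c) => console.log(c.map((x) => x.type)));
model.truncate([Promise.resolve()]);
model.push([{ type: "delete", key: "a" }]);
model.push([{ type: "insert", key: "a" }]);
// Once the refetch settles, logs: [ "delete", "insert" ]
```

The `queueMicrotask` in `handleTruncate` supplies the ordering this sketch takes for granted: the loadSubset calls are deferred until after the truncate commit's delete events have been buffered, so the flushed batch always carries the deletes ahead of the refetched inserts.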
@@ -85,7 +158,13 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
   }
   emitEvents(changes) {
     const newChanges = this.filterAndFlipChanges(changes);
-    this.filteredCallback(newChanges);
+    if (this.isBufferingForTruncate) {
+      if (newChanges.length > 0) {
+        this.truncateBuffer.push(newChanges);
+      }
+    } else {
+      this.filteredCallback(newChanges);
+    }
   }
   /**
    * Sends the snapshot to the callback.
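
For a subscriber, the flush means the delete produced by the truncate and the insert produced by the refetch for the same key arrive in a single callback invocation. Roughly (illustrative `ChangeMessage`-like shape):

```ts
// One flushed batch as delivered to the subscription callback (illustrative):
const flushedBatch = [
  { type: "delete", key: "todo-1" },
  { type: "insert", key: "todo-1", value: { id: "todo-1", text: "refetched" } },
];
```

Without the buffering, the deletes and the later inserts would be delivered in separate callbacks, producing exactly the brief window with no rows that the buffer is there to avoid.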
@@ -281,6 +360,7 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
     if (this.loadedInitialState || this.skipFiltering) {
       return changes;
     }
+    const skipDeleteFilter = this.isBufferingForTruncate;
     const newChanges = [];
     for (const change of changes) {
       let newChange = change;

@@ -289,7 +369,9 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
         if (change.type === `update`) {
           newChange = { ...change, type: `insert`, previousValue: void 0 };
         } else if (change.type === `delete`) {
-          continue;
+          if (!skipDeleteFilter) {
+            continue;
+          }
         }
         this.sentKeys.add(change.key);
       } else {
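
The new `skipDeleteFilter` flag relaxes one rule: normally a delete for a key this subscriber never received is dropped, but while buffering for a truncate every delete must pass through, because after a truncate `sentKeys` may be empty (it is not populated while `loadedInitialState` is true) even though the deleted rows were previously delivered to the subscriber. A simplified model of that branch (not the library's exact code):

```ts
// Simplified model of the delete-filtering rule (not the library's exact code).
type Msg = { type: "insert" | "update" | "delete"; key: string };

function filterUnsentDeletes(
  changes: Array<Msg>,
  sentKeys: Set<string>,
  isBufferingForTruncate: boolean,
): Array<Msg> {
  const kept: Array<Msg> = [];
  for (const change of changes) {
    const unseen = !sentKeys.has(change.key);
    if (unseen && change.type === "delete" && !isBufferingForTruncate) {
      // Normal case: a delete for a row this subscriber never saw is dropped.
      continue;
    }
    // While buffering for a truncate, deletes pass through so the flushed
    // batch can atomically remove rows delivered before the reset.
    kept.push(change);
  }
  return kept;
}
```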
@@ -324,6 +406,11 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
     this.skipFiltering = true;
   }
   unsubscribe() {
+    this.truncateCleanup?.();
+    this.truncateCleanup = void 0;
+    this.isBufferingForTruncate = false;
+    this.truncateBuffer = [];
+    this.pendingTruncateRefetches.clear();
     for (const options of this.loadedSubsets) {
       this.collection._sync.unloadSubset(options);
     }
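
The cleanup invokes `truncateCleanup`, the unsubscribe function returned when the `truncate` listener was registered in the constructor, and resets the buffering state so that a still-pending `queueMicrotask` callback (which checks `isBufferingForTruncate`) bails out after teardown. A generic sketch of the listener-with-cleanup pattern the diff relies on (illustrative, not the package's event emitter):

```ts
// A register-and-return-cleanup emitter (illustrative sketch).
type Listener = () => void;

class TinyEmitter {
  private listeners = new Map<string, Set<Listener>>();

  on(event: string, listener: Listener): () => void {
    const set = this.listeners.get(event) ?? new Set<Listener>();
    this.listeners.set(event, set);
    set.add(listener);
    // Returning the cleanup lets the caller store it (as truncateCleanup does)
    // and invoke it exactly once during teardown.
    return () => set.delete(listener);
  }

  emit(event: string): void {
    for (const listener of this.listeners.get(event) ?? []) listener();
  }
}
```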
@@ -1 +1 @@
-{"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
+{"version":3,"file":"subscription.cjs","sources":["../../../src/collection/subscription.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}

@@ -46,8 +46,28 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
     private orderByIndex;
     private _status;
     private pendingLoadSubsetPromises;
+    private truncateCleanup;
+    private isBufferingForTruncate;
+    private truncateBuffer;
+    private pendingTruncateRefetches;
     get status(): SubscriptionStatus;
     constructor(collection: CollectionImpl<any, any, any, any, any>, callback: (changes: Array<ChangeMessage<any, any>>) => void, options: CollectionSubscriptionOptions);
+    /**
+     * Handle collection truncate event by resetting state and re-requesting subsets.
+     * This is called when the sync layer receives a must-refetch and clears all data.
+     *
+     * To prevent a flash of missing content, we buffer all changes (deletes from truncate
+     * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
+     */
+    private handleTruncate;
+    /**
+     * Check if all truncate refetch promises have completed and flush buffer if so
+     */
+    private checkTruncateRefetchComplete;
+    /**
+     * Flush the truncate buffer, emitting all buffered changes to the callback
+     */
+    private flushTruncateBuffer;
     setOrderByIndex(index: IndexInterface<any>): void;
     /**
     * Set subscription status and emit events if changed

@@ -46,8 +46,28 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
     private orderByIndex;
     private _status;
     private pendingLoadSubsetPromises;
+    private truncateCleanup;
+    private isBufferingForTruncate;
+    private truncateBuffer;
+    private pendingTruncateRefetches;
     get status(): SubscriptionStatus;
     constructor(collection: CollectionImpl<any, any, any, any, any>, callback: (changes: Array<ChangeMessage<any, any>>) => void, options: CollectionSubscriptionOptions);
+    /**
+     * Handle collection truncate event by resetting state and re-requesting subsets.
+     * This is called when the sync layer receives a must-refetch and clears all data.
+     *
+     * To prevent a flash of missing content, we buffer all changes (deletes from truncate
+     * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
+     */
+    private handleTruncate;
+    /**
+     * Check if all truncate refetch promises have completed and flush buffer if so
+     */
+    private checkTruncateRefetchComplete;
+    /**
+     * Flush the truncate buffer, emitting all buffered changes to the callback
+     */
+    private flushTruncateBuffer;
     setOrderByIndex(index: IndexInterface<any>): void;
     /**
     * Set subscription status and emit events if changed

@@ -18,6 +18,9 @@ class CollectionSubscription extends EventEmitter {
     this.limitedSnapshotRowCount = 0;
     this._status = `ready`;
     this.pendingLoadSubsetPromises = /* @__PURE__ */ new Set();
+    this.isBufferingForTruncate = false;
+    this.truncateBuffer = [];
+    this.pendingTruncateRefetches = /* @__PURE__ */ new Set();
     if (options.onUnsubscribe) {
       this.on(`unsubscribed`, (event) => options.onUnsubscribe(event));
     }

@@ -30,10 +33,80 @@ class CollectionSubscription extends EventEmitter {
     };
     this.callback = callbackWithSentKeysTracking;
     this.filteredCallback = options.whereExpression ? createFilteredCallback(this.callback, options) : this.callback;
+    this.truncateCleanup = this.collection.on(`truncate`, () => {
+      this.handleTruncate();
+    });
   }
   get status() {
     return this._status;
   }
+  /**
+   * Handle collection truncate event by resetting state and re-requesting subsets.
+   * This is called when the sync layer receives a must-refetch and clears all data.
+   *
+   * To prevent a flash of missing content, we buffer all changes (deletes from truncate
+   * and inserts from refetch) until all loadSubset promises resolve, then emit them together.
+   */
+  handleTruncate() {
+    const subsetsToReload = [...this.loadedSubsets];
+    const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null;
+    if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {
+      this.snapshotSent = false;
+      this.loadedInitialState = false;
+      this.limitedSnapshotRowCount = 0;
+      this.lastSentKey = void 0;
+      this.loadedSubsets = [];
+      return;
+    }
+    this.isBufferingForTruncate = true;
+    this.truncateBuffer = [];
+    this.pendingTruncateRefetches.clear();
+    this.snapshotSent = false;
+    this.loadedInitialState = false;
+    this.limitedSnapshotRowCount = 0;
+    this.lastSentKey = void 0;
+    this.loadedSubsets = [];
+    queueMicrotask(() => {
+      if (!this.isBufferingForTruncate) {
+        return;
+      }
+      for (const options of subsetsToReload) {
+        const syncResult = this.collection._sync.loadSubset(options);
+        this.loadedSubsets.push(options);
+        this.trackLoadSubsetPromise(syncResult);
+        if (syncResult instanceof Promise) {
+          this.pendingTruncateRefetches.add(syncResult);
+          syncResult.catch(() => {
+          }).finally(() => {
+            this.pendingTruncateRefetches.delete(syncResult);
+            this.checkTruncateRefetchComplete();
+          });
+        }
+      }
+      if (this.pendingTruncateRefetches.size === 0) {
+        this.flushTruncateBuffer();
+      }
+    });
+  }
+  /**
+   * Check if all truncate refetch promises have completed and flush buffer if so
+   */
+  checkTruncateRefetchComplete() {
+    if (this.pendingTruncateRefetches.size === 0 && this.isBufferingForTruncate) {
+      this.flushTruncateBuffer();
+    }
+  }
+  /**
+   * Flush the truncate buffer, emitting all buffered changes to the callback
+   */
+  flushTruncateBuffer() {
+    this.isBufferingForTruncate = false;
+    const merged = this.truncateBuffer.flat();
+    if (merged.length > 0) {
+      this.filteredCallback(merged);
+    }
+    this.truncateBuffer = [];
+  }
   setOrderByIndex(index) {
     this.orderByIndex = index;
   }

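The hunk above implements a buffer-then-flush strategy: the deletes produced by the truncate and the inserts produced by the refetch are held until every pending loadSubset promise settles, then delivered in one batch. Below is a minimal standalone sketch of that pattern; the Change type and the emit callback are illustrative assumptions, not the library's actual API.

// Illustrative sketch only: hold change batches while refetches are in
// flight, then hand them to the subscriber in a single callback.
type Change = { type: `insert` | `update` | `delete`; key: string; value?: unknown }

class TruncateBufferSketch {
  private buffering = false
  private buffer: Array<Array<Change>> = []
  private pending = new Set<Promise<void>>()

  constructor(private emit: (changes: Array<Change>) => void) {}

  // Start buffering and track every refetch that is actually asynchronous.
  beginTruncate(refetches: Array<Promise<void> | true>) {
    this.buffering = true
    this.buffer = []
    for (const result of refetches) {
      if (result instanceof Promise) {
        this.pending.add(result)
        result
          .catch(() => {}) // still flush even if one refetch fails
          .finally(() => {
            this.pending.delete(result)
            if (this.pending.size === 0) this.flush()
          })
      }
    }
    // All refetches completed synchronously: flush immediately.
    if (this.pending.size === 0) this.flush()
  }

  // Changes arriving while buffering are held instead of emitted.
  push(changes: Array<Change>) {
    if (this.buffering) {
      if (changes.length > 0) this.buffer.push(changes)
    } else {
      this.emit(changes)
    }
  }

  private flush() {
    this.buffering = false
    const merged = this.buffer.flat()
    this.buffer = []
    if (merged.length > 0) this.emit(merged) // deletes + reinserts in one batch
  }
}

In this sketch, push plays the role of emitEvents and beginTruncate mirrors the queueMicrotask body of handleTruncate.
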
@@ -83,7 +156,13 @@ class CollectionSubscription extends EventEmitter {
   }
   emitEvents(changes) {
     const newChanges = this.filterAndFlipChanges(changes);
-    this.filteredCallback(newChanges);
+    if (this.isBufferingForTruncate) {
+      if (newChanges.length > 0) {
+        this.truncateBuffer.push(newChanges);
+      }
+    } else {
+      this.filteredCallback(newChanges);
+    }
   }
   /**
    * Sends the snapshot to the callback.

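With emitEvents branching on isBufferingForTruncate, a subscriber no longer receives the truncate's deletes and the refetched rows as separate callback invocations; once the refetch settles they arrive together. A hypothetical subscriber callback (the simplified ChangeMessage shape here is an assumption) that inspects a batch:

// Hypothetical subscriber callback: after a truncate + refetch it sees one
// combined batch, so nothing renders an intermediate empty state.
type ChangeMessage = { type: `insert` | `update` | `delete`; key: string }

const onChanges = (changes: Array<ChangeMessage>) => {
  const deletes = changes.filter((c) => c.type === `delete`).length
  const inserts = changes.filter((c) => c.type === `insert`).length
  console.log(`received ${changes.length} changes (${deletes} deletes, ${inserts} inserts)`)
}
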
@@ -279,6 +358,7 @@ class CollectionSubscription extends EventEmitter {
     if (this.loadedInitialState || this.skipFiltering) {
       return changes;
     }
+    const skipDeleteFilter = this.isBufferingForTruncate;
     const newChanges = [];
     for (const change of changes) {
       let newChange = change;

@@ -287,7 +367,9 @@ class CollectionSubscription extends EventEmitter {
         if (change.type === `update`) {
           newChange = { ...change, type: `insert`, previousValue: void 0 };
         } else if (change.type === `delete`) {
-          continue;
+          if (!skipDeleteFilter) {
+            continue;
+          }
         }
         this.sentKeys.add(change.key);
       } else {

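skipDeleteFilter relaxes the usual rule that a delete for a key the subscriber never received is dropped: after a truncate the sent-key tracking has just been reset, so those deletes must pass through to the buffer or the flushed batch could not clear rows that were visible before the truncate. A reduced sketch of that decision, with illustrative names rather than the library's internals:

// Illustrative only: deciding whether a delete reaches the subscriber.
function shouldEmitDelete(
  sentKeys: Set<string | number>,
  key: string | number,
  isBufferingForTruncate: boolean,
): boolean {
  if (sentKeys.has(key)) return true // subscriber saw this key, always emit
  // Key was never sent: normally drop the delete, but during truncate
  // buffering the tracking set was just reset, so keep the delete so the
  // flushed batch can remove rows that were visible before the truncate.
  return isBufferingForTruncate
}
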
@@ -322,6 +404,11 @@ class CollectionSubscription extends EventEmitter {
     this.skipFiltering = true;
   }
   unsubscribe() {
+    this.truncateCleanup?.();
+    this.truncateCleanup = void 0;
+    this.isBufferingForTruncate = false;
+    this.truncateBuffer = [];
+    this.pendingTruncateRefetches.clear();
     for (const options of this.loadedSubsets) {
       this.collection._sync.unloadSubset(options);
     }

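The unsubscribe additions follow the usual disposer pattern: the cleanup function returned when the truncate listener was registered is called once and cleared, and the buffering state is reset so a refetch that resolves after teardown cannot flush into a dead subscription. A generic sketch of that pattern, using assumed shapes rather than the library's internals:

// Generic sketch: own an event listener via the disposer returned at
// registration time, and release it exactly once on teardown.
type Disposer = () => void

class ListenerOwner {
  private cleanup: Disposer | undefined

  constructor(subscribe: (handler: () => void) => Disposer) {
    this.cleanup = subscribe(() => this.onEvent())
  }

  private onEvent() {
    // react to the event while the owner is alive
  }

  dispose() {
    this.cleanup?.()
    this.cleanup = undefined
  }
}
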
@@ -1 +1 @@
-
{"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if 
(options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? 
false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? 
createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n 
whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAuCE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAzCV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;
AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
+
{"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n // Cleanup function for truncate event listener\n private truncateCleanup: (() => void) | undefined\n\n // Truncate buffering state\n // When a truncate occurs, we buffer changes until all loadSubset refetches complete\n // This prevents a flash of missing content between deletes and new 
inserts\n private isBufferingForTruncate = false\n private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []\n private pendingTruncateRefetches: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n\n // Listen for truncate events to re-request data after must-refetch\n // When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.\n // We need to re-request all previously loaded subsets to repopulate the data.\n this.truncateCleanup = this.collection.on(`truncate`, () => {\n this.handleTruncate()\n })\n }\n\n /**\n * Handle collection truncate event by resetting state and re-requesting subsets.\n * This is called when the sync layer receives a must-refetch and clears all data.\n *\n * To prevent a flash of missing content, we buffer all changes (deletes from truncate\n * and inserts from refetch) until all loadSubset promises resolve, then emit them together.\n */\n private handleTruncate() {\n // Copy the loaded subsets before clearing (we'll re-request them)\n const subsetsToReload = [...this.loadedSubsets]\n\n // Only buffer if there's an actual loadSubset handler that can do async work.\n // Without a loadSubset handler, there's nothing to re-request and no reason to buffer.\n // This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.\n const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null\n\n // If there are no subsets to reload OR no loadSubset handler, just reset state\n if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n this.loadedSubsets = []\n return\n }\n\n // Start buffering BEFORE we receive the delete events from the truncate commit\n // This ensures we capture both the deletes and subsequent inserts\n this.isBufferingForTruncate = true\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Reset snapshot/pagination tracking state\n // Note: We don't need to populate sentKeys here because filterAndFlipChanges\n // will skip the delete filter when isBufferingForTruncate is true\n this.snapshotSent = false\n this.loadedInitialState = false\n this.limitedSnapshotRowCount = 0\n this.lastSentKey = undefined\n\n // Clear the loadedSubsets array since we're re-requesting fresh\n this.loadedSubsets = []\n\n // Defer the loadSubset calls to a microtask so the truncate commit's delete events\n // are buffered BEFORE the loadSubset calls potentially trigger nested commits.\n // This ensures correct event ordering: deletes first, then inserts.\n 
queueMicrotask(() => {\n // Check if we were unsubscribed while waiting\n if (!this.isBufferingForTruncate) {\n return\n }\n\n // Re-request all previously loaded subsets and track their promises\n for (const options of subsetsToReload) {\n const syncResult = this.collection._sync.loadSubset(options)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(options)\n this.trackLoadSubsetPromise(syncResult)\n\n // Track the promise for buffer flushing\n if (syncResult instanceof Promise) {\n this.pendingTruncateRefetches.add(syncResult)\n syncResult\n .catch(() => {\n // Ignore errors - we still want to flush the buffer even if some requests fail\n })\n .finally(() => {\n this.pendingTruncateRefetches.delete(syncResult)\n this.checkTruncateRefetchComplete()\n })\n }\n }\n\n // If all loadSubset calls were synchronous (returned true), flush now\n // At this point, delete events have already been buffered from the truncate commit\n if (this.pendingTruncateRefetches.size === 0) {\n this.flushTruncateBuffer()\n }\n })\n }\n\n /**\n * Check if all truncate refetch promises have completed and flush buffer if so\n */\n private checkTruncateRefetchComplete() {\n if (\n this.pendingTruncateRefetches.size === 0 &&\n this.isBufferingForTruncate\n ) {\n this.flushTruncateBuffer()\n }\n }\n\n /**\n * Flush the truncate buffer, emitting all buffered changes to the callback\n */\n private flushTruncateBuffer() {\n this.isBufferingForTruncate = false\n\n // Flatten all buffered changes into a single array for atomic emission\n // This ensures consumers see all truncate changes (deletes + inserts) in one callback\n const merged = this.truncateBuffer.flat()\n if (merged.length > 0) {\n this.filteredCallback(merged)\n }\n\n this.truncateBuffer = []\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n\n if (this.isBufferingForTruncate) {\n // Buffer the changes instead of emitting immediately\n // This prevents a flash of missing content during truncate/refetch\n if (newChanges.length > 0) {\n this.truncateBuffer.push(newChanges)\n }\n } else {\n 
this.filteredCallback(newChanges)\n }\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw 
new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n 
this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n // When buffering for truncate, we need all changes (including deletes) to pass through.\n // This is important because:\n // 1. If loadedInitialState was previously true, sentKeys will be empty\n // (trackSentKeys early-returns when loadedInitialState is true)\n // 2. The truncate deletes are for keys that WERE sent to the subscriber\n // 3. 
We're collecting all changes atomically, so filtering doesn't make sense\n const skipDeleteFilter = this.isBufferingForTruncate\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // Filter out deletes for keys that have not been sent,\n // UNLESS we're buffering for truncate (where all deletes should pass through)\n if (!skipDeleteFilter) {\n continue\n }\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Clean up truncate event listener\n this.truncateCleanup?.()\n this.truncateCleanup = undefined\n\n // Clean up truncate buffer state\n this.isBufferingForTruncate = false\n this.truncateBuffer = []\n this.pendingTruncateRefetches.clear()\n\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAiDE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAnDV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAQ5D,SAAQ,yBAAyB;AACjC,SAAQ,iBAAwD,CAAA;AAChE,SAAQ,+CAAmD,IAAA;AAYzD,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAKT,SAAK,kBAAkB,KAAK,WAAW,GAAG,YAAY,MAAM;AAC1D,WAAK,eAAA;AAAA,IACP,CAAC;AAAA,EACH;AAAA,EAvCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8CQ,iBAAiB;AAEvB,UAAM,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAK9C,UAAM,uBAAuB,KAAK,WAAW,MAAM,qBAAqB;AAGxE,QAAI,gBAAgB,WAAW,KAAK,CAAC,sBAAsB;AACzD,WAAK,eAAe;AACpB,WAAK,qBAAqB;AAC1B,WAAK,0BAA0B;AAC/B,WAAK,cAAc;AACnB,WAAK,gBAAgB,CAAA;AACrB;AAAA,IACF;AAIA,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAK9B,SAAK,eAAe;AACpB,SAAK,qBAAqB;AAC1B,SAAK,0BAA0B;AAC/B,SAAK,cAAc;AAGnB,SAAK,gBAAgB,CAAA;AAKrB,mBAAe,MAAM;AAEnB,UAAI,CAAC,KAAK,wBAAwB;AAChC;AAAA,MACF;AAGA,iBAAW,WAAW,iBAAiB;AACrC,cAAM,aAAa,KAAK,WAAW,MAAM,WAAW,OAAO;AAG3D,aAAK,cAAc,KAAK,OAAO;AAC/B,aAAK,uBAAuB,UAAU;AAGtC,YAAI,sBAAsB,SAAS;AACjC,eAAK,yBAAyB,IAAI,UAAU;AAC5C,qBACG,MAAM,MAAM;AAAA,UAEb,CAAC,EACA,QAAQ,MAAM;AACb,iBAAK,yBAAyB,OAAO,UAAU;AAC/C,iBAAK,6BAAA;AAAA,UACP,CAAC;AAAA,QACL;AAAA,MACF;AAIA,UAAI,KAAK,yBAAyB,SAAS,GAAG;AAC5C,aAAK,oBAAA;AAAA,MACP;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,+BAA+B;AACrC,QACE,KAAK,yBAAyB,SAAS,KACvC,KAAK,wBACL;AACA,WAAK,oBAAA;AAAA,IACP;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB;AAC5B,SAAK,yBAAyB;AAI9B,UAAM,SAAS,KAAK,eAAe,KAAA;AACnC,QAAI,OAAO,SAAS,GAAG;AACrB,WAAK,iBAAiB,MAAM;AAAA,IAC9B;AAEA,SAAK,iBAAiB,CAAA;AAAA,EACxB;AAAA,EAEA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AAEpD,QAAI,KAAK,wBAAwB;AAG/B,UAAI,WAAW,SAAS,GAAG;AACzB,aAAK,eAAe,KAAK,UAAU;AAAA,MACrC;AAAA,IACF,OAAO;AACL,WAAK,iBAAiB,UAAU;AAAA,IAClC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,w
BAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAQA,UAAM,mBAAmB,KAAK;AAE9B,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAGnC,cAAI,CAAC,kBAAkB;AACrB;AAAA,UACF;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI
,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAEZ,SAAK,kBAAA;AACL,SAAK,kBAAkB;AAGvB,SAAK,yBAAyB;AAC9B,SAAK,iBAAiB,CAAA;AACtB,SAAK,yBAAyB,MAAA;AAI9B,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
|
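The ordered-snapshot logic embedded in the source map above resumes a limited snapshot from a cursor value (`minValue`) without skipping duplicate sort values: rows equal to `minValue` are loaded first (a previous page may have stopped in the middle of a run of equal values), then rows strictly greater than it. A minimal standalone sketch of that idea, using hypothetical names (`takePage`, plain arrays and sets) rather than the package's actual index and expression APIs:

type Row = { key: string; value: number }

// Take up to `limit` unsent rows in order, resuming at `minValue` without
// skipping duplicates: rows equal to minValue first, then strictly greater ones.
function takePage(
  sorted: Array<Row>, // already ordered by `value`
  limit: number,
  minValue: number | undefined,
  alreadySent: Set<string>,
): Array<Row> {
  const unsent = sorted.filter((row) => !alreadySent.has(row.key))
  if (minValue === undefined) return unsent.slice(0, limit)

  const equalToMin = unsent.filter((row) => row.value === minValue)
  const greaterThanMin = unsent.filter((row) => row.value > minValue)
  return [...equalToMin, ...greaterThanMin].slice(0, limit)
}

// Data [1, 2, 3, 3, 3, 4, 5]; a previous page of three rows stopped at the first `3`.
const rows: Array<Row> = [1, 2, 3, 3, 3, 4, 5].map((value, i) => ({ key: `k${i}`, value }))
const sent = new Set([`k0`, `k1`, `k2`])
console.log(takePage(rows, 3, 3, sent).map((r) => r.value)) // [3, 3, 4]: no duplicate is skipped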
package/package.json
CHANGED
|
package/src/collection/subscription.ts
CHANGED
|
@@ -84,6 +84,16 @@ export class CollectionSubscription
|
|
|
84
84
|
private _status: SubscriptionStatus = `ready`
|
|
85
85
|
private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()
|
|
86
86
|
|
|
87
|
+
// Cleanup function for truncate event listener
|
|
88
|
+
private truncateCleanup: (() => void) | undefined
|
|
89
|
+
|
|
90
|
+
// Truncate buffering state
|
|
91
|
+
// When a truncate occurs, we buffer changes until all loadSubset refetches complete
|
|
92
|
+
// This prevents a flash of missing content between deletes and new inserts
|
|
93
|
+
private isBufferingForTruncate = false
|
|
94
|
+
private truncateBuffer: Array<Array<ChangeMessage<any, any>>> = []
|
|
95
|
+
private pendingTruncateRefetches: Set<Promise<void>> = new Set()
|
|
96
|
+
|
|
87
97
|
public get status(): SubscriptionStatus {
|
|
88
98
|
return this._status
|
|
89
99
|
}
|
|
@@ -116,6 +126,123 @@ export class CollectionSubscription
|
|
|
116
126
|
this.filteredCallback = options.whereExpression
|
|
117
127
|
? createFilteredCallback(this.callback, options)
|
|
118
128
|
: this.callback
|
|
129
|
+
|
|
130
|
+
// Listen for truncate events to re-request data after must-refetch
|
|
131
|
+
// When a truncate happens (e.g., from a 409 must-refetch), all collection data is cleared.
|
|
132
|
+
// We need to re-request all previously loaded subsets to repopulate the data.
|
|
133
|
+
this.truncateCleanup = this.collection.on(`truncate`, () => {
|
|
134
|
+
this.handleTruncate()
|
|
135
|
+
})
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
/**
|
|
139
|
+
* Handle collection truncate event by resetting state and re-requesting subsets.
|
|
140
|
+
* This is called when the sync layer receives a must-refetch and clears all data.
|
|
141
|
+
*
|
|
142
|
+
* To prevent a flash of missing content, we buffer all changes (deletes from truncate
|
|
143
|
+
* and inserts from refetch) until all loadSubset promises resolve, then emit them together.
|
|
144
|
+
*/
|
|
145
|
+
private handleTruncate() {
|
|
146
|
+
// Copy the loaded subsets before clearing (we'll re-request them)
|
|
147
|
+
const subsetsToReload = [...this.loadedSubsets]
|
|
148
|
+
|
|
149
|
+
// Only buffer if there's an actual loadSubset handler that can do async work.
|
|
150
|
+
// Without a loadSubset handler, there's nothing to re-request and no reason to buffer.
|
|
151
|
+
// This prevents unnecessary buffering in eager sync mode or when loadSubset isn't implemented.
|
|
152
|
+
const hasLoadSubsetHandler = this.collection._sync.syncLoadSubsetFn !== null
|
|
153
|
+
|
|
154
|
+
// If there are no subsets to reload OR no loadSubset handler, just reset state
|
|
155
|
+
if (subsetsToReload.length === 0 || !hasLoadSubsetHandler) {
|
|
156
|
+
this.snapshotSent = false
|
|
157
|
+
this.loadedInitialState = false
|
|
158
|
+
this.limitedSnapshotRowCount = 0
|
|
159
|
+
this.lastSentKey = undefined
|
|
160
|
+
this.loadedSubsets = []
|
|
161
|
+
return
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
// Start buffering BEFORE we receive the delete events from the truncate commit
|
|
165
|
+
// This ensures we capture both the deletes and subsequent inserts
|
|
166
|
+
this.isBufferingForTruncate = true
|
|
167
|
+
this.truncateBuffer = []
|
|
168
|
+
this.pendingTruncateRefetches.clear()
|
|
169
|
+
|
|
170
|
+
// Reset snapshot/pagination tracking state
|
|
171
|
+
// Note: We don't need to populate sentKeys here because filterAndFlipChanges
|
|
172
|
+
// will skip the delete filter when isBufferingForTruncate is true
|
|
173
|
+
this.snapshotSent = false
|
|
174
|
+
this.loadedInitialState = false
|
|
175
|
+
this.limitedSnapshotRowCount = 0
|
|
176
|
+
this.lastSentKey = undefined
|
|
177
|
+
|
|
178
|
+
// Clear the loadedSubsets array since we're re-requesting fresh
|
|
179
|
+
this.loadedSubsets = []
|
|
180
|
+
|
|
181
|
+
// Defer the loadSubset calls to a microtask so the truncate commit's delete events
|
|
182
|
+
// are buffered BEFORE the loadSubset calls potentially trigger nested commits.
|
|
183
|
+
// This ensures correct event ordering: deletes first, then inserts.
|
|
184
|
+
queueMicrotask(() => {
|
|
185
|
+
// Check if we were unsubscribed while waiting
|
|
186
|
+
if (!this.isBufferingForTruncate) {
|
|
187
|
+
return
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
// Re-request all previously loaded subsets and track their promises
|
|
191
|
+
for (const options of subsetsToReload) {
|
|
192
|
+
const syncResult = this.collection._sync.loadSubset(options)
|
|
193
|
+
|
|
194
|
+
// Track this loadSubset call so we can unload it later
|
|
195
|
+
this.loadedSubsets.push(options)
|
|
196
|
+
this.trackLoadSubsetPromise(syncResult)
|
|
197
|
+
|
|
198
|
+
// Track the promise for buffer flushing
|
|
199
|
+
if (syncResult instanceof Promise) {
|
|
200
|
+
this.pendingTruncateRefetches.add(syncResult)
|
|
201
|
+
syncResult
|
|
202
|
+
.catch(() => {
|
|
203
|
+
// Ignore errors - we still want to flush the buffer even if some requests fail
|
|
204
|
+
})
|
|
205
|
+
.finally(() => {
|
|
206
|
+
this.pendingTruncateRefetches.delete(syncResult)
|
|
207
|
+
this.checkTruncateRefetchComplete()
|
|
208
|
+
})
|
|
209
|
+
}
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
// If all loadSubset calls were synchronous (returned true), flush now
|
|
213
|
+
// At this point, delete events have already been buffered from the truncate commit
|
|
214
|
+
if (this.pendingTruncateRefetches.size === 0) {
|
|
215
|
+
this.flushTruncateBuffer()
|
|
216
|
+
}
|
|
217
|
+
})
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
/**
|
|
221
|
+
* Check if all truncate refetch promises have completed and flush buffer if so
|
|
222
|
+
*/
|
|
223
|
+
private checkTruncateRefetchComplete() {
|
|
224
|
+
if (
|
|
225
|
+
this.pendingTruncateRefetches.size === 0 &&
|
|
226
|
+
this.isBufferingForTruncate
|
|
227
|
+
) {
|
|
228
|
+
this.flushTruncateBuffer()
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
/**
|
|
233
|
+
* Flush the truncate buffer, emitting all buffered changes to the callback
|
|
234
|
+
*/
|
|
235
|
+
private flushTruncateBuffer() {
|
|
236
|
+
this.isBufferingForTruncate = false
|
|
237
|
+
|
|
238
|
+
// Flatten all buffered changes into a single array for atomic emission
|
|
239
|
+
// This ensures consumers see all truncate changes (deletes + inserts) in one callback
|
|
240
|
+
const merged = this.truncateBuffer.flat()
|
|
241
|
+
if (merged.length > 0) {
|
|
242
|
+
this.filteredCallback(merged)
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
this.truncateBuffer = []
|
|
119
246
|
}
|
|
120
247
|
|
|
121
248
|
setOrderByIndex(index: IndexInterface<any>) {
|
|
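The buffering scheme added above (handleTruncate, checkTruncateRefetchComplete, flushTruncateBuffer) boils down to: start buffering before the truncate's delete events arrive, re-request every previously loaded subset, and flush the buffer as one batch once all refetch promises settle, or immediately if every loadSubset call was synchronous. A simplified, standalone sketch of that pattern; `Change`, `TruncateBuffer`, and `emit` are stand-ins, not the package's real types or API:

type Change = { type: `insert` | `delete`; key: string; value?: unknown }

class TruncateBuffer {
  private buffering = false
  private buffer: Array<Array<Change>> = []
  private pending = new Set<Promise<void>>()

  constructor(private emit: (changes: Array<Change>) => void) {}

  // Route changes into the buffer while a truncate refetch is in flight.
  push(changes: Array<Change>): void {
    if (this.buffering) {
      if (changes.length > 0) this.buffer.push(changes)
    } else {
      this.emit(changes)
    }
  }

  // Start buffering and flush once every asynchronous refetch settles.
  beginTruncate(refetches: Array<Promise<void> | true>): void {
    this.buffering = true
    this.buffer = []
    for (const result of refetches) {
      if (result instanceof Promise) {
        const refetch: Promise<void> = result
        this.pending.add(refetch)
        refetch
          .catch(() => {}) // still flush even if one refetch fails
          .finally(() => {
            this.pending.delete(refetch)
            if (this.pending.size === 0) this.flush()
          })
      }
    }
    // All refetches completed synchronously: flush right away.
    if (this.pending.size === 0) this.flush()
  }

  private flush(): void {
    this.buffering = false
    const merged = this.buffer.flat() // deletes from truncate + inserts from refetch
    this.buffer = []
    if (merged.length > 0) this.emit(merged)
  }
}

A rejected refetch still triggers the flush, mirroring the `.catch(() => {})` / `.finally(...)` chain in the diff, so one failed loadSubset promise cannot leave the subscription buffering indefinitely.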
@@ -179,7 +306,16 @@ export class CollectionSubscription
|
|
|
179
306
|
|
|
180
307
|
emitEvents(changes: Array<ChangeMessage<any, any>>) {
|
|
181
308
|
const newChanges = this.filterAndFlipChanges(changes)
|
|
182
|
-
|
|
309
|
+
|
|
310
|
+
if (this.isBufferingForTruncate) {
|
|
311
|
+
// Buffer the changes instead of emitting immediately
|
|
312
|
+
// This prevents a flash of missing content during truncate/refetch
|
|
313
|
+
if (newChanges.length > 0) {
|
|
314
|
+
this.truncateBuffer.push(newChanges)
|
|
315
|
+
}
|
|
316
|
+
} else {
|
|
317
|
+
this.filteredCallback(newChanges)
|
|
318
|
+
}
|
|
183
319
|
}
|
|
184
320
|
|
|
185
321
|
/**
|
|
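The emitEvents change above routes filtered changes into the truncate buffer while a refetch is in flight, so a subscriber never observes the intermediate state in which the truncate's deletes have landed but the refetched rows have not. A tiny self-contained illustration of the observable effect, using hypothetical shapes rather than the real ChangeMessage type:

type Delivered = Array<{ type: `insert` | `delete`; key: string }>

const received: Array<Delivered> = []
const buffer: Array<Delivered> = []
let isBufferingForTruncate = true

// Mirrors the routing decision in emitEvents, with stand-in state.
function emitChanges(changes: Delivered): void {
  if (isBufferingForTruncate) {
    if (changes.length > 0) buffer.push(changes) // queued; the subscriber sees nothing yet
  } else {
    received.push(changes) // normal path: deliver immediately
  }
}

emitChanges([{ type: `delete`, key: `a` }]) // truncate delete: buffered
emitChanges([{ type: `insert`, key: `a` }]) // refetched insert: buffered

// Simulate the flush that runs once all refetch promises have settled.
isBufferingForTruncate = false
received.push(buffer.flat())

console.log(received.length) // 1: the subscriber never observed an empty collection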
@@ -469,6 +605,14 @@ export class CollectionSubscription
|
|
|
469
605
|
return changes
|
|
470
606
|
}
|
|
471
607
|
|
|
608
|
+
// When buffering for truncate, we need all changes (including deletes) to pass through.
|
|
609
|
+
// This is important because:
|
|
610
|
+
// 1. If loadedInitialState was previously true, sentKeys will be empty
|
|
611
|
+
// (trackSentKeys early-returns when loadedInitialState is true)
|
|
612
|
+
// 2. The truncate deletes are for keys that WERE sent to the subscriber
|
|
613
|
+
// 3. We're collecting all changes atomically, so filtering doesn't make sense
|
|
614
|
+
const skipDeleteFilter = this.isBufferingForTruncate
|
|
615
|
+
|
|
472
616
|
const newChanges = []
|
|
473
617
|
for (const change of changes) {
|
|
474
618
|
let newChange = change
|
|
@@ -478,8 +622,11 @@ export class CollectionSubscription
|
|
|
478
622
|
if (change.type === `update`) {
|
|
479
623
|
newChange = { ...change, type: `insert`, previousValue: undefined }
|
|
480
624
|
} else if (change.type === `delete`) {
|
|
481
|
-
//
|
|
482
|
-
|
|
625
|
+
// Filter out deletes for keys that have not been sent,
|
|
626
|
+
// UNLESS we're buffering for truncate (where all deletes should pass through)
|
|
627
|
+
if (!skipDeleteFilter) {
|
|
628
|
+
continue
|
|
629
|
+
}
|
|
483
630
|
}
|
|
484
631
|
this.sentKeys.add(change.key)
|
|
485
632
|
} else {
|
|
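Taken together, the two hunks above let truncate deletes pass through filterAndFlipChanges while buffering, and keep its existing rules otherwise: unseen updates are flipped into inserts, unseen deletes are dropped, duplicate inserts are dropped so D2 multiplicity stays at 1, and a delete for a seen key clears it from sentKeys so a later re-insert is allowed. A standalone sketch of those rules with simplified types, not the package's ChangeMessage:

type Msg = {
  type: `insert` | `update` | `delete`
  key: string
  value?: unknown
  previousValue?: unknown
}

function filterAndFlip(
  changes: Array<Msg>,
  sentKeys: Set<string>,
  bufferingForTruncate: boolean,
): Array<Msg> {
  const out: Array<Msg> = []
  for (const change of changes) {
    let next = change
    if (!sentKeys.has(change.key)) {
      if (change.type === `update`) {
        // Flip unseen updates into inserts so the subscriber gets a full row.
        next = { ...change, type: `insert`, previousValue: undefined }
      } else if (change.type === `delete` && !bufferingForTruncate) {
        continue // drop deletes for keys the subscriber never saw
      }
      sentKeys.add(change.key)
    } else if (change.type === `insert`) {
      continue // drop duplicate inserts so multiplicity stays at 1
    } else if (change.type === `delete`) {
      sentKeys.delete(change.key) // allow a later re-insert after truncate
    }
    out.push(next)
  }
  return out
}

// During truncate buffering, a delete for a key that is no longer tracked still passes through.
const seen = new Set<string>()
console.log(filterAndFlip([{ type: `delete`, key: `a` }], seen, true)) // [{ type: 'delete', key: 'a' }]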
@@ -529,6 +676,15 @@ export class CollectionSubscription
|
|
|
529
676
|
}
|
|
530
677
|
|
|
531
678
|
unsubscribe() {
|
|
679
|
+
// Clean up truncate event listener
|
|
680
|
+
this.truncateCleanup?.()
|
|
681
|
+
this.truncateCleanup = undefined
|
|
682
|
+
|
|
683
|
+
// Clean up truncate buffer state
|
|
684
|
+
this.isBufferingForTruncate = false
|
|
685
|
+
this.truncateBuffer = []
|
|
686
|
+
this.pendingTruncateRefetches.clear()
|
|
687
|
+
|
|
532
688
|
// Unload all subsets that this subscription loaded
|
|
533
689
|
// We pass the exact same LoadSubsetOptions we used for loadSubset
|
|
534
690
|
for (const options of this.loadedSubsets) {
|