@tanstack/db 0.5.12 → 0.5.13

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -64,6 +64,8 @@ class CollectionChangesManager {
     });
     if (options.includeInitialState) {
       subscription$1.requestSnapshot({ trackLoadSubsetPromise: false });
+    } else if (options.includeInitialState === false) {
+      subscription$1.markAllStateAsSeen();
     }
     this.changeSubscriptions.add(subscription$1);
     return subscription$1;
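
The new branch only fires when `includeInitialState` is explicitly `false`; leaving the option `undefined` keeps the previous behaviour, where changes for keys the subscriber has never seen are still filtered. Below is a minimal TypeScript sketch of that dispatch; `SubscribeOptions` and `SubscriptionLike` are stand-in types declared locally for illustration, not the package's exports.

```ts
type SubscribeOptions = { includeInitialState?: boolean }

interface SubscriptionLike {
  requestSnapshot(opts: { trackLoadSubsetPromise: boolean }): boolean
  markAllStateAsSeen(): void
}

function applyInitialStatePolicy(
  subscription: SubscriptionLike,
  options: SubscribeOptions,
): void {
  if (options.includeInitialState) {
    // Caller wants the current state up front: send a snapshot immediately.
    subscription.requestSnapshot({ trackLoadSubsetPromise: false })
  } else if (options.includeInitialState === false) {
    // Explicit opt-out: skip the snapshot, but let ALL future changes
    // (including deletes for keys never sent) pass through unfiltered.
    subscription.markAllStateAsSeen()
  }
  // If the option is left undefined, neither branch runs and the
  // subscription keeps filtering changes for keys it has not sent yet.
}
```
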
@@ -1 +1 @@
[source map for changes.cjs: the single-line map (with embedded sources) is regenerated to match the change above; full contents omitted]
@@ -13,6 +13,7 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
     this.callback = callback;
     this.options = options;
     this.loadedInitialState = false;
+    this.skipFiltering = false;
     this.snapshotSent = false;
     this.loadedSubsets = [];
     this.sentKeys = /* @__PURE__ */ new Set();
@@ -135,6 +136,9 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
     const filteredSnapshot = snapshot.filter(
       (change) => !this.sentKeys.has(change.key)
     );
+    for (const change of filteredSnapshot) {
+      this.sentKeys.add(change.key);
+    }
     this.snapshotSent = true;
     this.callback(filteredSnapshot);
     return true;
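
Both snapshot paths now record keys in `sentKeys` before invoking the callback; the package's source comments attribute this ordering to a re-entrancy concern, since a change that arrives while the callback is running should already see those keys as sent. A standalone sketch of the pattern follows; the `deliverSnapshot` helper and `SnapshotChange` type are illustrative, not part of the library.

```ts
type SnapshotChange = { type: 'insert' | 'update' | 'delete'; key: string }

const sentKeys = new Set<string>()

function deliverSnapshot(
  snapshot: Array<SnapshotChange>,
  callback: (changes: Array<SnapshotChange>) => void,
): void {
  const unsent = snapshot.filter((change) => !sentKeys.has(change.key))
  // Record keys first...
  for (const change of unsent) {
    sentKeys.add(change.key)
  }
  // ...so a re-entrant emission during the callback already sees them as sent
  // and drops duplicates instead of delivering the same key twice.
  callback(unsent)
}
```

The next hunk applies the same pre-registration in `requestLimitedSnapshot`, just before `this.callback(changes)`.
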
@@ -219,6 +223,9 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
       keys = index.take(valuesNeeded(), biggestObservedValue, filterFn);
     }
     const currentOffset = this.limitedSnapshotRowCount;
+    for (const change of changes) {
+      this.sentKeys.add(change.key);
+    }
     this.callback(changes);
     this.limitedSnapshotRowCount += changes.length;
     if (changes.length > 0) {
@@ -268,34 +275,54 @@ class CollectionSubscription extends eventEmitter.EventEmitter {
    * Filters and flips changes for keys that have not been sent yet.
    * Deletes are filtered out for keys that have not been sent yet.
    * Updates are flipped into inserts for keys that have not been sent yet.
+   * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.
    */
   filterAndFlipChanges(changes) {
-    if (this.loadedInitialState) {
+    if (this.loadedInitialState || this.skipFiltering) {
       return changes;
     }
     const newChanges = [];
     for (const change of changes) {
       let newChange = change;
-      if (!this.sentKeys.has(change.key)) {
+      const keyInSentKeys = this.sentKeys.has(change.key);
+      if (!keyInSentKeys) {
         if (change.type === `update`) {
           newChange = { ...change, type: `insert`, previousValue: void 0 };
         } else if (change.type === `delete`) {
           continue;
         }
         this.sentKeys.add(change.key);
+      } else {
+        if (change.type === `insert`) {
+          continue;
+        } else if (change.type === `delete`) {
+          this.sentKeys.delete(change.key);
+        }
       }
       newChanges.push(newChange);
     }
     return newChanges;
   }
   trackSentKeys(changes) {
-    if (this.loadedInitialState) {
+    if (this.loadedInitialState || this.skipFiltering) {
       return;
     }
     for (const change of changes) {
-      this.sentKeys.add(change.key);
+      if (change.type === `delete`) {
+        this.sentKeys.delete(change.key);
+      } else {
+        this.sentKeys.add(change.key);
+      }
     }
   }
+  /**
+   * Mark that the subscription should not filter any changes.
+   * This is used when includeInitialState is explicitly set to false,
+   * meaning the caller doesn't want initial state but does want ALL future changes.
+   */
+  markAllStateAsSeen() {
+    this.skipFiltering = true;
+  }
   unsubscribe() {
     for (const options of this.loadedSubsets) {
       this.collection._sync.unloadSubset(options);
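
Taken together, the `filterAndFlipChanges` and `trackSentKeys` changes make the sent-key bookkeeping symmetric: a duplicate insert for an already-sent key is dropped (keeping downstream D2 multiplicity at most 1), and a delete clears the key so a later re-insert, e.g. after a truncate and reload, is delivered again. The sketch below is a standalone TypeScript model of that bookkeeping; it omits the `loadedInitialState`/`skipFiltering` early return and the `previousValue` handling, and its `filterAndFlip` function is illustrative rather than the package's implementation.

```ts
type Change = { type: 'insert' | 'update' | 'delete'; key: string; value?: unknown }

function filterAndFlip(changes: Array<Change>, sentKeys: Set<string>): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    if (!sentKeys.has(change.key)) {
      // Key never sent: drop deletes, flip updates into inserts, track the key.
      if (change.type === 'delete') continue
      sentKeys.add(change.key)
      out.push(change.type === 'update' ? { ...change, type: 'insert' } : change)
    } else {
      // Key already sent: drop duplicate inserts so multiplicity stays at 1;
      // a delete clears the key so a future re-insert is delivered again.
      if (change.type === 'insert') continue
      if (change.type === 'delete') sentKeys.delete(change.key)
      out.push(change)
    }
  }
  return out
}

// Example: a duplicate insert is dropped, but a re-insert after a delete passes.
const seen = new Set<string>()
filterAndFlip([{ type: 'insert', key: 'a' }], seen) // -> [insert a]
filterAndFlip([{ type: 'insert', key: 'a' }], seen) // -> [] (duplicate dropped)
filterAndFlip([{ type: 'delete', key: 'a' }], seen) // -> [delete a], key cleared
filterAndFlip([{ type: 'insert', key: 'a' }], seen) // -> [insert a] delivered again
```
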
@@ -1 +1 @@
[source map for subscription.cjs: the single-line map (with embedded sources) is regenerated to match the changes above; full contents omitted]
AAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkBC,OAAAA,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMC,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqBL,UAAAA;AAAAA,YACnBM,UAAAA,IAAI,YAAY,IAAIH,GAAAA,MAAME,SAAQ,CAAC;AAAA,YACnCE,UAAAA,GAAG,YAAY,IAAIJ,GAAAA,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqBD,UAAAA,GAAG,YAAY,IAAIC,GAAAA,MAAME,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;;"}
@@ -32,6 +32,7 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
32
32
  private callback;
33
33
  private options;
34
34
  private loadedInitialState;
35
+ private skipFiltering;
35
36
  private snapshotSent;
36
37
  /**
37
38
  * Track all loadSubset calls made by this subscription so we can unload them on cleanup.
@@ -85,9 +86,16 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
85
86
  * Filters and flips changes for keys that have not been sent yet.
86
87
  * Deletes are filtered out for keys that have not been sent yet.
87
88
  * Updates are flipped into inserts for keys that have not been sent yet.
89
+ * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.
88
90
  */
89
91
  private filterAndFlipChanges;
90
92
  private trackSentKeys;
93
+ /**
94
+ * Mark that the subscription should not filter any changes.
95
+ * This is used when includeInitialState is explicitly set to false,
96
+ * meaning the caller doesn't want initial state but does want ALL future changes.
97
+ */
98
+ markAllStateAsSeen(): void;
91
99
  unsubscribe(): void;
92
100
  }
93
101
  export {};
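The `filterAndFlipChanges` / `trackSentKeys` doc comments above describe how a subscription massages change batches depending on whether it has already emitted a key. A standalone sketch of that rule in plain TypeScript (the `Change` shape is simplified for the example):

```ts
// Sketch of the filter/flip behaviour described in the comments above.
// Unsent keys: deletes are dropped and updates are flipped to inserts.
// Already-sent keys: duplicate inserts are dropped (keeping D2 multiplicity
// at 1) and deletes clear the key so a later re-insert passes through.
type Change<K, V> = { type: 'insert' | 'update' | 'delete'; key: K; value?: V }

function filterAndFlip<K, V>(
  changes: Array<Change<K, V>>,
  sentKeys: Set<K>,
): Array<Change<K, V>> {
  const out: Array<Change<K, V>> = []
  for (const change of changes) {
    if (!sentKeys.has(change.key)) {
      if (change.type === 'delete') continue // never sent, nothing to remove
      out.push(change.type === 'update' ? { ...change, type: 'insert' } : change)
      sentKeys.add(change.key)
    } else {
      if (change.type === 'insert') continue // duplicate insert, drop it
      if (change.type === 'delete') sentKeys.delete(change.key)
      out.push(change)
    }
  }
  return out
}

// Example: a delete for an unseen key is dropped, an update becomes an insert.
const sent = new Set<string>()
console.log(
  filterAndFlip([{ type: 'delete', key: 'a' }, { type: 'update', key: 'b', value: 1 }], sent),
)
// -> [{ type: 'insert', key: 'b', value: 1 }]
```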
@@ -43,7 +43,7 @@ class CollectionSyncManager {
43
43
  deletedKeys: /* @__PURE__ */ new Set()
44
44
  });
45
45
  },
46
- write: (messageWithoutKey) => {
46
+ write: (messageWithOptionalKey) => {
47
47
  const pendingTransaction = this.state.pendingSyncedTransactions[this.state.pendingSyncedTransactions.length - 1];
48
48
  if (!pendingTransaction) {
49
49
  throw new errors.NoPendingSyncTransactionWriteError();
@@ -51,15 +51,20 @@ class CollectionSyncManager {
51
51
  if (pendingTransaction.committed) {
52
52
  throw new errors.SyncTransactionAlreadyCommittedWriteError();
53
53
  }
54
- const key = this.config.getKey(messageWithoutKey.value);
55
- let messageType = messageWithoutKey.type;
56
- if (messageWithoutKey.type === `insert`) {
54
+ let key = void 0;
55
+ if (`key` in messageWithOptionalKey) {
56
+ key = messageWithOptionalKey.key;
57
+ } else {
58
+ key = this.config.getKey(messageWithOptionalKey.value);
59
+ }
60
+ let messageType = messageWithOptionalKey.type;
61
+ if (messageWithOptionalKey.type === `insert`) {
57
62
  const insertingIntoExistingSynced = this.state.syncedData.has(key);
58
63
  const hasPendingDeleteForKey = pendingTransaction.deletedKeys.has(key);
59
64
  const isTruncateTransaction = pendingTransaction.truncate === true;
60
65
  if (insertingIntoExistingSynced && !hasPendingDeleteForKey && !isTruncateTransaction) {
61
66
  const existingValue = this.state.syncedData.get(key);
62
- if (existingValue !== void 0 && utils.deepEquals(existingValue, messageWithoutKey.value)) {
67
+ if (existingValue !== void 0 && utils.deepEquals(existingValue, messageWithOptionalKey.value)) {
63
68
  messageType = `update`;
64
69
  } else {
65
70
  const utils2 = this.config.utils;
@@ -72,7 +77,7 @@ class CollectionSyncManager {
72
77
  }
73
78
  }
74
79
  const message = {
75
- ...messageWithoutKey,
80
+ ...messageWithOptionalKey,
76
81
  type: messageType,
77
82
  key
78
83
  };
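With the change above, `write` derives the key from `getKey(value)` only when the message does not already carry a `key`, which is what allows key-only delete messages. A hedged sketch of how a sync implementation might feed such messages (the event shape and `Todo` type are made up for illustration):

```ts
// Hypothetical glue between a server event stream and the sync callbacks.
// Inserts still pass only `value` (the key is derived via getKey), while
// deletes can now pass just the `key` without reconstructing the full row.
type Todo = { id: string; title: string }
type ServerEvent = { op: 'upsert'; row: Todo } | { op: 'delete'; id: string }
type WriteMessage =
  | { type: 'insert' | 'update'; value: Todo }
  | { type: 'delete'; key: string }

function applyServerEvents(
  events: Array<ServerEvent>,
  begin: () => void,
  write: (message: WriteMessage) => void,
  commit: () => void,
): void {
  begin()
  for (const event of events) {
    if (event.op === 'upsert') {
      write({ type: 'insert', value: event.row }) // key comes from getKey(value)
    } else {
      write({ type: 'delete', key: event.id }) // key-only delete, no value needed
    }
  }
  commit()
}
```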
@@ -1 +1 @@
1
- {"version":3,"file":"sync.cjs","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionConfigurationError,\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from '../errors'\nimport { deepEquals } from '../utils'\nimport { LIVE_QUERY_INTERNAL } from '../query/live/internal.js'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n ChangeMessage,\n CleanupFn,\n CollectionConfig,\n LoadSubsetOptions,\n SyncConfigRes,\n} from '../types'\nimport type { CollectionImpl } from './index.js'\nimport type { CollectionStateManager } from './state'\nimport type { CollectionLifecycleManager } from './lifecycle'\nimport type { CollectionEventsManager } from './events.js'\nimport type { LiveQueryCollectionUtils } from '../query/live/collection-config-builder.js'\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private _events!: CollectionEventsManager\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n private syncMode: `eager` | `on-demand`\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n public syncLoadSubsetFn:\n | ((options: LoadSubsetOptions) => true | Promise<void>)\n | null = null\n public syncUnloadSubsetFn: ((options: LoadSubsetOptions) => void) | null =\n null\n\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n this.syncMode = config.syncMode ?? 
`eager`\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n this._events = deps.events\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const syncRes = normalizeSyncFnResult(\n this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n this.state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (messageWithoutKey: Omit<ChangeMessage<TOutput>, `key`>) => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n const key = this.config.getKey(messageWithoutKey.value)\n\n let messageType = messageWithoutKey.type\n\n // Check if an item with this key already exists when inserting\n if (messageWithoutKey.type === `insert`) {\n const insertingIntoExistingSynced = this.state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n const existingValue = this.state.syncedData.get(key)\n if (\n existingValue !== undefined &&\n deepEquals(existingValue, messageWithoutKey.value)\n ) {\n // The \"insert\" is an echo of a value we already have locally.\n // Treat it as an update so we preserve optimistic intent without\n // throwing a duplicate-key error during reconciliation.\n messageType = `update`\n } else {\n const utils = this.config\n .utils as Partial<LiveQueryCollectionUtils>\n const internal = utils[LIVE_QUERY_INTERNAL]\n throw new DuplicateKeySyncError(key, this.id, {\n hasCustomGetKey: internal?.hasCustomGetKey ?? false,\n hasJoins: internal?.hasJoins ?? 
false,\n })\n }\n }\n }\n\n const message: ChangeMessage<TOutput> = {\n ...messageWithoutKey,\n type: messageType,\n key,\n }\n pendingTransaction.operations.push(message)\n\n if (messageType === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n this.state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n\n // Capture optimistic state NOW to preserve it even if transactions complete\n // before this truncate transaction is committed\n pendingTransaction.optimisticSnapshot = {\n upserts: new Map(this.state.optimisticUpserts),\n deletes: new Set(this.state.optimisticDeletes),\n }\n },\n }),\n )\n\n // Store cleanup function if provided\n this.syncCleanupFn = syncRes?.cleanup ?? null\n\n // Store loadSubset function if provided\n this.syncLoadSubsetFn = syncRes?.loadSubset ?? null\n\n // Store unloadSubset function if provided\n this.syncUnloadSubsetFn = syncRes?.unloadSubset ?? null\n\n // Validate: on-demand mode requires a loadSubset function\n if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {\n throw new CollectionConfigurationError(\n `Collection \"${this.id}\" is configured with syncMode \"on-demand\" but the sync function did not return a loadSubset handler. ` +\n `Either provide a loadSubset handler or use syncMode \"eager\".`,\n )\n }\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n // Warn when calling preload on an on-demand collection\n if (this.syncMode === `on-demand`) {\n console.warn(\n `${this.id ? `[${this.id}] ` : ``}Calling .preload() on a collection with syncMode \"on-demand\" is a no-op. ` +\n `In on-demand mode, data is only loaded when queries request it. ` +\n `Instead, create a live query and call .preload() on that to load the specific data you need. 
` +\n `See https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync for more details.`,\n )\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n /**\n * Gets whether the collection is currently loading more data\n */\n public get isLoadingSubset(): boolean {\n return this.pendingLoadSubsetPromises.size > 0\n }\n\n /**\n * Tracks a load promise for isLoadingSubset state.\n * @internal This is for internal coordination (e.g., live-query glue code), not for general use.\n */\n public trackLoadPromise(promise: Promise<void>): void {\n const loadingStarting = !this.isLoadingSubset\n this.pendingLoadSubsetPromises.add(promise)\n\n if (loadingStarting) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: true,\n previousIsLoadingSubset: false,\n loadingSubsetTransition: `start`,\n })\n }\n\n promise.finally(() => {\n const loadingEnding =\n this.pendingLoadSubsetPromises.size === 1 &&\n this.pendingLoadSubsetPromises.has(promise)\n this.pendingLoadSubsetPromises.delete(promise)\n\n if (loadingEnding) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: false,\n previousIsLoadingSubset: true,\n loadingSubsetTransition: `end`,\n })\n }\n })\n }\n\n /**\n * Requests the sync layer to load more data.\n * @param options Options to control what data is being loaded\n * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.\n * Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.\n */\n public loadSubset(options: LoadSubsetOptions): Promise<void> | true {\n // Bypass loadSubset when syncMode is 'eager'\n if (this.syncMode === `eager`) {\n return true\n }\n\n if (this.syncLoadSubsetFn) {\n const result = this.syncLoadSubsetFn(options)\n // If the result is a promise, track it\n if (result instanceof Promise) {\n this.trackLoadPromise(result)\n return result\n }\n }\n\n return true\n }\n\n /**\n * Notifies the sync layer that a subset is no longer needed.\n * @param options Options that identify what data is being unloaded\n */\n public unloadSubset(options: LoadSubsetOptions): void {\n if (this.syncUnloadSubsetFn) {\n this.syncUnloadSubsetFn(options)\n }\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n }\n}\n\nfunction 
normalizeSyncFnResult(result: void | CleanupFn | SyncConfigRes) {\n if (typeof result === `function`) {\n return { cleanup: result }\n }\n\n if (typeof result === `object`) {\n return result\n }\n\n return undefined\n}\n"],"names":["NoPendingSyncTransactionWriteError","SyncTransactionAlreadyCommittedWriteError","deepEquals","utils","internal","LIVE_QUERY_INTERNAL","DuplicateKeySyncError","NoPendingSyncTransactionCommitError","SyncTransactionAlreadyCommittedError","CollectionConfigurationError","CollectionIsInErrorStateError","SyncCleanupError"],"mappings":";;;;;AA0BO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAsBA,YAAY,QAAkD,IAAY;AAb1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAC5C,SAAO,mBAEI;AACX,SAAO,qBACL;AAEF,SAAQ,gDAAoD,IAAA;AAM1D,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA,EAEA,QAAQ,MAKL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,UAAU;AAAA,QACd,KAAK,OAAO,KAAK,KAAK;AAAA,UACpB,YAAY,KAAK;AAAA,UACjB,OAAO,MAAM;AACX,iBAAK,MAAM,0BAA0B,KAAK;AAAA,cACxC,WAAW;AAAA,cACX,YAAY,CAAA;AAAA,cACZ,iCAAiB,IAAA;AAAA,YAAI,CACtB;AAAA,UACH;AAAA,UACA,OAAO,CAAC,sBAA2D;AACjE,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAIA,OAAAA,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAIC,OAAAA,0CAAA;AAAA,YACZ;AACA,kBAAM,MAAM,KAAK,OAAO,OAAO,kBAAkB,KAAK;AAEtD,gBAAI,cAAc,kBAAkB;AAGpC,gBAAI,kBAAkB,SAAS,UAAU;AACvC,oBAAM,8BAA8B,KAAK,MAAM,WAAW,IAAI,GAAG;AACjE,oBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,oBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,kBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,sBAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,GAAG;AACnD,oBACE,kBAAkB,UAClBC,MAAAA,WAAW,eAAe,kBAAkB,KAAK,GACjD;AAIA,gCAAc;AAAA,gBAChB,OAAO;AACL,wBAAMC,SAAQ,KAAK,OAChB;AACH,wBAAMC,aAAWD,OAAME,4BAAmB;AAC1C,wBAAM,IAAIC,OAAAA,sBAAsB,KAAK,KAAK,IAAI;AAAA,oBAC5C,iBAAiBF,YAAU,mBAAmB;AAAA,oBAC9C,UAAUA,YAAU,YAAY;AAAA,kBAAA,CACjC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAkC;AAAA,cACtC,GAAG;AAAA,cACH,MAAM;AAAA,cACN;AAAA,YAAA;AAEF,+BAAmB,WAAW,KAAK,OAAO;AAE1C,gBAAI,gBAAgB,UAAU;AAC5B,iCAAmB,YAAY,IAAI,GAAG;AAAA,YACxC;AAAA,UACF;AAAA,UACA,QAAQ,MAAM;AACZ,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAIG,OAAAA,oCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAIC,OAAAA,qCAAA;AAAA,YACZ;AAEA,+BAAmB,YAAY;AAE/B,iBAAK,MAAM,0BAAA;AAAA,UACb;AAAA,UACA,WAAW,MAAM;AACf,iBAAK,UAAU,UAAA;AAAA,UACjB;AAAA,UACA,UAAU,MAAM;AACd,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAIR,OAAAA,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAIC,OAAAA,0CAAA;AAAA,YACZ;AAGA,+BAAmB,aAAa,CAAA;AAChC,+BAAmB,YAAY,MAAA;AAO/B,+BAAmB,WAAW;AAI9B,+BAAmB,qBAAqB;AAAA,cACtC,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,cAC7C,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,YAAA;AAAA,UAEjD;AAAA,QAAA,CACD;AAAA,MAAA;AAIH,WAAK,gBAAgB,SAAS,WAAW;AAGzC,WAAK,mBAAmB,SAAS,cAAc;AAG/C,WAAK,qBAAqB,SAAS,gBAAgB;AAGnD,UAAI,KAAK,aAAa,eAAe,CAAC,KAAK,kBAAkB;AAC3D,cAAM,IAAIQ,OAAAA;AAAAA,UACR,eAAe,KAAK,EAAE;AAAA,QAAA;AAAA,MAG1B;AAAA,IACF,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,GAAG,KAAK,KAAK,IAAI,KAAK,EAAE,OAAO,EAAE;AAAA,MAAA;AAAA,IAKrC;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,I
AAIC,OAAAA,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAW,kBAA2B;AACpC,WAAO,KAAK,0BAA0B,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,iBAAiB,SAA8B;AACpD,UAAM,kBAAkB,CAAC,KAAK;AAC9B,SAAK,0BAA0B,IAAI,OAAO;AAE1C,QAAI,iBAAiB;AACnB,WAAK,QAAQ,KAAK,wBAAwB;AAAA,QACxC,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,iBAAiB;AAAA,QACjB,yBAAyB;AAAA,QACzB,yBAAyB;AAAA,MAAA,CAC1B;AAAA,IACH;AAEA,YAAQ,QAAQ,MAAM;AACpB,YAAM,gBACJ,KAAK,0BAA0B,SAAS,KACxC,KAAK,0BAA0B,IAAI,OAAO;AAC5C,WAAK,0BAA0B,OAAO,OAAO;AAE7C,UAAI,eAAe;AACjB,aAAK,QAAQ,KAAK,wBAAwB;AAAA,UACxC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,iBAAiB;AAAA,UACjB,yBAAyB;AAAA,UACzB,yBAAyB;AAAA,QAAA,CAC1B;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,WAAW,SAAkD;AAElE,QAAI,KAAK,aAAa,SAAS;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,kBAAkB;AACzB,YAAM,SAAS,KAAK,iBAAiB,OAAO;AAE5C,UAAI,kBAAkB,SAAS;AAC7B,aAAK,iBAAiB,MAAM;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aAAa,SAAkC;AACpD,QAAI,KAAK,oBAAoB;AAC3B,WAAK,mBAAmB,OAAO;AAAA,IACjC;AAAA,EACF;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAIC,OAAAA,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAIA,OAAAA,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;AAEA,SAAS,sBAAsB,QAA0C;AACvE,MAAI,OAAO,WAAW,YAAY;AAChC,WAAO,EAAE,SAAS,OAAA;AAAA,EACpB;AAEA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;;"}
1
+ {"version":3,"file":"sync.cjs","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionConfigurationError,\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from '../errors'\nimport { deepEquals } from '../utils'\nimport { LIVE_QUERY_INTERNAL } from '../query/live/internal.js'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n ChangeMessageOrDeleteKeyMessage,\n CleanupFn,\n CollectionConfig,\n LoadSubsetOptions,\n OptimisticChangeMessage,\n SyncConfigRes,\n} from '../types'\nimport type { CollectionImpl } from './index.js'\nimport type { CollectionStateManager } from './state'\nimport type { CollectionLifecycleManager } from './lifecycle'\nimport type { CollectionEventsManager } from './events.js'\nimport type { LiveQueryCollectionUtils } from '../query/live/collection-config-builder.js'\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private _events!: CollectionEventsManager\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n private syncMode: `eager` | `on-demand`\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n public syncLoadSubsetFn:\n | ((options: LoadSubsetOptions) => true | Promise<void>)\n | null = null\n public syncUnloadSubsetFn: ((options: LoadSubsetOptions) => void) | null =\n null\n\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n this.syncMode = config.syncMode ?? 
`eager`\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n this._events = deps.events\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const syncRes = normalizeSyncFnResult(\n this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n this.state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (\n messageWithOptionalKey: ChangeMessageOrDeleteKeyMessage<\n TOutput,\n TKey\n >,\n ) => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n let key: TKey | undefined = undefined\n if (`key` in messageWithOptionalKey) {\n key = messageWithOptionalKey.key\n } else {\n key = this.config.getKey(messageWithOptionalKey.value)\n }\n\n let messageType = messageWithOptionalKey.type\n\n // Check if an item with this key already exists when inserting\n if (messageWithOptionalKey.type === `insert`) {\n const insertingIntoExistingSynced = this.state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n const existingValue = this.state.syncedData.get(key)\n if (\n existingValue !== undefined &&\n deepEquals(existingValue, messageWithOptionalKey.value)\n ) {\n // The \"insert\" is an echo of a value we already have locally.\n // Treat it as an update so we preserve optimistic intent without\n // throwing a duplicate-key error during reconciliation.\n messageType = `update`\n } else {\n const utils = this.config\n .utils as Partial<LiveQueryCollectionUtils>\n const internal = utils[LIVE_QUERY_INTERNAL]\n throw new DuplicateKeySyncError(key, this.id, {\n hasCustomGetKey: internal?.hasCustomGetKey ?? false,\n hasJoins: internal?.hasJoins ?? 
false,\n })\n }\n }\n }\n\n const message = {\n ...messageWithOptionalKey,\n type: messageType,\n key,\n } as OptimisticChangeMessage<TOutput, TKey>\n pendingTransaction.operations.push(message)\n\n if (messageType === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n this.state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n\n // Capture optimistic state NOW to preserve it even if transactions complete\n // before this truncate transaction is committed\n pendingTransaction.optimisticSnapshot = {\n upserts: new Map(this.state.optimisticUpserts),\n deletes: new Set(this.state.optimisticDeletes),\n }\n },\n }),\n )\n\n // Store cleanup function if provided\n this.syncCleanupFn = syncRes?.cleanup ?? null\n\n // Store loadSubset function if provided\n this.syncLoadSubsetFn = syncRes?.loadSubset ?? null\n\n // Store unloadSubset function if provided\n this.syncUnloadSubsetFn = syncRes?.unloadSubset ?? null\n\n // Validate: on-demand mode requires a loadSubset function\n if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {\n throw new CollectionConfigurationError(\n `Collection \"${this.id}\" is configured with syncMode \"on-demand\" but the sync function did not return a loadSubset handler. ` +\n `Either provide a loadSubset handler or use syncMode \"eager\".`,\n )\n }\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n // Warn when calling preload on an on-demand collection\n if (this.syncMode === `on-demand`) {\n console.warn(\n `${this.id ? `[${this.id}] ` : ``}Calling .preload() on a collection with syncMode \"on-demand\" is a no-op. ` +\n `In on-demand mode, data is only loaded when queries request it. ` +\n `Instead, create a live query and call .preload() on that to load the specific data you need. 
` +\n `See https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync for more details.`,\n )\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n /**\n * Gets whether the collection is currently loading more data\n */\n public get isLoadingSubset(): boolean {\n return this.pendingLoadSubsetPromises.size > 0\n }\n\n /**\n * Tracks a load promise for isLoadingSubset state.\n * @internal This is for internal coordination (e.g., live-query glue code), not for general use.\n */\n public trackLoadPromise(promise: Promise<void>): void {\n const loadingStarting = !this.isLoadingSubset\n this.pendingLoadSubsetPromises.add(promise)\n\n if (loadingStarting) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: true,\n previousIsLoadingSubset: false,\n loadingSubsetTransition: `start`,\n })\n }\n\n promise.finally(() => {\n const loadingEnding =\n this.pendingLoadSubsetPromises.size === 1 &&\n this.pendingLoadSubsetPromises.has(promise)\n this.pendingLoadSubsetPromises.delete(promise)\n\n if (loadingEnding) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: false,\n previousIsLoadingSubset: true,\n loadingSubsetTransition: `end`,\n })\n }\n })\n }\n\n /**\n * Requests the sync layer to load more data.\n * @param options Options to control what data is being loaded\n * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.\n * Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.\n */\n public loadSubset(options: LoadSubsetOptions): Promise<void> | true {\n // Bypass loadSubset when syncMode is 'eager'\n if (this.syncMode === `eager`) {\n return true\n }\n\n if (this.syncLoadSubsetFn) {\n const result = this.syncLoadSubsetFn(options)\n // If the result is a promise, track it\n if (result instanceof Promise) {\n this.trackLoadPromise(result)\n return result\n }\n }\n\n return true\n }\n\n /**\n * Notifies the sync layer that a subset is no longer needed.\n * @param options Options that identify what data is being unloaded\n */\n public unloadSubset(options: LoadSubsetOptions): void {\n if (this.syncUnloadSubsetFn) {\n this.syncUnloadSubsetFn(options)\n }\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n }\n}\n\nfunction 
normalizeSyncFnResult(result: void | CleanupFn | SyncConfigRes) {\n if (typeof result === `function`) {\n return { cleanup: result }\n }\n\n if (typeof result === `object`) {\n return result\n }\n\n return undefined\n}\n"],"names":["NoPendingSyncTransactionWriteError","SyncTransactionAlreadyCommittedWriteError","deepEquals","utils","internal","LIVE_QUERY_INTERNAL","DuplicateKeySyncError","NoPendingSyncTransactionCommitError","SyncTransactionAlreadyCommittedError","CollectionConfigurationError","CollectionIsInErrorStateError","SyncCleanupError"],"mappings":";;;;;AA2BO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAsBA,YAAY,QAAkD,IAAY;AAb1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAC5C,SAAO,mBAEI;AACX,SAAO,qBACL;AAEF,SAAQ,gDAAoD,IAAA;AAM1D,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA,EAEA,QAAQ,MAKL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,UAAU;AAAA,QACd,KAAK,OAAO,KAAK,KAAK;AAAA,UACpB,YAAY,KAAK;AAAA,UACjB,OAAO,MAAM;AACX,iBAAK,MAAM,0BAA0B,KAAK;AAAA,cACxC,WAAW;AAAA,cACX,YAAY,CAAA;AAAA,cACZ,iCAAiB,IAAA;AAAA,YAAI,CACtB;AAAA,UACH;AAAA,UACA,OAAO,CACL,2BAIG;AACH,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAIA,OAAAA,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAIC,OAAAA,0CAAA;AAAA,YACZ;AAEA,gBAAI,MAAwB;AAC5B,gBAAI,SAAS,wBAAwB;AACnC,oBAAM,uBAAuB;AAAA,YAC/B,OAAO;AACL,oBAAM,KAAK,OAAO,OAAO,uBAAuB,KAAK;AAAA,YACvD;AAEA,gBAAI,cAAc,uBAAuB;AAGzC,gBAAI,uBAAuB,SAAS,UAAU;AAC5C,oBAAM,8BAA8B,KAAK,MAAM,WAAW,IAAI,GAAG;AACjE,oBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,oBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,kBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,sBAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,GAAG;AACnD,oBACE,kBAAkB,UAClBC,MAAAA,WAAW,eAAe,uBAAuB,KAAK,GACtD;AAIA,gCAAc;AAAA,gBAChB,OAAO;AACL,wBAAMC,SAAQ,KAAK,OAChB;AACH,wBAAMC,aAAWD,OAAME,4BAAmB;AAC1C,wBAAM,IAAIC,OAAAA,sBAAsB,KAAK,KAAK,IAAI;AAAA,oBAC5C,iBAAiBF,YAAU,mBAAmB;AAAA,oBAC9C,UAAUA,YAAU,YAAY;AAAA,kBAAA,CACjC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAU;AAAA,cACd,GAAG;AAAA,cACH,MAAM;AAAA,cACN;AAAA,YAAA;AAEF,+BAAmB,WAAW,KAAK,OAAO;AAE1C,gBAAI,gBAAgB,UAAU;AAC5B,iCAAmB,YAAY,IAAI,GAAG;AAAA,YACxC;AAAA,UACF;AAAA,UACA,QAAQ,MAAM;AACZ,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAIG,OAAAA,oCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAIC,OAAAA,qCAAA;AAAA,YACZ;AAEA,+BAAmB,YAAY;AAE/B,iBAAK,MAAM,0BAAA;AAAA,UACb;AAAA,UACA,WAAW,MAAM;AACf,iBAAK,UAAU,UAAA;AAAA,UACjB;AAAA,UACA,UAAU,MAAM;AACd,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAIR,OAAAA,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAIC,OAAAA,0CAAA;AAAA,YACZ;AAGA,+BAAmB,aAAa,CAAA;AAChC,+BAAmB,YAAY,MAAA;AAO/B,+BAAmB,WAAW;AAI9B,+BAAmB,qBAAqB;AAAA,cACtC,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,cAC7C,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,YAAA;AAAA,UAEjD;AAAA,QAAA,CACD;AAAA,MAAA;AAIH,WAAK,gBAAgB,SAAS,WAAW;AAGzC,WAAK,mBAAmB,SAAS,cAAc;AAG/C,WAAK,qBAAqB,SAAS,gBAAgB;AAGnD,UAAI,KAAK,aAAa,eAAe,CAAC,KAAK,kBAAkB;AAC3D,cAAM,IAAIQ,OAAAA;AAAAA,UACR,eAAe,KAAK,EAAE;AAAA,QAAA;AAAA,MAG1B;AAAA,IACF,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,GAAG,KAAK,KAAK,IAAI,KAAK,EAAE,OAAO,EAAE;AAAA,MAAA;AAAA,IAKrC;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WA
AW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,IAAIC,OAAAA,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAW,kBAA2B;AACpC,WAAO,KAAK,0BAA0B,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,iBAAiB,SAA8B;AACpD,UAAM,kBAAkB,CAAC,KAAK;AAC9B,SAAK,0BAA0B,IAAI,OAAO;AAE1C,QAAI,iBAAiB;AACnB,WAAK,QAAQ,KAAK,wBAAwB;AAAA,QACxC,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,iBAAiB;AAAA,QACjB,yBAAyB;AAAA,QACzB,yBAAyB;AAAA,MAAA,CAC1B;AAAA,IACH;AAEA,YAAQ,QAAQ,MAAM;AACpB,YAAM,gBACJ,KAAK,0BAA0B,SAAS,KACxC,KAAK,0BAA0B,IAAI,OAAO;AAC5C,WAAK,0BAA0B,OAAO,OAAO;AAE7C,UAAI,eAAe;AACjB,aAAK,QAAQ,KAAK,wBAAwB;AAAA,UACxC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,iBAAiB;AAAA,UACjB,yBAAyB;AAAA,UACzB,yBAAyB;AAAA,QAAA,CAC1B;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,WAAW,SAAkD;AAElE,QAAI,KAAK,aAAa,SAAS;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,kBAAkB;AACzB,YAAM,SAAS,KAAK,iBAAiB,OAAO;AAE5C,UAAI,kBAAkB,SAAS;AAC7B,aAAK,iBAAiB,MAAM;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aAAa,SAAkC;AACpD,QAAI,KAAK,oBAAoB;AAC3B,WAAK,mBAAmB,OAAO;AAAA,IACjC;AAAA,EACF;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAIC,OAAAA,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAIA,OAAAA,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;AAEA,SAAS,sBAAsB,QAA0C;AACvE,MAAI,OAAO,WAAW,YAAY;AAChC,WAAO,EAAE,SAAS,OAAA;AAAA,EACpB;AAEA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;;"}
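The sync source above also shows the return shape the collection understands: a bare function is normalized to `{ cleanup }`, an object may additionally provide `loadSubset`/`unloadSubset`, and `loadSubset` is mandatory when `syncMode` is "on-demand". A sketch of such a return value; the endpoint and option shape are placeholders, not the package's real `LoadSubsetOptions`:

```ts
// Hypothetical sync function body returning cleanup/loadSubset/unloadSubset.
// loadSubset may return a promise so the collection can expose isLoadingSubset
// while the fetch is pending, or `true` when there is nothing to do.
type SubsetOptionsLike = { limit?: number; offset?: number }

function sketchSyncResult() {
  const controller = new AbortController()
  return {
    cleanup: () => {
      controller.abort() // tear down any listeners opened during sync
    },
    loadSubset: (options: SubsetOptionsLike): true | Promise<void> => {
      if (options.limit === 0) return true // nothing to load
      return fetch(`/api/rows?limit=${options.limit ?? 100}`, {
        signal: controller.signal,
      }).then(() => undefined)
    },
    unloadSubset: (_options: SubsetOptionsLike): void => {
      // optionally release server-side resources for this subset
    },
  }
}
```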
@@ -247,7 +247,7 @@ export interface SyncConfig<T extends object = Record<string, unknown>, TKey ext
247
247
  sync: (params: {
248
248
  collection: Collection<T, TKey, any, any, any>;
249
249
  begin: () => void;
250
- write: (message: Omit<ChangeMessage<T>, `key`>) => void;
250
+ write: (message: ChangeMessageOrDeleteKeyMessage<T, TKey>) => void;
251
251
  commit: () => void;
252
252
  markReady: () => void;
253
253
  truncate: () => void;
@@ -273,9 +273,15 @@ export interface ChangeMessage<T extends object = Record<string, unknown>, TKey
273
273
  type: OperationType;
274
274
  metadata?: Record<string, unknown>;
275
275
  }
276
- export interface OptimisticChangeMessage<T extends object = Record<string, unknown>> extends ChangeMessage<T> {
276
+ export type DeleteKeyMessage<TKey extends string | number = string | number> = Omit<ChangeMessage<any, TKey>, `value` | `previousValue` | `type`> & {
277
+ type: `delete`;
278
+ };
279
+ export type ChangeMessageOrDeleteKeyMessage<T extends object = Record<string, unknown>, TKey extends string | number = string | number> = Omit<ChangeMessage<T>, `key`> | DeleteKeyMessage<TKey>;
280
+ export type OptimisticChangeMessage<T extends object = Record<string, unknown>, TKey extends string | number = string | number> = (ChangeMessage<T> & {
277
281
  isActive?: boolean;
278
- }
282
+ }) | (DeleteKeyMessage<TKey> & {
283
+ isActive?: boolean;
284
+ });
279
285
  /**
280
286
  * The Standard Schema interface.
281
287
  * This follows the standard-schema specification: https://github.com/standard-schema/standard-schema
@@ -652,3 +658,4 @@ type WritableObjectDeep<ObjectType extends object> = {
652
658
  };
653
659
  type WritableArrayDeep<ArrayType extends ReadonlyArray<unknown>> = ArrayType extends readonly [] ? [] : ArrayType extends readonly [...infer U, infer V] ? [...WritableArrayDeep<U>, WritableDeep<V>] : ArrayType extends readonly [infer U, ...infer V] ? [WritableDeep<U>, ...WritableArrayDeep<V>] : ArrayType extends ReadonlyArray<infer U> ? Array<WritableDeep<U>> : ArrayType extends Array<infer U> ? Array<WritableDeep<U>> : ArrayType;
654
660
  export type WritableDeep<T> = T extends BuiltIns ? T : T extends (...arguments_: Array<any>) => unknown ? {} extends WritableObjectDeep<T> ? T : HasMultipleCallSignatures<T> extends true ? T : ((...arguments_: Parameters<T>) => ReturnType<T>) & WritableObjectDeep<T> : T extends ReadonlyMap<unknown, unknown> ? WritableMapDeep<T> : T extends ReadonlySet<unknown> ? WritableSetDeep<T> : T extends ReadonlyArray<unknown> ? WritableArrayDeep<T> : T extends object ? WritableObjectDeep<T> : unknown;
661
+ export type MakeOptional<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
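The new message types can be exercised in isolation to see what shapes `write` now accepts; the definitions below mirror the declarations above, with a trimmed stand-in for `ChangeMessage` rather than an import from the package:

```ts
// Trimmed stand-ins mirroring the declarations above (not imported from @tanstack/db).
type OperationType = 'insert' | 'update' | 'delete'
interface ChangeMessage<
  T extends object = Record<string, unknown>,
  TKey extends string | number = string | number,
> {
  key: TKey
  value: T
  previousValue?: T
  type: OperationType
  metadata?: Record<string, unknown>
}

type DeleteKeyMessage<TKey extends string | number = string | number> = Omit<
  ChangeMessage<any, TKey>,
  'value' | 'previousValue' | 'type'
> & { type: 'delete' }

type ChangeMessageOrDeleteKeyMessage<
  T extends object = Record<string, unknown>,
  TKey extends string | number = string | number,
> = Omit<ChangeMessage<T>, 'key'> | DeleteKeyMessage<TKey>

type MakeOptional<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>

// A keyless insert (the key is derived from the value by the collection)…
const insertMsg: ChangeMessageOrDeleteKeyMessage<{ id: string }> = {
  type: 'insert',
  value: { id: '1' },
}
// …and a key-only delete carrying no value at all.
const deleteMsg: ChangeMessageOrDeleteKeyMessage<{ id: string }, string> = {
  type: 'delete',
  key: '1',
}
// MakeOptional: the same ChangeMessage shape with an optional key.
type KeylessInsertInput = MakeOptional<ChangeMessage<{ id: string }>, 'key'>
```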
@@ -62,6 +62,8 @@ class CollectionChangesManager {
62
62
  });
63
63
  if (options.includeInitialState) {
64
64
  subscription.requestSnapshot({ trackLoadSubsetPromise: false });
65
+ } else if (options.includeInitialState === false) {
66
+ subscription.markAllStateAsSeen();
65
67
  }
66
68
  this.changeSubscriptions.add(subscription);
67
69
  return subscription;
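The new branch only fires when `includeInitialState` is explicitly `false` (not merely omitted): the subscription then skips the snapshot but marks all existing state as seen, so every future change, including deletes for rows it never emitted, reaches the callback. A hedged usage sketch; `todoCollection` is declared structurally here only so the example type-checks, standing in for a real collection instance:

```ts
// Structural stand-in for a collection; in real code this would be the
// instance returned by createCollection(...).
declare const todoCollection: {
  subscribeChanges: (
    callback: (changes: Array<{ type: 'insert' | 'update' | 'delete'; key: string }>) => void,
    options?: { includeInitialState?: boolean },
  ) => { unsubscribe: () => void }
}

// Explicit false: no initial snapshot, but ALL future changes flow through
// unfiltered because existing state is marked as already seen.
const liveOnly = todoCollection.subscribeChanges(
  (changes) => changes.forEach((c) => console.log(c.type, c.key)),
  { includeInitialState: false },
)

// Option omitted: also no snapshot, but changes for keys never sent to this
// subscriber are still filtered/flipped (deletes dropped, updates → inserts).
const filtered = todoCollection.subscribeChanges((changes) => console.log(changes.length))

liveOnly.unsubscribe()
filtered.unsubscribe()
```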
@@ -1 +1 @@
1
- {"version":3,"file":"changes.js","sources":["../../../src/collection/changes.ts"],"sourcesContent":["import { NegativeActiveSubscribersError } from '../errors'\nimport { CollectionSubscription } from './subscription.js'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type { ChangeMessage, SubscribeChangesOptions } from '../types'\nimport type { CollectionLifecycleManager } from './lifecycle.js'\nimport type { CollectionSyncManager } from './sync.js'\nimport type { CollectionEventsManager } from './events.js'\nimport type { CollectionImpl } from './index.js'\n\nexport class CollectionChangesManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private sync!: CollectionSyncManager<TOutput, TKey, TSchema, TInput>\n private events!: CollectionEventsManager\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n\n public activeSubscribersCount = 0\n public changeSubscriptions = new Set<CollectionSubscription>()\n public batchedEvents: Array<ChangeMessage<TOutput, TKey>> = []\n public shouldBatchEvents = false\n\n /**\n * Creates a new CollectionChangesManager instance\n */\n constructor() {}\n\n public setDeps(deps: {\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n sync: CollectionSyncManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n }) {\n this.lifecycle = deps.lifecycle\n this.sync = deps.sync\n this.events = deps.events\n this.collection = deps.collection\n }\n\n /**\n * Emit an empty ready event to notify subscribers that the collection is ready\n * This bypasses the normal empty array check in emitEvents\n */\n public emitEmptyReadyEvent(): void {\n // Emit empty array directly to all subscribers\n for (const subscription of this.changeSubscriptions) {\n subscription.emitEvents([])\n }\n }\n\n /**\n * Emit events either immediately or batch them for later emission\n */\n public emitEvents(\n changes: Array<ChangeMessage<TOutput, TKey>>,\n forceEmit = false,\n ): void {\n // Skip batching for user actions (forceEmit=true) to keep UI responsive\n if (this.shouldBatchEvents && !forceEmit) {\n // Add events to the batch\n this.batchedEvents.push(...changes)\n return\n }\n\n // Either we're not batching, or we're forcing emission (user action or ending batch cycle)\n let eventsToEmit = changes\n\n if (forceEmit) {\n // Force emit is used to end a batch (e.g. after a sync commit). 
Combine any\n // buffered optimistic events with the final changes so subscribers see the\n // whole picture, even if the sync diff is empty.\n if (this.batchedEvents.length > 0) {\n eventsToEmit = [...this.batchedEvents, ...changes]\n }\n this.batchedEvents = []\n this.shouldBatchEvents = false\n }\n\n if (eventsToEmit.length === 0) {\n return\n }\n\n // Emit to all listeners\n for (const subscription of this.changeSubscriptions) {\n subscription.emitEvents(eventsToEmit)\n }\n }\n\n /**\n * Subscribe to changes in the collection\n */\n public subscribeChanges(\n callback: (changes: Array<ChangeMessage<TOutput>>) => void,\n options: SubscribeChangesOptions = {},\n ): CollectionSubscription {\n // Start sync and track subscriber\n this.addSubscriber()\n\n const subscription = new CollectionSubscription(this.collection, callback, {\n ...options,\n onUnsubscribe: () => {\n this.removeSubscriber()\n this.changeSubscriptions.delete(subscription)\n },\n })\n\n if (options.includeInitialState) {\n subscription.requestSnapshot({ trackLoadSubsetPromise: false })\n }\n\n // Add to batched listeners\n this.changeSubscriptions.add(subscription)\n\n return subscription\n }\n\n /**\n * Increment the active subscribers count and start sync if needed\n */\n private addSubscriber(): void {\n const previousSubscriberCount = this.activeSubscribersCount\n this.activeSubscribersCount++\n this.lifecycle.cancelGCTimer()\n\n // Start sync if collection was cleaned up\n if (\n this.lifecycle.status === `cleaned-up` ||\n this.lifecycle.status === `idle`\n ) {\n this.sync.startSync()\n }\n\n this.events.emitSubscribersChange(\n this.activeSubscribersCount,\n previousSubscriberCount,\n )\n }\n\n /**\n * Decrement the active subscribers count and start GC timer if needed\n */\n private removeSubscriber(): void {\n const previousSubscriberCount = this.activeSubscribersCount\n this.activeSubscribersCount--\n\n if (this.activeSubscribersCount === 0) {\n this.lifecycle.startGCTimer()\n } else if (this.activeSubscribersCount < 0) {\n throw new NegativeActiveSubscribersError()\n }\n\n this.events.emitSubscribersChange(\n this.activeSubscribersCount,\n previousSubscriberCount,\n )\n }\n\n /**\n * Clean up the collection by stopping sync and clearing data\n * This can be called manually or automatically by garbage collection\n */\n public cleanup(): void {\n this.batchedEvents = []\n this.shouldBatchEvents = false\n 
}\n}\n"],"names":[],"mappings":";;AASO,MAAM,yBAKX;AAAA;AAAA;AAAA;AAAA,EAcA,cAAc;AARd,SAAO,yBAAyB;AAChC,SAAO,0CAA0B,IAAA;AACjC,SAAO,gBAAqD,CAAA;AAC5D,SAAO,oBAAoB;AAAA,EAKZ;AAAA,EAER,QAAQ,MAKZ;AACD,SAAK,YAAY,KAAK;AACtB,SAAK,OAAO,KAAK;AACjB,SAAK,SAAS,KAAK;AACnB,SAAK,aAAa,KAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,sBAA4B;AAEjC,eAAW,gBAAgB,KAAK,qBAAqB;AACnD,mBAAa,WAAW,EAAE;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,WACL,SACA,YAAY,OACN;AAEN,QAAI,KAAK,qBAAqB,CAAC,WAAW;AAExC,WAAK,cAAc,KAAK,GAAG,OAAO;AAClC;AAAA,IACF;AAGA,QAAI,eAAe;AAEnB,QAAI,WAAW;AAIb,UAAI,KAAK,cAAc,SAAS,GAAG;AACjC,uBAAe,CAAC,GAAG,KAAK,eAAe,GAAG,OAAO;AAAA,MACnD;AACA,WAAK,gBAAgB,CAAA;AACrB,WAAK,oBAAoB;AAAA,IAC3B;AAEA,QAAI,aAAa,WAAW,GAAG;AAC7B;AAAA,IACF;AAGA,eAAW,gBAAgB,KAAK,qBAAqB;AACnD,mBAAa,WAAW,YAAY;AAAA,IACtC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,iBACL,UACA,UAAmC,IACX;AAExB,SAAK,cAAA;AAEL,UAAM,eAAe,IAAI,uBAAuB,KAAK,YAAY,UAAU;AAAA,MACzE,GAAG;AAAA,MACH,eAAe,MAAM;AACnB,aAAK,iBAAA;AACL,aAAK,oBAAoB,OAAO,YAAY;AAAA,MAC9C;AAAA,IAAA,CACD;AAED,QAAI,QAAQ,qBAAqB;AAC/B,mBAAa,gBAAgB,EAAE,wBAAwB,MAAA,CAAO;AAAA,IAChE;AAGA,SAAK,oBAAoB,IAAI,YAAY;AAEzC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAsB;AAC5B,UAAM,0BAA0B,KAAK;AACrC,SAAK;AACL,SAAK,UAAU,cAAA;AAGf,QACE,KAAK,UAAU,WAAW,gBAC1B,KAAK,UAAU,WAAW,QAC1B;AACA,WAAK,KAAK,UAAA;AAAA,IACZ;AAEA,SAAK,OAAO;AAAA,MACV,KAAK;AAAA,MACL;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,UAAM,0BAA0B,KAAK;AACrC,SAAK;AAEL,QAAI,KAAK,2BAA2B,GAAG;AACrC,WAAK,UAAU,aAAA;AAAA,IACjB,WAAW,KAAK,yBAAyB,GAAG;AAC1C,YAAM,IAAI,+BAAA;AAAA,IACZ;AAEA,SAAK,OAAO;AAAA,MACV,KAAK;AAAA,MACL;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAgB;AACrB,SAAK,gBAAgB,CAAA;AACrB,SAAK,oBAAoB;AAAA,EAC3B;AACF;"}
1
+ {"version":3,"file":"changes.js","sources":["../../../src/collection/changes.ts"],"sourcesContent":["import { NegativeActiveSubscribersError } from '../errors'\nimport { CollectionSubscription } from './subscription.js'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type { ChangeMessage, SubscribeChangesOptions } from '../types'\nimport type { CollectionLifecycleManager } from './lifecycle.js'\nimport type { CollectionSyncManager } from './sync.js'\nimport type { CollectionEventsManager } from './events.js'\nimport type { CollectionImpl } from './index.js'\n\nexport class CollectionChangesManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private sync!: CollectionSyncManager<TOutput, TKey, TSchema, TInput>\n private events!: CollectionEventsManager\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n\n public activeSubscribersCount = 0\n public changeSubscriptions = new Set<CollectionSubscription>()\n public batchedEvents: Array<ChangeMessage<TOutput, TKey>> = []\n public shouldBatchEvents = false\n\n /**\n * Creates a new CollectionChangesManager instance\n */\n constructor() {}\n\n public setDeps(deps: {\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n sync: CollectionSyncManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n }) {\n this.lifecycle = deps.lifecycle\n this.sync = deps.sync\n this.events = deps.events\n this.collection = deps.collection\n }\n\n /**\n * Emit an empty ready event to notify subscribers that the collection is ready\n * This bypasses the normal empty array check in emitEvents\n */\n public emitEmptyReadyEvent(): void {\n // Emit empty array directly to all subscribers\n for (const subscription of this.changeSubscriptions) {\n subscription.emitEvents([])\n }\n }\n\n /**\n * Emit events either immediately or batch them for later emission\n */\n public emitEvents(\n changes: Array<ChangeMessage<TOutput, TKey>>,\n forceEmit = false,\n ): void {\n // Skip batching for user actions (forceEmit=true) to keep UI responsive\n if (this.shouldBatchEvents && !forceEmit) {\n // Add events to the batch\n this.batchedEvents.push(...changes)\n return\n }\n\n // Either we're not batching, or we're forcing emission (user action or ending batch cycle)\n let eventsToEmit = changes\n\n if (forceEmit) {\n // Force emit is used to end a batch (e.g. after a sync commit). 
Combine any\n // buffered optimistic events with the final changes so subscribers see the\n // whole picture, even if the sync diff is empty.\n if (this.batchedEvents.length > 0) {\n eventsToEmit = [...this.batchedEvents, ...changes]\n }\n this.batchedEvents = []\n this.shouldBatchEvents = false\n }\n\n if (eventsToEmit.length === 0) {\n return\n }\n\n // Emit to all listeners\n for (const subscription of this.changeSubscriptions) {\n subscription.emitEvents(eventsToEmit)\n }\n }\n\n /**\n * Subscribe to changes in the collection\n */\n public subscribeChanges(\n callback: (changes: Array<ChangeMessage<TOutput>>) => void,\n options: SubscribeChangesOptions = {},\n ): CollectionSubscription {\n // Start sync and track subscriber\n this.addSubscriber()\n\n const subscription = new CollectionSubscription(this.collection, callback, {\n ...options,\n onUnsubscribe: () => {\n this.removeSubscriber()\n this.changeSubscriptions.delete(subscription)\n },\n })\n\n if (options.includeInitialState) {\n subscription.requestSnapshot({ trackLoadSubsetPromise: false })\n } else if (options.includeInitialState === false) {\n // When explicitly set to false (not just undefined), mark all state as \"seen\"\n // so that all future changes (including deletes) pass through unfiltered.\n subscription.markAllStateAsSeen()\n }\n\n // Add to batched listeners\n this.changeSubscriptions.add(subscription)\n\n return subscription\n }\n\n /**\n * Increment the active subscribers count and start sync if needed\n */\n private addSubscriber(): void {\n const previousSubscriberCount = this.activeSubscribersCount\n this.activeSubscribersCount++\n this.lifecycle.cancelGCTimer()\n\n // Start sync if collection was cleaned up\n if (\n this.lifecycle.status === `cleaned-up` ||\n this.lifecycle.status === `idle`\n ) {\n this.sync.startSync()\n }\n\n this.events.emitSubscribersChange(\n this.activeSubscribersCount,\n previousSubscriberCount,\n )\n }\n\n /**\n * Decrement the active subscribers count and start GC timer if needed\n */\n private removeSubscriber(): void {\n const previousSubscriberCount = this.activeSubscribersCount\n this.activeSubscribersCount--\n\n if (this.activeSubscribersCount === 0) {\n this.lifecycle.startGCTimer()\n } else if (this.activeSubscribersCount < 0) {\n throw new NegativeActiveSubscribersError()\n }\n\n this.events.emitSubscribersChange(\n this.activeSubscribersCount,\n previousSubscriberCount,\n )\n }\n\n /**\n * Clean up the collection by stopping sync and clearing data\n * This can be called manually or automatically by garbage collection\n */\n public cleanup(): void {\n this.batchedEvents = []\n this.shouldBatchEvents = false\n 
}\n}\n"],"names":[],"mappings":";;AASO,MAAM,yBAKX;AAAA;AAAA;AAAA;AAAA,EAcA,cAAc;AARd,SAAO,yBAAyB;AAChC,SAAO,0CAA0B,IAAA;AACjC,SAAO,gBAAqD,CAAA;AAC5D,SAAO,oBAAoB;AAAA,EAKZ;AAAA,EAER,QAAQ,MAKZ;AACD,SAAK,YAAY,KAAK;AACtB,SAAK,OAAO,KAAK;AACjB,SAAK,SAAS,KAAK;AACnB,SAAK,aAAa,KAAK;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,sBAA4B;AAEjC,eAAW,gBAAgB,KAAK,qBAAqB;AACnD,mBAAa,WAAW,EAAE;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,WACL,SACA,YAAY,OACN;AAEN,QAAI,KAAK,qBAAqB,CAAC,WAAW;AAExC,WAAK,cAAc,KAAK,GAAG,OAAO;AAClC;AAAA,IACF;AAGA,QAAI,eAAe;AAEnB,QAAI,WAAW;AAIb,UAAI,KAAK,cAAc,SAAS,GAAG;AACjC,uBAAe,CAAC,GAAG,KAAK,eAAe,GAAG,OAAO;AAAA,MACnD;AACA,WAAK,gBAAgB,CAAA;AACrB,WAAK,oBAAoB;AAAA,IAC3B;AAEA,QAAI,aAAa,WAAW,GAAG;AAC7B;AAAA,IACF;AAGA,eAAW,gBAAgB,KAAK,qBAAqB;AACnD,mBAAa,WAAW,YAAY;AAAA,IACtC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKO,iBACL,UACA,UAAmC,IACX;AAExB,SAAK,cAAA;AAEL,UAAM,eAAe,IAAI,uBAAuB,KAAK,YAAY,UAAU;AAAA,MACzE,GAAG;AAAA,MACH,eAAe,MAAM;AACnB,aAAK,iBAAA;AACL,aAAK,oBAAoB,OAAO,YAAY;AAAA,MAC9C;AAAA,IAAA,CACD;AAED,QAAI,QAAQ,qBAAqB;AAC/B,mBAAa,gBAAgB,EAAE,wBAAwB,MAAA,CAAO;AAAA,IAChE,WAAW,QAAQ,wBAAwB,OAAO;AAGhD,mBAAa,mBAAA;AAAA,IACf;AAGA,SAAK,oBAAoB,IAAI,YAAY;AAEzC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAsB;AAC5B,UAAM,0BAA0B,KAAK;AACrC,SAAK;AACL,SAAK,UAAU,cAAA;AAGf,QACE,KAAK,UAAU,WAAW,gBAC1B,KAAK,UAAU,WAAW,QAC1B;AACA,WAAK,KAAK,UAAA;AAAA,IACZ;AAEA,SAAK,OAAO;AAAA,MACV,KAAK;AAAA,MACL;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAyB;AAC/B,UAAM,0BAA0B,KAAK;AACrC,SAAK;AAEL,QAAI,KAAK,2BAA2B,GAAG;AACrC,WAAK,UAAU,aAAA;AAAA,IACjB,WAAW,KAAK,yBAAyB,GAAG;AAC1C,YAAM,IAAI,+BAAA;AAAA,IACZ;AAEA,SAAK,OAAO;AAAA,MACV,KAAK;AAAA,MACL;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAgB;AACrB,SAAK,gBAAgB,CAAA;AACrB,SAAK,oBAAoB;AAAA,EAC3B;AACF;"}
@@ -32,6 +32,7 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
  private callback;
  private options;
  private loadedInitialState;
+ private skipFiltering;
  private snapshotSent;
  /**
  * Track all loadSubset calls made by this subscription so we can unload them on cleanup.
@@ -85,9 +86,16 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
  * Filters and flips changes for keys that have not been sent yet.
  * Deletes are filtered out for keys that have not been sent yet.
  * Updates are flipped into inserts for keys that have not been sent yet.
+ * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.
  */
  private filterAndFlipChanges;
  private trackSentKeys;
+ /**
+ * Mark that the subscription should not filter any changes.
+ * This is used when includeInitialState is explicitly set to false,
+ * meaning the caller doesn't want initial state but does want ALL future changes.
+ */
+ markAllStateAsSeen(): void;
  unsubscribe(): void;
  }
  export {};
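Taken together with the changes.js hunk above, the new declaration means that an explicit `includeInitialState: false` now opts the subscriber out of change filtering entirely, rather than merely skipping the snapshot. Below is a minimal TypeScript sketch of the consumer-facing difference. The `todos` collection and its config are illustrative assumptions; `subscribeChanges`, `includeInitialState`, and `unsubscribe` come from the diff itself.

```ts
import { createCollection } from '@tanstack/db'

type Todo = { id: string; text: string }

// Illustrative collection; getKey and sync.sync mirror the config fields the
// compiled sources in this diff reference (config.getKey, config.sync.sync).
const todos = createCollection({
  getKey: (todo: Todo) => todo.id,
  sync: {
    sync: ({ markReady }) => {
      markReady() // no backend in this sketch
    },
  },
})

// Default / true: the subscriber first receives a snapshot of current state,
// then incremental changes.
const withSnapshot = todos.subscribeChanges((changes) => {
  console.log(`snapshot + live`, changes)
}, { includeInitialState: true })

// Explicit false (the new 0.5.13 branch): markAllStateAsSeen() is called
// internally, so no snapshot is sent, but every future change (including
// deletes for keys this subscriber never received) passes through unfiltered.
const liveOnly = todos.subscribeChanges((changes) => {
  console.log(`live only`, changes)
}, { includeInitialState: false })

withSnapshot.unsubscribe()
liveOnly.unsubscribe()
```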
@@ -11,6 +11,7 @@ class CollectionSubscription extends EventEmitter {
  this.callback = callback;
  this.options = options;
  this.loadedInitialState = false;
+ this.skipFiltering = false;
  this.snapshotSent = false;
  this.loadedSubsets = [];
  this.sentKeys = /* @__PURE__ */ new Set();
@@ -133,6 +134,9 @@ class CollectionSubscription extends EventEmitter {
  const filteredSnapshot = snapshot.filter(
  (change) => !this.sentKeys.has(change.key)
  );
+ for (const change of filteredSnapshot) {
+ this.sentKeys.add(change.key);
+ }
  this.snapshotSent = true;
  this.callback(filteredSnapshot);
  return true;
@@ -217,6 +221,9 @@ class CollectionSubscription extends EventEmitter {
  keys = index.take(valuesNeeded(), biggestObservedValue, filterFn);
  }
  const currentOffset = this.limitedSnapshotRowCount;
+ for (const change of changes) {
+ this.sentKeys.add(change.key);
+ }
  this.callback(changes);
  this.limitedSnapshotRowCount += changes.length;
  if (changes.length > 0) {
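Both hunks above move the `sentKeys` bookkeeping ahead of the callback invocation. The sketch below is a stripped-down model (not the library's API) of why that ordering matters when user code can synchronously trigger another emission; all names are illustrative.

```ts
// Minimal model: snapshot keys are marked as sent before the callback runs,
// so a change that arrives while the callback is executing cannot produce a
// second emission for the same key.
class MiniSubscription {
  private sentKeys = new Set<string>()

  constructor(private callback: (keys: Array<string>) => void) {}

  sendSnapshot(keys: Array<string>) {
    const unseen = keys.filter((k) => !this.sentKeys.has(k))
    for (const k of unseen) this.sentKeys.add(k) // mark first...
    this.callback(unseen) // ...then hand off to user code
  }

  emitInsert(key: string) {
    if (this.sentKeys.has(key)) return // duplicate insert is dropped
    this.sentKeys.add(key)
    this.callback([key])
  }
}
```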
@@ -266,34 +273,54 @@ class CollectionSubscription extends EventEmitter {
  * Filters and flips changes for keys that have not been sent yet.
  * Deletes are filtered out for keys that have not been sent yet.
  * Updates are flipped into inserts for keys that have not been sent yet.
+ * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.
  */
  filterAndFlipChanges(changes) {
- if (this.loadedInitialState) {
+ if (this.loadedInitialState || this.skipFiltering) {
  return changes;
  }
  const newChanges = [];
  for (const change of changes) {
  let newChange = change;
- if (!this.sentKeys.has(change.key)) {
+ const keyInSentKeys = this.sentKeys.has(change.key);
+ if (!keyInSentKeys) {
  if (change.type === `update`) {
  newChange = { ...change, type: `insert`, previousValue: void 0 };
  } else if (change.type === `delete`) {
  continue;
  }
  this.sentKeys.add(change.key);
+ } else {
+ if (change.type === `insert`) {
+ continue;
+ } else if (change.type === `delete`) {
+ this.sentKeys.delete(change.key);
+ }
  }
  newChanges.push(newChange);
  }
  return newChanges;
  }
  trackSentKeys(changes) {
- if (this.loadedInitialState) {
+ if (this.loadedInitialState || this.skipFiltering) {
  return;
  }
  for (const change of changes) {
- this.sentKeys.add(change.key);
+ if (change.type === `delete`) {
+ this.sentKeys.delete(change.key);
+ } else {
+ this.sentKeys.add(change.key);
+ }
  }
  }
+ /**
+ * Mark that the subscription should not filter any changes.
+ * This is used when includeInitialState is explicitly set to false,
+ * meaning the caller doesn't want initial state but does want ALL future changes.
+ */
+ markAllStateAsSeen() {
+ this.skipFiltering = true;
+ }
  unsubscribe() {
  for (const options of this.loadedSubsets) {
  this.collection._sync.unloadSubset(options);
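To make the new `sentKeys` lifecycle concrete: a duplicate insert for an already-sent key is now dropped, and a delete removes the key so a later re-insert (for example after a truncate) passes through. The standalone model below restates those rules in TypeScript; it is simplified (it ignores `previousValue` handling and the `loadedInitialState`/`skipFiltering` short-circuits) and is not the library API.

```ts
type Change<T> = { type: 'insert' | 'update' | 'delete'; key: string; value: T }

// Simplified restatement of the 0.5.13 filterAndFlipChanges rules.
function filterAndFlip<T>(sentKeys: Set<string>, changes: Array<Change<T>>): Array<Change<T>> {
  const out: Array<Change<T>> = []
  for (const change of changes) {
    if (!sentKeys.has(change.key)) {
      if (change.type === 'delete') continue // delete for a never-announced key is dropped
      // an update for a never-announced key is flipped into an insert
      out.push(change.type === 'update' ? { ...change, type: 'insert' } : change)
      sentKeys.add(change.key)
    } else if (change.type === 'insert') {
      continue // duplicate insert would push D2 multiplicity above 1
    } else {
      if (change.type === 'delete') sentKeys.delete(change.key) // allow a later re-insert
      out.push(change)
    }
  }
  return out
}

// Delete followed by re-insert of the same key now yields both events,
// instead of the re-insert being swallowed as a duplicate.
const sent = new Set(['a'])
const result = filterAndFlip(sent, [
  { type: 'delete', key: 'a', value: { id: 'a' } },
  { type: 'insert', key: 'a', value: { id: 'a' } },
])
console.log(result.map((c) => c.type)) // ['delete', 'insert']
```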
@@ -1 +1 @@
- {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n 
ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? 
false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? 
true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n 
if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState) {\n // We loaded the entire initial state\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n if (!this.sentKeys.has(change.key)) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState) {\n // No need to track sent keys if we loaded the entire state.\n // Since we sent everything, all keys must have been observed.\n return\n }\n\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAkCE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AApCV,SAAQ,qBAAqB;AAI7B,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAG3C,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK
,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAE3B,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,oBAAoB;AAG3B,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,UAAI,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG,GAAG;AAClC,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,oBAAoB;AAG3B;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
+ {"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from '../indexes/auto-index.js'\nimport { and, eq, gte, lt } from '../query/builder/functions.js'\nimport { Value } from '../query/ir.js'\nimport { EventEmitter } from '../event-emitter.js'\nimport { buildCursor } from '../utils/cursor.js'\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from './change-events.js'\nimport type { BasicExpression, OrderBy } from '../query/ir.js'\nimport type { IndexInterface } from '../indexes/base-index.js'\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from '../types.js'\nimport type { CollectionImpl } from './index.js'\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n /** Optional orderBy to pass to loadSubset for backend optimization */\n orderBy?: OrderBy\n /** Optional limit to pass to loadSubset for backend optimization */\n limit?: number\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n /** All column values for cursor (first value used for local index, all values for sync layer) */\n minValues?: Array<unknown>\n /** Row offset for offset-based pagination (passed to sync layer) */\n offset?: number\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to skip filtering in filterAndFlipChanges.\n // This is separate from loadedInitialState because we want to allow\n // requestSnapshot to still work even when filtering is skipped.\n private skipFiltering = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n // Track the count of rows sent via requestLimitedSnapshot for offset-based pagination\n private limitedSnapshotRowCount = 0\n\n // Track the last key sent via requestLimitedSnapshot for cursor-based pagination\n private lastSentKey: string | number | undefined\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions,\n ) {\n super()\n 
if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>,\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? 
false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n // Include orderBy and limit if provided so sync layer can optimize the query\n orderBy: opts?.orderBy,\n limit: opts?.limit,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key),\n )\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of filteredSnapshot) {\n this.sentKeys.add(change.key)\n }\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to the cursor.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n *\n * For multi-column orderBy:\n * - Uses first value from `minValues` for LOCAL index operations (wide bounds, ensures no missed rows)\n * - Uses all `minValues` to build a precise composite cursor for SYNC layer loadSubset\n *\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to the first cursor value + limit values greater.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValues,\n offset,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`,\n )\n }\n\n // Derive first column value from minValues (used for local index operations)\n const minValue = minValues?.[0]\n // Cast for index operations (index expects string | number)\n const minValueForIndex = minValue as string | number | undefined\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? 
createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValueForIndex\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n //\n // For multi-column orderBy, we use the first column value for index operations (wide bounds)\n // This may load some duplicates but ensures we never miss any rows.\n let keys: Array<string | number> = []\n if (minValueForIndex !== undefined) {\n // First, get all items with the same FIRST COLUMN value as minValue\n // This provides wide bounds for the local index\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValueForIndex)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValueForIndex,\n filterFn,\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n } else {\n keys = index.take(limit, minValueForIndex, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n // Track row count for offset-based pagination (before sending to callback)\n // Use the current count as the offset for this load\n const currentOffset = this.limitedSnapshotRowCount\n\n // Add keys to sentKeys BEFORE calling callback to prevent race condition.\n // If a change event arrives while the callback is executing, it will see\n // the keys already in sentKeys and filter out duplicates correctly.\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n\n this.callback(changes)\n\n // Update the row count and last key after sending (for next call's offset/cursor)\n this.limitedSnapshotRowCount += changes.length\n if (changes.length > 0) {\n this.lastSentKey = changes[changes.length - 1]!.key\n }\n\n // Build cursor expressions for sync layer loadSubset\n // The cursor expressions are separate from the main where clause\n // so the sync layer can choose cursor-based or offset-based pagination\n let cursorExpressions:\n | {\n whereFrom: BasicExpression<boolean>\n 
whereCurrent: BasicExpression<boolean>\n lastKey?: string | number\n }\n | undefined\n\n if (minValues !== undefined && minValues.length > 0) {\n const whereFromCursor = buildCursor(orderBy, minValues)\n\n if (whereFromCursor) {\n const { expression } = orderBy[0]!\n const minValue = minValues[0]\n\n // Build the whereCurrent expression for the first orderBy column\n // For Date values, we need to handle precision differences between JS (ms) and backends (μs)\n // A JS Date represents a 1ms range, so we query for all values within that range\n let whereCurrentCursor: BasicExpression<boolean>\n if (minValue instanceof Date) {\n const minValuePlus1ms = new Date(minValue.getTime() + 1)\n whereCurrentCursor = and(\n gte(expression, new Value(minValue)),\n lt(expression, new Value(minValuePlus1ms)),\n )\n } else {\n whereCurrentCursor = eq(expression, new Value(minValue))\n }\n\n cursorExpressions = {\n whereFrom: whereFromCursor,\n whereCurrent: whereCurrentCursor,\n lastKey: this.lastSentKey,\n }\n }\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n // Note: `where` does NOT include cursor expressions - they are passed separately\n // The sync layer can choose to use cursor-based or offset-based pagination\n const loadOptions: LoadSubsetOptions = {\n where, // Main filter only, no cursor\n limit,\n orderBy,\n cursor: cursorExpressions, // Cursor expressions passed separately\n offset: offset ?? currentOffset, // Use provided offset, or auto-tracked offset\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions)\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // We loaded the entire initial state or filtering is explicitly skipped\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n const keyInSentKeys = this.sentKeys.has(change.key)\n\n if (!keyInSentKeys) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n } else {\n // Key was already sent - handle based on change type\n if (change.type === `insert`) {\n // Filter out duplicate inserts - the key was already inserted.\n // This prevents D2 multiplicity from going above 1, which would\n // cause deletes to not properly remove items (multiplicity would\n // go from 2 to 1 instead of 1 to 0).\n continue\n } else if (change.type === `delete`) {\n // Remove from sentKeys so future inserts for this key are allowed\n // (e.g., after truncate + reinsert)\n this.sentKeys.delete(change.key)\n }\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState || this.skipFiltering) {\n // No need to track sent keys if we loaded the entire state or filtering is skipped.\n // Since filtering won't be applied, all keys are effectively \"observed\".\n return\n }\n\n for (const change of changes) {\n if (change.type === `delete`) {\n // Remove deleted keys from sentKeys so future re-inserts are allowed\n this.sentKeys.delete(change.key)\n } else {\n // For inserts and updates, track the key as sent\n this.sentKeys.add(change.key)\n }\n }\n }\n\n /**\n * Mark that the subscription should not filter any changes.\n * This is used when includeInitialState is explicitly set to false,\n * meaning the caller doesn't want initial state but does want ALL future changes.\n */\n markAllStateAsSeen() {\n this.skipFiltering = true\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":["minValue"],"mappings":";;;;;;AAgDO,MAAM,+BACH,aAEV;AAAA,EAuCE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAzCV,SAAQ,qBAAqB;AAK7B,SAAQ,gBAAgB;AAIxB,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAGvB,SAAQ,0BAA0B;AAUlC,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,OAAO,MAAM;AAAA,IAAA;AAEf,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAM3C,eAAW,UAAU,kBAAkB;AACrC,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAGA,UAAM,WAAW,YAAY,CAAC;AAE9B,UAAM,mBAAmB;AAEzB,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAW5D,QAAI,OAA+B,CAAA;AACnC,QAAI,qBAAqB,QAAW;AAGlC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,gBAAgB,CAAC;AAAA,MAAA,CAClD;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;
AACL,eAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,MACrD;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,kBAAkB,QAAQ;AAAA,IACrD;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAIA,UAAM,gBAAgB,KAAK;AAK3B,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAEA,SAAK,SAAS,OAAO;AAGrB,SAAK,2BAA2B,QAAQ;AACxC,QAAI,QAAQ,SAAS,GAAG;AACtB,WAAK,cAAc,QAAQ,QAAQ,SAAS,CAAC,EAAG;AAAA,IAClD;AAKA,QAAI;AAQJ,QAAI,cAAc,UAAa,UAAU,SAAS,GAAG;AACnD,YAAM,kBAAkB,YAAY,SAAS,SAAS;AAEtD,UAAI,iBAAiB;AACnB,cAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,cAAMA,YAAW,UAAU,CAAC;AAK5B,YAAI;AACJ,YAAIA,qBAAoB,MAAM;AAC5B,gBAAM,kBAAkB,IAAI,KAAKA,UAAS,QAAA,IAAY,CAAC;AACvD,+BAAqB;AAAA,YACnB,IAAI,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,YACnC,GAAG,YAAY,IAAI,MAAM,eAAe,CAAC;AAAA,UAAA;AAAA,QAE7C,OAAO;AACL,+BAAqB,GAAG,YAAY,IAAI,MAAMA,SAAQ,CAAC;AAAA,QACzD;AAEA,4BAAoB;AAAA,UAClB,WAAW;AAAA,UACX,cAAc;AAAA,UACd,SAAS,KAAK;AAAA,QAAA;AAAA,MAElB;AAAA,IACF;AAMA,UAAM,cAAiC;AAAA,MACrC;AAAA;AAAA,MACA;AAAA,MACA;AAAA,MACA,QAAQ;AAAA;AAAA,MACR,QAAQ,UAAU;AAAA;AAAA,MAClB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AACnC,SAAK,uBAAuB,UAAU;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,YAAM,gBAAgB,KAAK,SAAS,IAAI,OAAO,GAAG;AAElD,UAAI,CAAC,eAAe;AAClB,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B,OAAO;AAEL,YAAI,OAAO,SAAS,UAAU;AAK5B;AAAA,QACF,WAAW,OAAO,SAAS,UAAU;AAGnC,eAAK,SAAS,OAAO,OAAO,GAAG;AAAA,QACjC;AAAA,MACF;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,sBAAsB,KAAK,eAAe;AAGjD;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,UAAI,OAAO,SAAS,UAAU;AAE5B,aAAK,SAAS,OAAO,OAAO,GAAG;AAAA,MACjC,OAAO;AAEL,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,qBAAqB;AACnB,SAAK,gBAAgB;AAAA,EACvB;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
@@ -41,7 +41,7 @@ class CollectionSyncManager {
  deletedKeys: /* @__PURE__ */ new Set()
  });
  },
- write: (messageWithoutKey) => {
+ write: (messageWithOptionalKey) => {
  const pendingTransaction = this.state.pendingSyncedTransactions[this.state.pendingSyncedTransactions.length - 1];
  if (!pendingTransaction) {
  throw new NoPendingSyncTransactionWriteError();
@@ -49,15 +49,20 @@ class CollectionSyncManager {
  if (pendingTransaction.committed) {
  throw new SyncTransactionAlreadyCommittedWriteError();
  }
- const key = this.config.getKey(messageWithoutKey.value);
- let messageType = messageWithoutKey.type;
- if (messageWithoutKey.type === `insert`) {
+ let key = void 0;
+ if (`key` in messageWithOptionalKey) {
+ key = messageWithOptionalKey.key;
+ } else {
+ key = this.config.getKey(messageWithOptionalKey.value);
+ }
+ let messageType = messageWithOptionalKey.type;
+ if (messageWithOptionalKey.type === `insert`) {
  const insertingIntoExistingSynced = this.state.syncedData.has(key);
  const hasPendingDeleteForKey = pendingTransaction.deletedKeys.has(key);
  const isTruncateTransaction = pendingTransaction.truncate === true;
  if (insertingIntoExistingSynced && !hasPendingDeleteForKey && !isTruncateTransaction) {
  const existingValue = this.state.syncedData.get(key);
- if (existingValue !== void 0 && deepEquals(existingValue, messageWithoutKey.value)) {
+ if (existingValue !== void 0 && deepEquals(existingValue, messageWithOptionalKey.value)) {
  messageType = `update`;
  } else {
  const utils = this.config.utils;
@@ -70,7 +75,7 @@ class CollectionSyncManager {
  }
  }
  const message = {
- ...messageWithoutKey,
+ ...messageWithOptionalKey,
  type: messageType,
  key
  };
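The `write` handler now takes the key from the message when one is present and only falls back to `config.getKey(value)` otherwise. Below is a hedged sketch of a sync function exercising both paths. The collection config shape follows the sources embedded in the maps above; the assumption that the published type for `write` now accepts an optional `key` alongside `value` is inferred from the renamed parameter, not confirmed elsewhere in this section of the diff.

```ts
import { createCollection } from '@tanstack/db'

type Row = { id: string; name: string }

const rows = createCollection({
  getKey: (row: Row) => row.id,
  sync: {
    sync: ({ begin, write, commit, markReady }) => {
      begin()
      // Pre-0.5.13 path, still supported: key is derived via config.getKey(value).
      write({ type: `insert`, value: { id: `1`, name: `Ada` } })
      // New path in 0.5.13: an explicit key on the message takes precedence over getKey.
      write({ type: `insert`, key: `2`, value: { id: `2`, name: `Grace` } })
      commit()
      markReady()
    },
  },
})

// Once the synced transaction commits, both rows are readable by key,
// e.g. rows.get(`2`) returns { id: '2', name: 'Grace' }.
```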
@@ -1 +1 @@
- {"version":3,"file":"sync.js","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionConfigurationError,\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from '../errors'\nimport { deepEquals } from '../utils'\nimport { LIVE_QUERY_INTERNAL } from '../query/live/internal.js'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n ChangeMessage,\n CleanupFn,\n CollectionConfig,\n LoadSubsetOptions,\n SyncConfigRes,\n} from '../types'\nimport type { CollectionImpl } from './index.js'\nimport type { CollectionStateManager } from './state'\nimport type { CollectionLifecycleManager } from './lifecycle'\nimport type { CollectionEventsManager } from './events.js'\nimport type { LiveQueryCollectionUtils } from '../query/live/collection-config-builder.js'\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private _events!: CollectionEventsManager\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n private syncMode: `eager` | `on-demand`\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n public syncLoadSubsetFn:\n | ((options: LoadSubsetOptions) => true | Promise<void>)\n | null = null\n public syncUnloadSubsetFn: ((options: LoadSubsetOptions) => void) | null =\n null\n\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n this.syncMode = config.syncMode ?? 
`eager`\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n this._events = deps.events\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const syncRes = normalizeSyncFnResult(\n this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n this.state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (messageWithoutKey: Omit<ChangeMessage<TOutput>, `key`>) => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n const key = this.config.getKey(messageWithoutKey.value)\n\n let messageType = messageWithoutKey.type\n\n // Check if an item with this key already exists when inserting\n if (messageWithoutKey.type === `insert`) {\n const insertingIntoExistingSynced = this.state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n const existingValue = this.state.syncedData.get(key)\n if (\n existingValue !== undefined &&\n deepEquals(existingValue, messageWithoutKey.value)\n ) {\n // The \"insert\" is an echo of a value we already have locally.\n // Treat it as an update so we preserve optimistic intent without\n // throwing a duplicate-key error during reconciliation.\n messageType = `update`\n } else {\n const utils = this.config\n .utils as Partial<LiveQueryCollectionUtils>\n const internal = utils[LIVE_QUERY_INTERNAL]\n throw new DuplicateKeySyncError(key, this.id, {\n hasCustomGetKey: internal?.hasCustomGetKey ?? false,\n hasJoins: internal?.hasJoins ?? 
false,\n })\n }\n }\n }\n\n const message: ChangeMessage<TOutput> = {\n ...messageWithoutKey,\n type: messageType,\n key,\n }\n pendingTransaction.operations.push(message)\n\n if (messageType === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n this.state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n\n // Capture optimistic state NOW to preserve it even if transactions complete\n // before this truncate transaction is committed\n pendingTransaction.optimisticSnapshot = {\n upserts: new Map(this.state.optimisticUpserts),\n deletes: new Set(this.state.optimisticDeletes),\n }\n },\n }),\n )\n\n // Store cleanup function if provided\n this.syncCleanupFn = syncRes?.cleanup ?? null\n\n // Store loadSubset function if provided\n this.syncLoadSubsetFn = syncRes?.loadSubset ?? null\n\n // Store unloadSubset function if provided\n this.syncUnloadSubsetFn = syncRes?.unloadSubset ?? null\n\n // Validate: on-demand mode requires a loadSubset function\n if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {\n throw new CollectionConfigurationError(\n `Collection \"${this.id}\" is configured with syncMode \"on-demand\" but the sync function did not return a loadSubset handler. ` +\n `Either provide a loadSubset handler or use syncMode \"eager\".`,\n )\n }\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n // Warn when calling preload on an on-demand collection\n if (this.syncMode === `on-demand`) {\n console.warn(\n `${this.id ? `[${this.id}] ` : ``}Calling .preload() on a collection with syncMode \"on-demand\" is a no-op. ` +\n `In on-demand mode, data is only loaded when queries request it. ` +\n `Instead, create a live query and call .preload() on that to load the specific data you need. 
` +\n `See https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync for more details.`,\n )\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n /**\n * Gets whether the collection is currently loading more data\n */\n public get isLoadingSubset(): boolean {\n return this.pendingLoadSubsetPromises.size > 0\n }\n\n /**\n * Tracks a load promise for isLoadingSubset state.\n * @internal This is for internal coordination (e.g., live-query glue code), not for general use.\n */\n public trackLoadPromise(promise: Promise<void>): void {\n const loadingStarting = !this.isLoadingSubset\n this.pendingLoadSubsetPromises.add(promise)\n\n if (loadingStarting) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: true,\n previousIsLoadingSubset: false,\n loadingSubsetTransition: `start`,\n })\n }\n\n promise.finally(() => {\n const loadingEnding =\n this.pendingLoadSubsetPromises.size === 1 &&\n this.pendingLoadSubsetPromises.has(promise)\n this.pendingLoadSubsetPromises.delete(promise)\n\n if (loadingEnding) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: false,\n previousIsLoadingSubset: true,\n loadingSubsetTransition: `end`,\n })\n }\n })\n }\n\n /**\n * Requests the sync layer to load more data.\n * @param options Options to control what data is being loaded\n * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.\n * Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.\n */\n public loadSubset(options: LoadSubsetOptions): Promise<void> | true {\n // Bypass loadSubset when syncMode is 'eager'\n if (this.syncMode === `eager`) {\n return true\n }\n\n if (this.syncLoadSubsetFn) {\n const result = this.syncLoadSubsetFn(options)\n // If the result is a promise, track it\n if (result instanceof Promise) {\n this.trackLoadPromise(result)\n return result\n }\n }\n\n return true\n }\n\n /**\n * Notifies the sync layer that a subset is no longer needed.\n * @param options Options that identify what data is being unloaded\n */\n public unloadSubset(options: LoadSubsetOptions): void {\n if (this.syncUnloadSubsetFn) {\n this.syncUnloadSubsetFn(options)\n }\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n }\n}\n\nfunction 
normalizeSyncFnResult(result: void | CleanupFn | SyncConfigRes) {\n if (typeof result === `function`) {\n return { cleanup: result }\n }\n\n if (typeof result === `object`) {\n return result\n }\n\n return undefined\n}\n"],"names":[],"mappings":";;;AA0BO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAsBA,YAAY,QAAkD,IAAY;AAb1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAC5C,SAAO,mBAEI;AACX,SAAO,qBACL;AAEF,SAAQ,gDAAoD,IAAA;AAM1D,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA,EAEA,QAAQ,MAKL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,UAAU;AAAA,QACd,KAAK,OAAO,KAAK,KAAK;AAAA,UACpB,YAAY,KAAK;AAAA,UACjB,OAAO,MAAM;AACX,iBAAK,MAAM,0BAA0B,KAAK;AAAA,cACxC,WAAW;AAAA,cACX,YAAY,CAAA;AAAA,cACZ,iCAAiB,IAAA;AAAA,YAAI,CACtB;AAAA,UACH;AAAA,UACA,OAAO,CAAC,sBAA2D;AACjE,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AACA,kBAAM,MAAM,KAAK,OAAO,OAAO,kBAAkB,KAAK;AAEtD,gBAAI,cAAc,kBAAkB;AAGpC,gBAAI,kBAAkB,SAAS,UAAU;AACvC,oBAAM,8BAA8B,KAAK,MAAM,WAAW,IAAI,GAAG;AACjE,oBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,oBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,kBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,sBAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,GAAG;AACnD,oBACE,kBAAkB,UAClB,WAAW,eAAe,kBAAkB,KAAK,GACjD;AAIA,gCAAc;AAAA,gBAChB,OAAO;AACL,wBAAM,QAAQ,KAAK,OAChB;AACH,wBAAM,WAAW,MAAM,mBAAmB;AAC1C,wBAAM,IAAI,sBAAsB,KAAK,KAAK,IAAI;AAAA,oBAC5C,iBAAiB,UAAU,mBAAmB;AAAA,oBAC9C,UAAU,UAAU,YAAY;AAAA,kBAAA,CACjC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAkC;AAAA,cACtC,GAAG;AAAA,cACH,MAAM;AAAA,cACN;AAAA,YAAA;AAEF,+BAAmB,WAAW,KAAK,OAAO;AAE1C,gBAAI,gBAAgB,UAAU;AAC5B,iCAAmB,YAAY,IAAI,GAAG;AAAA,YACxC;AAAA,UACF;AAAA,UACA,QAAQ,MAAM;AACZ,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,oCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,qCAAA;AAAA,YACZ;AAEA,+BAAmB,YAAY;AAE/B,iBAAK,MAAM,0BAAA;AAAA,UACb;AAAA,UACA,WAAW,MAAM;AACf,iBAAK,UAAU,UAAA;AAAA,UACjB;AAAA,UACA,UAAU,MAAM;AACd,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AAGA,+BAAmB,aAAa,CAAA;AAChC,+BAAmB,YAAY,MAAA;AAO/B,+BAAmB,WAAW;AAI9B,+BAAmB,qBAAqB;AAAA,cACtC,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,cAC7C,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,YAAA;AAAA,UAEjD;AAAA,QAAA,CACD;AAAA,MAAA;AAIH,WAAK,gBAAgB,SAAS,WAAW;AAGzC,WAAK,mBAAmB,SAAS,cAAc;AAG/C,WAAK,qBAAqB,SAAS,gBAAgB;AAGnD,UAAI,KAAK,aAAa,eAAe,CAAC,KAAK,kBAAkB;AAC3D,cAAM,IAAI;AAAA,UACR,eAAe,KAAK,EAAE;AAAA,QAAA;AAAA,MAG1B;AAAA,IACF,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,GAAG,KAAK,KAAK,IAAI,KAAK,EAAE,OAAO,EAAE;AAAA,MAAA;AAAA,IAKrC;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,IAAI,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAW,kBAA2B;AACpC,WAAO,KAAK,0BAA0B,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,iBAAiB,SAA8B;AACpD,UAAM,kB
AAkB,CAAC,KAAK;AAC9B,SAAK,0BAA0B,IAAI,OAAO;AAE1C,QAAI,iBAAiB;AACnB,WAAK,QAAQ,KAAK,wBAAwB;AAAA,QACxC,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,iBAAiB;AAAA,QACjB,yBAAyB;AAAA,QACzB,yBAAyB;AAAA,MAAA,CAC1B;AAAA,IACH;AAEA,YAAQ,QAAQ,MAAM;AACpB,YAAM,gBACJ,KAAK,0BAA0B,SAAS,KACxC,KAAK,0BAA0B,IAAI,OAAO;AAC5C,WAAK,0BAA0B,OAAO,OAAO;AAE7C,UAAI,eAAe;AACjB,aAAK,QAAQ,KAAK,wBAAwB;AAAA,UACxC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,iBAAiB;AAAA,UACjB,yBAAyB;AAAA,UACzB,yBAAyB;AAAA,QAAA,CAC1B;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,WAAW,SAAkD;AAElE,QAAI,KAAK,aAAa,SAAS;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,kBAAkB;AACzB,YAAM,SAAS,KAAK,iBAAiB,OAAO;AAE5C,UAAI,kBAAkB,SAAS;AAC7B,aAAK,iBAAiB,MAAM;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aAAa,SAAkC;AACpD,QAAI,KAAK,oBAAoB;AAC3B,WAAK,mBAAmB,OAAO;AAAA,IACjC;AAAA,EACF;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAI,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAI,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;AAEA,SAAS,sBAAsB,QAA0C;AACvE,MAAI,OAAO,WAAW,YAAY;AAChC,WAAO,EAAE,SAAS,OAAA;AAAA,EACpB;AAEA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;"}
1
+ {"version":3,"file":"sync.js","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionConfigurationError,\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from '../errors'\nimport { deepEquals } from '../utils'\nimport { LIVE_QUERY_INTERNAL } from '../query/live/internal.js'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n ChangeMessageOrDeleteKeyMessage,\n CleanupFn,\n CollectionConfig,\n LoadSubsetOptions,\n OptimisticChangeMessage,\n SyncConfigRes,\n} from '../types'\nimport type { CollectionImpl } from './index.js'\nimport type { CollectionStateManager } from './state'\nimport type { CollectionLifecycleManager } from './lifecycle'\nimport type { CollectionEventsManager } from './events.js'\nimport type { LiveQueryCollectionUtils } from '../query/live/collection-config-builder.js'\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private _events!: CollectionEventsManager\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n private syncMode: `eager` | `on-demand`\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n public syncLoadSubsetFn:\n | ((options: LoadSubsetOptions) => true | Promise<void>)\n | null = null\n public syncUnloadSubsetFn: ((options: LoadSubsetOptions) => void) | null =\n null\n\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n this.syncMode = config.syncMode ?? 
`eager`\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n this._events = deps.events\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const syncRes = normalizeSyncFnResult(\n this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n this.state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (\n messageWithOptionalKey: ChangeMessageOrDeleteKeyMessage<\n TOutput,\n TKey\n >,\n ) => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n let key: TKey | undefined = undefined\n if (`key` in messageWithOptionalKey) {\n key = messageWithOptionalKey.key\n } else {\n key = this.config.getKey(messageWithOptionalKey.value)\n }\n\n let messageType = messageWithOptionalKey.type\n\n // Check if an item with this key already exists when inserting\n if (messageWithOptionalKey.type === `insert`) {\n const insertingIntoExistingSynced = this.state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n const existingValue = this.state.syncedData.get(key)\n if (\n existingValue !== undefined &&\n deepEquals(existingValue, messageWithOptionalKey.value)\n ) {\n // The \"insert\" is an echo of a value we already have locally.\n // Treat it as an update so we preserve optimistic intent without\n // throwing a duplicate-key error during reconciliation.\n messageType = `update`\n } else {\n const utils = this.config\n .utils as Partial<LiveQueryCollectionUtils>\n const internal = utils[LIVE_QUERY_INTERNAL]\n throw new DuplicateKeySyncError(key, this.id, {\n hasCustomGetKey: internal?.hasCustomGetKey ?? false,\n hasJoins: internal?.hasJoins ?? 
false,\n })\n }\n }\n }\n\n const message = {\n ...messageWithOptionalKey,\n type: messageType,\n key,\n } as OptimisticChangeMessage<TOutput, TKey>\n pendingTransaction.operations.push(message)\n\n if (messageType === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n this.state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n\n // Capture optimistic state NOW to preserve it even if transactions complete\n // before this truncate transaction is committed\n pendingTransaction.optimisticSnapshot = {\n upserts: new Map(this.state.optimisticUpserts),\n deletes: new Set(this.state.optimisticDeletes),\n }\n },\n }),\n )\n\n // Store cleanup function if provided\n this.syncCleanupFn = syncRes?.cleanup ?? null\n\n // Store loadSubset function if provided\n this.syncLoadSubsetFn = syncRes?.loadSubset ?? null\n\n // Store unloadSubset function if provided\n this.syncUnloadSubsetFn = syncRes?.unloadSubset ?? null\n\n // Validate: on-demand mode requires a loadSubset function\n if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {\n throw new CollectionConfigurationError(\n `Collection \"${this.id}\" is configured with syncMode \"on-demand\" but the sync function did not return a loadSubset handler. ` +\n `Either provide a loadSubset handler or use syncMode \"eager\".`,\n )\n }\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n // Warn when calling preload on an on-demand collection\n if (this.syncMode === `on-demand`) {\n console.warn(\n `${this.id ? `[${this.id}] ` : ``}Calling .preload() on a collection with syncMode \"on-demand\" is a no-op. ` +\n `In on-demand mode, data is only loaded when queries request it. ` +\n `Instead, create a live query and call .preload() on that to load the specific data you need. 
` +\n `See https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync for more details.`,\n )\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n /**\n * Gets whether the collection is currently loading more data\n */\n public get isLoadingSubset(): boolean {\n return this.pendingLoadSubsetPromises.size > 0\n }\n\n /**\n * Tracks a load promise for isLoadingSubset state.\n * @internal This is for internal coordination (e.g., live-query glue code), not for general use.\n */\n public trackLoadPromise(promise: Promise<void>): void {\n const loadingStarting = !this.isLoadingSubset\n this.pendingLoadSubsetPromises.add(promise)\n\n if (loadingStarting) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: true,\n previousIsLoadingSubset: false,\n loadingSubsetTransition: `start`,\n })\n }\n\n promise.finally(() => {\n const loadingEnding =\n this.pendingLoadSubsetPromises.size === 1 &&\n this.pendingLoadSubsetPromises.has(promise)\n this.pendingLoadSubsetPromises.delete(promise)\n\n if (loadingEnding) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: false,\n previousIsLoadingSubset: true,\n loadingSubsetTransition: `end`,\n })\n }\n })\n }\n\n /**\n * Requests the sync layer to load more data.\n * @param options Options to control what data is being loaded\n * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.\n * Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.\n */\n public loadSubset(options: LoadSubsetOptions): Promise<void> | true {\n // Bypass loadSubset when syncMode is 'eager'\n if (this.syncMode === `eager`) {\n return true\n }\n\n if (this.syncLoadSubsetFn) {\n const result = this.syncLoadSubsetFn(options)\n // If the result is a promise, track it\n if (result instanceof Promise) {\n this.trackLoadPromise(result)\n return result\n }\n }\n\n return true\n }\n\n /**\n * Notifies the sync layer that a subset is no longer needed.\n * @param options Options that identify what data is being unloaded\n */\n public unloadSubset(options: LoadSubsetOptions): void {\n if (this.syncUnloadSubsetFn) {\n this.syncUnloadSubsetFn(options)\n }\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n }\n}\n\nfunction 
normalizeSyncFnResult(result: void | CleanupFn | SyncConfigRes) {\n if (typeof result === `function`) {\n return { cleanup: result }\n }\n\n if (typeof result === `object`) {\n return result\n }\n\n return undefined\n}\n"],"names":[],"mappings":";;;AA2BO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAsBA,YAAY,QAAkD,IAAY;AAb1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAC5C,SAAO,mBAEI;AACX,SAAO,qBACL;AAEF,SAAQ,gDAAoD,IAAA;AAM1D,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA,EAEA,QAAQ,MAKL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,UAAU;AAAA,QACd,KAAK,OAAO,KAAK,KAAK;AAAA,UACpB,YAAY,KAAK;AAAA,UACjB,OAAO,MAAM;AACX,iBAAK,MAAM,0BAA0B,KAAK;AAAA,cACxC,WAAW;AAAA,cACX,YAAY,CAAA;AAAA,cACZ,iCAAiB,IAAA;AAAA,YAAI,CACtB;AAAA,UACH;AAAA,UACA,OAAO,CACL,2BAIG;AACH,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AAEA,gBAAI,MAAwB;AAC5B,gBAAI,SAAS,wBAAwB;AACnC,oBAAM,uBAAuB;AAAA,YAC/B,OAAO;AACL,oBAAM,KAAK,OAAO,OAAO,uBAAuB,KAAK;AAAA,YACvD;AAEA,gBAAI,cAAc,uBAAuB;AAGzC,gBAAI,uBAAuB,SAAS,UAAU;AAC5C,oBAAM,8BAA8B,KAAK,MAAM,WAAW,IAAI,GAAG;AACjE,oBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,oBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,kBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,sBAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,GAAG;AACnD,oBACE,kBAAkB,UAClB,WAAW,eAAe,uBAAuB,KAAK,GACtD;AAIA,gCAAc;AAAA,gBAChB,OAAO;AACL,wBAAM,QAAQ,KAAK,OAChB;AACH,wBAAM,WAAW,MAAM,mBAAmB;AAC1C,wBAAM,IAAI,sBAAsB,KAAK,KAAK,IAAI;AAAA,oBAC5C,iBAAiB,UAAU,mBAAmB;AAAA,oBAC9C,UAAU,UAAU,YAAY;AAAA,kBAAA,CACjC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAU;AAAA,cACd,GAAG;AAAA,cACH,MAAM;AAAA,cACN;AAAA,YAAA;AAEF,+BAAmB,WAAW,KAAK,OAAO;AAE1C,gBAAI,gBAAgB,UAAU;AAC5B,iCAAmB,YAAY,IAAI,GAAG;AAAA,YACxC;AAAA,UACF;AAAA,UACA,QAAQ,MAAM;AACZ,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,oCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,qCAAA;AAAA,YACZ;AAEA,+BAAmB,YAAY;AAE/B,iBAAK,MAAM,0BAAA;AAAA,UACb;AAAA,UACA,WAAW,MAAM;AACf,iBAAK,UAAU,UAAA;AAAA,UACjB;AAAA,UACA,UAAU,MAAM;AACd,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AAGA,+BAAmB,aAAa,CAAA;AAChC,+BAAmB,YAAY,MAAA;AAO/B,+BAAmB,WAAW;AAI9B,+BAAmB,qBAAqB;AAAA,cACtC,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,cAC7C,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,YAAA;AAAA,UAEjD;AAAA,QAAA,CACD;AAAA,MAAA;AAIH,WAAK,gBAAgB,SAAS,WAAW;AAGzC,WAAK,mBAAmB,SAAS,cAAc;AAG/C,WAAK,qBAAqB,SAAS,gBAAgB;AAGnD,UAAI,KAAK,aAAa,eAAe,CAAC,KAAK,kBAAkB;AAC3D,cAAM,IAAI;AAAA,UACR,eAAe,KAAK,EAAE;AAAA,QAAA;AAAA,MAG1B;AAAA,IACF,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,GAAG,KAAK,KAAK,IAAI,KAAK,EAAE,OAAO,EAAE;AAAA,MAAA;AAAA,IAKrC;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,IAAI,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAW,kBAA2B;AACpC,WAAO,KAAK,0B
AA0B,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,iBAAiB,SAA8B;AACpD,UAAM,kBAAkB,CAAC,KAAK;AAC9B,SAAK,0BAA0B,IAAI,OAAO;AAE1C,QAAI,iBAAiB;AACnB,WAAK,QAAQ,KAAK,wBAAwB;AAAA,QACxC,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,iBAAiB;AAAA,QACjB,yBAAyB;AAAA,QACzB,yBAAyB;AAAA,MAAA,CAC1B;AAAA,IACH;AAEA,YAAQ,QAAQ,MAAM;AACpB,YAAM,gBACJ,KAAK,0BAA0B,SAAS,KACxC,KAAK,0BAA0B,IAAI,OAAO;AAC5C,WAAK,0BAA0B,OAAO,OAAO;AAE7C,UAAI,eAAe;AACjB,aAAK,QAAQ,KAAK,wBAAwB;AAAA,UACxC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,iBAAiB;AAAA,UACjB,yBAAyB;AAAA,UACzB,yBAAyB;AAAA,QAAA,CAC1B;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,WAAW,SAAkD;AAElE,QAAI,KAAK,aAAa,SAAS;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,kBAAkB;AACzB,YAAM,SAAS,KAAK,iBAAiB,OAAO;AAE5C,UAAI,kBAAkB,SAAS;AAC7B,aAAK,iBAAiB,MAAM;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aAAa,SAAkC;AACpD,QAAI,KAAK,oBAAoB;AAC3B,WAAK,mBAAmB,OAAO;AAAA,IACjC;AAAA,EACF;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAI,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAI,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;AAEA,SAAS,sBAAsB,QAA0C;AACvE,MAAI,OAAO,WAAW,YAAY;AAChC,WAAO,EAAE,SAAS,OAAA;AAAA,EACpB;AAEA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;"}
@@ -247,7 +247,7 @@ export interface SyncConfig<T extends object = Record<string, unknown>, TKey ext
247
247
  sync: (params: {
248
248
  collection: Collection<T, TKey, any, any, any>;
249
249
  begin: () => void;
250
- write: (message: Omit<ChangeMessage<T>, `key`>) => void;
250
+ write: (message: ChangeMessageOrDeleteKeyMessage<T, TKey>) => void;
251
251
  commit: () => void;
252
252
  markReady: () => void;
253
253
  truncate: () => void;
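
The widened write signature above means a sync implementation can now emit deletes by key alone instead of reconstructing the deleted row. A minimal sketch, assuming SyncConfig is re-exported from the package entry point; the transport (onServerBatch) and its payload shape are purely illustrative:

    import type { SyncConfig } from '@tanstack/db'

    type Todo = { id: string; text: string }

    // Hypothetical transport: stands in for whatever delivers server batches.
    declare function onServerBatch(
      handler: (batch: { upserts: Array<Todo>; deletedIds: Array<string> }) => void,
    ): void

    const syncTodos: SyncConfig<Todo, string>['sync'] = ({ begin, write, commit, markReady }) => {
      onServerBatch((batch) => {
        begin()
        for (const row of batch.upserts) {
          // Full change message: the key is derived from the value via getKey.
          write({ type: 'insert', value: row })
        }
        for (const id of batch.deletedIds) {
          // New in 0.5.13: a delete can be written with just the key.
          write({ type: 'delete', key: id })
        }
        commit()
        markReady()
      })
    }
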
@@ -273,9 +273,15 @@ export interface ChangeMessage<T extends object = Record<string, unknown>, TKey
273
273
  type: OperationType;
274
274
  metadata?: Record<string, unknown>;
275
275
  }
276
- export interface OptimisticChangeMessage<T extends object = Record<string, unknown>> extends ChangeMessage<T> {
276
+ export type DeleteKeyMessage<TKey extends string | number = string | number> = Omit<ChangeMessage<any, TKey>, `value` | `previousValue` | `type`> & {
277
+ type: `delete`;
278
+ };
279
+ export type ChangeMessageOrDeleteKeyMessage<T extends object = Record<string, unknown>, TKey extends string | number = string | number> = Omit<ChangeMessage<T>, `key`> | DeleteKeyMessage<TKey>;
280
+ export type OptimisticChangeMessage<T extends object = Record<string, unknown>, TKey extends string | number = string | number> = (ChangeMessage<T> & {
277
281
  isActive?: boolean;
278
- }
282
+ }) | (DeleteKeyMessage<TKey> & {
283
+ isActive?: boolean;
284
+ });
279
285
  /**
280
286
  * The Standard Schema interface.
281
287
  * This follows the standard-schema specification: https://github.com/standard-schema/standard-schema
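
In practice the new union lets a delete message omit the row value entirely. Two literals that should both satisfy the type, assuming these types are exported from the package entry point:

    import type { ChangeMessageOrDeleteKeyMessage } from '@tanstack/db'

    type Todo = { id: string; text: string }

    // Full change message (the pre-0.5.13 shape, minus `key`).
    const upsert: ChangeMessageOrDeleteKeyMessage<Todo, string> = {
      type: 'update',
      value: { id: '1', text: 'buy milk' },
    }

    // Delete-by-key message: no `value` or `previousValue` required.
    const tombstone: ChangeMessageOrDeleteKeyMessage<Todo, string> = {
      type: 'delete',
      key: '1',
    }
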
@@ -652,3 +658,4 @@ type WritableObjectDeep<ObjectType extends object> = {
652
658
  };
653
659
  type WritableArrayDeep<ArrayType extends ReadonlyArray<unknown>> = ArrayType extends readonly [] ? [] : ArrayType extends readonly [...infer U, infer V] ? [...WritableArrayDeep<U>, WritableDeep<V>] : ArrayType extends readonly [infer U, ...infer V] ? [WritableDeep<U>, ...WritableArrayDeep<V>] : ArrayType extends ReadonlyArray<infer U> ? Array<WritableDeep<U>> : ArrayType extends Array<infer U> ? Array<WritableDeep<U>> : ArrayType;
654
660
  export type WritableDeep<T> = T extends BuiltIns ? T : T extends (...arguments_: Array<any>) => unknown ? {} extends WritableObjectDeep<T> ? T : HasMultipleCallSignatures<T> extends true ? T : ((...arguments_: Parameters<T>) => ReturnType<T>) & WritableObjectDeep<T> : T extends ReadonlyMap<unknown, unknown> ? WritableMapDeep<T> : T extends ReadonlySet<unknown> ? WritableSetDeep<T> : T extends ReadonlyArray<unknown> ? WritableArrayDeep<T> : T extends object ? WritableObjectDeep<T> : unknown;
661
+ export type MakeOptional<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>;
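
MakeOptional is a small utility type; the diff only adds the type, so the usage below is hypothetical and just shows what it computes:

    import type { MakeOptional } from '@tanstack/db' // assumed to be exported from the entry point

    type Row = { id: string; text: string; createdAt: Date }

    // `createdAt` becomes optional; `id` and `text` stay required.
    type RowInput = MakeOptional<Row, 'createdAt'>

    const ok: RowInput = { id: '1', text: 'buy milk' }
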
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tanstack/db",
3
- "version": "0.5.12",
3
+ "version": "0.5.13",
4
4
  "description": "A reactive client store for building super fast apps on sync",
5
5
  "author": "Kyle Mathews",
6
6
  "license": "MIT",
package/src/collection/changes.ts CHANGED
@@ -109,6 +109,10 @@ export class CollectionChangesManager<
109
109
 
110
110
  if (options.includeInitialState) {
111
111
  subscription.requestSnapshot({ trackLoadSubsetPromise: false })
112
+ } else if (options.includeInitialState === false) {
113
+ // When explicitly set to false (not just undefined), mark all state as "seen"
114
+ // so that all future changes (including deletes) pass through unfiltered.
115
+ subscription.markAllStateAsSeen()
112
116
  }
113
117
 
114
118
  // Add to batched listeners
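
From the caller's side, the new branch only changes behaviour when the option is passed explicitly. A hedged sketch (collection creation elided; the typing of todos is assumed):

    import type { Collection } from '@tanstack/db'

    type Todo = { id: string; text: string }

    // Stand-in for an existing collection instance.
    declare const todos: Collection<Todo, string>

    // Explicit `false`: no initial snapshot, but all future changes are delivered,
    // including deletes for keys this subscriber never received as inserts.
    const sub = todos.subscribeChanges(
      (changes) => {
        for (const change of changes) {
          console.log(change.type, change.key)
        }
      },
      { includeInitialState: false },
    )

    // Omitting the option keeps the old filtering: deletes for unseen keys are dropped
    // and updates for unseen keys are flipped into inserts.
    sub.unsubscribe()
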
package/src/collection/subscription.ts CHANGED
@@ -52,6 +52,11 @@ export class CollectionSubscription
52
52
  {
53
53
  private loadedInitialState = false
54
54
 
55
+ // Flag to skip filtering in filterAndFlipChanges.
56
+ // This is separate from loadedInitialState because we want to allow
57
+ // requestSnapshot to still work even when filtering is skipped.
58
+ private skipFiltering = false
59
+
55
60
  // Flag to indicate that we have sent at least 1 snapshot.
56
61
  // While `snapshotSent` is false we filter out all changes from subscription to the collection.
57
62
  private snapshotSent = false
@@ -244,6 +249,13 @@ export class CollectionSubscription
244
249
  (change) => !this.sentKeys.has(change.key),
245
250
  )
246
251
 
252
+ // Add keys to sentKeys BEFORE calling callback to prevent race condition.
253
+ // If a change event arrives while the callback is executing, it will see
254
+ // the keys already in sentKeys and filter out duplicates correctly.
255
+ for (const change of filteredSnapshot) {
256
+ this.sentKeys.add(change.key)
257
+ }
258
+
247
259
  this.snapshotSent = true
248
260
  this.callback(filteredSnapshot)
249
261
  return true
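
The re-ordering above matters when the subscriber's callback synchronously causes another emit: the re-entrant call already sees the keys in sentKeys and drops duplicates. A generic illustration of the mark-before-notify pattern (not the library's code):

    const sentKeys = new Set<string>()

    function deliver(keys: Array<string>, notify: (keys: Array<string>) => void): void {
      // Record the keys first ...
      for (const key of keys) sentKeys.add(key)
      // ... so anything the callback triggers re-entrantly can dedupe against them.
      notify(keys)
    }
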
@@ -367,6 +379,13 @@ export class CollectionSubscription
367
379
  // Use the current count as the offset for this load
368
380
  const currentOffset = this.limitedSnapshotRowCount
369
381
 
382
+ // Add keys to sentKeys BEFORE calling callback to prevent race condition.
383
+ // If a change event arrives while the callback is executing, it will see
384
+ // the keys already in sentKeys and filter out duplicates correctly.
385
+ for (const change of changes) {
386
+ this.sentKeys.add(change.key)
387
+ }
388
+
370
389
  this.callback(changes)
371
390
 
372
391
  // Update the row count and last key after sending (for next call's offset/cursor)
@@ -441,10 +460,11 @@ export class CollectionSubscription
441
460
  * Filters and flips changes for keys that have not been sent yet.
442
461
  * Deletes are filtered out for keys that have not been sent yet.
443
462
  * Updates are flipped into inserts for keys that have not been sent yet.
463
+ * Duplicate inserts are filtered out to prevent D2 multiplicity > 1.
444
464
  */
445
465
  private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {
446
- if (this.loadedInitialState) {
447
- // We loaded the entire initial state
466
+ if (this.loadedInitialState || this.skipFiltering) {
467
+ // We loaded the entire initial state or filtering is explicitly skipped
448
468
  // so no need to filter or flip changes
449
469
  return changes
450
470
  }
@@ -452,7 +472,9 @@ export class CollectionSubscription
452
472
  const newChanges = []
453
473
  for (const change of changes) {
454
474
  let newChange = change
455
- if (!this.sentKeys.has(change.key)) {
475
+ const keyInSentKeys = this.sentKeys.has(change.key)
476
+
477
+ if (!keyInSentKeys) {
456
478
  if (change.type === `update`) {
457
479
  newChange = { ...change, type: `insert`, previousValue: undefined }
458
480
  } else if (change.type === `delete`) {
@@ -460,6 +482,19 @@ export class CollectionSubscription
460
482
  continue
461
483
  }
462
484
  this.sentKeys.add(change.key)
485
+ } else {
486
+ // Key was already sent - handle based on change type
487
+ if (change.type === `insert`) {
488
+ // Filter out duplicate inserts - the key was already inserted.
489
+ // This prevents D2 multiplicity from going above 1, which would
490
+ // cause deletes to not properly remove items (multiplicity would
491
+ // go from 2 to 1 instead of 1 to 0).
492
+ continue
493
+ } else if (change.type === `delete`) {
494
+ // Remove from sentKeys so future inserts for this key are allowed
495
+ // (e.g., after truncate + reinsert)
496
+ this.sentKeys.delete(change.key)
497
+ }
463
498
  }
464
499
  newChanges.push(newChange)
465
500
  }
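
Putting the branches above together, the per-subscription filtering now behaves roughly like the standalone sketch below (mirrors the diff with simplified types; not the library's exported code):

    type Msg = {
      type: 'insert' | 'update' | 'delete'
      key: string
      value?: unknown
      previousValue?: unknown
    }

    function filterAndFlip(changes: Array<Msg>, sentKeys: Set<string>): Array<Msg> {
      const out: Array<Msg> = []
      for (const change of changes) {
        if (!sentKeys.has(change.key)) {
          // Unseen key: drop deletes, flip updates into inserts, pass inserts through.
          if (change.type === 'delete') continue
          out.push(
            change.type === 'update'
              ? { ...change, type: 'insert', previousValue: undefined }
              : change,
          )
          sentKeys.add(change.key)
        } else {
          // Seen key: drop duplicate inserts (keeps D2 multiplicity at 1) and forget
          // deleted keys so a later re-insert (e.g. after truncate) passes through.
          if (change.type === 'insert') continue
          if (change.type === 'delete') sentKeys.delete(change.key)
          out.push(change)
        }
      }
      return out
    }
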
@@ -467,17 +502,32 @@ export class CollectionSubscription
467
502
  }
468
503
 
469
504
  private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {
470
- if (this.loadedInitialState) {
471
- // No need to track sent keys if we loaded the entire state.
472
- // Since we sent everything, all keys must have been observed.
505
+ if (this.loadedInitialState || this.skipFiltering) {
506
+ // No need to track sent keys if we loaded the entire state or filtering is skipped.
507
+ // Since filtering won't be applied, all keys are effectively "observed".
473
508
  return
474
509
  }
475
510
 
476
511
  for (const change of changes) {
477
- this.sentKeys.add(change.key)
512
+ if (change.type === `delete`) {
513
+ // Remove deleted keys from sentKeys so future re-inserts are allowed
514
+ this.sentKeys.delete(change.key)
515
+ } else {
516
+ // For inserts and updates, track the key as sent
517
+ this.sentKeys.add(change.key)
518
+ }
478
519
  }
479
520
  }
480
521
 
522
+ /**
523
+ * Mark that the subscription should not filter any changes.
524
+ * This is used when includeInitialState is explicitly set to false,
525
+ * meaning the caller doesn't want initial state but does want ALL future changes.
526
+ */
527
+ markAllStateAsSeen() {
528
+ this.skipFiltering = true
529
+ }
530
+
481
531
  unsubscribe() {
482
532
  // Unload all subsets that this subscription loaded
483
533
  // We pass the exact same LoadSubsetOptions we used for loadSubset
package/src/collection/sync.ts CHANGED
@@ -12,10 +12,11 @@ import { deepEquals } from '../utils'
12
12
  import { LIVE_QUERY_INTERNAL } from '../query/live/internal.js'
13
13
  import type { StandardSchemaV1 } from '@standard-schema/spec'
14
14
  import type {
15
- ChangeMessage,
15
+ ChangeMessageOrDeleteKeyMessage,
16
16
  CleanupFn,
17
17
  CollectionConfig,
18
18
  LoadSubsetOptions,
19
+ OptimisticChangeMessage,
19
20
  SyncConfigRes,
20
21
  } from '../types'
21
22
  import type { CollectionImpl } from './index.js'
@@ -94,7 +95,12 @@ export class CollectionSyncManager<
94
95
  deletedKeys: new Set(),
95
96
  })
96
97
  },
97
- write: (messageWithoutKey: Omit<ChangeMessage<TOutput>, `key`>) => {
98
+ write: (
99
+ messageWithOptionalKey: ChangeMessageOrDeleteKeyMessage<
100
+ TOutput,
101
+ TKey
102
+ >,
103
+ ) => {
98
104
  const pendingTransaction =
99
105
  this.state.pendingSyncedTransactions[
100
106
  this.state.pendingSyncedTransactions.length - 1
@@ -105,12 +111,18 @@ export class CollectionSyncManager<
105
111
  if (pendingTransaction.committed) {
106
112
  throw new SyncTransactionAlreadyCommittedWriteError()
107
113
  }
108
- const key = this.config.getKey(messageWithoutKey.value)
109
114
 
110
- let messageType = messageWithoutKey.type
115
+ let key: TKey | undefined = undefined
116
+ if (`key` in messageWithOptionalKey) {
117
+ key = messageWithOptionalKey.key
118
+ } else {
119
+ key = this.config.getKey(messageWithOptionalKey.value)
120
+ }
121
+
122
+ let messageType = messageWithOptionalKey.type
111
123
 
112
124
  // Check if an item with this key already exists when inserting
113
- if (messageWithoutKey.type === `insert`) {
125
+ if (messageWithOptionalKey.type === `insert`) {
114
126
  const insertingIntoExistingSynced = this.state.syncedData.has(key)
115
127
  const hasPendingDeleteForKey =
116
128
  pendingTransaction.deletedKeys.has(key)
@@ -124,7 +136,7 @@ export class CollectionSyncManager<
124
136
  const existingValue = this.state.syncedData.get(key)
125
137
  if (
126
138
  existingValue !== undefined &&
127
- deepEquals(existingValue, messageWithoutKey.value)
139
+ deepEquals(existingValue, messageWithOptionalKey.value)
128
140
  ) {
129
141
  // The "insert" is an echo of a value we already have locally.
130
142
  // Treat it as an update so we preserve optimistic intent without
@@ -142,11 +154,11 @@ export class CollectionSyncManager<
142
154
  }
143
155
  }
144
156
 
145
- const message: ChangeMessage<TOutput> = {
146
- ...messageWithoutKey,
157
+ const message = {
158
+ ...messageWithOptionalKey,
147
159
  type: messageType,
148
160
  key,
149
- }
161
+ } as OptimisticChangeMessage<TOutput, TKey>
150
162
  pendingTransaction.operations.push(message)
151
163
 
152
164
  if (messageType === `delete`) {
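
Since write() now accepts either shape, the key is resolved before any duplicate checks run; a compact restatement of the branch above (the helper name is illustrative, and the type import assumes it is exposed from the entry point):

    import type { ChangeMessageOrDeleteKeyMessage } from '@tanstack/db'

    function resolveKey<T extends object, TKey extends string | number>(
      message: ChangeMessageOrDeleteKeyMessage<T, TKey>,
      getKey: (value: T) => TKey,
    ): TKey {
      // Delete-by-key messages carry the key directly; full change messages derive it
      // from the value via the collection's getKey.
      return 'key' in message ? message.key : getKey(message.value)
    }
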
package/src/types.ts CHANGED
@@ -328,7 +328,7 @@ export interface SyncConfig<
328
328
  sync: (params: {
329
329
  collection: Collection<T, TKey, any, any, any>
330
330
  begin: () => void
331
- write: (message: Omit<ChangeMessage<T>, `key`>) => void
331
+ write: (message: ChangeMessageOrDeleteKeyMessage<T, TKey>) => void
332
332
  commit: () => void
333
333
  markReady: () => void
334
334
  truncate: () => void
@@ -361,12 +361,28 @@ export interface ChangeMessage<
361
361
  metadata?: Record<string, unknown>
362
362
  }
363
363
 
364
- export interface OptimisticChangeMessage<
364
+ export type DeleteKeyMessage<TKey extends string | number = string | number> =
365
+ Omit<ChangeMessage<any, TKey>, `value` | `previousValue` | `type`> & {
366
+ type: `delete`
367
+ }
368
+
369
+ export type ChangeMessageOrDeleteKeyMessage<
365
370
  T extends object = Record<string, unknown>,
366
- > extends ChangeMessage<T> {
367
- // Is this change message part of an active transaction. Only applies to optimistic changes.
368
- isActive?: boolean
369
- }
371
+ TKey extends string | number = string | number,
372
+ > = Omit<ChangeMessage<T>, `key`> | DeleteKeyMessage<TKey>
373
+
374
+ export type OptimisticChangeMessage<
375
+ T extends object = Record<string, unknown>,
376
+ TKey extends string | number = string | number,
377
+ > =
378
+ | (ChangeMessage<T> & {
379
+ // Is this change message part of an active transaction. Only applies to optimistic changes.
380
+ isActive?: boolean
381
+ })
382
+ | (DeleteKeyMessage<TKey> & {
383
+ // Is this change message part of an active transaction. Only applies to optimistic changes.
384
+ isActive?: boolean
385
+ })
370
386
 
371
387
  /**
372
388
  * The Standard Schema interface.
@@ -894,3 +910,6 @@ export type WritableDeep<T> = T extends BuiltIns
894
910
  : T extends object
895
911
  ? WritableObjectDeep<T>
896
912
  : unknown
913
+
914
+ export type MakeOptional<T, K extends keyof T> = Omit<T, K> &
915
+ Partial<Pick<T, K>>