@tanstack/db 0.5.4 → 0.5.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/collection/subscription.cjs +17 -6
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +5 -0
- package/dist/cjs/collection/sync.cjs +11 -0
- package/dist/cjs/collection/sync.cjs.map +1 -1
- package/dist/cjs/collection/sync.d.cts +6 -0
- package/dist/cjs/query/live/collection-config-builder.cjs +13 -0
- package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
- package/dist/cjs/query/live/collection-config-builder.d.cts +4 -0
- package/dist/cjs/scheduler.cjs +6 -1
- package/dist/cjs/scheduler.cjs.map +1 -1
- package/dist/cjs/types.d.cts +2 -0
- package/dist/esm/collection/subscription.d.ts +5 -0
- package/dist/esm/collection/subscription.js +17 -6
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/collection/sync.d.ts +6 -0
- package/dist/esm/collection/sync.js +11 -0
- package/dist/esm/collection/sync.js.map +1 -1
- package/dist/esm/query/live/collection-config-builder.d.ts +4 -0
- package/dist/esm/query/live/collection-config-builder.js +13 -0
- package/dist/esm/query/live/collection-config-builder.js.map +1 -1
- package/dist/esm/scheduler.js +6 -1
- package/dist/esm/scheduler.js.map +1 -1
- package/dist/esm/types.d.ts +2 -0
- package/package.json +1 -1
- package/src/collection/subscription.ts +32 -6
- package/src/collection/sync.ts +15 -0
- package/src/query/live/collection-config-builder.ts +17 -0
- package/src/scheduler.ts +25 -1
- package/src/types.ts +3 -0
package/dist/cjs/scheduler.cjs
CHANGED
@@ -1,5 +1,8 @@
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+function isPendingAwareJob(dep) {
+  return typeof dep === `object` && dep !== null && typeof dep.hasPendingGraphRun === `function`;
+}
 class Scheduler {
   constructor() {
     this.contexts = /* @__PURE__ */ new Map();
@@ -69,7 +72,9 @@ class Scheduler {
         if (deps) {
           ready = true;
           for (const dep of deps) {
-            if (dep
+            if (dep === jobId) continue;
+            const depHasPending = isPendingAwareJob(dep) && dep.hasPendingGraphRun(contextId);
+            if (jobs.has(dep) && !completed.has(dep) || !jobs.has(dep) && depHasPending) {
              ready = false;
              break;
            }
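The change above makes the flush loop aware of dependencies that are not jobs in the same context: a dependency now also blocks a job if it is an object exposing hasPendingGraphRun(contextId) that still reports pending work, while dependencies that are neither pending nor enqueued are treated as satisfied. A minimal sketch of how a caller could rely on this; the import path and the upstreamSource object are illustrative assumptions, not part of the package:

import { Scheduler, type SchedulerContextId } from "@tanstack/db" // import path assumed

const pendingContexts = new Set<SchedulerContextId>()

// A "pending-aware" dependency: any object with a hasPendingGraphRun method.
// It does not have to be a jobId scheduled in the same context.
const upstreamSource = {
  hasPendingGraphRun(contextId: SchedulerContextId): boolean {
    return pendingContexts.has(contextId)
  },
}

const scheduler = new Scheduler()

scheduler.schedule({
  contextId: `tx-1`,
  jobId: `refresh-derived-state`,
  // flush() now asks upstreamSource whether it still has queued work for `tx-1`
  // before running this job; while it does, the job is pushed back on the queue.
  dependencies: [upstreamSource],
  run: () => {
    // recompute derived state here
  },
})

scheduler.flush(`tx-1`)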
package/dist/cjs/scheduler.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"scheduler.cjs","sources":["../../src/scheduler.ts"],"sourcesContent":["/**\n * Identifier used to scope scheduled work. Maps to a transaction id for live queries.\n */\nexport type SchedulerContextId = string | symbol\n\n/**\n * Options for {@link Scheduler.schedule}. Jobs are identified by `jobId` within a context\n * and may declare dependencies.\n */\ninterface ScheduleOptions {\n contextId?: SchedulerContextId\n jobId: unknown\n dependencies?: Iterable<unknown>\n run: () => void\n}\n\n/**\n * State per context. Queue preserves order, jobs hold run functions, dependencies track\n * prerequisites, and completed records which jobs have run during the current flush.\n */\ninterface SchedulerContextState {\n queue: Array<unknown>\n jobs: Map<unknown, () => void>\n dependencies: Map<unknown, Set<unknown>>\n completed: Set<unknown>\n}\n\n/**\n * Scoped scheduler that coalesces work by context and job.\n *\n * - **context** (e.g. transaction id) defines the batching boundary; work is queued until flushed.\n * - **job id** deduplicates work within a context; scheduling the same job replaces the previous run function.\n * - Without a context id, work executes immediately.\n *\n * Callers manage their own state; the scheduler only orchestrates execution order.\n */\nexport class Scheduler {\n private contexts = new Map<SchedulerContextId, SchedulerContextState>()\n private clearListeners = new Set<(contextId: SchedulerContextId) => void>()\n\n /**\n * Get or create the state bucket for a context.\n */\n private getOrCreateContext(\n contextId: SchedulerContextId\n ): SchedulerContextState {\n let context = this.contexts.get(contextId)\n if (!context) {\n context = {\n queue: [],\n jobs: new Map(),\n dependencies: new Map(),\n completed: new Set(),\n }\n this.contexts.set(contextId, context)\n }\n return context\n }\n\n /**\n * Schedule work. Without a context id, executes immediately.\n * Otherwise queues the job to be flushed once dependencies are satisfied.\n * Scheduling the same jobId again replaces the previous run function.\n */\n schedule({ contextId, jobId, dependencies, run }: ScheduleOptions): void {\n if (typeof contextId === `undefined`) {\n run()\n return\n }\n\n const context = this.getOrCreateContext(contextId)\n\n // If this is a new job, add it to the queue\n if (!context.jobs.has(jobId)) {\n context.queue.push(jobId)\n }\n\n // Store or replace the run function\n context.jobs.set(jobId, run)\n\n // Update dependencies\n if (dependencies) {\n const depSet = new Set<unknown>(dependencies)\n depSet.delete(jobId)\n context.dependencies.set(jobId, depSet)\n } else if (!context.dependencies.has(jobId)) {\n context.dependencies.set(jobId, new Set())\n }\n\n // Clear completion status since we're rescheduling\n context.completed.delete(jobId)\n }\n\n /**\n * Flush all queued work for a context. 
Jobs with unmet dependencies are retried.\n * Throws if a pass completes without running any job (dependency cycle).\n */\n flush(contextId: SchedulerContextId): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n const { queue, jobs, dependencies, completed } = context\n\n while (queue.length > 0) {\n let ranThisPass = false\n const jobsThisPass = queue.length\n\n for (let i = 0; i < jobsThisPass; i++) {\n const jobId = queue.shift()!\n const run = jobs.get(jobId)\n if (!run) {\n dependencies.delete(jobId)\n completed.delete(jobId)\n continue\n }\n\n const deps = dependencies.get(jobId)\n let ready = !deps\n if (deps) {\n ready = true\n for (const dep of deps) {\n if (dep
+
{"version":3,"file":"scheduler.cjs","sources":["../../src/scheduler.ts"],"sourcesContent":["/**\n * Identifier used to scope scheduled work. Maps to a transaction id for live queries.\n */\nexport type SchedulerContextId = string | symbol\n\n/**\n * Options for {@link Scheduler.schedule}. Jobs are identified by `jobId` within a context\n * and may declare dependencies.\n */\ninterface ScheduleOptions {\n contextId?: SchedulerContextId\n jobId: unknown\n dependencies?: Iterable<unknown>\n run: () => void\n}\n\n/**\n * State per context. Queue preserves order, jobs hold run functions, dependencies track\n * prerequisites, and completed records which jobs have run during the current flush.\n */\ninterface SchedulerContextState {\n queue: Array<unknown>\n jobs: Map<unknown, () => void>\n dependencies: Map<unknown, Set<unknown>>\n completed: Set<unknown>\n}\n\ninterface PendingAwareJob {\n hasPendingGraphRun: (contextId: SchedulerContextId) => boolean\n}\n\nfunction isPendingAwareJob(dep: any): dep is PendingAwareJob {\n return (\n typeof dep === `object` &&\n dep !== null &&\n typeof dep.hasPendingGraphRun === `function`\n )\n}\n\n/**\n * Scoped scheduler that coalesces work by context and job.\n *\n * - **context** (e.g. transaction id) defines the batching boundary; work is queued until flushed.\n * - **job id** deduplicates work within a context; scheduling the same job replaces the previous run function.\n * - Without a context id, work executes immediately.\n *\n * Callers manage their own state; the scheduler only orchestrates execution order.\n */\nexport class Scheduler {\n private contexts = new Map<SchedulerContextId, SchedulerContextState>()\n private clearListeners = new Set<(contextId: SchedulerContextId) => void>()\n\n /**\n * Get or create the state bucket for a context.\n */\n private getOrCreateContext(\n contextId: SchedulerContextId\n ): SchedulerContextState {\n let context = this.contexts.get(contextId)\n if (!context) {\n context = {\n queue: [],\n jobs: new Map(),\n dependencies: new Map(),\n completed: new Set(),\n }\n this.contexts.set(contextId, context)\n }\n return context\n }\n\n /**\n * Schedule work. Without a context id, executes immediately.\n * Otherwise queues the job to be flushed once dependencies are satisfied.\n * Scheduling the same jobId again replaces the previous run function.\n */\n schedule({ contextId, jobId, dependencies, run }: ScheduleOptions): void {\n if (typeof contextId === `undefined`) {\n run()\n return\n }\n\n const context = this.getOrCreateContext(contextId)\n\n // If this is a new job, add it to the queue\n if (!context.jobs.has(jobId)) {\n context.queue.push(jobId)\n }\n\n // Store or replace the run function\n context.jobs.set(jobId, run)\n\n // Update dependencies\n if (dependencies) {\n const depSet = new Set<unknown>(dependencies)\n depSet.delete(jobId)\n context.dependencies.set(jobId, depSet)\n } else if (!context.dependencies.has(jobId)) {\n context.dependencies.set(jobId, new Set())\n }\n\n // Clear completion status since we're rescheduling\n context.completed.delete(jobId)\n }\n\n /**\n * Flush all queued work for a context. 
Jobs with unmet dependencies are retried.\n * Throws if a pass completes without running any job (dependency cycle).\n */\n flush(contextId: SchedulerContextId): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n const { queue, jobs, dependencies, completed } = context\n\n while (queue.length > 0) {\n let ranThisPass = false\n const jobsThisPass = queue.length\n\n for (let i = 0; i < jobsThisPass; i++) {\n const jobId = queue.shift()!\n const run = jobs.get(jobId)\n if (!run) {\n dependencies.delete(jobId)\n completed.delete(jobId)\n continue\n }\n\n const deps = dependencies.get(jobId)\n let ready = !deps\n if (deps) {\n ready = true\n for (const dep of deps) {\n if (dep === jobId) continue\n\n const depHasPending =\n isPendingAwareJob(dep) && dep.hasPendingGraphRun(contextId)\n\n // Treat dependencies as blocking if the dep has a pending run in this\n // context or if it's enqueued and not yet complete. If the dep is\n // neither pending nor enqueued, consider it satisfied to avoid deadlocks\n // on lazy sources that never schedule work.\n if (\n (jobs.has(dep) && !completed.has(dep)) ||\n (!jobs.has(dep) && depHasPending)\n ) {\n ready = false\n break\n }\n }\n }\n\n if (ready) {\n jobs.delete(jobId)\n dependencies.delete(jobId)\n // Run the job. If it throws, we don't mark it complete, allowing the\n // error to propagate while maintaining scheduler state consistency.\n run()\n completed.add(jobId)\n ranThisPass = true\n } else {\n queue.push(jobId)\n }\n }\n\n if (!ranThisPass) {\n throw new Error(\n `Scheduler detected unresolved dependencies for context ${String(\n contextId\n )}.`\n )\n }\n }\n\n this.contexts.delete(contextId)\n }\n\n /**\n * Flush all contexts with pending work. Useful during tear-down.\n */\n flushAll(): void {\n for (const contextId of Array.from(this.contexts.keys())) {\n this.flush(contextId)\n }\n }\n\n /** Clear all scheduled jobs for a context. */\n clear(contextId: SchedulerContextId): void {\n this.contexts.delete(contextId)\n // Notify listeners that this context was cleared\n this.clearListeners.forEach((listener) => listener(contextId))\n }\n\n /** Register a listener to be notified when a context is cleared. */\n onClear(listener: (contextId: SchedulerContextId) => void): () => void {\n this.clearListeners.add(listener)\n return () => this.clearListeners.delete(listener)\n }\n\n /** Check if a context has pending jobs. */\n hasPendingJobs(contextId: SchedulerContextId): boolean {\n const context = this.contexts.get(contextId)\n return !!context && context.jobs.size > 0\n }\n\n /** Remove a single job from a context and clean up its dependencies. 
*/\n clearJob(contextId: SchedulerContextId, jobId: unknown): void {\n const context = this.contexts.get(contextId)\n if (!context) return\n\n context.jobs.delete(jobId)\n context.dependencies.delete(jobId)\n context.completed.delete(jobId)\n context.queue = context.queue.filter((id) => id !== jobId)\n\n if (context.jobs.size === 0) {\n this.contexts.delete(contextId)\n }\n }\n}\n\nexport const transactionScopedScheduler = new Scheduler()\n"],"names":[],"mappings":";;AA+BA,SAAS,kBAAkB,KAAkC;AAC3D,SACE,OAAO,QAAQ,YACf,QAAQ,QACR,OAAO,IAAI,uBAAuB;AAEtC;AAWO,MAAM,UAAU;AAAA,EAAhB,cAAA;AACL,SAAQ,+BAAe,IAAA;AACvB,SAAQ,qCAAqB,IAAA;AAAA,EAA6C;AAAA;AAAA;AAAA;AAAA,EAKlE,mBACN,WACuB;AACvB,QAAI,UAAU,KAAK,SAAS,IAAI,SAAS;AACzC,QAAI,CAAC,SAAS;AACZ,gBAAU;AAAA,QACR,OAAO,CAAA;AAAA,QACP,0BAAU,IAAA;AAAA,QACV,kCAAkB,IAAA;AAAA,QAClB,+BAAe,IAAA;AAAA,MAAI;AAErB,WAAK,SAAS,IAAI,WAAW,OAAO;AAAA,IACtC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,SAAS,EAAE,WAAW,OAAO,cAAc,OAA8B;AACvE,QAAI,OAAO,cAAc,aAAa;AACpC,UAAA;AACA;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,mBAAmB,SAAS;AAGjD,QAAI,CAAC,QAAQ,KAAK,IAAI,KAAK,GAAG;AAC5B,cAAQ,MAAM,KAAK,KAAK;AAAA,IAC1B;AAGA,YAAQ,KAAK,IAAI,OAAO,GAAG;AAG3B,QAAI,cAAc;AAChB,YAAM,SAAS,IAAI,IAAa,YAAY;AAC5C,aAAO,OAAO,KAAK;AACnB,cAAQ,aAAa,IAAI,OAAO,MAAM;AAAA,IACxC,WAAW,CAAC,QAAQ,aAAa,IAAI,KAAK,GAAG;AAC3C,cAAQ,aAAa,IAAI,OAAO,oBAAI,KAAK;AAAA,IAC3C;AAGA,YAAQ,UAAU,OAAO,KAAK;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAqC;AACzC,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,CAAC,QAAS;AAEd,UAAM,EAAE,OAAO,MAAM,cAAc,cAAc;AAEjD,WAAO,MAAM,SAAS,GAAG;AACvB,UAAI,cAAc;AAClB,YAAM,eAAe,MAAM;AAE3B,eAAS,IAAI,GAAG,IAAI,cAAc,KAAK;AACrC,cAAM,QAAQ,MAAM,MAAA;AACpB,cAAM,MAAM,KAAK,IAAI,KAAK;AAC1B,YAAI,CAAC,KAAK;AACR,uBAAa,OAAO,KAAK;AACzB,oBAAU,OAAO,KAAK;AACtB;AAAA,QACF;AAEA,cAAM,OAAO,aAAa,IAAI,KAAK;AACnC,YAAI,QAAQ,CAAC;AACb,YAAI,MAAM;AACR,kBAAQ;AACR,qBAAW,OAAO,MAAM;AACtB,gBAAI,QAAQ,MAAO;AAEnB,kBAAM,gBACJ,kBAAkB,GAAG,KAAK,IAAI,mBAAmB,SAAS;AAM5D,gBACG,KAAK,IAAI,GAAG,KAAK,CAAC,UAAU,IAAI,GAAG,KACnC,CAAC,KAAK,IAAI,GAAG,KAAK,eACnB;AACA,sBAAQ;AACR;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,YAAI,OAAO;AACT,eAAK,OAAO,KAAK;AACjB,uBAAa,OAAO,KAAK;AAGzB,cAAA;AACA,oBAAU,IAAI,KAAK;AACnB,wBAAc;AAAA,QAChB,OAAO;AACL,gBAAM,KAAK,KAAK;AAAA,QAClB;AAAA,MACF;AAEA,UAAI,CAAC,aAAa;AAChB,cAAM,IAAI;AAAA,UACR,0DAA0D;AAAA,YACxD;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MAEL;AAAA,IACF;AAEA,SAAK,SAAS,OAAO,SAAS;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAiB;AACf,eAAW,aAAa,MAAM,KAAK,KAAK,SAAS,KAAA,CAAM,GAAG;AACxD,WAAK,MAAM,SAAS;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,WAAqC;AACzC,SAAK,SAAS,OAAO,SAAS;AAE9B,SAAK,eAAe,QAAQ,CAAC,aAAa,SAAS,SAAS,CAAC;AAAA,EAC/D;AAAA;AAAA,EAGA,QAAQ,UAA+D;AACrE,SAAK,eAAe,IAAI,QAAQ;AAChC,WAAO,MAAM,KAAK,eAAe,OAAO,QAAQ;AAAA,EAClD;AAAA;AAAA,EAGA,eAAe,WAAwC;AACrD,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,WAAO,CAAC,CAAC,WAAW,QAAQ,KAAK,OAAO;AAAA,EAC1C;AAAA;AAAA,EAGA,SAAS,WAA+B,OAAsB;AAC5D,UAAM,UAAU,KAAK,SAAS,IAAI,SAAS;AAC3C,QAAI,CAAC,QAAS;AAEd,YAAQ,KAAK,OAAO,KAAK;AACzB,YAAQ,aAAa,OAAO,KAAK;AACjC,YAAQ,UAAU,OAAO,KAAK;AAC9B,YAAQ,QAAQ,QAAQ,MAAM,OAAO,CAAC,OAAO,OAAO,KAAK;AAEzD,QAAI,QAAQ,KAAK,SAAS,GAAG;AAC3B,WAAK,SAAS,OAAO,SAAS;AAAA,IAChC;AAAA,EACF;AACF;AAEO,MAAM,6BAA6B,IAAI,UAAA;;;"}
package/dist/cjs/types.d.cts
CHANGED
@@ -198,10 +198,12 @@ export type LoadSubsetOptions = {
     subscription?: Subscription;
 };
 export type LoadSubsetFn = (options: LoadSubsetOptions) => true | Promise<void>;
+export type UnloadSubsetFn = (options: LoadSubsetOptions) => void;
 export type CleanupFn = () => void;
 export type SyncConfigRes = {
     cleanup?: CleanupFn;
     loadSubset?: LoadSubsetFn;
+    unloadSubset?: UnloadSubsetFn;
 };
 export interface SyncConfig<T extends object = Record<string, unknown>, TKey extends string | number = string | number> {
     sync: (params: {
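With UnloadSubsetFn added to SyncConfigRes, a sync function can now hand back an unloadSubset handler next to loadSubset and cleanup. A rough sketch of what that could look like for an on-demand collection; fetchRows and releaseRows are hypothetical helpers, the import path is assumed, and the sync params object is abbreviated:

import type { LoadSubsetOptions, SyncConfigRes } from "@tanstack/db" // import path assumed

// Hypothetical data-layer helpers, not part of @tanstack/db.
declare function fetchRows(options: LoadSubsetOptions): Promise<Array<object>>
declare function releaseRows(options: LoadSubsetOptions): void

function sync({ begin, write, commit }: any): SyncConfigRes {
  return {
    loadSubset: (options: LoadSubsetOptions): true | Promise<void> => {
      // Fetch only the rows matching options.where / options.limit / options.orderBy.
      return fetchRows(options).then((rows) => {
        begin()
        for (const row of rows) write({ type: `insert`, value: row })
        commit()
      })
    },
    unloadSubset: (options: LoadSubsetOptions): void => {
      // Receives the same options that were passed to loadSubset, so the sync
      // layer can drop caches, close streams, or decrement refcounts for that subset.
      releaseRows(options)
    },
    cleanup: () => {
      // Tear down anything global to the collection.
    },
  }
}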
@@ -26,6 +26,11 @@ export declare class CollectionSubscription extends EventEmitter<SubscriptionEve
     private options;
     private loadedInitialState;
     private snapshotSent;
+    /**
+     * Track all loadSubset calls made by this subscription so we can unload them on cleanup.
+     * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.
+     */
+    private loadedSubsets;
     private sentKeys;
     private filteredCallback;
     private orderByIndex;
@@ -11,6 +11,7 @@ class CollectionSubscription extends EventEmitter {
     this.options = options;
     this.loadedInitialState = false;
     this.snapshotSent = false;
+    this.loadedSubsets = [];
     this.sentKeys = /* @__PURE__ */ new Set();
     this._status = `ready`;
     this.pendingLoadSubsetPromises = /* @__PURE__ */ new Set();
@@ -110,10 +111,12 @@ class CollectionSubscription extends EventEmitter {
     } else {
       this.loadedInitialState = true;
     }
-    const
+    const loadOptions = {
       where: stateOpts.where,
       subscription: this
-    }
+    };
+    const syncResult = this.collection._sync.loadSubset(loadOptions);
+    this.loadedSubsets.push(loadOptions);
     const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true;
     if (trackLoadSubsetPromise) {
       this.trackLoadSubsetPromise(syncResult);
@@ -208,20 +211,24 @@ class CollectionSubscription extends EventEmitter {
       const valueFilter = operator(expression, new Value(minValue));
       whereWithValueFilter = where ? and(where, valueFilter) : valueFilter;
     }
-    const
+    const loadOptions1 = {
       where: whereWithValueFilter,
       limit,
       orderBy,
       subscription: this
-    }
+    };
+    const syncResult = this.collection._sync.loadSubset(loadOptions1);
+    this.loadedSubsets.push(loadOptions1);
     const promises = [];
     if (typeof minValue !== `undefined`) {
       const { expression } = orderBy[0];
       const exactValueFilter = eq(expression, new Value(minValue));
-      const
+      const loadOptions2 = {
         where: exactValueFilter,
         subscription: this
-      }
+      };
+      const equalValueResult = this.collection._sync.loadSubset(loadOptions2);
+      this.loadedSubsets.push(loadOptions2);
       if (equalValueResult instanceof Promise) {
         promises.push(equalValueResult);
       }
@@ -272,6 +279,10 @@ class CollectionSubscription extends EventEmitter {
     }
   }
   unsubscribe() {
+    for (const options of this.loadedSubsets) {
+      this.collection._sync.unloadSubset(options);
+    }
+    this.loadedSubsets = [];
     this.emitInner(`unsubscribed`, {
       type: `unsubscribed`,
       subscription: this
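Because unsubscribe() replays the exact LoadSubsetOptions objects it previously passed to loadSubset, a sync layer can pair loads and unloads by object identity. A small illustrative sketch, not part of the package, of a handler pair that keeps per-subset teardown functions keyed on that identity (startStreamingFor is a hypothetical helper, import path assumed):

import type { LoadSubsetOptions } from "@tanstack/db" // import path assumed

// Hypothetical helper that starts streaming data for a subset and returns a stop function.
declare function startStreamingFor(options: LoadSubsetOptions): () => void

const activeSubsets = new Map<LoadSubsetOptions, () => void>()

function loadSubset(options: LoadSubsetOptions): true {
  activeSubsets.set(options, startStreamingFor(options))
  return true
}

function unloadSubset(options: LoadSubsetOptions): void {
  // The subscription passes back the identical options object it loaded with,
  // so a plain Map keyed on the object is enough to find what to release.
  activeSubsets.get(options)?.()
  activeSubsets.delete(options)
}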
package/dist/esm/collection/subscription.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from \"../indexes/auto-index.js\"\nimport { and, eq, gt, lt } from \"../query/builder/functions.js\"\nimport { Value } from \"../query/ir.js\"\nimport { EventEmitter } from \"../event-emitter.js\"\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from \"./change-events.js\"\nimport type { BasicExpression, OrderBy } from \"../query/ir.js\"\nimport type { IndexInterface } from \"../indexes/base-index.js\"\nimport type {\n ChangeMessage,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from \"../types.js\"\nimport type { CollectionImpl } from \"./index.js\"\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n minValue?: any\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? 
createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const syncResult = this.collection._sync.loadSubset({\n where: stateOpts.where,\n subscription: this,\n })\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? 
true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key)\n )\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to `minValue`.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to `minValue` + limit values greater than `minValue`.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValue,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`\n )\n }\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValue\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. 
if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n let keys: Array<string | number> = []\n if (minValue !== undefined) {\n // First, get all items with the same value as minValue\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValue)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValue,\n filterFn\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n this.callback(changes)\n\n let whereWithValueFilter = where\n if (typeof minValue !== `undefined`) {\n // Only request data that we haven't seen yet (i.e. is bigger than the minValue)\n const { expression, compareOptions } = orderBy[0]!\n const operator = compareOptions.direction === `asc` ? gt : lt\n const valueFilter = operator(expression, new Value(minValue))\n whereWithValueFilter = where ? and(where, valueFilter) : valueFilter\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const syncResult = this.collection._sync.loadSubset({\n where: whereWithValueFilter,\n limit,\n orderBy,\n subscription: this,\n })\n\n // Make parallel loadSubset calls for values equal to minValue and values greater than minValue\n const promises: Array<Promise<void>> = []\n\n // First promise: load all values equal to minValue\n if (typeof minValue !== `undefined`) {\n const { expression } = orderBy[0]!\n const exactValueFilter = eq(expression, new Value(minValue))\n\n const equalValueResult = this.collection._sync.loadSubset({\n where: exactValueFilter,\n subscription: this,\n })\n\n if (equalValueResult instanceof Promise) {\n promises.push(equalValueResult)\n }\n }\n\n // Second promise: load values greater than minValue\n if (syncResult instanceof Promise) {\n promises.push(syncResult)\n }\n\n // Track the combined promise\n if (promises.length > 0) {\n const combinedPromise = Promise.all(promises).then(() => {})\n this.trackLoadSubsetPromise(combinedPromise)\n } else {\n this.trackLoadSubsetPromise(syncResult)\n }\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. 
does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState) {\n // We loaded the entire initial state\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n if (!this.sentKeys.has(change.key)) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState) {\n // No need to track sent keys if we loaded the entire state.\n // Since we sent everything, all keys must have been observed.\n return\n }\n\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n }\n\n unsubscribe() {\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n }\n}\n"],"names":[],"mappings":";;;;;AAuCO,MAAM,+BACH,aAEV;AAAA,EAsBE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AAxBV,SAAQ,qBAAqB;AAI7B,SAAQ,eAAe;AAGvB,SAAQ,+BAAe,IAAA;AAOvB,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW;AAAA,MAClD,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA,IAAA,CACf;AAED,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,M
AChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAG3C,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAQ5D,QAAI,OAA+B,CAAA;AACnC,QAAI,aAAa,QAAW;AAE1B,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,QAAQ,CAAC;AAAA,MAAA,CAC1C;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,MAC7C;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,IAC7C;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAAM,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAEA,SAAK,SAAS,OAAO;AAErB,QAAI,uBAAuB;AAC3B,QAAI,OAAO,aAAa,aAAa;AAEnC,YAAM,EAAE,YAAY,mBAAmB,QAAQ,CAAC;AAChD,YAAM,WAAW,eAAe,cAAc,QAAQ,KAAK;AAC3D,YAAM,cAAc,SAAS,YAAY,IAAI,MAAM,QAAQ,CAAC;AAC5D,6BAAuB,QAAQ,IAAI,OAAO,WAAW,IAAI;AAAA,IAC3D;AAIA,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW;AAAA,MAClD,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,cAAc;AAAA,IAAA,CACf;AAGD,UAAM,WAAiC,CAAA;AAGvC,QAAI,OAAO,aAAa,aAAa;AACnC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,mBAAmB,GAAG,YAAY,IAAI,MAAM,QAAQ,CAAC;AAE3D,YAAM,mBAAmB,KAAK,WAAW,MAAM,WAAW;AAAA,QACxD,OAAO;AAAA,QACP,cAAc;AAAA,MAAA,CACf;AAED,UAAI,4BAA4B,SAAS;AACvC,iBAAS,KAAK,gBAAgB;AAAA,MAChC;AAAA,IACF;AAGA,QAAI,sBAAsB,SAAS;AACjC,eAAS,KAAK,UAAU;AAAA,IAC1B;AAGA,QAAI,SAAS,SAAS,GAAG;AACvB,YAAM,kBAAkB,QAAQ,IAAI,QAAQ,EAAE,KAAK,MAAM;AAAA,MAAC,CAAC;AAC3D,WAAK,uBAAuB,eAAe;AAAA,IAC7C,OAAO;AACL,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,oBAAoB;AAG3B,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,UAAI,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG,GAAG;AAClC,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,oBAAoB;AAG3B;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,cAAc;AACZ,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
+
{"version":3,"file":"subscription.js","sources":["../../../src/collection/subscription.ts"],"sourcesContent":["import { ensureIndexForExpression } from \"../indexes/auto-index.js\"\nimport { and, eq, gt, lt } from \"../query/builder/functions.js\"\nimport { Value } from \"../query/ir.js\"\nimport { EventEmitter } from \"../event-emitter.js\"\nimport {\n createFilterFunctionFromExpression,\n createFilteredCallback,\n} from \"./change-events.js\"\nimport type { BasicExpression, OrderBy } from \"../query/ir.js\"\nimport type { IndexInterface } from \"../indexes/base-index.js\"\nimport type {\n ChangeMessage,\n LoadSubsetOptions,\n Subscription,\n SubscriptionEvents,\n SubscriptionStatus,\n SubscriptionUnsubscribedEvent,\n} from \"../types.js\"\nimport type { CollectionImpl } from \"./index.js\"\n\ntype RequestSnapshotOptions = {\n where?: BasicExpression<boolean>\n optimizedOnly?: boolean\n trackLoadSubsetPromise?: boolean\n}\n\ntype RequestLimitedSnapshotOptions = {\n orderBy: OrderBy\n limit: number\n minValue?: any\n}\n\ntype CollectionSubscriptionOptions = {\n includeInitialState?: boolean\n /** Pre-compiled expression for filtering changes */\n whereExpression?: BasicExpression<boolean>\n /** Callback to call when the subscription is unsubscribed */\n onUnsubscribe?: (event: SubscriptionUnsubscribedEvent) => void\n}\n\nexport class CollectionSubscription\n extends EventEmitter<SubscriptionEvents>\n implements Subscription\n{\n private loadedInitialState = false\n\n // Flag to indicate that we have sent at least 1 snapshot.\n // While `snapshotSent` is false we filter out all changes from subscription to the collection.\n private snapshotSent = false\n\n /**\n * Track all loadSubset calls made by this subscription so we can unload them on cleanup.\n * We store the exact LoadSubsetOptions we passed to loadSubset to ensure symmetric unload.\n */\n private loadedSubsets: Array<LoadSubsetOptions> = []\n\n // Keep track of the keys we've sent (needed for join and orderBy optimizations)\n private sentKeys = new Set<string | number>()\n\n private filteredCallback: (changes: Array<ChangeMessage<any, any>>) => void\n\n private orderByIndex: IndexInterface<string | number> | undefined\n\n // Status tracking\n private _status: SubscriptionStatus = `ready`\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n public get status(): SubscriptionStatus {\n return this._status\n }\n\n constructor(\n private collection: CollectionImpl<any, any, any, any, any>,\n private callback: (changes: Array<ChangeMessage<any, any>>) => void,\n private options: CollectionSubscriptionOptions\n ) {\n super()\n if (options.onUnsubscribe) {\n this.on(`unsubscribed`, (event) => options.onUnsubscribe!(event))\n }\n\n // Auto-index for where expressions if enabled\n if (options.whereExpression) {\n ensureIndexForExpression(options.whereExpression, this.collection)\n }\n\n const callbackWithSentKeysTracking = (\n changes: Array<ChangeMessage<any, any>>\n ) => {\n callback(changes)\n this.trackSentKeys(changes)\n }\n\n this.callback = callbackWithSentKeysTracking\n\n // Create a filtered callback if where clause is provided\n this.filteredCallback = options.whereExpression\n ? 
createFilteredCallback(this.callback, options)\n : this.callback\n }\n\n setOrderByIndex(index: IndexInterface<any>) {\n this.orderByIndex = index\n }\n\n /**\n * Set subscription status and emit events if changed\n */\n private setStatus(newStatus: SubscriptionStatus) {\n if (this._status === newStatus) {\n return // No change\n }\n\n const previousStatus = this._status\n this._status = newStatus\n\n // Emit status:change event\n this.emitInner(`status:change`, {\n type: `status:change`,\n subscription: this,\n previousStatus,\n status: newStatus,\n })\n\n // Emit specific status event\n const eventKey: `status:${SubscriptionStatus}` = `status:${newStatus}`\n this.emitInner(eventKey, {\n type: eventKey,\n subscription: this,\n previousStatus,\n status: newStatus,\n } as SubscriptionEvents[typeof eventKey])\n }\n\n /**\n * Track a loadSubset promise and manage loading status\n */\n private trackLoadSubsetPromise(syncResult: Promise<void> | true) {\n // Track the promise if it's actually a promise (async work)\n if (syncResult instanceof Promise) {\n this.pendingLoadSubsetPromises.add(syncResult)\n this.setStatus(`loadingSubset`)\n\n syncResult.finally(() => {\n this.pendingLoadSubsetPromises.delete(syncResult)\n if (this.pendingLoadSubsetPromises.size === 0) {\n this.setStatus(`ready`)\n }\n })\n }\n }\n\n hasLoadedInitialState() {\n return this.loadedInitialState\n }\n\n hasSentAtLeastOneSnapshot() {\n return this.snapshotSent\n }\n\n emitEvents(changes: Array<ChangeMessage<any, any>>) {\n const newChanges = this.filterAndFlipChanges(changes)\n this.filteredCallback(newChanges)\n }\n\n /**\n * Sends the snapshot to the callback.\n * Returns a boolean indicating if it succeeded.\n * It can only fail if there is no index to fulfill the request\n * and the optimizedOnly option is set to true,\n * or, the entire state was already loaded.\n */\n requestSnapshot(opts?: RequestSnapshotOptions): boolean {\n if (this.loadedInitialState) {\n // Subscription was deoptimized so we already sent the entire initial state\n return false\n }\n\n const stateOpts: RequestSnapshotOptions = {\n where: this.options.whereExpression,\n optimizedOnly: opts?.optimizedOnly ?? false,\n }\n\n if (opts) {\n if (`where` in opts) {\n const snapshotWhereExp = opts.where\n if (stateOpts.where) {\n // Combine the two where expressions\n const subWhereExp = stateOpts.where\n const combinedWhereExp = and(subWhereExp, snapshotWhereExp)\n stateOpts.where = combinedWhereExp\n } else {\n stateOpts.where = snapshotWhereExp\n }\n }\n } else {\n // No options provided so it's loading the entire initial state\n this.loadedInitialState = true\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions: LoadSubsetOptions = {\n where: stateOpts.where,\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions)\n\n // Track this loadSubset call so we can unload it later\n this.loadedSubsets.push(loadOptions)\n\n const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? 
true\n if (trackLoadSubsetPromise) {\n this.trackLoadSubsetPromise(syncResult)\n }\n\n // Also load data immediately from the collection\n const snapshot = this.collection.currentStateAsChanges(stateOpts)\n\n if (snapshot === undefined) {\n // Couldn't load from indexes\n return false\n }\n\n // Only send changes that have not been sent yet\n const filteredSnapshot = snapshot.filter(\n (change) => !this.sentKeys.has(change.key)\n )\n\n this.snapshotSent = true\n this.callback(filteredSnapshot)\n return true\n }\n\n /**\n * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to `minValue`.\n * Requires a range index to be set with `setOrderByIndex` prior to calling this method.\n * It uses that range index to load the items in the order of the index.\n * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to `minValue` + limit values greater than `minValue`.\n * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.\n * Note 2: it does not send keys that have already been sent before.\n */\n requestLimitedSnapshot({\n orderBy,\n limit,\n minValue,\n }: RequestLimitedSnapshotOptions) {\n if (!limit) throw new Error(`limit is required`)\n\n if (!this.orderByIndex) {\n throw new Error(\n `Ordered snapshot was requested but no index was found. You have to call setOrderByIndex before requesting an ordered snapshot.`\n )\n }\n\n const index = this.orderByIndex\n const where = this.options.whereExpression\n const whereFilterFn = where\n ? createFilterFunctionFromExpression(where)\n : undefined\n\n const filterFn = (key: string | number): boolean => {\n if (this.sentKeys.has(key)) {\n return false\n }\n\n const value = this.collection.get(key)\n if (value === undefined) {\n return false\n }\n\n return whereFilterFn?.(value) ?? true\n }\n\n let biggestObservedValue = minValue\n const changes: Array<ChangeMessage<any, string | number>> = []\n\n // If we have a minValue we need to handle the case\n // where there might be duplicate values equal to minValue that we need to include\n // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]\n // so if minValue is 3 then the previous snapshot may not have included all 3s\n // e.g. 
if it was offset 0 and limit 3 it would only have loaded the first 3\n // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values\n let keys: Array<string | number> = []\n if (minValue !== undefined) {\n // First, get all items with the same value as minValue\n const { expression } = orderBy[0]!\n const allRowsWithMinValue = this.collection.currentStateAsChanges({\n where: eq(expression, new Value(minValue)),\n })\n\n if (allRowsWithMinValue) {\n const keysWithMinValue = allRowsWithMinValue\n .map((change) => change.key)\n .filter((key) => !this.sentKeys.has(key) && filterFn(key))\n\n // Add items with the minValue first\n keys.push(...keysWithMinValue)\n\n // Then get items greater than minValue\n const keysGreaterThanMin = index.take(\n limit - keys.length,\n minValue,\n filterFn\n )\n keys.push(...keysGreaterThanMin)\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n } else {\n keys = index.take(limit, minValue, filterFn)\n }\n\n const valuesNeeded = () => Math.max(limit - changes.length, 0)\n const collectionExhausted = () => keys.length === 0\n\n while (valuesNeeded() > 0 && !collectionExhausted()) {\n const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration\n\n for (const key of keys) {\n const value = this.collection.get(key)!\n changes.push({\n type: `insert`,\n key,\n value,\n })\n biggestObservedValue = value\n insertedKeys.add(key) // Track this key\n }\n\n keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)\n }\n\n this.callback(changes)\n\n let whereWithValueFilter = where\n if (typeof minValue !== `undefined`) {\n // Only request data that we haven't seen yet (i.e. is bigger than the minValue)\n const { expression, compareOptions } = orderBy[0]!\n const operator = compareOptions.direction === `asc` ? gt : lt\n const valueFilter = operator(expression, new Value(minValue))\n whereWithValueFilter = where ? 
and(where, valueFilter) : valueFilter\n }\n\n // Request the sync layer to load more data\n // don't await it, we will load the data into the collection when it comes in\n const loadOptions1: LoadSubsetOptions = {\n where: whereWithValueFilter,\n limit,\n orderBy,\n subscription: this,\n }\n const syncResult = this.collection._sync.loadSubset(loadOptions1)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions1)\n\n // Make parallel loadSubset calls for values equal to minValue and values greater than minValue\n const promises: Array<Promise<void>> = []\n\n // First promise: load all values equal to minValue\n if (typeof minValue !== `undefined`) {\n const { expression } = orderBy[0]!\n const exactValueFilter = eq(expression, new Value(minValue))\n\n const loadOptions2: LoadSubsetOptions = {\n where: exactValueFilter,\n subscription: this,\n }\n const equalValueResult = this.collection._sync.loadSubset(loadOptions2)\n\n // Track this loadSubset call\n this.loadedSubsets.push(loadOptions2)\n\n if (equalValueResult instanceof Promise) {\n promises.push(equalValueResult)\n }\n }\n\n // Second promise: load values greater than minValue\n if (syncResult instanceof Promise) {\n promises.push(syncResult)\n }\n\n // Track the combined promise\n if (promises.length > 0) {\n const combinedPromise = Promise.all(promises).then(() => {})\n this.trackLoadSubsetPromise(combinedPromise)\n } else {\n this.trackLoadSubsetPromise(syncResult)\n }\n }\n\n // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function\n // and that that also works properly (i.e. does not skip duplicate values)\n\n /**\n * Filters and flips changes for keys that have not been sent yet.\n * Deletes are filtered out for keys that have not been sent yet.\n * Updates are flipped into inserts for keys that have not been sent yet.\n */\n private filterAndFlipChanges(changes: Array<ChangeMessage<any, any>>) {\n if (this.loadedInitialState) {\n // We loaded the entire initial state\n // so no need to filter or flip changes\n return changes\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n if (!this.sentKeys.has(change.key)) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert`, previousValue: undefined }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n this.sentKeys.add(change.key)\n }\n newChanges.push(newChange)\n }\n return newChanges\n }\n\n private trackSentKeys(changes: Array<ChangeMessage<any, string | number>>) {\n if (this.loadedInitialState) {\n // No need to track sent keys if we loaded the entire state.\n // Since we sent everything, all keys must have been observed.\n return\n }\n\n for (const change of changes) {\n this.sentKeys.add(change.key)\n }\n }\n\n unsubscribe() {\n // Unload all subsets that this subscription loaded\n // We pass the exact same LoadSubsetOptions we used for loadSubset\n for (const options of this.loadedSubsets) {\n this.collection._sync.unloadSubset(options)\n }\n this.loadedSubsets = []\n\n this.emitInner(`unsubscribed`, {\n type: `unsubscribed`,\n subscription: this,\n })\n // Clear all event listeners to prevent memory leaks\n this.clearListeners()\n 
}\n}\n"],"names":[],"mappings":";;;;;AAwCO,MAAM,+BACH,aAEV;AAAA,EA4BE,YACU,YACA,UACA,SACR;AACA,UAAA;AAJQ,SAAA,aAAA;AACA,SAAA,WAAA;AACA,SAAA,UAAA;AA9BV,SAAQ,qBAAqB;AAI7B,SAAQ,eAAe;AAMvB,SAAQ,gBAA0C,CAAA;AAGlD,SAAQ,+BAAe,IAAA;AAOvB,SAAQ,UAA8B;AACtC,SAAQ,gDAAoD,IAAA;AAY1D,QAAI,QAAQ,eAAe;AACzB,WAAK,GAAG,gBAAgB,CAAC,UAAU,QAAQ,cAAe,KAAK,CAAC;AAAA,IAClE;AAGA,QAAI,QAAQ,iBAAiB;AAC3B,+BAAyB,QAAQ,iBAAiB,KAAK,UAAU;AAAA,IACnE;AAEA,UAAM,+BAA+B,CACnC,YACG;AACH,eAAS,OAAO;AAChB,WAAK,cAAc,OAAO;AAAA,IAC5B;AAEA,SAAK,WAAW;AAGhB,SAAK,mBAAmB,QAAQ,kBAC5B,uBAAuB,KAAK,UAAU,OAAO,IAC7C,KAAK;AAAA,EACX;AAAA,EAhCA,IAAW,SAA6B;AACtC,WAAO,KAAK;AAAA,EACd;AAAA,EAgCA,gBAAgB,OAA4B;AAC1C,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,WAA+B;AAC/C,QAAI,KAAK,YAAY,WAAW;AAC9B;AAAA,IACF;AAEA,UAAM,iBAAiB,KAAK;AAC5B,SAAK,UAAU;AAGf,SAAK,UAAU,iBAAiB;AAAA,MAC9B,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CACT;AAGD,UAAM,WAA2C,UAAU,SAAS;AACpE,SAAK,UAAU,UAAU;AAAA,MACvB,MAAM;AAAA,MACN,cAAc;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,IAAA,CAC8B;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,uBAAuB,YAAkC;AAE/D,QAAI,sBAAsB,SAAS;AACjC,WAAK,0BAA0B,IAAI,UAAU;AAC7C,WAAK,UAAU,eAAe;AAE9B,iBAAW,QAAQ,MAAM;AACvB,aAAK,0BAA0B,OAAO,UAAU;AAChD,YAAI,KAAK,0BAA0B,SAAS,GAAG;AAC7C,eAAK,UAAU,OAAO;AAAA,QACxB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAAA,EAEA,wBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,4BAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,WAAW,SAAyC;AAClD,UAAM,aAAa,KAAK,qBAAqB,OAAO;AACpD,SAAK,iBAAiB,UAAU;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,gBAAgB,MAAwC;AACtD,QAAI,KAAK,oBAAoB;AAE3B,aAAO;AAAA,IACT;AAEA,UAAM,YAAoC;AAAA,MACxC,OAAO,KAAK,QAAQ;AAAA,MACpB,eAAe,MAAM,iBAAiB;AAAA,IAAA;AAGxC,QAAI,MAAM;AACR,UAAI,WAAW,MAAM;AACnB,cAAM,mBAAmB,KAAK;AAC9B,YAAI,UAAU,OAAO;AAEnB,gBAAM,cAAc,UAAU;AAC9B,gBAAM,mBAAmB,IAAI,aAAa,gBAAgB;AAC1D,oBAAU,QAAQ;AAAA,QACpB,OAAO;AACL,oBAAU,QAAQ;AAAA,QACpB;AAAA,MACF;AAAA,IACF,OAAO;AAEL,WAAK,qBAAqB;AAAA,IAC5B;AAIA,UAAM,cAAiC;AAAA,MACrC,OAAO,UAAU;AAAA,MACjB,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,WAAW;AAG/D,SAAK,cAAc,KAAK,WAAW;AAEnC,UAAM,yBAAyB,MAAM,0BAA0B;AAC/D,QAAI,wBAAwB;AAC1B,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAGA,UAAM,WAAW,KAAK,WAAW,sBAAsB,SAAS;AAEhE,QAAI,aAAa,QAAW;AAE1B,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,SAAS;AAAA,MAChC,CAAC,WAAW,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAAA;AAG3C,SAAK,eAAe;AACpB,SAAK,SAAS,gBAAgB;AAC9B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,uBAAuB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GACgC;AAChC,QAAI,CAAC,MAAO,OAAM,IAAI,MAAM,mBAAmB;AAE/C,QAAI,CAAC,KAAK,cAAc;AACtB,YAAM,IAAI;AAAA,QACR;AAAA,MAAA;AAAA,IAEJ;AAEA,UAAM,QAAQ,KAAK;AACnB,UAAM,QAAQ,KAAK,QAAQ;AAC3B,UAAM,gBAAgB,QAClB,mCAAmC,KAAK,IACxC;AAEJ,UAAM,WAAW,CAAC,QAAkC;AAClD,UAAI,KAAK,SAAS,IAAI,GAAG,GAAG;AAC1B,eAAO;AAAA,MACT;AAEA,YAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,UAAI,UAAU,QAAW;AACvB,eAAO;AAAA,MACT;AAEA,aAAO,gBAAgB,KAAK,KAAK;AAAA,IACnC;AAEA,QAAI,uBAAuB;AAC3B,UAAM,UAAsD,CAAA;AAQ5D,QAAI,OAA+B,CAAA;AACnC,QAAI,aAAa,QAAW;AAE1B,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,sBAAsB,KAAK,WAAW,sBAAsB;AAAA,QAChE,OAAO,GAAG,YAAY,IAAI,MAAM,QAAQ,CAAC;AAAA,MAAA,CAC1C;AAED,UAAI,qBAAqB;AACvB,cAAM,mBAAmB,oBACtB,IAAI,CAAC,WAAW,OAAO,GAAG,EAC1B,OAAO,CAAC,QAAQ,CAAC,KAAK,SAAS,IAAI,GAAG,KAAK,SAAS,GAAG,CAAC;AAG3D,aAAK,KAAK,GAAG,gBAAgB;AAG7B,cAAM,qBAAqB,MAAM;AAAA,UAC/B,QAAQ,KAAK;AAAA,UACb;AAAA,UACA;AAAA,QAAA;AAEF,aAAK,KAAK,GAAG,kBAAkB;AAAA,MACjC,OAAO;AACL,eAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,MAC7C;AAAA,IACF,OAAO;AACL,aAAO,MAAM,KAAK,OAAO,UAAU,QAAQ;AAAA,IAC7C;AAEA,UAAM,eAAe,MAAM,KAAK,IAAI,QAAQ,QAAQ,QAAQ,CAAC;AAC7D,UAAM,sBAAsB,MAAM,KAAK,WAAW;AAElD,WAAO,aAAA,IAAiB,KAAK,CAAC,uBAAuB;AACnD,YAA
M,mCAAmB,IAAA;AAEzB,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAQ,KAAK,WAAW,IAAI,GAAG;AACrC,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN;AAAA,UACA;AAAA,QAAA,CACD;AACD,+BAAuB;AACvB,qBAAa,IAAI,GAAG;AAAA,MACtB;AAEA,aAAO,MAAM,KAAK,aAAA,GAAgB,sBAAsB,QAAQ;AAAA,IAClE;AAEA,SAAK,SAAS,OAAO;AAErB,QAAI,uBAAuB;AAC3B,QAAI,OAAO,aAAa,aAAa;AAEnC,YAAM,EAAE,YAAY,mBAAmB,QAAQ,CAAC;AAChD,YAAM,WAAW,eAAe,cAAc,QAAQ,KAAK;AAC3D,YAAM,cAAc,SAAS,YAAY,IAAI,MAAM,QAAQ,CAAC;AAC5D,6BAAuB,QAAQ,IAAI,OAAO,WAAW,IAAI;AAAA,IAC3D;AAIA,UAAM,eAAkC;AAAA,MACtC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,cAAc;AAAA,IAAA;AAEhB,UAAM,aAAa,KAAK,WAAW,MAAM,WAAW,YAAY;AAGhE,SAAK,cAAc,KAAK,YAAY;AAGpC,UAAM,WAAiC,CAAA;AAGvC,QAAI,OAAO,aAAa,aAAa;AACnC,YAAM,EAAE,WAAA,IAAe,QAAQ,CAAC;AAChC,YAAM,mBAAmB,GAAG,YAAY,IAAI,MAAM,QAAQ,CAAC;AAE3D,YAAM,eAAkC;AAAA,QACtC,OAAO;AAAA,QACP,cAAc;AAAA,MAAA;AAEhB,YAAM,mBAAmB,KAAK,WAAW,MAAM,WAAW,YAAY;AAGtE,WAAK,cAAc,KAAK,YAAY;AAEpC,UAAI,4BAA4B,SAAS;AACvC,iBAAS,KAAK,gBAAgB;AAAA,MAChC;AAAA,IACF;AAGA,QAAI,sBAAsB,SAAS;AACjC,eAAS,KAAK,UAAU;AAAA,IAC1B;AAGA,QAAI,SAAS,SAAS,GAAG;AACvB,YAAM,kBAAkB,QAAQ,IAAI,QAAQ,EAAE,KAAK,MAAM;AAAA,MAAC,CAAC;AAC3D,WAAK,uBAAuB,eAAe;AAAA,IAC7C,OAAO;AACL,WAAK,uBAAuB,UAAU;AAAA,IACxC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,qBAAqB,SAAyC;AACpE,QAAI,KAAK,oBAAoB;AAG3B,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,CAAA;AACnB,eAAW,UAAU,SAAS;AAC5B,UAAI,YAAY;AAChB,UAAI,CAAC,KAAK,SAAS,IAAI,OAAO,GAAG,GAAG;AAClC,YAAI,OAAO,SAAS,UAAU;AAC5B,sBAAY,EAAE,GAAG,QAAQ,MAAM,UAAU,eAAe,OAAA;AAAA,QAC1D,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,QACF;AACA,aAAK,SAAS,IAAI,OAAO,GAAG;AAAA,MAC9B;AACA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,SAAqD;AACzE,QAAI,KAAK,oBAAoB;AAG3B;AAAA,IACF;AAEA,eAAW,UAAU,SAAS;AAC5B,WAAK,SAAS,IAAI,OAAO,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA,EAEA,cAAc;AAGZ,eAAW,WAAW,KAAK,eAAe;AACxC,WAAK,WAAW,MAAM,aAAa,OAAO;AAAA,IAC5C;AACA,SAAK,gBAAgB,CAAA;AAErB,SAAK,UAAU,gBAAgB;AAAA,MAC7B,MAAM;AAAA,MACN,cAAc;AAAA,IAAA,CACf;AAED,SAAK,eAAA;AAAA,EACP;AACF;"}
@@ -15,6 +15,7 @@ export declare class CollectionSyncManager<TOutput extends object = Record<strin
     preloadPromise: Promise<void> | null;
     syncCleanupFn: (() => void) | null;
     syncLoadSubsetFn: ((options: LoadSubsetOptions) => true | Promise<void>) | null;
+    syncUnloadSubsetFn: ((options: LoadSubsetOptions) => void) | null;
     private pendingLoadSubsetPromises;
     /**
      * Creates a new CollectionSyncManager instance
@@ -52,5 +53,10 @@ export declare class CollectionSyncManager<TOutput extends object = Record<strin
|
|
|
52
53
|
* Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.
|
|
53
54
|
*/
|
|
54
55
|
loadSubset(options: LoadSubsetOptions): Promise<void> | true;
|
|
56
|
+
/**
|
|
57
|
+
* Notifies the sync layer that a subset is no longer needed.
|
|
58
|
+
* @param options Options that identify what data is being unloaded
|
|
59
|
+
*/
|
|
60
|
+
unloadSubset(options: LoadSubsetOptions): void;
|
|
55
61
|
cleanup(): void;
|
|
56
62
|
}
|
|
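These two hunks add an optional `unloadSubset` handler to the sync contract, mirroring the existing `loadSubset`. A minimal sketch of a sync config that returns both handlers follows; it is illustrative only: `LoadSubsetOptions` is loosely redeclared, and `fetchRows` / `releaseServerSubscription` are hypothetical transport helpers, not package exports.

```ts
// Illustrative sketch, not the package's API surface.
type LoadSubsetOptions = Record<string, unknown> // loose stand-in for the package type
type Row = { id: string; title: string }

declare function fetchRows(options: LoadSubsetOptions): Promise<Array<Row>>
declare function releaseServerSubscription(options: LoadSubsetOptions): void

const onDemandSync = {
  syncMode: `on-demand`, // on-demand mode requires a loadSubset handler
  sync: {
    sync: (params: {
      begin: () => void
      write: (message: { type: `insert`; value: Row }) => void
      commit: () => void
      markReady: () => void
    }) => {
      const { begin, write, commit, markReady } = params
      markReady()
      return {
        // Called via loadSubset(); returning a promise lets the collection
        // report isLoadingSubset until the rows have been written.
        loadSubset: async (options: LoadSubsetOptions): Promise<void> => {
          const rows = await fetchRows(options)
          begin()
          for (const value of rows) write({ type: `insert`, value })
          commit()
        },
        // Added in this diff: called via unloadSubset() once the subset is no
        // longer needed, e.g. to drop a server-side subscription.
        unloadSubset: (options: LoadSubsetOptions): void => {
          releaseServerSubscription(options)
        },
      }
    },
  },
}
```

Note that `unloadSubset` stays synchronous, matching the `(options: LoadSubsetOptions) => void` declaration above, while the promise returned from `loadSubset` is what gets tracked for the loading state.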
@@ -9,6 +9,7 @@ class CollectionSyncManager {
     this.preloadPromise = null;
     this.syncCleanupFn = null;
     this.syncLoadSubsetFn = null;
+    this.syncUnloadSubsetFn = null;
     this.pendingLoadSubsetPromises = /* @__PURE__ */ new Set();
     this.config = config;
     this.id = id;
@@ -112,6 +113,7 @@ class CollectionSyncManager {
       );
       this.syncCleanupFn = syncRes?.cleanup ?? null;
       this.syncLoadSubsetFn = syncRes?.loadSubset ?? null;
+      this.syncUnloadSubsetFn = syncRes?.unloadSubset ?? null;
       if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {
         throw new CollectionConfigurationError(
           `Collection "${this.id}" is configured with syncMode "on-demand" but the sync function did not return a loadSubset handler. Either provide a loadSubset handler or use syncMode "eager".`
@@ -213,6 +215,15 @@ class CollectionSyncManager {
     }
     return true;
   }
+  /**
+   * Notifies the sync layer that a subset is no longer needed.
+   * @param options Options that identify what data is being unloaded
+   */
+  unloadSubset(options) {
+    if (this.syncUnloadSubsetFn) {
+      this.syncUnloadSubsetFn(options);
+    }
+  }
   cleanup() {
     try {
       if (this.syncCleanupFn) {
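On the implementation side, `unloadSubset` forwards the options only when the sync function returned a handler and is otherwise a no-op, so any bookkeeping about which subsets are still in use lives in the sync layer. A minimal reference-counting sketch, assuming subsets can be keyed by their serialized options (`startServerStream` / `stopServerStream` are hypothetical):

```ts
// Sketch of one possible sync-layer strategy; not part of @tanstack/db.
type LoadSubsetOptions = Record<string, unknown> // loose stand-in for the package type

declare function startServerStream(options: LoadSubsetOptions): Promise<void>
declare function stopServerStream(options: LoadSubsetOptions): void

const refCounts = new Map<string, number>()
const keyFor = (options: LoadSubsetOptions) => JSON.stringify(options) // assumed keying

function loadSubset(options: LoadSubsetOptions): Promise<void> | true {
  const key = keyFor(options)
  const count = (refCounts.get(key) ?? 0) + 1
  refCounts.set(key, count)
  // Only the first interested query starts server work; later callers get `true`
  // back, which the manager treats as "no async work to track".
  return count === 1 ? startServerStream(options) : true
}

function unloadSubset(options: LoadSubsetOptions): void {
  const key = keyFor(options)
  const count = (refCounts.get(key) ?? 1) - 1
  if (count <= 0) {
    refCounts.delete(key)
    stopServerStream(options) // last consumer gone: tear the stream down
  } else {
    refCounts.set(key, count)
  }
}
```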
@@ -1 +1 @@
-
{"version":3,"file":"sync.js","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionConfigurationError,\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from \"../errors\"\nimport { deepEquals } from \"../utils\"\nimport { LIVE_QUERY_INTERNAL } from \"../query/live/internal.js\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n ChangeMessage,\n CleanupFn,\n CollectionConfig,\n LoadSubsetOptions,\n SyncConfigRes,\n} from \"../types\"\nimport type { CollectionImpl } from \"./index.js\"\nimport type { CollectionStateManager } from \"./state\"\nimport type { CollectionLifecycleManager } from \"./lifecycle\"\nimport type { CollectionEventsManager } from \"./events.js\"\nimport type { LiveQueryCollectionUtils } from \"../query/live/collection-config-builder.js\"\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private _events!: CollectionEventsManager\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n private syncMode: `eager` | `on-demand`\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n public syncLoadSubsetFn:\n | ((options: LoadSubsetOptions) => true | Promise<void>)\n | null = null\n\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n this.syncMode = config.syncMode ?? 
`eager`\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n this._events = deps.events\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const syncRes = normalizeSyncFnResult(\n this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n this.state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (messageWithoutKey: Omit<ChangeMessage<TOutput>, `key`>) => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n const key = this.config.getKey(messageWithoutKey.value)\n\n let messageType = messageWithoutKey.type\n\n // Check if an item with this key already exists when inserting\n if (messageWithoutKey.type === `insert`) {\n const insertingIntoExistingSynced = this.state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n const existingValue = this.state.syncedData.get(key)\n if (\n existingValue !== undefined &&\n deepEquals(existingValue, messageWithoutKey.value)\n ) {\n // The \"insert\" is an echo of a value we already have locally.\n // Treat it as an update so we preserve optimistic intent without\n // throwing a duplicate-key error during reconciliation.\n messageType = `update`\n } else {\n const utils = this.config\n .utils as Partial<LiveQueryCollectionUtils>\n const internal = utils[LIVE_QUERY_INTERNAL]\n throw new DuplicateKeySyncError(key, this.id, {\n hasCustomGetKey: internal?.hasCustomGetKey ?? false,\n hasJoins: internal?.hasJoins ?? 
false,\n })\n }\n }\n }\n\n const message: ChangeMessage<TOutput> = {\n ...messageWithoutKey,\n type: messageType,\n key,\n }\n pendingTransaction.operations.push(message)\n\n if (messageType === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n this.state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n\n // Capture optimistic state NOW to preserve it even if transactions complete\n // before this truncate transaction is committed\n pendingTransaction.optimisticSnapshot = {\n upserts: new Map(this.state.optimisticUpserts),\n deletes: new Set(this.state.optimisticDeletes),\n }\n },\n })\n )\n\n // Store cleanup function if provided\n this.syncCleanupFn = syncRes?.cleanup ?? null\n\n // Store loadSubset function if provided\n this.syncLoadSubsetFn = syncRes?.loadSubset ?? null\n\n // Validate: on-demand mode requires a loadSubset function\n if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {\n throw new CollectionConfigurationError(\n `Collection \"${this.id}\" is configured with syncMode \"on-demand\" but the sync function did not return a loadSubset handler. ` +\n `Either provide a loadSubset handler or use syncMode \"eager\".`\n )\n }\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n // Warn when calling preload on an on-demand collection\n if (this.syncMode === `on-demand`) {\n console.warn(\n `${this.id ? `[${this.id}] ` : ``}Calling .preload() on a collection with syncMode \"on-demand\" is a no-op. ` +\n `In on-demand mode, data is only loaded when queries request it. ` +\n `Instead, create a live query and call .preload() on that to load the specific data you need. 
` +\n `See https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync for more details.`\n )\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n /**\n * Gets whether the collection is currently loading more data\n */\n public get isLoadingSubset(): boolean {\n return this.pendingLoadSubsetPromises.size > 0\n }\n\n /**\n * Tracks a load promise for isLoadingSubset state.\n * @internal This is for internal coordination (e.g., live-query glue code), not for general use.\n */\n public trackLoadPromise(promise: Promise<void>): void {\n const loadingStarting = !this.isLoadingSubset\n this.pendingLoadSubsetPromises.add(promise)\n\n if (loadingStarting) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: true,\n previousIsLoadingSubset: false,\n loadingSubsetTransition: `start`,\n })\n }\n\n promise.finally(() => {\n const loadingEnding =\n this.pendingLoadSubsetPromises.size === 1 &&\n this.pendingLoadSubsetPromises.has(promise)\n this.pendingLoadSubsetPromises.delete(promise)\n\n if (loadingEnding) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: false,\n previousIsLoadingSubset: true,\n loadingSubsetTransition: `end`,\n })\n }\n })\n }\n\n /**\n * Requests the sync layer to load more data.\n * @param options Options to control what data is being loaded\n * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.\n * Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.\n */\n public loadSubset(options: LoadSubsetOptions): Promise<void> | true {\n // Bypass loadSubset when syncMode is 'eager'\n if (this.syncMode === `eager`) {\n return true\n }\n\n if (this.syncLoadSubsetFn) {\n const result = this.syncLoadSubsetFn(options)\n // If the result is a promise, track it\n if (result instanceof Promise) {\n this.trackLoadPromise(result)\n return result\n }\n }\n\n return true\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n }\n}\n\nfunction normalizeSyncFnResult(result: void | CleanupFn | SyncConfigRes) {\n if (typeof result === `function`) {\n return { cleanup: result }\n }\n\n if (typeof result === `object`) {\n return result\n }\n\n return 
undefined\n}\n"],"names":[],"mappings":";;;AA0BO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAoBA,YAAY,QAAkD,IAAY;AAX1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAC5C,SAAO,mBAEI;AAEX,SAAQ,gDAAoD,IAAA;AAM1D,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA,EAEA,QAAQ,MAKL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,UAAU;AAAA,QACd,KAAK,OAAO,KAAK,KAAK;AAAA,UACpB,YAAY,KAAK;AAAA,UACjB,OAAO,MAAM;AACX,iBAAK,MAAM,0BAA0B,KAAK;AAAA,cACxC,WAAW;AAAA,cACX,YAAY,CAAA;AAAA,cACZ,iCAAiB,IAAA;AAAA,YAAI,CACtB;AAAA,UACH;AAAA,UACA,OAAO,CAAC,sBAA2D;AACjE,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AACA,kBAAM,MAAM,KAAK,OAAO,OAAO,kBAAkB,KAAK;AAEtD,gBAAI,cAAc,kBAAkB;AAGpC,gBAAI,kBAAkB,SAAS,UAAU;AACvC,oBAAM,8BAA8B,KAAK,MAAM,WAAW,IAAI,GAAG;AACjE,oBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,oBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,kBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,sBAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,GAAG;AACnD,oBACE,kBAAkB,UAClB,WAAW,eAAe,kBAAkB,KAAK,GACjD;AAIA,gCAAc;AAAA,gBAChB,OAAO;AACL,wBAAM,QAAQ,KAAK,OAChB;AACH,wBAAM,WAAW,MAAM,mBAAmB;AAC1C,wBAAM,IAAI,sBAAsB,KAAK,KAAK,IAAI;AAAA,oBAC5C,iBAAiB,UAAU,mBAAmB;AAAA,oBAC9C,UAAU,UAAU,YAAY;AAAA,kBAAA,CACjC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAkC;AAAA,cACtC,GAAG;AAAA,cACH,MAAM;AAAA,cACN;AAAA,YAAA;AAEF,+BAAmB,WAAW,KAAK,OAAO;AAE1C,gBAAI,gBAAgB,UAAU;AAC5B,iCAAmB,YAAY,IAAI,GAAG;AAAA,YACxC;AAAA,UACF;AAAA,UACA,QAAQ,MAAM;AACZ,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,oCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,qCAAA;AAAA,YACZ;AAEA,+BAAmB,YAAY;AAE/B,iBAAK,MAAM,0BAAA;AAAA,UACb;AAAA,UACA,WAAW,MAAM;AACf,iBAAK,UAAU,UAAA;AAAA,UACjB;AAAA,UACA,UAAU,MAAM;AACd,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AAGA,+BAAmB,aAAa,CAAA;AAChC,+BAAmB,YAAY,MAAA;AAO/B,+BAAmB,WAAW;AAI9B,+BAAmB,qBAAqB;AAAA,cACtC,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,cAC7C,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,YAAA;AAAA,UAEjD;AAAA,QAAA,CACD;AAAA,MAAA;AAIH,WAAK,gBAAgB,SAAS,WAAW;AAGzC,WAAK,mBAAmB,SAAS,cAAc;AAG/C,UAAI,KAAK,aAAa,eAAe,CAAC,KAAK,kBAAkB;AAC3D,cAAM,IAAI;AAAA,UACR,eAAe,KAAK,EAAE;AAAA,QAAA;AAAA,MAG1B;AAAA,IACF,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,GAAG,KAAK,KAAK,IAAI,KAAK,EAAE,OAAO,EAAE;AAAA,MAAA;AAAA,IAKrC;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,IAAI,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAW,kBAA2B;AACpC,WAAO,KAAK,0BAA0B,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,iBAAiB,SAA8B;AACpD,UAAM,kBAAkB,CAAC,KAAK;AAC9B,SAAK,0BAA0B,IAAI,OAAO;AAE1C,QAAI,iBAAiB;AACnB,WAAK,QAAQ,KAAK,wBAAwB;AAAA,QACxC,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,iBAAiB;AAAA,QACjB,yBAAyB;AAAA,QACzB,yBAAyB;AAAA,MAAA,CAC1B;AAAA,IACH;AAEA,YAAQ,QAAQ,MAAM;AACpB,YAAM,gBACJ,KAAK,0BAA0
B,SAAS,KACxC,KAAK,0BAA0B,IAAI,OAAO;AAC5C,WAAK,0BAA0B,OAAO,OAAO;AAE7C,UAAI,eAAe;AACjB,aAAK,QAAQ,KAAK,wBAAwB;AAAA,UACxC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,iBAAiB;AAAA,UACjB,yBAAyB;AAAA,UACzB,yBAAyB;AAAA,QAAA,CAC1B;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,WAAW,SAAkD;AAElE,QAAI,KAAK,aAAa,SAAS;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,kBAAkB;AACzB,YAAM,SAAS,KAAK,iBAAiB,OAAO;AAE5C,UAAI,kBAAkB,SAAS;AAC7B,aAAK,iBAAiB,MAAM;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAI,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAI,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;AAEA,SAAS,sBAAsB,QAA0C;AACvE,MAAI,OAAO,WAAW,YAAY;AAChC,WAAO,EAAE,SAAS,OAAA;AAAA,EACpB;AAEA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;"}
+
{"version":3,"file":"sync.js","sources":["../../../src/collection/sync.ts"],"sourcesContent":["import {\n CollectionConfigurationError,\n CollectionIsInErrorStateError,\n DuplicateKeySyncError,\n NoPendingSyncTransactionCommitError,\n NoPendingSyncTransactionWriteError,\n SyncCleanupError,\n SyncTransactionAlreadyCommittedError,\n SyncTransactionAlreadyCommittedWriteError,\n} from \"../errors\"\nimport { deepEquals } from \"../utils\"\nimport { LIVE_QUERY_INTERNAL } from \"../query/live/internal.js\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n ChangeMessage,\n CleanupFn,\n CollectionConfig,\n LoadSubsetOptions,\n SyncConfigRes,\n} from \"../types\"\nimport type { CollectionImpl } from \"./index.js\"\nimport type { CollectionStateManager } from \"./state\"\nimport type { CollectionLifecycleManager } from \"./lifecycle\"\nimport type { CollectionEventsManager } from \"./events.js\"\nimport type { LiveQueryCollectionUtils } from \"../query/live/collection-config-builder.js\"\n\nexport class CollectionSyncManager<\n TOutput extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = StandardSchemaV1,\n TInput extends object = TOutput,\n> {\n private collection!: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n private state!: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n private lifecycle!: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n private _events!: CollectionEventsManager\n private config!: CollectionConfig<TOutput, TKey, TSchema>\n private id: string\n private syncMode: `eager` | `on-demand`\n\n public preloadPromise: Promise<void> | null = null\n public syncCleanupFn: (() => void) | null = null\n public syncLoadSubsetFn:\n | ((options: LoadSubsetOptions) => true | Promise<void>)\n | null = null\n public syncUnloadSubsetFn: ((options: LoadSubsetOptions) => void) | null =\n null\n\n private pendingLoadSubsetPromises: Set<Promise<void>> = new Set()\n\n /**\n * Creates a new CollectionSyncManager instance\n */\n constructor(config: CollectionConfig<TOutput, TKey, TSchema>, id: string) {\n this.config = config\n this.id = id\n this.syncMode = config.syncMode ?? 
`eager`\n }\n\n setDeps(deps: {\n collection: CollectionImpl<TOutput, TKey, any, TSchema, TInput>\n state: CollectionStateManager<TOutput, TKey, TSchema, TInput>\n lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>\n events: CollectionEventsManager\n }) {\n this.collection = deps.collection\n this.state = deps.state\n this.lifecycle = deps.lifecycle\n this._events = deps.events\n }\n\n /**\n * Start the sync process for this collection\n * This is called when the collection is first accessed or preloaded\n */\n public startSync(): void {\n if (\n this.lifecycle.status !== `idle` &&\n this.lifecycle.status !== `cleaned-up`\n ) {\n return // Already started or in progress\n }\n\n this.lifecycle.setStatus(`loading`)\n\n try {\n const syncRes = normalizeSyncFnResult(\n this.config.sync.sync({\n collection: this.collection,\n begin: () => {\n this.state.pendingSyncedTransactions.push({\n committed: false,\n operations: [],\n deletedKeys: new Set(),\n })\n },\n write: (messageWithoutKey: Omit<ChangeMessage<TOutput>, `key`>) => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n const key = this.config.getKey(messageWithoutKey.value)\n\n let messageType = messageWithoutKey.type\n\n // Check if an item with this key already exists when inserting\n if (messageWithoutKey.type === `insert`) {\n const insertingIntoExistingSynced = this.state.syncedData.has(key)\n const hasPendingDeleteForKey =\n pendingTransaction.deletedKeys.has(key)\n const isTruncateTransaction = pendingTransaction.truncate === true\n // Allow insert after truncate in the same transaction even if it existed in syncedData\n if (\n insertingIntoExistingSynced &&\n !hasPendingDeleteForKey &&\n !isTruncateTransaction\n ) {\n const existingValue = this.state.syncedData.get(key)\n if (\n existingValue !== undefined &&\n deepEquals(existingValue, messageWithoutKey.value)\n ) {\n // The \"insert\" is an echo of a value we already have locally.\n // Treat it as an update so we preserve optimistic intent without\n // throwing a duplicate-key error during reconciliation.\n messageType = `update`\n } else {\n const utils = this.config\n .utils as Partial<LiveQueryCollectionUtils>\n const internal = utils[LIVE_QUERY_INTERNAL]\n throw new DuplicateKeySyncError(key, this.id, {\n hasCustomGetKey: internal?.hasCustomGetKey ?? false,\n hasJoins: internal?.hasJoins ?? 
false,\n })\n }\n }\n }\n\n const message: ChangeMessage<TOutput> = {\n ...messageWithoutKey,\n type: messageType,\n key,\n }\n pendingTransaction.operations.push(message)\n\n if (messageType === `delete`) {\n pendingTransaction.deletedKeys.add(key)\n }\n },\n commit: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionCommitError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedError()\n }\n\n pendingTransaction.committed = true\n\n this.state.commitPendingTransactions()\n },\n markReady: () => {\n this.lifecycle.markReady()\n },\n truncate: () => {\n const pendingTransaction =\n this.state.pendingSyncedTransactions[\n this.state.pendingSyncedTransactions.length - 1\n ]\n if (!pendingTransaction) {\n throw new NoPendingSyncTransactionWriteError()\n }\n if (pendingTransaction.committed) {\n throw new SyncTransactionAlreadyCommittedWriteError()\n }\n\n // Clear all operations from the current transaction\n pendingTransaction.operations = []\n pendingTransaction.deletedKeys.clear()\n\n // Mark the transaction as a truncate operation. During commit, this triggers:\n // - Delete events for all previously synced keys (excluding optimistic-deleted keys)\n // - Clearing of syncedData/syncedMetadata\n // - Subsequent synced ops applied on the fresh base\n // - Finally, optimistic mutations re-applied on top (single batch)\n pendingTransaction.truncate = true\n\n // Capture optimistic state NOW to preserve it even if transactions complete\n // before this truncate transaction is committed\n pendingTransaction.optimisticSnapshot = {\n upserts: new Map(this.state.optimisticUpserts),\n deletes: new Set(this.state.optimisticDeletes),\n }\n },\n })\n )\n\n // Store cleanup function if provided\n this.syncCleanupFn = syncRes?.cleanup ?? null\n\n // Store loadSubset function if provided\n this.syncLoadSubsetFn = syncRes?.loadSubset ?? null\n\n // Store unloadSubset function if provided\n this.syncUnloadSubsetFn = syncRes?.unloadSubset ?? null\n\n // Validate: on-demand mode requires a loadSubset function\n if (this.syncMode === `on-demand` && !this.syncLoadSubsetFn) {\n throw new CollectionConfigurationError(\n `Collection \"${this.id}\" is configured with syncMode \"on-demand\" but the sync function did not return a loadSubset handler. ` +\n `Either provide a loadSubset handler or use syncMode \"eager\".`\n )\n }\n } catch (error) {\n this.lifecycle.setStatus(`error`)\n throw error\n }\n }\n\n /**\n * Preload the collection data by starting sync if not already started\n * Multiple concurrent calls will share the same promise\n */\n public preload(): Promise<void> {\n if (this.preloadPromise) {\n return this.preloadPromise\n }\n\n // Warn when calling preload on an on-demand collection\n if (this.syncMode === `on-demand`) {\n console.warn(\n `${this.id ? `[${this.id}] ` : ``}Calling .preload() on a collection with syncMode \"on-demand\" is a no-op. ` +\n `In on-demand mode, data is only loaded when queries request it. ` +\n `Instead, create a live query and call .preload() on that to load the specific data you need. 
` +\n `See https://tanstack.com/blog/tanstack-db-0.5-query-driven-sync for more details.`\n )\n }\n\n this.preloadPromise = new Promise<void>((resolve, reject) => {\n if (this.lifecycle.status === `ready`) {\n resolve()\n return\n }\n\n if (this.lifecycle.status === `error`) {\n reject(new CollectionIsInErrorStateError())\n return\n }\n\n // Register callback BEFORE starting sync to avoid race condition\n this.lifecycle.onFirstReady(() => {\n resolve()\n })\n\n // Start sync if collection hasn't started yet or was cleaned up\n if (\n this.lifecycle.status === `idle` ||\n this.lifecycle.status === `cleaned-up`\n ) {\n try {\n this.startSync()\n } catch (error) {\n reject(error)\n return\n }\n }\n })\n\n return this.preloadPromise\n }\n\n /**\n * Gets whether the collection is currently loading more data\n */\n public get isLoadingSubset(): boolean {\n return this.pendingLoadSubsetPromises.size > 0\n }\n\n /**\n * Tracks a load promise for isLoadingSubset state.\n * @internal This is for internal coordination (e.g., live-query glue code), not for general use.\n */\n public trackLoadPromise(promise: Promise<void>): void {\n const loadingStarting = !this.isLoadingSubset\n this.pendingLoadSubsetPromises.add(promise)\n\n if (loadingStarting) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: true,\n previousIsLoadingSubset: false,\n loadingSubsetTransition: `start`,\n })\n }\n\n promise.finally(() => {\n const loadingEnding =\n this.pendingLoadSubsetPromises.size === 1 &&\n this.pendingLoadSubsetPromises.has(promise)\n this.pendingLoadSubsetPromises.delete(promise)\n\n if (loadingEnding) {\n this._events.emit(`loadingSubset:change`, {\n type: `loadingSubset:change`,\n collection: this.collection,\n isLoadingSubset: false,\n previousIsLoadingSubset: true,\n loadingSubsetTransition: `end`,\n })\n }\n })\n }\n\n /**\n * Requests the sync layer to load more data.\n * @param options Options to control what data is being loaded\n * @returns If data loading is asynchronous, this method returns a promise that resolves when the data is loaded.\n * Returns true if no sync function is configured, if syncMode is 'eager', or if there is no work to do.\n */\n public loadSubset(options: LoadSubsetOptions): Promise<void> | true {\n // Bypass loadSubset when syncMode is 'eager'\n if (this.syncMode === `eager`) {\n return true\n }\n\n if (this.syncLoadSubsetFn) {\n const result = this.syncLoadSubsetFn(options)\n // If the result is a promise, track it\n if (result instanceof Promise) {\n this.trackLoadPromise(result)\n return result\n }\n }\n\n return true\n }\n\n /**\n * Notifies the sync layer that a subset is no longer needed.\n * @param options Options that identify what data is being unloaded\n */\n public unloadSubset(options: LoadSubsetOptions): void {\n if (this.syncUnloadSubsetFn) {\n this.syncUnloadSubsetFn(options)\n }\n }\n\n public cleanup(): void {\n try {\n if (this.syncCleanupFn) {\n this.syncCleanupFn()\n this.syncCleanupFn = null\n }\n } catch (error) {\n // Re-throw in a microtask to surface the error after cleanup completes\n queueMicrotask(() => {\n if (error instanceof Error) {\n // Preserve the original error and stack trace\n const wrappedError = new SyncCleanupError(this.id, error)\n wrappedError.cause = error\n wrappedError.stack = error.stack\n throw wrappedError\n } else {\n throw new SyncCleanupError(this.id, error as Error | string)\n }\n })\n }\n this.preloadPromise = null\n }\n}\n\nfunction 
normalizeSyncFnResult(result: void | CleanupFn | SyncConfigRes) {\n if (typeof result === `function`) {\n return { cleanup: result }\n }\n\n if (typeof result === `object`) {\n return result\n }\n\n return undefined\n}\n"],"names":[],"mappings":";;;AA0BO,MAAM,sBAKX;AAAA;AAAA;AAAA;AAAA,EAsBA,YAAY,QAAkD,IAAY;AAb1E,SAAO,iBAAuC;AAC9C,SAAO,gBAAqC;AAC5C,SAAO,mBAEI;AACX,SAAO,qBACL;AAEF,SAAQ,gDAAoD,IAAA;AAM1D,SAAK,SAAS;AACd,SAAK,KAAK;AACV,SAAK,WAAW,OAAO,YAAY;AAAA,EACrC;AAAA,EAEA,QAAQ,MAKL;AACD,SAAK,aAAa,KAAK;AACvB,SAAK,QAAQ,KAAK;AAClB,SAAK,YAAY,KAAK;AACtB,SAAK,UAAU,KAAK;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,YAAkB;AACvB,QACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA;AAAA,IACF;AAEA,SAAK,UAAU,UAAU,SAAS;AAElC,QAAI;AACF,YAAM,UAAU;AAAA,QACd,KAAK,OAAO,KAAK,KAAK;AAAA,UACpB,YAAY,KAAK;AAAA,UACjB,OAAO,MAAM;AACX,iBAAK,MAAM,0BAA0B,KAAK;AAAA,cACxC,WAAW;AAAA,cACX,YAAY,CAAA;AAAA,cACZ,iCAAiB,IAAA;AAAA,YAAI,CACtB;AAAA,UACH;AAAA,UACA,OAAO,CAAC,sBAA2D;AACjE,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AACA,kBAAM,MAAM,KAAK,OAAO,OAAO,kBAAkB,KAAK;AAEtD,gBAAI,cAAc,kBAAkB;AAGpC,gBAAI,kBAAkB,SAAS,UAAU;AACvC,oBAAM,8BAA8B,KAAK,MAAM,WAAW,IAAI,GAAG;AACjE,oBAAM,yBACJ,mBAAmB,YAAY,IAAI,GAAG;AACxC,oBAAM,wBAAwB,mBAAmB,aAAa;AAE9D,kBACE,+BACA,CAAC,0BACD,CAAC,uBACD;AACA,sBAAM,gBAAgB,KAAK,MAAM,WAAW,IAAI,GAAG;AACnD,oBACE,kBAAkB,UAClB,WAAW,eAAe,kBAAkB,KAAK,GACjD;AAIA,gCAAc;AAAA,gBAChB,OAAO;AACL,wBAAM,QAAQ,KAAK,OAChB;AACH,wBAAM,WAAW,MAAM,mBAAmB;AAC1C,wBAAM,IAAI,sBAAsB,KAAK,KAAK,IAAI;AAAA,oBAC5C,iBAAiB,UAAU,mBAAmB;AAAA,oBAC9C,UAAU,UAAU,YAAY;AAAA,kBAAA,CACjC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAEA,kBAAM,UAAkC;AAAA,cACtC,GAAG;AAAA,cACH,MAAM;AAAA,cACN;AAAA,YAAA;AAEF,+BAAmB,WAAW,KAAK,OAAO;AAE1C,gBAAI,gBAAgB,UAAU;AAC5B,iCAAmB,YAAY,IAAI,GAAG;AAAA,YACxC;AAAA,UACF;AAAA,UACA,QAAQ,MAAM;AACZ,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,oCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,qCAAA;AAAA,YACZ;AAEA,+BAAmB,YAAY;AAE/B,iBAAK,MAAM,0BAAA;AAAA,UACb;AAAA,UACA,WAAW,MAAM;AACf,iBAAK,UAAU,UAAA;AAAA,UACjB;AAAA,UACA,UAAU,MAAM;AACd,kBAAM,qBACJ,KAAK,MAAM,0BACT,KAAK,MAAM,0BAA0B,SAAS,CAChD;AACF,gBAAI,CAAC,oBAAoB;AACvB,oBAAM,IAAI,mCAAA;AAAA,YACZ;AACA,gBAAI,mBAAmB,WAAW;AAChC,oBAAM,IAAI,0CAAA;AAAA,YACZ;AAGA,+BAAmB,aAAa,CAAA;AAChC,+BAAmB,YAAY,MAAA;AAO/B,+BAAmB,WAAW;AAI9B,+BAAmB,qBAAqB;AAAA,cACtC,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,cAC7C,SAAS,IAAI,IAAI,KAAK,MAAM,iBAAiB;AAAA,YAAA;AAAA,UAEjD;AAAA,QAAA,CACD;AAAA,MAAA;AAIH,WAAK,gBAAgB,SAAS,WAAW;AAGzC,WAAK,mBAAmB,SAAS,cAAc;AAG/C,WAAK,qBAAqB,SAAS,gBAAgB;AAGnD,UAAI,KAAK,aAAa,eAAe,CAAC,KAAK,kBAAkB;AAC3D,cAAM,IAAI;AAAA,UACR,eAAe,KAAK,EAAE;AAAA,QAAA;AAAA,MAG1B;AAAA,IACF,SAAS,OAAO;AACd,WAAK,UAAU,UAAU,OAAO;AAChC,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,UAAyB;AAC9B,QAAI,KAAK,gBAAgB;AACvB,aAAO,KAAK;AAAA,IACd;AAGA,QAAI,KAAK,aAAa,aAAa;AACjC,cAAQ;AAAA,QACN,GAAG,KAAK,KAAK,IAAI,KAAK,EAAE,OAAO,EAAE;AAAA,MAAA;AAAA,IAKrC;AAEA,SAAK,iBAAiB,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3D,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,KAAK,UAAU,WAAW,SAAS;AACrC,eAAO,IAAI,+BAA+B;AAC1C;AAAA,MACF;AAGA,WAAK,UAAU,aAAa,MAAM;AAChC,gBAAA;AAAA,MACF,CAAC;AAGD,UACE,KAAK,UAAU,WAAW,UAC1B,KAAK,UAAU,WAAW,cAC1B;AACA,YAAI;AACF,eAAK,UAAA;AAAA,QACP,SAAS,OAAO;AACd,iBAAO,KAAK;AACZ;AAAA,QACF;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,IAAW,kBAA2B;AACpC,WAAO,KAAK,0BAA0B,OAAO;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,iBAAiB,SAA8B;AACpD,UAAM,kB
AAkB,CAAC,KAAK;AAC9B,SAAK,0BAA0B,IAAI,OAAO;AAE1C,QAAI,iBAAiB;AACnB,WAAK,QAAQ,KAAK,wBAAwB;AAAA,QACxC,MAAM;AAAA,QACN,YAAY,KAAK;AAAA,QACjB,iBAAiB;AAAA,QACjB,yBAAyB;AAAA,QACzB,yBAAyB;AAAA,MAAA,CAC1B;AAAA,IACH;AAEA,YAAQ,QAAQ,MAAM;AACpB,YAAM,gBACJ,KAAK,0BAA0B,SAAS,KACxC,KAAK,0BAA0B,IAAI,OAAO;AAC5C,WAAK,0BAA0B,OAAO,OAAO;AAE7C,UAAI,eAAe;AACjB,aAAK,QAAQ,KAAK,wBAAwB;AAAA,UACxC,MAAM;AAAA,UACN,YAAY,KAAK;AAAA,UACjB,iBAAiB;AAAA,UACjB,yBAAyB;AAAA,UACzB,yBAAyB;AAAA,QAAA,CAC1B;AAAA,MACH;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,WAAW,SAAkD;AAElE,QAAI,KAAK,aAAa,SAAS;AAC7B,aAAO;AAAA,IACT;AAEA,QAAI,KAAK,kBAAkB;AACzB,YAAM,SAAS,KAAK,iBAAiB,OAAO;AAE5C,UAAI,kBAAkB,SAAS;AAC7B,aAAK,iBAAiB,MAAM;AAC5B,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMO,aAAa,SAAkC;AACpD,QAAI,KAAK,oBAAoB;AAC3B,WAAK,mBAAmB,OAAO;AAAA,IACjC;AAAA,EACF;AAAA,EAEO,UAAgB;AACrB,QAAI;AACF,UAAI,KAAK,eAAe;AACtB,aAAK,cAAA;AACL,aAAK,gBAAgB;AAAA,MACvB;AAAA,IACF,SAAS,OAAO;AAEd,qBAAe,MAAM;AACnB,YAAI,iBAAiB,OAAO;AAE1B,gBAAM,eAAe,IAAI,iBAAiB,KAAK,IAAI,KAAK;AACxD,uBAAa,QAAQ;AACrB,uBAAa,QAAQ,MAAM;AAC3B,gBAAM;AAAA,QACR,OAAO;AACL,gBAAM,IAAI,iBAAiB,KAAK,IAAI,KAAuB;AAAA,QAC7D;AAAA,MACF,CAAC;AAAA,IACH;AACA,SAAK,iBAAiB;AAAA,EACxB;AACF;AAEA,SAAS,sBAAsB,QAA0C;AACvE,MAAI,OAAO,WAAW,YAAY;AAChC,WAAO,EAAE,SAAS,OAAA;AAAA,EACpB;AAEA,MAAI,OAAO,WAAW,UAAU;AAC9B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;"}
@@ -117,6 +117,10 @@ export declare class CollectionConfigBuilder<TContext extends Context, TResult e
      * Called when the scheduler clears a context (e.g., transaction rollback/abort).
      */
     clearPendingGraphRun(contextId: SchedulerContextId): void;
+    /**
+     * Returns true if this builder has a pending graph run for the given context.
+     */
+    hasPendingGraphRun(contextId: SchedulerContextId): boolean;
     /**
      * Executes a pending graph run. Called by the scheduler when dependencies are satisfied.
      * Clears the pending state BEFORE execution so that any re-schedules during the run
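The new `hasPendingGraphRun` declaration sits alongside `clearPendingGraphRun` and lets callers ask whether a builder still has deferred work for a given scheduler context. A toy model of that bookkeeping is sketched below; it is not the package implementation, `SchedulerContextId` is loosely redeclared, and `runPendingGraphRun` is an invented name for the execution step the comments describe.

```ts
// Toy model of the pending-run contract, for illustration only.
type SchedulerContextId = string // loose stand-in for the package type

class PendingRunTracker {
  private pendingGraphRuns = new Map<SchedulerContextId, () => void>()

  scheduleGraphRun(run: () => void, options: { contextId: SchedulerContextId }): void {
    this.pendingGraphRuns.set(options.contextId, run)
  }

  hasPendingGraphRun(contextId: SchedulerContextId): boolean {
    return this.pendingGraphRuns.has(contextId)
  }

  clearPendingGraphRun(contextId: SchedulerContextId): void {
    this.pendingGraphRuns.delete(contextId)
  }

  // Invented name for the execution step described in the doc comments above.
  runPendingGraphRun(contextId: SchedulerContextId): void {
    const run = this.pendingGraphRuns.get(contextId)
    // Clear the pending state before running so a re-schedule during the run
    // is recorded as new pending work rather than being wiped afterwards.
    this.pendingGraphRuns.delete(contextId)
    run?.()
  }
}
```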
@@ -215,6 +215,13 @@ class CollectionConfigBuilder {
       deps.delete(this);
       return Array.from(deps);
     })();
+    if (contextId) {
+      for (const dep of dependentBuilders) {
+        if (typeof dep.scheduleGraphRun === `function`) {
+          dep.scheduleGraphRun(void 0, { contextId });
+        }
+      }
+    }
     if (!this.currentSyncConfig || !this.currentSyncState) {
       throw new Error(
         `scheduleGraphRun called without active sync session. This should not happen.`
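This hunk makes `scheduleGraphRun` fan out to related builders whenever a `contextId` (transaction) is present, so each of them registers a pending run in the same context. The toy below illustrates that observable effect under a simplified model; the class and variable names are invented for the example.

```ts
// Simplified illustration of context-scoped fan-out, not the package internals.
type SchedulerContextId = string // loose stand-in for the package type

class ToyBuilder {
  private pending = new Set<SchedulerContextId>()

  constructor(
    public name: string,
    private related: Array<ToyBuilder> = []
  ) {}

  scheduleGraphRun(
    _run: (() => void) | undefined,
    options: { contextId: SchedulerContextId }
  ): void {
    if (this.pending.has(options.contextId)) return // already queued for this context
    this.pending.add(options.contextId)
    // Fan out so related builders also have a pending run in the same context.
    for (const dep of this.related) {
      dep.scheduleGraphRun(undefined, options)
    }
  }

  hasPendingGraphRun(contextId: SchedulerContextId): boolean {
    return this.pending.has(contextId)
  }
}

// Scheduling `orders` inside transaction `tx1` also marks `customers` as pending,
// so both are flushed as part of the same context.
const customers = new ToyBuilder(`customers`)
const orders = new ToyBuilder(`orders`, [customers])
orders.scheduleGraphRun(undefined, { contextId: `tx1` })
console.log(customers.hasPendingGraphRun(`tx1`)) // true
```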
@@ -247,6 +254,12 @@ class CollectionConfigBuilder {
   clearPendingGraphRun(contextId) {
     this.pendingGraphRuns.delete(contextId);
   }
+  /**
+   * Returns true if this builder has a pending graph run for the given context.
+   */
+  hasPendingGraphRun(contextId) {
+    return this.pendingGraphRuns.has(contextId);
+  }
   /**
    * Executes a pending graph run. Called by the scheduler when dependencies are satisfied.
    * Clears the pending state BEFORE execution so that any re-schedules during the run