@tanstack/db 0.4.17 → 0.4.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/collection/changes.cjs +1 -1
- package/dist/cjs/collection/changes.cjs.map +1 -1
- package/dist/cjs/collection/subscription.cjs +55 -5
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/collection/subscription.d.cts +5 -2
- package/dist/cjs/errors.cjs +8 -0
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +8 -0
- package/dist/cjs/index.cjs +1 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.cjs +40 -0
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.cjs +3 -6
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.cjs +20 -34
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/esm/collection/changes.js +1 -1
- package/dist/esm/collection/changes.js.map +1 -1
- package/dist/esm/collection/subscription.d.ts +5 -2
- package/dist/esm/collection/subscription.js +56 -6
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/errors.d.ts +8 -0
- package/dist/esm/errors.js +8 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.js +2 -1
- package/dist/esm/query/compiler/index.js +41 -1
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/order-by.js +3 -6
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.js +20 -34
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/package.json +1 -1
- package/src/collection/changes.ts +1 -1
- package/src/collection/subscription.ts +82 -6
- package/src/errors.ts +16 -0
- package/src/query/compiler/index.ts +74 -0
- package/src/query/compiler/order-by.ts +3 -5
- package/src/query/live/collection-subscriber.ts +26 -72
package/dist/esm/query/live/collection-subscriber.js
CHANGED
@@ -41,20 +41,26 @@ class CollectionSubscriber {
         includeInitialState
       );
     }
+    const trackLoadPromise = () => {
+      if (!this.subscriptionLoadingPromises.has(subscription)) {
+        let resolve;
+        const promise = new Promise((res) => {
+          resolve = res;
+        });
+        this.subscriptionLoadingPromises.set(subscription, {
+          resolve
+        });
+        this.collectionConfigBuilder.liveQueryCollection._sync.trackLoadPromise(
+          promise
+        );
+      }
+    };
+    if (subscription.status === `loadingSubset`) {
+      trackLoadPromise();
+    }
     const statusUnsubscribe = subscription.on(`status:change`, (event) => {
       if (event.status === `loadingSubset`) {
-        if (!this.subscriptionLoadingPromises.has(subscription)) {
-          let resolve;
-          const promise = new Promise((res) => {
-            resolve = res;
-          });
-          this.subscriptionLoadingPromises.set(subscription, {
-            resolve
-          });
-          this.collectionConfigBuilder.liveQueryCollection._sync.trackLoadPromise(
-            promise
-          );
-        }
+        trackLoadPromise();
       } else {
         const deferred = this.subscriptionLoadingPromises.get(subscription);
         if (deferred) {
@@ -100,18 +106,10 @@ class CollectionSubscriber {
     return subscription;
   }
   subscribeToOrderedChanges(whereExpression, orderByInfo) {
-    const { orderBy, offset, limit, comparator, dataNeeded, index } = orderByInfo;
+    const { orderBy, offset, limit, index } = orderByInfo;
     const sendChangesInRange = (changes) => {
       const splittedChanges = splitUpdates(changes);
-      let filteredChanges = splittedChanges;
-      if (dataNeeded && dataNeeded() === 0) {
-        filteredChanges = filterChangesSmallerOrEqualToMax(
-          splittedChanges,
-          comparator,
-          this.biggest
-        );
-      }
-      this.sendChangesToPipelineWithTracking(filteredChanges, subscription);
+      this.sendChangesToPipelineWithTracking(splittedChanges, subscription);
     };
     const subscription = this.collection.subscribeChanges(sendChangesInRange, {
       whereExpression
@@ -234,18 +232,6 @@ function* splitUpdates(changes) {
     }
   }
 }
-function* filterChanges(changes, f) {
-  for (const change of changes) {
-    if (f(change)) {
-      yield change;
-    }
-  }
-}
-function* filterChangesSmallerOrEqualToMax(changes, comparator, maxValue) {
-  yield* filterChanges(changes, (change) => {
-    return !maxValue || comparator(change.value, maxValue) <= 0;
-  });
-}
 export {
   CollectionSubscriber
 };
package/dist/esm/query/live/collection-subscriber.js.map
CHANGED
@@ -1 +1 @@
(regenerated source map omitted: its embedded sourcesContent mirrors the source changes shown for package/src/query/live/collection-subscriber.ts below)
package/package.json
CHANGED

package/src/collection/subscription.ts
CHANGED
@@ -1,5 +1,5 @@
 import { ensureIndexForExpression } from "../indexes/auto-index.js"
-import { and, gt, lt } from "../query/builder/functions.js"
+import { and, eq, gt, lt } from "../query/builder/functions.js"
 import { Value } from "../query/ir.js"
 import { EventEmitter } from "../event-emitter.js"
 import {
@@ -20,6 +20,7 @@ import type { CollectionImpl } from "./index.js"
 type RequestSnapshotOptions = {
   where?: BasicExpression<boolean>
   optimizedOnly?: boolean
+  trackLoadSubsetPromise?: boolean
 }

 type RequestLimitedSnapshotOptions = {
@@ -197,7 +198,10 @@ export class CollectionSubscription
       subscription: this,
     })

-    this.trackLoadSubsetPromise(syncResult)
+    const trackLoadSubsetPromise = opts?.trackLoadSubsetPromise ?? true
+    if (trackLoadSubsetPromise) {
+      this.trackLoadSubsetPromise(syncResult)
+    }

     // Also load data immediately from the collection
     const snapshot = this.collection.currentStateAsChanges(stateOpts)
@@ -218,10 +222,12 @@ export class CollectionSubscription
   }

   /**
-   * Sends a snapshot that
+   * Sends a snapshot that fulfills the `where` clause and all rows are bigger or equal to `minValue`.
    * Requires a range index to be set with `setOrderByIndex` prior to calling this method.
    * It uses that range index to load the items in the order of the index.
-   * Note: it
+   * Note 1: it may load more rows than the provided LIMIT because it loads all values equal to `minValue` + limit values greater than `minValue`.
+   * This is needed to ensure that it does not accidentally skip duplicate values when the limit falls in the middle of some duplicated values.
+   * Note 2: it does not send keys that have already been sent before.
    */
   requestLimitedSnapshot({
     orderBy,
@@ -257,12 +263,49 @@ export class CollectionSubscription

     let biggestObservedValue = minValue
     const changes: Array<ChangeMessage<any, string | number>> = []
-    let keys = index.take(limit, minValue, filterFn)
+
+    // If we have a minValue we need to handle the case
+    // where there might be duplicate values equal to minValue that we need to include
+    // because we can have data like this: [1, 2, 3, 3, 3, 4, 5]
+    // so if minValue is 3 then the previous snapshot may not have included all 3s
+    // e.g. if it was offset 0 and limit 3 it would only have loaded the first 3
+    // so we load all rows equal to minValue first, to be sure we don't skip any duplicate values
+    let keys: Array<string | number> = []
+    if (minValue !== undefined) {
+      // First, get all items with the same value as minValue
+      const { expression } = orderBy[0]!
+      const allRowsWithMinValue = this.collection.currentStateAsChanges({
+        where: eq(expression, new Value(minValue)),
+      })
+
+      if (allRowsWithMinValue) {
+        const keysWithMinValue = allRowsWithMinValue
+          .map((change) => change.key)
+          .filter((key) => !this.sentKeys.has(key) && filterFn(key))
+
+        // Add items with the minValue first
+        keys.push(...keysWithMinValue)
+
+        // Then get items greater than minValue
+        const keysGreaterThanMin = index.take(
+          limit - keys.length,
+          minValue,
+          filterFn
+        )
+        keys.push(...keysGreaterThanMin)
+      } else {
+        keys = index.take(limit, minValue, filterFn)
+      }
+    } else {
+      keys = index.take(limit, minValue, filterFn)
+    }

     const valuesNeeded = () => Math.max(limit - changes.length, 0)
     const collectionExhausted = () => keys.length === 0

     while (valuesNeeded() > 0 && !collectionExhausted()) {
+      const insertedKeys = new Set<string | number>() // Track keys we add to `changes` in this iteration
+
       for (const key of keys) {
         const value = this.collection.get(key)!
         changes.push({
@@ -271,6 +314,7 @@ export class CollectionSubscription
           value,
         })
         biggestObservedValue = value
+        insertedKeys.add(key) // Track this key
       }

       keys = index.take(valuesNeeded(), biggestObservedValue, filterFn)
@@ -296,9 +340,41 @@ export class CollectionSubscription
       subscription: this,
     })

-    this.trackLoadSubsetPromise(syncResult)
+    // Make parallel loadSubset calls for values equal to minValue and values greater than minValue
+    const promises: Array<Promise<void>> = []
+
+    // First promise: load all values equal to minValue
+    if (typeof minValue !== `undefined`) {
+      const { expression } = orderBy[0]!
+      const exactValueFilter = eq(expression, new Value(minValue))
+
+      const equalValueResult = this.collection._sync.loadSubset({
+        where: exactValueFilter,
+        subscription: this,
+      })
+
+      if (equalValueResult instanceof Promise) {
+        promises.push(equalValueResult)
+      }
+    }
+
+    // Second promise: load values greater than minValue
+    if (syncResult instanceof Promise) {
+      promises.push(syncResult)
+    }
+
+    // Track the combined promise
+    if (promises.length > 0) {
+      const combinedPromise = Promise.all(promises).then(() => {})
+      this.trackLoadSubsetPromise(combinedPromise)
+    } else {
+      this.trackLoadSubsetPromise(syncResult)
+    }
   }

+  // TODO: also add similar test but that checks that it can also load it from the collection's loadSubset function
+  // and that that also works properly (i.e. does not skip duplicate values)
+
   /**
    * Filters and flips changes for keys that have not been sent yet.
    * Deletes are filtered out for keys that have not been sent yet.
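The comment block in the hunk above motivates the extra equal-to-minValue pass: a strict greater-than cursor over duplicated sort values can silently drop rows. A minimal standalone sketch of the problem and of the fix the diff applies (plain TypeScript with made-up helpers, not the package's API):

// Sketch only: why paging with a strict "greater than minValue" cursor skips
// duplicates, and why fetching the rows equal to minValue first avoids it.
const values = [1, 2, 3, 3, 3, 4, 5]

// Strict cursor: take `limit` values strictly greater than `min`.
const takeGreaterThan = (min: number | undefined, limit: number) =>
  values.filter((v) => min === undefined || v > min).slice(0, limit)

const firstPage = takeGreaterThan(undefined, 3) // [1, 2, 3]
const naiveSecondPage = takeGreaterThan(3, 3) // [4, 5]; the remaining 3s are lost

// Fix mirrored by the diff: load everything equal to the cursor value first,
// then take values strictly greater than it, so duplicates survive the page break.
const equalToCursor = values.filter((v) => v === 3) // [3, 3, 3]
const safeSecondPage = [...equalToCursor, ...takeGreaterThan(3, 3)] // [3, 3, 3, 4, 5]
// In the real code deduplication happens by key via `sentKeys`, not by value.
console.log(firstPage, naiveSecondPage, safeSecondPage)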
package/src/errors.ts
CHANGED
@@ -404,6 +404,22 @@ export class CollectionInputNotFoundError extends QueryCompilationError {
   }
 }

+/**
+ * Error thrown when a subquery uses the same alias as its parent query.
+ * This causes issues because parent and subquery would share the same input streams,
+ * leading to empty results or incorrect data (aggregation cross-leaking).
+ */
+export class DuplicateAliasInSubqueryError extends QueryCompilationError {
+  constructor(alias: string, parentAliases: Array<string>) {
+    super(
+      `Subquery uses alias "${alias}" which is already used in the parent query. ` +
+        `Each alias must be unique across parent and subquery contexts. ` +
+        `Parent query aliases: ${parentAliases.join(`, `)}. ` +
+        `Please rename "${alias}" in either the parent query or subquery to avoid conflicts.`
+    )
+  }
+}
+
 export class UnsupportedFromTypeError extends QueryCompilationError {
   constructor(type: string) {
     super(`Unsupported FROM type: ${type}`)
package/src/query/compiler/index.ts
CHANGED
@@ -3,6 +3,7 @@ import { optimizeQuery } from "../optimizer.js"
 import {
   CollectionInputNotFoundError,
   DistinctRequiresSelectError,
+  DuplicateAliasInSubqueryError,
   HavingRequiresGroupByError,
   LimitOffsetRequireOrderByError,
   UnsupportedFromTypeError,
@@ -99,6 +100,11 @@ export function compileQuery(
     return cachedResult
   }

+  // Validate the raw query BEFORE optimization to check user's original structure.
+  // This must happen before optimization because the optimizer may create internal
+  // subqueries (e.g., for predicate pushdown) that reuse aliases, which is fine.
+  validateQueryStructure(rawQuery)
+
   // Optimize the query before compilation
   const { optimizedQuery: query, sourceWhereClauses } = optimizeQuery(rawQuery)

@@ -375,6 +381,74 @@
   return compilationResult
 }

+/**
+ * Collects aliases used for DIRECT collection references (not subqueries).
+ * Used to validate that subqueries don't reuse parent query collection aliases.
+ * Only direct CollectionRef aliases matter - QueryRef aliases don't cause conflicts.
+ */
+function collectDirectCollectionAliases(query: QueryIR): Set<string> {
+  const aliases = new Set<string>()
+
+  // Collect FROM alias only if it's a direct collection reference
+  if (query.from.type === `collectionRef`) {
+    aliases.add(query.from.alias)
+  }
+
+  // Collect JOIN aliases only for direct collection references
+  if (query.join) {
+    for (const joinClause of query.join) {
+      if (joinClause.from.type === `collectionRef`) {
+        aliases.add(joinClause.from.alias)
+      }
+    }
+  }
+
+  return aliases
+}
+
+/**
+ * Validates the structure of a query and its subqueries.
+ * Checks that subqueries don't reuse collection aliases from parent queries.
+ * This must be called on the RAW query before optimization.
+ */
+function validateQueryStructure(
+  query: QueryIR,
+  parentCollectionAliases: Set<string> = new Set()
+): void {
+  // Collect direct collection aliases from this query level
+  const currentLevelAliases = collectDirectCollectionAliases(query)
+
+  // Check if any current alias conflicts with parent aliases
+  for (const alias of currentLevelAliases) {
+    if (parentCollectionAliases.has(alias)) {
+      throw new DuplicateAliasInSubqueryError(
+        alias,
+        Array.from(parentCollectionAliases)
+      )
+    }
+  }
+
+  // Combine parent and current aliases for checking nested subqueries
+  const combinedAliases = new Set([
+    ...parentCollectionAliases,
+    ...currentLevelAliases,
+  ])
+
+  // Recursively validate FROM subquery
+  if (query.from.type === `queryRef`) {
+    validateQueryStructure(query.from.query, combinedAliases)
+  }
+
+  // Recursively validate JOIN subqueries
+  if (query.join) {
+    for (const joinClause of query.join) {
+      if (joinClause.from.type === `queryRef`) {
+        validateQueryStructure(joinClause.from.query, combinedAliases)
+      }
+    }
+  }
+}
+
 /**
  * Processes the FROM clause, handling direct collection references and subqueries.
  * Populates `aliasToCollectionId` and `aliasRemapping` for per-alias subscription tracking.
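The validation added above rejects a subquery whose direct collection alias repeats one already used by the parent query. A simplified sketch of the shape it guards against (the Ref and MiniQuery types below are illustrative stand-ins, not the package's QueryIR exports):

// Sketch only: a parent query reading from `issues` joins a subquery that
// also aliases `issues`; compileQuery would now reject this shape.
type Ref =
  | { type: `collectionRef`; alias: string }
  | { type: `queryRef`; alias: string; query: MiniQuery }

interface MiniQuery {
  from: Ref
  join?: Array<{ from: Ref }>
}

const conflicting: MiniQuery = {
  from: { type: `collectionRef`, alias: `issues` },
  join: [
    {
      from: {
        type: `queryRef`,
        alias: `latest`,
        query: { from: { type: `collectionRef`, alias: `issues` } },
      },
    },
  ],
}

// Direct collection aliases at the top level; the nested `issues` collides,
// which is what DuplicateAliasInSubqueryError reports.
const topLevelAliases = new Set(
  [conflicting.from, ...(conflicting.join ?? []).map((j) => j.from)]
    .filter((r) => r.type === `collectionRef`)
    .map((r) => r.alias)
)
console.log(topLevelAliases.has(`issues`)) // true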
package/src/query/compiler/order-by.ts
CHANGED
@@ -179,13 +179,11 @@ export function processOrderBy(
         orderByOptimizationInfo

       setSizeCallback = (getSize: () => number) => {
-        optimizableOrderByCollections[followRefCollection.id] =
-
-          dataNeeded: () => {
+        optimizableOrderByCollections[followRefCollection.id]![`dataNeeded`] =
+          () => {
             const size = getSize()
             return Math.max(0, orderByOptimizationInfo!.limit - size)
-          }
-        }
+          }
       }
     }
   }
package/src/query/live/collection-subscriber.ts
CHANGED
@@ -73,29 +73,33 @@ export class CollectionSubscriber<
       )
     }

+    const trackLoadPromise = () => {
+      // Guard against duplicate transitions
+      if (!this.subscriptionLoadingPromises.has(subscription)) {
+        let resolve: () => void
+        const promise = new Promise<void>((res) => {
+          resolve = res
+        })
+
+        this.subscriptionLoadingPromises.set(subscription, {
+          resolve: resolve!,
+        })
+        this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(
+          promise
+        )
+      }
+    }
+
+    // It can be that we are not yet subscribed when the first `loadSubset` call happens (i.e. the initial query).
+    // So we also check the status here and if it's `loadingSubset` then we track the load promise
+    if (subscription.status === `loadingSubset`) {
+      trackLoadPromise()
+    }
+
     // Subscribe to subscription status changes to propagate loading state
     const statusUnsubscribe = subscription.on(`status:change`, (event) => {
-      // TODO: For now we are setting this loading state whenever the subscription
-      // status changes to 'loadingSubset'. But we have discussed it only happening
-      // when the the live query has it's offset/limit changed, and that triggers the
-      // subscription to request a snapshot. This will require more work to implement,
-      // and builds on https://github.com/TanStack/db/pull/663 which this PR
-      // does not yet depend on.
       if (event.status === `loadingSubset`) {
-        // Guard against duplicate transitions
-        if (!this.subscriptionLoadingPromises.has(subscription)) {
-          let resolve: () => void
-          const promise = new Promise<void>((res) => {
-            resolve = res
-          })
-
-          this.subscriptionLoadingPromises.set(subscription, {
-            resolve: resolve!,
-          })
-          this.collectionConfigBuilder.liveQueryCollection!._sync.trackLoadPromise(
-            promise
-          )
-        }
+        trackLoadPromise()
       } else {
         // status is 'ready'
         const deferred = this.subscriptionLoadingPromises.get(subscription)
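The trackLoadPromise helper introduced above is a deferred-promise pattern: create a promise, keep its resolve function in a map keyed by the subscription, hand the promise to the sync layer, and resolve it once the status flips back to ready. A generic sketch of the same pattern (illustrative names only, not the package's API):

// Sketch of the deferred-promise bookkeeping used above, with illustrative names.
type Deferred = { promise: Promise<void>; resolve: () => void }

function createDeferred(): Deferred {
  let resolve!: () => void
  const promise = new Promise<void>((res) => {
    resolve = res
  })
  return { promise, resolve }
}

const pending = new Map<string, Deferred>()

function onStatusChange(id: string, status: `loadingSubset` | `ready`) {
  if (status === `loadingSubset`) {
    // Guard against duplicate transitions: at most one pending promise per id.
    if (!pending.has(id)) {
      pending.set(id, createDeferred())
      // The real code hands deferred.promise to _sync.trackLoadPromise(...)
    }
  } else {
    // Clear the entry first, then resolve, mirroring the order in the diff.
    const deferred = pending.get(id)
    if (deferred) {
      pending.delete(id)
      deferred.resolve()
    }
  }
}

onStatusChange(`sub-1`, `loadingSubset`)
onStatusChange(`sub-1`, `ready`)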
@@ -176,30 +180,14 @@ export class CollectionSubscriber<
     whereExpression: BasicExpression<boolean> | undefined,
     orderByInfo: OrderByOptimizationInfo
   ) {
-    const { orderBy, offset, limit, comparator, dataNeeded, index } =
-      orderByInfo
+    const { orderBy, offset, limit, index } = orderByInfo

     const sendChangesInRange = (
       changes: Iterable<ChangeMessage<any, string | number>>
     ) => {
       // Split live updates into a delete of the old value and an insert of the new value
-      // and filter out changes that are bigger than the biggest value we've sent so far
-      // because they can't affect the topK (and if later we need more data, we will dynamically load more data)
       const splittedChanges = splitUpdates(changes)
-      let filteredChanges = splittedChanges
-      if (dataNeeded && dataNeeded() === 0) {
-        // If the topK is full [..., maxSentValue] then we do not need to send changes > maxSentValue
-        // because they can never make it into the topK.
-        // However, if the topK isn't full yet, we need to also send changes > maxSentValue
-        // because they will make it into the topK
-        filteredChanges = filterChangesSmallerOrEqualToMax(
-          splittedChanges,
-          comparator,
-          this.biggest
-        )
-      }
-
-      this.sendChangesToPipelineWithTracking(filteredChanges, subscription)
+      this.sendChangesToPipelineWithTracking(splittedChanges, subscription)
     }

     // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far
@@ -395,37 +383,3 @@ function* splitUpdates<
     }
   }
 }
-
-function* filterChanges<
-  T extends object = Record<string, unknown>,
-  TKey extends string | number = string | number,
->(
-  changes: Iterable<ChangeMessage<T, TKey>>,
-  f: (change: ChangeMessage<T, TKey>) => boolean
-): Generator<ChangeMessage<T, TKey>> {
-  for (const change of changes) {
-    if (f(change)) {
-      yield change
-    }
-  }
-}
-
-/**
- * Filters changes to only include those that are smaller or equal to the provided max value
- * @param changes - Iterable of changes to filter
- * @param comparator - Comparator function to use for filtering
- * @param maxValue - Range to filter changes within (range boundaries are exclusive)
- * @returns Iterable of changes that fall within the range
- */
-function* filterChangesSmallerOrEqualToMax<
-  T extends object = Record<string, unknown>,
-  TKey extends string | number = string | number,
->(
-  changes: Iterable<ChangeMessage<T, TKey>>,
-  comparator: (a: any, b: any) => number,
-  maxValue: any
-): Generator<ChangeMessage<T, TKey>> {
-  yield* filterChanges(changes, (change) => {
-    return !maxValue || comparator(change.value, maxValue) <= 0
-  })
-}