@tanstack/db 0.4.6 → 0.4.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/collection/index.cjs.map +1 -1
- package/dist/cjs/collection/index.d.cts +2 -1
- package/dist/cjs/collection/lifecycle.cjs +2 -3
- package/dist/cjs/collection/lifecycle.cjs.map +1 -1
- package/dist/cjs/collection/mutations.cjs +4 -4
- package/dist/cjs/collection/mutations.cjs.map +1 -1
- package/dist/cjs/collection/state.cjs +22 -33
- package/dist/cjs/collection/state.cjs.map +1 -1
- package/dist/cjs/collection/state.d.cts +6 -2
- package/dist/cjs/collection/sync.cjs +4 -3
- package/dist/cjs/collection/sync.cjs.map +1 -1
- package/dist/cjs/indexes/auto-index.cjs +0 -3
- package/dist/cjs/indexes/auto-index.cjs.map +1 -1
- package/dist/cjs/local-only.cjs +21 -2
- package/dist/cjs/local-only.cjs.map +1 -1
- package/dist/cjs/local-only.d.cts +64 -7
- package/dist/cjs/local-storage.cjs +71 -3
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/local-storage.d.cts +55 -2
- package/dist/cjs/query/live/collection-config-builder.cjs +54 -12
- package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
- package/dist/cjs/query/live/collection-config-builder.d.cts +17 -2
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/cjs/types.d.cts +3 -5
- package/dist/esm/collection/index.d.ts +2 -1
- package/dist/esm/collection/index.js.map +1 -1
- package/dist/esm/collection/lifecycle.js +2 -3
- package/dist/esm/collection/lifecycle.js.map +1 -1
- package/dist/esm/collection/mutations.js +4 -4
- package/dist/esm/collection/mutations.js.map +1 -1
- package/dist/esm/collection/state.d.ts +6 -2
- package/dist/esm/collection/state.js +22 -33
- package/dist/esm/collection/state.js.map +1 -1
- package/dist/esm/collection/sync.js +4 -3
- package/dist/esm/collection/sync.js.map +1 -1
- package/dist/esm/indexes/auto-index.js +0 -3
- package/dist/esm/indexes/auto-index.js.map +1 -1
- package/dist/esm/local-only.d.ts +64 -7
- package/dist/esm/local-only.js +21 -2
- package/dist/esm/local-only.js.map +1 -1
- package/dist/esm/local-storage.d.ts +55 -2
- package/dist/esm/local-storage.js +72 -4
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/query/live/collection-config-builder.d.ts +17 -2
- package/dist/esm/query/live/collection-config-builder.js +54 -12
- package/dist/esm/query/live/collection-config-builder.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/dist/esm/types.d.ts +3 -5
- package/package.json +1 -1
- package/src/collection/index.ts +1 -1
- package/src/collection/lifecycle.ts +3 -4
- package/src/collection/mutations.ts +8 -4
- package/src/collection/state.ts +52 -48
- package/src/collection/sync.ts +7 -6
- package/src/indexes/auto-index.ts +0 -8
- package/src/local-only.ts +119 -30
- package/src/local-storage.ts +170 -5
- package/src/query/live/collection-config-builder.ts +103 -24
- package/src/query/live/collection-subscriber.ts +3 -3
- package/src/types.ts +3 -5
@@ -1 +1 @@
-
{"version":3,"file":"collection-config-builder.js","sources":["../../../../src/query/live/collection-config-builder.ts"],"sourcesContent":["import { D2, output } from \"@tanstack/db-ivm\"\nimport { compileQuery } from \"../compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"../builder/index.js\"\nimport { CollectionSubscriber } from \"./collection-subscriber.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\nimport type { RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { OrderByOptimizationInfo } from \"../compiler/order-by.js\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type {\n CollectionConfigSingleRowOption,\n KeyedStream,\n ResultStream,\n SyncConfig,\n} from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression, QueryIR } from \"../ir.js\"\nimport type { LazyCollectionCallbacks } from \"../compiler/joins.js\"\nimport type {\n Changes,\n FullSyncState,\n LiveQueryCollectionConfig,\n SyncState,\n} from \"./types.js\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\nexport class CollectionConfigBuilder<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n private readonly id: string\n readonly query: QueryIR\n private readonly collections: Record<string, Collection<any, any, any>>\n\n // WeakMap to store the keys of the results\n // so that we can retrieve them in the getKey function\n private readonly resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n private readonly orderByIndices = new WeakMap<object, string>()\n\n private readonly compare?: (val1: TResult, val2: TResult) => number\n\n private isGraphRunning = false\n\n private graphCache: D2 | undefined\n private inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n private pipelineCache: ResultStream | undefined\n public collectionWhereClausesCache:\n | Map<string, BasicExpression<boolean>>\n | undefined\n\n // Map of collection ID to subscription\n readonly subscriptions: Record<string, CollectionSubscription> = {}\n // Map of collection IDs to functions that load keys for that lazy collection\n lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}\n // Set of collection IDs that are lazy collections\n readonly lazyCollections = new Set<string>()\n // Set of collection IDs that include an optimizable ORDER BY clause\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> = {}\n\n constructor(\n private readonly config: LiveQueryCollectionConfig<TContext, TResult>\n ) {\n // Generate a unique ID if not provided\n this.id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n this.query = buildQueryFromConfig(config)\n this.collections = extractCollectionsFromQuery(this.query)\n\n // Create compare function for ordering if the query has orderBy\n if (this.query.orderBy && this.query.orderBy.length > 0) {\n this.compare = createOrderByComparator<TResult>(this.orderByIndices)\n }\n\n // Compile the base pipeline once initially\n // This is done to ensure that any errors are thrown immediately and synchronously\n this.compileBasePipeline()\n }\n\n getConfig(): CollectionConfigSingleRowOption<TResult> {\n return {\n id: this.id,\n getKey:\n this.config.getKey ||\n ((item) => this.resultKeys.get(item) as string | number),\n sync: this.getSyncConfig(),\n compare: this.compare,\n gcTime: this.config.gcTime || 
5000, // 5 seconds by default for live queries\n schema: this.config.schema,\n onInsert: this.config.onInsert,\n onUpdate: this.config.onUpdate,\n onDelete: this.config.onDelete,\n startSync: this.config.startSync,\n singleResult: this.query.singleResult,\n }\n }\n\n // The callback function is called after the graph has run.\n // This gives the callback a chance to load more data if needed,\n // that's used to optimize orderBy operators that set a limit,\n // in order to load some more data if we still don't have enough rows after the pipeline has run.\n // That can happend because even though we load N rows, the pipeline might filter some of these rows out\n // causing the orderBy operator to receive less than N rows or even no rows at all.\n // So this callback would notice that it doesn't have enough rows and load some more.\n // The callback returns a boolean, when it's true it's done loading data and we can mark the collection as ready.\n maybeRunGraph(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n syncState: FullSyncState,\n callback?: () => boolean\n ) {\n if (this.isGraphRunning) {\n // no nested runs of the graph\n // which is possible if the `callback`\n // would call `maybeRunGraph` e.g. after it has loaded some more data\n return\n }\n\n this.isGraphRunning = true\n\n try {\n const { begin, commit, markReady } = config\n\n // We only run the graph if all the collections are ready\n if (\n this.allCollectionsReadyOrInitialCommit() &&\n syncState.subscribedToAllCollections\n ) {\n while (syncState.graph.pendingWork()) {\n syncState.graph.run()\n callback?.()\n }\n\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (syncState.messagesCount === 0) {\n begin()\n commit()\n }\n // Mark the collection as ready after the first successful run\n if (this.allCollectionsReady()) {\n markReady()\n }\n }\n } finally {\n this.isGraphRunning = false\n }\n }\n\n private getSyncConfig(): SyncConfig<TResult> {\n return {\n rowUpdateMode: `full`,\n sync: this.syncFn.bind(this),\n }\n }\n\n private syncFn(config: Parameters<SyncConfig<TResult>[`sync`]>[0]) {\n const syncState: SyncState = {\n messagesCount: 0,\n subscribedToAllCollections: false,\n unsubscribeCallbacks: new Set<() => void>(),\n }\n\n // Extend the pipeline such that it applies the incoming changes to the collection\n const fullSyncState = this.extendPipelineWithChangeProcessing(\n config,\n syncState\n )\n\n const loadMoreDataCallbacks = this.subscribeToAllCollections(\n config,\n fullSyncState\n )\n\n // Initial run with callback to load more data if needed\n this.maybeRunGraph(config, fullSyncState, loadMoreDataCallbacks)\n\n // Return the unsubscribe function\n return () => {\n syncState.unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n\n // Reset caches so a fresh graph/pipeline is compiled on next start\n // This avoids reusing a finalized D2 graph across GC restarts\n this.graphCache = undefined\n this.inputsCache = undefined\n this.pipelineCache = undefined\n this.collectionWhereClausesCache = undefined\n\n // Reset lazy collection state\n this.lazyCollections.clear()\n this.optimizableOrderByCollections = {}\n this.lazyCollectionsCallbacks = {}\n }\n }\n\n private compileBasePipeline() {\n this.graphCache = new D2()\n this.inputsCache = Object.fromEntries(\n Object.entries(this.collections).map(([key]) => [\n key,\n this.graphCache!.newInput<any>(),\n ])\n )\n\n // Compile the query and get both pipeline and collection WHERE clauses\n 
const {\n pipeline: pipelineCache,\n collectionWhereClauses: collectionWhereClausesCache,\n } = compileQuery(\n this.query,\n this.inputsCache as Record<string, KeyedStream>,\n this.collections,\n this.subscriptions,\n this.lazyCollectionsCallbacks,\n this.lazyCollections,\n this.optimizableOrderByCollections\n )\n\n this.pipelineCache = pipelineCache\n this.collectionWhereClausesCache = collectionWhereClausesCache\n }\n\n private maybeCompileBasePipeline() {\n if (!this.graphCache || !this.inputsCache || !this.pipelineCache) {\n this.compileBasePipeline()\n }\n return {\n graph: this.graphCache!,\n inputs: this.inputsCache!,\n pipeline: this.pipelineCache!,\n }\n }\n\n private extendPipelineWithChangeProcessing(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n syncState: SyncState\n ): FullSyncState {\n const { begin, commit } = config\n const { graph, inputs, pipeline } = this.maybeCompileBasePipeline()\n\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n syncState.messagesCount += messages.length\n\n begin()\n messages\n .reduce(\n accumulateChanges<TResult>,\n new Map<unknown, Changes<TResult>>()\n )\n .forEach(this.applyChanges.bind(this, config))\n commit()\n })\n )\n\n graph.finalize()\n\n // Extend the sync state with the graph, inputs, and pipeline\n syncState.graph = graph\n syncState.inputs = inputs\n syncState.pipeline = pipeline\n\n return syncState as FullSyncState\n }\n\n private applyChanges(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n changes: {\n deletes: number\n inserts: number\n value: TResult\n orderByIndex: string | undefined\n },\n key: unknown\n ) {\n const { write, collection } = config\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n this.resultKeys.set(value, key)\n\n // Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n this.orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes && collection.has(collection.getKeyFromItem(value)))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `Could not apply changes: ${JSON.stringify(changes)}. 
This should never happen.`\n )\n }\n }\n\n private allCollectionsReady() {\n return Object.values(this.collections).every((collection) =>\n collection.isReady()\n )\n }\n\n private allCollectionsReadyOrInitialCommit() {\n return Object.values(this.collections).every(\n (collection) =>\n collection.status === `ready` || collection.status === `initialCommit`\n )\n }\n\n private subscribeToAllCollections(\n config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n syncState: FullSyncState\n ) {\n const loaders = Object.entries(this.collections).map(\n ([collectionId, collection]) => {\n const collectionSubscriber = new CollectionSubscriber(\n collectionId,\n collection,\n config,\n syncState,\n this\n )\n\n const subscription = collectionSubscriber.subscribe()\n this.subscriptions[collectionId] = subscription\n\n const loadMore = collectionSubscriber.loadMoreIfNeeded.bind(\n collectionSubscriber,\n subscription\n )\n\n return loadMore\n }\n )\n\n const loadMoreDataCallback = () => {\n loaders.map((loader) => loader())\n return true\n }\n\n // Mark the collections as subscribed in the sync state\n syncState.subscribedToAllCollections = true\n\n return loadMoreDataCallback\n }\n}\n\nfunction buildQueryFromConfig<TContext extends Context>(\n config: LiveQueryCollectionConfig<any, any>\n) {\n // Build the query using the provided query builder function or instance\n if (typeof config.query === `function`) {\n return buildQuery<TContext>(config.query)\n }\n return getQueryIR(config.query)\n}\n\nfunction createOrderByComparator<T extends object>(\n orderByIndices: WeakMap<object, string>\n) {\n return (val1: T, val2: T): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n\nfunction accumulateChanges<T>(\n acc: Map<unknown, Changes<T>>,\n [[key, tupleData], multiplicity]: [\n [unknown, [any, string | undefined]],\n number,\n ]\n) {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = 
tupleData as [T, string | undefined]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return acc\n}\n"],"names":[],"mappings":";;;;AAyBA,IAAI,6BAA6B;AAE1B,MAAM,wBAGX;AAAA,EAgCA,YACmB,QACjB;AADiB,SAAA,SAAA;AA1BnB,SAAiB,iCAAiB,QAAA;AAGlC,SAAiB,qCAAqB,QAAA;AAItC,SAAQ,iBAAiB;AAUzB,SAAS,gBAAwD,CAAA;AAEjE,SAAA,2BAAoE,CAAA;AAEpE,SAAS,sCAAsB,IAAA;AAE/B,SAAA,gCAAyE,CAAA;AAMvE,SAAK,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAEjE,SAAK,QAAQ,qBAAqB,MAAM;AACxC,SAAK,cAAc,4BAA4B,KAAK,KAAK;AAGzD,QAAI,KAAK,MAAM,WAAW,KAAK,MAAM,QAAQ,SAAS,GAAG;AACvD,WAAK,UAAU,wBAAiC,KAAK,cAAc;AAAA,IACrE;AAIA,SAAK,oBAAA;AAAA,EACP;AAAA,EAEA,YAAsD;AACpD,WAAO;AAAA,MACL,IAAI,KAAK;AAAA,MACT,QACE,KAAK,OAAO,WACX,CAAC,SAAS,KAAK,WAAW,IAAI,IAAI;AAAA,MACrC,MAAM,KAAK,cAAA;AAAA,MACX,SAAS,KAAK;AAAA,MACd,QAAQ,KAAK,OAAO,UAAU;AAAA;AAAA,MAC9B,QAAQ,KAAK,OAAO;AAAA,MACpB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,WAAW,KAAK,OAAO;AAAA,MACvB,cAAc,KAAK,MAAM;AAAA,IAAA;AAAA,EAE7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,cACE,QACA,WACA,UACA;AACA,QAAI,KAAK,gBAAgB;AAIvB;AAAA,IACF;AAEA,SAAK,iBAAiB;AAEtB,QAAI;AACF,YAAM,EAAE,OAAO,QAAQ,UAAA,IAAc;AAGrC,UACE,KAAK,wCACL,UAAU,4BACV;AACA,eAAO,UAAU,MAAM,eAAe;AACpC,oBAAU,MAAM,IAAA;AAChB,qBAAA;AAAA,QACF;AAIA,YAAI,UAAU,kBAAkB,GAAG;AACjC,gBAAA;AACA,iBAAA;AAAA,QACF;AAEA,YAAI,KAAK,uBAAuB;AAC9B,oBAAA;AAAA,QACF;AAAA,MACF;AAAA,IACF,UAAA;AACE,WAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEQ,gBAAqC;AAC3C,WAAO;AAAA,MACL,eAAe;AAAA,MACf,MAAM,KAAK,OAAO,KAAK,IAAI;AAAA,IAAA;AAAA,EAE/B;AAAA,EAEQ,OAAO,QAAoD;AACjE,UAAM,YAAuB;AAAA,MAC3B,eAAe;AAAA,MACf,4BAA4B;AAAA,MAC5B,0CAA0B,IAAA;AAAA,IAAgB;AAI5C,UAAM,gBAAgB,KAAK;AAAA,MACzB;AAAA,MACA;AAAA,IAAA;AAGF,UAAM,wBAAwB,KAAK;AAAA,MACjC;AAAA,MACA;AAAA,IAAA;AAIF,SAAK,cAAc,QAAQ,eAAe,qBAAqB;AAG/D,WAAO,MAAM;AACX,gBAAU,qBAAqB,QAAQ,CAAC,gBAAgB,aAAa;AAIrE,WAAK,aAAa;AAClB,WAAK,cAAc;AACnB,WAAK,gBAAgB;AACrB,WAAK,8BAA8B;AAGnC,WAAK,gBAAgB,MAAA;AACrB,WAAK,gCAAgC,CAAA;AACrC,WAAK,2BAA2B,CAAA;AAAA,IAClC;AAAA,EACF;AAAA,EAEQ,sBAAsB;AAC5B,SAAK,aAAa,IAAI,GAAA;AACtB,SAAK,cAAc,OAAO;AAAA,MACxB,OAAO,QAAQ,KAAK,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QAC9C;AAAA,QACA,KAAK,WAAY,SAAA;AAAA,MAAc,CAChC;AAAA,IAAA;AAIH,UAAM;AAAA,MACJ,UAAU;AAAA,MACV,wBAAwB;AAAA,IAAA,IACtB;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IAAA;AAGP,SAAK,gBAAgB;AACrB,SAAK,8BAA8B;AAAA,EACrC;AAAA,EAEQ,2BAA2B;AACjC,QAAI,CAAC,KAAK,cAAc,CAAC,KAAK,eAAe,CAAC,KAAK,eAAe;AAChE,WAAK,oBAAA;AAAA,IACP;AACA,WAAO;AAAA,MACL,OAAO,KAAK;AAAA,MACZ,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,IAAA;AAAA,EAEnB;AAAA,EAEQ,mCACN,QACA,WACe;AACf,UAAM,EAAE,OAAO,OAAA,IAAW;AAC1B,UAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,KAAK,yBAAA;AAEzC,aAAS;AAAA,MACP,OAAO,CAAC,SAAS;AACf,cAAM,WAAW,KAAK,SAAA;AACtB,kBAAU,iBAAiB,SAAS;AAEpC,cAAA;AACA,iBACG;AAAA,UACC;AAAA,8BACI,IAAA;AAAA,QAA+B,EAEpC,QAAQ,KAAK,aAAa,KAAK,MAAM,MAAM,CAAC;AAC/C,eAAA;AAAA,MACF,CAAC;AAAA,IAAA;AAGH,UAAM,SAAA;AAGN,cAAU,QAAQ;AAClB,cAAU,SAAS;AACnB,cAAU,WAAW;AAErB,WAAO;AAAA,EACT;AAAA,EAEQ,aACN,QACA,SAMA,KACA;AACA,UAAM,EAAE,OAAO,WAAA,IAAe;AAC9B,UAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,SAAK,WAAW,IAAI,OAAO,GAAG;AAG9B,QAAI,iBAAiB,QAAW;AAC9B,WAAK,eAAe,IAAI,OAAO,YAAY;AAAA,IAC7C;AAGA,QAAI,WAAW,YAAY,GAAG;AAC5B,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH;AAAA;AAAA,MAEE,UAAU;AAAA;AAAA,MAGT,YAAY,WAAW,
WAAW,IAAI,WAAW,eAAe,KAAK,CAAC;AAAA,MACvE;AACA,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IAEH,WAAW,UAAU,GAAG;AACtB,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH,OAAO;AACL,YAAM,IAAI;AAAA,QACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,MAAA;AAAA,IAEvD;AAAA,EACF;AAAA,EAEQ,sBAAsB;AAC5B,WAAO,OAAO,OAAO,KAAK,WAAW,EAAE;AAAA,MAAM,CAAC,eAC5C,WAAW,QAAA;AAAA,IAAQ;AAAA,EAEvB;AAAA,EAEQ,qCAAqC;AAC3C,WAAO,OAAO,OAAO,KAAK,WAAW,EAAE;AAAA,MACrC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAAA;AAAA,EAE7D;AAAA,EAEQ,0BACN,QACA,WACA;AACA,UAAM,UAAU,OAAO,QAAQ,KAAK,WAAW,EAAE;AAAA,MAC/C,CAAC,CAAC,cAAc,UAAU,MAAM;AAC9B,cAAM,uBAAuB,IAAI;AAAA,UAC/B;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA;AAGF,cAAM,eAAe,qBAAqB,UAAA;AAC1C,aAAK,cAAc,YAAY,IAAI;AAEnC,cAAM,WAAW,qBAAqB,iBAAiB;AAAA,UACrD;AAAA,UACA;AAAA,QAAA;AAGF,eAAO;AAAA,MACT;AAAA,IAAA;AAGF,UAAM,uBAAuB,MAAM;AACjC,cAAQ,IAAI,CAAC,WAAW,OAAA,CAAQ;AAChC,aAAO;AAAA,IACT;AAGA,cAAU,6BAA6B;AAEvC,WAAO;AAAA,EACT;AACF;AAEA,SAAS,qBACP,QACA;AAEA,MAAI,OAAO,OAAO,UAAU,YAAY;AACtC,WAAO,WAAqB,OAAO,KAAK;AAAA,EAC1C;AACA,SAAO,WAAW,OAAO,KAAK;AAChC;AAEA,SAAS,wBACP,gBACA;AACA,SAAO,CAAC,MAAS,SAAoB;AAEnC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AACF;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;AAEA,SAAS,kBACP,KACA,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,GAI/B;AAGA,QAAM,CAAC,OAAO,YAAY,IAAI;AAE9B,QAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,IAC9B,SAAS;AAAA,IACT,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EAAA;AAEF,MAAI,eAAe,GAAG;AACpB,YAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,EAC1C,WAAW,eAAe,GAAG;AAC3B,YAAQ,WAAW;AACnB,YAAQ,QAAQ;AAChB,YAAQ,eAAe;AAAA,EACzB;AACA,MAAI,IAAI,KAAK,OAAO;AACpB,SAAO;AACT;"}
+
{"version":3,"file":"collection-config-builder.js","sources":["../../../../src/query/live/collection-config-builder.ts"],"sourcesContent":["import { D2, output } from \"@tanstack/db-ivm\"\nimport { compileQuery } from \"../compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"../builder/index.js\"\nimport { CollectionSubscriber } from \"./collection-subscriber.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\nimport type { RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { OrderByOptimizationInfo } from \"../compiler/order-by.js\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type {\n CollectionConfigSingleRowOption,\n KeyedStream,\n ResultStream,\n SyncConfig,\n} from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression, QueryIR } from \"../ir.js\"\nimport type { LazyCollectionCallbacks } from \"../compiler/joins.js\"\nimport type {\n Changes,\n FullSyncState,\n LiveQueryCollectionConfig,\n SyncState,\n} from \"./types.js\"\nimport type { AllCollectionEvents } from \"../../collection/events.js\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\ntype SyncMethods<TResult extends object> = Parameters<\n SyncConfig<TResult>[`sync`]\n>[0]\n\nexport class CollectionConfigBuilder<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n private readonly id: string\n readonly query: QueryIR\n private readonly collections: Record<string, Collection<any, any, any>>\n\n // WeakMap to store the keys of the results\n // so that we can retrieve them in the getKey function\n private readonly resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n private readonly orderByIndices = new WeakMap<object, string>()\n\n private readonly compare?: (val1: TResult, val2: TResult) => number\n\n private isGraphRunning = false\n\n // Error state tracking\n private isInErrorState = false\n\n // Reference to the live query collection for error state transitions\n private liveQueryCollection?: Collection<TResult, any, any>\n\n private graphCache: D2 | undefined\n private inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n private pipelineCache: ResultStream | undefined\n public collectionWhereClausesCache:\n | Map<string, BasicExpression<boolean>>\n | undefined\n\n // Map of collection ID to subscription\n readonly subscriptions: Record<string, CollectionSubscription> = {}\n // Map of collection IDs to functions that load keys for that lazy collection\n lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}\n // Set of collection IDs that are lazy collections\n readonly lazyCollections = new Set<string>()\n // Set of collection IDs that include an optimizable ORDER BY clause\n optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> = {}\n\n constructor(\n private readonly config: LiveQueryCollectionConfig<TContext, TResult>\n ) {\n // Generate a unique ID if not provided\n this.id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n this.query = buildQueryFromConfig(config)\n this.collections = extractCollectionsFromQuery(this.query)\n\n // Create compare function for ordering if the query has orderBy\n if (this.query.orderBy && this.query.orderBy.length > 0) {\n this.compare = createOrderByComparator<TResult>(this.orderByIndices)\n }\n\n // Compile the base pipeline once initially\n // This is done to 
ensure that any errors are thrown immediately and synchronously\n this.compileBasePipeline()\n }\n\n getConfig(): CollectionConfigSingleRowOption<TResult> {\n return {\n id: this.id,\n getKey:\n this.config.getKey ||\n ((item) => this.resultKeys.get(item) as string | number),\n sync: this.getSyncConfig(),\n compare: this.compare,\n gcTime: this.config.gcTime || 5000, // 5 seconds by default for live queries\n schema: this.config.schema,\n onInsert: this.config.onInsert,\n onUpdate: this.config.onUpdate,\n onDelete: this.config.onDelete,\n startSync: this.config.startSync,\n singleResult: this.query.singleResult,\n }\n }\n\n // The callback function is called after the graph has run.\n // This gives the callback a chance to load more data if needed,\n // that's used to optimize orderBy operators that set a limit,\n // in order to load some more data if we still don't have enough rows after the pipeline has run.\n // That can happen because even though we load N rows, the pipeline might filter some of these rows out\n // causing the orderBy operator to receive less than N rows or even no rows at all.\n // So this callback would notice that it doesn't have enough rows and load some more.\n // The callback returns a boolean, when it's true it's done loading data.\n maybeRunGraph(\n config: SyncMethods<TResult>,\n syncState: FullSyncState,\n callback?: () => boolean\n ) {\n if (this.isGraphRunning) {\n // no nested runs of the graph\n // which is possible if the `callback`\n // would call `maybeRunGraph` e.g. after it has loaded some more data\n return\n }\n\n this.isGraphRunning = true\n\n try {\n const { begin, commit } = config\n\n // Don't run if the live query is in an error state\n if (this.isInErrorState) {\n return\n }\n\n // Always run the graph if subscribed (eager execution)\n if (syncState.subscribedToAllCollections) {\n while (syncState.graph.pendingWork()) {\n syncState.graph.run()\n callback?.()\n }\n\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (syncState.messagesCount === 0) {\n begin()\n commit()\n // After initial commit, check if we should mark ready\n // (in case all sources were already ready before we subscribed)\n this.updateLiveQueryStatus(config)\n }\n }\n } finally {\n this.isGraphRunning = false\n }\n }\n\n private getSyncConfig(): SyncConfig<TResult> {\n return {\n rowUpdateMode: `full`,\n sync: this.syncFn.bind(this),\n }\n }\n\n private syncFn(config: SyncMethods<TResult>) {\n // Store reference to the live query collection for error state transitions\n this.liveQueryCollection = config.collection\n\n const syncState: SyncState = {\n messagesCount: 0,\n subscribedToAllCollections: false,\n unsubscribeCallbacks: new Set<() => void>(),\n }\n\n // Extend the pipeline such that it applies the incoming changes to the collection\n const fullSyncState = this.extendPipelineWithChangeProcessing(\n config,\n syncState\n )\n\n const loadMoreDataCallbacks = this.subscribeToAllCollections(\n config,\n fullSyncState\n )\n\n // Initial run with callback to load more data if needed\n this.maybeRunGraph(config, fullSyncState, loadMoreDataCallbacks)\n\n // Return the unsubscribe function\n return () => {\n syncState.unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n\n // Reset caches so a fresh graph/pipeline is compiled on next start\n // This avoids reusing a finalized D2 graph across GC restarts\n this.graphCache = undefined\n this.inputsCache = undefined\n this.pipelineCache = undefined\n 
this.collectionWhereClausesCache = undefined\n\n // Reset lazy collection state\n this.lazyCollections.clear()\n this.optimizableOrderByCollections = {}\n this.lazyCollectionsCallbacks = {}\n }\n }\n\n private compileBasePipeline() {\n this.graphCache = new D2()\n this.inputsCache = Object.fromEntries(\n Object.entries(this.collections).map(([key]) => [\n key,\n this.graphCache!.newInput<any>(),\n ])\n )\n\n // Compile the query and get both pipeline and collection WHERE clauses\n const {\n pipeline: pipelineCache,\n collectionWhereClauses: collectionWhereClausesCache,\n } = compileQuery(\n this.query,\n this.inputsCache as Record<string, KeyedStream>,\n this.collections,\n this.subscriptions,\n this.lazyCollectionsCallbacks,\n this.lazyCollections,\n this.optimizableOrderByCollections\n )\n\n this.pipelineCache = pipelineCache\n this.collectionWhereClausesCache = collectionWhereClausesCache\n }\n\n private maybeCompileBasePipeline() {\n if (!this.graphCache || !this.inputsCache || !this.pipelineCache) {\n this.compileBasePipeline()\n }\n return {\n graph: this.graphCache!,\n inputs: this.inputsCache!,\n pipeline: this.pipelineCache!,\n }\n }\n\n private extendPipelineWithChangeProcessing(\n config: SyncMethods<TResult>,\n syncState: SyncState\n ): FullSyncState {\n const { begin, commit } = config\n const { graph, inputs, pipeline } = this.maybeCompileBasePipeline()\n\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n syncState.messagesCount += messages.length\n\n begin()\n messages\n .reduce(\n accumulateChanges<TResult>,\n new Map<unknown, Changes<TResult>>()\n )\n .forEach(this.applyChanges.bind(this, config))\n commit()\n })\n )\n\n graph.finalize()\n\n // Extend the sync state with the graph, inputs, and pipeline\n syncState.graph = graph\n syncState.inputs = inputs\n syncState.pipeline = pipeline\n\n return syncState as FullSyncState\n }\n\n private applyChanges(\n config: SyncMethods<TResult>,\n changes: {\n deletes: number\n inserts: number\n value: TResult\n orderByIndex: string | undefined\n },\n key: unknown\n ) {\n const { write, collection } = config\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n this.resultKeys.set(value, key)\n\n // Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n this.orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes && collection.has(collection.getKeyFromItem(value)))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `Could not apply changes: ${JSON.stringify(changes)}. 
This should never happen.`\n )\n }\n }\n\n /**\n * Handle status changes from source collections\n */\n private handleSourceStatusChange(\n config: SyncMethods<TResult>,\n collectionId: string,\n event: AllCollectionEvents[`status:change`]\n ) {\n const { status } = event\n\n // Handle error state - any source collection in error puts live query in error\n if (status === `error`) {\n this.transitionToError(\n `Source collection '${collectionId}' entered error state`\n )\n return\n }\n\n // Handle manual cleanup - this should not happen due to GC prevention,\n // but could happen if user manually calls cleanup()\n if (status === `cleaned-up`) {\n this.transitionToError(\n `Source collection '${collectionId}' was manually cleaned up while live query '${this.id}' depends on it. ` +\n `Live queries prevent automatic GC, so this was likely a manual cleanup() call.`\n )\n return\n }\n\n // Update ready status based on all source collections\n this.updateLiveQueryStatus(config)\n }\n\n /**\n * Update the live query status based on source collection statuses\n */\n private updateLiveQueryStatus(config: SyncMethods<TResult>) {\n const { markReady } = config\n\n // Don't update status if already in error\n if (this.isInErrorState) {\n return\n }\n\n // Mark ready when all source collections are ready\n if (this.allCollectionsReady()) {\n markReady()\n }\n }\n\n /**\n * Transition the live query to error state\n */\n private transitionToError(message: string) {\n this.isInErrorState = true\n\n // Log error to console for debugging\n console.error(`[Live Query Error] ${message}`)\n\n // Transition live query collection to error state\n this.liveQueryCollection?._lifecycle.setStatus(`error`)\n }\n\n private allCollectionsReady() {\n return Object.values(this.collections).every((collection) =>\n collection.isReady()\n )\n }\n\n private subscribeToAllCollections(\n config: SyncMethods<TResult>,\n syncState: FullSyncState\n ) {\n const loaders = Object.entries(this.collections).map(\n ([collectionId, collection]) => {\n const collectionSubscriber = new CollectionSubscriber(\n collectionId,\n collection,\n config,\n syncState,\n this\n )\n\n const subscription = collectionSubscriber.subscribe()\n this.subscriptions[collectionId] = subscription\n\n // Subscribe to status changes for status flow\n const statusUnsubscribe = collection.on(`status:change`, (event) => {\n this.handleSourceStatusChange(config, collectionId, event)\n })\n syncState.unsubscribeCallbacks.add(statusUnsubscribe)\n\n const loadMore = collectionSubscriber.loadMoreIfNeeded.bind(\n collectionSubscriber,\n subscription\n )\n\n return loadMore\n }\n )\n\n const loadMoreDataCallback = () => {\n loaders.map((loader) => loader())\n return true\n }\n\n // Mark the collections as subscribed in the sync state\n syncState.subscribedToAllCollections = true\n\n // Initial status check after all subscriptions are set up\n this.updateLiveQueryStatus(config)\n\n return loadMoreDataCallback\n }\n}\n\nfunction buildQueryFromConfig<TContext extends Context>(\n config: LiveQueryCollectionConfig<any, any>\n) {\n // Build the query using the provided query builder function or instance\n if (typeof config.query === `function`) {\n return buildQuery<TContext>(config.query)\n }\n return getQueryIR(config.query)\n}\n\nfunction createOrderByComparator<T extends object>(\n orderByIndices: WeakMap<object, string>\n) {\n return (val1: T, val2: T): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = 
orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n\nfunction accumulateChanges<T>(\n acc: Map<unknown, Changes<T>>,\n [[key, tupleData], multiplicity]: [\n [unknown, [any, string | undefined]],\n number,\n ]\n) {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = tupleData as [T, string | undefined]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return 
acc\n}\n"],"names":[],"mappings":";;;;AA0BA,IAAI,6BAA6B;AAM1B,MAAM,wBAGX;AAAA,EAsCA,YACmB,QACjB;AADiB,SAAA,SAAA;AAhCnB,SAAiB,iCAAiB,QAAA;AAGlC,SAAiB,qCAAqB,QAAA;AAItC,SAAQ,iBAAiB;AAGzB,SAAQ,iBAAiB;AAazB,SAAS,gBAAwD,CAAA;AAEjE,SAAA,2BAAoE,CAAA;AAEpE,SAAS,sCAAsB,IAAA;AAE/B,SAAA,gCAAyE,CAAA;AAMvE,SAAK,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAEjE,SAAK,QAAQ,qBAAqB,MAAM;AACxC,SAAK,cAAc,4BAA4B,KAAK,KAAK;AAGzD,QAAI,KAAK,MAAM,WAAW,KAAK,MAAM,QAAQ,SAAS,GAAG;AACvD,WAAK,UAAU,wBAAiC,KAAK,cAAc;AAAA,IACrE;AAIA,SAAK,oBAAA;AAAA,EACP;AAAA,EAEA,YAAsD;AACpD,WAAO;AAAA,MACL,IAAI,KAAK;AAAA,MACT,QACE,KAAK,OAAO,WACX,CAAC,SAAS,KAAK,WAAW,IAAI,IAAI;AAAA,MACrC,MAAM,KAAK,cAAA;AAAA,MACX,SAAS,KAAK;AAAA,MACd,QAAQ,KAAK,OAAO,UAAU;AAAA;AAAA,MAC9B,QAAQ,KAAK,OAAO;AAAA,MACpB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,UAAU,KAAK,OAAO;AAAA,MACtB,WAAW,KAAK,OAAO;AAAA,MACvB,cAAc,KAAK,MAAM;AAAA,IAAA;AAAA,EAE7B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,cACE,QACA,WACA,UACA;AACA,QAAI,KAAK,gBAAgB;AAIvB;AAAA,IACF;AAEA,SAAK,iBAAiB;AAEtB,QAAI;AACF,YAAM,EAAE,OAAO,OAAA,IAAW;AAG1B,UAAI,KAAK,gBAAgB;AACvB;AAAA,MACF;AAGA,UAAI,UAAU,4BAA4B;AACxC,eAAO,UAAU,MAAM,eAAe;AACpC,oBAAU,MAAM,IAAA;AAChB,qBAAA;AAAA,QACF;AAIA,YAAI,UAAU,kBAAkB,GAAG;AACjC,gBAAA;AACA,iBAAA;AAGA,eAAK,sBAAsB,MAAM;AAAA,QACnC;AAAA,MACF;AAAA,IACF,UAAA;AACE,WAAK,iBAAiB;AAAA,IACxB;AAAA,EACF;AAAA,EAEQ,gBAAqC;AAC3C,WAAO;AAAA,MACL,eAAe;AAAA,MACf,MAAM,KAAK,OAAO,KAAK,IAAI;AAAA,IAAA;AAAA,EAE/B;AAAA,EAEQ,OAAO,QAA8B;AAE3C,SAAK,sBAAsB,OAAO;AAElC,UAAM,YAAuB;AAAA,MAC3B,eAAe;AAAA,MACf,4BAA4B;AAAA,MAC5B,0CAA0B,IAAA;AAAA,IAAgB;AAI5C,UAAM,gBAAgB,KAAK;AAAA,MACzB;AAAA,MACA;AAAA,IAAA;AAGF,UAAM,wBAAwB,KAAK;AAAA,MACjC;AAAA,MACA;AAAA,IAAA;AAIF,SAAK,cAAc,QAAQ,eAAe,qBAAqB;AAG/D,WAAO,MAAM;AACX,gBAAU,qBAAqB,QAAQ,CAAC,gBAAgB,aAAa;AAIrE,WAAK,aAAa;AAClB,WAAK,cAAc;AACnB,WAAK,gBAAgB;AACrB,WAAK,8BAA8B;AAGnC,WAAK,gBAAgB,MAAA;AACrB,WAAK,gCAAgC,CAAA;AACrC,WAAK,2BAA2B,CAAA;AAAA,IAClC;AAAA,EACF;AAAA,EAEQ,sBAAsB;AAC5B,SAAK,aAAa,IAAI,GAAA;AACtB,SAAK,cAAc,OAAO;AAAA,MACxB,OAAO,QAAQ,KAAK,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QAC9C;AAAA,QACA,KAAK,WAAY,SAAA;AAAA,MAAc,CAChC;AAAA,IAAA;AAIH,UAAM;AAAA,MACJ,UAAU;AAAA,MACV,wBAAwB;AAAA,IAAA,IACtB;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IAAA;AAGP,SAAK,gBAAgB;AACrB,SAAK,8BAA8B;AAAA,EACrC;AAAA,EAEQ,2BAA2B;AACjC,QAAI,CAAC,KAAK,cAAc,CAAC,KAAK,eAAe,CAAC,KAAK,eAAe;AAChE,WAAK,oBAAA;AAAA,IACP;AACA,WAAO;AAAA,MACL,OAAO,KAAK;AAAA,MACZ,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK;AAAA,IAAA;AAAA,EAEnB;AAAA,EAEQ,mCACN,QACA,WACe;AACf,UAAM,EAAE,OAAO,OAAA,IAAW;AAC1B,UAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,KAAK,yBAAA;AAEzC,aAAS;AAAA,MACP,OAAO,CAAC,SAAS;AACf,cAAM,WAAW,KAAK,SAAA;AACtB,kBAAU,iBAAiB,SAAS;AAEpC,cAAA;AACA,iBACG;AAAA,UACC;AAAA,8BACI,IAAA;AAAA,QAA+B,EAEpC,QAAQ,KAAK,aAAa,KAAK,MAAM,MAAM,CAAC;AAC/C,eAAA;AAAA,MACF,CAAC;AAAA,IAAA;AAGH,UAAM,SAAA;AAGN,cAAU,QAAQ;AAClB,cAAU,SAAS;AACnB,cAAU,WAAW;AAErB,WAAO;AAAA,EACT;AAAA,EAEQ,aACN,QACA,SAMA,KACA;AACA,UAAM,EAAE,OAAO,WAAA,IAAe;AAC9B,UAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,SAAK,WAAW,IAAI,OAAO,GAAG;AAG9B,QAAI,iBAAiB,QAAW;AAC9B,WAAK,eAAe,IAAI,OAAO,YAAY;AAAA,IAC7C;AAGA,QAAI,WAAW,YAAY,GAAG;AAC5B,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH;AAAA;AAAA,MAEE,UAAU;AAAA;AAAA,MAGT,YAAY,WAAW,WAAW,IAAI,WAAW,eAAe,KAAK,CAAC;AAAA,MACvE;AACA,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IAEH,WAAW,UAAU,GAAG;AACtB,YAAM;AAAA,QACJ;AAAA,QACA,MAAM;AAAA,MAAA,CACP;AAAA,IACH,OAAO;AACL,YAAM,IAAI;AAAA,QACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,MAAA;AAAA,IAEvD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBACN,QACA,cACA,OACA;AACA,UAAM,EAA
E,WAAW;AAGnB,QAAI,WAAW,SAAS;AACtB,WAAK;AAAA,QACH,sBAAsB,YAAY;AAAA,MAAA;AAEpC;AAAA,IACF;AAIA,QAAI,WAAW,cAAc;AAC3B,WAAK;AAAA,QACH,sBAAsB,YAAY,+CAA+C,KAAK,EAAE;AAAA,MAAA;AAG1F;AAAA,IACF;AAGA,SAAK,sBAAsB,MAAM;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKQ,sBAAsB,QAA8B;AAC1D,UAAM,EAAE,cAAc;AAGtB,QAAI,KAAK,gBAAgB;AACvB;AAAA,IACF;AAGA,QAAI,KAAK,uBAAuB;AAC9B,gBAAA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,SAAiB;AACzC,SAAK,iBAAiB;AAGtB,YAAQ,MAAM,sBAAsB,OAAO,EAAE;AAG7C,SAAK,qBAAqB,WAAW,UAAU,OAAO;AAAA,EACxD;AAAA,EAEQ,sBAAsB;AAC5B,WAAO,OAAO,OAAO,KAAK,WAAW,EAAE;AAAA,MAAM,CAAC,eAC5C,WAAW,QAAA;AAAA,IAAQ;AAAA,EAEvB;AAAA,EAEQ,0BACN,QACA,WACA;AACA,UAAM,UAAU,OAAO,QAAQ,KAAK,WAAW,EAAE;AAAA,MAC/C,CAAC,CAAC,cAAc,UAAU,MAAM;AAC9B,cAAM,uBAAuB,IAAI;AAAA,UAC/B;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA;AAGF,cAAM,eAAe,qBAAqB,UAAA;AAC1C,aAAK,cAAc,YAAY,IAAI;AAGnC,cAAM,oBAAoB,WAAW,GAAG,iBAAiB,CAAC,UAAU;AAClE,eAAK,yBAAyB,QAAQ,cAAc,KAAK;AAAA,QAC3D,CAAC;AACD,kBAAU,qBAAqB,IAAI,iBAAiB;AAEpD,cAAM,WAAW,qBAAqB,iBAAiB;AAAA,UACrD;AAAA,UACA;AAAA,QAAA;AAGF,eAAO;AAAA,MACT;AAAA,IAAA;AAGF,UAAM,uBAAuB,MAAM;AACjC,cAAQ,IAAI,CAAC,WAAW,OAAA,CAAQ;AAChC,aAAO;AAAA,IACT;AAGA,cAAU,6BAA6B;AAGvC,SAAK,sBAAsB,MAAM;AAEjC,WAAO;AAAA,EACT;AACF;AAEA,SAAS,qBACP,QACA;AAEA,MAAI,OAAO,OAAO,UAAU,YAAY;AACtC,WAAO,WAAqB,OAAO,KAAK;AAAA,EAC1C;AACA,SAAO,WAAW,OAAO,KAAK;AAChC;AAEA,SAAS,wBACP,gBACA;AACA,SAAO,CAAC,MAAS,SAAoB;AAEnC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AACF;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;AAEA,SAAS,kBACP,KACA,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,GAI/B;AAGA,QAAM,CAAC,OAAO,YAAY,IAAI;AAE9B,QAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,IAC9B,SAAS;AAAA,IACT,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EAAA;AAEF,MAAI,eAAe,GAAG;AACpB,YAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,EAC1C,WAAW,eAAe,GAAG;AAC3B,YAAQ,WAAW;AACnB,YAAQ,QAAQ;AAChB,YAAQ,eAAe;AAAA,EACzB;AACA,MAAI,IAAI,KAAK,OAAO;AACpB,SAAO;AACT;"}
@@ -1 +1 @@
-
{"version":3,"file":"collection-subscriber.js","sources":["../../../../src/query/live/collection-subscriber.ts"],"sourcesContent":["import { MultiSet } from \"@tanstack/db-ivm\"\nimport {\n convertOrderByToBasicExpression,\n convertToBasicExpression,\n} from \"../compiler/expressions.js\"\nimport type { FullSyncState } from \"./types.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { Collection } from \"../../collection/index.js\"\nimport type { ChangeMessage, SyncConfig } from \"../../types.js\"\nimport type { Context, GetResult } from \"../builder/types.js\"\nimport type { BasicExpression } from \"../ir.js\"\nimport type { CollectionConfigBuilder } from \"./collection-config-builder.js\"\nimport type { CollectionSubscription } from \"../../collection/subscription.js\"\n\nexport class CollectionSubscriber<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n> {\n // Keep track of the biggest value we've sent so far (needed for orderBy optimization)\n private biggest: any = undefined\n\n private collectionAlias: string\n\n constructor(\n private collectionId: string,\n private collection: Collection,\n private config: Parameters<SyncConfig<TResult>[`sync`]>[0],\n private syncState: FullSyncState,\n private collectionConfigBuilder: CollectionConfigBuilder<TContext, TResult>\n ) {\n this.collectionAlias = findCollectionAlias(\n this.collectionId,\n this.collectionConfigBuilder.query\n )!\n }\n\n subscribe(): CollectionSubscription {\n const whereClause = this.getWhereClauseFromAlias(this.collectionAlias)\n\n if (whereClause) {\n // Convert WHERE clause to BasicExpression format for collection subscription\n const whereExpression = convertToBasicExpression(\n whereClause,\n this.collectionAlias\n )\n\n if (whereExpression) {\n // Use index optimization for this collection\n return this.subscribeToChanges(whereExpression)\n } else {\n // This should not happen - if we have a whereClause but can't create whereExpression,\n // it indicates a bug in our optimization logic\n throw new Error(\n `Failed to convert WHERE clause to collection filter for collection '${this.collectionId}'. 
` +\n `This indicates a bug in the query optimization logic.`\n )\n }\n } else {\n // No WHERE clause for this collection, use regular subscription\n return this.subscribeToChanges()\n }\n }\n\n private subscribeToChanges(whereExpression?: BasicExpression<boolean>) {\n let subscription: CollectionSubscription\n if (\n Object.hasOwn(\n this.collectionConfigBuilder.optimizableOrderByCollections,\n this.collectionId\n )\n ) {\n subscription = this.subscribeToOrderedChanges(whereExpression)\n } else {\n // If the collection is lazy then we should not include the initial state\n const includeInitialState =\n !this.collectionConfigBuilder.lazyCollections.has(this.collectionId)\n\n subscription = this.subscribeToMatchingChanges(\n whereExpression,\n includeInitialState\n )\n }\n const unsubscribe = () => {\n subscription.unsubscribe()\n }\n this.syncState.unsubscribeCallbacks.add(unsubscribe)\n return subscription\n }\n\n private sendChangesToPipeline(\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean\n ) {\n const input = this.syncState.inputs[this.collectionId]!\n const sentChanges = sendChangesToInput(\n input,\n changes,\n this.collection.config.getKey\n )\n\n // Do not provide the callback that loads more data\n // if there's no more data to load\n // otherwise we end up in an infinite loop trying to load more data\n const dataLoader = sentChanges > 0 ? callback : undefined\n\n // We need to call `maybeRunGraph` even if there's no data to load\n // because we need to mark the collection as ready if it's not already\n // and that's only done in `maybeRunGraph`\n this.collectionConfigBuilder.maybeRunGraph(\n this.config,\n this.syncState,\n dataLoader\n )\n }\n\n private subscribeToMatchingChanges(\n whereExpression: BasicExpression<boolean> | undefined,\n includeInitialState: boolean = false\n ) {\n const sendChanges = (\n changes: Array<ChangeMessage<any, string | number>>\n ) => {\n this.sendChangesToPipeline(changes)\n }\n\n const subscription = this.collection.subscribeChanges(sendChanges, {\n includeInitialState,\n whereExpression,\n })\n\n return subscription\n }\n\n private subscribeToOrderedChanges(\n whereExpression: BasicExpression<boolean> | undefined\n ) {\n const { orderBy, offset, limit, comparator, dataNeeded, index } =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]!\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n // and filter out changes that are bigger than the biggest value we've sent so far\n // because they can't affect the topK (and if later we need more data, we will dynamically load more data)\n const splittedChanges = splitUpdates(changes)\n let filteredChanges = splittedChanges\n if (dataNeeded!() === 0) {\n // If the topK is full [..., maxSentValue] then we do not need to send changes > maxSentValue\n // because they can never make it into the topK.\n // However, if the topK isn't full yet, we need to also send changes > maxSentValue\n // because they will make it into the topK\n filteredChanges = filterChangesSmallerOrEqualToMax(\n splittedChanges,\n comparator,\n this.biggest\n )\n }\n\n this.sendChangesToPipelineWithTracking(filteredChanges, subscription)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n 
const subscription = this.collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n subscription.setOrderByIndex(index)\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = convertOrderByToBasicExpression(\n orderBy,\n this.collectionAlias\n )\n\n // Load the first `offset + limit` values from the index\n // i.e. the K items from the collection that fall into the requested range: [offset, offset + limit[\n subscription.requestLimitedSnapshot({\n limit: offset + limit,\n orderBy: normalizedOrderBy,\n })\n\n return subscription\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n loadMoreIfNeeded(subscription: CollectionSubscription) {\n const orderByInfo =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]\n\n if (!orderByInfo) {\n // This query has no orderBy operator\n // so there's no data to load\n return true\n }\n\n const { dataNeeded } = orderByInfo\n\n if (!dataNeeded) {\n // This should never happen because the topK operator should always set the size callback\n // which in turn should lead to the orderBy operator setting the dataNeeded callback\n throw new Error(\n `Missing dataNeeded callback for collection ${this.collectionId}`\n )\n }\n\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n this.loadNextItems(n, subscription)\n }\n return true\n }\n\n private sendChangesToPipelineWithTracking(\n changes: Iterable<ChangeMessage<any, string | number>>,\n subscription: CollectionSubscription\n ) {\n const { comparator } =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]!\n const trackedChanges = this.trackSentValues(changes, comparator)\n this.sendChangesToPipeline(\n trackedChanges,\n this.loadMoreIfNeeded.bind(this, subscription)\n )\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n private loadNextItems(n: number, subscription: CollectionSubscription) {\n const { orderBy, valueExtractorForRawRow } =\n this.collectionConfigBuilder.optimizableOrderByCollections[\n this.collectionId\n ]!\n const biggestSentRow = this.biggest\n const biggestSentValue = biggestSentRow\n ? 
valueExtractorForRawRow(biggestSentRow)\n : biggestSentRow\n\n // Normalize the orderBy clauses such that the references are relative to the collection\n const normalizedOrderBy = convertOrderByToBasicExpression(\n orderBy,\n this.collectionAlias\n )\n\n // Take the `n` items after the biggest sent value\n subscription.requestLimitedSnapshot({\n orderBy: normalizedOrderBy,\n limit: n,\n minValue: biggestSentValue,\n })\n }\n\n private getWhereClauseFromAlias(\n collectionAlias: string | undefined\n ): BasicExpression<boolean> | undefined {\n const collectionWhereClausesCache =\n this.collectionConfigBuilder.collectionWhereClausesCache\n if (collectionAlias && collectionWhereClausesCache) {\n return collectionWhereClausesCache.get(collectionAlias)\n }\n return undefined\n }\n\n private *trackSentValues(\n changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number\n ) {\n for (const change of changes) {\n if (!this.biggest) {\n this.biggest = change.value\n } else if (comparator(this.biggest, change.value) < 0) {\n this.biggest = change.value\n }\n\n yield change\n }\n }\n}\n\n/**\n * Finds the alias for a collection ID in the query\n */\nfunction findCollectionAlias(\n collectionId: string,\n query: any\n): string | undefined {\n // Check FROM clause\n if (\n query.from?.type === `collectionRef` &&\n query.from.collection?.id === collectionId\n ) {\n return query.from.alias\n }\n\n // Check JOIN clauses\n if (query.join) {\n for (const joinClause of query.join) {\n if (\n joinClause.from?.type === `collectionRef` &&\n joinClause.from.collection?.id === collectionId\n ) {\n return joinClause.from.alias\n }\n }\n }\n\n return undefined\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n): number {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n\n if (multiSetArray.length !== 0) {\n input.sendData(new MultiSet(multiSetArray))\n }\n\n return multiSetArray.length\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! 
(remainder of regenerated source map diff omitted: minified mappings and embedded sources only, no reviewable changes)
package/dist/esm/types.d.ts
CHANGED
@@ -188,17 +188,15 @@ export type DeleteMutationFn<T extends object = Record<string, unknown>, TKey ex
   *
   * @example
   * // Status transitions
-  * // idle → loading →
+  * // idle → loading → ready (when markReady() is called)
   * // Any status can transition to → error or cleaned-up
   */
  export type CollectionStatus =
    /** Collection is created but sync hasn't started yet (when startSync config is false) */
    `idle`
-   /** Sync has started
+   /** Sync has started and is loading data */
    | `loading`
-   /** Collection
-   | `initialCommit`
-   /** Collection has received at least one commit and is ready for use */
+   /** Collection has been explicitly marked ready via markReady() */
    | `ready`
    /** An error occurred during sync initialization */
    | `error`
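The net effect of this change is a smaller status union: `initialCommit` is gone and `loading` now moves directly to `ready`. A hedged sketch of consumer code against the new union (the type alias mirrors the diff above, with `cleaned-up` implied by the transition comment; `describeStatus` is a hypothetical helper, not part of the package):

// Illustrative only: mirrors the CollectionStatus union after this change.
type CollectionStatus = `idle` | `loading` | `ready` | `error` | `cleaned-up`

// Hypothetical consumer helper: a switch over the union no longer needs an
// `initialCommit` case, and exhaustiveness checks will flag code that still has one.
function describeStatus(status: CollectionStatus): string {
  switch (status) {
    case `idle`:
      return `sync not started yet`
    case `loading`:
      return `sync started, waiting for markReady()`
    case `ready`:
      return `collection marked ready`
    case `error`:
      return `sync initialization failed`
    case `cleaned-up`:
      return `collection resources released`
  }
}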
package/package.json
CHANGED
package/src/collection/index.ts
CHANGED
@@ -217,7 +217,7 @@ export class CollectionImpl<
   // Managers
   private _events: CollectionEventsManager
   private _changes: CollectionChangesManager<TOutput, TKey, TSchema, TInput>
-
+  public _lifecycle: CollectionLifecycleManager<TOutput, TKey, TSchema, TInput>
   private _sync: CollectionSyncManager<TOutput, TKey, TSchema, TInput>
   private _indexes: CollectionIndexesManager<TOutput, TKey, TSchema, TInput>
   private _mutations: CollectionMutationsManager<
package/src/collection/lifecycle.ts
CHANGED
@@ -75,8 +75,7 @@ export class CollectionLifecycleManager<
     Array<CollectionStatus>
   > = {
     idle: [`loading`, `error`, `cleaned-up`],
-    loading: [`
-    initialCommit: [`ready`, `error`, `cleaned-up`],
+    loading: [`ready`, `error`, `cleaned-up`],
     ready: [`cleaned-up`, `error`],
     error: [`cleaned-up`, `idle`],
     "cleaned-up": [`loading`, `error`],
@@ -145,8 +144,8 @@ export class CollectionLifecycleManager<
    */
   public markReady(): void {
     this.validateStatusTransition(this.status, `ready`)
-    // Can transition to ready from loading
-    if (this.status === `loading`
+    // Can transition to ready from loading state
+    if (this.status === `loading`) {
       this.setStatus(`ready`, true)

       // Call any registered first ready callbacks (only on first time becoming ready)
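Dropping `initialCommit` also simplifies the transition table: `loading` is now allowed to move straight to `ready`. A small sketch of how such a table can be checked (the map copies the values shown above; `canTransition` is an illustrative helper, not an export of the package):

type CollectionStatus = `idle` | `loading` | `ready` | `error` | `cleaned-up`

// Copied from the validTransitions map in the hunk above.
const validTransitions: Record<CollectionStatus, Array<CollectionStatus>> = {
  idle: [`loading`, `error`, `cleaned-up`],
  loading: [`ready`, `error`, `cleaned-up`],
  ready: [`cleaned-up`, `error`],
  error: [`cleaned-up`, `idle`],
  "cleaned-up": [`loading`, `error`],
}

// Hypothetical guard in the spirit of the validateStatusTransition call in markReady().
function canTransition(from: CollectionStatus, to: CollectionStatus): boolean {
  return validTransitions[from].includes(to)
}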
package/src/collection/mutations.ts
CHANGED
@@ -230,7 +230,8 @@ export class CollectionMutationsManager<

     // Apply mutations to the new transaction
     directOpTransaction.applyMutations(mutations)
-
+    // Errors still reject tx.isPersisted.promise; this catch only prevents global unhandled rejections
+    directOpTransaction.commit().catch(() => undefined)

     // Add the transaction to the collection's transactions store
     state.transactions.set(directOpTransaction.id, directOpTransaction)
@@ -387,7 +388,8 @@ export class CollectionMutationsManager<
     const emptyTransaction = createTransaction({
       mutationFn: async () => {},
     })
-
+    // Errors still propagate through tx.isPersisted.promise; suppress the background commit from warning
+    emptyTransaction.commit().catch(() => undefined)
     // Schedule cleanup for empty transaction
     state.scheduleTransactionCleanup(emptyTransaction)
     return emptyTransaction
@@ -423,7 +425,8 @@ export class CollectionMutationsManager<

     // Apply mutations to the new transaction
     directOpTransaction.applyMutations(mutations)
-
+    // Errors still hit tx.isPersisted.promise; avoid leaking an unhandled rejection from the fire-and-forget commit
+    directOpTransaction.commit().catch(() => undefined)

     // Add the transaction to the collection's transactions store

@@ -524,7 +527,8 @@ export class CollectionMutationsManager<

     // Apply mutations to the new transaction
     directOpTransaction.applyMutations(mutations)
-
+    // Errors still reject tx.isPersisted.promise; silence the internal commit promise to prevent test noise
+    directOpTransaction.commit().catch(() => undefined)

     state.transactions.set(directOpTransaction.id, directOpTransaction)
     state.scheduleTransactionCleanup(directOpTransaction)
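All four mutation paths now share the same pattern: the commit is started in the background and only the background rejection is swallowed, while failures remain observable on the transaction's persistence promise. A generic sketch of why the empty catch is safe (DemoTransaction below is an illustration, not the package's Transaction class):

// Minimal stand-in for a transaction that reports errors on a separate promise.
class DemoTransaction {
  private resolvePersisted!: () => void
  private rejectPersisted!: (err: unknown) => void

  // Callers that care about failures await this promise.
  readonly isPersisted = {
    promise: new Promise<void>((resolve, reject) => {
      this.resolvePersisted = resolve
      this.rejectPersisted = reject
    }),
  }

  async commit(): Promise<void> {
    try {
      await persistSomewhere() // hypothetical async persistence step
      this.resolvePersisted()
    } catch (err) {
      this.rejectPersisted(err) // error still surfaces via isPersisted.promise
      throw err // and commit() itself still rejects
    }
  }
}

async function persistSomewhere(): Promise<void> {
  // pretend to write to a backend
}

const tx = new DemoTransaction()
// Fire-and-forget: the catch below only prevents an unhandled-rejection warning
// for the background call; tx.isPersisted.promise still rejects on failure.
tx.commit().catch(() => undefined)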
package/src/collection/state.ts
CHANGED
@@ -12,11 +12,18 @@ import type { CollectionLifecycleManager } from "./lifecycle"
 import type { CollectionChangesManager } from "./changes"
 import type { CollectionIndexesManager } from "./indexes"

-interface PendingSyncedTransaction<
+interface PendingSyncedTransaction<
+  T extends object = Record<string, unknown>,
+  TKey extends string | number = string | number,
+> {
   committed: boolean
   operations: Array<OptimisticChangeMessage<T>>
   truncate?: boolean
   deletedKeys: Set<string | number>
+  optimisticSnapshot?: {
+    upserts: Map<TKey, T>
+    deletes: Set<TKey>
+  }
 }

 export class CollectionStateManager<
@@ -33,8 +40,9 @@ export class CollectionStateManager<

   // Core state - make public for testing
   public transactions: SortedMap<string, Transaction<any>>
-  public pendingSyncedTransactions: Array<
-
+  public pendingSyncedTransactions: Array<
+    PendingSyncedTransaction<TOutput, TKey>
+  > = []
   public syncedData: Map<TKey, TOutput> | SortedMap<TKey, TOutput>
   public syncedMetadata = new Map<TKey, unknown>()

@@ -442,10 +450,10 @@
       },
       {
         committedSyncedTransactions: [] as Array<
-          PendingSyncedTransaction<TOutput>
+          PendingSyncedTransaction<TOutput, TKey>
         >,
         uncommittedSyncedTransactions: [] as Array<
-          PendingSyncedTransaction<TOutput>
+          PendingSyncedTransaction<TOutput, TKey>
         >,
         hasTruncateSync: false,
       }
@@ -455,6 +463,12 @@
     // Set flag to prevent redundant optimistic state recalculations
     this.isCommittingSyncTransactions = true

+    // Get the optimistic snapshot from the truncate transaction (captured when truncate() was called)
+    const truncateOptimisticSnapshot = hasTruncateSync
+      ? committedSyncedTransactions.find((t) => t.truncate)
+          ?.optimisticSnapshot
+      : null
+
     // First collect all keys that will be affected by sync operations
     const changedKeys = new Set<TKey>()
     for (const transaction of committedSyncedTransactions) {
@@ -484,13 +498,19 @@
       // Handle truncate operations first
       if (transaction.truncate) {
         // TRUNCATE PHASE
-        // 1) Emit a delete for every
+        // 1) Emit a delete for every visible key (synced + optimistic) so downstream listeners/indexes
         // observe a clear-before-rebuild. We intentionally skip keys already in
         // optimisticDeletes because their delete was previously emitted by the user.
-        for
-
+        // Use the snapshot to ensure we emit deletes for all items that existed at truncate start.
+        const visibleKeys = new Set([
+          ...this.syncedData.keys(),
+          ...(truncateOptimisticSnapshot?.upserts.keys() || []),
+        ])
+        for (const key of visibleKeys) {
+          if (truncateOptimisticSnapshot?.deletes.has(key)) continue
           const previousValue =
-
+            truncateOptimisticSnapshot?.upserts.get(key) ||
+            this.syncedData.get(key)
           if (previousValue !== undefined) {
             events.push({ type: `delete`, key, value: previousValue })
           }
@@ -574,41 +594,14 @@
         }
       }

-      // Build re-apply sets from
-      //
-      const reapplyUpserts = new Map<TKey, TOutput>(
-
-
-
-
-
-          if (
-            !this.isThisCollection(mutation.collection) ||
-            !mutation.optimistic
-          )
-            continue
-          const key = mutation.key as TKey
-          switch (mutation.type) {
-            case `insert`:
-              reapplyUpserts.set(key, mutation.modified as TOutput)
-              reapplyDeletes.delete(key)
-              break
-            case `update`: {
-              const base = this.syncedData.get(key)
-              const next = base
-                ? (Object.assign({}, base, mutation.changes) as TOutput)
-                : (mutation.modified as TOutput)
-              reapplyUpserts.set(key, next)
-              reapplyDeletes.delete(key)
-              break
-            }
-            case `delete`:
-              reapplyUpserts.delete(key)
-              reapplyDeletes.add(key)
-              break
-          }
-        }
-      }
+      // Build re-apply sets from the snapshot taken at the start of this function.
+      // This prevents losing optimistic state if transactions complete during truncate processing.
+      const reapplyUpserts = new Map<TKey, TOutput>(
+        truncateOptimisticSnapshot!.upserts
+      )
+      const reapplyDeletes = new Set<TKey>(
+        truncateOptimisticSnapshot!.deletes
+      )

       // Emit inserts for re-applied upserts, skipping any keys that have an optimistic delete.
       // If the server also inserted/updated the same key in this batch, override that value
@@ -660,6 +653,20 @@

     // Reset flag and recompute optimistic state for any remaining active transactions
     this.isCommittingSyncTransactions = false
+
+    // If we had a truncate, restore the preserved optimistic state from the snapshot
+    // This includes items from transactions that may have completed during processing
+    if (hasTruncateSync && truncateOptimisticSnapshot) {
+      for (const [key, value] of truncateOptimisticSnapshot.upserts) {
+        this.optimisticUpserts.set(key, value)
+      }
+      for (const key of truncateOptimisticSnapshot.deletes) {
+        this.optimisticDeletes.add(key)
+      }
+    }
+
+    // Always overlay any still-active optimistic transactions so mutations that started
+    // after the truncate snapshot are preserved.
     for (const transaction of this.transactions.values()) {
       if (![`completed`, `failed`].includes(transaction.state)) {
         for (const mutation of transaction.mutations) {
@@ -785,12 +792,9 @@
       this.recentlySyncedKeys.clear()
     })

-    //
+    // Mark that we've received the first commit (for tracking purposes)
     if (!this.hasReceivedFirstCommit) {
       this.hasReceivedFirstCommit = true
-      const callbacks = [...this.lifecycle.onFirstReadyCallbacks]
-      this.lifecycle.onFirstReadyCallbacks = []
-      callbacks.forEach((callback) => callback())
     }
   }
 }
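Taken together, the state.ts changes implement one idea: snapshot the optimistic overlay when a truncate arrives, clear everything that was visible at that point, and layer the snapshot back over whatever the server writes next, so optimistic edits survive even if their transactions settle mid-rebuild. A condensed, self-contained sketch of that overlay model (types and function names here are illustrative, not the internal API):

type Key = string | number

interface Snapshot<T> {
  upserts: Map<Key, T>
  deletes: Set<Key>
}

// Taken the moment truncate() is called, before any pending transactions settle.
function captureSnapshot<T>(
  optimisticUpserts: Map<Key, T>,
  optimisticDeletes: Set<Key>
): Snapshot<T> {
  return {
    upserts: new Map(optimisticUpserts),
    deletes: new Set(optimisticDeletes),
  }
}

// After the truncate commits: the server rows form the new base, and the
// snapshot is layered back on top so optimistic edits survive the rebuild.
function rebuildAfterTruncate<T>(
  serverRows: Map<Key, T>,
  snapshot: Snapshot<T>
): Map<Key, T> {
  const visible = new Map(serverRows)
  for (const [key, value] of snapshot.upserts) visible.set(key, value)
  for (const key of snapshot.deletes) visible.delete(key)
  return visible
}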
package/src/collection/sync.ts
CHANGED
@@ -148,12 +148,6 @@ export class CollectionSyncManager<

         pendingTransaction.committed = true

-        // Update status to initialCommit when transitioning from loading
-        // This indicates we're in the process of committing the first transaction
-        if (this.lifecycle.status === `loading`) {
-          this.lifecycle.setStatus(`initialCommit`)
-        }
-
         this.state.commitPendingTransactions()
       },
       markReady: () => {
@@ -181,6 +175,13 @@
         // - Subsequent synced ops applied on the fresh base
         // - Finally, optimistic mutations re-applied on top (single batch)
         pendingTransaction.truncate = true
+
+        // Capture optimistic state NOW to preserve it even if transactions complete
+        // before this truncate transaction is committed
+        pendingTransaction.optimisticSnapshot = {
+          upserts: new Map(this.state.optimisticUpserts),
+          deletes: new Set(this.state.optimisticDeletes),
+        }
       },
     })
   )
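From a sync implementor's point of view the change is invisible: calling truncate() inside a sync batch still clears the collection, but the optimistic overlay that existed at that moment is now captured on the pending transaction and re-applied automatically. A hypothetical sync handler for illustration; the begin/commit/markReady/truncate hooks appear in this diff, while `write` and its message shape are assumed:

// Hypothetical sync function; parameter shapes are illustrative, not the package's exact types.
const sync = ({ begin, write, truncate, commit, markReady }: any) => {
  async function refetchAll() {
    const rows: Array<{ id: string; title: string }> = await fetchRowsSomehow()
    begin()
    truncate() // the optimistic overlay is snapshotted here (see sync.ts above)
    for (const row of rows) {
      write({ type: `insert`, value: row })
    }
    commit() // state.ts re-applies the snapshot on top of the fresh base
    markReady() // loading → ready (no initialCommit step any more)
  }
  void refetchAll()
}

// Hypothetical fetch used only by the sketch above.
async function fetchRowsSomehow(): Promise<Array<{ id: string; title: string }>> {
  return []
}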
package/src/indexes/auto-index.ts
CHANGED
@@ -14,14 +14,6 @@ function shouldAutoIndex(collection: CollectionImpl<any, any, any, any, any>) {
     return false
   }

-  // Don't auto-index during sync operations
-  if (
-    collection.status === `loading` ||
-    collection.status === `initialCommit`
-  ) {
-    return false
-  }
-
   return true
 }
