@tanstack/db 0.4.8 → 0.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/errors.cjs +51 -17
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +38 -8
- package/dist/cjs/index.cjs +8 -4
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/query/builder/types.d.cts +1 -1
- package/dist/cjs/query/compiler/index.cjs +42 -19
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.d.cts +33 -8
- package/dist/cjs/query/compiler/joins.cjs +88 -66
- package/dist/cjs/query/compiler/joins.cjs.map +1 -1
- package/dist/cjs/query/compiler/joins.d.cts +5 -2
- package/dist/cjs/query/compiler/order-by.cjs +2 -0
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.d.cts +1 -0
- package/dist/cjs/query/compiler/select.cjs.map +1 -1
- package/dist/cjs/query/live/collection-config-builder.cjs +276 -42
- package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
- package/dist/cjs/query/live/collection-config-builder.d.cts +84 -8
- package/dist/cjs/query/live/collection-registry.cjs +16 -0
- package/dist/cjs/query/live/collection-registry.cjs.map +1 -0
- package/dist/cjs/query/live/collection-registry.d.cts +26 -0
- package/dist/cjs/query/live/collection-subscriber.cjs +57 -58
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.d.cts +4 -7
- package/dist/cjs/query/live-query-collection.cjs +11 -5
- package/dist/cjs/query/live-query-collection.cjs.map +1 -1
- package/dist/cjs/query/live-query-collection.d.cts +10 -3
- package/dist/cjs/query/optimizer.cjs +44 -7
- package/dist/cjs/query/optimizer.cjs.map +1 -1
- package/dist/cjs/query/optimizer.d.cts +4 -4
- package/dist/cjs/scheduler.cjs +137 -0
- package/dist/cjs/scheduler.cjs.map +1 -0
- package/dist/cjs/scheduler.d.cts +56 -0
- package/dist/cjs/transactions.cjs +7 -1
- package/dist/cjs/transactions.cjs.map +1 -1
- package/dist/esm/errors.d.ts +38 -8
- package/dist/esm/errors.js +52 -18
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.js +9 -5
- package/dist/esm/query/builder/types.d.ts +1 -1
- package/dist/esm/query/compiler/index.d.ts +33 -8
- package/dist/esm/query/compiler/index.js +42 -19
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/joins.d.ts +5 -2
- package/dist/esm/query/compiler/joins.js +90 -68
- package/dist/esm/query/compiler/joins.js.map +1 -1
- package/dist/esm/query/compiler/order-by.d.ts +1 -0
- package/dist/esm/query/compiler/order-by.js +2 -0
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/compiler/select.js.map +1 -1
- package/dist/esm/query/live/collection-config-builder.d.ts +84 -8
- package/dist/esm/query/live/collection-config-builder.js +276 -42
- package/dist/esm/query/live/collection-config-builder.js.map +1 -1
- package/dist/esm/query/live/collection-registry.d.ts +26 -0
- package/dist/esm/query/live/collection-registry.js +16 -0
- package/dist/esm/query/live/collection-registry.js.map +1 -0
- package/dist/esm/query/live/collection-subscriber.d.ts +4 -7
- package/dist/esm/query/live/collection-subscriber.js +57 -58
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/dist/esm/query/live-query-collection.d.ts +10 -3
- package/dist/esm/query/live-query-collection.js +11 -5
- package/dist/esm/query/live-query-collection.js.map +1 -1
- package/dist/esm/query/optimizer.d.ts +4 -4
- package/dist/esm/query/optimizer.js +44 -7
- package/dist/esm/query/optimizer.js.map +1 -1
- package/dist/esm/scheduler.d.ts +56 -0
- package/dist/esm/scheduler.js +137 -0
- package/dist/esm/scheduler.js.map +1 -0
- package/dist/esm/transactions.js +7 -1
- package/dist/esm/transactions.js.map +1 -1
- package/package.json +2 -2
- package/src/errors.ts +79 -13
- package/src/query/builder/types.ts +1 -1
- package/src/query/compiler/index.ts +115 -32
- package/src/query/compiler/joins.ts +180 -127
- package/src/query/compiler/order-by.ts +7 -0
- package/src/query/compiler/select.ts +2 -3
- package/src/query/live/collection-config-builder.ts +450 -58
- package/src/query/live/collection-registry.ts +47 -0
- package/src/query/live/collection-subscriber.ts +88 -106
- package/src/query/live-query-collection.ts +39 -14
- package/src/query/optimizer.ts +85 -15
- package/src/scheduler.ts +198 -0
- package/src/transactions.ts +12 -1
package/dist/esm/transactions.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"transactions.js","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from \"./errors\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Merges two pending mutations for the same item within a transaction\n *\n * Merge behavior truth table:\n * - (insert, update) → insert (merge changes, keep empty original)\n * - (insert, delete) → null (cancel both mutations)\n * - (update, delete) → delete (delete dominates)\n * - (update, update) → update (replace with latest, union changes)\n * - (delete, delete) → delete (replace with latest)\n * - (insert, insert) → insert (replace with latest)\n *\n * Note: (delete, update) and (delete, insert) should never occur as the collection\n * layer prevents operations on deleted items within the same transaction.\n *\n * @param existing - The existing mutation in the transaction\n * @param incoming - The new mutation being applied\n * @returns The merged mutation, or null if both should be removed\n */\nfunction mergePendingMutations<T extends object>(\n existing: PendingMutation<T>,\n incoming: PendingMutation<T>\n): PendingMutation<T> | null {\n // Truth table implementation\n switch (`${existing.type}-${incoming.type}` as const) {\n case `insert-update`: {\n // Update after insert: keep as insert but merge changes\n // For insert-update, the key should remain the same since collections don't allow key changes\n return {\n ...existing,\n type: `insert` as const,\n original: {},\n modified: incoming.modified,\n changes: { ...existing.changes, ...incoming.changes },\n // Keep existing keys (key changes not allowed in updates)\n key: existing.key,\n globalKey: existing.globalKey,\n // Merge metadata (last-write-wins)\n metadata: incoming.metadata ?? existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n // Update tracking info\n mutationId: incoming.mutationId,\n updatedAt: incoming.updatedAt,\n }\n }\n\n case `insert-delete`:\n // Delete after insert: cancel both mutations\n return null\n\n case `update-delete`:\n // Delete after update: delete dominates\n return incoming\n\n case `update-update`: {\n // Update after update: replace with latest, union changes\n return {\n ...incoming,\n // Keep original from first update\n original: existing.original,\n // Union the changes from both updates\n changes: { ...existing.changes, ...incoming.changes },\n // Merge metadata\n metadata: incoming.metadata ?? 
existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n }\n }\n\n case `delete-delete`:\n case `insert-insert`:\n // Same type: replace with latest\n return incoming\n\n default: {\n // Exhaustiveness check\n const _exhaustive: never = `${existing.type}-${incoming.type}` as never\n throw new Error(`Unhandled mutation combination: ${_exhaustive}`)\n }\n }\n}\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? 
crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit().catch(() => {\n // Errors from autoCommit are handled via isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n }\n\n return this\n }\n\n /**\n * Apply new mutations to this transaction, intelligently merging with existing mutations\n *\n * When mutations operate on the same item (same globalKey), they are merged according to\n * the following rules:\n *\n * - **insert + update** → insert (merge changes, keep empty original)\n * - **insert + delete** → removed (mutations cancel each other out)\n * - **update + delete** → delete (delete dominates)\n * - **update + update** → update (union changes, keep first original)\n * - **same type** → replace with latest\n *\n * This merging reduces over-the-wire churn and keeps the optimistic local view\n * aligned with user intent.\n *\n * @param mutations - Array of new mutations to apply\n */\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n const existingMutation = this.mutations[existingIndex]!\n const mergeResult = mergePendingMutations(existingMutation, newMutation)\n\n if (mergeResult === null) {\n // Remove the mutation (e.g., delete after insert cancels both)\n this.mutations.splice(existingIndex, 1)\n } else {\n // Replace with merged mutation\n this.mutations[existingIndex] = mergeResult\n }\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // 
Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection._state.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection._state.pendingSyncedTransactions.length > 0) {\n mutation.collection._state.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n 
this.isPersisted.resolve(this)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Preserve the original error for rethrowing\n const originalError =\n error instanceof Error ? error : new Error(String(error))\n\n // Update transaction with error information\n this.error = {\n message: originalError.message,\n error: originalError,\n }\n\n // rollback the transaction\n this.rollback()\n\n // Re-throw the original error to preserve identity and stack\n throw originalError\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction }\n"],"names":[],"mappings":";;AAgBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAoBrB,SAAS,sBACP,UACA,UAC2B;AAE3B,UAAQ,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI,IAAA;AAAA,IACvC,KAAK,iBAAiB;AAGpB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,MAAM;AAAA,QACN,UAAU,CAAA;AAAA,QACV,UAAU,SAAS;AAAA,QACnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,KAAK,SAAS;AAAA,QACd,WAAW,SAAS;AAAA;AAAA,QAEpB,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA;AAAA,QAEtD,YAAY,SAAS;AAAA,QACrB,WAAW,SAAS;AAAA,MAAA;AAAA,IAExB;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET,KAAK;AAEH,aAAO;AAAA,IAET,KAAK,iBAAiB;AAEpB,aAAO;AAAA,QACL,GAAG;AAAA;AAAA,QAEH,UAAU,SAAS;AAAA;AAAA,QAEnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA,MAAa;AAAA,IAEvE;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAEH,aAAO;AAAA,IAET,SAAS;AAEP,YAAM,cAAqB,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI;AAC5D,YAAM,IAAI,MAAM,mCAAmC,WAAW,EAAE;AAAA,IAClE;AAAA,EAAA;AAEJ;AAsDO,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AACjD,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AACnD,qBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAClE;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAI,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAc,eAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAI,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AACxB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,SAAS,MAAM,MAAM;AAAA,MAG1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AACtB,cAAM,mBAAmB,KAAK,UAAU,aAAa;AACrD,cAAM,cAAc,sBAAsB,kBAAkB,WAAW;AAEvE,YAAI,gBAAgB,MAAM;AAExB,eAAK,UAAU,OAAO,eAAe,CAAC;AAAA,QACxC,OAAO;AAEL,eAAK,UAAU,aAAa,IAAI;AAAA,QAClC;AAAA,MACF,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;AACnE,UAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAI,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,OAAO,KAAK,OAAO,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,OAAO,yBAAA;AAG3B,YAAI,SAAS,WAAW,OAAO,0BAA0B,SAAS,GAAG;AACnE,mBAAS,WAAW,OAAO,0BAAA;AAAA,QAC7B;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAI,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AACzB,WAAK,YAAY,QAAQ,IAAI;AAE7B,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,YAAM,gBACJ,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAG1D,WAAK,QAAQ;AAAA,QACX,SAAS,cAAc;AAAA,QACvB,OAAO;AAAA,MAAA;AAIT,WAAK,SAAA;AAGL,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;"}
+
{"version":3,"file":"transactions.js","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from \"./errors\"\nimport { transactionScopedScheduler } from \"./scheduler.js\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Merges two pending mutations for the same item within a transaction\n *\n * Merge behavior truth table:\n * - (insert, update) → insert (merge changes, keep empty original)\n * - (insert, delete) → null (cancel both mutations)\n * - (update, delete) → delete (delete dominates)\n * - (update, update) → update (replace with latest, union changes)\n * - (delete, delete) → delete (replace with latest)\n * - (insert, insert) → insert (replace with latest)\n *\n * Note: (delete, update) and (delete, insert) should never occur as the collection\n * layer prevents operations on deleted items within the same transaction.\n *\n * @param existing - The existing mutation in the transaction\n * @param incoming - The new mutation being applied\n * @returns The merged mutation, or null if both should be removed\n */\nfunction mergePendingMutations<T extends object>(\n existing: PendingMutation<T>,\n incoming: PendingMutation<T>\n): PendingMutation<T> | null {\n // Truth table implementation\n switch (`${existing.type}-${incoming.type}` as const) {\n case `insert-update`: {\n // Update after insert: keep as insert but merge changes\n // For insert-update, the key should remain the same since collections don't allow key changes\n return {\n ...existing,\n type: `insert` as const,\n original: {},\n modified: incoming.modified,\n changes: { ...existing.changes, ...incoming.changes },\n // Keep existing keys (key changes not allowed in updates)\n key: existing.key,\n globalKey: existing.globalKey,\n // Merge metadata (last-write-wins)\n metadata: incoming.metadata ?? existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n // Update tracking info\n mutationId: incoming.mutationId,\n updatedAt: incoming.updatedAt,\n }\n }\n\n case `insert-delete`:\n // Delete after insert: cancel both mutations\n return null\n\n case `update-delete`:\n // Delete after update: delete dominates\n return incoming\n\n case `update-update`: {\n // Update after update: replace with latest, union changes\n return {\n ...incoming,\n // Keep original from first update\n original: existing.original,\n // Union the changes from both updates\n changes: { ...existing.changes, ...incoming.changes },\n // Merge metadata\n metadata: incoming.metadata ?? 
existing.metadata,\n syncMetadata: { ...existing.syncMetadata, ...incoming.syncMetadata },\n }\n }\n\n case `delete-delete`:\n case `insert-insert`:\n // Same type: replace with latest\n return incoming\n\n default: {\n // Exhaustiveness check\n const _exhaustive: never = `${existing.type}-${incoming.type}` as never\n throw new Error(`Unhandled mutation combination: ${_exhaustive}`)\n }\n }\n}\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n // Clear any stale work that may have been left behind if a previous mutate\n // scope aborted before we could flush.\n transactionScopedScheduler.clear(tx.id)\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n // Always flush pending work for this transaction before removing it from\n // the ambient stack – this runs even if the mutate callback throws.\n // If flush throws (e.g., due to a job error), we still clean up the stack.\n try {\n transactionScopedScheduler.flush(tx.id)\n } finally {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n }\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: 
Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit().catch(() => {\n // Errors from autoCommit are handled via isPersisted.promise\n // This catch prevents unhandled promise rejections\n })\n }\n\n return this\n }\n\n /**\n * Apply new mutations to this transaction, intelligently merging with existing mutations\n *\n * When mutations operate on the same item (same globalKey), they are merged according to\n * the following rules:\n *\n * - **insert + update** → insert (merge changes, keep empty original)\n * - **insert + delete** → removed (mutations cancel each other out)\n * - **update + delete** → delete (delete dominates)\n * - **update + update** → update (union changes, keep first original)\n * - **same type** → replace with latest\n *\n * This merging reduces over-the-wire churn and keeps the optimistic local view\n * aligned with user intent.\n *\n * @param mutations - Array of new mutations to apply\n */\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n const existingMutation = this.mutations[existingIndex]!\n const mergeResult = mergePendingMutations(existingMutation, newMutation)\n\n if (mergeResult === null) {\n // Remove the mutation (e.g., delete after 
insert cancels both)\n this.mutations.splice(existingIndex, 1)\n } else {\n // Replace with merged mutation\n this.mutations[existingIndex] = mergeResult\n }\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection._state.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection._state.pendingSyncedTransactions.length > 0) {\n mutation.collection._state.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit 
failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n this.isPersisted.resolve(this)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Preserve the original error for rethrowing\n const originalError =\n error instanceof Error ? error : new Error(String(error))\n\n // Update transaction with error information\n this.error = {\n message: originalError.message,\n error: originalError,\n }\n\n // rollback the transaction\n this.rollback()\n\n // Re-throw the original error to preserve identity and stack\n throw originalError\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction 
}\n"],"names":[],"mappings":";;;AAiBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAoBrB,SAAS,sBACP,UACA,UAC2B;AAE3B,UAAQ,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI,IAAA;AAAA,IACvC,KAAK,iBAAiB;AAGpB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,MAAM;AAAA,QACN,UAAU,CAAA;AAAA,QACV,UAAU,SAAS;AAAA,QACnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,KAAK,SAAS;AAAA,QACd,WAAW,SAAS;AAAA;AAAA,QAEpB,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA;AAAA,QAEtD,YAAY,SAAS;AAAA,QACrB,WAAW,SAAS;AAAA,MAAA;AAAA,IAExB;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET,KAAK;AAEH,aAAO;AAAA,IAET,KAAK,iBAAiB;AAEpB,aAAO;AAAA,QACL,GAAG;AAAA;AAAA,QAEH,UAAU,SAAS;AAAA;AAAA,QAEnB,SAAS,EAAE,GAAG,SAAS,SAAS,GAAG,SAAS,QAAA;AAAA;AAAA,QAE5C,UAAU,SAAS,YAAY,SAAS;AAAA,QACxC,cAAc,EAAE,GAAG,SAAS,cAAc,GAAG,SAAS,aAAA;AAAA,MAAa;AAAA,IAEvE;AAAA,IAEA,KAAK;AAAA,IACL,KAAK;AAEH,aAAO;AAAA,IAET,SAAS;AAEP,YAAM,cAAqB,GAAG,SAAS,IAAI,IAAI,SAAS,IAAI;AAC5D,YAAM,IAAI,MAAM,mCAAmC,WAAW,EAAE;AAAA,IAClE;AAAA,EAAA;AAEJ;AAsDO,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AAGjD,6BAA2B,MAAM,GAAG,EAAE;AACtC,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AAInD,MAAI;AACF,+BAA2B,MAAM,GAAG,EAAE;AAAA,EACxC,UAAA;AACE,uBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAAA,EAClE;AACF;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAI,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAc,eAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAI,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AACxB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,SAAS,MAAM,MAAM;AAAA,MAG1B,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AACtB,cAAM,mBAAmB,KAAK,UAAU,aAAa;AACrD,cAAM,cAAc,sBAAsB,kBAAkB,WAAW;AAEvE,YAAI,gBAAgB,MAAM;AAExB,eAAK,UAAU,OAAO,eAAe,CAAC;AAAA,QACxC,OAAO;AAEL,eAAK,UAAU,aAAa,IAAI;AAAA,QAClC;AAAA,MACF,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;AACnE,UAAM,sBAAsB,QAAQ,uBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAI,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,OAAO,KAAK,OAAO,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AA
AA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,OAAO,yBAAA;AAG3B,YAAI,SAAS,WAAW,OAAO,0BAA0B,SAAS,GAAG;AACnE,mBAAS,WAAW,OAAO,0BAAA;AAAA,QAC7B;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAI,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AACzB,WAAK,YAAY,QAAQ,IAAI;AAE7B,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,YAAM,gBACJ,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAG1D,WAAK,QAAQ;AAAA,QACX,SAAS,cAAc;AAAA,QACvB,OAAO;AAAA,MAAA;AAIT,WAAK,SAAA;AAGL,YAAM;AAAA,IACR;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;"}
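The regenerated map above embeds the updated `src/transactions.ts`, whose main behavioral change is a transaction-scoped scheduler: `registerTransaction` clears any stale work queued under a transaction id before pushing it onto the ambient stack, and `unregisterTransaction` flushes that work in a `finally` so it runs even when the `mutate` callback throws. A minimal standalone sketch of that pattern; the `ScopedScheduler` below is a stand-in for illustration, not the package's `scheduler.ts` API:

```ts
// Stand-in for a transaction-scoped scheduler: jobs are queued per
// transaction id and run together when that transaction's scope closes.
class ScopedScheduler {
  private jobs = new Map<string, Array<() => void>>()

  schedule(txId: string, job: () => void): void {
    const queue = this.jobs.get(txId) ?? []
    queue.push(job)
    this.jobs.set(txId, queue)
  }

  clear(txId: string): void {
    this.jobs.delete(txId)
  }

  flush(txId: string): void {
    const queue = this.jobs.get(txId) ?? []
    this.jobs.delete(txId)
    for (const job of queue) job()
  }
}

const scheduler = new ScopedScheduler()
const ambientStack: Array<string> = []

function registerTransaction(txId: string): void {
  // Drop stale work left behind if a previous mutate scope aborted early.
  scheduler.clear(txId)
  ambientStack.push(txId)
}

function unregisterTransaction(txId: string): void {
  // Flush pending work even when the mutate callback threw;
  // the finally guarantees the ambient stack is still popped.
  try {
    scheduler.flush(txId)
  } finally {
    const index = ambientStack.lastIndexOf(txId)
    if (index !== -1) ambientStack.splice(index, 1)
  }
}
```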
package/package.json
CHANGED
@@ -1,10 +1,10 @@
 {
   "name": "@tanstack/db",
   "description": "A reactive client store for building super fast apps on sync",
-  "version": "0.4.8",
+  "version": "0.4.9",
   "dependencies": {
     "@standard-schema/spec": "^1.0.0",
-    "@tanstack/db-ivm": "0.1.
+    "@tanstack/db-ivm": "0.1.10"
   },
   "devDependencies": {
     "@vitest/coverage-istanbul": "^3.2.4",

package/src/errors.ts
CHANGED
@@ -349,9 +349,23 @@ export class LimitOffsetRequireOrderByError extends QueryCompilationError {
   }
 }
 
+/**
+ * Error thrown when a collection input stream is not found during query compilation.
+ * In self-joins, each alias (e.g., 'employee', 'manager') requires its own input stream.
+ */
 export class CollectionInputNotFoundError extends QueryCompilationError {
-  constructor(
-
+  constructor(
+    alias: string,
+    collectionId?: string,
+    availableKeys?: Array<string>
+  ) {
+    const details = collectionId
+      ? `alias "${alias}" (collection "${collectionId}")`
+      : `collection "${alias}"`
+    const availableKeysMsg = availableKeys?.length
+      ? `. Available keys: ${availableKeys.join(`, `)}`
+      : ``
+    super(`Input for ${details} not found in inputs map${availableKeysMsg}`)
   }
 }
 
@@ -399,32 +413,32 @@ export class UnsupportedJoinTypeError extends JoinError {
   }
 }
 
-export class
-  constructor(
+export class InvalidJoinConditionSameSourceError extends JoinError {
+  constructor(sourceAlias: string) {
     super(
-      `Invalid join condition: both expressions refer to the same
+      `Invalid join condition: both expressions refer to the same source "${sourceAlias}"`
     )
   }
 }
 
-export class
+export class InvalidJoinConditionSourceMismatchError extends JoinError {
   constructor() {
-    super(`Invalid join condition: expressions must reference
+    super(`Invalid join condition: expressions must reference source aliases`)
   }
 }
 
-export class
-  constructor(
+export class InvalidJoinConditionLeftSourceError extends JoinError {
+  constructor(sourceAlias: string) {
     super(
-      `Invalid join condition: left expression refers to an unavailable
+      `Invalid join condition: left expression refers to an unavailable source "${sourceAlias}"`
     )
   }
 }
 
-export class
-  constructor(
+export class InvalidJoinConditionRightSourceError extends JoinError {
+  constructor(sourceAlias: string) {
     super(
-      `Invalid join condition: right expression does not refer to the joined
+      `Invalid join condition: right expression does not refer to the joined source "${sourceAlias}"`
     )
   }
 }
@@ -563,3 +577,55 @@ export class CannotCombineEmptyExpressionListError extends QueryOptimizerError {
     super(`Cannot combine empty expression list`)
   }
 }
+
+/**
+ * Internal error when the query optimizer fails to convert a WHERE clause to a collection filter.
+ */
+export class WhereClauseConversionError extends QueryOptimizerError {
+  constructor(collectionId: string, alias: string) {
+    super(
+      `Failed to convert WHERE clause to collection filter for collection '${collectionId}' alias '${alias}'. This indicates a bug in the query optimization logic.`
+    )
+  }
+}
+
+/**
+ * Error when a subscription cannot be found during lazy join processing.
+ * For subqueries, aliases may be remapped (e.g., 'activeUser' → 'user').
+ */
+export class SubscriptionNotFoundError extends QueryCompilationError {
+  constructor(
+    resolvedAlias: string,
+    originalAlias: string,
+    collectionId: string,
+    availableAliases: Array<string>
+  ) {
+    super(
+      `Internal error: subscription for alias '${resolvedAlias}' (remapped from '${originalAlias}', collection '${collectionId}') is missing in join pipeline. Available aliases: ${availableAliases.join(`, `)}. This indicates a bug in alias tracking.`
+    )
+  }
+}
+
+/**
+ * Error thrown when aggregate expressions are used outside of a GROUP BY context.
+ */
+export class AggregateNotSupportedError extends QueryCompilationError {
+  constructor() {
+    super(
+      `Aggregate expressions are not supported in this context. Use GROUP BY clause for aggregates.`
+    )
+  }
+}
+
+/**
+ * Internal error when the compiler returns aliases that don't have corresponding input streams.
+ * This should never happen since all aliases come from user declarations.
+ */
+export class MissingAliasInputsError extends QueryCompilationError {
+  constructor(missingAliases: Array<string>) {
+    super(
+      `Internal error: compiler returned aliases without inputs: ${missingAliases.join(`, `)}. ` +
+        `This indicates a bug in query compilation. Please report this issue.`
+    )
+  }
+}

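To make the new diagnostics concrete, here is a small standalone sketch of the message `CollectionInputNotFoundError` now produces. It copies the formatting logic from the hunk above but extends plain `Error` instead of the package's `QueryCompilationError`, and the `employees` collection id is illustrative:

```ts
// Standalone re-implementation of the message format shown in the diff above.
class CollectionInputNotFoundError extends Error {
  constructor(alias: string, collectionId?: string, availableKeys?: Array<string>) {
    const details = collectionId
      ? `alias "${alias}" (collection "${collectionId}")`
      : `collection "${alias}"`
    const availableKeysMsg = availableKeys?.length
      ? `. Available keys: ${availableKeys.join(`, `)}`
      : ``
    super(`Input for ${details} not found in inputs map${availableKeysMsg}`)
  }
}

// A self-join where only the "employee" input was wired up:
console.log(
  new CollectionInputNotFoundError(`manager`, `employees`, [`employee`]).message
)
// Input for alias "manager" (collection "employees") not found in inputs map. Available keys: employee
```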
package/src/query/builder/types.ts
CHANGED
@@ -107,7 +107,7 @@ export type SchemaFromSource<T extends Source> = Prettify<{
  * GetAliases - Extracts all table aliases available in a query context
  *
  * Simple utility type that returns the keys of the schema, representing
- * all table/
+ * all table/source aliases that can be referenced in the current query.
  */
 export type GetAliases<TContext extends Context> = keyof TContext[`schema`]
 
package/src/query/compiler/index.ts
CHANGED
@@ -31,24 +31,53 @@ import type {
 import type { QueryCache, QueryMapping } from "./types.js"
 
 /**
- * Result of query compilation including both the pipeline and
+ * Result of query compilation including both the pipeline and source-specific WHERE clauses
  */
 export interface CompilationResult {
   /** The ID of the main collection */
   collectionId: string
-
+
+  /** The compiled query pipeline (D2 stream) */
   pipeline: ResultStream
-
-
+
+  /** Map of source aliases to their WHERE clauses for index optimization */
+  sourceWhereClauses: Map<string, BasicExpression<boolean>>
+
+  /**
+   * Maps each source alias to its collection ID. Enables per-alias subscriptions for self-joins.
+   * Example: `{ employee: 'employees-col-id', manager: 'employees-col-id' }`
+   */
+  aliasToCollectionId: Record<string, string>
+
+  /**
+   * Flattened mapping from outer alias to innermost alias for subqueries.
+   * Always provides one-hop lookups, never recursive chains.
+   *
+   * Example: `{ activeUser: 'user' }` when `.from({ activeUser: subquery })`
+   * where the subquery uses `.from({ user: collection })`.
+   *
+   * For deeply nested subqueries, the mapping goes directly to the innermost alias:
+   * `{ author: 'user' }` (not `{ author: 'activeUser' }`), so `aliasRemapping[alias]`
+   * always resolves in a single lookup.
+   *
+   * Used to resolve subscriptions during lazy loading when join aliases differ from
+   * the inner aliases where collection subscriptions were created.
+   */
+  aliasRemapping: Record<string, string>
 }
 
 /**
- * Compiles a
+ * Compiles a query IR into a D2 pipeline
  * @param rawQuery The query IR to compile
- * @param inputs Mapping of
+ * @param inputs Mapping of source aliases to input streams (e.g., `{ employee: input1, manager: input2 }`)
+ * @param collections Mapping of collection IDs to Collection instances
+ * @param subscriptions Mapping of source aliases to CollectionSubscription instances
+ * @param callbacks Mapping of source aliases to lazy loading callbacks
+ * @param lazySources Set of source aliases that should load data lazily
+ * @param optimizableOrderByCollections Map of collection IDs to order-by optimization info
  * @param cache Optional cache for compiled subqueries (used internally for recursion)
  * @param queryMapping Optional mapping from optimized queries to original queries
- * @returns A CompilationResult with the pipeline
+ * @returns A CompilationResult with the pipeline, source WHERE clauses, and alias metadata
  */
 export function compileQuery(
   rawQuery: QueryIR,
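The two new alias maps are easiest to read with concrete values. A short sketch using the `employee`/`manager` and `activeUser`/`user` examples from the doc comments above; the collection ids and the `resolveAlias` helper are illustrative, not part of the package's API:

```ts
// Illustrative shapes for the alias metadata returned by compilation.
type AliasMaps = {
  aliasToCollectionId: Record<string, string>
  aliasRemapping: Record<string, string>
}

// Self-join: two aliases point at the same collection id.
const selfJoin: AliasMaps = {
  aliasToCollectionId: { employee: `employees-col-id`, manager: `employees-col-id` },
  aliasRemapping: {},
}

// Subquery: the outer alias is remapped to the innermost alias in one hop.
const subquery: AliasMaps = {
  aliasToCollectionId: { user: `users-col-id` },
  aliasRemapping: { activeUser: `user` },
}

// Resolving which subscription backs a given alias is a single lookup.
function resolveAlias(alias: string, maps: AliasMaps): string {
  return maps.aliasRemapping[alias] ?? alias
}

console.log(resolveAlias(`activeUser`, subquery)) // "user"
console.log(resolveAlias(`manager`, selfJoin)) // "manager"
```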
@@ -56,7 +85,7 @@ export function compileQuery(
   collections: Record<string, Collection<any, any, any, any, any>>,
   subscriptions: Record<string, CollectionSubscription>,
   callbacks: Record<string, LazyCollectionCallbacks>,
-
+  lazySources: Set<string>,
   optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,
   cache: QueryCache = new WeakMap(),
   queryMapping: QueryMapping = new WeakMap()
@@ -68,8 +97,7 @@ export function compileQuery(
   }
 
   // Optimize the query before compilation
-  const { optimizedQuery: query,
-    optimizeQuery(rawQuery)
+  const { optimizedQuery: query, sourceWhereClauses } = optimizeQuery(rawQuery)
 
   // Create mapping from optimized query to original for caching
   queryMapping.set(query, rawQuery)
@@ -78,12 +106,24 @@ export function compileQuery(
   // Create a copy of the inputs map to avoid modifying the original
   const allInputs = { ...inputs }
 
-  //
-
+  // Track alias to collection id relationships discovered during compilation.
+  // This includes all user-declared aliases plus inner aliases from subqueries.
+  const aliasToCollectionId: Record<string, string> = {}
+
+  // Track alias remapping for subqueries (outer alias → inner alias)
+  // e.g., when .join({ activeUser: subquery }) where subquery uses .from({ user: collection })
+  // we store: aliasRemapping['activeUser'] = 'user'
+  const aliasRemapping: Record<string, string> = {}
 
-  //
+  // Create a map of source aliases to input streams.
+  // Inputs MUST be keyed by alias (e.g., `{ employee: input1, manager: input2 }`),
+  // not by collection ID. This enables per-alias subscriptions where different aliases
+  // of the same collection (e.g., self-joins) maintain independent filtered streams.
+  const sources: Record<string, KeyedStream> = {}
+
+  // Process the FROM clause to get the main source
   const {
-    alias:
+    alias: mainSource,
     input: mainInput,
     collectionId: mainCollectionId,
   } = processFrom(
@@ -92,18 +132,20 @@ export function compileQuery(
     collections,
     subscriptions,
     callbacks,
-
+    lazySources,
     optimizableOrderByCollections,
     cache,
-    queryMapping
+    queryMapping,
+    aliasToCollectionId,
+    aliasRemapping
   )
-
+  sources[mainSource] = mainInput
 
-  // Prepare the initial pipeline with the main
+  // Prepare the initial pipeline with the main source wrapped in its alias
   let pipeline: NamespacedAndKeyedStream = mainInput.pipe(
     map(([key, row]) => {
       // Initialize the record with a nested structure
-      const ret = [key, { [
+      const ret = [key, { [mainSource]: row }] as [
         string,
         Record<string, typeof row>,
       ]
@@ -116,19 +158,21 @@ export function compileQuery(
     pipeline = processJoins(
       pipeline,
       query.join,
-
+      sources,
       mainCollectionId,
-
+      mainSource,
       allInputs,
       cache,
       queryMapping,
       collections,
       subscriptions,
       callbacks,
-
+      lazySources,
       optimizableOrderByCollections,
       rawQuery,
-      compileQuery
+      compileQuery,
+      aliasToCollectionId,
+      aliasRemapping
     )
   }
 
@@ -185,7 +229,7 @@ export function compileQuery(
       map(([key, namespacedRow]) => {
         const selectResults =
           !query.join && !query.groupBy
-            ? namespacedRow[
+            ? namespacedRow[mainSource]
             : namespacedRow
 
         return [
@@ -286,7 +330,9 @@ export function compileQuery(
   const compilationResult = {
     collectionId: mainCollectionId,
     pipeline: result,
-
+    sourceWhereClauses,
+    aliasToCollectionId,
+    aliasRemapping,
   }
   cache.set(rawQuery, compilationResult)
 
@@ -314,7 +360,9 @@ export function compileQuery(
   const compilationResult = {
     collectionId: mainCollectionId,
     pipeline: result,
-
+    sourceWhereClauses,
+    aliasToCollectionId,
+    aliasRemapping,
   }
   cache.set(rawQuery, compilationResult)
 
@@ -322,7 +370,8 @@ export function compileQuery(
 }
 
 /**
- * Processes the FROM clause
+ * Processes the FROM clause, handling direct collection references and subqueries.
+ * Populates `aliasToCollectionId` and `aliasRemapping` for per-alias subscription tracking.
  */
 function processFrom(
   from: CollectionRef | QueryRef,
@@ -330,17 +379,24 @@ function processFrom(
   collections: Record<string, Collection>,
   subscriptions: Record<string, CollectionSubscription>,
   callbacks: Record<string, LazyCollectionCallbacks>,
-
+  lazySources: Set<string>,
   optimizableOrderByCollections: Record<string, OrderByOptimizationInfo>,
   cache: QueryCache,
-  queryMapping: QueryMapping
+  queryMapping: QueryMapping,
+  aliasToCollectionId: Record<string, string>,
+  aliasRemapping: Record<string, string>
 ): { alias: string; input: KeyedStream; collectionId: string } {
   switch (from.type) {
     case `collectionRef`: {
-      const input = allInputs[from.
+      const input = allInputs[from.alias]
       if (!input) {
-        throw new CollectionInputNotFoundError(
+        throw new CollectionInputNotFoundError(
+          from.alias,
+          from.collection.id,
+          Object.keys(allInputs)
+        )
       }
+      aliasToCollectionId[from.alias] = from.collection.id
       return { alias: from.alias, input, collectionId: from.collection.id }
     }
     case `queryRef`: {
@@ -354,12 +410,39 @@ function processFrom(
         collections,
         subscriptions,
         callbacks,
-
+        lazySources,
         optimizableOrderByCollections,
         cache,
         queryMapping
       )
 
+      // Pull up alias mappings from subquery to parent scope.
+      // This includes both the innermost alias-to-collection mappings AND
+      // any existing remappings from nested subquery levels.
+      Object.assign(aliasToCollectionId, subQueryResult.aliasToCollectionId)
+      Object.assign(aliasRemapping, subQueryResult.aliasRemapping)
+
+      // Create a FLATTENED remapping from outer alias to innermost alias.
+      // For nested subqueries, this ensures one-hop lookups (not recursive chains).
+      //
+      // Example with 3-level nesting:
+      // Inner: .from({ user: usersCollection })
+      // Middle: .from({ activeUser: innerSubquery }) → creates: activeUser → user
+      // Outer: .from({ author: middleSubquery }) → creates: author → user (not author → activeUser)
+      //
+      // The key insight: We search through the PULLED-UP aliasToCollectionId (which contains
+      // the innermost 'user' alias), so we always map directly to the deepest level.
+      // This means aliasRemapping[alias] is always a single lookup, never recursive.
+      // Needed for subscription resolution during lazy loading.
+      const innerAlias = Object.keys(subQueryResult.aliasToCollectionId).find(
+        (alias) =>
+          subQueryResult.aliasToCollectionId[alias] ===
+          subQueryResult.collectionId
+      )
+      if (innerAlias && innerAlias !== from.alias) {
+        aliasRemapping[from.alias] = innerAlias
+      }
+
       // Extract the pipeline from the compilation result
       const subQueryInput = subQueryResult.pipeline
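A standalone sketch of the flattening step shown in this hunk, with hypothetical aliases and collection ids; the `pullUp` helper is illustrative, since in the package this logic lives inline in `processFrom`:

```ts
// Illustrative shape of what a compiled subquery hands back to its parent.
type SubQueryResult = {
  collectionId: string
  aliasToCollectionId: Record<string, string>
  aliasRemapping: Record<string, string>
}

function pullUp(
  outerAlias: string,
  sub: SubQueryResult,
  aliasToCollectionId: Record<string, string>,
  aliasRemapping: Record<string, string>
): void {
  // Merge the subquery's alias metadata into the parent scope.
  Object.assign(aliasToCollectionId, sub.aliasToCollectionId)
  Object.assign(aliasRemapping, sub.aliasRemapping)
  // Map the outer alias straight to the innermost alias (one hop).
  const innerAlias = Object.keys(sub.aliasToCollectionId).find(
    (alias) => sub.aliasToCollectionId[alias] === sub.collectionId
  )
  if (innerAlias && innerAlias !== outerAlias) {
    aliasRemapping[outerAlias] = innerAlias
  }
}

// The middle level already produced { activeUser: 'user' }; the outer level
// maps 'author' directly to 'user', never to 'activeUser'.
const aliasToCollectionId: Record<string, string> = {}
const aliasRemapping: Record<string, string> = {}
pullUp(
  `author`,
  {
    collectionId: `users-col-id`,
    aliasToCollectionId: { user: `users-col-id` },
    aliasRemapping: { activeUser: `user` },
  },
  aliasToCollectionId,
  aliasRemapping
)
console.log(aliasRemapping) // { activeUser: 'user', author: 'user' }
```

Because the pulled-up `aliasToCollectionId` already contains the innermost `user` alias, `author` resolves to `user` in a single lookup; no chain through `activeUser` is ever needed.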