@tanstack/db 0.0.26 → 0.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/change-events.cjs +141 -0
- package/dist/cjs/change-events.cjs.map +1 -0
- package/dist/cjs/change-events.d.cts +49 -0
- package/dist/cjs/collection.cjs +236 -90
- package/dist/cjs/collection.cjs.map +1 -1
- package/dist/cjs/collection.d.cts +95 -20
- package/dist/cjs/errors.cjs +509 -1
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +225 -1
- package/dist/cjs/index.cjs +82 -3
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +5 -1
- package/dist/cjs/indexes/auto-index.cjs +64 -0
- package/dist/cjs/indexes/auto-index.cjs.map +1 -0
- package/dist/cjs/indexes/auto-index.d.cts +9 -0
- package/dist/cjs/indexes/base-index.cjs +46 -0
- package/dist/cjs/indexes/base-index.cjs.map +1 -0
- package/dist/cjs/indexes/base-index.d.cts +54 -0
- package/dist/cjs/indexes/index-options.d.cts +13 -0
- package/dist/cjs/indexes/lazy-index.cjs +193 -0
- package/dist/cjs/indexes/lazy-index.cjs.map +1 -0
- package/dist/cjs/indexes/lazy-index.d.cts +96 -0
- package/dist/cjs/indexes/ordered-index.cjs +227 -0
- package/dist/cjs/indexes/ordered-index.cjs.map +1 -0
- package/dist/cjs/indexes/ordered-index.d.cts +72 -0
- package/dist/cjs/local-storage.cjs +9 -15
- package/dist/cjs/local-storage.cjs.map +1 -1
- package/dist/cjs/query/builder/functions.cjs +11 -0
- package/dist/cjs/query/builder/functions.cjs.map +1 -1
- package/dist/cjs/query/builder/functions.d.cts +4 -0
- package/dist/cjs/query/builder/index.cjs +6 -7
- package/dist/cjs/query/builder/index.cjs.map +1 -1
- package/dist/cjs/query/builder/ref-proxy.cjs +37 -0
- package/dist/cjs/query/builder/ref-proxy.cjs.map +1 -1
- package/dist/cjs/query/builder/ref-proxy.d.cts +12 -0
- package/dist/cjs/query/compiler/evaluators.cjs +83 -58
- package/dist/cjs/query/compiler/evaluators.cjs.map +1 -1
- package/dist/cjs/query/compiler/evaluators.d.cts +8 -0
- package/dist/cjs/query/compiler/expressions.cjs +61 -0
- package/dist/cjs/query/compiler/expressions.cjs.map +1 -0
- package/dist/cjs/query/compiler/expressions.d.cts +25 -0
- package/dist/cjs/query/compiler/group-by.cjs +5 -10
- package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.cjs +23 -17
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.d.cts +12 -3
- package/dist/cjs/query/compiler/joins.cjs +61 -12
- package/dist/cjs/query/compiler/joins.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.cjs +4 -34
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/types.d.cts +2 -2
- package/dist/cjs/query/live-query-collection.cjs +54 -12
- package/dist/cjs/query/live-query-collection.cjs.map +1 -1
- package/dist/cjs/query/optimizer.cjs +45 -7
- package/dist/cjs/query/optimizer.cjs.map +1 -1
- package/dist/cjs/query/optimizer.d.cts +13 -3
- package/dist/cjs/transactions.cjs +5 -4
- package/dist/cjs/transactions.cjs.map +1 -1
- package/dist/cjs/types.d.cts +31 -0
- package/dist/cjs/utils/array-utils.cjs +18 -0
- package/dist/cjs/utils/array-utils.cjs.map +1 -0
- package/dist/cjs/utils/array-utils.d.cts +8 -0
- package/dist/cjs/utils/comparison.cjs +52 -0
- package/dist/cjs/utils/comparison.cjs.map +1 -0
- package/dist/cjs/utils/comparison.d.cts +11 -0
- package/dist/cjs/utils/index-optimization.cjs +270 -0
- package/dist/cjs/utils/index-optimization.cjs.map +1 -0
- package/dist/cjs/utils/index-optimization.d.cts +29 -0
- package/dist/esm/change-events.d.ts +49 -0
- package/dist/esm/change-events.js +141 -0
- package/dist/esm/change-events.js.map +1 -0
- package/dist/esm/collection.d.ts +95 -20
- package/dist/esm/collection.js +234 -88
- package/dist/esm/collection.js.map +1 -1
- package/dist/esm/errors.d.ts +225 -1
- package/dist/esm/errors.js +510 -2
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.d.ts +5 -1
- package/dist/esm/index.js +81 -2
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/indexes/auto-index.d.ts +9 -0
- package/dist/esm/indexes/auto-index.js +64 -0
- package/dist/esm/indexes/auto-index.js.map +1 -0
- package/dist/esm/indexes/base-index.d.ts +54 -0
- package/dist/esm/indexes/base-index.js +46 -0
- package/dist/esm/indexes/base-index.js.map +1 -0
- package/dist/esm/indexes/index-options.d.ts +13 -0
- package/dist/esm/indexes/lazy-index.d.ts +96 -0
- package/dist/esm/indexes/lazy-index.js +193 -0
- package/dist/esm/indexes/lazy-index.js.map +1 -0
- package/dist/esm/indexes/ordered-index.d.ts +72 -0
- package/dist/esm/indexes/ordered-index.js +227 -0
- package/dist/esm/indexes/ordered-index.js.map +1 -0
- package/dist/esm/local-storage.js +9 -15
- package/dist/esm/local-storage.js.map +1 -1
- package/dist/esm/query/builder/functions.d.ts +4 -0
- package/dist/esm/query/builder/functions.js +11 -0
- package/dist/esm/query/builder/functions.js.map +1 -1
- package/dist/esm/query/builder/index.js +6 -7
- package/dist/esm/query/builder/index.js.map +1 -1
- package/dist/esm/query/builder/ref-proxy.d.ts +12 -0
- package/dist/esm/query/builder/ref-proxy.js +37 -0
- package/dist/esm/query/builder/ref-proxy.js.map +1 -1
- package/dist/esm/query/compiler/evaluators.d.ts +8 -0
- package/dist/esm/query/compiler/evaluators.js +84 -59
- package/dist/esm/query/compiler/evaluators.js.map +1 -1
- package/dist/esm/query/compiler/expressions.d.ts +25 -0
- package/dist/esm/query/compiler/expressions.js +61 -0
- package/dist/esm/query/compiler/expressions.js.map +1 -0
- package/dist/esm/query/compiler/group-by.js +5 -10
- package/dist/esm/query/compiler/group-by.js.map +1 -1
- package/dist/esm/query/compiler/index.d.ts +12 -3
- package/dist/esm/query/compiler/index.js +23 -17
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/joins.js +61 -12
- package/dist/esm/query/compiler/joins.js.map +1 -1
- package/dist/esm/query/compiler/order-by.js +1 -31
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/compiler/types.d.ts +2 -2
- package/dist/esm/query/live-query-collection.js +54 -12
- package/dist/esm/query/live-query-collection.js.map +1 -1
- package/dist/esm/query/optimizer.d.ts +13 -3
- package/dist/esm/query/optimizer.js +40 -2
- package/dist/esm/query/optimizer.js.map +1 -1
- package/dist/esm/transactions.js +5 -4
- package/dist/esm/transactions.js.map +1 -1
- package/dist/esm/types.d.ts +31 -0
- package/dist/esm/utils/array-utils.d.ts +8 -0
- package/dist/esm/utils/array-utils.js +18 -0
- package/dist/esm/utils/array-utils.js.map +1 -0
- package/dist/esm/utils/comparison.d.ts +11 -0
- package/dist/esm/utils/comparison.js +52 -0
- package/dist/esm/utils/comparison.js.map +1 -0
- package/dist/esm/utils/index-optimization.d.ts +29 -0
- package/dist/esm/utils/index-optimization.js +270 -0
- package/dist/esm/utils/index-optimization.js.map +1 -0
- package/package.json +3 -2
- package/src/change-events.ts +257 -0
- package/src/collection.ts +321 -110
- package/src/errors.ts +545 -1
- package/src/index.ts +7 -1
- package/src/indexes/auto-index.ts +108 -0
- package/src/indexes/base-index.ts +119 -0
- package/src/indexes/index-options.ts +42 -0
- package/src/indexes/lazy-index.ts +251 -0
- package/src/indexes/ordered-index.ts +305 -0
- package/src/local-storage.ts +16 -17
- package/src/query/builder/functions.ts +14 -0
- package/src/query/builder/index.ts +12 -7
- package/src/query/builder/ref-proxy.ts +65 -0
- package/src/query/compiler/evaluators.ts +114 -62
- package/src/query/compiler/expressions.ts +92 -0
- package/src/query/compiler/group-by.ts +10 -10
- package/src/query/compiler/index.ts +52 -22
- package/src/query/compiler/joins.ts +114 -15
- package/src/query/compiler/order-by.ts +1 -45
- package/src/query/compiler/types.ts +2 -2
- package/src/query/live-query-collection.ts +95 -15
- package/src/query/optimizer.ts +94 -5
- package/src/transactions.ts +10 -4
- package/src/types.ts +38 -0
- package/src/utils/array-utils.ts +28 -0
- package/src/utils/comparison.ts +79 -0
- package/src/utils/index-optimization.ts +546 -0
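Taken together, the new files cluster around four areas: a collection index subsystem (`indexes/*` plus `utils/index-optimization.ts`), collection change events (`change-events.ts`), a much larger error catalogue (`errors.ts`), and pushdown of single-source WHERE clauses from live queries into collection subscriptions (`query/optimizer.ts`, `query/compiler/expressions.ts`). For orientation, here is the minimal live-query usage shown in the JSDoc embedded in the updated `live-query-collection.ts`; the `usersCollection` import is a placeholder for a collection defined elsewhere, and the WHERE clause is exactly the kind of single-source predicate the new optimizer can push down:

```ts
import { createLiveQueryCollection, eq } from "@tanstack/db"
// Placeholder: any collection created with createCollection() elsewhere in the app.
import { usersCollection } from "./collections"

// Live query over the users collection; the single-source WHERE clause on `user`
// is eligible for pushdown into the collection subscription as of this release range.
const activeUsers = createLiveQueryCollection((q) =>
  q
    .from({ user: usersCollection })
    .where(({ user }) => eq(user.active, true))
    .select(({ user }) => ({ id: user.id, name: user.name }))
)
```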
package/dist/cjs/query/live-query-collection.cjs.map

@@ -1 +1 @@
- [single-line source map for live-query-collection.cjs; its sourcesContent embeds the previous src/query/live-query-collection.ts]
+ [regenerated single-line source map; its sourcesContent embeds the updated src/query/live-query-collection.ts, which now imports convertToBasicExpression from ./compiler/expressions.js, caches the per-collection WHERE clauses returned by compileQuery, and, where such a clause exists, subscribes to the source collection with an index-optimized whereExpression resolved via a new findCollectionAlias helper; collections without a pushed-down clause keep the plain includeInitialState subscription]
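The behavioural core of that change is visible in the embedded source: when the optimizer has attributed a WHERE clause to a single collection, the live query now subscribes to that collection with it. The snippet below is condensed from the sourcesContent above (an excerpt, not a self-contained program; `input`, `collection`, `collectionAlias`, and the helpers are the names used in that file, and the real code throws if the conversion yields nothing):

```ts
// Per source collection, inside the sync() setup of liveQueryCollectionOptions:
const whereClause =
  collectionAlias && collectionWhereClausesCache
    ? collectionWhereClausesCache.get(collectionAlias)
    : undefined

if (whereClause) {
  // Convert the IR clause into the BasicExpression form that subscribeChanges
  // accepts, so the collection can serve the subscription from its indexes.
  const whereExpression = convertToBasicExpression(whereClause, collectionAlias!)
  unsubscribeCallbacks.add(
    collection.subscribeChanges(
      (changes) => {
        sendChangesToInput(input, changes, collection.config.getKey)
        maybeRunGraph()
      },
      { includeInitialState: true, whereExpression }
    )
  )
} else {
  // No pushed-down clause for this collection: plain subscription, as in 0.0.26.
  unsubscribeCallbacks.add(
    collection.subscribeChanges(
      (changes) => {
        sendChangesToInput(input, changes, collection.config.getKey)
        maybeRunGraph()
      },
      { includeInitialState: true }
    )
  )
}
```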
package/dist/cjs/query/optimizer.cjs

@@ -1,8 +1,11 @@
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
 const utils = require("../utils.cjs");
+const errors = require("../errors.cjs");
 const ir = require("./ir.cjs");
+const expressions = require("./compiler/expressions.cjs");
 function optimizeQuery(query) {
+  const collectionWhereClauses = extractCollectionWhereClauses(query);
   let optimized = query;
   let previousOptimized;
   let iterations = 0;
@@ -13,7 +16,42 @@ function optimizeQuery(query) {
     iterations++;
   }
   const cleaned = removeRedundantSubqueries(optimized);
-  return
+  return {
+    optimizedQuery: cleaned,
+    collectionWhereClauses
+  };
+}
+function extractCollectionWhereClauses(query) {
+  const collectionWhereClauses = /* @__PURE__ */ new Map();
+  if (!query.where || query.where.length === 0) {
+    return collectionWhereClauses;
+  }
+  const splitWhereClauses = splitAndClauses(query.where);
+  const analyzedClauses = splitWhereClauses.map(
+    (clause) => analyzeWhereClause(clause)
+  );
+  const groupedClauses = groupWhereClauses(analyzedClauses);
+  for (const [sourceAlias, whereClause] of groupedClauses.singleSource) {
+    if (isCollectionReference(query, sourceAlias)) {
+      if (expressions.isConvertibleToCollectionFilter(whereClause)) {
+        collectionWhereClauses.set(sourceAlias, whereClause);
+      }
+    }
+  }
+  return collectionWhereClauses;
+}
+function isCollectionReference(query, sourceAlias) {
+  if (query.from.alias === sourceAlias) {
+    return query.from.type === `collectionRef`;
+  }
+  if (query.join) {
+    for (const joinClause of query.join) {
+      if (joinClause.from.alias === sourceAlias) {
+        return joinClause.from.type === `collectionRef`;
+      }
+    }
+  }
+  return false;
 }
 function applyRecursiveOptimization(query) {
   var _a;
@@ -270,14 +308,14 @@ function isSafeToPushIntoExistingSubquery(query) {
   }
   return true;
 }
-function combineWithAnd(
-  if (
-    throw new
+function combineWithAnd(expressions2) {
+  if (expressions2.length === 0) {
+    throw new errors.CannotCombineEmptyExpressionListError();
   }
-  if (
-  return
+  if (expressions2.length === 1) {
+    return expressions2[0];
   }
-  return new ir.Func(`and`,
+  return new ir.Func(`and`, expressions2);
 }
 exports.optimizeQuery = optimizeQuery;
 //# sourceMappingURL=optimizer.cjs.map
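In 0.0.26 `optimizeQuery` returned the rewritten query directly; it now returns the query together with the single-source WHERE clauses it could attribute to individual collections. A type-level sketch of the new contract, using stand-in types inferred from this diff only (the real IR types live in `src/query/ir.ts`):

```ts
// Stand-ins for the package's internal IR types; shapes inferred from this diff.
interface BasicExpression {
  type: string
  [key: string]: unknown
}

interface SourceRef {
  type: `collectionRef` | `queryRef`
  alias: string
}

interface QueryIR {
  from: SourceRef
  join?: Array<{ from: SourceRef }>
  where?: Array<BasicExpression>
}

interface OptimizeQueryResult {
  /** Query with single-source predicates lifted into subqueries around the joins. */
  optimizedQuery: QueryIR
  /**
   * Single-source WHERE clauses keyed by source alias. A clause only lands here when
   * the alias resolves to a collectionRef and isConvertibleToCollectionFilter()
   * accepts it, so the live-query layer can hand it to collection.subscribeChanges.
   */
  collectionWhereClauses: Map<string, BasicExpression>
}
```

The compiler threads the map through: the updated `live-query-collection.ts` destructures `{ pipeline, collectionWhereClauses }` from `compileQuery(...)` and uses the clauses when subscribing to source collections.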
package/dist/cjs/query/optimizer.cjs.map

@@ -1 +1 @@
- [single-line source map for optimizer.cjs; its sourcesContent embeds the previous src/query/optimizer.ts, whose header comment documents the predicate-pushdown optimizer: AND-clause splitting, source analysis, clause grouping, and subquery creation, plus the safety checks that block pushing predicates into existing subqueries with aggregates, ORDER BY combined with LIMIT/OFFSET, or functional operations (fnSelect, fnWhere, fnHaving). The extract ends partway through this hunk.]
*\n * @param from - The FROM clause to process\n * @returns A FROM clause with redundant subqueries removed\n */\nfunction removeRedundantFromClause(from: From): From {\n if (from.type === `collectionRef`) {\n return from\n }\n\n const processedQuery = removeRedundantSubqueries(from.query)\n\n // Check if this subquery is redundant\n if (isRedundantSubquery(processedQuery)) {\n // Return the inner query's FROM clause with this alias\n const innerFrom = removeRedundantFromClause(processedQuery.from)\n if (innerFrom.type === `collectionRef`) {\n return new CollectionRefClass(innerFrom.collection, from.alias)\n } else {\n return new QueryRefClass(innerFrom.query, from.alias)\n }\n }\n\n return new QueryRefClass(processedQuery, from.alias)\n}\n\n/**\n * Determines if a subquery is redundant (adds no value).\n *\n * @param query - The query to check\n * @returns True if the query is redundant and can be removed\n */\nfunction isRedundantSubquery(query: QueryIR): boolean {\n return (\n (!query.where || query.where.length === 0) &&\n !query.select &&\n (!query.groupBy || query.groupBy.length === 0) &&\n (!query.having || query.having.length === 0) &&\n (!query.orderBy || query.orderBy.length === 0) &&\n (!query.join || query.join.length === 0) &&\n query.limit === undefined &&\n query.offset === undefined &&\n !query.fnSelect &&\n (!query.fnWhere || query.fnWhere.length === 0) &&\n (!query.fnHaving || query.fnHaving.length === 0)\n )\n}\n\n/**\n * Step 1: Split all AND clauses recursively into separate WHERE clauses.\n *\n * This enables more granular optimization by treating each condition independently.\n * OR clauses are preserved as they cannot be split without changing query semantics.\n *\n * @param whereClauses - Array of WHERE expressions to split\n * @returns Flattened array with AND clauses split into separate expressions\n *\n * @example\n * ```typescript\n * // Input: [and(eq(a, 1), gt(b, 2)), eq(c, 3)]\n * // Output: [eq(a, 1), gt(b, 2), eq(c, 3)]\n * ```\n */\nfunction splitAndClauses(\n whereClauses: Array<BasicExpression<boolean>>\n): Array<BasicExpression<boolean>> {\n const result: Array<BasicExpression<boolean>> = []\n\n for (const clause of whereClauses) {\n if (clause.type === `func` && clause.name === `and`) {\n // Recursively split nested AND clauses to handle complex expressions\n const splitArgs = splitAndClauses(\n clause.args as Array<BasicExpression<boolean>>\n )\n result.push(...splitArgs)\n } else {\n // Preserve non-AND clauses as-is (including OR clauses)\n result.push(clause)\n }\n }\n\n return result\n}\n\n/**\n * Step 2: Analyze which table sources a WHERE clause touches.\n *\n * This determines whether a clause can be pushed down to a specific table\n * or must remain in the main query (for multi-source clauses like join conditions).\n *\n * @param clause - The WHERE expression to analyze\n * @returns Analysis result with the expression and touched source aliases\n *\n * @example\n * ```typescript\n * // eq(users.department_id, 1) -> touches ['users']\n * // eq(users.id, posts.user_id) -> touches ['users', 'posts']\n * ```\n */\nfunction analyzeWhereClause(\n clause: BasicExpression<boolean>\n): AnalyzedWhereClause {\n const touchedSources = new Set<string>()\n\n /**\n * Recursively collect all table aliases referenced in an expression\n */\n function collectSources(expr: BasicExpression | any): void {\n switch (expr.type) {\n case `ref`:\n // PropRef path has the table alias as the first element\n if (expr.path && expr.path.length > 0) {\n const firstElement = 
expr.path[0]\n if (firstElement) {\n touchedSources.add(firstElement)\n }\n }\n break\n case `func`:\n // Recursively analyze function arguments (e.g., eq, gt, and, or)\n if (expr.args) {\n expr.args.forEach(collectSources)\n }\n break\n case `val`:\n // Values don't reference any sources\n break\n case `agg`:\n // Aggregates can reference sources in their arguments\n if (expr.args) {\n expr.args.forEach(collectSources)\n }\n break\n }\n }\n\n collectSources(clause)\n\n return {\n expression: clause,\n touchedSources,\n }\n}\n\n/**\n * Step 3: Group WHERE clauses by the sources they touch.\n *\n * Single-source clauses can be pushed down to subqueries for optimization.\n * Multi-source clauses must remain in the main query to preserve join semantics.\n *\n * @param analyzedClauses - Array of analyzed WHERE clauses\n * @returns Grouped clauses ready for optimization\n */\nfunction groupWhereClauses(\n analyzedClauses: Array<AnalyzedWhereClause>\n): GroupedWhereClauses {\n const singleSource = new Map<string, Array<BasicExpression<boolean>>>()\n const multiSource: Array<BasicExpression<boolean>> = []\n\n // Categorize each clause based on how many sources it touches\n for (const clause of analyzedClauses) {\n if (clause.touchedSources.size === 1) {\n // Single source clause - can be optimized\n const source = Array.from(clause.touchedSources)[0]!\n if (!singleSource.has(source)) {\n singleSource.set(source, [])\n }\n singleSource.get(source)!.push(clause.expression)\n } else if (clause.touchedSources.size > 1) {\n // Multi-source clause - must stay in main query\n multiSource.push(clause.expression)\n }\n // Skip clauses that touch no sources (constants) - they don't need optimization\n }\n\n // Combine multiple clauses for each source with AND\n const combinedSingleSource = new Map<string, BasicExpression<boolean>>()\n for (const [source, clauses] of singleSource) {\n combinedSingleSource.set(source, combineWithAnd(clauses))\n }\n\n // Combine multi-source clauses with AND\n const combinedMultiSource =\n multiSource.length > 0 ? combineWithAnd(multiSource) : undefined\n\n return {\n singleSource: combinedSingleSource,\n multiSource: combinedMultiSource,\n }\n}\n\n/**\n * Step 4: Apply optimizations by lifting single-source clauses into subqueries.\n *\n * Creates a new QueryIR with single-source WHERE clauses moved to subqueries\n * that wrap the original table references. This ensures immutability and prevents\n * infinite recursion issues.\n *\n * @param query - Original QueryIR to optimize\n * @param groupedClauses - WHERE clauses grouped by optimization strategy\n * @returns New QueryIR with optimizations applied\n */\nfunction applyOptimizations(\n query: QueryIR,\n groupedClauses: GroupedWhereClauses\n): QueryIR {\n // Track which single-source clauses were actually optimized\n const actuallyOptimized = new Set<string>()\n\n // Optimize the main FROM clause and track what was optimized\n const optimizedFrom = optimizeFromWithTracking(\n query.from,\n groupedClauses.singleSource,\n actuallyOptimized\n )\n\n // Optimize JOIN clauses and track what was optimized\n const optimizedJoins = query.join\n ? 
query.join.map((joinClause) => ({\n ...joinClause,\n from: optimizeFromWithTracking(\n joinClause.from,\n groupedClauses.singleSource,\n actuallyOptimized\n ),\n }))\n : undefined\n\n // Build the remaining WHERE clauses: multi-source + any single-source that weren't optimized\n const remainingWhereClauses: Array<BasicExpression<boolean>> = []\n\n // Add multi-source clauses\n if (groupedClauses.multiSource) {\n remainingWhereClauses.push(groupedClauses.multiSource)\n }\n\n // Add single-source clauses that weren't actually optimized\n for (const [source, clause] of groupedClauses.singleSource) {\n if (!actuallyOptimized.has(source)) {\n remainingWhereClauses.push(clause)\n }\n }\n\n // Create a completely new query object to ensure immutability\n const optimizedQuery: QueryIR = {\n // Copy all non-optimized fields as-is\n select: query.select,\n groupBy: query.groupBy ? [...query.groupBy] : undefined,\n having: query.having ? [...query.having] : undefined,\n orderBy: query.orderBy ? [...query.orderBy] : undefined,\n limit: query.limit,\n offset: query.offset,\n fnSelect: query.fnSelect,\n fnWhere: query.fnWhere ? [...query.fnWhere] : undefined,\n fnHaving: query.fnHaving ? [...query.fnHaving] : undefined,\n\n // Use the optimized FROM and JOIN clauses\n from: optimizedFrom,\n join: optimizedJoins,\n\n // Only include WHERE clauses that weren't successfully optimized\n where: remainingWhereClauses.length > 0 ? remainingWhereClauses : [],\n }\n\n return optimizedQuery\n}\n\n/**\n * Helper function to create a deep copy of a QueryIR object for immutability.\n *\n * This ensures that all optimizations create new objects rather than modifying\n * existing ones, preventing infinite recursion and shared reference issues.\n *\n * @param query - QueryIR to deep copy\n * @returns New QueryIR object with all nested objects copied\n */\nfunction deepCopyQuery(query: QueryIR): QueryIR {\n return {\n // Recursively copy the FROM clause\n from:\n query.from.type === `collectionRef`\n ? new CollectionRefClass(query.from.collection, query.from.alias)\n : new QueryRefClass(deepCopyQuery(query.from.query), query.from.alias),\n\n // Copy all other fields, creating new arrays where necessary\n select: query.select,\n join: query.join\n ? query.join.map((joinClause) => ({\n type: joinClause.type,\n left: joinClause.left,\n right: joinClause.right,\n from:\n joinClause.from.type === `collectionRef`\n ? new CollectionRefClass(\n joinClause.from.collection,\n joinClause.from.alias\n )\n : new QueryRefClass(\n deepCopyQuery(joinClause.from.query),\n joinClause.from.alias\n ),\n }))\n : undefined,\n where: query.where ? [...query.where] : undefined,\n groupBy: query.groupBy ? [...query.groupBy] : undefined,\n having: query.having ? [...query.having] : undefined,\n orderBy: query.orderBy ? [...query.orderBy] : undefined,\n limit: query.limit,\n offset: query.offset,\n fnSelect: query.fnSelect,\n fnWhere: query.fnWhere ? [...query.fnWhere] : undefined,\n fnHaving: query.fnHaving ? 
[...query.fnHaving] : undefined,\n }\n}\n\n/**\n * Helper function to optimize a FROM clause while tracking what was actually optimized.\n *\n * @param from - FROM clause to optimize\n * @param singleSourceClauses - Map of source aliases to their WHERE clauses\n * @param actuallyOptimized - Set to track which sources were actually optimized\n * @returns New FROM clause, potentially wrapped in a subquery\n */\nfunction optimizeFromWithTracking(\n from: From,\n singleSourceClauses: Map<string, BasicExpression<boolean>>,\n actuallyOptimized: Set<string>\n): From {\n const whereClause = singleSourceClauses.get(from.alias)\n\n if (!whereClause) {\n // No optimization needed, but return a copy to maintain immutability\n if (from.type === `collectionRef`) {\n return new CollectionRefClass(from.collection, from.alias)\n }\n // Must be queryRef due to type system\n return new QueryRefClass(deepCopyQuery(from.query), from.alias)\n }\n\n if (from.type === `collectionRef`) {\n // Create a new subquery with the WHERE clause for the collection\n // This is always safe since we're creating a new subquery\n const subQuery: QueryIR = {\n from: new CollectionRefClass(from.collection, from.alias),\n where: [whereClause],\n }\n actuallyOptimized.add(from.alias) // Mark as successfully optimized\n return new QueryRefClass(subQuery, from.alias)\n }\n\n // Must be queryRef due to type system\n\n // SAFETY CHECK: Only check safety when pushing WHERE clauses into existing subqueries\n // We need to be careful about pushing WHERE clauses into subqueries that already have\n // aggregates, HAVING, or ORDER BY + LIMIT since that could change their semantics\n if (!isSafeToPushIntoExistingSubquery(from.query)) {\n // Return a copy without optimization to maintain immutability\n // Do NOT mark as optimized since we didn't actually optimize it\n return new QueryRefClass(deepCopyQuery(from.query), from.alias)\n }\n\n // Add the WHERE clause to the existing subquery\n // Create a deep copy to ensure immutability\n const existingWhere = from.query.where || []\n const optimizedSubQuery: QueryIR = {\n ...deepCopyQuery(from.query),\n where: [...existingWhere, whereClause],\n }\n actuallyOptimized.add(from.alias) // Mark as successfully optimized\n return new QueryRefClass(optimizedSubQuery, from.alias)\n}\n\n/**\n * Determines if it's safe to push WHERE clauses into an existing subquery.\n *\n * Pushing WHERE clauses into existing subqueries can break semantics in several cases:\n *\n * 1. **Aggregates**: Pushing predicates before GROUP BY changes what gets aggregated\n * 2. **ORDER BY + LIMIT/OFFSET**: Pushing predicates before sorting+limiting changes the result set\n * 3. **HAVING clauses**: These operate on aggregated data, predicates should not be pushed past them\n * 4. 
**Functional operations**: fnSelect, fnWhere, fnHaving could have side effects\n *\n * Note: This safety check only applies when pushing WHERE clauses into existing subqueries.\n * Creating new subqueries from collection references is always safe.\n *\n * @param query - The existing subquery to check for safety\n * @returns True if it's safe to push WHERE clauses into this subquery, false otherwise\n *\n * @example\n * ```typescript\n * // UNSAFE: has GROUP BY - pushing WHERE could change aggregation\n * { from: users, groupBy: [dept], select: { count: agg('count', '*') } }\n *\n * // UNSAFE: has ORDER BY + LIMIT - pushing WHERE could change \"top 10\"\n * { from: users, orderBy: [salary desc], limit: 10 }\n *\n * // SAFE: plain SELECT without aggregates/limits\n * { from: users, select: { id, name } }\n * ```\n */\nfunction isSafeToPushIntoExistingSubquery(query: QueryIR): boolean {\n // Check for aggregates in SELECT clause\n if (query.select) {\n const hasAggregates = Object.values(query.select).some(\n (expr) => expr.type === `agg`\n )\n if (hasAggregates) {\n return false\n }\n }\n\n // Check for GROUP BY clause\n if (query.groupBy && query.groupBy.length > 0) {\n return false\n }\n\n // Check for HAVING clause\n if (query.having && query.having.length > 0) {\n return false\n }\n\n // Check for ORDER BY with LIMIT or OFFSET (dangerous combination)\n if (query.orderBy && query.orderBy.length > 0) {\n if (query.limit !== undefined || query.offset !== undefined) {\n return false\n }\n }\n\n // Check for functional variants that might have side effects\n if (\n query.fnSelect ||\n (query.fnWhere && query.fnWhere.length > 0) ||\n (query.fnHaving && query.fnHaving.length > 0)\n ) {\n return false\n }\n\n // If none of the unsafe conditions are present, it's safe to optimize\n return true\n}\n\n/**\n * Helper function to combine multiple expressions with AND.\n *\n * If there's only one expression, it's returned as-is.\n * If there are multiple expressions, they're combined with an AND function.\n *\n * @param expressions - Array of expressions to combine\n * @returns Single expression representing the AND combination\n * @throws Error if the expressions array is empty\n */\nfunction combineWithAnd(\n expressions: Array<BasicExpression<boolean>>\n): BasicExpression<boolean> {\n if (expressions.length === 0) {\n throw new Error(`Cannot combine empty expression list`)\n }\n\n if (expressions.length === 1) {\n return expressions[0]!\n }\n\n // Create an AND function with all expressions as arguments\n return new Func(`and`, 
expressions)\n}\n"],"names":["deepEquals","QueryRefClass","CollectionRefClass","Func"],"mappings":";;;;AAuKO,SAAS,cAAc,OAAyB;AAErD,MAAI,YAAY;AAChB,MAAI;AACJ,MAAI,aAAa;AACjB,QAAM,gBAAgB;AAGtB,SACE,aAAa,iBACb,CAACA,MAAAA,WAAW,WAAW,iBAAiB,GACxC;AACA,wBAAoB;AACpB,gBAAY,2BAA2B,SAAS;AAChD;AAAA,EACF;AAGA,QAAM,UAAU,0BAA0B,SAAS;AAEnD,SAAO;AACT;AAQA,SAAS,2BAA2B,OAAyB;;AAE3D,QAAM,sBAAsB;AAAA,IAC1B,GAAG;AAAA,IACH,MACE,MAAM,KAAK,SAAS,aAChB,IAAIC,GAAAA;AAAAA,MACF,2BAA2B,MAAM,KAAK,KAAK;AAAA,MAC3C,MAAM,KAAK;AAAA,IAAA,IAEb,MAAM;AAAA,IACZ,OAAM,WAAM,SAAN,mBAAY,IAAI,CAAC,gBAAgB;AAAA,MACrC,GAAG;AAAA,MACH,MACE,WAAW,KAAK,SAAS,aACrB,IAAIA,GAAAA;AAAAA,QACF,2BAA2B,WAAW,KAAK,KAAK;AAAA,QAChD,WAAW,KAAK;AAAA,MAAA,IAElB,WAAW;AAAA,IAAA;AAAA,EACjB;AAIJ,SAAO,6BAA6B,mBAAmB;AACzD;AAKA,SAAS,6BAA6B,OAAyB;AAE7D,MAAI,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,GAAG;AAC5C,WAAO;AAAA,EACT;AAIA,MAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,WAAW,GAAG;AAC1C,WAAO;AAAA,EACT;AAGA,QAAM,oBAAoB,gBAAgB,MAAM,KAAK;AAGrD,QAAM,kBAAkB,kBAAkB;AAAA,IAAI,CAAC,WAC7C,mBAAmB,MAAM;AAAA,EAAA;AAI3B,QAAM,iBAAiB,kBAAkB,eAAe;AAGxD,SAAO,mBAAmB,OAAO,cAAc;AACjD;AAUA,SAAS,0BAA0B,OAAyB;;AAC1D,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,0BAA0B,MAAM,IAAI;AAAA,IAC1C,OAAM,WAAM,SAAN,mBAAY,IAAI,CAAC,gBAAgB;AAAA,MACrC,GAAG;AAAA,MACH,MAAM,0BAA0B,WAAW,IAAI;AAAA,IAAA;AAAA,EAC/C;AAEN;AAQA,SAAS,0BAA0B,MAAkB;AACnD,MAAI,KAAK,SAAS,iBAAiB;AACjC,WAAO;AAAA,EACT;AAEA,QAAM,iBAAiB,0BAA0B,KAAK,KAAK;AAG3D,MAAI,oBAAoB,cAAc,GAAG;AAEvC,UAAM,YAAY,0BAA0B,eAAe,IAAI;AAC/D,QAAI,UAAU,SAAS,iBAAiB;AACtC,aAAO,IAAIC,GAAAA,cAAmB,UAAU,YAAY,KAAK,KAAK;AAAA,IAChE,OAAO;AACL,aAAO,IAAID,GAAAA,SAAc,UAAU,OAAO,KAAK,KAAK;AAAA,IACtD;AAAA,EACF;AAEA,SAAO,IAAIA,GAAAA,SAAc,gBAAgB,KAAK,KAAK;AACrD;AAQA,SAAS,oBAAoB,OAAyB;AACpD,UACG,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,MACxC,CAAC,MAAM,WACN,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,OAC3C,CAAC,MAAM,UAAU,MAAM,OAAO,WAAW,OACzC,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,OAC3C,CAAC,MAAM,QAAQ,MAAM,KAAK,WAAW,MACtC,MAAM,UAAU,UAChB,MAAM,WAAW,UACjB,CAAC,MAAM,aACN,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,OAC3C,CAAC,MAAM,YAAY,MAAM,SAAS,WAAW;AAElD;AAiBA,SAAS,gBACP,cACiC;AACjC,QAAM,SAA0C,CAAA;AAEhD,aAAW,UAAU,cAAc;AACjC,QAAI,OAAO,SAAS,UAAU,OAAO,SAAS,OAAO;AAEnD,YAAM,YAAY;AAAA,QAChB,OAAO;AAAA,MAAA;AAET,aAAO,KAAK,GAAG,SAAS;AAAA,IAC1B,OAAO;AAEL,aAAO,KAAK,MAAM;AAAA,IACpB;AAAA,EACF;AAEA,SAAO;AACT;AAiBA,SAAS,mBACP,QACqB;AACrB,QAAM,qCAAqB,IAAA;AAK3B,WAAS,eAAe,MAAmC;AACzD,YAAQ,KAAK,MAAA;AAAA,MACX,KAAK;AAEH,YAAI,KAAK,QAAQ,KAAK,KAAK,SAAS,GAAG;AACrC,gBAAM,eAAe,KAAK,KAAK,CAAC;AAChC,cAAI,cAAc;AAChB,2BAAe,IAAI,YAAY;AAAA,UACjC;AAAA,QACF;AACA;AAAA,MACF,KAAK;AAEH,YAAI,KAAK,MAAM;AACb,eAAK,KAAK,QAAQ,cAAc;AAAA,QAClC;AACA;AAAA,MACF,KAAK;AAEH;AAAA,MACF,KAAK;AAEH,YAAI,KAAK,MAAM;AACb,eAAK,KAAK,QAAQ,cAAc;AAAA,QAClC;AACA;AAAA,IAAA;AAAA,EAEN;AAEA,iBAAe,MAAM;AAErB,SAAO;AAAA,IACL,YAAY;AAAA,IACZ;AAAA,EAAA;AAEJ;AAWA,SAAS,kBACP,iBACqB;AACrB,QAAM,mCAAmB,IAAA;AACzB,QAAM,cAA+C,CAAA;AAGrD,aAAW,UAAU,iBAAiB;AACpC,QAAI,OAAO,eAAe,SAAS,GAAG;AAEpC,YAAM,SAAS,MAAM,KAAK,OAAO,cAAc,EAAE,CAAC;AAClD,UAAI,CAAC,aAAa,IAAI,MAAM,GAAG;AAC7B,qBAAa,IAAI,QAAQ,EAAE;AAAA,MAC7B;AACA,mBAAa,IAAI,MAAM,EAAG,KAAK,OAAO,UAAU;AAAA,IAClD,WAAW,OAAO,eAAe,OAAO,GAAG;AAEzC,kBAAY,KAAK,OAAO,UAAU;AAAA,IACpC;AAAA,EAEF;AAGA,QAAM,2CAA2B,IAAA;AACjC,aAAW,CAAC,QAAQ,OAAO,KAAK,cAAc;AAC5C,yBAAqB,IAAI,QAAQ,eAAe,OAAO,CAAC;AAAA,EAC1D;AAGA,QAAM,sBACJ,YAAY,SAAS,IAAI,eAAe,WAAW,IAAI;AAEzD,SAAO;AAAA,IACL,cAAc;AAAA,IACd,aAAa;AAAA,EAAA;AAEjB;AAaA,SAAS,mBACP,OACA,gBACS;AAET,QAAM,wCAAwB,IAAA;AAG9B,QAAM,gBAAgB;AAAA,IACpB,MAAM;AAAA,IACN,eAAe;AAAA,IACf;AAAA,EAAA;AAIF,QAAM,iBAAiB,MAAM,OACzB,MAAM,KAAK,IAAI,CAAC,gBAAgB;AAAA,IAC9B,GAAG;AAAA,IACH,MAAM;AAAA,MACJ,WAAW;AAAA,M
ACX,eAAe;AAAA,MACf;AAAA,IAAA;AAAA,EACF,EACA,IACF;AAGJ,QAAM,wBAAyD,CAAA;AAG/D,MAAI,eAAe,aAAa;AAC9B,0BAAsB,KAAK,eAAe,WAAW;AAAA,EACvD;AAGA,aAAW,CAAC,QAAQ,MAAM,KAAK,eAAe,cAAc;AAC1D,QAAI,CAAC,kBAAkB,IAAI,MAAM,GAAG;AAClC,4BAAsB,KAAK,MAAM;AAAA,IACnC;AAAA,EACF;AAGA,QAAM,iBAA0B;AAAA;AAAA,IAE9B,QAAQ,MAAM;AAAA,IACd,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,QAAQ,MAAM,SAAS,CAAC,GAAG,MAAM,MAAM,IAAI;AAAA,IAC3C,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,OAAO,MAAM;AAAA,IACb,QAAQ,MAAM;AAAA,IACd,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,UAAU,MAAM,WAAW,CAAC,GAAG,MAAM,QAAQ,IAAI;AAAA;AAAA,IAGjD,MAAM;AAAA,IACN,MAAM;AAAA;AAAA,IAGN,OAAO,sBAAsB,SAAS,IAAI,wBAAwB,CAAA;AAAA,EAAC;AAGrE,SAAO;AACT;AAWA,SAAS,cAAc,OAAyB;AAC9C,SAAO;AAAA;AAAA,IAEL,MACE,MAAM,KAAK,SAAS,kBAChB,IAAIC,iBAAmB,MAAM,KAAK,YAAY,MAAM,KAAK,KAAK,IAC9D,IAAID,GAAAA,SAAc,cAAc,MAAM,KAAK,KAAK,GAAG,MAAM,KAAK,KAAK;AAAA;AAAA,IAGzE,QAAQ,MAAM;AAAA,IACd,MAAM,MAAM,OACR,MAAM,KAAK,IAAI,CAAC,gBAAgB;AAAA,MAC9B,MAAM,WAAW;AAAA,MACjB,MAAM,WAAW;AAAA,MACjB,OAAO,WAAW;AAAA,MAClB,MACE,WAAW,KAAK,SAAS,kBACrB,IAAIC,GAAAA;AAAAA,QACF,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK;AAAA,MAAA,IAElB,IAAID,GAAAA;AAAAA,QACF,cAAc,WAAW,KAAK,KAAK;AAAA,QACnC,WAAW,KAAK;AAAA,MAAA;AAAA,IAClB,EACN,IACF;AAAA,IACJ,OAAO,MAAM,QAAQ,CAAC,GAAG,MAAM,KAAK,IAAI;AAAA,IACxC,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,QAAQ,MAAM,SAAS,CAAC,GAAG,MAAM,MAAM,IAAI;AAAA,IAC3C,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,OAAO,MAAM;AAAA,IACb,QAAQ,MAAM;AAAA,IACd,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,UAAU,MAAM,WAAW,CAAC,GAAG,MAAM,QAAQ,IAAI;AAAA,EAAA;AAErD;AAUA,SAAS,yBACP,MACA,qBACA,mBACM;AACN,QAAM,cAAc,oBAAoB,IAAI,KAAK,KAAK;AAEtD,MAAI,CAAC,aAAa;AAEhB,QAAI,KAAK,SAAS,iBAAiB;AACjC,aAAO,IAAIC,GAAAA,cAAmB,KAAK,YAAY,KAAK,KAAK;AAAA,IAC3D;AAEA,WAAO,IAAID,GAAAA,SAAc,cAAc,KAAK,KAAK,GAAG,KAAK,KAAK;AAAA,EAChE;AAEA,MAAI,KAAK,SAAS,iBAAiB;AAGjC,UAAM,WAAoB;AAAA,MACxB,MAAM,IAAIC,GAAAA,cAAmB,KAAK,YAAY,KAAK,KAAK;AAAA,MACxD,OAAO,CAAC,WAAW;AAAA,IAAA;AAErB,sBAAkB,IAAI,KAAK,KAAK;AAChC,WAAO,IAAID,GAAAA,SAAc,UAAU,KAAK,KAAK;AAAA,EAC/C;AAOA,MAAI,CAAC,iCAAiC,KAAK,KAAK,GAAG;AAGjD,WAAO,IAAIA,GAAAA,SAAc,cAAc,KAAK,KAAK,GAAG,KAAK,KAAK;AAAA,EAChE;AAIA,QAAM,gBAAgB,KAAK,MAAM,SAAS,CAAA;AAC1C,QAAM,oBAA6B;AAAA,IACjC,GAAG,cAAc,KAAK,KAAK;AAAA,IAC3B,OAAO,CAAC,GAAG,eAAe,WAAW;AAAA,EAAA;AAEvC,oBAAkB,IAAI,KAAK,KAAK;AAChC,SAAO,IAAIA,GAAAA,SAAc,mBAAmB,KAAK,KAAK;AACxD;AA8BA,SAAS,iCAAiC,OAAyB;AAEjE,MAAI,MAAM,QAAQ;AAChB,UAAM,gBAAgB,OAAO,OAAO,MAAM,MAAM,EAAE;AAAA,MAChD,CAAC,SAAS,KAAK,SAAS;AAAA,IAAA;AAE1B,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,UAAU,MAAM,OAAO,SAAS,GAAG;AAC3C,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,QAAI,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAC3D,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MACE,MAAM,YACL,MAAM,WAAW,MAAM,QAAQ,SAAS,KACxC,MAAM,YAAY,MAAM,SAAS,SAAS,GAC3C;AACA,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAYA,SAAS,eACP,aAC0B;AAC1B,MAAI,YAAY,WAAW,GAAG;AAC5B,UAAM,IAAI,MAAM,sCAAsC;AAAA,EACxD;AAEA,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO,YAAY,CAAC;AAAA,EACtB;AAGA,SAAO,IAAIE,GAAAA,KAAK,OAAO,WAAW;AACpC;;"}
{"version":3,"file":"optimizer.cjs","sources":["../../../src/query/optimizer.ts"],"sourcesContent":["/**\n * # Query Optimizer\n *\n * The query optimizer improves query performance by implementing predicate pushdown optimization.\n * It rewrites the intermediate representation (IR) to push WHERE clauses as close to the data\n * source as possible, reducing the amount of data processed during joins.\n *\n * ## How It Works\n *\n * The optimizer follows a 4-step process:\n *\n * ### 1. AND Clause Splitting\n * Splits AND clauses at the root level into separate WHERE clauses for granular optimization.\n * ```javascript\n * // Before: WHERE and(eq(users.department_id, 1), gt(users.age, 25))\n * // After: WHERE eq(users.department_id, 1) + WHERE gt(users.age, 25)\n * ```\n *\n * ### 2. Source Analysis\n * Analyzes each WHERE clause to determine which table sources it references:\n * - Single-source clauses: Touch only one table (e.g., `users.department_id = 1`)\n * - Multi-source clauses: Touch multiple tables (e.g., `users.id = posts.user_id`)\n *\n * ### 3. Clause Grouping\n * Groups WHERE clauses by the sources they touch:\n * - Single-source clauses are grouped by their respective table\n * - Multi-source clauses are combined for the main query\n *\n * ### 4. Subquery Creation\n * Lifts single-source WHERE clauses into subqueries that wrap the original table references.\n *\n * ## Safety & Edge Cases\n *\n * The optimizer includes targeted safety checks to prevent predicate pushdown when it could\n * break query semantics:\n *\n * ### Always Safe Operations\n * - **Creating new subqueries**: Wrapping collection references in subqueries with WHERE clauses\n * - **Main query optimizations**: Moving single-source WHERE clauses from main query to subqueries\n * - **Queries with aggregates/ORDER BY/HAVING**: Can still create new filtered subqueries\n *\n * ### Unsafe Operations (blocked by safety checks)\n * Pushing WHERE clauses **into existing subqueries** that have:\n * - **Aggregates**: GROUP BY, HAVING, or aggregate functions in SELECT (would change aggregation)\n * - **Ordering + Limits**: ORDER BY combined with LIMIT/OFFSET (would change result set)\n * - **Functional Operations**: fnSelect, fnWhere, fnHaving (potential side effects)\n *\n * The optimizer tracks which clauses were actually optimized and only removes those from the\n * main query. 
Subquery reuse is handled safely through immutable query copies.\n *\n * ## Example Optimizations\n *\n * ### Basic Query with Joins\n * **Original Query:**\n * ```javascript\n * query\n * .from({ users: usersCollection })\n * .join({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.user_id))\n * .where(({users}) => eq(users.department_id, 1))\n * .where(({posts}) => gt(posts.views, 100))\n * .where(({users, posts}) => eq(users.id, posts.author_id))\n * ```\n *\n * **Optimized Query:**\n * ```javascript\n * query\n * .from({\n * users: subquery\n * .from({ users: usersCollection })\n * .where(({users}) => eq(users.department_id, 1))\n * })\n * .join({\n * posts: subquery\n * .from({ posts: postsCollection })\n * .where(({posts}) => gt(posts.views, 100))\n * }, ({users, posts}) => eq(users.id, posts.user_id))\n * .where(({users, posts}) => eq(users.id, posts.author_id))\n * ```\n *\n * ### Query with Aggregates (Now Optimizable!)\n * **Original Query:**\n * ```javascript\n * query\n * .from({ users: usersCollection })\n * .join({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.user_id))\n * .where(({users}) => eq(users.department_id, 1))\n * .groupBy(['users.department_id'])\n * .select({ count: agg('count', '*') })\n * ```\n *\n * **Optimized Query:**\n * ```javascript\n * query\n * .from({\n * users: subquery\n * .from({ users: usersCollection })\n * .where(({users}) => eq(users.department_id, 1))\n * })\n * .join({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.user_id))\n * .groupBy(['users.department_id'])\n * .select({ count: agg('count', '*') })\n * ```\n *\n * ## Benefits\n *\n * - **Reduced Data Processing**: Filters applied before joins reduce intermediate result size\n * - **Better Performance**: Smaller datasets lead to faster query execution\n * - **Automatic Optimization**: No manual query rewriting required\n * - **Preserves Semantics**: Optimized queries return identical results\n * - **Safe by Design**: Comprehensive checks prevent semantic-breaking optimizations\n *\n * ## Integration\n *\n * The optimizer is automatically called during query compilation before the IR is\n * transformed into a D2Mini pipeline.\n */\n\nimport { deepEquals } from \"../utils.js\"\nimport { CannotCombineEmptyExpressionListError } from \"../errors.js\"\nimport {\n CollectionRef as CollectionRefClass,\n Func,\n QueryRef as QueryRefClass,\n} from \"./ir.js\"\nimport { isConvertibleToCollectionFilter } from \"./compiler/expressions.js\"\nimport type { BasicExpression, From, QueryIR } from \"./ir.js\"\n\n/**\n * Represents a WHERE clause after source analysis\n */\nexport interface AnalyzedWhereClause {\n /** The WHERE expression */\n expression: BasicExpression<boolean>\n /** Set of table/source aliases that this WHERE clause touches */\n touchedSources: Set<string>\n}\n\n/**\n * Represents WHERE clauses grouped by the sources they touch\n */\nexport interface GroupedWhereClauses {\n /** WHERE clauses that touch only a single source, grouped by source alias */\n singleSource: Map<string, BasicExpression<boolean>>\n /** WHERE clauses that touch multiple sources, combined into one expression */\n multiSource?: BasicExpression<boolean>\n}\n\n/**\n * Result of query optimization including both the optimized query and collection-specific WHERE clauses\n */\nexport interface OptimizationResult {\n /** The optimized query with WHERE clauses potentially moved to subqueries */\n optimizedQuery: QueryIR\n /** Map of collection aliases to their extracted 
WHERE clauses for index optimization */\n collectionWhereClauses: Map<string, BasicExpression<boolean>>\n}\n\n/**\n * Main query optimizer entry point that lifts WHERE clauses into subqueries.\n *\n * This function implements multi-level predicate pushdown optimization by recursively\n * moving WHERE clauses through nested subqueries to get them as close to the data\n * sources as possible, then removing redundant subqueries.\n *\n * @param query - The QueryIR to optimize\n * @returns An OptimizationResult with the optimized query and collection WHERE clause mapping\n *\n * @example\n * ```typescript\n * const originalQuery = {\n * from: new CollectionRef(users, 'u'),\n * join: [{ from: new CollectionRef(posts, 'p'), ... }],\n * where: [eq(u.dept_id, 1), gt(p.views, 100)]\n * }\n *\n * const { optimizedQuery, collectionWhereClauses } = optimizeQuery(originalQuery)\n * // Result: Single-source clauses moved to deepest possible subqueries\n * // collectionWhereClauses: Map { 'u' => eq(u.dept_id, 1), 'p' => gt(p.views, 100) }\n * ```\n */\nexport function optimizeQuery(query: QueryIR): OptimizationResult {\n // First, extract collection WHERE clauses before optimization\n const collectionWhereClauses = extractCollectionWhereClauses(query)\n\n // Apply multi-level predicate pushdown with iterative convergence\n let optimized = query\n let previousOptimized: QueryIR | undefined\n let iterations = 0\n const maxIterations = 10 // Prevent infinite loops\n\n // Keep optimizing until no more changes occur or max iterations reached\n while (\n iterations < maxIterations &&\n !deepEquals(optimized, previousOptimized)\n ) {\n previousOptimized = optimized\n optimized = applyRecursiveOptimization(optimized)\n iterations++\n }\n\n // Remove redundant subqueries\n const cleaned = removeRedundantSubqueries(optimized)\n\n return {\n optimizedQuery: cleaned,\n collectionWhereClauses,\n }\n}\n\n/**\n * Extracts collection-specific WHERE clauses from a query for index optimization.\n * This analyzes the original query to identify WHERE clauses that can be pushed down\n * to specific collections, but only for simple queries without joins.\n *\n * @param query - The original QueryIR to analyze\n * @returns Map of collection aliases to their WHERE clauses\n */\nfunction extractCollectionWhereClauses(\n query: QueryIR\n): Map<string, BasicExpression<boolean>> {\n const collectionWhereClauses = new Map<string, BasicExpression<boolean>>()\n\n // Only analyze queries that have WHERE clauses\n if (!query.where || query.where.length === 0) {\n return collectionWhereClauses\n }\n\n // Split all AND clauses at the root level for granular analysis\n const splitWhereClauses = splitAndClauses(query.where)\n\n // Analyze each WHERE clause to determine which sources it touches\n const analyzedClauses = splitWhereClauses.map((clause) =>\n analyzeWhereClause(clause)\n )\n\n // Group clauses by single-source vs multi-source\n const groupedClauses = groupWhereClauses(analyzedClauses)\n\n // Only include single-source clauses that reference collections directly\n // and can be converted to BasicExpression format for collection indexes\n for (const [sourceAlias, whereClause] of groupedClauses.singleSource) {\n // Check if this source alias corresponds to a collection reference\n if (isCollectionReference(query, sourceAlias)) {\n // Check if the WHERE clause can be converted to collection-compatible format\n if (isConvertibleToCollectionFilter(whereClause)) {\n collectionWhereClauses.set(sourceAlias, whereClause)\n }\n }\n }\n\n 
return collectionWhereClauses\n}\n\n/**\n * Determines if a source alias refers to a collection reference (not a subquery).\n * This is used to identify WHERE clauses that can be pushed down to collection subscriptions.\n *\n * @param query - The query to analyze\n * @param sourceAlias - The source alias to check\n * @returns True if the alias refers to a collection reference\n */\nfunction isCollectionReference(query: QueryIR, sourceAlias: string): boolean {\n // Check the FROM clause\n if (query.from.alias === sourceAlias) {\n return query.from.type === `collectionRef`\n }\n\n // Check JOIN clauses\n if (query.join) {\n for (const joinClause of query.join) {\n if (joinClause.from.alias === sourceAlias) {\n return joinClause.from.type === `collectionRef`\n }\n }\n }\n\n return false\n}\n\n/**\n * Applies recursive predicate pushdown optimization.\n *\n * @param query - The QueryIR to optimize\n * @returns A new QueryIR with optimizations applied\n */\nfunction applyRecursiveOptimization(query: QueryIR): QueryIR {\n // First, recursively optimize any existing subqueries\n const subqueriesOptimized = {\n ...query,\n from:\n query.from.type === `queryRef`\n ? new QueryRefClass(\n applyRecursiveOptimization(query.from.query),\n query.from.alias\n )\n : query.from,\n join: query.join?.map((joinClause) => ({\n ...joinClause,\n from:\n joinClause.from.type === `queryRef`\n ? new QueryRefClass(\n applyRecursiveOptimization(joinClause.from.query),\n joinClause.from.alias\n )\n : joinClause.from,\n })),\n }\n\n // Then apply single-level optimization to this query\n return applySingleLevelOptimization(subqueriesOptimized)\n}\n\n/**\n * Applies single-level predicate pushdown optimization (existing logic)\n */\nfunction applySingleLevelOptimization(query: QueryIR): QueryIR {\n // Skip optimization if no WHERE clauses exist\n if (!query.where || query.where.length === 0) {\n return query\n }\n\n // Skip optimization if there are no joins - predicate pushdown only benefits joins\n // Single-table queries don't benefit from this optimization\n if (!query.join || query.join.length === 0) {\n return query\n }\n\n // Step 1: Split all AND clauses at the root level for granular optimization\n const splitWhereClauses = splitAndClauses(query.where)\n\n // Step 2: Analyze each WHERE clause to determine which sources it touches\n const analyzedClauses = splitWhereClauses.map((clause) =>\n analyzeWhereClause(clause)\n )\n\n // Step 3: Group clauses by single-source vs multi-source\n const groupedClauses = groupWhereClauses(analyzedClauses)\n\n // Step 4: Apply optimizations by lifting single-source clauses into subqueries\n return applyOptimizations(query, groupedClauses)\n}\n\n/**\n * Removes redundant subqueries that don't add value.\n * A subquery is redundant if it only wraps another query without adding\n * WHERE, SELECT, GROUP BY, HAVING, ORDER BY, or LIMIT/OFFSET clauses.\n *\n * @param query - The QueryIR to process\n * @returns A new QueryIR with redundant subqueries removed\n */\nfunction removeRedundantSubqueries(query: QueryIR): QueryIR {\n return {\n ...query,\n from: removeRedundantFromClause(query.from),\n join: query.join?.map((joinClause) => ({\n ...joinClause,\n from: removeRedundantFromClause(joinClause.from),\n })),\n }\n}\n\n/**\n * Removes redundant subqueries from a FROM clause.\n *\n * @param from - The FROM clause to process\n * @returns A FROM clause with redundant subqueries removed\n */\nfunction removeRedundantFromClause(from: From): From {\n if (from.type === `collectionRef`) {\n 
return from\n }\n\n const processedQuery = removeRedundantSubqueries(from.query)\n\n // Check if this subquery is redundant\n if (isRedundantSubquery(processedQuery)) {\n // Return the inner query's FROM clause with this alias\n const innerFrom = removeRedundantFromClause(processedQuery.from)\n if (innerFrom.type === `collectionRef`) {\n return new CollectionRefClass(innerFrom.collection, from.alias)\n } else {\n return new QueryRefClass(innerFrom.query, from.alias)\n }\n }\n\n return new QueryRefClass(processedQuery, from.alias)\n}\n\n/**\n * Determines if a subquery is redundant (adds no value).\n *\n * @param query - The query to check\n * @returns True if the query is redundant and can be removed\n */\nfunction isRedundantSubquery(query: QueryIR): boolean {\n return (\n (!query.where || query.where.length === 0) &&\n !query.select &&\n (!query.groupBy || query.groupBy.length === 0) &&\n (!query.having || query.having.length === 0) &&\n (!query.orderBy || query.orderBy.length === 0) &&\n (!query.join || query.join.length === 0) &&\n query.limit === undefined &&\n query.offset === undefined &&\n !query.fnSelect &&\n (!query.fnWhere || query.fnWhere.length === 0) &&\n (!query.fnHaving || query.fnHaving.length === 0)\n )\n}\n\n/**\n * Step 1: Split all AND clauses recursively into separate WHERE clauses.\n *\n * This enables more granular optimization by treating each condition independently.\n * OR clauses are preserved as they cannot be split without changing query semantics.\n *\n * @param whereClauses - Array of WHERE expressions to split\n * @returns Flattened array with AND clauses split into separate expressions\n *\n * @example\n * ```typescript\n * // Input: [and(eq(a, 1), gt(b, 2)), eq(c, 3)]\n * // Output: [eq(a, 1), gt(b, 2), eq(c, 3)]\n * ```\n */\nfunction splitAndClauses(\n whereClauses: Array<BasicExpression<boolean>>\n): Array<BasicExpression<boolean>> {\n const result: Array<BasicExpression<boolean>> = []\n\n for (const clause of whereClauses) {\n if (clause.type === `func` && clause.name === `and`) {\n // Recursively split nested AND clauses to handle complex expressions\n const splitArgs = splitAndClauses(\n clause.args as Array<BasicExpression<boolean>>\n )\n result.push(...splitArgs)\n } else {\n // Preserve non-AND clauses as-is (including OR clauses)\n result.push(clause)\n }\n }\n\n return result\n}\n\n/**\n * Step 2: Analyze which table sources a WHERE clause touches.\n *\n * This determines whether a clause can be pushed down to a specific table\n * or must remain in the main query (for multi-source clauses like join conditions).\n *\n * @param clause - The WHERE expression to analyze\n * @returns Analysis result with the expression and touched source aliases\n *\n * @example\n * ```typescript\n * // eq(users.department_id, 1) -> touches ['users']\n * // eq(users.id, posts.user_id) -> touches ['users', 'posts']\n * ```\n */\nfunction analyzeWhereClause(\n clause: BasicExpression<boolean>\n): AnalyzedWhereClause {\n const touchedSources = new Set<string>()\n\n /**\n * Recursively collect all table aliases referenced in an expression\n */\n function collectSources(expr: BasicExpression | any): void {\n switch (expr.type) {\n case `ref`:\n // PropRef path has the table alias as the first element\n if (expr.path && expr.path.length > 0) {\n const firstElement = expr.path[0]\n if (firstElement) {\n touchedSources.add(firstElement)\n }\n }\n break\n case `func`:\n // Recursively analyze function arguments (e.g., eq, gt, and, or)\n if (expr.args) {\n 
expr.args.forEach(collectSources)\n }\n break\n case `val`:\n // Values don't reference any sources\n break\n case `agg`:\n // Aggregates can reference sources in their arguments\n if (expr.args) {\n expr.args.forEach(collectSources)\n }\n break\n }\n }\n\n collectSources(clause)\n\n return {\n expression: clause,\n touchedSources,\n }\n}\n\n/**\n * Step 3: Group WHERE clauses by the sources they touch.\n *\n * Single-source clauses can be pushed down to subqueries for optimization.\n * Multi-source clauses must remain in the main query to preserve join semantics.\n *\n * @param analyzedClauses - Array of analyzed WHERE clauses\n * @returns Grouped clauses ready for optimization\n */\nfunction groupWhereClauses(\n analyzedClauses: Array<AnalyzedWhereClause>\n): GroupedWhereClauses {\n const singleSource = new Map<string, Array<BasicExpression<boolean>>>()\n const multiSource: Array<BasicExpression<boolean>> = []\n\n // Categorize each clause based on how many sources it touches\n for (const clause of analyzedClauses) {\n if (clause.touchedSources.size === 1) {\n // Single source clause - can be optimized\n const source = Array.from(clause.touchedSources)[0]!\n if (!singleSource.has(source)) {\n singleSource.set(source, [])\n }\n singleSource.get(source)!.push(clause.expression)\n } else if (clause.touchedSources.size > 1) {\n // Multi-source clause - must stay in main query\n multiSource.push(clause.expression)\n }\n // Skip clauses that touch no sources (constants) - they don't need optimization\n }\n\n // Combine multiple clauses for each source with AND\n const combinedSingleSource = new Map<string, BasicExpression<boolean>>()\n for (const [source, clauses] of singleSource) {\n combinedSingleSource.set(source, combineWithAnd(clauses))\n }\n\n // Combine multi-source clauses with AND\n const combinedMultiSource =\n multiSource.length > 0 ? combineWithAnd(multiSource) : undefined\n\n return {\n singleSource: combinedSingleSource,\n multiSource: combinedMultiSource,\n }\n}\n\n/**\n * Step 4: Apply optimizations by lifting single-source clauses into subqueries.\n *\n * Creates a new QueryIR with single-source WHERE clauses moved to subqueries\n * that wrap the original table references. This ensures immutability and prevents\n * infinite recursion issues.\n *\n * @param query - Original QueryIR to optimize\n * @param groupedClauses - WHERE clauses grouped by optimization strategy\n * @returns New QueryIR with optimizations applied\n */\nfunction applyOptimizations(\n query: QueryIR,\n groupedClauses: GroupedWhereClauses\n): QueryIR {\n // Track which single-source clauses were actually optimized\n const actuallyOptimized = new Set<string>()\n\n // Optimize the main FROM clause and track what was optimized\n const optimizedFrom = optimizeFromWithTracking(\n query.from,\n groupedClauses.singleSource,\n actuallyOptimized\n )\n\n // Optimize JOIN clauses and track what was optimized\n const optimizedJoins = query.join\n ? 
query.join.map((joinClause) => ({\n ...joinClause,\n from: optimizeFromWithTracking(\n joinClause.from,\n groupedClauses.singleSource,\n actuallyOptimized\n ),\n }))\n : undefined\n\n // Build the remaining WHERE clauses: multi-source + any single-source that weren't optimized\n const remainingWhereClauses: Array<BasicExpression<boolean>> = []\n\n // Add multi-source clauses\n if (groupedClauses.multiSource) {\n remainingWhereClauses.push(groupedClauses.multiSource)\n }\n\n // Add single-source clauses that weren't actually optimized\n for (const [source, clause] of groupedClauses.singleSource) {\n if (!actuallyOptimized.has(source)) {\n remainingWhereClauses.push(clause)\n }\n }\n\n // Create a completely new query object to ensure immutability\n const optimizedQuery: QueryIR = {\n // Copy all non-optimized fields as-is\n select: query.select,\n groupBy: query.groupBy ? [...query.groupBy] : undefined,\n having: query.having ? [...query.having] : undefined,\n orderBy: query.orderBy ? [...query.orderBy] : undefined,\n limit: query.limit,\n offset: query.offset,\n fnSelect: query.fnSelect,\n fnWhere: query.fnWhere ? [...query.fnWhere] : undefined,\n fnHaving: query.fnHaving ? [...query.fnHaving] : undefined,\n\n // Use the optimized FROM and JOIN clauses\n from: optimizedFrom,\n join: optimizedJoins,\n\n // Only include WHERE clauses that weren't successfully optimized\n where: remainingWhereClauses.length > 0 ? remainingWhereClauses : [],\n }\n\n return optimizedQuery\n}\n\n/**\n * Helper function to create a deep copy of a QueryIR object for immutability.\n *\n * This ensures that all optimizations create new objects rather than modifying\n * existing ones, preventing infinite recursion and shared reference issues.\n *\n * @param query - QueryIR to deep copy\n * @returns New QueryIR object with all nested objects copied\n */\nfunction deepCopyQuery(query: QueryIR): QueryIR {\n return {\n // Recursively copy the FROM clause\n from:\n query.from.type === `collectionRef`\n ? new CollectionRefClass(query.from.collection, query.from.alias)\n : new QueryRefClass(deepCopyQuery(query.from.query), query.from.alias),\n\n // Copy all other fields, creating new arrays where necessary\n select: query.select,\n join: query.join\n ? query.join.map((joinClause) => ({\n type: joinClause.type,\n left: joinClause.left,\n right: joinClause.right,\n from:\n joinClause.from.type === `collectionRef`\n ? new CollectionRefClass(\n joinClause.from.collection,\n joinClause.from.alias\n )\n : new QueryRefClass(\n deepCopyQuery(joinClause.from.query),\n joinClause.from.alias\n ),\n }))\n : undefined,\n where: query.where ? [...query.where] : undefined,\n groupBy: query.groupBy ? [...query.groupBy] : undefined,\n having: query.having ? [...query.having] : undefined,\n orderBy: query.orderBy ? [...query.orderBy] : undefined,\n limit: query.limit,\n offset: query.offset,\n fnSelect: query.fnSelect,\n fnWhere: query.fnWhere ? [...query.fnWhere] : undefined,\n fnHaving: query.fnHaving ? 
[...query.fnHaving] : undefined,\n }\n}\n\n/**\n * Helper function to optimize a FROM clause while tracking what was actually optimized.\n *\n * @param from - FROM clause to optimize\n * @param singleSourceClauses - Map of source aliases to their WHERE clauses\n * @param actuallyOptimized - Set to track which sources were actually optimized\n * @returns New FROM clause, potentially wrapped in a subquery\n */\nfunction optimizeFromWithTracking(\n from: From,\n singleSourceClauses: Map<string, BasicExpression<boolean>>,\n actuallyOptimized: Set<string>\n): From {\n const whereClause = singleSourceClauses.get(from.alias)\n\n if (!whereClause) {\n // No optimization needed, but return a copy to maintain immutability\n if (from.type === `collectionRef`) {\n return new CollectionRefClass(from.collection, from.alias)\n }\n // Must be queryRef due to type system\n return new QueryRefClass(deepCopyQuery(from.query), from.alias)\n }\n\n if (from.type === `collectionRef`) {\n // Create a new subquery with the WHERE clause for the collection\n // This is always safe since we're creating a new subquery\n const subQuery: QueryIR = {\n from: new CollectionRefClass(from.collection, from.alias),\n where: [whereClause],\n }\n actuallyOptimized.add(from.alias) // Mark as successfully optimized\n return new QueryRefClass(subQuery, from.alias)\n }\n\n // Must be queryRef due to type system\n\n // SAFETY CHECK: Only check safety when pushing WHERE clauses into existing subqueries\n // We need to be careful about pushing WHERE clauses into subqueries that already have\n // aggregates, HAVING, or ORDER BY + LIMIT since that could change their semantics\n if (!isSafeToPushIntoExistingSubquery(from.query)) {\n // Return a copy without optimization to maintain immutability\n // Do NOT mark as optimized since we didn't actually optimize it\n return new QueryRefClass(deepCopyQuery(from.query), from.alias)\n }\n\n // Add the WHERE clause to the existing subquery\n // Create a deep copy to ensure immutability\n const existingWhere = from.query.where || []\n const optimizedSubQuery: QueryIR = {\n ...deepCopyQuery(from.query),\n where: [...existingWhere, whereClause],\n }\n actuallyOptimized.add(from.alias) // Mark as successfully optimized\n return new QueryRefClass(optimizedSubQuery, from.alias)\n}\n\n/**\n * Determines if it's safe to push WHERE clauses into an existing subquery.\n *\n * Pushing WHERE clauses into existing subqueries can break semantics in several cases:\n *\n * 1. **Aggregates**: Pushing predicates before GROUP BY changes what gets aggregated\n * 2. **ORDER BY + LIMIT/OFFSET**: Pushing predicates before sorting+limiting changes the result set\n * 3. **HAVING clauses**: These operate on aggregated data, predicates should not be pushed past them\n * 4. 
**Functional operations**: fnSelect, fnWhere, fnHaving could have side effects\n *\n * Note: This safety check only applies when pushing WHERE clauses into existing subqueries.\n * Creating new subqueries from collection references is always safe.\n *\n * @param query - The existing subquery to check for safety\n * @returns True if it's safe to push WHERE clauses into this subquery, false otherwise\n *\n * @example\n * ```typescript\n * // UNSAFE: has GROUP BY - pushing WHERE could change aggregation\n * { from: users, groupBy: [dept], select: { count: agg('count', '*') } }\n *\n * // UNSAFE: has ORDER BY + LIMIT - pushing WHERE could change \"top 10\"\n * { from: users, orderBy: [salary desc], limit: 10 }\n *\n * // SAFE: plain SELECT without aggregates/limits\n * { from: users, select: { id, name } }\n * ```\n */\nfunction isSafeToPushIntoExistingSubquery(query: QueryIR): boolean {\n // Check for aggregates in SELECT clause\n if (query.select) {\n const hasAggregates = Object.values(query.select).some(\n (expr) => expr.type === `agg`\n )\n if (hasAggregates) {\n return false\n }\n }\n\n // Check for GROUP BY clause\n if (query.groupBy && query.groupBy.length > 0) {\n return false\n }\n\n // Check for HAVING clause\n if (query.having && query.having.length > 0) {\n return false\n }\n\n // Check for ORDER BY with LIMIT or OFFSET (dangerous combination)\n if (query.orderBy && query.orderBy.length > 0) {\n if (query.limit !== undefined || query.offset !== undefined) {\n return false\n }\n }\n\n // Check for functional variants that might have side effects\n if (\n query.fnSelect ||\n (query.fnWhere && query.fnWhere.length > 0) ||\n (query.fnHaving && query.fnHaving.length > 0)\n ) {\n return false\n }\n\n // If none of the unsafe conditions are present, it's safe to optimize\n return true\n}\n\n/**\n * Helper function to combine multiple expressions with AND.\n *\n * If there's only one expression, it's returned as-is.\n * If there are multiple expressions, they're combined with an AND function.\n *\n * @param expressions - Array of expressions to combine\n * @returns Single expression representing the AND combination\n * @throws Error if the expressions array is empty\n */\nfunction combineWithAnd(\n expressions: Array<BasicExpression<boolean>>\n): BasicExpression<boolean> {\n if (expressions.length === 0) {\n throw new CannotCombineEmptyExpressionListError()\n }\n\n if (expressions.length === 1) {\n return expressions[0]!\n }\n\n // Create an AND function with all expressions as arguments\n return new Func(`and`, 
expressions)\n}\n"],"names":["deepEquals","isConvertibleToCollectionFilter","QueryRefClass","CollectionRefClass","expressions","CannotCombineEmptyExpressionListError","Func"],"mappings":";;;;;;AAoLO,SAAS,cAAc,OAAoC;AAEhE,QAAM,yBAAyB,8BAA8B,KAAK;AAGlE,MAAI,YAAY;AAChB,MAAI;AACJ,MAAI,aAAa;AACjB,QAAM,gBAAgB;AAGtB,SACE,aAAa,iBACb,CAACA,MAAAA,WAAW,WAAW,iBAAiB,GACxC;AACA,wBAAoB;AACpB,gBAAY,2BAA2B,SAAS;AAChD;AAAA,EACF;AAGA,QAAM,UAAU,0BAA0B,SAAS;AAEnD,SAAO;AAAA,IACL,gBAAgB;AAAA,IAChB;AAAA,EAAA;AAEJ;AAUA,SAAS,8BACP,OACuC;AACvC,QAAM,6CAA6B,IAAA;AAGnC,MAAI,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,GAAG;AAC5C,WAAO;AAAA,EACT;AAGA,QAAM,oBAAoB,gBAAgB,MAAM,KAAK;AAGrD,QAAM,kBAAkB,kBAAkB;AAAA,IAAI,CAAC,WAC7C,mBAAmB,MAAM;AAAA,EAAA;AAI3B,QAAM,iBAAiB,kBAAkB,eAAe;AAIxD,aAAW,CAAC,aAAa,WAAW,KAAK,eAAe,cAAc;AAEpE,QAAI,sBAAsB,OAAO,WAAW,GAAG;AAE7C,UAAIC,YAAAA,gCAAgC,WAAW,GAAG;AAChD,+BAAuB,IAAI,aAAa,WAAW;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAUA,SAAS,sBAAsB,OAAgB,aAA8B;AAE3E,MAAI,MAAM,KAAK,UAAU,aAAa;AACpC,WAAO,MAAM,KAAK,SAAS;AAAA,EAC7B;AAGA,MAAI,MAAM,MAAM;AACd,eAAW,cAAc,MAAM,MAAM;AACnC,UAAI,WAAW,KAAK,UAAU,aAAa;AACzC,eAAO,WAAW,KAAK,SAAS;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAQA,SAAS,2BAA2B,OAAyB;;AAE3D,QAAM,sBAAsB;AAAA,IAC1B,GAAG;AAAA,IACH,MACE,MAAM,KAAK,SAAS,aAChB,IAAIC,GAAAA;AAAAA,MACF,2BAA2B,MAAM,KAAK,KAAK;AAAA,MAC3C,MAAM,KAAK;AAAA,IAAA,IAEb,MAAM;AAAA,IACZ,OAAM,WAAM,SAAN,mBAAY,IAAI,CAAC,gBAAgB;AAAA,MACrC,GAAG;AAAA,MACH,MACE,WAAW,KAAK,SAAS,aACrB,IAAIA,GAAAA;AAAAA,QACF,2BAA2B,WAAW,KAAK,KAAK;AAAA,QAChD,WAAW,KAAK;AAAA,MAAA,IAElB,WAAW;AAAA,IAAA;AAAA,EACjB;AAIJ,SAAO,6BAA6B,mBAAmB;AACzD;AAKA,SAAS,6BAA6B,OAAyB;AAE7D,MAAI,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,GAAG;AAC5C,WAAO;AAAA,EACT;AAIA,MAAI,CAAC,MAAM,QAAQ,MAAM,KAAK,WAAW,GAAG;AAC1C,WAAO;AAAA,EACT;AAGA,QAAM,oBAAoB,gBAAgB,MAAM,KAAK;AAGrD,QAAM,kBAAkB,kBAAkB;AAAA,IAAI,CAAC,WAC7C,mBAAmB,MAAM;AAAA,EAAA;AAI3B,QAAM,iBAAiB,kBAAkB,eAAe;AAGxD,SAAO,mBAAmB,OAAO,cAAc;AACjD;AAUA,SAAS,0BAA0B,OAAyB;;AAC1D,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,0BAA0B,MAAM,IAAI;AAAA,IAC1C,OAAM,WAAM,SAAN,mBAAY,IAAI,CAAC,gBAAgB;AAAA,MACrC,GAAG;AAAA,MACH,MAAM,0BAA0B,WAAW,IAAI;AAAA,IAAA;AAAA,EAC/C;AAEN;AAQA,SAAS,0BAA0B,MAAkB;AACnD,MAAI,KAAK,SAAS,iBAAiB;AACjC,WAAO;AAAA,EACT;AAEA,QAAM,iBAAiB,0BAA0B,KAAK,KAAK;AAG3D,MAAI,oBAAoB,cAAc,GAAG;AAEvC,UAAM,YAAY,0BAA0B,eAAe,IAAI;AAC/D,QAAI,UAAU,SAAS,iBAAiB;AACtC,aAAO,IAAIC,GAAAA,cAAmB,UAAU,YAAY,KAAK,KAAK;AAAA,IAChE,OAAO;AACL,aAAO,IAAID,GAAAA,SAAc,UAAU,OAAO,KAAK,KAAK;AAAA,IACtD;AAAA,EACF;AAEA,SAAO,IAAIA,GAAAA,SAAc,gBAAgB,KAAK,KAAK;AACrD;AAQA,SAAS,oBAAoB,OAAyB;AACpD,UACG,CAAC,MAAM,SAAS,MAAM,MAAM,WAAW,MACxC,CAAC,MAAM,WACN,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,OAC3C,CAAC,MAAM,UAAU,MAAM,OAAO,WAAW,OACzC,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,OAC3C,CAAC,MAAM,QAAQ,MAAM,KAAK,WAAW,MACtC,MAAM,UAAU,UAChB,MAAM,WAAW,UACjB,CAAC,MAAM,aACN,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,OAC3C,CAAC,MAAM,YAAY,MAAM,SAAS,WAAW;AAElD;AAiBA,SAAS,gBACP,cACiC;AACjC,QAAM,SAA0C,CAAA;AAEhD,aAAW,UAAU,cAAc;AACjC,QAAI,OAAO,SAAS,UAAU,OAAO,SAAS,OAAO;AAEnD,YAAM,YAAY;AAAA,QAChB,OAAO;AAAA,MAAA;AAET,aAAO,KAAK,GAAG,SAAS;AAAA,IAC1B,OAAO;AAEL,aAAO,KAAK,MAAM;AAAA,IACpB;AAAA,EACF;AAEA,SAAO;AACT;AAiBA,SAAS,mBACP,QACqB;AACrB,QAAM,qCAAqB,IAAA;AAK3B,WAAS,eAAe,MAAmC;AACzD,YAAQ,KAAK,MAAA;AAAA,MACX,KAAK;AAEH,YAAI,KAAK,QAAQ,KAAK,KAAK,SAAS,GAAG;AACrC,gBAAM,eAAe,KAAK,KAAK,CAAC;AAChC,cAAI,cAAc;AAChB,2BAAe,IAAI,YAAY;AAAA,UACjC;AAAA,QACF;AACA;AAAA,MACF,KAAK;AAEH,YAAI,KAAK,MAAM;AACb,eAAK,KAAK,QAAQ,cAAc;AAAA,QAClC;AACA;AAAA,MACF,KAAK;AAEH;AAAA,MACF,KAAK;AAEH,YAAI,KAAK,MAAM;AACb,eAAK,KAAK,QAAQ,cAAc;AAAA,QAClC;AACA;AAAA,IAAA;AAAA,EAEN;AAEA,iBAAe,MAAM;AAErB,SAAO;AAAA,IACL,YA
AY;AAAA,IACZ;AAAA,EAAA;AAEJ;AAWA,SAAS,kBACP,iBACqB;AACrB,QAAM,mCAAmB,IAAA;AACzB,QAAM,cAA+C,CAAA;AAGrD,aAAW,UAAU,iBAAiB;AACpC,QAAI,OAAO,eAAe,SAAS,GAAG;AAEpC,YAAM,SAAS,MAAM,KAAK,OAAO,cAAc,EAAE,CAAC;AAClD,UAAI,CAAC,aAAa,IAAI,MAAM,GAAG;AAC7B,qBAAa,IAAI,QAAQ,EAAE;AAAA,MAC7B;AACA,mBAAa,IAAI,MAAM,EAAG,KAAK,OAAO,UAAU;AAAA,IAClD,WAAW,OAAO,eAAe,OAAO,GAAG;AAEzC,kBAAY,KAAK,OAAO,UAAU;AAAA,IACpC;AAAA,EAEF;AAGA,QAAM,2CAA2B,IAAA;AACjC,aAAW,CAAC,QAAQ,OAAO,KAAK,cAAc;AAC5C,yBAAqB,IAAI,QAAQ,eAAe,OAAO,CAAC;AAAA,EAC1D;AAGA,QAAM,sBACJ,YAAY,SAAS,IAAI,eAAe,WAAW,IAAI;AAEzD,SAAO;AAAA,IACL,cAAc;AAAA,IACd,aAAa;AAAA,EAAA;AAEjB;AAaA,SAAS,mBACP,OACA,gBACS;AAET,QAAM,wCAAwB,IAAA;AAG9B,QAAM,gBAAgB;AAAA,IACpB,MAAM;AAAA,IACN,eAAe;AAAA,IACf;AAAA,EAAA;AAIF,QAAM,iBAAiB,MAAM,OACzB,MAAM,KAAK,IAAI,CAAC,gBAAgB;AAAA,IAC9B,GAAG;AAAA,IACH,MAAM;AAAA,MACJ,WAAW;AAAA,MACX,eAAe;AAAA,MACf;AAAA,IAAA;AAAA,EACF,EACA,IACF;AAGJ,QAAM,wBAAyD,CAAA;AAG/D,MAAI,eAAe,aAAa;AAC9B,0BAAsB,KAAK,eAAe,WAAW;AAAA,EACvD;AAGA,aAAW,CAAC,QAAQ,MAAM,KAAK,eAAe,cAAc;AAC1D,QAAI,CAAC,kBAAkB,IAAI,MAAM,GAAG;AAClC,4BAAsB,KAAK,MAAM;AAAA,IACnC;AAAA,EACF;AAGA,QAAM,iBAA0B;AAAA;AAAA,IAE9B,QAAQ,MAAM;AAAA,IACd,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,QAAQ,MAAM,SAAS,CAAC,GAAG,MAAM,MAAM,IAAI;AAAA,IAC3C,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,OAAO,MAAM;AAAA,IACb,QAAQ,MAAM;AAAA,IACd,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,UAAU,MAAM,WAAW,CAAC,GAAG,MAAM,QAAQ,IAAI;AAAA;AAAA,IAGjD,MAAM;AAAA,IACN,MAAM;AAAA;AAAA,IAGN,OAAO,sBAAsB,SAAS,IAAI,wBAAwB,CAAA;AAAA,EAAC;AAGrE,SAAO;AACT;AAWA,SAAS,cAAc,OAAyB;AAC9C,SAAO;AAAA;AAAA,IAEL,MACE,MAAM,KAAK,SAAS,kBAChB,IAAIC,iBAAmB,MAAM,KAAK,YAAY,MAAM,KAAK,KAAK,IAC9D,IAAID,GAAAA,SAAc,cAAc,MAAM,KAAK,KAAK,GAAG,MAAM,KAAK,KAAK;AAAA;AAAA,IAGzE,QAAQ,MAAM;AAAA,IACd,MAAM,MAAM,OACR,MAAM,KAAK,IAAI,CAAC,gBAAgB;AAAA,MAC9B,MAAM,WAAW;AAAA,MACjB,MAAM,WAAW;AAAA,MACjB,OAAO,WAAW;AAAA,MAClB,MACE,WAAW,KAAK,SAAS,kBACrB,IAAIC,GAAAA;AAAAA,QACF,WAAW,KAAK;AAAA,QAChB,WAAW,KAAK;AAAA,MAAA,IAElB,IAAID,GAAAA;AAAAA,QACF,cAAc,WAAW,KAAK,KAAK;AAAA,QACnC,WAAW,KAAK;AAAA,MAAA;AAAA,IAClB,EACN,IACF;AAAA,IACJ,OAAO,MAAM,QAAQ,CAAC,GAAG,MAAM,KAAK,IAAI;AAAA,IACxC,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,QAAQ,MAAM,SAAS,CAAC,GAAG,MAAM,MAAM,IAAI;AAAA,IAC3C,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,OAAO,MAAM;AAAA,IACb,QAAQ,MAAM;AAAA,IACd,UAAU,MAAM;AAAA,IAChB,SAAS,MAAM,UAAU,CAAC,GAAG,MAAM,OAAO,IAAI;AAAA,IAC9C,UAAU,MAAM,WAAW,CAAC,GAAG,MAAM,QAAQ,IAAI;AAAA,EAAA;AAErD;AAUA,SAAS,yBACP,MACA,qBACA,mBACM;AACN,QAAM,cAAc,oBAAoB,IAAI,KAAK,KAAK;AAEtD,MAAI,CAAC,aAAa;AAEhB,QAAI,KAAK,SAAS,iBAAiB;AACjC,aAAO,IAAIC,GAAAA,cAAmB,KAAK,YAAY,KAAK,KAAK;AAAA,IAC3D;AAEA,WAAO,IAAID,GAAAA,SAAc,cAAc,KAAK,KAAK,GAAG,KAAK,KAAK;AAAA,EAChE;AAEA,MAAI,KAAK,SAAS,iBAAiB;AAGjC,UAAM,WAAoB;AAAA,MACxB,MAAM,IAAIC,GAAAA,cAAmB,KAAK,YAAY,KAAK,KAAK;AAAA,MACxD,OAAO,CAAC,WAAW;AAAA,IAAA;AAErB,sBAAkB,IAAI,KAAK,KAAK;AAChC,WAAO,IAAID,GAAAA,SAAc,UAAU,KAAK,KAAK;AAAA,EAC/C;AAOA,MAAI,CAAC,iCAAiC,KAAK,KAAK,GAAG;AAGjD,WAAO,IAAIA,GAAAA,SAAc,cAAc,KAAK,KAAK,GAAG,KAAK,KAAK;AAAA,EAChE;AAIA,QAAM,gBAAgB,KAAK,MAAM,SAAS,CAAA;AAC1C,QAAM,oBAA6B;AAAA,IACjC,GAAG,cAAc,KAAK,KAAK;AAAA,IAC3B,OAAO,CAAC,GAAG,eAAe,WAAW;AAAA,EAAA;AAEvC,oBAAkB,IAAI,KAAK,KAAK;AAChC,SAAO,IAAIA,GAAAA,SAAc,mBAAmB,KAAK,KAAK;AACxD;AA8BA,SAAS,iCAAiC,OAAyB;AAEjE,MAAI,MAAM,QAAQ;AAChB,UAAM,gBAAgB,OAAO,OAAO,MAAM,MAAM,EAAE;AAAA,MAChD,CAAC,SAAS,KAAK,SAAS;AAAA,IAAA;AAE1B,QAAI,eAAe;AACjB,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,UAAU,MAAM,OAAO,SAAS,GAAG;AAC3C,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,WAAW
,MAAM,QAAQ,SAAS,GAAG;AAC7C,QAAI,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAC3D,aAAO;AAAA,IACT;AAAA,EACF;AAGA,MACE,MAAM,YACL,MAAM,WAAW,MAAM,QAAQ,SAAS,KACxC,MAAM,YAAY,MAAM,SAAS,SAAS,GAC3C;AACA,WAAO;AAAA,EACT;AAGA,SAAO;AACT;AAYA,SAAS,eACPE,cAC0B;AAC1B,MAAIA,aAAY,WAAW,GAAG;AAC5B,UAAM,IAAIC,OAAAA,sCAAA;AAAA,EACZ;AAEA,MAAID,aAAY,WAAW,GAAG;AAC5B,WAAOA,aAAY,CAAC;AAAA,EACtB;AAGA,SAAO,IAAIE,GAAAA,KAAK,OAAOF,YAAW;AACpC;;"}
package/dist/cjs/query/optimizer.d.cts
CHANGED
@@ -17,6 +17,15 @@ export interface GroupedWhereClauses {
     /** WHERE clauses that touch multiple sources, combined into one expression */
     multiSource?: BasicExpression<boolean>;
 }
+/**
+ * Result of query optimization including both the optimized query and collection-specific WHERE clauses
+ */
+export interface OptimizationResult {
+    /** The optimized query with WHERE clauses potentially moved to subqueries */
+    optimizedQuery: QueryIR;
+    /** Map of collection aliases to their extracted WHERE clauses for index optimization */
+    collectionWhereClauses: Map<string, BasicExpression<boolean>>;
+}
 /**
  * Main query optimizer entry point that lifts WHERE clauses into subqueries.
  *
@@ -25,7 +34,7 @@ export interface GroupedWhereClauses {
  * sources as possible, then removing redundant subqueries.
  *
  * @param query - The QueryIR to optimize
- * @returns
+ * @returns An OptimizationResult with the optimized query and collection WHERE clause mapping
  *
  * @example
  * ```typescript
@@ -35,8 +44,9 @@ export interface GroupedWhereClauses {
  *   where: [eq(u.dept_id, 1), gt(p.views, 100)]
  * }
  *
- * const
+ * const { optimizedQuery, collectionWhereClauses } = optimizeQuery(originalQuery)
  * // Result: Single-source clauses moved to deepest possible subqueries
+ * // collectionWhereClauses: Map { 'u' => eq(u.dept_id, 1), 'p' => gt(p.views, 100) }
  * ```
  */
-export declare function optimizeQuery(query: QueryIR):
+export declare function optimizeQuery(query: QueryIR): OptimizationResult;
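For callers of the optimizer module, the declaration change above means the result is now destructured rather than used directly as a query. A minimal sketch of the new calling convention, assuming the internal `query/optimizer` and `query/ir` modules are importable (they may not be re-exported from the public entry point) and that `originalQuery` is a hypothetical QueryIR produced by the query builder elsewhere:

```typescript
import { optimizeQuery } from "./query/optimizer"
import type { QueryIR } from "./query/ir"

// Hypothetical query value; in practice this comes from the query builder.
declare const originalQuery: QueryIR

// 0.0.29 returns both the rewritten query and the per-collection WHERE clauses.
const { optimizedQuery, collectionWhereClauses } = optimizeQuery(originalQuery)

// Per-alias WHERE clauses can then feed index selection, e.g. for the `u` source.
const userWhere = collectionWhereClauses.get(`u`)
console.log(optimizedQuery, userWhere)
```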
package/dist/cjs/transactions.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
 const deferred = require("./deferred.cjs");
+const errors = require("./errors.cjs");
 const transactions = [];
 let transactionStack = [];
 let sequenceNumber = 0;
@@ -31,7 +32,7 @@ function removeFromPendingList(tx) {
 class Transaction {
   constructor(config) {
     if (typeof config.mutationFn === `undefined`) {
-      throw
+      throw new errors.MissingMutationFunctionError();
     }
     this.id = config.id ?? crypto.randomUUID();
     this.mutationFn = config.mutationFn;
@@ -90,7 +91,7 @@ class Transaction {
    */
   mutate(callback) {
     if (this.state !== `pending`) {
-      throw
+      throw new errors.TransactionNotPendingMutateError();
     }
     registerTransaction(this);
     try {
@@ -157,7 +158,7 @@ class Transaction {
     var _a;
     const isSecondaryRollback = (config == null ? void 0 : config.isSecondaryRollback) ?? false;
     if (this.state === `completed`) {
-      throw
+      throw new errors.TransactionAlreadyCompletedRollbackError();
     }
     this.setState(`failed`);
     if (!isSecondaryRollback) {
@@ -225,7 +226,7 @@ class Transaction {
    */
   async commit() {
     if (this.state !== `pending`) {
-      throw
+      throw new errors.TransactionNotPendingCommitError();
     }
     this.setState(`persisting`);
     if (this.mutations.length === 0) {
package/dist/cjs/transactions.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"transactions.cjs","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw `mutationFn is required when creating a 
transaction`\n }\n this.id = config.id ?? crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw `You can no longer call .mutate() as the transaction is no longer pending`\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit()\n }\n\n return this\n }\n\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n // Replace existing mutation\n this.mutations[existingIndex] = newMutation\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const 
isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw `You can no longer call .rollback() as the transaction is already completed`\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection.pendingSyncedTransactions.length > 0) {\n mutation.collection.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw `You can no longer call .commit() as the transaction is no longer pending`\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Update transaction with error information\n this.error = {\n message: error instanceof Error ? error.message : String(error),\n error: error instanceof Error ? 
error : new Error(String(error)),\n }\n\n // rollback the transaction\n return this.rollback()\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction }\n"],"names":["createDeferred"],"mappings":";;;AAUA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAsDd,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AACjD,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AACnD,qBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAClE;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM;AAAA,IACR;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAcA,wBAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM;AAAA,IACR;AAEA,wBAAoB,IAAI;AACxB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,OAAA;AAAA,IACP;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AAEtB,aAAK,UAAU,aAAa,IAAI;AAAA,MAClC,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;;AACnE,UAAM,uBAAsB,iCAAQ,wBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM;AAAA,IACR;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,QAAO,UAAK,UAAL,mBAAY,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,yBAAA;AAGpB,YAAI,SAAS,WAAW,0BAA0B,SAAS,GAAG;AAC5D,mBAAS,WAAW,0BAAA;AAAA,QACtB;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,
QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM;AAAA,IACR;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AAEzB,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,WAAK,QAAQ;AAAA,QACX,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MAAA;AAIjE,aAAO,KAAK,SAAA;AAAA,IACd;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;;;"}
+
{"version":3,"file":"transactions.cjs","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport {\n MissingMutationFunctionError,\n TransactionAlreadyCompletedRollbackError,\n TransactionNotPendingCommitError,\n TransactionNotPendingMutateError,\n} from \"./errors\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nlet sequenceNumber = 0\n\n/**\n * Creates a new transaction for grouping multiple collection operations\n * @param config - Transaction configuration with mutation function\n * @returns A new Transaction instance\n * @example\n * // Basic transaction usage\n * const tx = createTransaction({\n * mutationFn: async ({ transaction }) => {\n * // Send all mutations to API\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle transaction errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"New item\" })\n * })\n *\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async () => {\n * // API call\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later\n * await tx.commit()\n */\nexport function createTransaction<T extends object = Record<string, unknown>>(\n config: TransactionConfig<T>\n): Transaction<T> {\n const newTransaction = new Transaction<T>(config)\n transactions.push(newTransaction)\n return newTransaction\n}\n\n/**\n * Gets the currently active ambient transaction, if any\n * Used internally by collection operations to join existing transactions\n * @returns The active transaction or undefined if none is active\n * @example\n * // Check if operations will join an ambient transaction\n * const ambientTx = getActiveTransaction()\n * if (ambientTx) {\n * console.log('Operations will join transaction:', ambientTx.id)\n * }\n */\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nclass Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public sequenceNumber: number\n public metadata: Record<string, unknown>\n public error?: {\n 
message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n if (typeof config.mutationFn === `undefined`) {\n throw new MissingMutationFunctionError()\n }\n this.id = config.id ?? crypto.randomUUID()\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.sequenceNumber = sequenceNumber++\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n /**\n * Execute collection operations within this transaction\n * @param callback - Function containing collection operations to group together\n * @returns This transaction for chaining\n * @example\n * // Group multiple operations\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * collection.update(\"2\", draft => { draft.completed = true })\n * collection.delete(\"3\")\n * })\n *\n * await tx.isPersisted.promise\n *\n * @example\n * // Handle mutate errors\n * try {\n * tx.mutate(() => {\n * collection.insert({ id: \"invalid\" }) // This might throw\n * })\n * } catch (error) {\n * console.log('Mutation failed:', error)\n * }\n *\n * @example\n * // Manual commit control\n * const tx = createTransaction({ autoCommit: false, mutationFn: async () => {} })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * // Commit later when ready\n * await tx.commit()\n */\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingMutateError()\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit()\n }\n\n return this\n }\n\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n // Replace existing mutation\n this.mutations[existingIndex] = newMutation\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n /**\n * Rollback the transaction and any conflicting transactions\n * @param config - Configuration for rollback behavior\n * @returns This transaction for chaining\n * @example\n * // Manual rollback\n * const tx = createTransaction({ mutationFn: async () => {\n * // Send to API\n * }})\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * // Rollback if needed\n * if (shouldCancel) {\n * tx.rollback()\n * }\n *\n * @example\n * // Handle rollback cascade (automatic)\n * const tx1 = createTransaction({ mutationFn: async () => {} })\n * const tx2 = createTransaction({ mutationFn: async () => {} })\n *\n * tx1.mutate(() => collection.update(\"1\", draft => { draft.value = \"A\" }))\n * tx2.mutate(() => collection.update(\"1\", draft => { draft.value = \"B\" })) // Same item\n *\n * tx1.rollback() // This will also rollback tx2 due to conflict\n *\n * @example\n * // Handle rollback in error scenarios\n * try {\n * await tx.isPersisted.promise\n * } catch (error) {\n * console.log('Transaction was rolled back:', error)\n * // Transaction automatically rolled 
back on mutation function failure\n * }\n */\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw new TransactionAlreadyCompletedRollbackError()\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection.onTransactionStateChange()\n\n // Only call commitPendingTransactions if there are pending sync transactions\n if (mutation.collection.pendingSyncedTransactions.length > 0) {\n mutation.collection.commitPendingTransactions()\n }\n\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n /**\n * Commit the transaction and execute the mutation function\n * @returns Promise that resolves to this transaction when complete\n * @example\n * // Manual commit (when autoCommit is false)\n * const tx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await api.saveChanges(transaction.mutations)\n * }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Buy milk\" })\n * })\n *\n * await tx.commit() // Manually commit\n *\n * @example\n * // Handle commit errors\n * try {\n * const tx = createTransaction({\n * mutationFn: async () => { throw new Error(\"API failed\") }\n * })\n *\n * tx.mutate(() => {\n * collection.insert({ id: \"1\", text: \"Item\" })\n * })\n *\n * await tx.commit()\n * } catch (error) {\n * console.log('Commit failed, transaction rolled back:', error)\n * }\n *\n * @example\n * // Check transaction state after commit\n * await tx.commit()\n * console.log(tx.state) // \"completed\" or \"failed\"\n */\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw new TransactionNotPendingCommitError()\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Update transaction with error information\n this.error = {\n message: error instanceof Error ? error.message : String(error),\n error: error instanceof Error ? 
error : new Error(String(error)),\n }\n\n // rollback the transaction\n return this.rollback()\n }\n\n return this\n }\n\n /**\n * Compare two transactions by their createdAt time and sequence number in order\n * to sort them in the order they were created.\n * @param other - The other transaction to compare to\n * @returns -1 if this transaction was created before the other, 1 if it was created after, 0 if they were created at the same time\n */\n compareCreatedAt(other: Transaction<any>): number {\n const createdAtComparison =\n this.createdAt.getTime() - other.createdAt.getTime()\n if (createdAtComparison !== 0) {\n return createdAtComparison\n }\n return this.sequenceNumber - other.sequenceNumber\n }\n}\n\nexport type { Transaction }\n"],"names":["MissingMutationFunctionError","createDeferred","TransactionNotPendingMutateError","TransactionAlreadyCompletedRollbackError","TransactionNotPendingCommitError"],"mappings":";;;;AAgBA,MAAM,eAAwC,CAAA;AAC9C,IAAI,mBAA4C,CAAA;AAEhD,IAAI,iBAAiB;AAsDd,SAAS,kBACd,QACgB;AAChB,QAAM,iBAAiB,IAAI,YAAe,MAAM;AAChD,eAAa,KAAK,cAAc;AAChC,SAAO;AACT;AAaO,SAAS,uBAAgD;AAC9D,MAAI,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EACrC,OAAO;AACL,WAAO;AAAA,EACT;AACF;AAEA,SAAS,oBAAoB,IAAsB;AACjD,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AACnD,qBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAClE;AAEA,SAAS,sBAAsB,IAAsB;AACnD,QAAM,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AAChB,iBAAa,OAAO,OAAO,CAAC;AAAA,EAC9B;AACF;AAEA,MAAM,YAAwD;AAAA,EAe5D,YAAY,QAA8B;AACxC,QAAI,OAAO,OAAO,eAAe,aAAa;AAC5C,YAAM,IAAIA,OAAAA,6BAAA;AAAA,IACZ;AACA,SAAK,KAAK,OAAO,MAAM,OAAO,WAAA;AAC9B,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAA;AACjB,SAAK,cAAcC,wBAAA;AACnB,SAAK,aAAa,OAAO,cAAc;AACvC,SAAK,gCAAgB,KAAA;AACrB,SAAK,iBAAiB;AACtB,SAAK,WAAW,OAAO,YAAY,CAAA;AAAA,EACrC;AAAA,EAEA,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAEb,QAAI,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,OAAO,UAAsC;AAC3C,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,wBAAoB,IAAI;AACxB,QAAI;AACF,eAAA;AAAA,IACF,UAAA;AACE,4BAAsB,IAAI;AAAA,IAC5B;AAEA,QAAI,KAAK,YAAY;AACnB,WAAK,OAAA;AAAA,IACP;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AACnC,YAAM,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MAAA;AAGrC,UAAI,iBAAiB,GAAG;AAEtB,aAAK,UAAU,aAAa,IAAI;AAAA,MAClC,OAAO;AAEL,aAAK,UAAU,KAAK,WAAW;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAwCA,SAAS,QAA4D;;AACnE,UAAM,uBAAsB,iCAAQ,wBAAuB;AAC3D,QAAI,KAAK,UAAU,aAAa;AAC9B,YAAM,IAAIC,OAAAA,yCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AACxB,YAAM,kCAAkB,IAAA;AACxB,WAAK,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAC5C;AAAA,IACF;AAGA,SAAK,YAAY,QAAO,UAAK,UAAL,mBAAY,KAAK;AACzC,SAAK,gBAAA;AAEL,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,kBAAwB;AACtB,UAAM,gCAAgB,IAAA;AACtB,eAAW,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,yBAAA;AAGpB,YAAI,SAAS,WAAW,0BAA0B,SAAS,GAAG;AAC5D,mBAAS,WAAW,0BAAA;AAAA,QACtB;AAEA,kBAAU,IAAI,SAAS,WAAW,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAyCA,MAAM,SAAkC;AACtC,QAAI,KAAK,UAAU,WAAW;AAC5B,YAAM,IAAIC,OAAAA,iCAAA;AAAA,IACZ;AAEA,SAAK,SAAS,YAAY;AAE1B,QAAI,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AAEzB,aAAO;AAAA,IACT;AAGA,QAAI;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAA;AAEL,WAAK,YAAY,QAAQ,IAAI;AAAA,IAC/B,SAAS,OAAO;AAEd,WAAK,QAAQ;AAAA,QACX,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MAAA;AAIjE,aAAO,KAAK,SAAA;AAAA,IACd;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,iBAAiB,OAAiC;AAChD,UAAM,sBACJ,KAAK,UAAU,YAAY,MAAM,UAAU,QAAA;AAC7C,QAAI,wBAAwB,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,WAAO,KAAK,iBAAiB,MAAM;AAAA,EACrC;AACF;;;"}
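The transactions module now throws typed error classes required from ./errors.cjs instead of bare template-string throws, so failures can be caught and narrowed. A small sketch, assuming these error classes are re-exported from the package entry point and extend Error (the diff here only shows them being required from ./errors.cjs):

```typescript
import {
  createTransaction,
  TransactionNotPendingCommitError,
} from "@tanstack/db"

const tx = createTransaction({
  autoCommit: false,
  mutationFn: async () => {
    // persist mutations to the backend here
  },
})

await tx.commit() // first commit completes the (empty) transaction

try {
  await tx.commit() // transaction is no longer pending, so this now throws
} catch (error) {
  // instanceof checks are possible now that strings are no longer thrown
  // (assumption: the error classes extend Error and carry a message)
  if (error instanceof TransactionNotPendingCommitError) {
    console.log(`commit rejected:`, error.message)
  }
}
```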
package/dist/cjs/types.d.cts
CHANGED
@@ -2,6 +2,8 @@ import { IStreamBuilder } from '@electric-sql/d2mini';
 import { Collection } from './collection.cjs';
 import { StandardSchemaV1 } from '@standard-schema/spec';
 import { Transaction } from './transactions.cjs';
+import { SingleRowRefProxy } from './query/builder/ref-proxy.cjs';
+import { BasicExpression } from './query/ir.js';
 /**
  * Helper type to extract the output type from a standard schema
  *
@@ -239,6 +241,15 @@ export interface CollectionConfig<T extends object = Record<string, unknown>, TK
      * Defaults to false for lazy loading. Set to true to immediately sync.
      */
     startSync?: boolean;
+    /**
+     * Auto-indexing mode for the collection.
+     * When enabled, indexes will be automatically created for simple where expressions.
+     * @default "eager"
+     * @description
+     * - "off": No automatic indexing
+     * - "eager": Automatically create indexes for simple where expressions in subscribeChanges (default)
+     */
+    autoIndex?: `off` | `eager`;
     /**
      * Optional function to compare two items.
      * This is used to order the items in the collection.
@@ -409,6 +420,26 @@ export type KeyedNamespacedRow = [unknown, NamespacedRow];
  * a `select` clause.
  */
 export type NamespacedAndKeyedStream = IStreamBuilder<KeyedNamespacedRow>;
+/**
+ * Options for subscribing to collection changes
+ */
+export interface SubscribeChangesOptions<T extends object = Record<string, unknown>> {
+    /** Whether to include the current state as initial changes */
+    includeInitialState?: boolean;
+    /** Filter changes using a where expression */
+    where?: (row: SingleRowRefProxy<T>) => any;
+    /** Pre-compiled expression for filtering changes */
+    whereExpression?: BasicExpression<boolean>;
+}
+/**
+ * Options for getting current state as changes
+ */
+export interface CurrentStateAsChangesOptions<T extends object = Record<string, unknown>> {
+    /** Filter the current state using a where expression */
+    where?: (row: SingleRowRefProxy<T>) => any;
+    /** Pre-compiled expression for filtering the current state */
+    whereExpression?: BasicExpression<boolean>;
+}
 /**
  * Function type for listening to collection changes
  * @param changes - Array of change messages describing what happened
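These declarations back the new auto-indexing and filtered-subscription surface. A sketch of how they might be used, with heavy hedging: only CollectionConfig.autoIndex and the two options interfaces above appear in this diff, so the subscribeChanges call shape, the import paths, and the change-message fields are assumptions for illustration.

```typescript
import type { Collection } from "@tanstack/db"
import { eq } from "@tanstack/db"

interface Todo {
  id: string
  text: string
  completed: boolean
}

// Assume a collection created elsewhere; its config can now set
// `autoIndex: "off"` to opt out (the declared default is "eager").
declare const todos: Collection<Todo>

// Assumed call shape: subscribeChanges(callback, options), where options
// matches SubscribeChangesOptions<Todo> from the declarations above.
todos.subscribeChanges(
  (changes) => {
    console.log(`received`, changes.length, `changes`)
  },
  {
    includeInitialState: true,
    // Simple where expressions like this are what "eager" auto-indexing targets.
    where: (row) => eq(row.completed, false),
  }
)
```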
package/dist/cjs/utils/array-utils.cjs
ADDED
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+function findInsertPosition(sortedArray, value, compareFn) {
+  let left = 0;
+  let right = sortedArray.length;
+  while (left < right) {
+    const mid = Math.floor((left + right) / 2);
+    const comparison = compareFn(sortedArray[mid][0], value);
+    if (comparison < 0) {
+      left = mid + 1;
+    } else {
+      right = mid;
+    }
+  }
+  return left;
+}
+exports.findInsertPosition = findInsertPosition;
+//# sourceMappingURL=array-utils.cjs.map
package/dist/cjs/utils/array-utils.cjs.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"array-utils.cjs","sources":["../../../src/utils/array-utils.ts"],"sourcesContent":["/**\n * Finds the correct insert position for a value in a sorted array using binary search\n * @param sortedArray The sorted array to search in\n * @param value The value to find the position for\n * @param compareFn Comparison function to use for ordering\n * @returns The index where the value should be inserted to maintain order\n */\nexport function findInsertPosition<T>(\n sortedArray: Array<[T, any]>,\n value: T,\n compareFn: (a: T, b: T) => number\n): number {\n let left = 0\n let right = sortedArray.length\n\n while (left < right) {\n const mid = Math.floor((left + right) / 2)\n const comparison = compareFn(sortedArray[mid]![0], value)\n\n if (comparison < 0) {\n left = mid + 1\n } else {\n right = mid\n }\n }\n\n return left\n}\n"],"names":[],"mappings":";;AAOO,SAAS,mBACd,aACA,OACA,WACQ;AACR,MAAI,OAAO;AACX,MAAI,QAAQ,YAAY;AAExB,SAAO,OAAO,OAAO;AACnB,UAAM,MAAM,KAAK,OAAO,OAAO,SAAS,CAAC;AACzC,UAAM,aAAa,UAAU,YAAY,GAAG,EAAG,CAAC,GAAG,KAAK;AAExD,QAAI,aAAa,GAAG;AAClB,aAAO,MAAM;AAAA,IACf,OAAO;AACL,cAAQ;AAAA,IACV;AAAA,EACF;AAEA,SAAO;AACT;;"}
package/dist/cjs/utils/array-utils.d.cts
ADDED
@@ -0,0 +1,8 @@
+/**
+ * Finds the correct insert position for a value in a sorted array using binary search
+ * @param sortedArray The sorted array to search in
+ * @param value The value to find the position for
+ * @param compareFn Comparison function to use for ordering
+ * @returns The index where the value should be inserted to maintain order
+ */
+export declare function findInsertPosition<T>(sortedArray: Array<[T, any]>, value: T, compareFn: (a: T, b: T) => number): number;
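The new utility is a plain binary search over [key, value] pairs. A small usage sketch based on the declaration above; the deep dist import path is shown only for illustration, since the diff does not show this helper being re-exported from the package root.

```typescript
import { findInsertPosition } from "@tanstack/db/dist/cjs/utils/array-utils.cjs"

// A list of [key, value] entries kept sorted by key.
const entries: Array<[number, string]> = [
  [1, `a`],
  [3, `c`],
  [7, `g`],
]

const compareNumbers = (a: number, b: number) => a - b

// Binary search returns the index that keeps the array ordered (2 here),
// so splicing at that position preserves the sort.
const pos = findInsertPosition(entries, 5, compareNumbers)
entries.splice(pos, 0, [5, `e`])
```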