@tanstack/db 0.0.17 → 0.0.19

This diff shows the content of publicly available package versions that have been published to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/cjs/SortedMap.cjs.map +1 -1
  2. package/dist/cjs/collection.cjs +47 -6
  3. package/dist/cjs/collection.cjs.map +1 -1
  4. package/dist/cjs/collection.d.cts +150 -45
  5. package/dist/cjs/deferred.cjs.map +1 -1
  6. package/dist/cjs/errors.cjs.map +1 -1
  7. package/dist/cjs/optimistic-action.cjs.map +1 -1
  8. package/dist/cjs/proxy.cjs.map +1 -1
  9. package/dist/cjs/query/builder/functions.cjs.map +1 -1
  10. package/dist/cjs/query/builder/index.cjs.map +1 -1
  11. package/dist/cjs/query/builder/ref-proxy.cjs.map +1 -1
  12. package/dist/cjs/query/compiler/evaluators.cjs.map +1 -1
  13. package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
  14. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  15. package/dist/cjs/query/compiler/joins.cjs.map +1 -1
  16. package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
  17. package/dist/cjs/query/compiler/select.cjs.map +1 -1
  18. package/dist/cjs/query/ir.cjs.map +1 -1
  19. package/dist/cjs/query/live-query-collection.cjs.map +1 -1
  20. package/dist/cjs/transactions.cjs +116 -0
  21. package/dist/cjs/transactions.cjs.map +1 -1
  22. package/dist/cjs/transactions.d.cts +179 -0
  23. package/dist/cjs/types.d.cts +169 -13
  24. package/dist/esm/SortedMap.js.map +1 -1
  25. package/dist/esm/collection.d.ts +150 -45
  26. package/dist/esm/collection.js +47 -6
  27. package/dist/esm/collection.js.map +1 -1
  28. package/dist/esm/deferred.js.map +1 -1
  29. package/dist/esm/errors.js.map +1 -1
  30. package/dist/esm/optimistic-action.js.map +1 -1
  31. package/dist/esm/proxy.js.map +1 -1
  32. package/dist/esm/query/builder/functions.js.map +1 -1
  33. package/dist/esm/query/builder/index.js.map +1 -1
  34. package/dist/esm/query/builder/ref-proxy.js.map +1 -1
  35. package/dist/esm/query/compiler/evaluators.js.map +1 -1
  36. package/dist/esm/query/compiler/group-by.js.map +1 -1
  37. package/dist/esm/query/compiler/index.js.map +1 -1
  38. package/dist/esm/query/compiler/joins.js.map +1 -1
  39. package/dist/esm/query/compiler/order-by.js.map +1 -1
  40. package/dist/esm/query/compiler/select.js.map +1 -1
  41. package/dist/esm/query/ir.js.map +1 -1
  42. package/dist/esm/query/live-query-collection.js.map +1 -1
  43. package/dist/esm/transactions.d.ts +179 -0
  44. package/dist/esm/transactions.js +116 -0
  45. package/dist/esm/transactions.js.map +1 -1
  46. package/dist/esm/types.d.ts +169 -13
  47. package/package.json +1 -1
  48. package/src/collection.ts +165 -50
  49. package/src/proxy.ts +0 -1
  50. package/src/transactions.ts +179 -0
  51. package/src/types.ts +199 -28
package/dist/esm/query/compiler/group-by.js.map
@@ -1 +1 @@
- {"version":3,"file":"group-by.js","sources":["../../../../src/query/compiler/group-by.ts"],"sourcesContent":["import { filter, groupBy, groupByOperators, map } from \"@electric-sql/d2mini\"\nimport { Func, PropRef } from \"../ir.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport type {\n Aggregate,\n BasicExpression,\n GroupBy,\n Having,\n Select,\n} from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\n\nconst { sum, count, avg, min, max } = groupByOperators\n\n/**\n * Interface for caching the mapping between GROUP BY expressions and SELECT expressions\n */\ninterface GroupBySelectMapping {\n selectToGroupByIndex: Map<string, number> // Maps SELECT alias to GROUP BY expression index\n groupByExpressions: Array<any> // The GROUP BY expressions for reference\n}\n\n/**\n * Validates that all non-aggregate expressions in SELECT are present in GROUP BY\n * and creates a cached mapping for efficient lookup during processing\n */\nfunction validateAndCreateMapping(\n groupByClause: GroupBy,\n selectClause?: Select\n): GroupBySelectMapping {\n const selectToGroupByIndex = new Map<string, number>()\n const groupByExpressions = [...groupByClause]\n\n if (!selectClause) {\n return { selectToGroupByIndex, groupByExpressions }\n }\n\n // Validate each SELECT expression\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n // Aggregate expressions are allowed and don't need to be in GROUP BY\n continue\n }\n\n // Non-aggregate expression must be in GROUP BY\n const groupIndex = groupByExpressions.findIndex((groupExpr) =>\n expressionsEqual(expr, groupExpr)\n )\n\n if (groupIndex === -1) {\n throw new Error(\n `Non-aggregate expression '${alias}' in SELECT must also appear in GROUP BY clause`\n )\n }\n\n // Cache the mapping\n selectToGroupByIndex.set(alias, groupIndex)\n }\n\n return { selectToGroupByIndex, groupByExpressions }\n}\n\n/**\n * Processes the GROUP BY clause with optional HAVING and SELECT\n * Works with the new __select_results structure from early SELECT processing\n */\nexport function processGroupBy(\n pipeline: NamespacedAndKeyedStream,\n groupByClause: GroupBy,\n havingClauses?: Array<Having>,\n selectClause?: Select,\n fnHavingClauses?: Array<(row: any) => any>\n): NamespacedAndKeyedStream {\n // Handle empty GROUP BY (single-group aggregation)\n if (groupByClause.length === 0) {\n // For single-group aggregation, create a single group with all data\n const aggregates: Record<string, any> = {}\n\n if (selectClause) {\n // Scan the SELECT clause for aggregate functions\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n const aggExpr = expr\n aggregates[alias] = getAggregateFunction(aggExpr)\n }\n }\n }\n\n // Use a constant key for single group\n const keyExtractor = () => ({ __singleGroup: true })\n\n // Apply the groupBy operator with single group\n pipeline = pipeline.pipe(\n groupBy(keyExtractor, aggregates)\n ) as NamespacedAndKeyedStream\n\n // Update __select_results to include aggregate values\n pipeline = pipeline.pipe(\n map(([, aggregatedRow]) => {\n // Start with the existing __select_results from early SELECT processing\n const selectResults = (aggregatedRow as any).__select_results || {}\n const finalResults: Record<string, any> = { ...selectResults }\n\n if (selectClause) {\n // Update with aggregate results\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n finalResults[alias] = 
aggregatedRow[alias]\n }\n // Non-aggregates keep their original values from early SELECT processing\n }\n }\n\n // Use a single key for the result and update __select_results\n return [\n `single_group`,\n {\n ...aggregatedRow,\n __select_results: finalResults,\n },\n ] as [unknown, Record<string, any>]\n })\n )\n\n // Apply HAVING clauses if present\n if (havingClauses && havingClauses.length > 0) {\n for (const havingClause of havingClauses) {\n const transformedHavingClause = transformHavingClause(\n havingClause,\n selectClause || {}\n )\n const compiledHaving = compileExpression(transformedHavingClause)\n\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return compiledHaving(namespacedRow)\n })\n )\n }\n }\n\n // Apply functional HAVING clauses if present\n if (fnHavingClauses && fnHavingClauses.length > 0) {\n for (const fnHaving of fnHavingClauses) {\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for functional HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n return pipeline\n }\n\n // Multi-group aggregation logic...\n // Validate and create mapping for non-aggregate expressions in SELECT\n const mapping = validateAndCreateMapping(groupByClause, selectClause)\n\n // Pre-compile groupBy expressions\n const compiledGroupByExpressions = groupByClause.map(compileExpression)\n\n // Create a key extractor function using simple __key_X format\n const keyExtractor = ([, row]: [\n string,\n NamespacedRow & { __select_results?: any },\n ]) => {\n // Use the original namespaced row for GROUP BY expressions, not __select_results\n const namespacedRow = { ...row }\n delete (namespacedRow as any).__select_results\n\n const key: Record<string, unknown> = {}\n\n // Use simple __key_X format for each groupBy expression\n for (let i = 0; i < groupByClause.length; i++) {\n const compiledExpr = compiledGroupByExpressions[i]!\n const value = compiledExpr(namespacedRow)\n key[`__key_${i}`] = value\n }\n\n return key\n }\n\n // Create aggregate functions for any aggregated columns in the SELECT clause\n const aggregates: Record<string, any> = {}\n\n if (selectClause) {\n // Scan the SELECT clause for aggregate functions\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n const aggExpr = expr\n aggregates[alias] = getAggregateFunction(aggExpr)\n }\n }\n }\n\n // Apply the groupBy operator\n pipeline = pipeline.pipe(groupBy(keyExtractor, aggregates))\n\n // Update __select_results to handle GROUP BY results\n pipeline = pipeline.pipe(\n map(([, aggregatedRow]) => {\n // Start with the existing __select_results from early SELECT processing\n const selectResults = (aggregatedRow as any).__select_results || {}\n const finalResults: Record<string, any> = {}\n\n if (selectClause) {\n // Process each SELECT expression\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type !== `agg`) {\n // Use cached mapping to get the corresponding __key_X for non-aggregates\n const groupIndex = mapping.selectToGroupByIndex.get(alias)\n if (groupIndex !== undefined) {\n finalResults[alias] = aggregatedRow[`__key_${groupIndex}`]\n } else {\n // Fallback to original SELECT results\n finalResults[alias] = selectResults[alias]\n }\n } else {\n // Use aggregate results\n finalResults[alias] = 
aggregatedRow[alias]\n }\n }\n } else {\n // No SELECT clause - just use the group keys\n for (let i = 0; i < groupByClause.length; i++) {\n finalResults[`__key_${i}`] = aggregatedRow[`__key_${i}`]\n }\n }\n\n // Generate a simple key for the live collection using group values\n let finalKey: unknown\n if (groupByClause.length === 1) {\n finalKey = aggregatedRow[`__key_0`]\n } else {\n const keyParts: Array<unknown> = []\n for (let i = 0; i < groupByClause.length; i++) {\n keyParts.push(aggregatedRow[`__key_${i}`])\n }\n finalKey = JSON.stringify(keyParts)\n }\n\n return [\n finalKey,\n {\n ...aggregatedRow,\n __select_results: finalResults,\n },\n ] as [unknown, Record<string, any>]\n })\n )\n\n // Apply HAVING clauses if present\n if (havingClauses && havingClauses.length > 0) {\n for (const havingClause of havingClauses) {\n const transformedHavingClause = transformHavingClause(\n havingClause,\n selectClause || {}\n )\n const compiledHaving = compileExpression(transformedHavingClause)\n\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return compiledHaving(namespacedRow)\n })\n )\n }\n }\n\n // Apply functional HAVING clauses if present\n if (fnHavingClauses && fnHavingClauses.length > 0) {\n for (const fnHaving of fnHavingClauses) {\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for functional HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n return pipeline\n}\n\n/**\n * Helper function to check if two expressions are equal\n */\nfunction expressionsEqual(expr1: any, expr2: any): boolean {\n if (!expr1 || !expr2) return false\n if (expr1.type !== expr2.type) return false\n\n switch (expr1.type) {\n case `ref`:\n // Compare paths as arrays\n if (!expr1.path || !expr2.path) return false\n if (expr1.path.length !== expr2.path.length) return false\n return expr1.path.every(\n (segment: string, i: number) => segment === expr2.path[i]\n )\n case `val`:\n return expr1.value === expr2.value\n case `func`:\n return (\n expr1.name === expr2.name &&\n expr1.args?.length === expr2.args?.length &&\n (expr1.args || []).every((arg: any, i: number) =>\n expressionsEqual(arg, expr2.args[i])\n )\n )\n case `agg`:\n return (\n expr1.name === expr2.name &&\n expr1.args?.length === expr2.args?.length &&\n (expr1.args || []).every((arg: any, i: number) =>\n expressionsEqual(arg, expr2.args[i])\n )\n )\n default:\n return false\n }\n}\n\n/**\n * Helper function to get an aggregate function based on the Agg expression\n */\nfunction getAggregateFunction(aggExpr: Aggregate) {\n // Pre-compile the value extractor expression\n const compiledExpr = compileExpression(aggExpr.args[0]!)\n\n // Create a value extractor function for the expression to aggregate\n const valueExtractor = ([, namespacedRow]: [string, NamespacedRow]) => {\n const value = compiledExpr(namespacedRow)\n // Ensure we return a number for numeric aggregate functions\n return typeof value === `number` ? value : value != null ? 
Number(value) : 0\n }\n\n // Return the appropriate aggregate function\n switch (aggExpr.name.toLowerCase()) {\n case `sum`:\n return sum(valueExtractor)\n case `count`:\n return count() // count() doesn't need a value extractor\n case `avg`:\n return avg(valueExtractor)\n case `min`:\n return min(valueExtractor)\n case `max`:\n return max(valueExtractor)\n default:\n throw new Error(`Unsupported aggregate function: ${aggExpr.name}`)\n }\n}\n\n/**\n * Transforms a HAVING clause to replace Agg expressions with references to computed values\n */\nfunction transformHavingClause(\n havingExpr: BasicExpression | Aggregate,\n selectClause: Select\n): BasicExpression {\n switch (havingExpr.type) {\n case `agg`: {\n const aggExpr = havingExpr\n // Find matching aggregate in SELECT clause\n for (const [alias, selectExpr] of Object.entries(selectClause)) {\n if (selectExpr.type === `agg` && aggregatesEqual(aggExpr, selectExpr)) {\n // Replace with a reference to the computed aggregate\n return new PropRef([`result`, alias])\n }\n }\n // If no matching aggregate found in SELECT, throw error\n throw new Error(\n `Aggregate function in HAVING clause must also be in SELECT clause: ${aggExpr.name}`\n )\n }\n\n case `func`: {\n const funcExpr = havingExpr\n // Transform function arguments recursively\n const transformedArgs = funcExpr.args.map(\n (arg: BasicExpression | Aggregate) =>\n transformHavingClause(arg, selectClause)\n )\n return new Func(funcExpr.name, transformedArgs)\n }\n\n case `ref`: {\n const refExpr = havingExpr\n // Check if this is a direct reference to a SELECT alias\n if (refExpr.path.length === 1) {\n const alias = refExpr.path[0]!\n if (selectClause[alias]) {\n // This is a reference to a SELECT alias, convert to result.alias\n return new PropRef([`result`, alias])\n }\n }\n // Return as-is for other refs\n return havingExpr as BasicExpression\n }\n\n case `val`:\n // Return as-is\n return havingExpr as BasicExpression\n\n default:\n throw new Error(\n `Unknown expression type in HAVING clause: ${(havingExpr as any).type}`\n )\n }\n}\n\n/**\n * Checks if two aggregate expressions are equal\n */\nfunction aggregatesEqual(agg1: Aggregate, agg2: Aggregate): boolean {\n return (\n agg1.name === agg2.name &&\n agg1.args.length === agg2.args.length &&\n agg1.args.every((arg, i) => expressionsEqual(arg, agg2.args[i]))\n 
)\n}\n"],"names":["aggregates","keyExtractor"],"mappings":";;;AAYA,MAAM,EAAE,KAAK,OAAO,KAAK,KAAK,IAAQ,IAAA;AActC,SAAS,yBACP,eACA,cACsB;AAChB,QAAA,2CAA2B,IAAoB;AAC/C,QAAA,qBAAqB,CAAC,GAAG,aAAa;AAE5C,MAAI,CAAC,cAAc;AACV,WAAA,EAAE,sBAAsB,mBAAmB;AAAA,EAAA;AAIpD,aAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACpD,QAAA,KAAK,SAAS,OAAO;AAEvB;AAAA,IAAA;AAIF,UAAM,aAAa,mBAAmB;AAAA,MAAU,CAAC,cAC/C,iBAAiB,MAAM,SAAS;AAAA,IAClC;AAEA,QAAI,eAAe,IAAI;AACrB,YAAM,IAAI;AAAA,QACR,6BAA6B,KAAK;AAAA,MACpC;AAAA,IAAA;AAImB,yBAAA,IAAI,OAAO,UAAU;AAAA,EAAA;AAGrC,SAAA,EAAE,sBAAsB,mBAAmB;AACpD;AAMO,SAAS,eACd,UACA,eACA,eACA,cACA,iBAC0B;AAEtB,MAAA,cAAc,WAAW,GAAG;AAE9B,UAAMA,cAAkC,CAAC;AAEzC,QAAI,cAAc;AAEhB,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACpD,YAAA,KAAK,SAAS,OAAO;AACvB,gBAAM,UAAU;AAChBA,sBAAW,KAAK,IAAI,qBAAqB,OAAO;AAAA,QAAA;AAAA,MAClD;AAAA,IACF;AAIF,UAAMC,gBAAe,OAAO,EAAE,eAAe,KAAK;AAGlD,eAAW,SAAS;AAAA,MAClB,QAAQA,eAAcD,WAAU;AAAA,IAClC;AAGA,eAAW,SAAS;AAAA,MAClB,IAAI,CAAC,CAAG,EAAA,aAAa,MAAM;AAEnB,cAAA,gBAAiB,cAAsB,oBAAoB,CAAC;AAC5D,cAAA,eAAoC,EAAE,GAAG,cAAc;AAE7D,YAAI,cAAc;AAEhB,qBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACpD,gBAAA,KAAK,SAAS,OAAO;AACV,2BAAA,KAAK,IAAI,cAAc,KAAK;AAAA,YAAA;AAAA,UAC3C;AAAA,QAEF;AAIK,eAAA;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QAEtB;AAAA,MACD,CAAA;AAAA,IACH;AAGI,QAAA,iBAAiB,cAAc,SAAS,GAAG;AAC7C,iBAAW,gBAAgB,eAAe;AACxC,cAAM,0BAA0B;AAAA,UAC9B;AAAA,UACA,gBAAgB,CAAA;AAAA,QAClB;AACM,cAAA,iBAAiB,kBAAkB,uBAAuB;AAEhE,mBAAW,SAAS;AAAA,UAClB,OAAO,CAAC,CAAG,EAAA,GAAG,MAAM;AAElB,kBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAiB;AAC9D,mBAAO,eAAe,aAAa;AAAA,UACpC,CAAA;AAAA,QACH;AAAA,MAAA;AAAA,IACF;AAIE,QAAA,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,iBAAW,YAAY,iBAAiB;AACtC,mBAAW,SAAS;AAAA,UAClB,OAAO,CAAC,CAAG,EAAA,GAAG,MAAM;AAElB,kBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAiB;AAC9D,mBAAO,SAAS,aAAa;AAAA,UAC9B,CAAA;AAAA,QACH;AAAA,MAAA;AAAA,IACF;AAGK,WAAA;AAAA,EAAA;AAKH,QAAA,UAAU,yBAAyB,eAAe,YAAY;AAG9D,QAAA,6BAA6B,cAAc,IAAI,iBAAiB;AAGtE,QAAM,eAAe,CAAC,CAAG,EAAA,GAAG,MAGtB;AAEE,UAAA,gBAAgB,EAAE,GAAG,IAAI;AAC/B,WAAQ,cAAsB;AAE9B,UAAM,MAA+B,CAAC;AAGtC,aAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AACvC,YAAA,eAAe,2BAA2B,CAAC;AAC3C,YAAA,QAAQ,aAAa,aAAa;AACpC,UAAA,SAAS,CAAC,EAAE,IAAI;AAAA,IAAA;AAGf,WAAA;AAAA,EACT;AAGA,QAAM,aAAkC,CAAC;AAEzC,MAAI,cAAc;AAEhB,eAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACpD,UAAA,KAAK,SAAS,OAAO;AACvB,cAAM,UAAU;AACL,mBAAA,KAAK,IAAI,qBAAqB,OAAO;AAAA,MAAA;AAAA,IAClD;AAAA,EACF;AAIF,aAAW,SAAS,KAAK,QAAQ,cAAc,UAAU,CAAC;AAG1D,aAAW,SAAS;AAAA,IAClB,IAAI,CAAC,CAAG,EAAA,aAAa,MAAM;AAEnB,YAAA,gBAAiB,cAAsB,oBAAoB,CAAC;AAClE,YAAM,eAAoC,CAAC;AAE3C,UAAI,cAAc;AAEhB,mBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACpD,cAAA,KAAK,SAAS,OAAO;AAEvB,kBAAM,aAAa,QAAQ,qBAAqB,IAAI,KAAK;AACzD,gBAAI,eAAe,QAAW;AAC5B,2BAAa,KAAK,IAAI,cAAc,SAAS,UAAU,EAAE;AAAA,YAAA,OACpD;AAEQ,2BAAA,KAAK,IAAI,cAAc,KAAK;AAAA,YAAA;AAAA,UAC3C,OACK;AAEQ,yBAAA,KAAK,IAAI,cAAc,KAAK;AAAA,UAAA;AAAA,QAC3C;AAAA,MACF,OACK;AAEL,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,uBAAa,SAAS,CAAC,EAAE,IAAI,cAAc,SAAS,CAAC,EAAE;AAAA,QAAA;AAAA,MACzD;AAIE,UAAA;AACA,UAAA,cAAc,WAAW,GAAG;AAC9B,mBAAW,cAAc,SAAS;AAAA,MAAA,OAC7B;AACL,cAAM,WAA2B,CAAC;AAClC,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,mBAAS,KAAK,cAAc,SAAS,CAAC,EAAE,CAAC;AAAA,QAAA;AAEhC,mBAAA,KAAK,UAAU,QAAQ;AAAA,MAAA;AAG7B,aAAA;AAAA,QACL;AAAA,QACA;AAAA,UACE,GAAG;AAAA,UACH,kBAAkB;AAAA,QAAA;AAAA,MAEtB;AAAA,IACD,CAAA;AAAA,EACH;AAGI,MAAA,iBAAiB,cAAc,SAAS,GAAG;AAC7C,eAAW,gBAAgB,eAAe;AACxC,YAAM,0BAA0B;AAAA,QAC9B;AAAA,QACA,gBAAgB,CAAA;AAAA,MAClB;AACM,YAAA,iBAAiB,kBAAkB,uBAAuB;AAEhE,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAG,EAAA,GAAG,MAAM;A
AElB,gBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAiB;AAC9D,iBAAO,eAAe,aAAa;AAAA,QACpC,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF;AAIE,MAAA,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,eAAW,YAAY,iBAAiB;AACtC,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAG,EAAA,GAAG,MAAM;AAElB,gBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAiB;AAC9D,iBAAO,SAAS,aAAa;AAAA,QAC9B,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF;AAGK,SAAA;AACT;AAKA,SAAS,iBAAiB,OAAY,OAAqB;;AACzD,MAAI,CAAC,SAAS,CAAC,MAAc,QAAA;AAC7B,MAAI,MAAM,SAAS,MAAM,KAAa,QAAA;AAEtC,UAAQ,MAAM,MAAM;AAAA,IAClB,KAAK;AAEH,UAAI,CAAC,MAAM,QAAQ,CAAC,MAAM,KAAa,QAAA;AACvC,UAAI,MAAM,KAAK,WAAW,MAAM,KAAK,OAAe,QAAA;AACpD,aAAO,MAAM,KAAK;AAAA,QAChB,CAAC,SAAiB,MAAc,YAAY,MAAM,KAAK,CAAC;AAAA,MAC1D;AAAA,IACF,KAAK;AACI,aAAA,MAAM,UAAU,MAAM;AAAA,IAC/B,KAAK;AACH,aACE,MAAM,SAAS,MAAM,UACrB,WAAM,SAAN,mBAAY,cAAW,WAAM,SAAN,mBAAY,YAClC,MAAM,QAAQ,CAAI,GAAA;AAAA,QAAM,CAAC,KAAU,MAClC,iBAAiB,KAAK,MAAM,KAAK,CAAC,CAAC;AAAA,MACrC;AAAA,IAEJ,KAAK;AACH,aACE,MAAM,SAAS,MAAM,UACrB,WAAM,SAAN,mBAAY,cAAW,WAAM,SAAN,mBAAY,YAClC,MAAM,QAAQ,CAAI,GAAA;AAAA,QAAM,CAAC,KAAU,MAClC,iBAAiB,KAAK,MAAM,KAAK,CAAC,CAAC;AAAA,MACrC;AAAA,IAEJ;AACS,aAAA;AAAA,EAAA;AAEb;AAKA,SAAS,qBAAqB,SAAoB;AAEhD,QAAM,eAAe,kBAAkB,QAAQ,KAAK,CAAC,CAAE;AAGvD,QAAM,iBAAiB,CAAC,CAAG,EAAA,aAAa,MAA+B;AAC/D,UAAA,QAAQ,aAAa,aAAa;AAEjC,WAAA,OAAO,UAAU,WAAW,QAAQ,SAAS,OAAO,OAAO,KAAK,IAAI;AAAA,EAC7E;AAGQ,UAAA,QAAQ,KAAK,YAAe,GAAA;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,MAAM;AAAA;AAAA,IACf,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B;AACE,YAAM,IAAI,MAAM,mCAAmC,QAAQ,IAAI,EAAE;AAAA,EAAA;AAEvE;AAKA,SAAS,sBACP,YACA,cACiB;AACjB,UAAQ,WAAW,MAAM;AAAA,IACvB,KAAK,OAAO;AACV,YAAM,UAAU;AAEhB,iBAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,YAAY,GAAG;AAC9D,YAAI,WAAW,SAAS,SAAS,gBAAgB,SAAS,UAAU,GAAG;AAErE,iBAAO,IAAI,QAAQ,CAAC,UAAU,KAAK,CAAC;AAAA,QAAA;AAAA,MACtC;AAGF,YAAM,IAAI;AAAA,QACR,sEAAsE,QAAQ,IAAI;AAAA,MACpF;AAAA,IAAA;AAAA,IAGF,KAAK,QAAQ;AACX,YAAM,WAAW;AAEX,YAAA,kBAAkB,SAAS,KAAK;AAAA,QACpC,CAAC,QACC,sBAAsB,KAAK,YAAY;AAAA,MAC3C;AACA,aAAO,IAAI,KAAK,SAAS,MAAM,eAAe;AAAA,IAAA;AAAA,IAGhD,KAAK,OAAO;AACV,YAAM,UAAU;AAEZ,UAAA,QAAQ,KAAK,WAAW,GAAG;AACvB,cAAA,QAAQ,QAAQ,KAAK,CAAC;AACxB,YAAA,aAAa,KAAK,GAAG;AAEvB,iBAAO,IAAI,QAAQ,CAAC,UAAU,KAAK,CAAC;AAAA,QAAA;AAAA,MACtC;AAGK,aAAA;AAAA,IAAA;AAAA,IAGT,KAAK;AAEI,aAAA;AAAA,IAET;AACE,YAAM,IAAI;AAAA,QACR,6CAA8C,WAAmB,IAAI;AAAA,MACvE;AAAA,EAAA;AAEN;AAKA,SAAS,gBAAgB,MAAiB,MAA0B;AAEhE,SAAA,KAAK,SAAS,KAAK,QACnB,KAAK,KAAK,WAAW,KAAK,KAAK,UAC/B,KAAK,KAAK,MAAM,CAAC,KAAK,MAAM,iBAAiB,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC;AAEnE;"}
+ {"version":3,"file":"group-by.js","sources":["../../../../src/query/compiler/group-by.ts"],"sourcesContent":["import { filter, groupBy, groupByOperators, map } from \"@electric-sql/d2mini\"\nimport { Func, PropRef } from \"../ir.js\"\nimport { compileExpression } from \"./evaluators.js\"\nimport type {\n Aggregate,\n BasicExpression,\n GroupBy,\n Having,\n Select,\n} from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\n\nconst { sum, count, avg, min, max } = groupByOperators\n\n/**\n * Interface for caching the mapping between GROUP BY expressions and SELECT expressions\n */\ninterface GroupBySelectMapping {\n selectToGroupByIndex: Map<string, number> // Maps SELECT alias to GROUP BY expression index\n groupByExpressions: Array<any> // The GROUP BY expressions for reference\n}\n\n/**\n * Validates that all non-aggregate expressions in SELECT are present in GROUP BY\n * and creates a cached mapping for efficient lookup during processing\n */\nfunction validateAndCreateMapping(\n groupByClause: GroupBy,\n selectClause?: Select\n): GroupBySelectMapping {\n const selectToGroupByIndex = new Map<string, number>()\n const groupByExpressions = [...groupByClause]\n\n if (!selectClause) {\n return { selectToGroupByIndex, groupByExpressions }\n }\n\n // Validate each SELECT expression\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n // Aggregate expressions are allowed and don't need to be in GROUP BY\n continue\n }\n\n // Non-aggregate expression must be in GROUP BY\n const groupIndex = groupByExpressions.findIndex((groupExpr) =>\n expressionsEqual(expr, groupExpr)\n )\n\n if (groupIndex === -1) {\n throw new Error(\n `Non-aggregate expression '${alias}' in SELECT must also appear in GROUP BY clause`\n )\n }\n\n // Cache the mapping\n selectToGroupByIndex.set(alias, groupIndex)\n }\n\n return { selectToGroupByIndex, groupByExpressions }\n}\n\n/**\n * Processes the GROUP BY clause with optional HAVING and SELECT\n * Works with the new __select_results structure from early SELECT processing\n */\nexport function processGroupBy(\n pipeline: NamespacedAndKeyedStream,\n groupByClause: GroupBy,\n havingClauses?: Array<Having>,\n selectClause?: Select,\n fnHavingClauses?: Array<(row: any) => any>\n): NamespacedAndKeyedStream {\n // Handle empty GROUP BY (single-group aggregation)\n if (groupByClause.length === 0) {\n // For single-group aggregation, create a single group with all data\n const aggregates: Record<string, any> = {}\n\n if (selectClause) {\n // Scan the SELECT clause for aggregate functions\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n const aggExpr = expr\n aggregates[alias] = getAggregateFunction(aggExpr)\n }\n }\n }\n\n // Use a constant key for single group\n const keyExtractor = () => ({ __singleGroup: true })\n\n // Apply the groupBy operator with single group\n pipeline = pipeline.pipe(\n groupBy(keyExtractor, aggregates)\n ) as NamespacedAndKeyedStream\n\n // Update __select_results to include aggregate values\n pipeline = pipeline.pipe(\n map(([, aggregatedRow]) => {\n // Start with the existing __select_results from early SELECT processing\n const selectResults = (aggregatedRow as any).__select_results || {}\n const finalResults: Record<string, any> = { ...selectResults }\n\n if (selectClause) {\n // Update with aggregate results\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n finalResults[alias] = 
aggregatedRow[alias]\n }\n // Non-aggregates keep their original values from early SELECT processing\n }\n }\n\n // Use a single key for the result and update __select_results\n return [\n `single_group`,\n {\n ...aggregatedRow,\n __select_results: finalResults,\n },\n ] as [unknown, Record<string, any>]\n })\n )\n\n // Apply HAVING clauses if present\n if (havingClauses && havingClauses.length > 0) {\n for (const havingClause of havingClauses) {\n const transformedHavingClause = transformHavingClause(\n havingClause,\n selectClause || {}\n )\n const compiledHaving = compileExpression(transformedHavingClause)\n\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return compiledHaving(namespacedRow)\n })\n )\n }\n }\n\n // Apply functional HAVING clauses if present\n if (fnHavingClauses && fnHavingClauses.length > 0) {\n for (const fnHaving of fnHavingClauses) {\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for functional HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n return pipeline\n }\n\n // Multi-group aggregation logic...\n // Validate and create mapping for non-aggregate expressions in SELECT\n const mapping = validateAndCreateMapping(groupByClause, selectClause)\n\n // Pre-compile groupBy expressions\n const compiledGroupByExpressions = groupByClause.map(compileExpression)\n\n // Create a key extractor function using simple __key_X format\n const keyExtractor = ([, row]: [\n string,\n NamespacedRow & { __select_results?: any },\n ]) => {\n // Use the original namespaced row for GROUP BY expressions, not __select_results\n const namespacedRow = { ...row }\n delete (namespacedRow as any).__select_results\n\n const key: Record<string, unknown> = {}\n\n // Use simple __key_X format for each groupBy expression\n for (let i = 0; i < groupByClause.length; i++) {\n const compiledExpr = compiledGroupByExpressions[i]!\n const value = compiledExpr(namespacedRow)\n key[`__key_${i}`] = value\n }\n\n return key\n }\n\n // Create aggregate functions for any aggregated columns in the SELECT clause\n const aggregates: Record<string, any> = {}\n\n if (selectClause) {\n // Scan the SELECT clause for aggregate functions\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type === `agg`) {\n const aggExpr = expr\n aggregates[alias] = getAggregateFunction(aggExpr)\n }\n }\n }\n\n // Apply the groupBy operator\n pipeline = pipeline.pipe(groupBy(keyExtractor, aggregates))\n\n // Update __select_results to handle GROUP BY results\n pipeline = pipeline.pipe(\n map(([, aggregatedRow]) => {\n // Start with the existing __select_results from early SELECT processing\n const selectResults = (aggregatedRow as any).__select_results || {}\n const finalResults: Record<string, any> = {}\n\n if (selectClause) {\n // Process each SELECT expression\n for (const [alias, expr] of Object.entries(selectClause)) {\n if (expr.type !== `agg`) {\n // Use cached mapping to get the corresponding __key_X for non-aggregates\n const groupIndex = mapping.selectToGroupByIndex.get(alias)\n if (groupIndex !== undefined) {\n finalResults[alias] = aggregatedRow[`__key_${groupIndex}`]\n } else {\n // Fallback to original SELECT results\n finalResults[alias] = selectResults[alias]\n }\n } else {\n // Use aggregate results\n finalResults[alias] = 
aggregatedRow[alias]\n }\n }\n } else {\n // No SELECT clause - just use the group keys\n for (let i = 0; i < groupByClause.length; i++) {\n finalResults[`__key_${i}`] = aggregatedRow[`__key_${i}`]\n }\n }\n\n // Generate a simple key for the live collection using group values\n let finalKey: unknown\n if (groupByClause.length === 1) {\n finalKey = aggregatedRow[`__key_0`]\n } else {\n const keyParts: Array<unknown> = []\n for (let i = 0; i < groupByClause.length; i++) {\n keyParts.push(aggregatedRow[`__key_${i}`])\n }\n finalKey = JSON.stringify(keyParts)\n }\n\n return [\n finalKey,\n {\n ...aggregatedRow,\n __select_results: finalResults,\n },\n ] as [unknown, Record<string, any>]\n })\n )\n\n // Apply HAVING clauses if present\n if (havingClauses && havingClauses.length > 0) {\n for (const havingClause of havingClauses) {\n const transformedHavingClause = transformHavingClause(\n havingClause,\n selectClause || {}\n )\n const compiledHaving = compileExpression(transformedHavingClause)\n\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return compiledHaving(namespacedRow)\n })\n )\n }\n }\n\n // Apply functional HAVING clauses if present\n if (fnHavingClauses && fnHavingClauses.length > 0) {\n for (const fnHaving of fnHavingClauses) {\n pipeline = pipeline.pipe(\n filter(([, row]) => {\n // Create a namespaced row structure for functional HAVING evaluation\n const namespacedRow = { result: (row as any).__select_results }\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n return pipeline\n}\n\n/**\n * Helper function to check if two expressions are equal\n */\nfunction expressionsEqual(expr1: any, expr2: any): boolean {\n if (!expr1 || !expr2) return false\n if (expr1.type !== expr2.type) return false\n\n switch (expr1.type) {\n case `ref`:\n // Compare paths as arrays\n if (!expr1.path || !expr2.path) return false\n if (expr1.path.length !== expr2.path.length) return false\n return expr1.path.every(\n (segment: string, i: number) => segment === expr2.path[i]\n )\n case `val`:\n return expr1.value === expr2.value\n case `func`:\n return (\n expr1.name === expr2.name &&\n expr1.args?.length === expr2.args?.length &&\n (expr1.args || []).every((arg: any, i: number) =>\n expressionsEqual(arg, expr2.args[i])\n )\n )\n case `agg`:\n return (\n expr1.name === expr2.name &&\n expr1.args?.length === expr2.args?.length &&\n (expr1.args || []).every((arg: any, i: number) =>\n expressionsEqual(arg, expr2.args[i])\n )\n )\n default:\n return false\n }\n}\n\n/**\n * Helper function to get an aggregate function based on the Agg expression\n */\nfunction getAggregateFunction(aggExpr: Aggregate) {\n // Pre-compile the value extractor expression\n const compiledExpr = compileExpression(aggExpr.args[0]!)\n\n // Create a value extractor function for the expression to aggregate\n const valueExtractor = ([, namespacedRow]: [string, NamespacedRow]) => {\n const value = compiledExpr(namespacedRow)\n // Ensure we return a number for numeric aggregate functions\n return typeof value === `number` ? value : value != null ? 
Number(value) : 0\n }\n\n // Return the appropriate aggregate function\n switch (aggExpr.name.toLowerCase()) {\n case `sum`:\n return sum(valueExtractor)\n case `count`:\n return count() // count() doesn't need a value extractor\n case `avg`:\n return avg(valueExtractor)\n case `min`:\n return min(valueExtractor)\n case `max`:\n return max(valueExtractor)\n default:\n throw new Error(`Unsupported aggregate function: ${aggExpr.name}`)\n }\n}\n\n/**\n * Transforms a HAVING clause to replace Agg expressions with references to computed values\n */\nfunction transformHavingClause(\n havingExpr: BasicExpression | Aggregate,\n selectClause: Select\n): BasicExpression {\n switch (havingExpr.type) {\n case `agg`: {\n const aggExpr = havingExpr\n // Find matching aggregate in SELECT clause\n for (const [alias, selectExpr] of Object.entries(selectClause)) {\n if (selectExpr.type === `agg` && aggregatesEqual(aggExpr, selectExpr)) {\n // Replace with a reference to the computed aggregate\n return new PropRef([`result`, alias])\n }\n }\n // If no matching aggregate found in SELECT, throw error\n throw new Error(\n `Aggregate function in HAVING clause must also be in SELECT clause: ${aggExpr.name}`\n )\n }\n\n case `func`: {\n const funcExpr = havingExpr\n // Transform function arguments recursively\n const transformedArgs = funcExpr.args.map(\n (arg: BasicExpression | Aggregate) =>\n transformHavingClause(arg, selectClause)\n )\n return new Func(funcExpr.name, transformedArgs)\n }\n\n case `ref`: {\n const refExpr = havingExpr\n // Check if this is a direct reference to a SELECT alias\n if (refExpr.path.length === 1) {\n const alias = refExpr.path[0]!\n if (selectClause[alias]) {\n // This is a reference to a SELECT alias, convert to result.alias\n return new PropRef([`result`, alias])\n }\n }\n // Return as-is for other refs\n return havingExpr as BasicExpression\n }\n\n case `val`:\n // Return as-is\n return havingExpr as BasicExpression\n\n default:\n throw new Error(\n `Unknown expression type in HAVING clause: ${(havingExpr as any).type}`\n )\n }\n}\n\n/**\n * Checks if two aggregate expressions are equal\n */\nfunction aggregatesEqual(agg1: Aggregate, agg2: Aggregate): boolean {\n return (\n agg1.name === agg2.name &&\n agg1.args.length === agg2.args.length &&\n agg1.args.every((arg, i) => expressionsEqual(arg, agg2.args[i]))\n 
)\n}\n"],"names":["aggregates","keyExtractor"],"mappings":";;;AAYA,MAAM,EAAE,KAAK,OAAO,KAAK,KAAK,QAAQ;AActC,SAAS,yBACP,eACA,cACsB;AACtB,QAAM,2CAA2B,IAAA;AACjC,QAAM,qBAAqB,CAAC,GAAG,aAAa;AAE5C,MAAI,CAAC,cAAc;AACjB,WAAO,EAAE,sBAAsB,mBAAA;AAAA,EACjC;AAGA,aAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACxD,QAAI,KAAK,SAAS,OAAO;AAEvB;AAAA,IACF;AAGA,UAAM,aAAa,mBAAmB;AAAA,MAAU,CAAC,cAC/C,iBAAiB,MAAM,SAAS;AAAA,IAAA;AAGlC,QAAI,eAAe,IAAI;AACrB,YAAM,IAAI;AAAA,QACR,6BAA6B,KAAK;AAAA,MAAA;AAAA,IAEtC;AAGA,yBAAqB,IAAI,OAAO,UAAU;AAAA,EAC5C;AAEA,SAAO,EAAE,sBAAsB,mBAAA;AACjC;AAMO,SAAS,eACd,UACA,eACA,eACA,cACA,iBAC0B;AAE1B,MAAI,cAAc,WAAW,GAAG;AAE9B,UAAMA,cAAkC,CAAA;AAExC,QAAI,cAAc;AAEhB,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACxD,YAAI,KAAK,SAAS,OAAO;AACvB,gBAAM,UAAU;AAChBA,sBAAW,KAAK,IAAI,qBAAqB,OAAO;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAGA,UAAMC,gBAAe,OAAO,EAAE,eAAe,KAAA;AAG7C,eAAW,SAAS;AAAA,MAClB,QAAQA,eAAcD,WAAU;AAAA,IAAA;AAIlC,eAAW,SAAS;AAAA,MAClB,IAAI,CAAC,CAAA,EAAG,aAAa,MAAM;AAEzB,cAAM,gBAAiB,cAAsB,oBAAoB,CAAA;AACjE,cAAM,eAAoC,EAAE,GAAG,cAAA;AAE/C,YAAI,cAAc;AAEhB,qBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACxD,gBAAI,KAAK,SAAS,OAAO;AACvB,2BAAa,KAAK,IAAI,cAAc,KAAK;AAAA,YAC3C;AAAA,UAEF;AAAA,QACF;AAGA,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAIH,QAAI,iBAAiB,cAAc,SAAS,GAAG;AAC7C,iBAAW,gBAAgB,eAAe;AACxC,cAAM,0BAA0B;AAAA,UAC9B;AAAA,UACA,gBAAgB,CAAA;AAAA,QAAC;AAEnB,cAAM,iBAAiB,kBAAkB,uBAAuB;AAEhE,mBAAW,SAAS;AAAA,UAClB,OAAO,CAAC,CAAA,EAAG,GAAG,MAAM;AAElB,kBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAA;AAC7C,mBAAO,eAAe,aAAa;AAAA,UACrC,CAAC;AAAA,QAAA;AAAA,MAEL;AAAA,IACF;AAGA,QAAI,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,iBAAW,YAAY,iBAAiB;AACtC,mBAAW,SAAS;AAAA,UAClB,OAAO,CAAC,CAAA,EAAG,GAAG,MAAM;AAElB,kBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAA;AAC7C,mBAAO,SAAS,aAAa;AAAA,UAC/B,CAAC;AAAA,QAAA;AAAA,MAEL;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAIA,QAAM,UAAU,yBAAyB,eAAe,YAAY;AAGpE,QAAM,6BAA6B,cAAc,IAAI,iBAAiB;AAGtE,QAAM,eAAe,CAAC,CAAA,EAAG,GAAG,MAGtB;AAEJ,UAAM,gBAAgB,EAAE,GAAG,IAAA;AAC3B,WAAQ,cAAsB;AAE9B,UAAM,MAA+B,CAAA;AAGrC,aAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,YAAM,eAAe,2BAA2B,CAAC;AACjD,YAAM,QAAQ,aAAa,aAAa;AACxC,UAAI,SAAS,CAAC,EAAE,IAAI;AAAA,IACtB;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,aAAkC,CAAA;AAExC,MAAI,cAAc;AAEhB,eAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACxD,UAAI,KAAK,SAAS,OAAO;AACvB,cAAM,UAAU;AAChB,mBAAW,KAAK,IAAI,qBAAqB,OAAO;AAAA,MAClD;AAAA,IACF;AAAA,EACF;AAGA,aAAW,SAAS,KAAK,QAAQ,cAAc,UAAU,CAAC;AAG1D,aAAW,SAAS;AAAA,IAClB,IAAI,CAAC,CAAA,EAAG,aAAa,MAAM;AAEzB,YAAM,gBAAiB,cAAsB,oBAAoB,CAAA;AACjE,YAAM,eAAoC,CAAA;AAE1C,UAAI,cAAc;AAEhB,mBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,YAAY,GAAG;AACxD,cAAI,KAAK,SAAS,OAAO;AAEvB,kBAAM,aAAa,QAAQ,qBAAqB,IAAI,KAAK;AACzD,gBAAI,eAAe,QAAW;AAC5B,2BAAa,KAAK,IAAI,cAAc,SAAS,UAAU,EAAE;AAAA,YAC3D,OAAO;AAEL,2BAAa,KAAK,IAAI,cAAc,KAAK;AAAA,YAC3C;AAAA,UACF,OAAO;AAEL,yBAAa,KAAK,IAAI,cAAc,KAAK;AAAA,UAC3C;AAAA,QACF;AAAA,MACF,OAAO;AAEL,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,uBAAa,SAAS,CAAC,EAAE,IAAI,cAAc,SAAS,CAAC,EAAE;AAAA,QACzD;AAAA,MACF;AAGA,UAAI;AACJ,UAAI,cAAc,WAAW,GAAG;AAC9B,mBAAW,cAAc,SAAS;AAAA,MACpC,OAAO;AACL,cAAM,WAA2B,CAAA;AACjC,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,mBAAS,KAAK,cAAc,SAAS,CAAC,EAAE,CAAC;AAAA,QAC3C;AACA,mBAAW,KAAK,UAAU,QAAQ;AAAA,MACpC;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,UACE,GAAG;AAAA,UACH,kBAAkB;AAAA,QAAA;AAAA,MACpB;AAAA,IAEJ,CAAC;AAAA,EAAA;AAIH,MAAI,iBAAiB,cAAc,SAAS,GAAG;AAC7C,eAAW,gBAAgB,eAAe;AACxC,YAAM,0BAA0B;AAAA,QAC9B;AAAA,QACA,gBAAgB,CAAA;AAAA,MAAC;AAEnB,YAAM,iBAAiB,kBAAkB,uBAAuB;AAEhE,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAA,EAAG,GAAG,MAAM;AAElB,g
BAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAA;AAC7C,iBAAO,eAAe,aAAa;AAAA,QACrC,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,eAAW,YAAY,iBAAiB;AACtC,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAA,EAAG,GAAG,MAAM;AAElB,gBAAM,gBAAgB,EAAE,QAAS,IAAY,iBAAA;AAC7C,iBAAO,SAAS,aAAa;AAAA,QAC/B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,iBAAiB,OAAY,OAAqB;;AACzD,MAAI,CAAC,SAAS,CAAC,MAAO,QAAO;AAC7B,MAAI,MAAM,SAAS,MAAM,KAAM,QAAO;AAEtC,UAAQ,MAAM,MAAA;AAAA,IACZ,KAAK;AAEH,UAAI,CAAC,MAAM,QAAQ,CAAC,MAAM,KAAM,QAAO;AACvC,UAAI,MAAM,KAAK,WAAW,MAAM,KAAK,OAAQ,QAAO;AACpD,aAAO,MAAM,KAAK;AAAA,QAChB,CAAC,SAAiB,MAAc,YAAY,MAAM,KAAK,CAAC;AAAA,MAAA;AAAA,IAE5D,KAAK;AACH,aAAO,MAAM,UAAU,MAAM;AAAA,IAC/B,KAAK;AACH,aACE,MAAM,SAAS,MAAM,UACrB,WAAM,SAAN,mBAAY,cAAW,WAAM,SAAN,mBAAY,YAClC,MAAM,QAAQ,CAAA,GAAI;AAAA,QAAM,CAAC,KAAU,MAClC,iBAAiB,KAAK,MAAM,KAAK,CAAC,CAAC;AAAA,MAAA;AAAA,IAGzC,KAAK;AACH,aACE,MAAM,SAAS,MAAM,UACrB,WAAM,SAAN,mBAAY,cAAW,WAAM,SAAN,mBAAY,YAClC,MAAM,QAAQ,CAAA,GAAI;AAAA,QAAM,CAAC,KAAU,MAClC,iBAAiB,KAAK,MAAM,KAAK,CAAC,CAAC;AAAA,MAAA;AAAA,IAGzC;AACE,aAAO;AAAA,EAAA;AAEb;AAKA,SAAS,qBAAqB,SAAoB;AAEhD,QAAM,eAAe,kBAAkB,QAAQ,KAAK,CAAC,CAAE;AAGvD,QAAM,iBAAiB,CAAC,CAAA,EAAG,aAAa,MAA+B;AACrE,UAAM,QAAQ,aAAa,aAAa;AAExC,WAAO,OAAO,UAAU,WAAW,QAAQ,SAAS,OAAO,OAAO,KAAK,IAAI;AAAA,EAC7E;AAGA,UAAQ,QAAQ,KAAK,YAAA,GAAY;AAAA,IAC/B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,MAAA;AAAA;AAAA,IACT,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B;AACE,YAAM,IAAI,MAAM,mCAAmC,QAAQ,IAAI,EAAE;AAAA,EAAA;AAEvE;AAKA,SAAS,sBACP,YACA,cACiB;AACjB,UAAQ,WAAW,MAAA;AAAA,IACjB,KAAK,OAAO;AACV,YAAM,UAAU;AAEhB,iBAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,YAAY,GAAG;AAC9D,YAAI,WAAW,SAAS,SAAS,gBAAgB,SAAS,UAAU,GAAG;AAErE,iBAAO,IAAI,QAAQ,CAAC,UAAU,KAAK,CAAC;AAAA,QACtC;AAAA,MACF;AAEA,YAAM,IAAI;AAAA,QACR,sEAAsE,QAAQ,IAAI;AAAA,MAAA;AAAA,IAEtF;AAAA,IAEA,KAAK,QAAQ;AACX,YAAM,WAAW;AAEjB,YAAM,kBAAkB,SAAS,KAAK;AAAA,QACpC,CAAC,QACC,sBAAsB,KAAK,YAAY;AAAA,MAAA;AAE3C,aAAO,IAAI,KAAK,SAAS,MAAM,eAAe;AAAA,IAChD;AAAA,IAEA,KAAK,OAAO;AACV,YAAM,UAAU;AAEhB,UAAI,QAAQ,KAAK,WAAW,GAAG;AAC7B,cAAM,QAAQ,QAAQ,KAAK,CAAC;AAC5B,YAAI,aAAa,KAAK,GAAG;AAEvB,iBAAO,IAAI,QAAQ,CAAC,UAAU,KAAK,CAAC;AAAA,QACtC;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,KAAK;AAEH,aAAO;AAAA,IAET;AACE,YAAM,IAAI;AAAA,QACR,6CAA8C,WAAmB,IAAI;AAAA,MAAA;AAAA,EACvE;AAEN;AAKA,SAAS,gBAAgB,MAAiB,MAA0B;AAClE,SACE,KAAK,SAAS,KAAK,QACnB,KAAK,KAAK,WAAW,KAAK,KAAK,UAC/B,KAAK,KAAK,MAAM,CAAC,KAAK,MAAM,iBAAiB,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC;AAEnE;"}
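The `group-by.ts` source embedded in the sourcemap above shows how the compiler validates that every non-aggregate SELECT expression also appears in GROUP BY, caches an alias-to-index mapping, and later reads grouped values back from `__key_${i}` slots on the aggregated row while aggregates are read by their alias. Below is a minimal standalone sketch of that mapping step, using simplified expression shapes rather than the package's real IR types; all names and types in it are illustrative and are not part of the @tanstack/db public API.

```typescript
// Minimal sketch (not the package's API): mirrors the SELECT-alias -> GROUP BY-index
// mapping that group-by.ts builds in validateAndCreateMapping, using simplified
// expression shapes instead of the real IR classes.

type Expr =
  | { type: "ref"; path: Array<string> }
  | { type: "val"; value: unknown }
  | { type: "agg"; name: string; args: Array<Expr> }

// Structural equality check over the simplified expression shapes.
function expressionsEqual(a: Expr, b: Expr): boolean {
  if (a.type !== b.type) return false
  if (a.type === "ref" && b.type === "ref")
    return (
      a.path.length === b.path.length &&
      a.path.every((segment, i) => segment === b.path[i])
    )
  if (a.type === "val" && b.type === "val") return a.value === b.value
  if (a.type === "agg" && b.type === "agg")
    return (
      a.name === b.name &&
      a.args.length === b.args.length &&
      a.args.every((arg, i) => expressionsEqual(arg, b.args[i]!))
    )
  return false
}

// Build the alias -> GROUP BY index map; throws when a non-aggregate SELECT
// expression is missing from GROUP BY, matching the compiler's error message.
function mapSelectToGroupBy(
  groupBy: Array<Expr>,
  select: Record<string, Expr>
): Map<string, number> {
  const mapping = new Map<string, number>()
  for (const [alias, expr] of Object.entries(select)) {
    if (expr.type === "agg") continue // aggregates need not appear in GROUP BY
    const idx = groupBy.findIndex((g) => expressionsEqual(expr, g))
    if (idx === -1)
      throw new Error(
        `Non-aggregate expression '${alias}' in SELECT must also appear in GROUP BY clause`
      )
    mapping.set(alias, idx)
  }
  return mapping
}

// Usage: SELECT user.country AS country, count(*) AS total ... GROUP BY user.country
const groupBy: Array<Expr> = [{ type: "ref", path: ["user", "country"] }]
const select: Record<string, Expr> = {
  country: { type: "ref", path: ["user", "country"] },
  total: { type: "agg", name: "count", args: [] },
}
const mapping = mapSelectToGroupBy(groupBy, select)
// mapping.get("country") === 0, so the compiled pipeline reads the grouped value
// back from aggregatedRow["__key_0"]; "total" is read from aggregatedRow["total"].
console.log(mapping.get("country"))
```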
package/dist/esm/query/compiler/index.js.map
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../../../../src/query/compiler/index.ts"],"sourcesContent":["import { filter, map } from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { processJoins } from \"./joins.js\"\nimport { processGroupBy } from \"./group-by.js\"\nimport { processOrderBy } from \"./order-by.js\"\nimport { processSelectToResults } from \"./select.js\"\nimport type { CollectionRef, QueryIR, QueryRef } from \"../ir.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n ResultStream,\n} from \"../../types.js\"\n\n/**\n * Cache for compiled subqueries to avoid duplicate compilation\n */\ntype QueryCache = WeakMap<QueryIR, ResultStream>\n\n/**\n * Compiles a query2 IR into a D2 pipeline\n * @param query The query IR to compile\n * @param inputs Mapping of collection names to input streams\n * @param cache Optional cache for compiled subqueries (used internally for recursion)\n * @returns A stream builder representing the compiled query\n */\nexport function compileQuery(\n query: QueryIR,\n inputs: Record<string, KeyedStream>,\n cache: QueryCache = new WeakMap()\n): ResultStream {\n // Check if this query has already been compiled\n const cachedResult = cache.get(query)\n if (cachedResult) {\n return cachedResult\n }\n\n // Create a copy of the inputs map to avoid modifying the original\n const allInputs = { ...inputs }\n\n // Create a map of table aliases to inputs\n const tables: Record<string, KeyedStream> = {}\n\n // Process the FROM clause to get the main table\n const { alias: mainTableAlias, input: mainInput } = processFrom(\n query.from,\n allInputs,\n cache\n )\n tables[mainTableAlias] = mainInput\n\n // Prepare the initial pipeline with the main table wrapped in its alias\n let pipeline: NamespacedAndKeyedStream = mainInput.pipe(\n map(([key, row]) => {\n // Initialize the record with a nested structure\n const ret = [key, { [mainTableAlias]: row }] as [\n string,\n Record<string, typeof row>,\n ]\n return ret\n })\n )\n\n // Process JOIN clauses if they exist\n if (query.join && query.join.length > 0) {\n pipeline = processJoins(\n pipeline,\n query.join,\n tables,\n mainTableAlias,\n allInputs,\n cache\n )\n }\n\n // Process the WHERE clause if it exists\n if (query.where && query.where.length > 0) {\n // Compile all WHERE expressions\n const compiledWheres = query.where.map((where) => compileExpression(where))\n\n // Apply each WHERE condition as a filter (they are ANDed together)\n for (const compiledWhere of compiledWheres) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return compiledWhere(namespacedRow)\n })\n )\n }\n }\n\n // Process functional WHERE clauses if they exist\n if (query.fnWhere && query.fnWhere.length > 0) {\n for (const fnWhere of query.fnWhere) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnWhere(namespacedRow)\n })\n )\n }\n }\n\n // Process the SELECT clause early - always create __select_results\n // This eliminates duplication and allows for future DISTINCT implementation\n if (query.fnSelect) {\n // Handle functional select - apply the function to transform the row\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults = query.fnSelect!(namespacedRow)\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n } else if (query.select) {\n pipeline = processSelectToResults(pipeline, query.select, 
allInputs)\n } else {\n // If no SELECT clause, create __select_results with the main table data\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults =\n !query.join && !query.groupBy\n ? namespacedRow[mainTableAlias]\n : namespacedRow\n\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n }\n\n // Process the GROUP BY clause if it exists\n if (query.groupBy && query.groupBy.length > 0) {\n pipeline = processGroupBy(\n pipeline,\n query.groupBy,\n query.having,\n query.select,\n query.fnHaving\n )\n } else if (query.select) {\n // Check if SELECT contains aggregates but no GROUP BY (implicit single-group aggregation)\n const hasAggregates = Object.values(query.select).some(\n (expr) => expr.type === `agg`\n )\n if (hasAggregates) {\n // Handle implicit single-group aggregation\n pipeline = processGroupBy(\n pipeline,\n [], // Empty group by means single group\n query.having,\n query.select,\n query.fnHaving\n )\n }\n }\n\n // Process the HAVING clause if it exists (only applies after GROUP BY)\n if (query.having && (!query.groupBy || query.groupBy.length === 0)) {\n // Check if we have aggregates in SELECT that would trigger implicit grouping\n const hasAggregates = query.select\n ? Object.values(query.select).some((expr) => expr.type === `agg`)\n : false\n\n if (!hasAggregates) {\n throw new Error(`HAVING clause requires GROUP BY clause`)\n }\n }\n\n // Process functional HAVING clauses outside of GROUP BY (treat as additional WHERE filters)\n if (\n query.fnHaving &&\n query.fnHaving.length > 0 &&\n (!query.groupBy || query.groupBy.length === 0)\n ) {\n // If there's no GROUP BY but there are fnHaving clauses, apply them as filters\n for (const fnHaving of query.fnHaving) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n // Process orderBy parameter if it exists\n if (query.orderBy && query.orderBy.length > 0) {\n const orderedPipeline = processOrderBy(\n pipeline,\n query.orderBy,\n query.limit,\n query.offset\n )\n\n // Final step: extract the __select_results and include orderBy index\n const resultPipeline = orderedPipeline.pipe(\n map(([key, [row, orderByIndex]]) => {\n // Extract the final results from __select_results and include orderBy index\n const finalResults = (row as any).__select_results\n return [key, [finalResults, orderByIndex]] as [unknown, [any, string]]\n })\n )\n\n const result = resultPipeline\n // Cache the result before returning\n cache.set(query, result)\n return result\n } else if (query.limit !== undefined || query.offset !== undefined) {\n // If there's a limit or offset without orderBy, throw an error\n throw new Error(\n `LIMIT and OFFSET require an ORDER BY clause to ensure deterministic results`\n )\n }\n\n // Final step: extract the __select_results and return tuple format (no orderBy)\n const resultPipeline: ResultStream = pipeline.pipe(\n map(([key, row]) => {\n // Extract the final results from __select_results and return [key, [results, undefined]]\n const finalResults = (row as any).__select_results\n return [key, [finalResults, undefined]] as [\n unknown,\n [any, string | undefined],\n ]\n })\n )\n\n const result = resultPipeline\n // Cache the result before returning\n cache.set(query, result)\n return result\n}\n\n/**\n * Processes the FROM clause to extract the main table alias and input stream\n */\nfunction processFrom(\n from: 
CollectionRef | QueryRef,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): { alias: string; input: KeyedStream } {\n switch (from.type) {\n case `collectionRef`: {\n const input = allInputs[from.collection.id]\n if (!input) {\n throw new Error(\n `Input for collection \"${from.collection.id}\" not found in inputs map`\n )\n }\n return { alias: from.alias, input }\n }\n case `queryRef`: {\n // Recursively compile the sub-query with cache\n const subQueryInput = compileQuery(from.query, allInputs, cache)\n\n // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)\n // We need to extract just the value for use in parent queries\n const extractedInput = subQueryInput.pipe(\n map((data: any) => {\n const [key, [value, _orderByIndex]] = data\n return [key, value] as [unknown, any]\n })\n )\n\n return { alias: from.alias, input: extractedInput }\n }\n default:\n throw new Error(`Unsupported FROM type: ${(from as any).type}`)\n }\n}\n"],"names":["resultPipeline","result"],"mappings":";;;;;;AAyBO,SAAS,aACd,OACA,QACA,QAAoB,oBAAI,WACV;AAER,QAAA,eAAe,MAAM,IAAI,KAAK;AACpC,MAAI,cAAc;AACT,WAAA;AAAA,EAAA;AAIH,QAAA,YAAY,EAAE,GAAG,OAAO;AAG9B,QAAM,SAAsC,CAAC;AAG7C,QAAM,EAAE,OAAO,gBAAgB,OAAO,UAAc,IAAA;AAAA,IAClD,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EACF;AACA,SAAO,cAAc,IAAI;AAGzB,MAAI,WAAqC,UAAU;AAAA,IACjD,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAEZ,YAAA,MAAM,CAAC,KAAK,EAAE,CAAC,cAAc,GAAG,KAAK;AAIpC,aAAA;AAAA,IACR,CAAA;AAAA,EACH;AAGA,MAAI,MAAM,QAAQ,MAAM,KAAK,SAAS,GAAG;AAC5B,eAAA;AAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EAAA;AAIF,MAAI,MAAM,SAAS,MAAM,MAAM,SAAS,GAAG;AAEnC,UAAA,iBAAiB,MAAM,MAAM,IAAI,CAAC,UAAU,kBAAkB,KAAK,CAAC;AAG1E,eAAW,iBAAiB,gBAAgB;AAC1C,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,cAAc,aAAa;AAAA,QACnC,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF;AAIF,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAClC,eAAA,WAAW,MAAM,SAAS;AACnC,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,QAAQ,aAAa;AAAA,QAC7B,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF;AAKF,MAAI,MAAM,UAAU;AAElB,eAAW,SAAS;AAAA,MAClB,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AACtB,cAAA,gBAAgB,MAAM,SAAU,aAAa;AAC5C,eAAA;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QAEtB;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EAAA,WACS,MAAM,QAAQ;AACvB,eAAW,uBAAuB,UAAU,MAAM,MAAiB;AAAA,EAAA,OAC9D;AAEL,eAAW,SAAS;AAAA,MAClB,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AACtB,cAAA,gBACJ,CAAC,MAAM,QAAQ,CAAC,MAAM,UAClB,cAAc,cAAc,IAC5B;AAEC,eAAA;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QAEtB;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EAAA;AAIF,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAClC,eAAA;AAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAAA,EAAA,WACS,MAAM,QAAQ;AAEvB,UAAM,gBAAgB,OAAO,OAAO,MAAM,MAAM,EAAE;AAAA,MAChD,CAAC,SAAS,KAAK,SAAS;AAAA,IAC1B;AACA,QAAI,eAAe;AAEN,iBAAA;AAAA,QACT;AAAA,QACA,CAAC;AAAA;AAAA,QACD,MAAM;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,MACR;AAAA,IAAA;AAAA,EACF;AAIE,MAAA,MAAM,WAAW,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAAI;AAElE,UAAM,gBAAgB,MAAM,SACxB,OAAO,OAAO,MAAM,MAAM,EAAE,KAAK,CAAC,SAAS,KAAK,SAAS,KAAK,IAC9D;AAEJ,QAAI,CAAC,eAAe;AACZ,YAAA,IAAI,MAAM,wCAAwC;AAAA,IAAA;AAAA,EAC1D;AAIF,MACE,MAAM,YACN,MAAM,SAAS,SAAS,MACvB,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAC5C;AAEW,eAAA,YAAY,MAAM,UAAU;AACrC,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,SAAS,aAAa;AAAA,QAC9B,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF;AAIF,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,IACR;AAGA,UAAMA,kBAAiB,gBAAgB;
AAAA,MACrC,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,YAAY,CAAC,MAAM;AAElC,cAAM,eAAgB,IAAY;AAClC,eAAO,CAAC,KAAK,CAAC,cAAc,YAAY,CAAC;AAAA,MAC1C,CAAA;AAAA,IACH;AAEA,UAAMC,UAASD;AAET,UAAA,IAAI,OAAOC,OAAM;AAChBA,WAAAA;AAAAA,EAAA,WACE,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAElE,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EAAA;AAIF,QAAM,iBAA+B,SAAS;AAAA,IAC5C,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,eAAgB,IAAY;AAClC,aAAO,CAAC,KAAK,CAAC,cAAc,MAAS,CAAC;AAAA,IAIvC,CAAA;AAAA,EACH;AAEA,QAAM,SAAS;AAET,QAAA,IAAI,OAAO,MAAM;AAChB,SAAA;AACT;AAKA,SAAS,YACP,MACA,WACA,OACuC;AACvC,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK,iBAAiB;AACpB,YAAM,QAAQ,UAAU,KAAK,WAAW,EAAE;AAC1C,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,yBAAyB,KAAK,WAAW,EAAE;AAAA,QAC7C;AAAA,MAAA;AAEF,aAAO,EAAE,OAAO,KAAK,OAAO,MAAM;AAAA,IAAA;AAAA,IAEpC,KAAK,YAAY;AAEf,YAAM,gBAAgB,aAAa,KAAK,OAAO,WAAW,KAAK;AAI/D,YAAM,iBAAiB,cAAc;AAAA,QACnC,IAAI,CAAC,SAAc;AACjB,gBAAM,CAAC,KAAK,CAAC,OAAO,aAAa,CAAC,IAAI;AAC/B,iBAAA,CAAC,KAAK,KAAK;AAAA,QACnB,CAAA;AAAA,MACH;AAEA,aAAO,EAAE,OAAO,KAAK,OAAO,OAAO,eAAe;AAAA,IAAA;AAAA,IAEpD;AACE,YAAM,IAAI,MAAM,0BAA2B,KAAa,IAAI,EAAE;AAAA,EAAA;AAEpE;"}
+ {"version":3,"file":"index.js","sources":["../../../../src/query/compiler/index.ts"],"sourcesContent":["import { filter, map } from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { processJoins } from \"./joins.js\"\nimport { processGroupBy } from \"./group-by.js\"\nimport { processOrderBy } from \"./order-by.js\"\nimport { processSelectToResults } from \"./select.js\"\nimport type { CollectionRef, QueryIR, QueryRef } from \"../ir.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n ResultStream,\n} from \"../../types.js\"\n\n/**\n * Cache for compiled subqueries to avoid duplicate compilation\n */\ntype QueryCache = WeakMap<QueryIR, ResultStream>\n\n/**\n * Compiles a query2 IR into a D2 pipeline\n * @param query The query IR to compile\n * @param inputs Mapping of collection names to input streams\n * @param cache Optional cache for compiled subqueries (used internally for recursion)\n * @returns A stream builder representing the compiled query\n */\nexport function compileQuery(\n query: QueryIR,\n inputs: Record<string, KeyedStream>,\n cache: QueryCache = new WeakMap()\n): ResultStream {\n // Check if this query has already been compiled\n const cachedResult = cache.get(query)\n if (cachedResult) {\n return cachedResult\n }\n\n // Create a copy of the inputs map to avoid modifying the original\n const allInputs = { ...inputs }\n\n // Create a map of table aliases to inputs\n const tables: Record<string, KeyedStream> = {}\n\n // Process the FROM clause to get the main table\n const { alias: mainTableAlias, input: mainInput } = processFrom(\n query.from,\n allInputs,\n cache\n )\n tables[mainTableAlias] = mainInput\n\n // Prepare the initial pipeline with the main table wrapped in its alias\n let pipeline: NamespacedAndKeyedStream = mainInput.pipe(\n map(([key, row]) => {\n // Initialize the record with a nested structure\n const ret = [key, { [mainTableAlias]: row }] as [\n string,\n Record<string, typeof row>,\n ]\n return ret\n })\n )\n\n // Process JOIN clauses if they exist\n if (query.join && query.join.length > 0) {\n pipeline = processJoins(\n pipeline,\n query.join,\n tables,\n mainTableAlias,\n allInputs,\n cache\n )\n }\n\n // Process the WHERE clause if it exists\n if (query.where && query.where.length > 0) {\n // Compile all WHERE expressions\n const compiledWheres = query.where.map((where) => compileExpression(where))\n\n // Apply each WHERE condition as a filter (they are ANDed together)\n for (const compiledWhere of compiledWheres) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return compiledWhere(namespacedRow)\n })\n )\n }\n }\n\n // Process functional WHERE clauses if they exist\n if (query.fnWhere && query.fnWhere.length > 0) {\n for (const fnWhere of query.fnWhere) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnWhere(namespacedRow)\n })\n )\n }\n }\n\n // Process the SELECT clause early - always create __select_results\n // This eliminates duplication and allows for future DISTINCT implementation\n if (query.fnSelect) {\n // Handle functional select - apply the function to transform the row\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults = query.fnSelect!(namespacedRow)\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n } else if (query.select) {\n pipeline = processSelectToResults(pipeline, query.select, 
allInputs)\n } else {\n // If no SELECT clause, create __select_results with the main table data\n pipeline = pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults =\n !query.join && !query.groupBy\n ? namespacedRow[mainTableAlias]\n : namespacedRow\n\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [string, typeof namespacedRow & { __select_results: any }]\n })\n )\n }\n\n // Process the GROUP BY clause if it exists\n if (query.groupBy && query.groupBy.length > 0) {\n pipeline = processGroupBy(\n pipeline,\n query.groupBy,\n query.having,\n query.select,\n query.fnHaving\n )\n } else if (query.select) {\n // Check if SELECT contains aggregates but no GROUP BY (implicit single-group aggregation)\n const hasAggregates = Object.values(query.select).some(\n (expr) => expr.type === `agg`\n )\n if (hasAggregates) {\n // Handle implicit single-group aggregation\n pipeline = processGroupBy(\n pipeline,\n [], // Empty group by means single group\n query.having,\n query.select,\n query.fnHaving\n )\n }\n }\n\n // Process the HAVING clause if it exists (only applies after GROUP BY)\n if (query.having && (!query.groupBy || query.groupBy.length === 0)) {\n // Check if we have aggregates in SELECT that would trigger implicit grouping\n const hasAggregates = query.select\n ? Object.values(query.select).some((expr) => expr.type === `agg`)\n : false\n\n if (!hasAggregates) {\n throw new Error(`HAVING clause requires GROUP BY clause`)\n }\n }\n\n // Process functional HAVING clauses outside of GROUP BY (treat as additional WHERE filters)\n if (\n query.fnHaving &&\n query.fnHaving.length > 0 &&\n (!query.groupBy || query.groupBy.length === 0)\n ) {\n // If there's no GROUP BY but there are fnHaving clauses, apply them as filters\n for (const fnHaving of query.fnHaving) {\n pipeline = pipeline.pipe(\n filter(([_key, namespacedRow]) => {\n return fnHaving(namespacedRow)\n })\n )\n }\n }\n\n // Process orderBy parameter if it exists\n if (query.orderBy && query.orderBy.length > 0) {\n const orderedPipeline = processOrderBy(\n pipeline,\n query.orderBy,\n query.limit,\n query.offset\n )\n\n // Final step: extract the __select_results and include orderBy index\n const resultPipeline = orderedPipeline.pipe(\n map(([key, [row, orderByIndex]]) => {\n // Extract the final results from __select_results and include orderBy index\n const finalResults = (row as any).__select_results\n return [key, [finalResults, orderByIndex]] as [unknown, [any, string]]\n })\n )\n\n const result = resultPipeline\n // Cache the result before returning\n cache.set(query, result)\n return result\n } else if (query.limit !== undefined || query.offset !== undefined) {\n // If there's a limit or offset without orderBy, throw an error\n throw new Error(\n `LIMIT and OFFSET require an ORDER BY clause to ensure deterministic results`\n )\n }\n\n // Final step: extract the __select_results and return tuple format (no orderBy)\n const resultPipeline: ResultStream = pipeline.pipe(\n map(([key, row]) => {\n // Extract the final results from __select_results and return [key, [results, undefined]]\n const finalResults = (row as any).__select_results\n return [key, [finalResults, undefined]] as [\n unknown,\n [any, string | undefined],\n ]\n })\n )\n\n const result = resultPipeline\n // Cache the result before returning\n cache.set(query, result)\n return result\n}\n\n/**\n * Processes the FROM clause to extract the main table alias and input stream\n */\nfunction processFrom(\n from: 
CollectionRef | QueryRef,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): { alias: string; input: KeyedStream } {\n switch (from.type) {\n case `collectionRef`: {\n const input = allInputs[from.collection.id]\n if (!input) {\n throw new Error(\n `Input for collection \"${from.collection.id}\" not found in inputs map`\n )\n }\n return { alias: from.alias, input }\n }\n case `queryRef`: {\n // Recursively compile the sub-query with cache\n const subQueryInput = compileQuery(from.query, allInputs, cache)\n\n // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)\n // We need to extract just the value for use in parent queries\n const extractedInput = subQueryInput.pipe(\n map((data: any) => {\n const [key, [value, _orderByIndex]] = data\n return [key, value] as [unknown, any]\n })\n )\n\n return { alias: from.alias, input: extractedInput }\n }\n default:\n throw new Error(`Unsupported FROM type: ${(from as any).type}`)\n }\n}\n"],"names":["resultPipeline","result"],"mappings":";;;;;;AAyBO,SAAS,aACd,OACA,QACA,QAAoB,oBAAI,WACV;AAEd,QAAM,eAAe,MAAM,IAAI,KAAK;AACpC,MAAI,cAAc;AAChB,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,EAAE,GAAG,OAAA;AAGvB,QAAM,SAAsC,CAAA;AAG5C,QAAM,EAAE,OAAO,gBAAgB,OAAO,cAAc;AAAA,IAClD,MAAM;AAAA,IACN;AAAA,IACA;AAAA,EAAA;AAEF,SAAO,cAAc,IAAI;AAGzB,MAAI,WAAqC,UAAU;AAAA,IACjD,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,MAAM,CAAC,KAAK,EAAE,CAAC,cAAc,GAAG,KAAK;AAI3C,aAAO;AAAA,IACT,CAAC;AAAA,EAAA;AAIH,MAAI,MAAM,QAAQ,MAAM,KAAK,SAAS,GAAG;AACvC,eAAW;AAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAGA,MAAI,MAAM,SAAS,MAAM,MAAM,SAAS,GAAG;AAEzC,UAAM,iBAAiB,MAAM,MAAM,IAAI,CAAC,UAAU,kBAAkB,KAAK,CAAC;AAG1E,eAAW,iBAAiB,gBAAgB;AAC1C,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,cAAc,aAAa;AAAA,QACpC,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,eAAW,WAAW,MAAM,SAAS;AACnC,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,QAAQ,aAAa;AAAA,QAC9B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAIA,MAAI,MAAM,UAAU;AAElB,eAAW,SAAS;AAAA,MAClB,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,cAAM,gBAAgB,MAAM,SAAU,aAAa;AACnD,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAAA,EAEL,WAAW,MAAM,QAAQ;AACvB,eAAW,uBAAuB,UAAU,MAAM,MAAiB;AAAA,EACrE,OAAO;AAEL,eAAW,SAAS;AAAA,MAClB,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,cAAM,gBACJ,CAAC,MAAM,QAAQ,CAAC,MAAM,UAClB,cAAc,cAAc,IAC5B;AAEN,eAAO;AAAA,UACL;AAAA,UACA;AAAA,YACE,GAAG;AAAA,YACH,kBAAkB;AAAA,UAAA;AAAA,QACpB;AAAA,MAEJ,CAAC;AAAA,IAAA;AAAA,EAEL;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,eAAW;AAAA,MACT;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,IAAA;AAAA,EAEV,WAAW,MAAM,QAAQ;AAEvB,UAAM,gBAAgB,OAAO,OAAO,MAAM,MAAM,EAAE;AAAA,MAChD,CAAC,SAAS,KAAK,SAAS;AAAA,IAAA;AAE1B,QAAI,eAAe;AAEjB,iBAAW;AAAA,QACT;AAAA,QACA,CAAA;AAAA;AAAA,QACA,MAAM;AAAA,QACN,MAAM;AAAA,QACN,MAAM;AAAA,MAAA;AAAA,IAEV;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAAI;AAElE,UAAM,gBAAgB,MAAM,SACxB,OAAO,OAAO,MAAM,MAAM,EAAE,KAAK,CAAC,SAAS,KAAK,SAAS,KAAK,IAC9D;AAEJ,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,wCAAwC;AAAA,IAC1D;AAAA,EACF;AAGA,MACE,MAAM,YACN,MAAM,SAAS,SAAS,MACvB,CAAC,MAAM,WAAW,MAAM,QAAQ,WAAW,IAC5C;AAEA,eAAW,YAAY,MAAM,UAAU;AACrC,iBAAW,SAAS;AAAA,QAClB,OAAO,CAAC,CAAC,MAAM,aAAa,MAAM;AAChC,iBAAO,SAAS,aAAa;AAAA,QAC/B,CAAC;AAAA,MAAA;AAAA,IAEL;AAAA,EACF;AAGA,MAAI,MAAM,WAAW,MAAM,QAAQ,SAAS,GAAG;AAC7C,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,MAAM;AAAA,IAAA;AAIR,UAAMA,kBAAiB,gBAAgB;A
AAA,MACrC,IAAI,CAAC,CAAC,KAAK,CAAC,KAAK,YAAY,CAAC,MAAM;AAElC,cAAM,eAAgB,IAAY;AAClC,eAAO,CAAC,KAAK,CAAC,cAAc,YAAY,CAAC;AAAA,MAC3C,CAAC;AAAA,IAAA;AAGH,UAAMC,UAASD;AAEf,UAAM,IAAI,OAAOC,OAAM;AACvB,WAAOA;AAAAA,EACT,WAAW,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAElE,UAAM,IAAI;AAAA,MACR;AAAA,IAAA;AAAA,EAEJ;AAGA,QAAM,iBAA+B,SAAS;AAAA,IAC5C,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAElB,YAAM,eAAgB,IAAY;AAClC,aAAO,CAAC,KAAK,CAAC,cAAc,MAAS,CAAC;AAAA,IAIxC,CAAC;AAAA,EAAA;AAGH,QAAM,SAAS;AAEf,QAAM,IAAI,OAAO,MAAM;AACvB,SAAO;AACT;AAKA,SAAS,YACP,MACA,WACA,OACuC;AACvC,UAAQ,KAAK,MAAA;AAAA,IACX,KAAK,iBAAiB;AACpB,YAAM,QAAQ,UAAU,KAAK,WAAW,EAAE;AAC1C,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,yBAAyB,KAAK,WAAW,EAAE;AAAA,QAAA;AAAA,MAE/C;AACA,aAAO,EAAE,OAAO,KAAK,OAAO,MAAA;AAAA,IAC9B;AAAA,IACA,KAAK,YAAY;AAEf,YAAM,gBAAgB,aAAa,KAAK,OAAO,WAAW,KAAK;AAI/D,YAAM,iBAAiB,cAAc;AAAA,QACnC,IAAI,CAAC,SAAc;AACjB,gBAAM,CAAC,KAAK,CAAC,OAAO,aAAa,CAAC,IAAI;AACtC,iBAAO,CAAC,KAAK,KAAK;AAAA,QACpB,CAAC;AAAA,MAAA;AAGH,aAAO,EAAE,OAAO,KAAK,OAAO,OAAO,eAAA;AAAA,IACrC;AAAA,IACA;AACE,YAAM,IAAI,MAAM,0BAA2B,KAAa,IAAI,EAAE;AAAA,EAAA;AAEpE;"}
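For orientation, the compiler entry point in the map above (compileQuery) takes a QueryIR plus a map of collection id to keyed input stream and returns a stream of [key, [selectResults, orderByIndex]] tuples, caching compiled subqueries in a WeakMap. A minimal TypeScript sketch of wiring it up, mirroring how the package's own live-query collection does it; the deep import path, the queryIR value, and the "users" input name are assumptions for illustration, not public API:

import { D2, output } from "@electric-sql/d2mini"
// assumed deep import; compileQuery lives in the compiled query/compiler module
import { compileQuery } from "@tanstack/db/dist/esm/query/compiler/index.js"

declare const queryIR: any // a QueryIR built elsewhere (assumed available)

const graph = new D2()
// one keyed input per collection referenced by the query's FROM/JOIN clauses
const inputs = { users: graph.newInput<any>() }

// subqueries are cached in a WeakMap keyed by QueryIR, so shared QueryRefs compile once
const pipeline = compileQuery(queryIR, inputs)

pipeline.pipe(
  output((data) => {
    // each message is [key, [selectResults, orderByIndex | undefined]]
    console.log(data.getInner())
  })
)
graph.finalize()
graph.run()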
@@ -1 +1 @@

- {"version":3,"file":"joins.js","sources":["../../../../src/query/compiler/joins.ts"],"sourcesContent":["import {\n consolidate,\n filter,\n join as joinOperator,\n map,\n} from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { compileQuery } from \"./index.js\"\nimport type { IStreamBuilder, JoinType } from \"@electric-sql/d2mini\"\nimport type { CollectionRef, JoinClause, QueryIR, QueryRef } from \"../ir.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n ResultStream,\n} from \"../../types.js\"\n\n/**\n * Cache for compiled subqueries to avoid duplicate compilation\n */\ntype QueryCache = WeakMap<QueryIR, ResultStream>\n\n/**\n * Processes all join clauses in a query\n */\nexport function processJoins(\n pipeline: NamespacedAndKeyedStream,\n joinClauses: Array<JoinClause>,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): NamespacedAndKeyedStream {\n let resultPipeline = pipeline\n\n for (const joinClause of joinClauses) {\n resultPipeline = processJoin(\n resultPipeline,\n joinClause,\n tables,\n mainTableAlias,\n allInputs,\n cache\n )\n }\n\n return resultPipeline\n}\n\n/**\n * Processes a single join clause\n */\nfunction processJoin(\n pipeline: NamespacedAndKeyedStream,\n joinClause: JoinClause,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): NamespacedAndKeyedStream {\n // Get the joined table alias and input stream\n const { alias: joinedTableAlias, input: joinedInput } = processJoinSource(\n joinClause.from,\n allInputs,\n cache\n )\n\n // Add the joined table to the tables map\n tables[joinedTableAlias] = joinedInput\n\n // Convert join type to D2 join type\n const joinType: JoinType =\n joinClause.type === `cross`\n ? `inner`\n : joinClause.type === `outer`\n ? 
`full`\n : (joinClause.type as JoinType)\n\n // Pre-compile the join expressions\n const compiledLeftExpr = compileExpression(joinClause.left)\n const compiledRightExpr = compileExpression(joinClause.right)\n\n // Prepare the main pipeline for joining\n const mainPipeline = pipeline.pipe(\n map(([currentKey, namespacedRow]) => {\n // Extract the join key from the left side of the join condition\n const leftKey = compiledLeftExpr(namespacedRow)\n\n // Return [joinKey, [originalKey, namespacedRow]]\n return [leftKey, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Prepare the joined pipeline\n const joinedPipeline = joinedInput.pipe(\n map(([currentKey, row]) => {\n // Wrap the row in a namespaced structure\n const namespacedRow: NamespacedRow = { [joinedTableAlias]: row }\n\n // Extract the join key from the right side of the join condition\n const rightKey = compiledRightExpr(namespacedRow)\n\n // Return [joinKey, [originalKey, namespacedRow]]\n return [rightKey, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Apply the join operation\n if (![`inner`, `left`, `right`, `full`].includes(joinType)) {\n throw new Error(`Unsupported join type: ${joinClause.type}`)\n }\n return mainPipeline.pipe(\n joinOperator(joinedPipeline, joinType),\n consolidate(),\n processJoinResults(joinClause.type)\n )\n}\n\n/**\n * Processes the join source (collection or sub-query)\n */\nfunction processJoinSource(\n from: CollectionRef | QueryRef,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): { alias: string; input: KeyedStream } {\n switch (from.type) {\n case `collectionRef`: {\n const input = allInputs[from.collection.id]\n if (!input) {\n throw new Error(\n `Input for collection \"${from.collection.id}\" not found in inputs map`\n )\n }\n return { alias: from.alias, input }\n }\n case `queryRef`: {\n // Recursively compile the sub-query with cache\n const subQueryInput = compileQuery(from.query, allInputs, cache)\n\n // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)\n // We need to extract just the value for use in parent queries\n const extractedInput = subQueryInput.pipe(\n map((data: any) => {\n const [key, [value, _orderByIndex]] = data\n return [key, value] as [unknown, any]\n })\n )\n\n return { alias: from.alias, input: extractedInput as KeyedStream }\n }\n default:\n throw new Error(`Unsupported join source type: ${(from as any).type}`)\n }\n}\n\n/**\n * Processes the results of a join operation\n */\nfunction processJoinResults(joinType: string) {\n return function (\n pipeline: IStreamBuilder<\n [\n key: string,\n [\n [string, NamespacedRow] | undefined,\n [string, NamespacedRow] | undefined,\n ],\n ]\n >\n ): NamespacedAndKeyedStream {\n return pipeline.pipe(\n // Process the join result and handle nulls\n filter((result) => {\n const [_key, [main, joined]] = result\n const mainNamespacedRow = main?.[1]\n const joinedNamespacedRow = joined?.[1]\n\n // Handle different join types\n if (joinType === `inner`) {\n return !!(mainNamespacedRow && joinedNamespacedRow)\n }\n\n if (joinType === `left`) {\n return !!mainNamespacedRow\n }\n\n if (joinType === `right`) {\n return !!joinedNamespacedRow\n }\n\n // For full joins, always include\n return true\n }),\n map((result) => {\n const [_key, [main, joined]] = result\n const mainKey = main?.[0]\n const mainNamespacedRow = main?.[1]\n const joinedKey = joined?.[0]\n const 
joinedNamespacedRow = joined?.[1]\n\n // Merge the namespaced rows\n const mergedNamespacedRow: NamespacedRow = {}\n\n // Add main row data if it exists\n if (mainNamespacedRow) {\n Object.assign(mergedNamespacedRow, mainNamespacedRow)\n }\n\n // Add joined row data if it exists\n if (joinedNamespacedRow) {\n Object.assign(mergedNamespacedRow, joinedNamespacedRow)\n }\n\n // We create a composite key that combines the main and joined keys\n const resultKey = `[${mainKey},${joinedKey}]`\n\n return [resultKey, mergedNamespacedRow] as [string, NamespacedRow]\n })\n )\n }\n}\n"],"names":["joinOperator"],"mappings":";;;AAyBO,SAAS,aACd,UACA,aACA,QACA,gBACA,WACA,OAC0B;AAC1B,MAAI,iBAAiB;AAErB,aAAW,cAAc,aAAa;AACnB,qBAAA;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EAAA;AAGK,SAAA;AACT;AAKA,SAAS,YACP,UACA,YACA,QACA,gBACA,WACA,OAC0B;AAE1B,QAAM,EAAE,OAAO,kBAAkB,OAAO,YAAgB,IAAA;AAAA,IACtD,WAAW;AAAA,IACX;AAAA,IACA;AAAA,EACF;AAGA,SAAO,gBAAgB,IAAI;AAGrB,QAAA,WACJ,WAAW,SAAS,UAChB,UACA,WAAW,SAAS,UAClB,SACC,WAAW;AAGd,QAAA,mBAAmB,kBAAkB,WAAW,IAAI;AACpD,QAAA,oBAAoB,kBAAkB,WAAW,KAAK;AAG5D,QAAM,eAAe,SAAS;AAAA,IAC5B,IAAI,CAAC,CAAC,YAAY,aAAa,MAAM;AAE7B,YAAA,UAAU,iBAAiB,aAAa;AAG9C,aAAO,CAAC,SAAS,CAAC,YAAY,aAAa,CAAC;AAAA,IAI7C,CAAA;AAAA,EACH;AAGA,QAAM,iBAAiB,YAAY;AAAA,IACjC,IAAI,CAAC,CAAC,YAAY,GAAG,MAAM;AAEzB,YAAM,gBAA+B,EAAE,CAAC,gBAAgB,GAAG,IAAI;AAGzD,YAAA,WAAW,kBAAkB,aAAa;AAGhD,aAAO,CAAC,UAAU,CAAC,YAAY,aAAa,CAAC;AAAA,IAI9C,CAAA;AAAA,EACH;AAGI,MAAA,CAAC,CAAC,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,QAAQ,GAAG;AAC1D,UAAM,IAAI,MAAM,0BAA0B,WAAW,IAAI,EAAE;AAAA,EAAA;AAE7D,SAAO,aAAa;AAAA,IAClBA,KAAa,gBAAgB,QAAQ;AAAA,IACrC,YAAY;AAAA,IACZ,mBAAmB,WAAW,IAAI;AAAA,EACpC;AACF;AAKA,SAAS,kBACP,MACA,WACA,OACuC;AACvC,UAAQ,KAAK,MAAM;AAAA,IACjB,KAAK,iBAAiB;AACpB,YAAM,QAAQ,UAAU,KAAK,WAAW,EAAE;AAC1C,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,yBAAyB,KAAK,WAAW,EAAE;AAAA,QAC7C;AAAA,MAAA;AAEF,aAAO,EAAE,OAAO,KAAK,OAAO,MAAM;AAAA,IAAA;AAAA,IAEpC,KAAK,YAAY;AAEf,YAAM,gBAAgB,aAAa,KAAK,OAAO,WAAW,KAAK;AAI/D,YAAM,iBAAiB,cAAc;AAAA,QACnC,IAAI,CAAC,SAAc;AACjB,gBAAM,CAAC,KAAK,CAAC,OAAO,aAAa,CAAC,IAAI;AAC/B,iBAAA,CAAC,KAAK,KAAK;AAAA,QACnB,CAAA;AAAA,MACH;AAEA,aAAO,EAAE,OAAO,KAAK,OAAO,OAAO,eAA8B;AAAA,IAAA;AAAA,IAEnE;AACE,YAAM,IAAI,MAAM,iCAAkC,KAAa,IAAI,EAAE;AAAA,EAAA;AAE3E;AAKA,SAAS,mBAAmB,UAAkB;AAC5C,SAAO,SACL,UAS0B;AAC1B,WAAO,SAAS;AAAA;AAAA,MAEd,OAAO,CAAC,WAAW;AACjB,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AACzB,cAAA,oBAAoB,6BAAO;AAC3B,cAAA,sBAAsB,iCAAS;AAGrC,YAAI,aAAa,SAAS;AACjB,iBAAA,CAAC,EAAE,qBAAqB;AAAA,QAAA;AAGjC,YAAI,aAAa,QAAQ;AACvB,iBAAO,CAAC,CAAC;AAAA,QAAA;AAGX,YAAI,aAAa,SAAS;AACxB,iBAAO,CAAC,CAAC;AAAA,QAAA;AAIJ,eAAA;AAAA,MAAA,CACR;AAAA,MACD,IAAI,CAAC,WAAW;AACd,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AACzB,cAAA,UAAU,6BAAO;AACjB,cAAA,oBAAoB,6BAAO;AAC3B,cAAA,YAAY,iCAAS;AACrB,cAAA,sBAAsB,iCAAS;AAGrC,cAAM,sBAAqC,CAAC;AAG5C,YAAI,mBAAmB;AACd,iBAAA,OAAO,qBAAqB,iBAAiB;AAAA,QAAA;AAItD,YAAI,qBAAqB;AAChB,iBAAA,OAAO,qBAAqB,mBAAmB;AAAA,QAAA;AAIxD,cAAM,YAAY,IAAI,OAAO,IAAI,SAAS;AAEnC,eAAA,CAAC,WAAW,mBAAmB;AAAA,MACvC,CAAA;AAAA,IACH;AAAA,EACF;AACF;"}
+ {"version":3,"file":"joins.js","sources":["../../../../src/query/compiler/joins.ts"],"sourcesContent":["import {\n consolidate,\n filter,\n join as joinOperator,\n map,\n} from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport { compileQuery } from \"./index.js\"\nimport type { IStreamBuilder, JoinType } from \"@electric-sql/d2mini\"\nimport type { CollectionRef, JoinClause, QueryIR, QueryRef } from \"../ir.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n ResultStream,\n} from \"../../types.js\"\n\n/**\n * Cache for compiled subqueries to avoid duplicate compilation\n */\ntype QueryCache = WeakMap<QueryIR, ResultStream>\n\n/**\n * Processes all join clauses in a query\n */\nexport function processJoins(\n pipeline: NamespacedAndKeyedStream,\n joinClauses: Array<JoinClause>,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): NamespacedAndKeyedStream {\n let resultPipeline = pipeline\n\n for (const joinClause of joinClauses) {\n resultPipeline = processJoin(\n resultPipeline,\n joinClause,\n tables,\n mainTableAlias,\n allInputs,\n cache\n )\n }\n\n return resultPipeline\n}\n\n/**\n * Processes a single join clause\n */\nfunction processJoin(\n pipeline: NamespacedAndKeyedStream,\n joinClause: JoinClause,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): NamespacedAndKeyedStream {\n // Get the joined table alias and input stream\n const { alias: joinedTableAlias, input: joinedInput } = processJoinSource(\n joinClause.from,\n allInputs,\n cache\n )\n\n // Add the joined table to the tables map\n tables[joinedTableAlias] = joinedInput\n\n // Convert join type to D2 join type\n const joinType: JoinType =\n joinClause.type === `cross`\n ? `inner`\n : joinClause.type === `outer`\n ? 
`full`\n : (joinClause.type as JoinType)\n\n // Pre-compile the join expressions\n const compiledLeftExpr = compileExpression(joinClause.left)\n const compiledRightExpr = compileExpression(joinClause.right)\n\n // Prepare the main pipeline for joining\n const mainPipeline = pipeline.pipe(\n map(([currentKey, namespacedRow]) => {\n // Extract the join key from the left side of the join condition\n const leftKey = compiledLeftExpr(namespacedRow)\n\n // Return [joinKey, [originalKey, namespacedRow]]\n return [leftKey, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Prepare the joined pipeline\n const joinedPipeline = joinedInput.pipe(\n map(([currentKey, row]) => {\n // Wrap the row in a namespaced structure\n const namespacedRow: NamespacedRow = { [joinedTableAlias]: row }\n\n // Extract the join key from the right side of the join condition\n const rightKey = compiledRightExpr(namespacedRow)\n\n // Return [joinKey, [originalKey, namespacedRow]]\n return [rightKey, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Apply the join operation\n if (![`inner`, `left`, `right`, `full`].includes(joinType)) {\n throw new Error(`Unsupported join type: ${joinClause.type}`)\n }\n return mainPipeline.pipe(\n joinOperator(joinedPipeline, joinType),\n consolidate(),\n processJoinResults(joinClause.type)\n )\n}\n\n/**\n * Processes the join source (collection or sub-query)\n */\nfunction processJoinSource(\n from: CollectionRef | QueryRef,\n allInputs: Record<string, KeyedStream>,\n cache: QueryCache\n): { alias: string; input: KeyedStream } {\n switch (from.type) {\n case `collectionRef`: {\n const input = allInputs[from.collection.id]\n if (!input) {\n throw new Error(\n `Input for collection \"${from.collection.id}\" not found in inputs map`\n )\n }\n return { alias: from.alias, input }\n }\n case `queryRef`: {\n // Recursively compile the sub-query with cache\n const subQueryInput = compileQuery(from.query, allInputs, cache)\n\n // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)\n // We need to extract just the value for use in parent queries\n const extractedInput = subQueryInput.pipe(\n map((data: any) => {\n const [key, [value, _orderByIndex]] = data\n return [key, value] as [unknown, any]\n })\n )\n\n return { alias: from.alias, input: extractedInput as KeyedStream }\n }\n default:\n throw new Error(`Unsupported join source type: ${(from as any).type}`)\n }\n}\n\n/**\n * Processes the results of a join operation\n */\nfunction processJoinResults(joinType: string) {\n return function (\n pipeline: IStreamBuilder<\n [\n key: string,\n [\n [string, NamespacedRow] | undefined,\n [string, NamespacedRow] | undefined,\n ],\n ]\n >\n ): NamespacedAndKeyedStream {\n return pipeline.pipe(\n // Process the join result and handle nulls\n filter((result) => {\n const [_key, [main, joined]] = result\n const mainNamespacedRow = main?.[1]\n const joinedNamespacedRow = joined?.[1]\n\n // Handle different join types\n if (joinType === `inner`) {\n return !!(mainNamespacedRow && joinedNamespacedRow)\n }\n\n if (joinType === `left`) {\n return !!mainNamespacedRow\n }\n\n if (joinType === `right`) {\n return !!joinedNamespacedRow\n }\n\n // For full joins, always include\n return true\n }),\n map((result) => {\n const [_key, [main, joined]] = result\n const mainKey = main?.[0]\n const mainNamespacedRow = main?.[1]\n const joinedKey = joined?.[0]\n const 
joinedNamespacedRow = joined?.[1]\n\n // Merge the namespaced rows\n const mergedNamespacedRow: NamespacedRow = {}\n\n // Add main row data if it exists\n if (mainNamespacedRow) {\n Object.assign(mergedNamespacedRow, mainNamespacedRow)\n }\n\n // Add joined row data if it exists\n if (joinedNamespacedRow) {\n Object.assign(mergedNamespacedRow, joinedNamespacedRow)\n }\n\n // We create a composite key that combines the main and joined keys\n const resultKey = `[${mainKey},${joinedKey}]`\n\n return [resultKey, mergedNamespacedRow] as [string, NamespacedRow]\n })\n )\n }\n}\n"],"names":["joinOperator"],"mappings":";;;AAyBO,SAAS,aACd,UACA,aACA,QACA,gBACA,WACA,OAC0B;AAC1B,MAAI,iBAAiB;AAErB,aAAW,cAAc,aAAa;AACpC,qBAAiB;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAEA,SAAO;AACT;AAKA,SAAS,YACP,UACA,YACA,QACA,gBACA,WACA,OAC0B;AAE1B,QAAM,EAAE,OAAO,kBAAkB,OAAO,gBAAgB;AAAA,IACtD,WAAW;AAAA,IACX;AAAA,IACA;AAAA,EAAA;AAIF,SAAO,gBAAgB,IAAI;AAG3B,QAAM,WACJ,WAAW,SAAS,UAChB,UACA,WAAW,SAAS,UAClB,SACC,WAAW;AAGpB,QAAM,mBAAmB,kBAAkB,WAAW,IAAI;AAC1D,QAAM,oBAAoB,kBAAkB,WAAW,KAAK;AAG5D,QAAM,eAAe,SAAS;AAAA,IAC5B,IAAI,CAAC,CAAC,YAAY,aAAa,MAAM;AAEnC,YAAM,UAAU,iBAAiB,aAAa;AAG9C,aAAO,CAAC,SAAS,CAAC,YAAY,aAAa,CAAC;AAAA,IAI9C,CAAC;AAAA,EAAA;AAIH,QAAM,iBAAiB,YAAY;AAAA,IACjC,IAAI,CAAC,CAAC,YAAY,GAAG,MAAM;AAEzB,YAAM,gBAA+B,EAAE,CAAC,gBAAgB,GAAG,IAAA;AAG3D,YAAM,WAAW,kBAAkB,aAAa;AAGhD,aAAO,CAAC,UAAU,CAAC,YAAY,aAAa,CAAC;AAAA,IAI/C,CAAC;AAAA,EAAA;AAIH,MAAI,CAAC,CAAC,SAAS,QAAQ,SAAS,MAAM,EAAE,SAAS,QAAQ,GAAG;AAC1D,UAAM,IAAI,MAAM,0BAA0B,WAAW,IAAI,EAAE;AAAA,EAC7D;AACA,SAAO,aAAa;AAAA,IAClBA,KAAa,gBAAgB,QAAQ;AAAA,IACrC,YAAA;AAAA,IACA,mBAAmB,WAAW,IAAI;AAAA,EAAA;AAEtC;AAKA,SAAS,kBACP,MACA,WACA,OACuC;AACvC,UAAQ,KAAK,MAAA;AAAA,IACX,KAAK,iBAAiB;AACpB,YAAM,QAAQ,UAAU,KAAK,WAAW,EAAE;AAC1C,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR,yBAAyB,KAAK,WAAW,EAAE;AAAA,QAAA;AAAA,MAE/C;AACA,aAAO,EAAE,OAAO,KAAK,OAAO,MAAA;AAAA,IAC9B;AAAA,IACA,KAAK,YAAY;AAEf,YAAM,gBAAgB,aAAa,KAAK,OAAO,WAAW,KAAK;AAI/D,YAAM,iBAAiB,cAAc;AAAA,QACnC,IAAI,CAAC,SAAc;AACjB,gBAAM,CAAC,KAAK,CAAC,OAAO,aAAa,CAAC,IAAI;AACtC,iBAAO,CAAC,KAAK,KAAK;AAAA,QACpB,CAAC;AAAA,MAAA;AAGH,aAAO,EAAE,OAAO,KAAK,OAAO,OAAO,eAAA;AAAA,IACrC;AAAA,IACA;AACE,YAAM,IAAI,MAAM,iCAAkC,KAAa,IAAI,EAAE;AAAA,EAAA;AAE3E;AAKA,SAAS,mBAAmB,UAAkB;AAC5C,SAAO,SACL,UAS0B;AAC1B,WAAO,SAAS;AAAA;AAAA,MAEd,OAAO,CAAC,WAAW;AACjB,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AAC/B,cAAM,oBAAoB,6BAAO;AACjC,cAAM,sBAAsB,iCAAS;AAGrC,YAAI,aAAa,SAAS;AACxB,iBAAO,CAAC,EAAE,qBAAqB;AAAA,QACjC;AAEA,YAAI,aAAa,QAAQ;AACvB,iBAAO,CAAC,CAAC;AAAA,QACX;AAEA,YAAI,aAAa,SAAS;AACxB,iBAAO,CAAC,CAAC;AAAA,QACX;AAGA,eAAO;AAAA,MACT,CAAC;AAAA,MACD,IAAI,CAAC,WAAW;AACd,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AAC/B,cAAM,UAAU,6BAAO;AACvB,cAAM,oBAAoB,6BAAO;AACjC,cAAM,YAAY,iCAAS;AAC3B,cAAM,sBAAsB,iCAAS;AAGrC,cAAM,sBAAqC,CAAA;AAG3C,YAAI,mBAAmB;AACrB,iBAAO,OAAO,qBAAqB,iBAAiB;AAAA,QACtD;AAGA,YAAI,qBAAqB;AACvB,iBAAO,OAAO,qBAAqB,mBAAmB;AAAA,QACxD;AAGA,cAAM,YAAY,IAAI,OAAO,IAAI,SAAS;AAE1C,eAAO,CAAC,WAAW,mBAAmB;AAAA,MACxC,CAAC;AAAA,IAAA;AAAA,EAEL;AACF;"}
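The joins map above documents how IR join types are lowered onto d2mini's join operator: cross compiles to inner, outer compiles to full, and inner/left/right/full pass through, with matched rows re-keyed by a composite "[mainKey,joinedKey]" string. A standalone sketch of just that mapping (a re-implementation for illustration, not an import from the package):

type D2JoinType = `inner` | `left` | `right` | `full`
type IRJoinType = D2JoinType | `outer` | `cross`

function toD2JoinType(type: IRJoinType): D2JoinType {
  if (type === `cross`) return `inner` // cross joins reuse the inner join operator
  if (type === `outer`) return `full`  // outer is treated as full
  return type
}

console.log(toD2JoinType(`outer`)) // "full"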
@@ -1 +1 @@
- {"version":3,"file":"order-by.js","sources":["../../../../src/query/compiler/order-by.ts"],"sourcesContent":["import { orderByWithFractionalIndex } from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport type { OrderByClause } from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\nimport type { IStreamBuilder, KeyValue } from \"@electric-sql/d2mini\"\n\n/**\n * Processes the ORDER BY clause\n * Works with the new structure that has both namespaced row data and __select_results\n * Always uses fractional indexing and adds the index as __ordering_index to the result\n */\nexport function processOrderBy(\n pipeline: NamespacedAndKeyedStream,\n orderByClause: Array<OrderByClause>,\n limit?: number,\n offset?: number\n): IStreamBuilder<KeyValue<unknown, [NamespacedRow, string]>> {\n // Pre-compile all order by expressions\n const compiledOrderBy = orderByClause.map((clause) => ({\n compiledExpression: compileExpression(clause.expression),\n direction: clause.direction,\n }))\n\n // Create a value extractor function for the orderBy operator\n const valueExtractor = (row: NamespacedRow & { __select_results?: any }) => {\n // For ORDER BY expressions, we need to provide access to both:\n // 1. The original namespaced row data (for direct table column references)\n // 2. The __select_results (for SELECT alias references)\n\n // Create a merged context for expression evaluation\n const orderByContext = { ...row }\n\n // If there are select results, merge them at the top level for alias access\n if (row.__select_results) {\n // Add select results as top-level properties for alias access\n Object.assign(orderByContext, row.__select_results)\n }\n\n if (orderByClause.length > 1) {\n // For multiple orderBy columns, create a composite key\n return compiledOrderBy.map((compiled) =>\n compiled.compiledExpression(orderByContext)\n )\n } else if (orderByClause.length === 1) {\n // For a single orderBy column, use the value directly\n const compiled = compiledOrderBy[0]!\n return compiled.compiledExpression(orderByContext)\n }\n\n // Default case - no ordering\n return null\n }\n\n // Create comparator functions\n const ascComparator = (a: any, b: any): number => {\n // Handle null/undefined\n if (a == null && b == null) return 0\n if (a == null) return -1\n if (b == null) return 1\n\n // if a and b are both strings, compare them based on locale\n if (typeof a === `string` && typeof b === `string`) {\n return a.localeCompare(b)\n }\n\n // if a and b are both arrays, compare them element by element\n if (Array.isArray(a) && Array.isArray(b)) {\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n const result = ascComparator(a[i], b[i])\n if (result !== 0) {\n return result\n }\n }\n // All elements are equal up to the minimum length\n return a.length - b.length\n }\n\n // If both are dates, compare them\n if (a instanceof Date && b instanceof Date) {\n return a.getTime() - b.getTime()\n }\n\n // If at least one of the values is an object, convert to strings\n const bothObjects = typeof a === `object` && typeof b === `object`\n const notNull = a !== null && b !== null\n if (bothObjects && notNull) {\n return a.toString().localeCompare(b.toString())\n }\n\n if (a < b) return -1\n if (a > b) return 1\n return 0\n }\n\n const descComparator = (a: unknown, b: unknown): number => {\n return ascComparator(b, a)\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const 
makeComparator = () => {\n return (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByClause.length > 1) {\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByClause.length; i++) {\n const direction = orderByClause[i]!.direction\n const compareFn =\n direction === `desc` ? descComparator : ascComparator\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByClause.length === 1) {\n const direction = orderByClause[0]!.direction\n return direction === `desc` ? descComparator(a, b) : ascComparator(a, b)\n }\n\n return ascComparator(a, b)\n }\n }\n\n const comparator = makeComparator()\n\n // Use fractional indexing and return the tuple [value, index]\n return pipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit,\n offset,\n comparator,\n })\n // orderByWithFractionalIndex returns [key, [value, index]] - we keep this format\n )\n}\n"],"names":[],"mappings":";;AAWO,SAAS,eACd,UACA,eACA,OACA,QAC4D;AAE5D,QAAM,kBAAkB,cAAc,IAAI,CAAC,YAAY;AAAA,IACrD,oBAAoB,kBAAkB,OAAO,UAAU;AAAA,IACvD,WAAW,OAAO;AAAA,EAAA,EAClB;AAGI,QAAA,iBAAiB,CAAC,QAAoD;AAMpE,UAAA,iBAAiB,EAAE,GAAG,IAAI;AAGhC,QAAI,IAAI,kBAAkB;AAEjB,aAAA,OAAO,gBAAgB,IAAI,gBAAgB;AAAA,IAAA;AAGhD,QAAA,cAAc,SAAS,GAAG;AAE5B,aAAO,gBAAgB;AAAA,QAAI,CAAC,aAC1B,SAAS,mBAAmB,cAAc;AAAA,MAC5C;AAAA,IAAA,WACS,cAAc,WAAW,GAAG;AAE/B,YAAA,WAAW,gBAAgB,CAAC;AAC3B,aAAA,SAAS,mBAAmB,cAAc;AAAA,IAAA;AAI5C,WAAA;AAAA,EACT;AAGM,QAAA,gBAAgB,CAAC,GAAQ,MAAmB;AAEhD,QAAI,KAAK,QAAQ,KAAK,KAAa,QAAA;AAC/B,QAAA,KAAK,KAAa,QAAA;AAClB,QAAA,KAAK,KAAa,QAAA;AAGtB,QAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAC3C,aAAA,EAAE,cAAc,CAAC;AAAA,IAAA;AAI1B,QAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AAC/B,eAAA,IAAI,GAAG,IAAI,KAAK,IAAI,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK;AACrD,cAAM,SAAS,cAAc,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AACvC,YAAI,WAAW,GAAG;AACT,iBAAA;AAAA,QAAA;AAAA,MACT;AAGK,aAAA,EAAE,SAAS,EAAE;AAAA,IAAA;AAIlB,QAAA,aAAa,QAAQ,aAAa,MAAM;AAC1C,aAAO,EAAE,YAAY,EAAE,QAAQ;AAAA,IAAA;AAIjC,UAAM,cAAc,OAAO,MAAM,YAAY,OAAO,MAAM;AACpD,UAAA,UAAU,MAAM,QAAQ,MAAM;AACpC,QAAI,eAAe,SAAS;AAC1B,aAAO,EAAE,SAAS,EAAE,cAAc,EAAE,UAAU;AAAA,IAAA;AAG5C,QAAA,IAAI,EAAU,QAAA;AACd,QAAA,IAAI,EAAU,QAAA;AACX,WAAA;AAAA,EACT;AAEM,QAAA,iBAAiB,CAAC,GAAY,MAAuB;AAClD,WAAA,cAAc,GAAG,CAAC;AAAA,EAC3B;AAGA,QAAM,iBAAiB,MAAM;AACpB,WAAA,CAAC,GAAY,MAAe;AAE7B,UAAA,cAAc,SAAS,GAAG;AAC5B,cAAM,SAAS;AACf,cAAM,SAAS;AACf,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AACvC,gBAAA,YAAY,cAAc,CAAC,EAAG;AAC9B,gBAAA,YACJ,cAAc,SAAS,iBAAiB;AAC1C,gBAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,cAAI,WAAW,GAAG;AACT,mBAAA;AAAA,UAAA;AAAA,QACT;AAEK,eAAA,OAAO,SAAS,OAAO;AAAA,MAAA;AAI5B,UAAA,cAAc,WAAW,GAAG;AACxB,cAAA,YAAY,cAAc,CAAC,EAAG;AAC7B,eAAA,cAAc,SAAS,eAAe,GAAG,CAAC,IAAI,cAAc,GAAG,CAAC;AAAA,MAAA;AAGlE,aAAA,cAAc,GAAG,CAAC;AAAA,IAC3B;AAAA,EACF;AAEA,QAAM,aAAa,eAAe;AAGlC,SAAO,SAAS;AAAA,IACd,2BAA2B,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA;AAAA,IACD,CAAA;AAAA;AAAA,EAEH;AACF;"}
+ {"version":3,"file":"order-by.js","sources":["../../../../src/query/compiler/order-by.ts"],"sourcesContent":["import { orderByWithFractionalIndex } from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport type { OrderByClause } from \"../ir.js\"\nimport type { NamespacedAndKeyedStream, NamespacedRow } from \"../../types.js\"\nimport type { IStreamBuilder, KeyValue } from \"@electric-sql/d2mini\"\n\n/**\n * Processes the ORDER BY clause\n * Works with the new structure that has both namespaced row data and __select_results\n * Always uses fractional indexing and adds the index as __ordering_index to the result\n */\nexport function processOrderBy(\n pipeline: NamespacedAndKeyedStream,\n orderByClause: Array<OrderByClause>,\n limit?: number,\n offset?: number\n): IStreamBuilder<KeyValue<unknown, [NamespacedRow, string]>> {\n // Pre-compile all order by expressions\n const compiledOrderBy = orderByClause.map((clause) => ({\n compiledExpression: compileExpression(clause.expression),\n direction: clause.direction,\n }))\n\n // Create a value extractor function for the orderBy operator\n const valueExtractor = (row: NamespacedRow & { __select_results?: any }) => {\n // For ORDER BY expressions, we need to provide access to both:\n // 1. The original namespaced row data (for direct table column references)\n // 2. The __select_results (for SELECT alias references)\n\n // Create a merged context for expression evaluation\n const orderByContext = { ...row }\n\n // If there are select results, merge them at the top level for alias access\n if (row.__select_results) {\n // Add select results as top-level properties for alias access\n Object.assign(orderByContext, row.__select_results)\n }\n\n if (orderByClause.length > 1) {\n // For multiple orderBy columns, create a composite key\n return compiledOrderBy.map((compiled) =>\n compiled.compiledExpression(orderByContext)\n )\n } else if (orderByClause.length === 1) {\n // For a single orderBy column, use the value directly\n const compiled = compiledOrderBy[0]!\n return compiled.compiledExpression(orderByContext)\n }\n\n // Default case - no ordering\n return null\n }\n\n // Create comparator functions\n const ascComparator = (a: any, b: any): number => {\n // Handle null/undefined\n if (a == null && b == null) return 0\n if (a == null) return -1\n if (b == null) return 1\n\n // if a and b are both strings, compare them based on locale\n if (typeof a === `string` && typeof b === `string`) {\n return a.localeCompare(b)\n }\n\n // if a and b are both arrays, compare them element by element\n if (Array.isArray(a) && Array.isArray(b)) {\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n const result = ascComparator(a[i], b[i])\n if (result !== 0) {\n return result\n }\n }\n // All elements are equal up to the minimum length\n return a.length - b.length\n }\n\n // If both are dates, compare them\n if (a instanceof Date && b instanceof Date) {\n return a.getTime() - b.getTime()\n }\n\n // If at least one of the values is an object, convert to strings\n const bothObjects = typeof a === `object` && typeof b === `object`\n const notNull = a !== null && b !== null\n if (bothObjects && notNull) {\n return a.toString().localeCompare(b.toString())\n }\n\n if (a < b) return -1\n if (a > b) return 1\n return 0\n }\n\n const descComparator = (a: unknown, b: unknown): number => {\n return ascComparator(b, a)\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const 
makeComparator = () => {\n return (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByClause.length > 1) {\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByClause.length; i++) {\n const direction = orderByClause[i]!.direction\n const compareFn =\n direction === `desc` ? descComparator : ascComparator\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByClause.length === 1) {\n const direction = orderByClause[0]!.direction\n return direction === `desc` ? descComparator(a, b) : ascComparator(a, b)\n }\n\n return ascComparator(a, b)\n }\n }\n\n const comparator = makeComparator()\n\n // Use fractional indexing and return the tuple [value, index]\n return pipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit,\n offset,\n comparator,\n })\n // orderByWithFractionalIndex returns [key, [value, index]] - we keep this format\n )\n}\n"],"names":[],"mappings":";;AAWO,SAAS,eACd,UACA,eACA,OACA,QAC4D;AAE5D,QAAM,kBAAkB,cAAc,IAAI,CAAC,YAAY;AAAA,IACrD,oBAAoB,kBAAkB,OAAO,UAAU;AAAA,IACvD,WAAW,OAAO;AAAA,EAAA,EAClB;AAGF,QAAM,iBAAiB,CAAC,QAAoD;AAM1E,UAAM,iBAAiB,EAAE,GAAG,IAAA;AAG5B,QAAI,IAAI,kBAAkB;AAExB,aAAO,OAAO,gBAAgB,IAAI,gBAAgB;AAAA,IACpD;AAEA,QAAI,cAAc,SAAS,GAAG;AAE5B,aAAO,gBAAgB;AAAA,QAAI,CAAC,aAC1B,SAAS,mBAAmB,cAAc;AAAA,MAAA;AAAA,IAE9C,WAAW,cAAc,WAAW,GAAG;AAErC,YAAM,WAAW,gBAAgB,CAAC;AAClC,aAAO,SAAS,mBAAmB,cAAc;AAAA,IACnD;AAGA,WAAO;AAAA,EACT;AAGA,QAAM,gBAAgB,CAAC,GAAQ,MAAmB;AAEhD,QAAI,KAAK,QAAQ,KAAK,KAAM,QAAO;AACnC,QAAI,KAAK,KAAM,QAAO;AACtB,QAAI,KAAK,KAAM,QAAO;AAGtB,QAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAClD,aAAO,EAAE,cAAc,CAAC;AAAA,IAC1B;AAGA,QAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AACxC,eAAS,IAAI,GAAG,IAAI,KAAK,IAAI,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK;AACrD,cAAM,SAAS,cAAc,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AACvC,YAAI,WAAW,GAAG;AAChB,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO,EAAE,SAAS,EAAE;AAAA,IACtB;AAGA,QAAI,aAAa,QAAQ,aAAa,MAAM;AAC1C,aAAO,EAAE,YAAY,EAAE,QAAA;AAAA,IACzB;AAGA,UAAM,cAAc,OAAO,MAAM,YAAY,OAAO,MAAM;AAC1D,UAAM,UAAU,MAAM,QAAQ,MAAM;AACpC,QAAI,eAAe,SAAS;AAC1B,aAAO,EAAE,SAAA,EAAW,cAAc,EAAE,UAAU;AAAA,IAChD;AAEA,QAAI,IAAI,EAAG,QAAO;AAClB,QAAI,IAAI,EAAG,QAAO;AAClB,WAAO;AAAA,EACT;AAEA,QAAM,iBAAiB,CAAC,GAAY,MAAuB;AACzD,WAAO,cAAc,GAAG,CAAC;AAAA,EAC3B;AAGA,QAAM,iBAAiB,MAAM;AAC3B,WAAO,CAAC,GAAY,MAAe;AAEjC,UAAI,cAAc,SAAS,GAAG;AAC5B,cAAM,SAAS;AACf,cAAM,SAAS;AACf,iBAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,gBAAM,YAAY,cAAc,CAAC,EAAG;AACpC,gBAAM,YACJ,cAAc,SAAS,iBAAiB;AAC1C,gBAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,cAAI,WAAW,GAAG;AAChB,mBAAO;AAAA,UACT;AAAA,QACF;AACA,eAAO,OAAO,SAAS,OAAO;AAAA,MAChC;AAGA,UAAI,cAAc,WAAW,GAAG;AAC9B,cAAM,YAAY,cAAc,CAAC,EAAG;AACpC,eAAO,cAAc,SAAS,eAAe,GAAG,CAAC,IAAI,cAAc,GAAG,CAAC;AAAA,MACzE;AAEA,aAAO,cAAc,GAAG,CAAC;AAAA,IAC3B;AAAA,EACF;AAEA,QAAM,aAAa,eAAA;AAGnB,SAAO,SAAS;AAAA,IACd,2BAA2B,gBAAgB;AAAA,MACzC;AAAA,MACA;AAAA,MACA;AAAA,IAAA,CACD;AAAA;AAAA,EAAA;AAGL;"}
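The order-by map above always routes through orderByWithFractionalIndex, so every ordered row carries a fractional index string, and the comparator treats null/undefined as smallest, compares strings by locale, composite keys (multi-column ORDER BY) element by element, and Dates by timestamp. A standalone sketch of those comparator semantics, mirroring ascComparator rather than importing it:

function ascCompare(a: any, b: any): number {
  // null/undefined always sort first
  if (a == null && b == null) return 0
  if (a == null) return -1
  if (b == null) return 1
  // strings compare by locale
  if (typeof a === `string` && typeof b === `string`) return a.localeCompare(b)
  // composite ORDER BY keys are arrays, compared element by element
  if (Array.isArray(a) && Array.isArray(b)) {
    for (let i = 0; i < Math.min(a.length, b.length); i++) {
      const r = ascCompare(a[i], b[i])
      if (r !== 0) return r
    }
    return a.length - b.length
  }
  // dates compare by timestamp
  if (a instanceof Date && b instanceof Date) return a.getTime() - b.getTime()
  return a < b ? -1 : a > b ? 1 : 0
}

// descending order simply flips the arguments: descCompare(a, b) === ascCompare(b, a)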
@@ -1 +1 @@
- {"version":3,"file":"select.js","sources":["../../../../src/query/compiler/select.ts"],"sourcesContent":["import { map } from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport type { Aggregate, BasicExpression, Select } from \"../ir.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../../types.js\"\n\n/**\n * Processes the SELECT clause and places results in __select_results\n * while preserving the original namespaced row for ORDER BY access\n */\nexport function processSelectToResults(\n pipeline: NamespacedAndKeyedStream,\n select: Select,\n _allInputs: Record<string, KeyedStream>\n): NamespacedAndKeyedStream {\n // Pre-compile all select expressions\n const compiledSelect: Array<{\n alias: string\n compiledExpression: (row: NamespacedRow) => any\n }> = []\n const spreadAliases: Array<string> = []\n\n for (const [alias, expression] of Object.entries(select)) {\n if (alias.startsWith(`__SPREAD_SENTINEL__`)) {\n // Extract the table alias from the sentinel key\n const tableAlias = alias.replace(`__SPREAD_SENTINEL__`, ``)\n spreadAliases.push(tableAlias)\n } else {\n if (isAggregateExpression(expression)) {\n // For aggregates, we'll store the expression info for GROUP BY processing\n // but still compile a placeholder that will be replaced later\n compiledSelect.push({\n alias,\n compiledExpression: () => null, // Placeholder - will be handled by GROUP BY\n })\n } else {\n compiledSelect.push({\n alias,\n compiledExpression: compileExpression(expression as BasicExpression),\n })\n }\n }\n }\n\n return pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults: Record<string, any> = {}\n\n // First pass: spread table data for any spread sentinels\n for (const tableAlias of spreadAliases) {\n const tableData = namespacedRow[tableAlias]\n if (tableData && typeof tableData === `object`) {\n // Spread the table data into the result, but don't overwrite explicit fields\n for (const [fieldName, fieldValue] of Object.entries(tableData)) {\n if (!(fieldName in selectResults)) {\n selectResults[fieldName] = fieldValue\n }\n }\n }\n }\n\n // Second pass: evaluate all compiled select expressions (non-aggregates)\n for (const { alias, compiledExpression } of compiledSelect) {\n selectResults[alias] = compiledExpression(namespacedRow)\n }\n\n // Return the namespaced row with __select_results added\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [\n string,\n typeof namespacedRow & { __select_results: typeof selectResults },\n ]\n })\n )\n}\n\n/**\n * Processes the SELECT clause (legacy function - kept for compatibility)\n */\nexport function processSelect(\n pipeline: NamespacedAndKeyedStream,\n select: Select,\n _allInputs: Record<string, KeyedStream>\n): KeyedStream {\n // Pre-compile all select expressions\n const compiledSelect: Array<{\n alias: string\n compiledExpression: (row: NamespacedRow) => any\n }> = []\n const spreadAliases: Array<string> = []\n\n for (const [alias, expression] of Object.entries(select)) {\n if (alias.startsWith(`__SPREAD_SENTINEL__`)) {\n // Extract the table alias from the sentinel key\n const tableAlias = alias.replace(`__SPREAD_SENTINEL__`, ``)\n spreadAliases.push(tableAlias)\n } else {\n if (isAggregateExpression(expression)) {\n // Aggregates should be handled by GROUP BY processing, not here\n throw new Error(\n `Aggregate expressions in SELECT clause should be handled by GROUP BY processing`\n )\n }\n compiledSelect.push({\n 
alias,\n compiledExpression: compileExpression(expression as BasicExpression),\n })\n }\n }\n\n return pipeline.pipe(\n map(([key, namespacedRow]) => {\n const result: Record<string, any> = {}\n\n // First pass: spread table data for any spread sentinels\n for (const tableAlias of spreadAliases) {\n const tableData = namespacedRow[tableAlias]\n if (tableData && typeof tableData === `object`) {\n // Spread the table data into the result, but don't overwrite explicit fields\n for (const [fieldName, fieldValue] of Object.entries(tableData)) {\n if (!(fieldName in result)) {\n result[fieldName] = fieldValue\n }\n }\n }\n }\n\n // Second pass: evaluate all compiled select expressions\n for (const { alias, compiledExpression } of compiledSelect) {\n result[alias] = compiledExpression(namespacedRow)\n }\n\n return [key, result] as [string, typeof result]\n })\n )\n}\n\n/**\n * Helper function to check if an expression is an aggregate\n */\nfunction isAggregateExpression(\n expr: BasicExpression | Aggregate\n): expr is Aggregate {\n return expr.type === `agg`\n}\n\n/**\n * Processes a single argument in a function context\n */\nexport function processArgument(\n arg: BasicExpression | Aggregate,\n namespacedRow: NamespacedRow\n): any {\n if (isAggregateExpression(arg)) {\n throw new Error(\n `Aggregate expressions are not supported in this context. Use GROUP BY clause for aggregates.`\n )\n }\n\n // Pre-compile the expression and evaluate immediately\n const compiledExpression = compileExpression(arg)\n const value = compiledExpression(namespacedRow)\n\n return value\n}\n"],"names":[],"mappings":";;AAagB,SAAA,uBACd,UACA,QACA,YAC0B;AAE1B,QAAM,iBAGD,CAAC;AACN,QAAM,gBAA+B,CAAC;AAEtC,aAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,MAAM,GAAG;AACpD,QAAA,MAAM,WAAW,qBAAqB,GAAG;AAE3C,YAAM,aAAa,MAAM,QAAQ,uBAAuB,EAAE;AAC1D,oBAAc,KAAK,UAAU;AAAA,IAAA,OACxB;AACD,UAAA,sBAAsB,UAAU,GAAG;AAGrC,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA,oBAAoB,MAAM;AAAA;AAAA,QAAA,CAC3B;AAAA,MAAA,OACI;AACL,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA,oBAAoB,kBAAkB,UAA6B;AAAA,QAAA,CACpE;AAAA,MAAA;AAAA,IACH;AAAA,EACF;AAGF,SAAO,SAAS;AAAA,IACd,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,YAAM,gBAAqC,CAAC;AAG5C,iBAAW,cAAc,eAAe;AAChC,cAAA,YAAY,cAAc,UAAU;AACtC,YAAA,aAAa,OAAO,cAAc,UAAU;AAE9C,qBAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,SAAS,GAAG;AAC3D,gBAAA,EAAE,aAAa,gBAAgB;AACjC,4BAAc,SAAS,IAAI;AAAA,YAAA;AAAA,UAC7B;AAAA,QACF;AAAA,MACF;AAIF,iBAAW,EAAE,OAAO,mBAAmB,KAAK,gBAAgB;AAC5C,sBAAA,KAAK,IAAI,mBAAmB,aAAa;AAAA,MAAA;AAIlD,aAAA;AAAA,QACL;AAAA,QACA;AAAA,UACE,GAAG;AAAA,UACH,kBAAkB;AAAA,QAAA;AAAA,MAEtB;AAAA,IAID,CAAA;AAAA,EACH;AACF;AAkEA,SAAS,sBACP,MACmB;AACnB,SAAO,KAAK,SAAS;AACvB;"}
+ {"version":3,"file":"select.js","sources":["../../../../src/query/compiler/select.ts"],"sourcesContent":["import { map } from \"@electric-sql/d2mini\"\nimport { compileExpression } from \"./evaluators.js\"\nimport type { Aggregate, BasicExpression, Select } from \"../ir.js\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../../types.js\"\n\n/**\n * Processes the SELECT clause and places results in __select_results\n * while preserving the original namespaced row for ORDER BY access\n */\nexport function processSelectToResults(\n pipeline: NamespacedAndKeyedStream,\n select: Select,\n _allInputs: Record<string, KeyedStream>\n): NamespacedAndKeyedStream {\n // Pre-compile all select expressions\n const compiledSelect: Array<{\n alias: string\n compiledExpression: (row: NamespacedRow) => any\n }> = []\n const spreadAliases: Array<string> = []\n\n for (const [alias, expression] of Object.entries(select)) {\n if (alias.startsWith(`__SPREAD_SENTINEL__`)) {\n // Extract the table alias from the sentinel key\n const tableAlias = alias.replace(`__SPREAD_SENTINEL__`, ``)\n spreadAliases.push(tableAlias)\n } else {\n if (isAggregateExpression(expression)) {\n // For aggregates, we'll store the expression info for GROUP BY processing\n // but still compile a placeholder that will be replaced later\n compiledSelect.push({\n alias,\n compiledExpression: () => null, // Placeholder - will be handled by GROUP BY\n })\n } else {\n compiledSelect.push({\n alias,\n compiledExpression: compileExpression(expression as BasicExpression),\n })\n }\n }\n }\n\n return pipeline.pipe(\n map(([key, namespacedRow]) => {\n const selectResults: Record<string, any> = {}\n\n // First pass: spread table data for any spread sentinels\n for (const tableAlias of spreadAliases) {\n const tableData = namespacedRow[tableAlias]\n if (tableData && typeof tableData === `object`) {\n // Spread the table data into the result, but don't overwrite explicit fields\n for (const [fieldName, fieldValue] of Object.entries(tableData)) {\n if (!(fieldName in selectResults)) {\n selectResults[fieldName] = fieldValue\n }\n }\n }\n }\n\n // Second pass: evaluate all compiled select expressions (non-aggregates)\n for (const { alias, compiledExpression } of compiledSelect) {\n selectResults[alias] = compiledExpression(namespacedRow)\n }\n\n // Return the namespaced row with __select_results added\n return [\n key,\n {\n ...namespacedRow,\n __select_results: selectResults,\n },\n ] as [\n string,\n typeof namespacedRow & { __select_results: typeof selectResults },\n ]\n })\n )\n}\n\n/**\n * Processes the SELECT clause (legacy function - kept for compatibility)\n */\nexport function processSelect(\n pipeline: NamespacedAndKeyedStream,\n select: Select,\n _allInputs: Record<string, KeyedStream>\n): KeyedStream {\n // Pre-compile all select expressions\n const compiledSelect: Array<{\n alias: string\n compiledExpression: (row: NamespacedRow) => any\n }> = []\n const spreadAliases: Array<string> = []\n\n for (const [alias, expression] of Object.entries(select)) {\n if (alias.startsWith(`__SPREAD_SENTINEL__`)) {\n // Extract the table alias from the sentinel key\n const tableAlias = alias.replace(`__SPREAD_SENTINEL__`, ``)\n spreadAliases.push(tableAlias)\n } else {\n if (isAggregateExpression(expression)) {\n // Aggregates should be handled by GROUP BY processing, not here\n throw new Error(\n `Aggregate expressions in SELECT clause should be handled by GROUP BY processing`\n )\n }\n compiledSelect.push({\n 
alias,\n compiledExpression: compileExpression(expression as BasicExpression),\n })\n }\n }\n\n return pipeline.pipe(\n map(([key, namespacedRow]) => {\n const result: Record<string, any> = {}\n\n // First pass: spread table data for any spread sentinels\n for (const tableAlias of spreadAliases) {\n const tableData = namespacedRow[tableAlias]\n if (tableData && typeof tableData === `object`) {\n // Spread the table data into the result, but don't overwrite explicit fields\n for (const [fieldName, fieldValue] of Object.entries(tableData)) {\n if (!(fieldName in result)) {\n result[fieldName] = fieldValue\n }\n }\n }\n }\n\n // Second pass: evaluate all compiled select expressions\n for (const { alias, compiledExpression } of compiledSelect) {\n result[alias] = compiledExpression(namespacedRow)\n }\n\n return [key, result] as [string, typeof result]\n })\n )\n}\n\n/**\n * Helper function to check if an expression is an aggregate\n */\nfunction isAggregateExpression(\n expr: BasicExpression | Aggregate\n): expr is Aggregate {\n return expr.type === `agg`\n}\n\n/**\n * Processes a single argument in a function context\n */\nexport function processArgument(\n arg: BasicExpression | Aggregate,\n namespacedRow: NamespacedRow\n): any {\n if (isAggregateExpression(arg)) {\n throw new Error(\n `Aggregate expressions are not supported in this context. Use GROUP BY clause for aggregates.`\n )\n }\n\n // Pre-compile the expression and evaluate immediately\n const compiledExpression = compileExpression(arg)\n const value = compiledExpression(namespacedRow)\n\n return value\n}\n"],"names":[],"mappings":";;AAaO,SAAS,uBACd,UACA,QACA,YAC0B;AAE1B,QAAM,iBAGD,CAAA;AACL,QAAM,gBAA+B,CAAA;AAErC,aAAW,CAAC,OAAO,UAAU,KAAK,OAAO,QAAQ,MAAM,GAAG;AACxD,QAAI,MAAM,WAAW,qBAAqB,GAAG;AAE3C,YAAM,aAAa,MAAM,QAAQ,uBAAuB,EAAE;AAC1D,oBAAc,KAAK,UAAU;AAAA,IAC/B,OAAO;AACL,UAAI,sBAAsB,UAAU,GAAG;AAGrC,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA,oBAAoB,MAAM;AAAA;AAAA,QAAA,CAC3B;AAAA,MACH,OAAO;AACL,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA,oBAAoB,kBAAkB,UAA6B;AAAA,QAAA,CACpE;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO,SAAS;AAAA,IACd,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,YAAM,gBAAqC,CAAA;AAG3C,iBAAW,cAAc,eAAe;AACtC,cAAM,YAAY,cAAc,UAAU;AAC1C,YAAI,aAAa,OAAO,cAAc,UAAU;AAE9C,qBAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,SAAS,GAAG;AAC/D,gBAAI,EAAE,aAAa,gBAAgB;AACjC,4BAAc,SAAS,IAAI;AAAA,YAC7B;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,EAAE,OAAO,mBAAA,KAAwB,gBAAgB;AAC1D,sBAAc,KAAK,IAAI,mBAAmB,aAAa;AAAA,MACzD;AAGA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,UACE,GAAG;AAAA,UACH,kBAAkB;AAAA,QAAA;AAAA,MACpB;AAAA,IAKJ,CAAC;AAAA,EAAA;AAEL;AAkEA,SAAS,sBACP,MACmB;AACnB,SAAO,KAAK,SAAS;AACvB;"}
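processSelectToResults, shown above, writes SELECT output into a __select_results field while keeping the namespaced row intact for later ORDER BY evaluation; keys prefixed with __SPREAD_SENTINEL__ spread a whole table's row into the result before explicit fields are applied. A small sketch of that two-pass behaviour with an illustrative row (the field names and the uppercasing expression are made up for the example):

const namespacedRow = { user: { id: 1, name: `Ada`, role: `admin` } }
const selectResults: Record<string, any> = {}

// pass 1: a `__SPREAD_SENTINEL__user` key spreads the user row, never clobbering existing keys
for (const [field, value] of Object.entries(namespacedRow.user)) {
  if (!(field in selectResults)) selectResults[field] = value
}

// pass 2: explicitly selected expressions are evaluated and overwrite spread values
selectResults.name = namespacedRow.user.name.toUpperCase()

console.log(selectResults) // { id: 1, name: "ADA", role: "admin" }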
@@ -1 +1 @@
- {"version":3,"file":"ir.js","sources":["../../../src/query/ir.ts"],"sourcesContent":["/*\nThis is the intermediate representation of the query.\n*/\n\nimport type { CollectionImpl } from \"../collection\"\nimport type { NamespacedRow } from \"../types\"\n\nexport interface QueryIR {\n from: From\n select?: Select\n join?: Join\n where?: Array<Where>\n groupBy?: GroupBy\n having?: Array<Having>\n orderBy?: OrderBy\n limit?: Limit\n offset?: Offset\n\n // Functional variants\n fnSelect?: (row: NamespacedRow) => any\n fnWhere?: Array<(row: NamespacedRow) => any>\n fnHaving?: Array<(row: NamespacedRow) => any>\n}\n\nexport type From = CollectionRef | QueryRef\n\nexport type Select = {\n [alias: string]: BasicExpression | Aggregate\n}\n\nexport type Join = Array<JoinClause>\n\nexport interface JoinClause {\n from: CollectionRef | QueryRef\n type: `left` | `right` | `inner` | `outer` | `full` | `cross`\n left: BasicExpression\n right: BasicExpression\n}\n\nexport type Where = BasicExpression<boolean>\n\nexport type GroupBy = Array<BasicExpression>\n\nexport type Having = Where\n\nexport type OrderBy = Array<OrderByClause>\n\nexport type OrderByClause = {\n expression: BasicExpression\n direction: OrderByDirection\n}\n\nexport type OrderByDirection = `asc` | `desc`\n\nexport type Limit = number\n\nexport type Offset = number\n\n/* Expressions */\n\nabstract class BaseExpression<T = any> {\n public abstract type: string\n /** @internal - Type brand for TypeScript inference */\n declare readonly __returnType: T\n}\n\nexport class CollectionRef extends BaseExpression {\n public type = `collectionRef` as const\n constructor(\n public collection: CollectionImpl,\n public alias: string\n ) {\n super()\n }\n}\n\nexport class QueryRef extends BaseExpression {\n public type = `queryRef` as const\n constructor(\n public query: QueryIR,\n public alias: string\n ) {\n super()\n }\n}\n\nexport class PropRef<T = any> extends BaseExpression<T> {\n public type = `ref` as const\n constructor(\n public path: Array<string> // path to the property in the collection, with the alias as the first element\n ) {\n super()\n }\n}\n\nexport class Value<T = any> extends BaseExpression<T> {\n public type = `val` as const\n constructor(\n public value: T // any js value\n ) {\n super()\n }\n}\n\nexport class Func<T = any> extends BaseExpression<T> {\n public type = `func` as const\n constructor(\n public name: string, // such as eq, gt, lt, upper, lower, etc.\n public args: Array<BasicExpression>\n ) {\n super()\n }\n}\n\n// This is the basic expression type that is used in the majority of expression\n// builder callbacks (select, where, groupBy, having, orderBy, etc.)\n// it doesn't include aggregate functions as those are only used in the select clause\nexport type BasicExpression<T = any> = PropRef<T> | Value<T> | Func<T>\n\nexport class Aggregate<T = any> extends BaseExpression<T> {\n public type = `agg` as const\n constructor(\n public name: string, // such as count, avg, sum, min, max, etc.\n public args: Array<BasicExpression>\n ) {\n super()\n 
}\n}\n"],"names":[],"mappings":"AA4DA,MAAe,eAAwB;AAIvC;AAEO,MAAM,sBAAsB,eAAe;AAAA,EAEhD,YACS,YACA,OACP;AACM,UAAA;AAHC,SAAA,aAAA;AACA,SAAA,QAAA;AAHT,SAAO,OAAO;AAAA,EAAA;AAOhB;AAEO,MAAM,iBAAiB,eAAe;AAAA,EAE3C,YACS,OACA,OACP;AACM,UAAA;AAHC,SAAA,QAAA;AACA,SAAA,QAAA;AAHT,SAAO,OAAO;AAAA,EAAA;AAOhB;AAEO,MAAM,gBAAyB,eAAkB;AAAA,EAEtD,YACS,MACP;AACM,UAAA;AAFC,SAAA,OAAA;AAFT,SAAO,OAAO;AAAA,EAAA;AAMhB;AAEO,MAAM,cAAuB,eAAkB;AAAA,EAEpD,YACS,OACP;AACM,UAAA;AAFC,SAAA,QAAA;AAFT,SAAO,OAAO;AAAA,EAAA;AAMhB;AAEO,MAAM,aAAsB,eAAkB;AAAA,EAEnD,YACS,MACA,MACP;AACM,UAAA;AAHC,SAAA,OAAA;AACA,SAAA,OAAA;AAHT,SAAO,OAAO;AAAA,EAAA;AAOhB;AAOO,MAAM,kBAA2B,eAAkB;AAAA,EAExD,YACS,MACA,MACP;AACM,UAAA;AAHC,SAAA,OAAA;AACA,SAAA,OAAA;AAHT,SAAO,OAAO;AAAA,EAAA;AAOhB;"}
+ {"version":3,"file":"ir.js","sources":["../../../src/query/ir.ts"],"sourcesContent":["/*\nThis is the intermediate representation of the query.\n*/\n\nimport type { CollectionImpl } from \"../collection\"\nimport type { NamespacedRow } from \"../types\"\n\nexport interface QueryIR {\n from: From\n select?: Select\n join?: Join\n where?: Array<Where>\n groupBy?: GroupBy\n having?: Array<Having>\n orderBy?: OrderBy\n limit?: Limit\n offset?: Offset\n\n // Functional variants\n fnSelect?: (row: NamespacedRow) => any\n fnWhere?: Array<(row: NamespacedRow) => any>\n fnHaving?: Array<(row: NamespacedRow) => any>\n}\n\nexport type From = CollectionRef | QueryRef\n\nexport type Select = {\n [alias: string]: BasicExpression | Aggregate\n}\n\nexport type Join = Array<JoinClause>\n\nexport interface JoinClause {\n from: CollectionRef | QueryRef\n type: `left` | `right` | `inner` | `outer` | `full` | `cross`\n left: BasicExpression\n right: BasicExpression\n}\n\nexport type Where = BasicExpression<boolean>\n\nexport type GroupBy = Array<BasicExpression>\n\nexport type Having = Where\n\nexport type OrderBy = Array<OrderByClause>\n\nexport type OrderByClause = {\n expression: BasicExpression\n direction: OrderByDirection\n}\n\nexport type OrderByDirection = `asc` | `desc`\n\nexport type Limit = number\n\nexport type Offset = number\n\n/* Expressions */\n\nabstract class BaseExpression<T = any> {\n public abstract type: string\n /** @internal - Type brand for TypeScript inference */\n declare readonly __returnType: T\n}\n\nexport class CollectionRef extends BaseExpression {\n public type = `collectionRef` as const\n constructor(\n public collection: CollectionImpl,\n public alias: string\n ) {\n super()\n }\n}\n\nexport class QueryRef extends BaseExpression {\n public type = `queryRef` as const\n constructor(\n public query: QueryIR,\n public alias: string\n ) {\n super()\n }\n}\n\nexport class PropRef<T = any> extends BaseExpression<T> {\n public type = `ref` as const\n constructor(\n public path: Array<string> // path to the property in the collection, with the alias as the first element\n ) {\n super()\n }\n}\n\nexport class Value<T = any> extends BaseExpression<T> {\n public type = `val` as const\n constructor(\n public value: T // any js value\n ) {\n super()\n }\n}\n\nexport class Func<T = any> extends BaseExpression<T> {\n public type = `func` as const\n constructor(\n public name: string, // such as eq, gt, lt, upper, lower, etc.\n public args: Array<BasicExpression>\n ) {\n super()\n }\n}\n\n// This is the basic expression type that is used in the majority of expression\n// builder callbacks (select, where, groupBy, having, orderBy, etc.)\n// it doesn't include aggregate functions as those are only used in the select clause\nexport type BasicExpression<T = any> = PropRef<T> | Value<T> | Func<T>\n\nexport class Aggregate<T = any> extends BaseExpression<T> {\n public type = `agg` as const\n constructor(\n public name: string, // such as count, avg, sum, min, max, etc.\n public args: Array<BasicExpression>\n ) {\n super()\n 
}\n}\n"],"names":[],"mappings":"AA4DA,MAAe,eAAwB;AAIvC;AAEO,MAAM,sBAAsB,eAAe;AAAA,EAEhD,YACS,YACA,OACP;AACA,UAAA;AAHO,SAAA,aAAA;AACA,SAAA,QAAA;AAHT,SAAO,OAAO;AAAA,EAMd;AACF;AAEO,MAAM,iBAAiB,eAAe;AAAA,EAE3C,YACS,OACA,OACP;AACA,UAAA;AAHO,SAAA,QAAA;AACA,SAAA,QAAA;AAHT,SAAO,OAAO;AAAA,EAMd;AACF;AAEO,MAAM,gBAAyB,eAAkB;AAAA,EAEtD,YACS,MACP;AACA,UAAA;AAFO,SAAA,OAAA;AAFT,SAAO,OAAO;AAAA,EAKd;AACF;AAEO,MAAM,cAAuB,eAAkB;AAAA,EAEpD,YACS,OACP;AACA,UAAA;AAFO,SAAA,QAAA;AAFT,SAAO,OAAO;AAAA,EAKd;AACF;AAEO,MAAM,aAAsB,eAAkB;AAAA,EAEnD,YACS,MACA,MACP;AACA,UAAA;AAHO,SAAA,OAAA;AACA,SAAA,OAAA;AAHT,SAAO,OAAO;AAAA,EAMd;AACF;AAOO,MAAM,kBAA2B,eAAkB;AAAA,EAExD,YACS,MACA,MACP;AACA,UAAA;AAHO,SAAA,OAAA;AACA,SAAA,OAAA;AAHT,SAAO,OAAO;AAAA,EAMd;AACF;"}
@@ -1 +1 @@
- {"version":3,"file":"live-query-collection.js","sources":["../../../src/query/live-query-collection.ts"],"sourcesContent":["import { D2, MultiSet, output } from \"@electric-sql/d2mini\"\nimport { createCollection } from \"../collection.js\"\nimport { compileQuery } from \"./compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"./builder/index.js\"\nimport type { InitialQueryBuilder, QueryBuilder } from \"./builder/index.js\"\nimport type { Collection } from \"../collection.js\"\nimport type {\n ChangeMessage,\n CollectionConfig,\n KeyedStream,\n ResultStream,\n SyncConfig,\n UtilsRecord,\n} from \"../types.js\"\nimport type { Context, GetResult } from \"./builder/types.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@electric-sql/d2mini\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\n/**\n * Configuration interface for live query collection options\n *\n * @example\n * ```typescript\n * const config: LiveQueryCollectionConfig<any, any> = {\n * // id is optional - will auto-generate \"live-query-1\", \"live-query-2\", etc.\n * query: (q) => q\n * .from({ comment: commentsCollection })\n * .join(\n * { user: usersCollection },\n * ({ comment, user }) => eq(comment.user_id, user.id)\n * )\n * .where(({ comment }) => eq(comment.active, true))\n * .select(({ comment, user }) => ({\n * id: comment.id,\n * content: comment.content,\n * authorName: user.name,\n * })),\n * // getKey is optional - defaults to using stream key\n * getKey: (item) => item.id,\n * }\n * ```\n */\nexport interface LiveQueryCollectionConfig<\n TContext extends Context,\n TResult extends object = GetResult<TContext> & object,\n> {\n /**\n * Unique identifier for the collection\n * If not provided, defaults to `live-query-${number}` with auto-incrementing number\n */\n id?: string\n\n /**\n * Query builder function that defines the live query\n */\n query:\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n | QueryBuilder<TContext>\n\n /**\n * Function to extract the key from result items\n * If not provided, defaults to using the key from the D2 stream\n */\n getKey?: (item: TResult) => string | number\n\n /**\n * Optional schema for validation\n */\n schema?: CollectionConfig<TResult>[`schema`]\n\n /**\n * Optional mutation handlers\n */\n onInsert?: CollectionConfig<TResult>[`onInsert`]\n onUpdate?: CollectionConfig<TResult>[`onUpdate`]\n onDelete?: CollectionConfig<TResult>[`onDelete`]\n\n /**\n * Start sync / the query immediately\n */\n startSync?: boolean\n\n /**\n * GC time for the collection\n */\n gcTime?: number\n}\n\n/**\n * Creates live query collection options for use with createCollection\n *\n * @example\n * ```typescript\n * const options = liveQueryCollectionOptions({\n * // id is optional - will auto-generate if not provided\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => eq(post.published, true))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * content: post.content,\n * })),\n * // getKey is optional - will use stream key if not provided\n * })\n *\n * const collection = createCollection(options)\n * ```\n *\n * @param config - Configuration options for the live query collection\n * @returns Collection options that can be passed to createCollection\n */\nexport function liveQueryCollectionOptions<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n config: LiveQueryCollectionConfig<TContext, TResult>\n): CollectionConfig<TResult> {\n // 
Generate a unique ID if not provided\n const id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n // Build the query using the provided query builder function or instance\n const query =\n typeof config.query === `function`\n ? buildQuery<TContext>(config.query)\n : getQueryIR(config.query)\n\n // WeakMap to store the keys of the results so that we can retreve them in the\n // getKey function\n const resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n const orderByIndices = new WeakMap<object, string>()\n\n // Create compare function for ordering if the query has orderBy\n const compare =\n query.orderBy && query.orderBy.length > 0\n ? (val1: TResult, val2: TResult): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n : undefined\n\n const collections = extractCollectionsFromQuery(query)\n\n const allCollectionsReady = () => {\n return Object.values(collections).every(\n (collection) =>\n collection.status === `ready` || collection.status === `initialCommit`\n )\n }\n\n let graphCache: D2 | undefined\n let inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n let pipelineCache: ResultStream | undefined\n\n const compileBasePipeline = () => {\n graphCache = new D2()\n inputsCache = Object.fromEntries(\n Object.entries(collections).map(([key]) => [\n key,\n graphCache!.newInput<any>(),\n ])\n )\n pipelineCache = compileQuery(\n query,\n inputsCache as Record<string, KeyedStream>\n )\n }\n\n const maybeCompileBasePipeline = () => {\n if (!graphCache || !inputsCache || !pipelineCache) {\n compileBasePipeline()\n }\n return {\n graph: graphCache!,\n inputs: inputsCache!,\n pipeline: pipelineCache!,\n }\n }\n\n // Compile the base pipeline once initially\n // This is done to ensure that any errors are thrown immediately and synchronously\n compileBasePipeline()\n\n // Create the sync configuration\n const sync: SyncConfig<TResult> = {\n rowUpdateMode: `full`,\n sync: ({ begin, write, commit, collection: theCollection }) => {\n const { graph, inputs, pipeline } = maybeCompileBasePipeline()\n let messagesCount = 0\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n messagesCount += messages.length\n\n begin()\n messages\n .reduce((acc, [[key, tupleData], multiplicity]) => {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = tupleData as [\n TResult,\n string | undefined,\n ]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return acc\n }, new Map<unknown, { deletes: number; inserts: number; value: TResult; orderByIndex: string | undefined }>())\n .forEach((changes, rawKey) => {\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n resultKeys.set(value, rawKey)\n\n // 
Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes &&\n theCollection.has(rawKey as string | number))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `This should never happen ${JSON.stringify(changes)}`\n )\n }\n })\n commit()\n })\n )\n\n graph.finalize()\n\n const maybeRunGraph = () => {\n // We only run the graph if all the collections are ready\n if (allCollectionsReady()) {\n graph.run()\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (messagesCount === 0) {\n begin()\n commit()\n }\n }\n }\n\n // Unsubscribe callbacks\n const unsubscribeCallbacks = new Set<() => void>()\n\n // Set up data flow from input collections to the compiled query\n Object.entries(collections).forEach(([collectionId, collection]) => {\n const input = inputs[collectionId]!\n\n // Subscribe to changes\n const unsubscribe = collection.subscribeChanges(\n (changes: Array<ChangeMessage>) => {\n sendChangesToInput(input, changes, collection.config.getKey)\n maybeRunGraph()\n },\n { includeInitialState: true }\n )\n unsubscribeCallbacks.add(unsubscribe)\n })\n\n // Initial run\n maybeRunGraph()\n\n // Return the unsubscribe function\n return () => {\n unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n }\n },\n }\n\n // Return collection configuration\n return {\n id,\n getKey:\n config.getKey || ((item) => resultKeys.get(item) as string | number),\n sync,\n compare,\n gcTime: config.gcTime || 5000, // 5 seconds by default for live queries\n schema: config.schema,\n onInsert: config.onInsert,\n onUpdate: config.onUpdate,\n onDelete: config.onDelete,\n startSync: config.startSync,\n }\n}\n\n/**\n * Creates a live query collection directly\n *\n * @example\n * ```typescript\n * // Minimal usage - just pass a query function\n * const activeUsers = createLiveQueryCollection(\n * (q) => q\n * .from({ user: usersCollection })\n * .where(({ user }) => eq(user.active, true))\n * .select(({ user }) => ({ id: user.id, name: user.name }))\n * )\n *\n * // Full configuration with custom options\n * const searchResults = createLiveQueryCollection({\n * id: \"search-results\", // Custom ID (auto-generated if omitted)\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => like(post.title, `%${searchTerm}%`))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * excerpt: post.excerpt,\n * })),\n * getKey: (item) => item.id, // Custom key function (uses stream key if omitted)\n * utils: {\n * updateSearchTerm: (newTerm: string) => {\n * // Custom utility functions\n * }\n * }\n * })\n * ```\n */\n\n// Overload 1: Accept just the query function\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n query: (q: InitialQueryBuilder) => QueryBuilder<TContext>\n): Collection<TResult, string | number, {}>\n\n// Overload 2: Accept full config object with optional utilities\nexport function createLiveQueryCollection<\n TContext 
extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n config: LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils>\n\n// Implementation\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n configOrQuery:\n | (LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils })\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n): Collection<TResult, string | number, TUtils> {\n // Determine if the argument is a function (query) or a config object\n if (typeof configOrQuery === `function`) {\n // Simple query function case\n const config: LiveQueryCollectionConfig<TContext, TResult> = {\n query: configOrQuery as (\n q: InitialQueryBuilder\n ) => QueryBuilder<TContext>,\n }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection(options)\n } else {\n // Config object case\n const config = configOrQuery as LiveQueryCollectionConfig<\n TContext,\n TResult\n > & { utils?: TUtils }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection({\n ...options,\n utils: config.utils,\n })\n }\n}\n\n/**\n * Bridge function that handles the type compatibility between query2's TResult\n * and core collection's ResolveType without exposing ugly type assertions to users\n */\nfunction bridgeToCreateCollection<\n TResult extends object,\n TUtils extends UtilsRecord = {},\n>(\n options: CollectionConfig<TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils> {\n // This is the only place we need a type assertion, hidden from user API\n return createCollection(options as any) as unknown as Collection<\n TResult,\n string | number,\n TUtils\n >\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Array<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n) {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n input.sendData(new MultiSet(multiSetArray))\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && 
Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n"],"names":[],"mappings":";;;;AAkBA,IAAI,6BAA6B;AAgG1B,SAAS,2BAId,QAC2B;AAE3B,QAAM,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAG5D,QAAA,QACJ,OAAO,OAAO,UAAU,aACpB,WAAqB,OAAO,KAAK,IACjC,WAAW,OAAO,KAAK;AAIvB,QAAA,iCAAiB,QAAyB;AAG1C,QAAA,qCAAqB,QAAwB;AAG7C,QAAA,UACJ,MAAM,WAAW,MAAM,QAAQ,SAAS,IACpC,CAAC,MAAe,SAA0B;AAElC,UAAA,SAAS,eAAe,IAAI,IAAI;AAChC,UAAA,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACZ,eAAA;AAAA,MAAA,WACE,SAAS,QAAQ;AACnB,eAAA;AAAA,MAAA,OACF;AACE,eAAA;AAAA,MAAA;AAAA,IACT;AAIK,WAAA;AAAA,EAAA,IAET;AAEA,QAAA,cAAc,4BAA4B,KAAK;AAErD,QAAM,sBAAsB,MAAM;AACzB,WAAA,OAAO,OAAO,WAAW,EAAE;AAAA,MAChC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAC3D;AAAA,EACF;AAEI,MAAA;AACA,MAAA;AACA,MAAA;AAEJ,QAAM,sBAAsB,MAAM;AAChC,iBAAa,IAAI,GAAG;AACpB,kBAAc,OAAO;AAAA,MACnB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QACzC;AAAA,QACA,WAAY,SAAc;AAAA,MAC3B,CAAA;AAAA,IACH;AACgB,oBAAA;AAAA,MACd;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,2BAA2B,MAAM;AACrC,QAAI,CAAC,cAAc,CAAC,eAAe,CAAC,eAAe;AAC7B,0BAAA;AAAA,IAAA;AAEf,WAAA;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,IACZ;AAAA,EACF;AAIoB,sBAAA;AAGpB,QAAM,OAA4B;AAAA,IAChC,eAAe;AAAA,IACf,MAAM,CAAC,EAAE,OAAO,OAAO,QAAQ,YAAY,oBAAoB;AAC7D,YAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,yBAAyB;AAC7D,UAAI,gBAAgB;AACX,eAAA;AAAA,QACP,OAAO,CAAC,SAAS;AACT,gBAAA,WAAW,KAAK,SAAS;AAC/B,2BAAiB,SAAS;AAEpB,gBAAA;AAEH,mBAAA,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,MAAM;AAG3C,kBAAA,CAAC,OAAO,YAAY,IAAI;AAK9B,kBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,cAC9B,SAAS;AAAA,cACT,SAAS;AAAA,cACT;AAAA,cACA;AAAA,YACF;AACA,gBAAI,eAAe,GAAG;AACZ,sBAAA,WAAW,KAAK,IAAI,YAAY;AAAA,YAAA,WAC/B,eAAe,GAAG;AAC3B,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ;AAChB,sBAAQ,eAAe;AAAA,YAAA;AAErB,gBAAA,IAAI,KAAK,OAAO;AACb,mBAAA;AAAA,UAAA,uBACF,IAAqG,CAAC,EAC5G,QAAQ,CAAC,SAAS,WAAW;AAC5B,kBAAM,EAAE,SAAS,SAAS,OAAO,aAAiB,IAAA;AAIvC,uBAAA,IAAI,OAAO,MAAM;AAG5B,gBAAI,iBAAiB,QAAW;AACf,6BAAA,IAAI,OAAO,YAAY;AAAA,YAAA;AAIpC,gBAAA,WAAW,YAAY,GAAG;AACtB,oBAAA;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAAA;AAAA;AAAA,cAGD,UAAU;AAAA;AAAA,cAGT,YAAY,WACX,cAAc,IAAI,MAAyB;AAAA,cAC7C;AACM,oBAAA;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAAA,WAEQ,UAAU,GAAG;AAChB,oBAAA;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAAA,OACI;AACL,oBAAM,IAAI;AAAA,gBACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,cACrD;AAAA,YAAA;AAAA,UACF,CACD;AACI,iBAAA;AAAA,QACR,CAAA;AAAA,MACH;AAEA,YAAM,SAAS;AAEf,YAAM,gBAAgB,MAAM;AAE1B,YAAI,uBAAuB;AACzB,gBAAM,IAAI;AAGV,cAAI,kBAAkB,GAAG;AACjB,kBAAA;AACC,mBAAA;AAAA,UAAA;AAAA,QACT;AAAA,MAEJ;AAGM,YAAA,2CAA2B,IAAgB;AAG1C,aAAA,QAAQ,WAAW,EAAE,QAAQ,CAAC,CAAC,cAAc,UAAU,MAAM;AAC5D,cAAA,QAAQ,OAAO,YAAY;AAGjC,cAAM,cAAc,WAAW;AAAA,UAC7B,CAAC,YAAkC;AACjC,+BAAmB,OAAO,SAAS,WAAW,OAAO,MAAM;AAC7C,0BAAA;AAAA,UAChB;AAAA,UACA,EAAE,qBAAqB,KAAK;AAAA,QAC9B;AACA,6BAAqB,IAAI,WAAW;AAAA,MAAA,CACrC;AAGa,oBAAA;AAGd,aAAO,MAAM;AACX,6BAAqB,QAAQ,CAAC,gBAAgB,YAAA,CAAa;AAAA,MAC7D;AAAA,IAAA;AAAA,EAEJ;AAGO,SAAA;AAAA,IACL;AAAA,IACA,QACE,OAAO,WAAW,CAAC,SAAS,WAAW,IAAI,IAAI;AAAA,IACjD;AAAA,IACA;AAAA,IACA,QAAQ,OAAO,UAAU;AAAA;AAAA,IACzB,QAAQ,OAAO;AAAA,IACf,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,WAAW,OAAO;AAAA,EACpB;AACF;AAsDO,SAAS,0BAKd,eAG8C;AAE1C,MAAA,OAAO,kBAAkB,YAAY;AAEvC,UAAM,SAAuD;AAAA,MAC3D,OAAO;AAAA,IAGT;AACM,UAAA,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB,OAAO;AAAA,EAAA,OAClC;AAEL,UAAM,SAAS;AAIT,UAAA,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB;AAAA,MAC9
B,GAAG;AAAA,MACH,OAAO,OAAO;AAAA,IAAA,CACf;AAAA,EAAA;AAEL;AAMA,SAAS,yBAIP,SAC8C;AAE9C,SAAO,iBAAiB,OAAc;AAKxC;AAKA,SAAS,mBACP,OACA,SACA,QACA;AACA,QAAM,gBAAwC,CAAC;AAC/C,aAAW,UAAU,SAAS;AACtB,UAAA,MAAM,OAAO,OAAO,KAAK;AAC3B,QAAA,OAAO,SAAS,UAAU;AACd,oBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACrB,oBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACtC,oBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAAA,OACtC;AAES,oBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAAA;AAAA,EAC9C;AAEF,QAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAC5C;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAC;AAG1C,WAAS,kBAAkB,QAAa;AAClC,QAAA,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAAA;AAAA,EAC/B;AAIF,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAAA;AAI1B,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACxB,iBAAA,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QAAA;AAAA,MACnC;AAAA,IACF;AAAA,EACF;AAIF,mBAAiB,KAAK;AAEf,SAAA;AACT;"}
1
+ {"version":3,"file":"live-query-collection.js","sources":["../../../src/query/live-query-collection.ts"],"sourcesContent":["import { D2, MultiSet, output } from \"@electric-sql/d2mini\"\nimport { createCollection } from \"../collection.js\"\nimport { compileQuery } from \"./compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"./builder/index.js\"\nimport type { InitialQueryBuilder, QueryBuilder } from \"./builder/index.js\"\nimport type { Collection } from \"../collection.js\"\nimport type {\n ChangeMessage,\n CollectionConfig,\n KeyedStream,\n ResultStream,\n SyncConfig,\n UtilsRecord,\n} from \"../types.js\"\nimport type { Context, GetResult } from \"./builder/types.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@electric-sql/d2mini\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\n/**\n * Configuration interface for live query collection options\n *\n * @example\n * ```typescript\n * const config: LiveQueryCollectionConfig<any, any> = {\n * // id is optional - will auto-generate \"live-query-1\", \"live-query-2\", etc.\n * query: (q) => q\n * .from({ comment: commentsCollection })\n * .join(\n * { user: usersCollection },\n * ({ comment, user }) => eq(comment.user_id, user.id)\n * )\n * .where(({ comment }) => eq(comment.active, true))\n * .select(({ comment, user }) => ({\n * id: comment.id,\n * content: comment.content,\n * authorName: user.name,\n * })),\n * // getKey is optional - defaults to using stream key\n * getKey: (item) => item.id,\n * }\n * ```\n */\nexport interface LiveQueryCollectionConfig<\n TContext extends Context,\n TResult extends object = GetResult<TContext> & object,\n> {\n /**\n * Unique identifier for the collection\n * If not provided, defaults to `live-query-${number}` with auto-incrementing number\n */\n id?: string\n\n /**\n * Query builder function that defines the live query\n */\n query:\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n | QueryBuilder<TContext>\n\n /**\n * Function to extract the key from result items\n * If not provided, defaults to using the key from the D2 stream\n */\n getKey?: (item: TResult) => string | number\n\n /**\n * Optional schema for validation\n */\n schema?: CollectionConfig<TResult>[`schema`]\n\n /**\n * Optional mutation handlers\n */\n onInsert?: CollectionConfig<TResult>[`onInsert`]\n onUpdate?: CollectionConfig<TResult>[`onUpdate`]\n onDelete?: CollectionConfig<TResult>[`onDelete`]\n\n /**\n * Start sync / the query immediately\n */\n startSync?: boolean\n\n /**\n * GC time for the collection\n */\n gcTime?: number\n}\n\n/**\n * Creates live query collection options for use with createCollection\n *\n * @example\n * ```typescript\n * const options = liveQueryCollectionOptions({\n * // id is optional - will auto-generate if not provided\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => eq(post.published, true))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * content: post.content,\n * })),\n * // getKey is optional - will use stream key if not provided\n * })\n *\n * const collection = createCollection(options)\n * ```\n *\n * @param config - Configuration options for the live query collection\n * @returns Collection options that can be passed to createCollection\n */\nexport function liveQueryCollectionOptions<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n config: LiveQueryCollectionConfig<TContext, TResult>\n): CollectionConfig<TResult> {\n // 
Generate a unique ID if not provided\n const id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n // Build the query using the provided query builder function or instance\n const query =\n typeof config.query === `function`\n ? buildQuery<TContext>(config.query)\n : getQueryIR(config.query)\n\n // WeakMap to store the keys of the results so that we can retreve them in the\n // getKey function\n const resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n const orderByIndices = new WeakMap<object, string>()\n\n // Create compare function for ordering if the query has orderBy\n const compare =\n query.orderBy && query.orderBy.length > 0\n ? (val1: TResult, val2: TResult): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n : undefined\n\n const collections = extractCollectionsFromQuery(query)\n\n const allCollectionsReady = () => {\n return Object.values(collections).every(\n (collection) =>\n collection.status === `ready` || collection.status === `initialCommit`\n )\n }\n\n let graphCache: D2 | undefined\n let inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n let pipelineCache: ResultStream | undefined\n\n const compileBasePipeline = () => {\n graphCache = new D2()\n inputsCache = Object.fromEntries(\n Object.entries(collections).map(([key]) => [\n key,\n graphCache!.newInput<any>(),\n ])\n )\n pipelineCache = compileQuery(\n query,\n inputsCache as Record<string, KeyedStream>\n )\n }\n\n const maybeCompileBasePipeline = () => {\n if (!graphCache || !inputsCache || !pipelineCache) {\n compileBasePipeline()\n }\n return {\n graph: graphCache!,\n inputs: inputsCache!,\n pipeline: pipelineCache!,\n }\n }\n\n // Compile the base pipeline once initially\n // This is done to ensure that any errors are thrown immediately and synchronously\n compileBasePipeline()\n\n // Create the sync configuration\n const sync: SyncConfig<TResult> = {\n rowUpdateMode: `full`,\n sync: ({ begin, write, commit, collection: theCollection }) => {\n const { graph, inputs, pipeline } = maybeCompileBasePipeline()\n let messagesCount = 0\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n messagesCount += messages.length\n\n begin()\n messages\n .reduce((acc, [[key, tupleData], multiplicity]) => {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = tupleData as [\n TResult,\n string | undefined,\n ]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return acc\n }, new Map<unknown, { deletes: number; inserts: number; value: TResult; orderByIndex: string | undefined }>())\n .forEach((changes, rawKey) => {\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n resultKeys.set(value, rawKey)\n\n // 
Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes &&\n theCollection.has(rawKey as string | number))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `This should never happen ${JSON.stringify(changes)}`\n )\n }\n })\n commit()\n })\n )\n\n graph.finalize()\n\n const maybeRunGraph = () => {\n // We only run the graph if all the collections are ready\n if (allCollectionsReady()) {\n graph.run()\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (messagesCount === 0) {\n begin()\n commit()\n }\n }\n }\n\n // Unsubscribe callbacks\n const unsubscribeCallbacks = new Set<() => void>()\n\n // Set up data flow from input collections to the compiled query\n Object.entries(collections).forEach(([collectionId, collection]) => {\n const input = inputs[collectionId]!\n\n // Subscribe to changes\n const unsubscribe = collection.subscribeChanges(\n (changes: Array<ChangeMessage>) => {\n sendChangesToInput(input, changes, collection.config.getKey)\n maybeRunGraph()\n },\n { includeInitialState: true }\n )\n unsubscribeCallbacks.add(unsubscribe)\n })\n\n // Initial run\n maybeRunGraph()\n\n // Return the unsubscribe function\n return () => {\n unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n }\n },\n }\n\n // Return collection configuration\n return {\n id,\n getKey:\n config.getKey || ((item) => resultKeys.get(item) as string | number),\n sync,\n compare,\n gcTime: config.gcTime || 5000, // 5 seconds by default for live queries\n schema: config.schema,\n onInsert: config.onInsert,\n onUpdate: config.onUpdate,\n onDelete: config.onDelete,\n startSync: config.startSync,\n }\n}\n\n/**\n * Creates a live query collection directly\n *\n * @example\n * ```typescript\n * // Minimal usage - just pass a query function\n * const activeUsers = createLiveQueryCollection(\n * (q) => q\n * .from({ user: usersCollection })\n * .where(({ user }) => eq(user.active, true))\n * .select(({ user }) => ({ id: user.id, name: user.name }))\n * )\n *\n * // Full configuration with custom options\n * const searchResults = createLiveQueryCollection({\n * id: \"search-results\", // Custom ID (auto-generated if omitted)\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => like(post.title, `%${searchTerm}%`))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * excerpt: post.excerpt,\n * })),\n * getKey: (item) => item.id, // Custom key function (uses stream key if omitted)\n * utils: {\n * updateSearchTerm: (newTerm: string) => {\n * // Custom utility functions\n * }\n * }\n * })\n * ```\n */\n\n// Overload 1: Accept just the query function\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n query: (q: InitialQueryBuilder) => QueryBuilder<TContext>\n): Collection<TResult, string | number, {}>\n\n// Overload 2: Accept full config object with optional utilities\nexport function createLiveQueryCollection<\n TContext 
extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n config: LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils>\n\n// Implementation\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n configOrQuery:\n | (LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils })\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n): Collection<TResult, string | number, TUtils> {\n // Determine if the argument is a function (query) or a config object\n if (typeof configOrQuery === `function`) {\n // Simple query function case\n const config: LiveQueryCollectionConfig<TContext, TResult> = {\n query: configOrQuery as (\n q: InitialQueryBuilder\n ) => QueryBuilder<TContext>,\n }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection(options)\n } else {\n // Config object case\n const config = configOrQuery as LiveQueryCollectionConfig<\n TContext,\n TResult\n > & { utils?: TUtils }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection({\n ...options,\n utils: config.utils,\n })\n }\n}\n\n/**\n * Bridge function that handles the type compatibility between query2's TResult\n * and core collection's ResolveType without exposing ugly type assertions to users\n */\nfunction bridgeToCreateCollection<\n TResult extends object,\n TUtils extends UtilsRecord = {},\n>(\n options: CollectionConfig<TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils> {\n // This is the only place we need a type assertion, hidden from user API\n return createCollection(options as any) as unknown as Collection<\n TResult,\n string | number,\n TUtils\n >\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Array<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n) {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n input.sendData(new MultiSet(multiSetArray))\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && 
Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n"],"names":[],"mappings":";;;;AAkBA,IAAI,6BAA6B;AAgG1B,SAAS,2BAId,QAC2B;AAE3B,QAAM,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAGlE,QAAM,QACJ,OAAO,OAAO,UAAU,aACpB,WAAqB,OAAO,KAAK,IACjC,WAAW,OAAO,KAAK;AAI7B,QAAM,iCAAiB,QAAA;AAGvB,QAAM,qCAAqB,QAAA;AAG3B,QAAM,UACJ,MAAM,WAAW,MAAM,QAAQ,SAAS,IACpC,CAAC,MAAe,SAA0B;AAExC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT,IACA;AAEN,QAAM,cAAc,4BAA4B,KAAK;AAErD,QAAM,sBAAsB,MAAM;AAChC,WAAO,OAAO,OAAO,WAAW,EAAE;AAAA,MAChC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAAA;AAAA,EAE7D;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AAEJ,QAAM,sBAAsB,MAAM;AAChC,iBAAa,IAAI,GAAA;AACjB,kBAAc,OAAO;AAAA,MACnB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QACzC;AAAA,QACA,WAAY,SAAA;AAAA,MAAc,CAC3B;AAAA,IAAA;AAEH,oBAAgB;AAAA,MACd;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,2BAA2B,MAAM;AACrC,QAAI,CAAC,cAAc,CAAC,eAAe,CAAC,eAAe;AACjD,0BAAA;AAAA,IACF;AACA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,IAAA;AAAA,EAEd;AAIA,sBAAA;AAGA,QAAM,OAA4B;AAAA,IAChC,eAAe;AAAA,IACf,MAAM,CAAC,EAAE,OAAO,OAAO,QAAQ,YAAY,oBAAoB;AAC7D,YAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,yBAAA;AACpC,UAAI,gBAAgB;AACpB,eAAS;AAAA,QACP,OAAO,CAAC,SAAS;AACf,gBAAM,WAAW,KAAK,SAAA;AACtB,2BAAiB,SAAS;AAE1B,gBAAA;AACA,mBACG,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,MAAM;AAGjD,kBAAM,CAAC,OAAO,YAAY,IAAI;AAK9B,kBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,cAC9B,SAAS;AAAA,cACT,SAAS;AAAA,cACT;AAAA,cACA;AAAA,YAAA;AAEF,gBAAI,eAAe,GAAG;AACpB,sBAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,YAC1C,WAAW,eAAe,GAAG;AAC3B,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ;AAChB,sBAAQ,eAAe;AAAA,YACzB;AACA,gBAAI,IAAI,KAAK,OAAO;AACpB,mBAAO;AAAA,UACT,uBAAO,IAAA,CAAsG,EAC5G,QAAQ,CAAC,SAAS,WAAW;AAC5B,kBAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,uBAAW,IAAI,OAAO,MAAM;AAG5B,gBAAI,iBAAiB,QAAW;AAC9B,6BAAe,IAAI,OAAO,YAAY;AAAA,YACxC;AAGA,gBAAI,WAAW,YAAY,GAAG;AAC5B,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH;AAAA;AAAA,cAEE,UAAU;AAAA;AAAA,cAGT,YAAY,WACX,cAAc,IAAI,MAAyB;AAAA,cAC7C;AACA,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAEH,WAAW,UAAU,GAAG;AACtB,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH,OAAO;AACL,oBAAM,IAAI;AAAA,gBACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,cAAA;AAAA,YAEvD;AAAA,UACF,CAAC;AACH,iBAAA;AAAA,QACF,CAAC;AAAA,MAAA;AAGH,YAAM,SAAA;AAEN,YAAM,gBAAgB,MAAM;AAE1B,YAAI,uBAAuB;AACzB,gBAAM,IAAA;AAGN,cAAI,kBAAkB,GAAG;AACvB,kBAAA;AACA,mBAAA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,YAAM,2CAA2B,IAAA;AAGjC,aAAO,QAAQ,WAAW,EAAE,QAAQ,CAAC,CAAC,cAAc,UAAU,MAAM;AAClE,cAAM,QAAQ,OAAO,YAAY;AAGjC,cAAM,cAAc,WAAW;AAAA,UAC7B,CAAC,YAAkC;AACjC,+BAAmB,OAAO,SAAS,WAAW,OAAO,MAAM;AAC3D,0BAAA;AAAA,UACF;AAAA,UACA,EAAE,qBAAqB,KAAA;AAAA,QAAK;AAE9B,6BAAqB,IAAI,WAAW;AAAA,MACtC,CAAC;AAGD,oBAAA;AAGA,aAAO,MAAM;AACX,6BAAqB,QAAQ,CAAC,gBAAgB,YAAA,CAAa;AAAA,MAC7D;AAAA,IACF;AAAA,EAAA;AAIF,SAAO;AAAA,IACL;AAAA,IACA,QACE,OAAO,WAAW,CAAC,SAAS,WAAW,IAAI,IAAI;AAAA,IACjD;AAAA,IACA;AAAA,IACA,QAAQ,OAAO,UAAU;AAAA;AAAA,IACzB,QAAQ,OAAO;AAAA,IACf,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,WAAW,OAAO;AAAA,EAAA;AAEtB;AAsDO,SAAS,0BAKd,eAG8C;AAE9C,MAAI,OAAO,kBAAkB,YAAY;AAEvC,UAAM,SAAuD;AAAA,MAC3D,OAAO;AAAA,IAAA;AAIT,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB,OAAO;AAAA,EACzC,OAAO;AAEL,UAAM,SAAS;AAIf,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB;AAAA,MAC9B,GAAG
;AAAA,MACH,OAAO,OAAO;AAAA,IAAA,CACf;AAAA,EACH;AACF;AAMA,SAAS,yBAIP,SAC8C;AAE9C,SAAO,iBAAiB,OAAc;AAKxC;AAKA,SAAS,mBACP,OACA,SACA,QACA;AACA,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AACA,QAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAC5C;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;"}