@tanstack/db 0.0.27 → 0.0.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (167)
  1. package/dist/cjs/change-events.cjs +141 -0
  2. package/dist/cjs/change-events.cjs.map +1 -0
  3. package/dist/cjs/change-events.d.cts +49 -0
  4. package/dist/cjs/collection.cjs +234 -86
  5. package/dist/cjs/collection.cjs.map +1 -1
  6. package/dist/cjs/collection.d.cts +95 -20
  7. package/dist/cjs/errors.cjs +509 -1
  8. package/dist/cjs/errors.cjs.map +1 -1
  9. package/dist/cjs/errors.d.cts +225 -1
  10. package/dist/cjs/index.cjs +82 -3
  11. package/dist/cjs/index.cjs.map +1 -1
  12. package/dist/cjs/index.d.cts +5 -1
  13. package/dist/cjs/indexes/auto-index.cjs +64 -0
  14. package/dist/cjs/indexes/auto-index.cjs.map +1 -0
  15. package/dist/cjs/indexes/auto-index.d.cts +9 -0
  16. package/dist/cjs/indexes/base-index.cjs +46 -0
  17. package/dist/cjs/indexes/base-index.cjs.map +1 -0
  18. package/dist/cjs/indexes/base-index.d.cts +54 -0
  19. package/dist/cjs/indexes/btree-index.cjs +191 -0
  20. package/dist/cjs/indexes/btree-index.cjs.map +1 -0
  21. package/dist/cjs/indexes/btree-index.d.cts +74 -0
  22. package/dist/cjs/indexes/index-options.d.cts +13 -0
  23. package/dist/cjs/indexes/lazy-index.cjs +193 -0
  24. package/dist/cjs/indexes/lazy-index.cjs.map +1 -0
  25. package/dist/cjs/indexes/lazy-index.d.cts +96 -0
  26. package/dist/cjs/local-storage.cjs +9 -15
  27. package/dist/cjs/local-storage.cjs.map +1 -1
  28. package/dist/cjs/query/builder/functions.cjs +11 -0
  29. package/dist/cjs/query/builder/functions.cjs.map +1 -1
  30. package/dist/cjs/query/builder/functions.d.cts +4 -0
  31. package/dist/cjs/query/builder/index.cjs +6 -7
  32. package/dist/cjs/query/builder/index.cjs.map +1 -1
  33. package/dist/cjs/query/builder/ref-proxy.cjs +37 -0
  34. package/dist/cjs/query/builder/ref-proxy.cjs.map +1 -1
  35. package/dist/cjs/query/builder/ref-proxy.d.cts +12 -0
  36. package/dist/cjs/query/compiler/evaluators.cjs +83 -58
  37. package/dist/cjs/query/compiler/evaluators.cjs.map +1 -1
  38. package/dist/cjs/query/compiler/evaluators.d.cts +8 -0
  39. package/dist/cjs/query/compiler/expressions.cjs +61 -0
  40. package/dist/cjs/query/compiler/expressions.cjs.map +1 -0
  41. package/dist/cjs/query/compiler/expressions.d.cts +25 -0
  42. package/dist/cjs/query/compiler/group-by.cjs +5 -10
  43. package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
  44. package/dist/cjs/query/compiler/index.cjs +23 -17
  45. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  46. package/dist/cjs/query/compiler/index.d.cts +12 -3
  47. package/dist/cjs/query/compiler/joins.cjs +61 -12
  48. package/dist/cjs/query/compiler/joins.cjs.map +1 -1
  49. package/dist/cjs/query/compiler/order-by.cjs +4 -34
  50. package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
  51. package/dist/cjs/query/compiler/types.d.cts +2 -2
  52. package/dist/cjs/query/live-query-collection.cjs +54 -12
  53. package/dist/cjs/query/live-query-collection.cjs.map +1 -1
  54. package/dist/cjs/query/optimizer.cjs +45 -7
  55. package/dist/cjs/query/optimizer.cjs.map +1 -1
  56. package/dist/cjs/query/optimizer.d.cts +13 -3
  57. package/dist/cjs/transactions.cjs +5 -4
  58. package/dist/cjs/transactions.cjs.map +1 -1
  59. package/dist/cjs/types.d.cts +31 -0
  60. package/dist/cjs/utils/array-utils.d.cts +8 -0
  61. package/dist/cjs/utils/btree.cjs +677 -0
  62. package/dist/cjs/utils/btree.cjs.map +1 -0
  63. package/dist/cjs/utils/btree.d.cts +197 -0
  64. package/dist/cjs/utils/comparison.cjs +52 -0
  65. package/dist/cjs/utils/comparison.cjs.map +1 -0
  66. package/dist/cjs/utils/comparison.d.cts +11 -0
  67. package/dist/cjs/utils/index-optimization.cjs +270 -0
  68. package/dist/cjs/utils/index-optimization.cjs.map +1 -0
  69. package/dist/cjs/utils/index-optimization.d.cts +29 -0
  70. package/dist/esm/change-events.d.ts +49 -0
  71. package/dist/esm/change-events.js +141 -0
  72. package/dist/esm/change-events.js.map +1 -0
  73. package/dist/esm/collection.d.ts +95 -20
  74. package/dist/esm/collection.js +232 -84
  75. package/dist/esm/collection.js.map +1 -1
  76. package/dist/esm/errors.d.ts +225 -1
  77. package/dist/esm/errors.js +510 -2
  78. package/dist/esm/errors.js.map +1 -1
  79. package/dist/esm/index.d.ts +5 -1
  80. package/dist/esm/index.js +81 -2
  81. package/dist/esm/index.js.map +1 -1
  82. package/dist/esm/indexes/auto-index.d.ts +9 -0
  83. package/dist/esm/indexes/auto-index.js +64 -0
  84. package/dist/esm/indexes/auto-index.js.map +1 -0
  85. package/dist/esm/indexes/base-index.d.ts +54 -0
  86. package/dist/esm/indexes/base-index.js +46 -0
  87. package/dist/esm/indexes/base-index.js.map +1 -0
  88. package/dist/esm/indexes/btree-index.d.ts +74 -0
  89. package/dist/esm/indexes/btree-index.js +191 -0
  90. package/dist/esm/indexes/btree-index.js.map +1 -0
  91. package/dist/esm/indexes/index-options.d.ts +13 -0
  92. package/dist/esm/indexes/lazy-index.d.ts +96 -0
  93. package/dist/esm/indexes/lazy-index.js +193 -0
  94. package/dist/esm/indexes/lazy-index.js.map +1 -0
  95. package/dist/esm/local-storage.js +9 -15
  96. package/dist/esm/local-storage.js.map +1 -1
  97. package/dist/esm/query/builder/functions.d.ts +4 -0
  98. package/dist/esm/query/builder/functions.js +11 -0
  99. package/dist/esm/query/builder/functions.js.map +1 -1
  100. package/dist/esm/query/builder/index.js +6 -7
  101. package/dist/esm/query/builder/index.js.map +1 -1
  102. package/dist/esm/query/builder/ref-proxy.d.ts +12 -0
  103. package/dist/esm/query/builder/ref-proxy.js +37 -0
  104. package/dist/esm/query/builder/ref-proxy.js.map +1 -1
  105. package/dist/esm/query/compiler/evaluators.d.ts +8 -0
  106. package/dist/esm/query/compiler/evaluators.js +84 -59
  107. package/dist/esm/query/compiler/evaluators.js.map +1 -1
  108. package/dist/esm/query/compiler/expressions.d.ts +25 -0
  109. package/dist/esm/query/compiler/expressions.js +61 -0
  110. package/dist/esm/query/compiler/expressions.js.map +1 -0
  111. package/dist/esm/query/compiler/group-by.js +5 -10
  112. package/dist/esm/query/compiler/group-by.js.map +1 -1
  113. package/dist/esm/query/compiler/index.d.ts +12 -3
  114. package/dist/esm/query/compiler/index.js +23 -17
  115. package/dist/esm/query/compiler/index.js.map +1 -1
  116. package/dist/esm/query/compiler/joins.js +61 -12
  117. package/dist/esm/query/compiler/joins.js.map +1 -1
  118. package/dist/esm/query/compiler/order-by.js +1 -31
  119. package/dist/esm/query/compiler/order-by.js.map +1 -1
  120. package/dist/esm/query/compiler/types.d.ts +2 -2
  121. package/dist/esm/query/live-query-collection.js +54 -12
  122. package/dist/esm/query/live-query-collection.js.map +1 -1
  123. package/dist/esm/query/optimizer.d.ts +13 -3
  124. package/dist/esm/query/optimizer.js +40 -2
  125. package/dist/esm/query/optimizer.js.map +1 -1
  126. package/dist/esm/transactions.js +5 -4
  127. package/dist/esm/transactions.js.map +1 -1
  128. package/dist/esm/types.d.ts +31 -0
  129. package/dist/esm/utils/array-utils.d.ts +8 -0
  130. package/dist/esm/utils/btree.d.ts +197 -0
  131. package/dist/esm/utils/btree.js +677 -0
  132. package/dist/esm/utils/btree.js.map +1 -0
  133. package/dist/esm/utils/comparison.d.ts +11 -0
  134. package/dist/esm/utils/comparison.js +52 -0
  135. package/dist/esm/utils/comparison.js.map +1 -0
  136. package/dist/esm/utils/index-optimization.d.ts +29 -0
  137. package/dist/esm/utils/index-optimization.js +270 -0
  138. package/dist/esm/utils/index-optimization.js.map +1 -0
  139. package/package.json +1 -1
  140. package/src/change-events.ts +257 -0
  141. package/src/collection.ts +316 -105
  142. package/src/errors.ts +545 -1
  143. package/src/index.ts +7 -1
  144. package/src/indexes/auto-index.ts +108 -0
  145. package/src/indexes/base-index.ts +119 -0
  146. package/src/indexes/btree-index.ts +263 -0
  147. package/src/indexes/index-options.ts +42 -0
  148. package/src/indexes/lazy-index.ts +251 -0
  149. package/src/local-storage.ts +16 -17
  150. package/src/query/builder/functions.ts +14 -0
  151. package/src/query/builder/index.ts +12 -7
  152. package/src/query/builder/ref-proxy.ts +65 -0
  153. package/src/query/compiler/evaluators.ts +114 -62
  154. package/src/query/compiler/expressions.ts +92 -0
  155. package/src/query/compiler/group-by.ts +10 -10
  156. package/src/query/compiler/index.ts +52 -22
  157. package/src/query/compiler/joins.ts +114 -15
  158. package/src/query/compiler/order-by.ts +1 -45
  159. package/src/query/compiler/types.ts +2 -2
  160. package/src/query/live-query-collection.ts +95 -15
  161. package/src/query/optimizer.ts +94 -5
  162. package/src/transactions.ts +10 -4
  163. package/src/types.ts +38 -0
  164. package/src/utils/array-utils.ts +28 -0
  165. package/src/utils/btree.ts +1010 -0
  166. package/src/utils/comparison.ts +79 -0
  167. package/src/utils/index-optimization.ts +546 -0
@@ -1,4 +1,5 @@
  import { map, join, consolidate, filter } from "@electric-sql/d2mini";
+ import { UnsupportedJoinTypeError, UnsupportedJoinSourceTypeError, CollectionInputNotFoundError, InvalidJoinConditionSameTableError, InvalidJoinConditionTableMismatchError, InvalidJoinConditionWrongTablesError } from "../../errors.js";
  import { compileExpression } from "./evaluators.js";
  import { compileQuery } from "./index.js";
  function processJoins(pipeline, joinClauses, tables, mainTableAlias, allInputs, cache, queryMapping) {
@@ -25,23 +26,29 @@ function processJoin(pipeline, joinClause, tables, mainTableAlias, allInputs, ca
  );
  tables[joinedTableAlias] = joinedInput;
  const joinType = joinClause.type === `cross` ? `inner` : joinClause.type === `outer` ? `full` : joinClause.type;
- const compiledLeftExpr = compileExpression(joinClause.left);
- const compiledRightExpr = compileExpression(joinClause.right);
+ const { mainExpr, joinedExpr } = analyzeJoinExpressions(
+ joinClause.left,
+ joinClause.right,
+ mainTableAlias,
+ joinedTableAlias
+ );
+ const compiledMainExpr = compileExpression(mainExpr);
+ const compiledJoinedExpr = compileExpression(joinedExpr);
  const mainPipeline = pipeline.pipe(
  map(([currentKey, namespacedRow]) => {
- const leftKey = compiledLeftExpr(namespacedRow);
- return [leftKey, [currentKey, namespacedRow]];
+ const mainKey = compiledMainExpr(namespacedRow);
+ return [mainKey, [currentKey, namespacedRow]];
  })
  );
  const joinedPipeline = joinedInput.pipe(
  map(([currentKey, row]) => {
  const namespacedRow = { [joinedTableAlias]: row };
- const rightKey = compiledRightExpr(namespacedRow);
- return [rightKey, [currentKey, namespacedRow]];
+ const joinedKey = compiledJoinedExpr(namespacedRow);
+ return [joinedKey, [currentKey, namespacedRow]];
  })
  );
  if (![`inner`, `left`, `right`, `full`].includes(joinType)) {
- throw new Error(`Unsupported join type: ${joinClause.type}`);
+ throw new UnsupportedJoinTypeError(joinClause.type);
  }
  return mainPipeline.pipe(
  join(joinedPipeline, joinType),
@@ -49,25 +56,67 @@ function processJoin(pipeline, joinClause, tables, mainTableAlias, allInputs, ca
  processJoinResults(joinClause.type)
  );
  }
+ function analyzeJoinExpressions(left, right, mainTableAlias, joinedTableAlias) {
+ const leftTableAlias = getTableAliasFromExpression(left);
+ const rightTableAlias = getTableAliasFromExpression(right);
+ if (leftTableAlias === mainTableAlias && rightTableAlias === joinedTableAlias) {
+ return { mainExpr: left, joinedExpr: right };
+ }
+ if (leftTableAlias === joinedTableAlias && rightTableAlias === mainTableAlias) {
+ return { mainExpr: right, joinedExpr: left };
+ }
+ if (leftTableAlias === rightTableAlias) {
+ throw new InvalidJoinConditionSameTableError(leftTableAlias || `unknown`);
+ }
+ if (!leftTableAlias || !rightTableAlias) {
+ throw new InvalidJoinConditionTableMismatchError(
+ mainTableAlias,
+ joinedTableAlias
+ );
+ }
+ throw new InvalidJoinConditionWrongTablesError(
+ leftTableAlias,
+ rightTableAlias,
+ mainTableAlias,
+ joinedTableAlias
+ );
+ }
+ function getTableAliasFromExpression(expr) {
+ switch (expr.type) {
+ case `ref`:
+ return expr.path[0] || null;
+ case `func`: {
+ const tableAliases = /* @__PURE__ */ new Set();
+ for (const arg of expr.args) {
+ const alias = getTableAliasFromExpression(arg);
+ if (alias) {
+ tableAliases.add(alias);
+ }
+ }
+ return tableAliases.size === 1 ? Array.from(tableAliases)[0] : null;
+ }
+ default:
+ return null;
+ }
+ }
  function processJoinSource(from, allInputs, cache, queryMapping) {
  switch (from.type) {
  case `collectionRef`: {
  const input = allInputs[from.collection.id];
  if (!input) {
- throw new Error(
- `Input for collection "${from.collection.id}" not found in inputs map`
- );
+ throw new CollectionInputNotFoundError(from.collection.id);
  }
  return { alias: from.alias, input };
  }
  case `queryRef`: {
  const originalQuery = queryMapping.get(from.query) || from.query;
- const subQueryInput = compileQuery(
+ const subQueryResult = compileQuery(
  originalQuery,
  allInputs,
  cache,
  queryMapping
  );
+ const subQueryInput = subQueryResult.pipeline;
  const extractedInput = subQueryInput.pipe(
  map((data) => {
  const [key, [value, _orderByIndex]] = data;
@@ -77,7 +126,7 @@ function processJoinSource(from, allInputs, cache, queryMapping) {
  return { alias: from.alias, input: extractedInput };
  }
  default:
- throw new Error(`Unsupported join source type: ${from.type}`);
+ throw new UnsupportedJoinSourceTypeError(from.type);
  }
  }
  function processJoinResults(joinType) {
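
The join compiler above no longer assumes the left operand of a join condition refers to the main table: analyzeJoinExpressions inspects which alias each side references, swaps them when they arrive reversed, and throws the new typed errors (InvalidJoinConditionSameTableError, InvalidJoinConditionTableMismatchError, InvalidJoinConditionWrongTablesError) when a condition cannot be mapped onto the two tables. A minimal sketch of the user-facing effect, using the query-builder style shown in the package's own JSDoc examples; the placeholder collections and the exact import specifiers are assumptions:

import { createLiveQueryCollection, eq } from "@tanstack/db"

// Placeholder source collections, for illustration only.
declare const commentsCollection: any
declare const usersCollection: any

// Condition written with the main table's column first (worked before and still works).
const commentsWithUsers = createLiveQueryCollection((q) =>
  q
    .from({ comment: commentsCollection })
    .join({ user: usersCollection }, ({ comment, user }) =>
      eq(comment.user_id, user.id)
    )
)

// Condition written with the joined table's column first: analyzeJoinExpressions
// now detects the order and swaps the operands, so both spellings produce the
// same join keys instead of depending on argument order.
const commentsWithUsersSwapped = createLiveQueryCollection((q) =>
  q
    .from({ comment: commentsCollection })
    .join({ user: usersCollection }, ({ comment, user }) =>
      eq(user.id, comment.user_id)
    )
)
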
@@ -1 +1 @@
- {"version":3,"file":"joins.js","sources":["../../../../src/query/compiler/joins.ts"], … }
+ {"version":3,"file":"joins.js","sources":["../../../../src/query/compiler/joins.ts"], … }
@@ -1,4 +1,5 @@
  import { orderByWithFractionalIndex } from "@electric-sql/d2mini";
+ import { descComparator, ascComparator } from "../../utils/comparison.js";
  import { compileExpression } from "./evaluators.js";
  function processOrderBy(pipeline, orderByClause, limit, offset) {
  const compiledOrderBy = orderByClause.map((clause) => ({
@@ -20,37 +21,6 @@ function processOrderBy(pipeline, orderByClause, limit, offset) {
  }
  return null;
  };
- const ascComparator = (a, b) => {
- if (a == null && b == null) return 0;
- if (a == null) return -1;
- if (b == null) return 1;
- if (typeof a === `string` && typeof b === `string`) {
- return a.localeCompare(b);
- }
- if (Array.isArray(a) && Array.isArray(b)) {
- for (let i = 0; i < Math.min(a.length, b.length); i++) {
- const result = ascComparator(a[i], b[i]);
- if (result !== 0) {
- return result;
- }
- }
- return a.length - b.length;
- }
- if (a instanceof Date && b instanceof Date) {
- return a.getTime() - b.getTime();
- }
- const bothObjects = typeof a === `object` && typeof b === `object`;
- const notNull = a !== null && b !== null;
- if (bothObjects && notNull) {
- return a.toString().localeCompare(b.toString());
- }
- if (a < b) return -1;
- if (a > b) return 1;
- return 0;
- };
- const descComparator = (a, b) => {
- return ascComparator(b, a);
- };
  const makeComparator = () => {
  return (a, b) => {
  if (orderByClause.length > 1) {
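
The comparator that was previously defined inline inside processOrderBy now lives in the shared utils/comparison.js module (imported above as ascComparator/descComparator), where it can be reused outside the ORDER BY compiler. The sketch below simply transcribes the ordering rules from the block deleted above so they are readable in one place; the shipped module may differ in detail:

// Transcription of the removed inline comparator (the logic moved to utils/comparison.js).
export function ascComparator(a: any, b: any): number {
  if (a == null && b == null) return 0
  if (a == null) return -1 // null/undefined sort first
  if (b == null) return 1
  if (typeof a === `string` && typeof b === `string`) {
    return a.localeCompare(b) // locale-aware string ordering
  }
  if (Array.isArray(a) && Array.isArray(b)) {
    // compare element by element; the shorter array wins ties
    for (let i = 0; i < Math.min(a.length, b.length); i++) {
      const result = ascComparator(a[i], b[i])
      if (result !== 0) return result
    }
    return a.length - b.length
  }
  if (a instanceof Date && b instanceof Date) {
    return a.getTime() - b.getTime() // dates compare by timestamp
  }
  if (typeof a === `object` && typeof b === `object`) {
    return a.toString().localeCompare(b.toString()) // other objects via their string form
  }
  if (a < b) return -1
  if (a > b) return 1
  return 0
}

// Descending order simply flips the arguments.
export function descComparator(a: unknown, b: unknown): number {
  return ascComparator(b, a)
}
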
@@ -1 +1 @@
- {"version":3,"file":"order-by.js","sources":["../../../../src/query/compiler/order-by.ts"], … }
+ {"version":3,"file":"order-by.js","sources":["../../../../src/query/compiler/order-by.ts"], … }
@@ -1,9 +1,9 @@
  import { QueryIR } from '../ir.js';
- import { ResultStream } from '../../types.js';
+ import { CompilationResult } from './index.js';
  /**
  * Cache for compiled subqueries to avoid duplicate compilation
  */
- export type QueryCache = WeakMap<QueryIR, ResultStream>;
+ export type QueryCache = WeakMap<QueryIR, CompilationResult>;
  /**
  * Mapping from optimized queries back to their original queries for caching
  */
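
This type change follows from compileQuery now returning more than a stream: the joins and live-query hunks in this diff destructure its result as { pipeline, collectionWhereClauses }, so the subquery cache has to hold that whole object. A rough sketch of the shapes involved; field names are taken from this diff, the exact typings are assumptions, and the real CompilationResult is declared in query/compiler/index:

// Stand-ins for the package's internal types, for illustration only.
type QueryIRLike = object
type ResultStreamLike = unknown
type WhereExpressionLike = unknown

// Inferred shape: the compiled pipeline plus any WHERE clauses the optimizer
// attributed to a single collection alias (used later for subscription push-down).
interface CompilationResultSketch {
  pipeline: ResultStreamLike
  collectionWhereClauses?: Map<string, WhereExpressionLike>
}

// 0.0.27 cached just the compiled stream; 0.0.30 caches the full result.
type QueryCacheBefore = WeakMap<QueryIRLike, ResultStreamLike>
type QueryCacheAfter = WeakMap<QueryIRLike, CompilationResultSketch>
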
@@ -2,6 +2,7 @@ import { D2, output, MultiSet } from "@electric-sql/d2mini";
  import { createCollection } from "../collection.js";
  import { compileQuery } from "./compiler/index.js";
  import { buildQuery, getQueryIR } from "./builder/index.js";
+ import { convertToBasicExpression } from "./compiler/expressions.js";
  let liveQueryCollectionCounter = 0;
  function liveQueryCollectionOptions(config) {
  const id = config.id || `live-query-${++liveQueryCollectionCounter}`;
@@ -31,6 +32,7 @@ function liveQueryCollectionOptions(config) {
  let graphCache;
  let inputsCache;
  let pipelineCache;
+ let collectionWhereClausesCache;
  const compileBasePipeline = () => {
  graphCache = new D2();
  inputsCache = Object.fromEntries(
@@ -39,10 +41,10 @@ function liveQueryCollectionOptions(config) {
  graphCache.newInput()
  ])
  );
- pipelineCache = compileQuery(
- query,
- inputsCache
- );
+ ({
+ pipeline: pipelineCache,
+ collectionWhereClauses: collectionWhereClausesCache
+ } = compileQuery(query, inputsCache));
  };
  const maybeCompileBasePipeline = () => {
  if (!graphCache || !inputsCache || !pipelineCache) {
@@ -131,14 +133,40 @@ function liveQueryCollectionOptions(config) {
  const unsubscribeCallbacks = /* @__PURE__ */ new Set();
  Object.entries(collections).forEach(([collectionId, collection]) => {
  const input = inputs[collectionId];
- const unsubscribe = collection.subscribeChanges(
- (changes) => {
- sendChangesToInput(input, changes, collection.config.getKey);
- maybeRunGraph();
- },
- { includeInitialState: true }
- );
- unsubscribeCallbacks.add(unsubscribe);
+ const collectionAlias = findCollectionAlias(collectionId, query);
+ const whereClause = collectionAlias && collectionWhereClausesCache ? collectionWhereClausesCache.get(collectionAlias) : void 0;
+ if (whereClause) {
+ const whereExpression = convertToBasicExpression(
+ whereClause,
+ collectionAlias
+ );
+ if (whereExpression) {
+ const subscription = collection.subscribeChanges(
+ (changes) => {
+ sendChangesToInput(input, changes, collection.config.getKey);
+ maybeRunGraph();
+ },
+ {
+ includeInitialState: true,
+ whereExpression
+ }
+ );
+ unsubscribeCallbacks.add(subscription);
+ } else {
+ throw new Error(
+ `Failed to convert WHERE clause to collection filter for collection '${collectionId}'. This indicates a bug in the query optimization logic.`
+ );
+ }
+ } else {
+ const subscription = collection.subscribeChanges(
+ (changes) => {
+ sendChangesToInput(input, changes, collection.config.getKey);
+ maybeRunGraph();
+ },
+ { includeInitialState: true }
+ );
+ unsubscribeCallbacks.add(subscription);
+ }
  });
  maybeRunGraph();
  return () => {
@@ -218,6 +246,20 @@ function extractCollectionsFromQuery(query) {
  extractFromQuery(query);
  return collections;
  }
+ function findCollectionAlias(collectionId, query) {
+ var _a, _b, _c, _d;
+ if (((_a = query.from) == null ? void 0 : _a.type) === `collectionRef` && ((_b = query.from.collection) == null ? void 0 : _b.id) === collectionId) {
+ return query.from.alias;
+ }
+ if (query.join) {
+ for (const joinClause of query.join) {
+ if (((_c = joinClause.from) == null ? void 0 : _c.type) === `collectionRef` && ((_d = joinClause.from.collection) == null ? void 0 : _d.id) === collectionId) {
+ return joinClause.from.alias;
+ }
+ }
+ }
+ return void 0;
+ }
  export {
  createLiveQueryCollection,
  liveQueryCollectionOptions
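
Taken together, the live-query changes above implement a WHERE-clause push-down: when the compiler attributes a WHERE clause to a single collection alias (collectionWhereClauses from compileQuery, mapped back to its alias by findCollectionAlias), the clause is converted with convertToBasicExpression and passed to collection.subscribeChanges as whereExpression, so only matching rows enter the D2 graph; otherwise the subscription falls back to the previous unfiltered form. A hedged example of a query that should take the filtered path, in the builder style from the package's JSDoc; the collection and exact import specifiers are assumptions:

import { createLiveQueryCollection, eq } from "@tanstack/db"

// Placeholder source collection, for illustration only.
declare const todosCollection: any

// The WHERE clause below references only the `todo` alias, so the sync layer
// can subscribe with a whereExpression and the source collection streams only
// rows with completed === false into the live query's pipeline.
const activeTodos = createLiveQueryCollection((q) =>
  q
    .from({ todo: todosCollection })
    .where(({ todo }) => eq(todo.completed, false))
)
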
@@ -1 +1 @@
- {"version":3,"file":"live-query-collection.js","sources":["../../../src/query/live-query-collection.ts"], … }
(q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n"],"names":[],"mappings":";;;;AAkBA,IAAI,6BAA6B;AAgG1B,SAAS,2BAId,QAC2B;AAE3B,QAAM,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAGlE,QAAM,QACJ,OAAO,OAAO,UAAU,aACpB,WAAqB,OAAO,KAAK,IACjC,WAAW,OAAO,KAAK;AAI7B,QAAM,iCAAiB,QAAA;AAGvB,QAAM,qCAAqB,QAAA;AAG3B,QAAM,UACJ,MAAM,WAAW,MAAM,QAAQ,SAAS,IACpC,CAAC,MAAe,SAA0B;AAExC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT,IACA;AAEN,QAAM,cAAc,4BAA4B,KAAK;AAErD,QAAM,sBAAsB,MAAM;AAChC,WAAO,OAAO,OAAO,WAAW,EAAE;AAAA,MAChC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAAA;AAAA,EAE7D;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AAEJ,QAAM,sBAAsB,MAAM;AAChC,iBAAa,IAAI,GAAA;AACjB,kBAAc,OAAO;AAAA,MACnB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QACzC;AAAA,QACA,WAAY,SAAA;AAAA,MAAc,CAC3B;AAAA,IAAA;AAEH,oBAAgB;AAAA,MACd;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,2BAA2B,MAAM;AACrC,QAAI,CAAC,cAAc,CAAC,eAAe,CAAC,eAAe;AACjD,0BAAA;AAAA,IACF;AACA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,IAAA;AAAA,EAEd;AAIA,sBAAA;AAGA,QAAM,OAA4B;AAAA,IAChC,eAAe;AAAA,IACf,MAAM,CAAC,EAAE,OAAO,OAAO,QAAQ,WAAW,YAAY,oBAAoB;AACxE,YAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,yBAAA;AACpC,UAAI,gBAAgB;AACpB,eAAS;AAAA,QACP,OAAO,CAAC,SAAS;AACf,gBAAM,WAAW,KAAK,SAAA;AACtB,2BAAiB,SAAS;AAE1B,gBAAA;AACA,mBACG,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,MAAM;AAGjD,kBAAM,CAAC,OAAO,YAAY,IAAI;AAK9B,kBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,cAC9B,SAAS;AAAA,cACT,SAAS;AAAA,cACT;AAAA,cACA;AAAA,YAAA;AAEF,gBAAI,eAAe,GAAG;AACpB,sBAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,YAC1C,WAAW,eAAe,GAAG;AAC3B,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ;AAChB,sBAAQ,eAAe;AAAA,YACzB;AACA,gBAAI,IAAI,KAAK,OAAO;AACpB,mBAAO;AAAA,UACT,uBAAO,IAAA,CAAsG,EAC5G,QAAQ,CAAC,SAAS,WAAW;AAC5B,kBAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,uBAAW,IAAI,OAAO,MAAM;AAG5B,gBAAI,iBAAiB,QAAW;AAC9B,6BAAe,IAAI,OAAO,YAAY;AAAA,YACxC;AAGA,gBAAI,WAAW,YAAY,GAAG;AAC5B,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH;AAAA;AAAA,cAEE,UAAU;AAAA;AAAA,cAGT,YAAY,WACX,cAAc,IAAI,MAAyB;AAAA,cAC7C;AACA,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAEH,WAAW,UAAU,GAAG;AACtB,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH,OAAO;AACL,oBAAM,IAAI;AAAA,gBACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,cAAA;AAAA,YAEvD;AAAA,UACF,CAAC;AACH,iBAAA;AAAA,QACF,CAAC;AAAA,MAAA;AAGH,YAAM,SAAA;AAEN,YAAM,gBAAgB,MAAM;AAE1B,YAAI,uBAAuB;AACzB,gBAAM,IAAA;AAGN,cAAI,kBAAkB,GAAG;AACvB,kBAAA;AACA,mBAAA;AAAA,UACF;AAEA,oBAAA;AAAA,QACF;AAAA,MACF;AAGA,YAAM,2CAA2B,IAAA;AAGjC,aAAO,QAAQ,WAAW,EAAE,QAAQ,CAAC,CAAC,cAAc,UAAU,MAAM;AAClE,cAAM,QAAQ,OAAO,YAAY;AAGjC,cAAM,cAAc,WAAW;AAAA,UAC7B,CAAC,YAAkC;AACjC,+BAAmB,OAAO,SAAS,WAAW,OAAO,MAAM;AAC3D,0BAAA;AAAA,UACF;AAAA,UACA,EAAE,qBAAqB,KAAA;AAAA,QAAK;AAE9B,6BAAqB,IAAI,WAAW;AAAA,MACtC,CAAC;AAGD,oBAAA;AAGA,aAAO,MAAM;AACX,6BAAqB,QAAQ,CAAC,gBAAgB,YAAA,CAAa;AAAA,MAC7D;AAAA,IACF;AAAA,EAAA;AAIF,SAAO;AAAA,IACL;AAAA,IACA,QACE,OAAO,WAAW,CAAC,SAAS,WAAW,IAAI,IAAI;AAAA,IACjD;AAAA,IACA;AAAA,IACA,QAAQ,OAAO,UAAU;AAAA;AAAA,IACzB,QAAQ,OAAO;AAAA,IACf,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,WAAW,OAAO;AAAA,EAAA;AAEtB;AAsDO,SAAS,0BAKd,eAG8C;AAE9C,MAAI,OAAO,kBAAkB,YAAY;AAEvC,UAAM,SAAuD;AAAA,MAC3D,OAAO;AAAA,IAAA;AAIT,UAAM,UAAU,2BAA8C,MAAM;AACpE,
WAAO,yBAAyB,OAAO;AAAA,EACzC,OAAO;AAEL,UAAM,SAAS;AAIf,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,OAAO,OAAO;AAAA,IAAA,CACf;AAAA,EACH;AACF;AAMA,SAAS,yBAIP,SAC8C;AAE9C,SAAO,iBAAiB,OAAc;AAKxC;AAKA,SAAS,mBACP,OACA,SACA,QACA;AACA,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AACA,QAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAC5C;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;"}
+ {"version":3,"file":"live-query-collection.js","sources":["../../../src/query/live-query-collection.ts"],"sourcesContent":["import { D2, MultiSet, output } from \"@electric-sql/d2mini\"\nimport { createCollection } from \"../collection.js\"\nimport { compileQuery } from \"./compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"./builder/index.js\"\nimport { convertToBasicExpression } from \"./compiler/expressions.js\"\nimport type { InitialQueryBuilder, QueryBuilder } from \"./builder/index.js\"\nimport type { Collection } from \"../collection.js\"\nimport type {\n ChangeMessage,\n CollectionConfig,\n KeyedStream,\n ResultStream,\n SyncConfig,\n UtilsRecord,\n} from \"../types.js\"\nimport type { Context, GetResult } from \"./builder/types.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@electric-sql/d2mini\"\nimport type { BasicExpression } from \"./ir.js\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\n/**\n * Configuration interface for live query collection options\n *\n * @example\n * ```typescript\n * const config: LiveQueryCollectionConfig<any, any> = {\n * // id is optional - will auto-generate \"live-query-1\", \"live-query-2\", etc.\n * query: (q) => q\n * .from({ comment: commentsCollection })\n * .join(\n * { user: usersCollection },\n * ({ comment, user }) => eq(comment.user_id, user.id)\n * )\n * .where(({ comment }) => eq(comment.active, true))\n * .select(({ comment, user }) => ({\n * id: comment.id,\n * content: comment.content,\n * authorName: user.name,\n * })),\n * // getKey is optional - defaults to using stream key\n * getKey: (item) => item.id,\n * }\n * ```\n */\nexport interface LiveQueryCollectionConfig<\n TContext extends Context,\n TResult extends object = GetResult<TContext> & object,\n> {\n /**\n * Unique identifier for the collection\n * If not provided, defaults to `live-query-${number}` with auto-incrementing number\n */\n id?: string\n\n /**\n * Query builder function that defines the live query\n */\n query:\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n | QueryBuilder<TContext>\n\n /**\n * Function to extract the key from result items\n * If not provided, defaults to using the key from the D2 stream\n */\n getKey?: (item: TResult) => string | number\n\n /**\n * Optional schema for validation\n */\n schema?: CollectionConfig<TResult>[`schema`]\n\n /**\n * Optional mutation handlers\n */\n onInsert?: CollectionConfig<TResult>[`onInsert`]\n onUpdate?: CollectionConfig<TResult>[`onUpdate`]\n onDelete?: CollectionConfig<TResult>[`onDelete`]\n\n /**\n * Start sync / the query immediately\n */\n startSync?: boolean\n\n /**\n * GC time for the collection\n */\n gcTime?: number\n}\n\n/**\n * Creates live query collection options for use with createCollection\n *\n * @example\n * ```typescript\n * const options = liveQueryCollectionOptions({\n * // id is optional - will auto-generate if not provided\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => eq(post.published, true))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * content: post.content,\n * })),\n * // getKey is optional - will use stream key if not provided\n * })\n *\n * const collection = createCollection(options)\n * ```\n *\n * @param config - Configuration options for the live query collection\n * @returns Collection options that can be passed to createCollection\n */\nexport function liveQueryCollectionOptions<\n TContext extends Context,\n TResult extends object 
= GetResult<TContext>,\n>(\n config: LiveQueryCollectionConfig<TContext, TResult>\n): CollectionConfig<TResult> {\n // Generate a unique ID if not provided\n const id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n // Build the query using the provided query builder function or instance\n const query =\n typeof config.query === `function`\n ? buildQuery<TContext>(config.query)\n : getQueryIR(config.query)\n\n // WeakMap to store the keys of the results so that we can retreve them in the\n // getKey function\n const resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n const orderByIndices = new WeakMap<object, string>()\n\n // Create compare function for ordering if the query has orderBy\n const compare =\n query.orderBy && query.orderBy.length > 0\n ? (val1: TResult, val2: TResult): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n : undefined\n\n const collections = extractCollectionsFromQuery(query)\n\n const allCollectionsReady = () => {\n return Object.values(collections).every(\n (collection) =>\n collection.status === `ready` || collection.status === `initialCommit`\n )\n }\n\n let graphCache: D2 | undefined\n let inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n let pipelineCache: ResultStream | undefined\n let collectionWhereClausesCache:\n | Map<string, BasicExpression<boolean>>\n | undefined\n\n const compileBasePipeline = () => {\n graphCache = new D2()\n inputsCache = Object.fromEntries(\n Object.entries(collections).map(([key]) => [\n key,\n graphCache!.newInput<any>(),\n ])\n )\n\n // Compile the query and get both pipeline and collection WHERE clauses\n ;({\n pipeline: pipelineCache,\n collectionWhereClauses: collectionWhereClausesCache,\n } = compileQuery(query, inputsCache as Record<string, KeyedStream>))\n }\n\n const maybeCompileBasePipeline = () => {\n if (!graphCache || !inputsCache || !pipelineCache) {\n compileBasePipeline()\n }\n return {\n graph: graphCache!,\n inputs: inputsCache!,\n pipeline: pipelineCache!,\n }\n }\n\n // Compile the base pipeline once initially\n // This is done to ensure that any errors are thrown immediately and synchronously\n compileBasePipeline()\n\n // Create the sync configuration\n const sync: SyncConfig<TResult> = {\n rowUpdateMode: `full`,\n sync: ({ begin, write, commit, markReady, collection: theCollection }) => {\n const { graph, inputs, pipeline } = maybeCompileBasePipeline()\n let messagesCount = 0\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n messagesCount += messages.length\n\n begin()\n messages\n .reduce((acc, [[key, tupleData], multiplicity]) => {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = tupleData as [\n TResult,\n string | undefined,\n ]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n 
acc.set(key, changes)\n return acc\n }, new Map<unknown, { deletes: number; inserts: number; value: TResult; orderByIndex: string | undefined }>())\n .forEach((changes, rawKey) => {\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n resultKeys.set(value, rawKey)\n\n // Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes &&\n theCollection.has(rawKey as string | number))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `This should never happen ${JSON.stringify(changes)}`\n )\n }\n })\n commit()\n })\n )\n\n graph.finalize()\n\n const maybeRunGraph = () => {\n // We only run the graph if all the collections are ready\n if (allCollectionsReady()) {\n graph.run()\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (messagesCount === 0) {\n begin()\n commit()\n }\n // Mark the collection as ready after the first successful run\n markReady()\n }\n }\n\n // Unsubscribe callbacks\n const unsubscribeCallbacks = new Set<() => void>()\n\n // Subscribe to all collections, using WHERE clause optimization when available\n Object.entries(collections).forEach(([collectionId, collection]) => {\n const input = inputs[collectionId]!\n const collectionAlias = findCollectionAlias(collectionId, query)\n const whereClause =\n collectionAlias && collectionWhereClausesCache\n ? collectionWhereClausesCache.get(collectionAlias)\n : undefined\n\n if (whereClause) {\n // Convert WHERE clause to BasicExpression format for collection subscription\n const whereExpression = convertToBasicExpression(\n whereClause,\n collectionAlias!\n )\n\n if (whereExpression) {\n // Use index optimization for this collection\n const subscription = collection.subscribeChanges(\n (changes) => {\n sendChangesToInput(input, changes, collection.config.getKey)\n maybeRunGraph()\n },\n {\n includeInitialState: true,\n whereExpression: whereExpression,\n }\n )\n unsubscribeCallbacks.add(subscription)\n } else {\n // This should not happen - if we have a whereClause but can't create whereExpression,\n // it indicates a bug in our optimization logic\n throw new Error(\n `Failed to convert WHERE clause to collection filter for collection '${collectionId}'. 
` +\n `This indicates a bug in the query optimization logic.`\n )\n }\n } else {\n // No WHERE clause for this collection, use regular subscription\n const subscription = collection.subscribeChanges(\n (changes) => {\n sendChangesToInput(input, changes, collection.config.getKey)\n maybeRunGraph()\n },\n { includeInitialState: true }\n )\n unsubscribeCallbacks.add(subscription)\n }\n })\n\n // Initial run\n maybeRunGraph()\n\n // Return the unsubscribe function\n return () => {\n unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n }\n },\n }\n\n // Return collection configuration\n return {\n id,\n getKey:\n config.getKey || ((item) => resultKeys.get(item) as string | number),\n sync,\n compare,\n gcTime: config.gcTime || 5000, // 5 seconds by default for live queries\n schema: config.schema,\n onInsert: config.onInsert,\n onUpdate: config.onUpdate,\n onDelete: config.onDelete,\n startSync: config.startSync,\n }\n}\n\n/**\n * Creates a live query collection directly\n *\n * @example\n * ```typescript\n * // Minimal usage - just pass a query function\n * const activeUsers = createLiveQueryCollection(\n * (q) => q\n * .from({ user: usersCollection })\n * .where(({ user }) => eq(user.active, true))\n * .select(({ user }) => ({ id: user.id, name: user.name }))\n * )\n *\n * // Full configuration with custom options\n * const searchResults = createLiveQueryCollection({\n * id: \"search-results\", // Custom ID (auto-generated if omitted)\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => like(post.title, `%${searchTerm}%`))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * excerpt: post.excerpt,\n * })),\n * getKey: (item) => item.id, // Custom key function (uses stream key if omitted)\n * utils: {\n * updateSearchTerm: (newTerm: string) => {\n * // Custom utility functions\n * }\n * }\n * })\n * ```\n */\n\n// Overload 1: Accept just the query function\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n query: (q: InitialQueryBuilder) => QueryBuilder<TContext>\n): Collection<TResult, string | number, {}>\n\n// Overload 2: Accept full config object with optional utilities\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n config: LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils>\n\n// Implementation\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n configOrQuery:\n | (LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils })\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n): Collection<TResult, string | number, TUtils> {\n // Determine if the argument is a function (query) or a config object\n if (typeof configOrQuery === `function`) {\n // Simple query function case\n const config: LiveQueryCollectionConfig<TContext, TResult> = {\n query: configOrQuery as (\n q: InitialQueryBuilder\n ) => QueryBuilder<TContext>,\n }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection(options)\n } else {\n // Config object case\n const config = configOrQuery as LiveQueryCollectionConfig<\n TContext,\n TResult\n > & { utils?: TUtils }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return 
bridgeToCreateCollection({\n ...options,\n utils: config.utils,\n })\n }\n}\n\n/**\n * Bridge function that handles the type compatibility between query2's TResult\n * and core collection's ResolveType without exposing ugly type assertions to users\n */\nfunction bridgeToCreateCollection<\n TResult extends object,\n TUtils extends UtilsRecord = {},\n>(\n options: CollectionConfig<TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils> {\n // This is the only place we need a type assertion, hidden from user API\n return createCollection(options as any) as unknown as Collection<\n TResult,\n string | number,\n TUtils\n >\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Array<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n) {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n input.sendData(new MultiSet(multiSetArray))\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n\n/**\n * Converts WHERE expressions from the query IR into a BasicExpression for subscribeChanges\n *\n * @param whereExpressions Array of WHERE expressions to convert\n * @param tableAlias The table alias used in the expressions\n * @returns A BasicExpression that can be used with the collection's index system\n */\n\n/**\n * Finds the alias for a collection ID in the query\n */\nfunction findCollectionAlias(\n collectionId: string,\n query: any\n): string | undefined {\n // Check FROM clause\n if (\n query.from?.type === `collectionRef` &&\n query.from.collection?.id === collectionId\n ) {\n return query.from.alias\n }\n\n // Check JOIN clauses\n if (query.join) {\n for (const joinClause of query.join) {\n if (\n joinClause.from?.type === `collectionRef` &&\n joinClause.from.collection?.id === collectionId\n ) {\n return joinClause.from.alias\n }\n }\n }\n\n return 
undefined\n}\n"],"names":[],"mappings":";;;;;AAoBA,IAAI,6BAA6B;AAgG1B,SAAS,2BAId,QAC2B;AAE3B,QAAM,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAGlE,QAAM,QACJ,OAAO,OAAO,UAAU,aACpB,WAAqB,OAAO,KAAK,IACjC,WAAW,OAAO,KAAK;AAI7B,QAAM,iCAAiB,QAAA;AAGvB,QAAM,qCAAqB,QAAA;AAG3B,QAAM,UACJ,MAAM,WAAW,MAAM,QAAQ,SAAS,IACpC,CAAC,MAAe,SAA0B;AAExC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT,IACA;AAEN,QAAM,cAAc,4BAA4B,KAAK;AAErD,QAAM,sBAAsB,MAAM;AAChC,WAAO,OAAO,OAAO,WAAW,EAAE;AAAA,MAChC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAAA;AAAA,EAE7D;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AAIJ,QAAM,sBAAsB,MAAM;AAChC,iBAAa,IAAI,GAAA;AACjB,kBAAc,OAAO;AAAA,MACnB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QACzC;AAAA,QACA,WAAY,SAAA;AAAA,MAAc,CAC3B;AAAA,IAAA;AAIF,KAAC;AAAA,MACA,UAAU;AAAA,MACV,wBAAwB;AAAA,IAAA,IACtB,aAAa,OAAO,WAA0C;AAAA,EACpE;AAEA,QAAM,2BAA2B,MAAM;AACrC,QAAI,CAAC,cAAc,CAAC,eAAe,CAAC,eAAe;AACjD,0BAAA;AAAA,IACF;AACA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,IAAA;AAAA,EAEd;AAIA,sBAAA;AAGA,QAAM,OAA4B;AAAA,IAChC,eAAe;AAAA,IACf,MAAM,CAAC,EAAE,OAAO,OAAO,QAAQ,WAAW,YAAY,oBAAoB;AACxE,YAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,yBAAA;AACpC,UAAI,gBAAgB;AACpB,eAAS;AAAA,QACP,OAAO,CAAC,SAAS;AACf,gBAAM,WAAW,KAAK,SAAA;AACtB,2BAAiB,SAAS;AAE1B,gBAAA;AACA,mBACG,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,MAAM;AAGjD,kBAAM,CAAC,OAAO,YAAY,IAAI;AAK9B,kBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,cAC9B,SAAS;AAAA,cACT,SAAS;AAAA,cACT;AAAA,cACA;AAAA,YAAA;AAEF,gBAAI,eAAe,GAAG;AACpB,sBAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,YAC1C,WAAW,eAAe,GAAG;AAC3B,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ;AAChB,sBAAQ,eAAe;AAAA,YACzB;AACA,gBAAI,IAAI,KAAK,OAAO;AACpB,mBAAO;AAAA,UACT,uBAAO,IAAA,CAAsG,EAC5G,QAAQ,CAAC,SAAS,WAAW;AAC5B,kBAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,uBAAW,IAAI,OAAO,MAAM;AAG5B,gBAAI,iBAAiB,QAAW;AAC9B,6BAAe,IAAI,OAAO,YAAY;AAAA,YACxC;AAGA,gBAAI,WAAW,YAAY,GAAG;AAC5B,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH;AAAA;AAAA,cAEE,UAAU;AAAA;AAAA,cAGT,YAAY,WACX,cAAc,IAAI,MAAyB;AAAA,cAC7C;AACA,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAEH,WAAW,UAAU,GAAG;AACtB,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH,OAAO;AACL,oBAAM,IAAI;AAAA,gBACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,cAAA;AAAA,YAEvD;AAAA,UACF,CAAC;AACH,iBAAA;AAAA,QACF,CAAC;AAAA,MAAA;AAGH,YAAM,SAAA;AAEN,YAAM,gBAAgB,MAAM;AAE1B,YAAI,uBAAuB;AACzB,gBAAM,IAAA;AAGN,cAAI,kBAAkB,GAAG;AACvB,kBAAA;AACA,mBAAA;AAAA,UACF;AAEA,oBAAA;AAAA,QACF;AAAA,MACF;AAGA,YAAM,2CAA2B,IAAA;AAGjC,aAAO,QAAQ,WAAW,EAAE,QAAQ,CAAC,CAAC,cAAc,UAAU,MAAM;AAClE,cAAM,QAAQ,OAAO,YAAY;AACjC,cAAM,kBAAkB,oBAAoB,cAAc,KAAK;AAC/D,cAAM,cACJ,mBAAmB,8BACf,4BAA4B,IAAI,eAAe,IAC/C;AAEN,YAAI,aAAa;AAEf,gBAAM,kBAAkB;AAAA,YACtB;AAAA,YACA;AAAA,UAAA;AAGF,cAAI,iBAAiB;AAEnB,kBAAM,eAAe,WAAW;AAAA,cAC9B,CAAC,YAAY;AACX,mCAAmB,OAAO,SAAS,WAAW,OAAO,MAAM;AAC3D,8BAAA;AAAA,cACF;AAAA,cACA;AAAA,gBACE,qBAAqB;AAAA,gBACrB;AAAA,cAAA;AAAA,YACF;AAEF,iCAAqB,IAAI,YAAY;AAAA,UACvC,OAAO;AAGL,kBAAM,IAAI;AAAA,cACR,uEAAuE,YAAY;AAAA,YAAA;AAAA,UAGvF;AAAA,QACF,OAAO;AAEL,gBAAM,eAAe,WAAW;AAAA,YAC9B,CAAC,YAAY;AACX,iCAAmB,OAAO,SAAS,WAAW,OAAO,MAAM;AAC3D,4BAAA;AAAA,YACF;AAAA,YACA,EAAE,qBAAqB,KAAA;AAAA,UAAK;AAE9B,+BAAqB,IAAI,YAAY;AAAA,QACvC;AAAA,MACF,CAAC;AAGD,oBAAA;AAGA,aAAO,MAAM;AACX,6BAAqB,QAAQ,CAAC,gBAAgB,YAAA,CAAa;AAAA,MAC7D;AAAA,IACF;AAAA,EAAA;AAIF,SAAO;AAAA,IACL;AAAA,IACA,QACE,OAAO,WAAW,CAAC,SAAS,WAAW,IAAI,IAAI;AAAA,IACjD;AAAA,IACA;AAAA,IACA,QAAQ,OAAO,UAAU;AAAA;AAAA,IACzB,QAAQ,OAAO;AAAA,IACf,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,I
ACjB,UAAU,OAAO;AAAA,IACjB,WAAW,OAAO;AAAA,EAAA;AAEtB;AAsDO,SAAS,0BAKd,eAG8C;AAE9C,MAAI,OAAO,kBAAkB,YAAY;AAEvC,UAAM,SAAuD;AAAA,MAC3D,OAAO;AAAA,IAAA;AAIT,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB,OAAO;AAAA,EACzC,OAAO;AAEL,UAAM,SAAS;AAIf,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,OAAO,OAAO;AAAA,IAAA,CACf;AAAA,EACH;AACF;AAMA,SAAS,yBAIP,SAC8C;AAE9C,SAAO,iBAAiB,OAAc;AAKxC;AAKA,SAAS,mBACP,OACA,SACA,QACA;AACA,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AACA,QAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAC5C;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;AAaA,SAAS,oBACP,cACA,OACoB;;AAEpB,QACE,WAAM,SAAN,mBAAY,UAAS,qBACrB,WAAM,KAAK,eAAX,mBAAuB,QAAO,cAC9B;AACA,WAAO,MAAM,KAAK;AAAA,EACpB;AAGA,MAAI,MAAM,MAAM;AACd,eAAW,cAAc,MAAM,MAAM;AACnC,YACE,gBAAW,SAAX,mBAAiB,UAAS,qBAC1B,gBAAW,KAAK,eAAhB,mBAA4B,QAAO,cACnC;AACA,eAAO,WAAW,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;"}
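The rebuilt live-query-collection source (visible in the map above) now asks the query compiler for per-collection WHERE clauses and, when one exists for a source, passes it to that collection's `subscribeChanges` call as a `whereExpression`, letting the backing collection serve the initial state through an index instead of a full scan. A minimal sketch of a query that would take this path, assuming `createLiveQueryCollection` and `eq` are exported from the package root (as the JSDoc examples imply) and `usersCollection` is a hypothetical, previously created collection:

```typescript
import { createLiveQueryCollection, eq } from "@tanstack/db" // assumed root exports
import { usersCollection } from "./collections"              // hypothetical existing collection

// The single WHERE clause below touches only the `user` source, so the live query
// subscribes to usersCollection with it as a whereExpression; an index on `active`
// (if one exists) can then answer the initial state without scanning every row.
const activeUsers = createLiveQueryCollection((q) =>
  q
    .from({ user: usersCollection })
    .where(({ user }) => eq(user.active, true))
    .select(({ user }) => ({ id: user.id, name: user.name }))
)
```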
@@ -17,6 +17,15 @@ export interface GroupedWhereClauses {
   /** WHERE clauses that touch multiple sources, combined into one expression */
   multiSource?: BasicExpression<boolean>;
  }
+ /**
+  * Result of query optimization including both the optimized query and collection-specific WHERE clauses
+  */
+ export interface OptimizationResult {
+   /** The optimized query with WHERE clauses potentially moved to subqueries */
+   optimizedQuery: QueryIR;
+   /** Map of collection aliases to their extracted WHERE clauses for index optimization */
+   collectionWhereClauses: Map<string, BasicExpression<boolean>>;
+ }
  /**
   * Main query optimizer entry point that lifts WHERE clauses into subqueries.
   *
@@ -25,7 +34,7 @@ export interface GroupedWhereClauses {
   * sources as possible, then removing redundant subqueries.
   *
   * @param query - The QueryIR to optimize
-  * @returns A new QueryIR with optimizations applied (or original if no optimization possible)
+  * @returns An OptimizationResult with the optimized query and collection WHERE clause mapping
   *
   * @example
   * ```typescript
@@ -35,8 +44,9 @@ export interface GroupedWhereClauses {
   * where: [eq(u.dept_id, 1), gt(p.views, 100)]
   * }
   *
-  * const optimized = optimizeQuery(originalQuery)
+  * const { optimizedQuery, collectionWhereClauses } = optimizeQuery(originalQuery)
   * // Result: Single-source clauses moved to deepest possible subqueries
+  * // collectionWhereClauses: Map { 'u' => eq(u.dept_id, 1), 'p' => gt(p.views, 100) }
   * ```
   */
- export declare function optimizeQuery(query: QueryIR): QueryIR;
+ export declare function optimizeQuery(query: QueryIR): OptimizationResult;
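The shape change is in the last pair of lines: `optimizeQuery` no longer returns a `QueryIR` directly, so callers destructure the `OptimizationResult` and can look up the filter extracted for a given source alias. A short sketch of the new call shape; the import path is an assumption, since the declaration shown lives in `dist/cjs/query/optimizer.d.cts` and may not be re-exported from the package root:

```typescript
import { optimizeQuery } from "@tanstack/db"                  // assumption: re-exported; otherwise an internal module
import type { QueryIR, BasicExpression } from "@tanstack/db"  // assumed type exports

declare const queryIR: QueryIR // built elsewhere, e.g. from a query builder

const { optimizedQuery, collectionWhereClauses } = optimizeQuery(queryIR)

// Filters are keyed by the alias used in from()/join(), e.g. `u` or `p` in the example above.
const userFilter: BasicExpression<boolean> | undefined = collectionWhereClauses.get(`u`)
```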
@@ -1,6 +1,9 @@
  import { deepEquals } from "../utils.js";
+ import { CannotCombineEmptyExpressionListError } from "../errors.js";
  import { QueryRef, CollectionRef, Func } from "./ir.js";
+ import { isConvertibleToCollectionFilter } from "./compiler/expressions.js";
  function optimizeQuery(query) {
+   const collectionWhereClauses = extractCollectionWhereClauses(query);
    let optimized = query;
    let previousOptimized;
    let iterations = 0;
@@ -11,7 +14,42 @@ function optimizeQuery(query) {
      iterations++;
    }
    const cleaned = removeRedundantSubqueries(optimized);
-   return cleaned;
+   return {
+     optimizedQuery: cleaned,
+     collectionWhereClauses
+   };
+ }
+ function extractCollectionWhereClauses(query) {
+   const collectionWhereClauses = /* @__PURE__ */ new Map();
+   if (!query.where || query.where.length === 0) {
+     return collectionWhereClauses;
+   }
+   const splitWhereClauses = splitAndClauses(query.where);
+   const analyzedClauses = splitWhereClauses.map(
+     (clause) => analyzeWhereClause(clause)
+   );
+   const groupedClauses = groupWhereClauses(analyzedClauses);
+   for (const [sourceAlias, whereClause] of groupedClauses.singleSource) {
+     if (isCollectionReference(query, sourceAlias)) {
+       if (isConvertibleToCollectionFilter(whereClause)) {
+         collectionWhereClauses.set(sourceAlias, whereClause);
+       }
+     }
+   }
+   return collectionWhereClauses;
+ }
+ function isCollectionReference(query, sourceAlias) {
+   if (query.from.alias === sourceAlias) {
+     return query.from.type === `collectionRef`;
+   }
+   if (query.join) {
+     for (const joinClause of query.join) {
+       if (joinClause.from.alias === sourceAlias) {
+         return joinClause.from.type === `collectionRef`;
+       }
+     }
+   }
+   return false;
  }
  function applyRecursiveOptimization(query) {
    var _a;
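As the added code shows, `extractCollectionWhereClauses` only promotes a clause when it references a single source alias, that alias resolves to a collection rather than a subquery, and `isConvertibleToCollectionFilter` accepts it; everything else stays inside the optimized query. A hedged illustration using the aliases from the JSDoc example above, assuming `and` and `gt` are exported builder helpers alongside `eq` and that the collections are hypothetical:

```typescript
import { createLiveQueryCollection, eq, gt, and } from "@tanstack/db" // assumed root exports
import { usersCollection, postsCollection } from "./collections"     // hypothetical collections

const postsByDept = createLiveQueryCollection((q) =>
  q
    .from({ u: usersCollection })
    .join({ p: postsCollection }, ({ u, p }) => eq(u.id, p.user_id))
    // The and() is split by splitAndClauses: eq(u.dept_id, 1) becomes a filter pushed
    // to usersCollection and gt(p.views, 100) a filter pushed to postsCollection,
    // while the multi-source join condition stays in the optimized query.
    .where(({ u, p }) => and(eq(u.dept_id, 1), gt(p.views, 100)))
    .select(({ u, p }) => ({ id: p.id, author: u.name, views: p.views }))
)
```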
@@ -270,7 +308,7 @@ function isSafeToPushIntoExistingSubquery(query) {
  }
  function combineWithAnd(expressions) {
    if (expressions.length === 0) {
-     throw new Error(`Cannot combine empty expression list`);
+     throw new CannotCombineEmptyExpressionListError();
    }
    if (expressions.length === 1) {
      return expressions[0];
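The last hunk swaps a bare `Error` for a named class from `errors.js`, in line with the large additions to `errors.cjs`/`errors.d.cts` in this release, so consumers can branch on error type rather than matching message strings. A small sketch, assuming the class is re-exported from the package root:

```typescript
import { CannotCombineEmptyExpressionListError } from "@tanstack/db" // assumption: error classes are re-exported

// Narrow an unknown error to this specific optimizer failure; previously this
// required matching on the "Cannot combine empty expression list" message text.
function isEmptyExpressionListError(err: unknown): err is CannotCombineEmptyExpressionListError {
  return err instanceof CannotCombineEmptyExpressionListError
}
```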