@tanstack/db 0.0.9 → 0.0.11

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/dist/cjs/collection.cjs +6 -1
  2. package/dist/cjs/collection.cjs.map +1 -1
  3. package/dist/cjs/query/compiled-query.cjs +40 -59
  4. package/dist/cjs/query/compiled-query.cjs.map +1 -1
  5. package/dist/cjs/query/compiled-query.d.cts +0 -4
  6. package/dist/cjs/query/group-by.cjs +3 -3
  7. package/dist/cjs/query/group-by.cjs.map +1 -1
  8. package/dist/cjs/query/group-by.d.cts +1 -1
  9. package/dist/cjs/query/joins.cjs +16 -16
  10. package/dist/cjs/query/joins.cjs.map +1 -1
  11. package/dist/cjs/query/joins.d.cts +1 -1
  12. package/dist/cjs/query/order-by.cjs +6 -6
  13. package/dist/cjs/query/order-by.cjs.map +1 -1
  14. package/dist/cjs/query/pipeline-compiler.cjs +5 -5
  15. package/dist/cjs/query/pipeline-compiler.cjs.map +1 -1
  16. package/dist/cjs/query/pipeline-compiler.d.cts +1 -1
  17. package/dist/cjs/query/select.cjs +2 -2
  18. package/dist/cjs/query/select.cjs.map +1 -1
  19. package/dist/cjs/transactions.cjs +5 -12
  20. package/dist/cjs/transactions.cjs.map +1 -1
  21. package/dist/cjs/transactions.d.cts +1 -1
  22. package/dist/cjs/types.d.cts +1 -1
  23. package/dist/esm/collection.js +6 -1
  24. package/dist/esm/collection.js.map +1 -1
  25. package/dist/esm/query/compiled-query.d.ts +0 -4
  26. package/dist/esm/query/compiled-query.js +40 -59
  27. package/dist/esm/query/compiled-query.js.map +1 -1
  28. package/dist/esm/query/group-by.d.ts +1 -1
  29. package/dist/esm/query/group-by.js +1 -1
  30. package/dist/esm/query/group-by.js.map +1 -1
  31. package/dist/esm/query/joins.d.ts +1 -1
  32. package/dist/esm/query/joins.js +1 -1
  33. package/dist/esm/query/joins.js.map +1 -1
  34. package/dist/esm/query/order-by.js +1 -1
  35. package/dist/esm/query/order-by.js.map +1 -1
  36. package/dist/esm/query/pipeline-compiler.d.ts +1 -1
  37. package/dist/esm/query/pipeline-compiler.js +1 -1
  38. package/dist/esm/query/pipeline-compiler.js.map +1 -1
  39. package/dist/esm/query/select.js +1 -1
  40. package/dist/esm/query/select.js.map +1 -1
  41. package/dist/esm/transactions.d.ts +1 -1
  42. package/dist/esm/transactions.js +5 -12
  43. package/dist/esm/transactions.js.map +1 -1
  44. package/dist/esm/types.d.ts +1 -1
  45. package/package.json +2 -2
  46. package/src/collection.ts +6 -2
  47. package/src/query/compiled-query.ts +44 -66
  48. package/src/query/group-by.ts +1 -1
  49. package/src/query/joins.ts +2 -2
  50. package/src/query/order-by.ts +1 -1
  51. package/src/query/pipeline-compiler.ts +2 -2
  52. package/src/query/select.ts +1 -1
  53. package/src/transactions.ts +8 -20
  54. package/src/types.ts +1 -1
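
The recurring change across these files is a dependency rename: imports that previously referenced @electric-sql/d2ts now reference @electric-sql/d2mini (the package.json change presumably updates that dependency alongside the version bump). For orientation, a representative one-line hunk, reproduced from what appears to be the ESM order-by output shown further below:

- import { orderByWithIndex, map, orderByWithFractionalIndex, orderBy } from "@electric-sql/d2ts";
+ import { orderByWithIndex, map, orderByWithFractionalIndex, orderBy } from "@electric-sql/d2mini";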
@@ -1 +1 @@
- {"version":3,"file":"joins.js","sources":["../../../src/query/joins.ts"],"sourcesContent":["import {\n consolidate,\n filter,\n join as joinOperator,\n map,\n} from \"@electric-sql/d2ts\"\nimport { evaluateConditionOnNamespacedRow } from \"./evaluators.js\"\nimport { extractJoinKey } from \"./extractors.js\"\nimport type { Query } from \"./index.js\"\nimport type { IStreamBuilder, JoinType } from \"@electric-sql/d2ts\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types.js\"\n\n/**\n * Creates a processing pipeline for join clauses\n */\nexport function processJoinClause(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>\n) {\n if (!query.join) return pipeline\n const input = allInputs[query.from]\n\n for (const joinClause of query.join) {\n // Create a stream for the joined table\n const joinedTableAlias = joinClause.as || joinClause.from\n\n // Get the right join type for the operator\n const joinType: JoinType =\n joinClause.type === `cross` ? `inner` : joinClause.type\n\n // The `in` is formatted as ['@mainKeyRef', '=', '@joinedKeyRef']\n // Destructure the main key reference and the joined key references\n const [mainKeyRef, , joinedKeyRefs] = joinClause.on\n\n // We need to prepare the main pipeline and the joined pipeline\n // to have the correct key format for joining\n const mainPipeline = pipeline.pipe(\n map(([currentKey, namespacedRow]) => {\n // Extract the key from the ON condition left side for the main table\n const mainRow = namespacedRow[mainTableAlias]!\n\n // Extract the join key from the main row\n const key = extractJoinKey(mainRow, mainKeyRef, mainTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Get the joined table input from the inputs map\n let joinedTableInput: KeyedStream\n\n if (allInputs[joinClause.from]) {\n // Use the provided input if available\n joinedTableInput = allInputs[joinClause.from]!\n } else {\n // Create a new input if not provided\n joinedTableInput =\n input!.graph.newInput<[string, Record<string, unknown>]>()\n }\n\n tables[joinedTableAlias] = joinedTableInput\n\n // Create a pipeline for the joined table\n const joinedPipeline = joinedTableInput.pipe(\n map(([currentKey, row]) => {\n // Wrap the row in an object with the table alias as the key\n const namespacedRow: NamespacedRow = { [joinedTableAlias]: row }\n\n // Extract the key from the ON condition right side for the joined table\n const key = extractJoinKey(row, joinedKeyRefs, joinedTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n string,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Apply join with appropriate typings based on join type\n switch (joinType) {\n case `inner`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `left`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `left`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `right`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `right`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `full`:\n pipeline = 
mainPipeline.pipe(\n joinOperator(joinedPipeline, `full`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n default:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n }\n }\n return pipeline\n}\n\n/**\n * Creates a processing pipeline for join results\n */\nexport function processJoinResults(\n mainTableAlias: string,\n joinedTableAlias: string,\n joinClause: { on: any; type: string }\n) {\n return function (\n pipeline: IStreamBuilder<\n [\n key: string,\n [\n [string, NamespacedRow] | undefined,\n [string, NamespacedRow] | undefined,\n ],\n ]\n >\n ): NamespacedAndKeyedStream {\n return pipeline.pipe(\n // Process the join result and handle nulls in the same step\n map((result) => {\n const [_key, [main, joined]] = result\n const mainKey = main?.[0]\n const mainNamespacedRow = main?.[1]\n const joinedKey = joined?.[0]\n const joinedNamespacedRow = joined?.[1]\n\n // For inner joins, both sides should be non-null\n if (joinClause.type === `inner` || joinClause.type === `cross`) {\n if (!mainNamespacedRow || !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n }\n\n // For left joins, the main row must be non-null\n if (joinClause.type === `left` && !mainNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // For right joins, the joined row must be non-null\n if (joinClause.type === `right` && !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // Merge the nested rows\n const mergedNamespacedRow: NamespacedRow = {}\n\n // Add main row data if it exists\n if (mainNamespacedRow) {\n Object.entries(mainNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n }\n\n // If we have a joined row, add it to the merged result\n if (joinedNamespacedRow) {\n Object.entries(joinedNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n } else if (joinClause.type === `left` || joinClause.type === `full`) {\n // For left or full joins, add the joined table with undefined data if missing\n // mergedNamespacedRow[joinedTableAlias] = undefined\n }\n\n // For right or full joins, add the main table with undefined data if missing\n if (\n !mainNamespacedRow &&\n (joinClause.type === `right` || joinClause.type === `full`)\n ) {\n // mergedNamespacedRow[mainTableAlias] = undefined\n }\n\n // New key\n const newKey = `[${mainKey},${joinedKey}]`\n\n return [newKey, mergedNamespacedRow] as [\n string,\n typeof mergedNamespacedRow,\n ]\n }),\n // Filter out undefined results\n filter((value) => value !== undefined),\n // Process the ON condition\n filter(([_key, namespacedRow]: [string, NamespacedRow]) => {\n // If there's no ON condition, or it's a cross join, always return true\n if (!joinClause.on || joinClause.type === `cross`) {\n return true\n }\n\n // For LEFT JOIN, if the right side is null, we should include the row\n if (\n joinClause.type === `left` &&\n namespacedRow[joinedTableAlias] === undefined\n ) {\n return true\n }\n\n // For RIGHT JOIN, if the left side is null, we should include the row\n if (\n joinClause.type === `right` &&\n namespacedRow[mainTableAlias] === undefined\n ) {\n return true\n }\n\n // For FULL JOIN, if either side is null, we should include the row\n if (\n joinClause.type === `full` &&\n (namespacedRow[mainTableAlias] === undefined ||\n 
namespacedRow[joinedTableAlias] === undefined)\n ) {\n return true\n }\n\n return evaluateConditionOnNamespacedRow(\n namespacedRow,\n joinClause.on,\n mainTableAlias,\n joinedTableAlias\n )\n })\n )\n }\n}\n"],"names":["joinOperator"],"mappings":";;;AAmBO,SAAS,kBACd,UACA,OACA,QACA,gBACA,WACA;AACI,MAAA,CAAC,MAAM,KAAa,QAAA;AAClB,QAAA,QAAQ,UAAU,MAAM,IAAI;AAEvB,aAAA,cAAc,MAAM,MAAM;AAE7B,UAAA,mBAAmB,WAAW,MAAM,WAAW;AAGrD,UAAM,WACJ,WAAW,SAAS,UAAU,UAAU,WAAW;AAIrD,UAAM,CAAC,YAAA,EAAc,aAAa,IAAI,WAAW;AAIjD,UAAM,eAAe,SAAS;AAAA,MAC5B,IAAI,CAAC,CAAC,YAAY,aAAa,MAAM;AAE7B,cAAA,UAAU,cAAc,cAAc;AAG5C,cAAM,MAAM,eAAe,SAAS,YAAY,cAAc;AAG9D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGI,QAAA;AAEA,QAAA,UAAU,WAAW,IAAI,GAAG;AAEX,yBAAA,UAAU,WAAW,IAAI;AAAA,IAAA,OACvC;AAGH,yBAAA,MAAO,MAAM,SAA4C;AAAA,IAAA;AAG7D,WAAO,gBAAgB,IAAI;AAG3B,UAAM,iBAAiB,iBAAiB;AAAA,MACtC,IAAI,CAAC,CAAC,YAAY,GAAG,MAAM;AAEzB,cAAM,gBAA+B,EAAE,CAAC,gBAAgB,GAAG,IAAI;AAG/D,cAAM,MAAM,eAAe,KAAK,eAAe,gBAAgB;AAG/D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGA,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,OAAO;AAAA,UACpC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,MAAM;AAAA,UACnC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,OAAO;AAAA,UACpC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,MAAM;AAAA,UACnC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF;AACE,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,OAAO;AAAA,UACpC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AAAA,IAAA;AAAA,EACJ;AAEK,SAAA;AACT;AAKgB,SAAA,mBACd,gBACA,kBACA,YACA;AACA,SAAO,SACL,UAS0B;AAC1B,WAAO,SAAS;AAAA;AAAA,MAEd,IAAI,CAAC,WAAW;AACd,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AACzB,cAAA,UAAU,6BAAO;AACjB,cAAA,oBAAoB,6BAAO;AAC3B,cAAA,YAAY,iCAAS;AACrB,cAAA,sBAAsB,iCAAS;AAGrC,YAAI,WAAW,SAAS,WAAW,WAAW,SAAS,SAAS;AAC1D,cAAA,CAAC,qBAAqB,CAAC,qBAAqB;AACvC,mBAAA;AAAA,UAAA;AAAA,QACT;AAIF,YAAI,WAAW,SAAS,UAAU,CAAC,mBAAmB;AAC7C,iBAAA;AAAA,QAAA;AAIT,YAAI,WAAW,SAAS,WAAW,CAAC,qBAAqB;AAChD,iBAAA;AAAA,QAAA;AAIT,cAAM,sBAAqC,CAAC;AAG5C,YAAI,mBAAmB;AACd,iBAAA,QAAQ,iBAAiB,EAAE;AAAA,YAChC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA;AAIF,YAAI,qBAAqB;AAChB,iBAAA,QAAQ,mBAAmB,EAAE;AAAA,YAClC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA,WACS,WAAW,SAAS,UAAU,WAAW,SAAS,OAAQ;AAMrE,YACE,CAAC,sBACA,WAAW,SAAS,WAAW,WAAW,SAAS,QACpD;AAKF,cAAM,SAAS,IAAI,OAAO,IAAI,SAAS;AAEhC,eAAA,CAAC,QAAQ,mBAAmB;AAAA,MAAA,CAIpC;AAAA;AAAA,MAED,OAAO,CAAC,UAAU,UAAU,MAAS;AAAA;AAAA,MAErC,OAAO,CAAC,CAAC,MAAM,aAAa,MAA+B;AAEzD,YAAI,CAAC,WAAW,MAAM,WAAW,SAAS,SAAS;AAC1C,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,UACpB,cAAc,gBAAgB,MAAM,QACpC;AACO,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,WACpB,cAAc,cAAc,MAAM,QAClC;AACO,iBAAA;AAAA,QAAA;AAKP,YAAA,WAAW,SAAS,WACnB,cAAc,cAAc,MAAM,UACjC,cAAc,gBAAgB,MAAM,SACtC;AACO,iBAAA;AAAA,QAAA;AAGF,eAAA;AAAA,UACL;AAAA,UACA,WAAW;AAAA,UACX;AAAA,UACA;AAAA,QACF;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EACF;AACF;"}
+ {"version":3,"file":"joins.js","sources":["../../../src/query/joins.ts"],"sourcesContent":["import {\n consolidate,\n filter,\n join as joinOperator,\n map,\n} from \"@electric-sql/d2mini\"\nimport { evaluateConditionOnNamespacedRow } from \"./evaluators.js\"\nimport { extractJoinKey } from \"./extractors.js\"\nimport type { Query } from \"./index.js\"\nimport type { IStreamBuilder, JoinType } from \"@electric-sql/d2mini\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types.js\"\n\n/**\n * Creates a processing pipeline for join clauses\n */\nexport function processJoinClause(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>\n) {\n if (!query.join) return pipeline\n const input = allInputs[query.from]\n\n for (const joinClause of query.join) {\n // Create a stream for the joined table\n const joinedTableAlias = joinClause.as || joinClause.from\n\n // Get the right join type for the operator\n const joinType: JoinType =\n joinClause.type === `cross` ? `inner` : joinClause.type\n\n // The `in` is formatted as ['@mainKeyRef', '=', '@joinedKeyRef']\n // Destructure the main key reference and the joined key references\n const [mainKeyRef, , joinedKeyRefs] = joinClause.on\n\n // We need to prepare the main pipeline and the joined pipeline\n // to have the correct key format for joining\n const mainPipeline = pipeline.pipe(\n map(([currentKey, namespacedRow]) => {\n // Extract the key from the ON condition left side for the main table\n const mainRow = namespacedRow[mainTableAlias]!\n\n // Extract the join key from the main row\n const key = extractJoinKey(mainRow, mainKeyRef, mainTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Get the joined table input from the inputs map\n let joinedTableInput: KeyedStream\n\n if (allInputs[joinClause.from]) {\n // Use the provided input if available\n joinedTableInput = allInputs[joinClause.from]!\n } else {\n // Create a new input if not provided\n joinedTableInput =\n input!.graph.newInput<[string, Record<string, unknown>]>()\n }\n\n tables[joinedTableAlias] = joinedTableInput\n\n // Create a pipeline for the joined table\n const joinedPipeline = joinedTableInput.pipe(\n map(([currentKey, row]) => {\n // Wrap the row in an object with the table alias as the key\n const namespacedRow: NamespacedRow = { [joinedTableAlias]: row }\n\n // Extract the key from the ON condition right side for the joined table\n const key = extractJoinKey(row, joinedKeyRefs, joinedTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n string,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Apply join with appropriate typings based on join type\n switch (joinType) {\n case `inner`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `left`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `left`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `right`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `right`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `full`:\n pipeline = 
mainPipeline.pipe(\n joinOperator(joinedPipeline, `full`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n default:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n }\n }\n return pipeline\n}\n\n/**\n * Creates a processing pipeline for join results\n */\nexport function processJoinResults(\n mainTableAlias: string,\n joinedTableAlias: string,\n joinClause: { on: any; type: string }\n) {\n return function (\n pipeline: IStreamBuilder<\n [\n key: string,\n [\n [string, NamespacedRow] | undefined,\n [string, NamespacedRow] | undefined,\n ],\n ]\n >\n ): NamespacedAndKeyedStream {\n return pipeline.pipe(\n // Process the join result and handle nulls in the same step\n map((result) => {\n const [_key, [main, joined]] = result\n const mainKey = main?.[0]\n const mainNamespacedRow = main?.[1]\n const joinedKey = joined?.[0]\n const joinedNamespacedRow = joined?.[1]\n\n // For inner joins, both sides should be non-null\n if (joinClause.type === `inner` || joinClause.type === `cross`) {\n if (!mainNamespacedRow || !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n }\n\n // For left joins, the main row must be non-null\n if (joinClause.type === `left` && !mainNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // For right joins, the joined row must be non-null\n if (joinClause.type === `right` && !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // Merge the nested rows\n const mergedNamespacedRow: NamespacedRow = {}\n\n // Add main row data if it exists\n if (mainNamespacedRow) {\n Object.entries(mainNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n }\n\n // If we have a joined row, add it to the merged result\n if (joinedNamespacedRow) {\n Object.entries(joinedNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n } else if (joinClause.type === `left` || joinClause.type === `full`) {\n // For left or full joins, add the joined table with undefined data if missing\n // mergedNamespacedRow[joinedTableAlias] = undefined\n }\n\n // For right or full joins, add the main table with undefined data if missing\n if (\n !mainNamespacedRow &&\n (joinClause.type === `right` || joinClause.type === `full`)\n ) {\n // mergedNamespacedRow[mainTableAlias] = undefined\n }\n\n // New key\n const newKey = `[${mainKey},${joinedKey}]`\n\n return [newKey, mergedNamespacedRow] as [\n string,\n typeof mergedNamespacedRow,\n ]\n }),\n // Filter out undefined results\n filter((value) => value !== undefined),\n // Process the ON condition\n filter(([_key, namespacedRow]: [string, NamespacedRow]) => {\n // If there's no ON condition, or it's a cross join, always return true\n if (!joinClause.on || joinClause.type === `cross`) {\n return true\n }\n\n // For LEFT JOIN, if the right side is null, we should include the row\n if (\n joinClause.type === `left` &&\n namespacedRow[joinedTableAlias] === undefined\n ) {\n return true\n }\n\n // For RIGHT JOIN, if the left side is null, we should include the row\n if (\n joinClause.type === `right` &&\n namespacedRow[mainTableAlias] === undefined\n ) {\n return true\n }\n\n // For FULL JOIN, if either side is null, we should include the row\n if (\n joinClause.type === `full` &&\n (namespacedRow[mainTableAlias] === undefined ||\n 
namespacedRow[joinedTableAlias] === undefined)\n ) {\n return true\n }\n\n return evaluateConditionOnNamespacedRow(\n namespacedRow,\n joinClause.on,\n mainTableAlias,\n joinedTableAlias\n )\n })\n )\n }\n}\n"],"names":["joinOperator"],"mappings":";;;AAmBO,SAAS,kBACd,UACA,OACA,QACA,gBACA,WACA;AACI,MAAA,CAAC,MAAM,KAAa,QAAA;AAClB,QAAA,QAAQ,UAAU,MAAM,IAAI;AAEvB,aAAA,cAAc,MAAM,MAAM;AAE7B,UAAA,mBAAmB,WAAW,MAAM,WAAW;AAGrD,UAAM,WACJ,WAAW,SAAS,UAAU,UAAU,WAAW;AAIrD,UAAM,CAAC,YAAA,EAAc,aAAa,IAAI,WAAW;AAIjD,UAAM,eAAe,SAAS;AAAA,MAC5B,IAAI,CAAC,CAAC,YAAY,aAAa,MAAM;AAE7B,cAAA,UAAU,cAAc,cAAc;AAG5C,cAAM,MAAM,eAAe,SAAS,YAAY,cAAc;AAG9D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGI,QAAA;AAEA,QAAA,UAAU,WAAW,IAAI,GAAG;AAEX,yBAAA,UAAU,WAAW,IAAI;AAAA,IAAA,OACvC;AAGH,yBAAA,MAAO,MAAM,SAA4C;AAAA,IAAA;AAG7D,WAAO,gBAAgB,IAAI;AAG3B,UAAM,iBAAiB,iBAAiB;AAAA,MACtC,IAAI,CAAC,CAAC,YAAY,GAAG,MAAM;AAEzB,cAAM,gBAA+B,EAAE,CAAC,gBAAgB,GAAG,IAAI;AAG/D,cAAM,MAAM,eAAe,KAAK,eAAe,gBAAgB;AAG/D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGA,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,OAAO;AAAA,UACpC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,MAAM;AAAA,UACnC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,OAAO;AAAA,UACpC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,MAAM;AAAA,UACnC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF;AACE,mBAAW,aAAa;AAAA,UACtBA,KAAa,gBAAgB,OAAO;AAAA,UACpC,YAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AAAA,IAAA;AAAA,EACJ;AAEK,SAAA;AACT;AAKgB,SAAA,mBACd,gBACA,kBACA,YACA;AACA,SAAO,SACL,UAS0B;AAC1B,WAAO,SAAS;AAAA;AAAA,MAEd,IAAI,CAAC,WAAW;AACd,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AACzB,cAAA,UAAU,6BAAO;AACjB,cAAA,oBAAoB,6BAAO;AAC3B,cAAA,YAAY,iCAAS;AACrB,cAAA,sBAAsB,iCAAS;AAGrC,YAAI,WAAW,SAAS,WAAW,WAAW,SAAS,SAAS;AAC1D,cAAA,CAAC,qBAAqB,CAAC,qBAAqB;AACvC,mBAAA;AAAA,UAAA;AAAA,QACT;AAIF,YAAI,WAAW,SAAS,UAAU,CAAC,mBAAmB;AAC7C,iBAAA;AAAA,QAAA;AAIT,YAAI,WAAW,SAAS,WAAW,CAAC,qBAAqB;AAChD,iBAAA;AAAA,QAAA;AAIT,cAAM,sBAAqC,CAAC;AAG5C,YAAI,mBAAmB;AACd,iBAAA,QAAQ,iBAAiB,EAAE;AAAA,YAChC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA;AAIF,YAAI,qBAAqB;AAChB,iBAAA,QAAQ,mBAAmB,EAAE;AAAA,YAClC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA,WACS,WAAW,SAAS,UAAU,WAAW,SAAS,OAAQ;AAMrE,YACE,CAAC,sBACA,WAAW,SAAS,WAAW,WAAW,SAAS,QACpD;AAKF,cAAM,SAAS,IAAI,OAAO,IAAI,SAAS;AAEhC,eAAA,CAAC,QAAQ,mBAAmB;AAAA,MAAA,CAIpC;AAAA;AAAA,MAED,OAAO,CAAC,UAAU,UAAU,MAAS;AAAA;AAAA,MAErC,OAAO,CAAC,CAAC,MAAM,aAAa,MAA+B;AAEzD,YAAI,CAAC,WAAW,MAAM,WAAW,SAAS,SAAS;AAC1C,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,UACpB,cAAc,gBAAgB,MAAM,QACpC;AACO,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,WACpB,cAAc,cAAc,MAAM,QAClC;AACO,iBAAA;AAAA,QAAA;AAKP,YAAA,WAAW,SAAS,WACnB,cAAc,cAAc,MAAM,UACjC,cAAc,gBAAgB,MAAM,SACtC;AACO,iBAAA;AAAA,QAAA;AAGF,eAAA;AAAA,UACL;AAAA,UACA,WAAW;AAAA,UACX;AAAA,UACA;AAAA,QACF;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EACF;AACF;"}
@@ -1,4 +1,4 @@
- import { orderByWithIndex, map, orderByWithFractionalIndex, orderBy } from "@electric-sql/d2ts";
+ import { orderByWithIndex, map, orderByWithFractionalIndex, orderBy } from "@electric-sql/d2mini";
  import { evaluateOperandOnNamespacedRow } from "./extractors.js";
  import { isOrderIndexFunctionCall } from "./utils.js";
  function processOrderBy(resultPipeline, query, mainTableAlias) {
@@ -1 +1 @@
- {"version":3,"file":"order-by.js","sources":["../../../src/query/order-by.ts"],"sourcesContent":["import {\n map,\n orderBy,\n orderByWithFractionalIndex,\n orderByWithIndex,\n} from \"@electric-sql/d2ts\"\nimport { evaluateOperandOnNamespacedRow } from \"./extractors\"\nimport { isOrderIndexFunctionCall } from \"./utils\"\nimport type { ConditionOperand, Query } from \"./schema\"\nimport type {\n KeyedNamespacedRow,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types\"\n\ntype OrderByItem = {\n operand: ConditionOperand\n direction: `asc` | `desc`\n}\n\ntype OrderByItems = Array<OrderByItem>\n\nexport function processOrderBy(\n resultPipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string\n) {\n // Check if any column in the SELECT clause is an ORDER_INDEX function call\n let hasOrderIndexColumn = false\n let orderIndexType: `numeric` | `fractional` = `numeric`\n let orderIndexAlias = ``\n\n // Scan the SELECT clause for ORDER_INDEX functions\n // TODO: Select is going to be optional in future - we will automatically add an\n // attribute for the index column\n for (const item of query.select!) {\n if (typeof item === `object`) {\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `object` && isOrderIndexFunctionCall(expr)) {\n hasOrderIndexColumn = true\n orderIndexAlias = alias\n orderIndexType = getOrderIndexType(expr)\n break\n }\n }\n }\n if (hasOrderIndexColumn) break\n }\n\n // Normalize orderBy to an array of objects\n const orderByItems: OrderByItems = []\n\n if (typeof query.orderBy === `string`) {\n // Handle string format: '@column'\n orderByItems.push({\n operand: query.orderBy,\n direction: `asc`,\n })\n } else if (Array.isArray(query.orderBy)) {\n // Handle array format: ['@column1', { '@column2': 'desc' }]\n for (const item of query.orderBy) {\n if (typeof item === `string`) {\n orderByItems.push({\n operand: item,\n direction: `asc`,\n })\n } else if (typeof item === `object`) {\n for (const [column, direction] of Object.entries(item)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n }\n } else if (typeof query.orderBy === `object`) {\n // Handle object format: { '@column': 'desc' }\n for (const [column, direction] of Object.entries(query.orderBy)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n\n // Create a value extractor function for the orderBy operator\n // const valueExtractor = ([key, namespacedRow]: [\n const valueExtractor = (namespacedRow: NamespacedRow) => {\n // For multiple orderBy columns, create a composite key\n if (orderByItems.length > 1) {\n return orderByItems.map((item) =>\n evaluateOperandOnNamespacedRow(\n namespacedRow,\n item.operand,\n mainTableAlias\n )\n )\n } else if (orderByItems.length === 1) {\n // For a single orderBy column, use the value directly\n const item = orderByItems[0]\n const val = evaluateOperandOnNamespacedRow(\n namespacedRow,\n item!.operand,\n mainTableAlias\n )\n return val\n }\n\n // Default case - no ordering\n return null\n }\n\n const ascComparator = (a: any, b: any): number => {\n // if a and b are both strings, compare them based on locale\n if (typeof a === `string` && typeof b === `string`) {\n return a.localeCompare(b)\n }\n\n // if a and b are both arrays, compare them element by element\n if (Array.isArray(a) && Array.isArray(b)) {\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n // Compare the values\n const result = ascComparator(a[i], 
b[i])\n\n if (result !== 0) {\n return result\n }\n }\n // All elements are equal up to the minimum length\n return a.length - b.length\n }\n\n // If at least one of the values is an object then we don't really know how to meaningfully compare them\n // therefore we turn them into strings and compare those\n // There are 2 exceptions:\n // 1) if both objects are dates then we can compare them\n // 2) if either object is nullish then we can't call toString on it\n const bothObjects = typeof a === `object` && typeof b === `object`\n const bothDates = a instanceof Date && b instanceof Date\n const notNull = a !== null && b !== null\n if (bothObjects && !bothDates && notNull) {\n // Every object should support `toString`\n return a.toString().localeCompare(b.toString())\n }\n\n if (a < b) return -1\n if (a > b) return 1\n return 0\n }\n\n const descComparator = (a: unknown, b: unknown): number => {\n return ascComparator(b, a)\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const makeComparator = (orderByProps: OrderByItems) => {\n return (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByProps.length > 1) {\n // `a` and `b` must be arrays since `orderByItems.length > 1`\n // hence the extracted values must be arrays\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByProps.length; i++) {\n const direction = orderByProps[i]!.direction\n const compareFn =\n direction === `desc` ? descComparator : ascComparator\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n // should normally always be 0 because\n // both values are extracted based on orderByItems\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByProps.length === 1) {\n const direction = orderByProps[0]!.direction\n return direction === `desc` ? 
descComparator(a, b) : ascComparator(a, b)\n }\n\n return ascComparator(a, b)\n }\n }\n const comparator = makeComparator(orderByItems)\n\n // Apply the appropriate orderBy operator based on whether an ORDER_INDEX column is requested\n if (hasOrderIndexColumn) {\n if (orderIndexType === `numeric`) {\n // Use orderByWithIndex for numeric indices\n resultPipeline = resultPipeline.pipe(\n orderByWithIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n } else {\n // Use orderByWithFractionalIndex for fractional indices\n resultPipeline = resultPipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n }\n } else {\n // Use regular orderBy if no index column is requested\n resultPipeline = resultPipeline.pipe(\n orderBy(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n })\n )\n }\n\n return resultPipeline\n}\n\n// Helper function to extract the ORDER_INDEX type from a function call\nfunction getOrderIndexType(obj: any): `numeric` | `fractional` {\n if (!isOrderIndexFunctionCall(obj)) {\n throw new Error(`Not an ORDER_INDEX function call`)\n }\n\n const arg = obj[`ORDER_INDEX`]\n if (arg === `numeric` || arg === true || arg === `default`) {\n return `numeric`\n } else if (arg === `fractional`) {\n return `fractional`\n } else {\n throw new Error(`Invalid ORDER_INDEX type: ` + arg)\n 
}\n}\n"],"names":[],"mappings":";;;AAsBgB,SAAA,eACd,gBACA,OACA,gBACA;AAEA,MAAI,sBAAsB;AAC1B,MAAI,iBAA2C;AAC/C,MAAI,kBAAkB;AAKX,aAAA,QAAQ,MAAM,QAAS;AAC5B,QAAA,OAAO,SAAS,UAAU;AAC5B,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAI,OAAO,SAAS,YAAY,yBAAyB,IAAI,GAAG;AACxC,gCAAA;AACJ,4BAAA;AAClB,2BAAiB,kBAAkB,IAAI;AACvC;AAAA,QAAA;AAAA,MACF;AAAA,IACF;AAEF,QAAI,oBAAqB;AAAA,EAAA;AAI3B,QAAM,eAA6B,CAAC;AAEhC,MAAA,OAAO,MAAM,YAAY,UAAU;AAErC,iBAAa,KAAK;AAAA,MAChB,SAAS,MAAM;AAAA,MACf,WAAW;AAAA,IAAA,CACZ;AAAA,EACQ,WAAA,MAAM,QAAQ,MAAM,OAAO,GAAG;AAE5B,eAAA,QAAQ,MAAM,SAAS;AAC5B,UAAA,OAAO,SAAS,UAAU;AAC5B,qBAAa,KAAK;AAAA,UAChB,SAAS;AAAA,UACT,WAAW;AAAA,QAAA,CACZ;AAAA,MACH,WAAW,OAAO,SAAS,UAAU;AACnC,mBAAW,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,uBAAa,KAAK;AAAA,YAChB,SAAS;AAAA,YACT;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MACH;AAAA,IACF;AAAA,EAEO,WAAA,OAAO,MAAM,YAAY,UAAU;AAEjC,eAAA,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,MAAM,OAAO,GAAG;AAC/D,mBAAa,KAAK;AAAA,QAChB,SAAS;AAAA,QACT;AAAA,MAAA,CACD;AAAA,IAAA;AAAA,EACH;AAKI,QAAA,iBAAiB,CAAC,kBAAiC;AAEnD,QAAA,aAAa,SAAS,GAAG;AAC3B,aAAO,aAAa;AAAA,QAAI,CAAC,SACvB;AAAA,UACE;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAAA,IAAA,WACS,aAAa,WAAW,GAAG;AAE9B,YAAA,OAAO,aAAa,CAAC;AAC3B,YAAM,MAAM;AAAA,QACV;AAAA,QACA,KAAM;AAAA,QACN;AAAA,MACF;AACO,aAAA;AAAA,IAAA;AAIF,WAAA;AAAA,EACT;AAEM,QAAA,gBAAgB,CAAC,GAAQ,MAAmB;AAEhD,QAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAC3C,aAAA,EAAE,cAAc,CAAC;AAAA,IAAA;AAI1B,QAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AAC/B,eAAA,IAAI,GAAG,IAAI,KAAK,IAAI,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK;AAErD,cAAM,SAAS,cAAc,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AAEvC,YAAI,WAAW,GAAG;AACT,iBAAA;AAAA,QAAA;AAAA,MACT;AAGK,aAAA,EAAE,SAAS,EAAE;AAAA,IAAA;AAQtB,UAAM,cAAc,OAAO,MAAM,YAAY,OAAO,MAAM;AACpD,UAAA,YAAY,aAAa,QAAQ,aAAa;AAC9C,UAAA,UAAU,MAAM,QAAQ,MAAM;AAChC,QAAA,eAAe,CAAC,aAAa,SAAS;AAExC,aAAO,EAAE,SAAS,EAAE,cAAc,EAAE,UAAU;AAAA,IAAA;AAG5C,QAAA,IAAI,EAAU,QAAA;AACd,QAAA,IAAI,EAAU,QAAA;AACX,WAAA;AAAA,EACT;AAEM,QAAA,iBAAiB,CAAC,GAAY,MAAuB;AAClD,WAAA,cAAc,GAAG,CAAC;AAAA,EAC3B;AAGM,QAAA,iBAAiB,CAAC,iBAA+B;AAC9C,WAAA,CAAC,GAAY,MAAe;AAE7B,UAAA,aAAa,SAAS,GAAG;AAG3B,cAAM,SAAS;AACf,cAAM,SAAS;AACf,iBAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AACtC,gBAAA,YAAY,aAAa,CAAC,EAAG;AAC7B,gBAAA,YACJ,cAAc,SAAS,iBAAiB;AAC1C,gBAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,cAAI,WAAW,GAAG;AACT,mBAAA;AAAA,UAAA;AAAA,QACT;AAIK,eAAA,OAAO,SAAS,OAAO;AAAA,MAAA;AAI5B,UAAA,aAAa,WAAW,GAAG;AACvB,cAAA,YAAY,aAAa,CAAC,EAAG;AAC5B,eAAA,cAAc,SAAS,eAAe,GAAG,CAAC,IAAI,cAAc,GAAG,CAAC;AAAA,MAAA;AAGlE,aAAA,cAAc,GAAG,CAAC;AAAA,IAC3B;AAAA,EACF;AACM,QAAA,aAAa,eAAe,YAAY;AAG9C,MAAI,qBAAqB;AACvB,QAAI,mBAAmB,WAAW;AAEhC,uBAAiB,eAAe;AAAA,QAC9B,iBAAiB,gBAAgB;AAAA,UAC/B,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACD,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA,OACK;AAEL,uBAAiB,eAAe;AAAA,QAC9B,2BAA2B,gBAAgB;AAAA,UACzC,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACD,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF,OACK;AAEL,qBAAiB,eAAe;AAAA,MAC9B,QAAQ,gBAAgB;AAAA,QACtB,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM;AAAA,QACd;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EAAA;AAGK,SAAA;AACT;AAGA,SAAS,kBAAkB,KAAoC;AACzD,MAAA,CAAC,yBAAyB,GAAG,GAAG;AAC5B,UAAA,IAAI,MAAM,kCAAkC;AAAA,EAAA;AAG9C,
QAAA,MAAM,IAAI,aAAa;AAC7B,MAAI,QAAQ,aAAa,QAAQ,QAAQ,QAAQ,WAAW;AACnD,WAAA;AAAA,EAAA,WACE,QAAQ,cAAc;AACxB,WAAA;AAAA,EAAA,OACF;AACC,UAAA,IAAI,MAAM,+BAA+B,GAAG;AAAA,EAAA;AAEtD;"}
+ {"version":3,"file":"order-by.js","sources":["../../../src/query/order-by.ts"],"sourcesContent":["import {\n map,\n orderBy,\n orderByWithFractionalIndex,\n orderByWithIndex,\n} from \"@electric-sql/d2mini\"\nimport { evaluateOperandOnNamespacedRow } from \"./extractors\"\nimport { isOrderIndexFunctionCall } from \"./utils\"\nimport type { ConditionOperand, Query } from \"./schema\"\nimport type {\n KeyedNamespacedRow,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types\"\n\ntype OrderByItem = {\n operand: ConditionOperand\n direction: `asc` | `desc`\n}\n\ntype OrderByItems = Array<OrderByItem>\n\nexport function processOrderBy(\n resultPipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string\n) {\n // Check if any column in the SELECT clause is an ORDER_INDEX function call\n let hasOrderIndexColumn = false\n let orderIndexType: `numeric` | `fractional` = `numeric`\n let orderIndexAlias = ``\n\n // Scan the SELECT clause for ORDER_INDEX functions\n // TODO: Select is going to be optional in future - we will automatically add an\n // attribute for the index column\n for (const item of query.select!) {\n if (typeof item === `object`) {\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `object` && isOrderIndexFunctionCall(expr)) {\n hasOrderIndexColumn = true\n orderIndexAlias = alias\n orderIndexType = getOrderIndexType(expr)\n break\n }\n }\n }\n if (hasOrderIndexColumn) break\n }\n\n // Normalize orderBy to an array of objects\n const orderByItems: OrderByItems = []\n\n if (typeof query.orderBy === `string`) {\n // Handle string format: '@column'\n orderByItems.push({\n operand: query.orderBy,\n direction: `asc`,\n })\n } else if (Array.isArray(query.orderBy)) {\n // Handle array format: ['@column1', { '@column2': 'desc' }]\n for (const item of query.orderBy) {\n if (typeof item === `string`) {\n orderByItems.push({\n operand: item,\n direction: `asc`,\n })\n } else if (typeof item === `object`) {\n for (const [column, direction] of Object.entries(item)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n }\n } else if (typeof query.orderBy === `object`) {\n // Handle object format: { '@column': 'desc' }\n for (const [column, direction] of Object.entries(query.orderBy)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n\n // Create a value extractor function for the orderBy operator\n // const valueExtractor = ([key, namespacedRow]: [\n const valueExtractor = (namespacedRow: NamespacedRow) => {\n // For multiple orderBy columns, create a composite key\n if (orderByItems.length > 1) {\n return orderByItems.map((item) =>\n evaluateOperandOnNamespacedRow(\n namespacedRow,\n item.operand,\n mainTableAlias\n )\n )\n } else if (orderByItems.length === 1) {\n // For a single orderBy column, use the value directly\n const item = orderByItems[0]\n const val = evaluateOperandOnNamespacedRow(\n namespacedRow,\n item!.operand,\n mainTableAlias\n )\n return val\n }\n\n // Default case - no ordering\n return null\n }\n\n const ascComparator = (a: any, b: any): number => {\n // if a and b are both strings, compare them based on locale\n if (typeof a === `string` && typeof b === `string`) {\n return a.localeCompare(b)\n }\n\n // if a and b are both arrays, compare them element by element\n if (Array.isArray(a) && Array.isArray(b)) {\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n // Compare the values\n const result = ascComparator(a[i], 
b[i])\n\n if (result !== 0) {\n return result\n }\n }\n // All elements are equal up to the minimum length\n return a.length - b.length\n }\n\n // If at least one of the values is an object then we don't really know how to meaningfully compare them\n // therefore we turn them into strings and compare those\n // There are 2 exceptions:\n // 1) if both objects are dates then we can compare them\n // 2) if either object is nullish then we can't call toString on it\n const bothObjects = typeof a === `object` && typeof b === `object`\n const bothDates = a instanceof Date && b instanceof Date\n const notNull = a !== null && b !== null\n if (bothObjects && !bothDates && notNull) {\n // Every object should support `toString`\n return a.toString().localeCompare(b.toString())\n }\n\n if (a < b) return -1\n if (a > b) return 1\n return 0\n }\n\n const descComparator = (a: unknown, b: unknown): number => {\n return ascComparator(b, a)\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const makeComparator = (orderByProps: OrderByItems) => {\n return (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByProps.length > 1) {\n // `a` and `b` must be arrays since `orderByItems.length > 1`\n // hence the extracted values must be arrays\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByProps.length; i++) {\n const direction = orderByProps[i]!.direction\n const compareFn =\n direction === `desc` ? descComparator : ascComparator\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n // should normally always be 0 because\n // both values are extracted based on orderByItems\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByProps.length === 1) {\n const direction = orderByProps[0]!.direction\n return direction === `desc` ? 
descComparator(a, b) : ascComparator(a, b)\n }\n\n return ascComparator(a, b)\n }\n }\n const comparator = makeComparator(orderByItems)\n\n // Apply the appropriate orderBy operator based on whether an ORDER_INDEX column is requested\n if (hasOrderIndexColumn) {\n if (orderIndexType === `numeric`) {\n // Use orderByWithIndex for numeric indices\n resultPipeline = resultPipeline.pipe(\n orderByWithIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n } else {\n // Use orderByWithFractionalIndex for fractional indices\n resultPipeline = resultPipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n }\n } else {\n // Use regular orderBy if no index column is requested\n resultPipeline = resultPipeline.pipe(\n orderBy(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n })\n )\n }\n\n return resultPipeline\n}\n\n// Helper function to extract the ORDER_INDEX type from a function call\nfunction getOrderIndexType(obj: any): `numeric` | `fractional` {\n if (!isOrderIndexFunctionCall(obj)) {\n throw new Error(`Not an ORDER_INDEX function call`)\n }\n\n const arg = obj[`ORDER_INDEX`]\n if (arg === `numeric` || arg === true || arg === `default`) {\n return `numeric`\n } else if (arg === `fractional`) {\n return `fractional`\n } else {\n throw new Error(`Invalid ORDER_INDEX type: ` + arg)\n 
}\n}\n"],"names":[],"mappings":";;;AAsBgB,SAAA,eACd,gBACA,OACA,gBACA;AAEA,MAAI,sBAAsB;AAC1B,MAAI,iBAA2C;AAC/C,MAAI,kBAAkB;AAKX,aAAA,QAAQ,MAAM,QAAS;AAC5B,QAAA,OAAO,SAAS,UAAU;AAC5B,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAI,OAAO,SAAS,YAAY,yBAAyB,IAAI,GAAG;AACxC,gCAAA;AACJ,4BAAA;AAClB,2BAAiB,kBAAkB,IAAI;AACvC;AAAA,QAAA;AAAA,MACF;AAAA,IACF;AAEF,QAAI,oBAAqB;AAAA,EAAA;AAI3B,QAAM,eAA6B,CAAC;AAEhC,MAAA,OAAO,MAAM,YAAY,UAAU;AAErC,iBAAa,KAAK;AAAA,MAChB,SAAS,MAAM;AAAA,MACf,WAAW;AAAA,IAAA,CACZ;AAAA,EACQ,WAAA,MAAM,QAAQ,MAAM,OAAO,GAAG;AAE5B,eAAA,QAAQ,MAAM,SAAS;AAC5B,UAAA,OAAO,SAAS,UAAU;AAC5B,qBAAa,KAAK;AAAA,UAChB,SAAS;AAAA,UACT,WAAW;AAAA,QAAA,CACZ;AAAA,MACH,WAAW,OAAO,SAAS,UAAU;AACnC,mBAAW,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,uBAAa,KAAK;AAAA,YAChB,SAAS;AAAA,YACT;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MACH;AAAA,IACF;AAAA,EAEO,WAAA,OAAO,MAAM,YAAY,UAAU;AAEjC,eAAA,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,MAAM,OAAO,GAAG;AAC/D,mBAAa,KAAK;AAAA,QAChB,SAAS;AAAA,QACT;AAAA,MAAA,CACD;AAAA,IAAA;AAAA,EACH;AAKI,QAAA,iBAAiB,CAAC,kBAAiC;AAEnD,QAAA,aAAa,SAAS,GAAG;AAC3B,aAAO,aAAa;AAAA,QAAI,CAAC,SACvB;AAAA,UACE;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAAA,IAAA,WACS,aAAa,WAAW,GAAG;AAE9B,YAAA,OAAO,aAAa,CAAC;AAC3B,YAAM,MAAM;AAAA,QACV;AAAA,QACA,KAAM;AAAA,QACN;AAAA,MACF;AACO,aAAA;AAAA,IAAA;AAIF,WAAA;AAAA,EACT;AAEM,QAAA,gBAAgB,CAAC,GAAQ,MAAmB;AAEhD,QAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAC3C,aAAA,EAAE,cAAc,CAAC;AAAA,IAAA;AAI1B,QAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AAC/B,eAAA,IAAI,GAAG,IAAI,KAAK,IAAI,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK;AAErD,cAAM,SAAS,cAAc,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AAEvC,YAAI,WAAW,GAAG;AACT,iBAAA;AAAA,QAAA;AAAA,MACT;AAGK,aAAA,EAAE,SAAS,EAAE;AAAA,IAAA;AAQtB,UAAM,cAAc,OAAO,MAAM,YAAY,OAAO,MAAM;AACpD,UAAA,YAAY,aAAa,QAAQ,aAAa;AAC9C,UAAA,UAAU,MAAM,QAAQ,MAAM;AAChC,QAAA,eAAe,CAAC,aAAa,SAAS;AAExC,aAAO,EAAE,SAAS,EAAE,cAAc,EAAE,UAAU;AAAA,IAAA;AAG5C,QAAA,IAAI,EAAU,QAAA;AACd,QAAA,IAAI,EAAU,QAAA;AACX,WAAA;AAAA,EACT;AAEM,QAAA,iBAAiB,CAAC,GAAY,MAAuB;AAClD,WAAA,cAAc,GAAG,CAAC;AAAA,EAC3B;AAGM,QAAA,iBAAiB,CAAC,iBAA+B;AAC9C,WAAA,CAAC,GAAY,MAAe;AAE7B,UAAA,aAAa,SAAS,GAAG;AAG3B,cAAM,SAAS;AACf,cAAM,SAAS;AACf,iBAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AACtC,gBAAA,YAAY,aAAa,CAAC,EAAG;AAC7B,gBAAA,YACJ,cAAc,SAAS,iBAAiB;AAC1C,gBAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,cAAI,WAAW,GAAG;AACT,mBAAA;AAAA,UAAA;AAAA,QACT;AAIK,eAAA,OAAO,SAAS,OAAO;AAAA,MAAA;AAI5B,UAAA,aAAa,WAAW,GAAG;AACvB,cAAA,YAAY,aAAa,CAAC,EAAG;AAC5B,eAAA,cAAc,SAAS,eAAe,GAAG,CAAC,IAAI,cAAc,GAAG,CAAC;AAAA,MAAA;AAGlE,aAAA,cAAc,GAAG,CAAC;AAAA,IAC3B;AAAA,EACF;AACM,QAAA,aAAa,eAAe,YAAY;AAG9C,MAAI,qBAAqB;AACvB,QAAI,mBAAmB,WAAW;AAEhC,uBAAiB,eAAe;AAAA,QAC9B,iBAAiB,gBAAgB;AAAA,UAC/B,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACD,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA,OACK;AAEL,uBAAiB,eAAe;AAAA,QAC9B,2BAA2B,gBAAgB;AAAA,UACzC,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACD,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF,OACK;AAEL,qBAAiB,eAAe;AAAA,MAC9B,QAAQ,gBAAgB;AAAA,QACtB,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM;AAAA,QACd;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EAAA;AAGK,SAAA;AACT;AAGA,SAAS,kBAAkB,KAAoC;AACzD,MAAA,CAAC,yBAAyB,GAAG,GAAG;AAC5B,UAAA,IAAI,MAAM,kCAAkC;AAAA,EAAA;AAG9C,
QAAA,MAAM,IAAI,aAAa;AAC7B,MAAI,QAAQ,aAAa,QAAQ,QAAQ,QAAQ,WAAW;AACnD,WAAA;AAAA,EAAA,WACE,QAAQ,cAAc;AACxB,WAAA;AAAA,EAAA,OACF;AACC,UAAA,IAAI,MAAM,+BAA+B,GAAG;AAAA,EAAA;AAEtD;"}
@@ -1,5 +1,5 @@
  import { Query } from './schema.js';
- import { IStreamBuilder } from '@electric-sql/d2ts';
+ import { IStreamBuilder } from '@electric-sql/d2mini';
  import { KeyedStream } from '../types.js';
  /**
  * Compiles a query into a D2 pipeline
@@ -1,4 +1,4 @@
- import { map, filter } from "@electric-sql/d2ts";
+ import { map, filter } from "@electric-sql/d2mini";
  import { evaluateWhereOnNamespacedRow } from "./evaluators.js";
  import { processJoinClause } from "./joins.js";
  import { processGroupBy } from "./group-by.js";
@@ -1 +1 @@
- {"version":3,"file":"pipeline-compiler.js","sources":["../../../src/query/pipeline-compiler.ts"],"sourcesContent":["import { filter, map } from \"@electric-sql/d2ts\"\nimport { evaluateWhereOnNamespacedRow } from \"./evaluators.js\"\nimport { processJoinClause } from \"./joins.js\"\nimport { processGroupBy } from \"./group-by.js\"\nimport { processOrderBy } from \"./order-by.js\"\nimport { processSelect } from \"./select.js\"\nimport type { Query } from \"./schema.js\"\nimport type { IStreamBuilder } from \"@electric-sql/d2ts\"\nimport type {\n InputRow,\n KeyedStream,\n NamespacedAndKeyedStream,\n} from \"../types.js\"\n\n/**\n * Compiles a query into a D2 pipeline\n * @param query The query to compile\n * @param inputs Mapping of table names to input streams\n * @returns A stream builder representing the compiled query\n */\nexport function compileQueryPipeline<T extends IStreamBuilder<unknown>>(\n query: Query,\n inputs: Record<string, KeyedStream>\n): T {\n // Create a copy of the inputs map to avoid modifying the original\n const allInputs = { ...inputs }\n\n // Process WITH queries if they exist\n if (query.with && query.with.length > 0) {\n // Process each WITH query in order\n for (const withQuery of query.with) {\n // Ensure the WITH query has an alias\n if (!withQuery.as) {\n throw new Error(`WITH query must have an \"as\" property`)\n }\n\n // Check if this CTE name already exists in the inputs\n if (allInputs[withQuery.as]) {\n throw new Error(`CTE with name \"${withQuery.as}\" already exists`)\n }\n\n // Create a new query without the 'with' property to avoid circular references\n const withQueryWithoutWith = { ...withQuery, with: undefined }\n\n // Compile the WITH query using the current set of inputs\n // (which includes previously compiled WITH queries)\n const compiledWithQuery = compileQueryPipeline(\n withQueryWithoutWith,\n allInputs\n )\n\n // Add the compiled query to the inputs map using its alias\n allInputs[withQuery.as] = compiledWithQuery as KeyedStream\n }\n }\n\n // Create a map of table aliases to inputs\n const tables: Record<string, KeyedStream> = {}\n\n // The main table is the one in the FROM clause\n const mainTableAlias = query.as || query.from\n\n // Get the main input from the inputs map (now including CTEs)\n const input = allInputs[query.from]\n if (!input) {\n throw new Error(`Input for table \"${query.from}\" not found in inputs map`)\n }\n\n tables[mainTableAlias] = input\n\n // Prepare the initial pipeline with the main table wrapped in its alias\n let pipeline: NamespacedAndKeyedStream = input.pipe(\n map(([key, row]) => {\n // Initialize the record with a nested structure\n const ret = [key, { [mainTableAlias]: row }] as [\n string,\n Record<string, typeof row>,\n ]\n return ret\n })\n )\n\n // Process JOIN clauses if they exist\n if (query.join) {\n pipeline = processJoinClause(\n pipeline,\n query,\n tables,\n mainTableAlias,\n allInputs\n )\n }\n\n // Process the WHERE clause if it exists\n if (query.where) {\n pipeline = pipeline.pipe(\n filter(([_key, row]) => {\n const result = evaluateWhereOnNamespacedRow(\n row,\n query.where!,\n mainTableAlias\n )\n return result\n })\n )\n }\n\n // Process the GROUP BY clause if it exists\n if (query.groupBy) {\n pipeline = processGroupBy(pipeline, query, mainTableAlias)\n }\n\n // Process the HAVING clause if it exists\n // This works similarly to WHERE but is applied after any aggregations\n if (query.having) {\n pipeline = pipeline.pipe(\n filter(([_key, row]) => {\n // For HAVING, we're working 
with the flattened row that contains both\n // the group by keys and the aggregate results directly\n const result = evaluateWhereOnNamespacedRow(\n row,\n query.having!,\n mainTableAlias\n )\n return result\n })\n )\n }\n\n // Process orderBy parameter if it exists\n if (query.orderBy) {\n pipeline = processOrderBy(pipeline, query, mainTableAlias)\n } else if (query.limit !== undefined || query.offset !== undefined) {\n // If there's a limit or offset without orderBy, throw an error\n throw new Error(\n `LIMIT and OFFSET require an ORDER BY clause to ensure deterministic results`\n )\n }\n\n // Process the SELECT clause - this is where we flatten the structure\n const resultPipeline: KeyedStream | NamespacedAndKeyedStream = query.select\n ? processSelect(pipeline, query, mainTableAlias, allInputs)\n : !query.join && !query.groupBy\n ? pipeline.pipe(\n map(([key, row]) => [key, row[mainTableAlias]] as InputRow)\n )\n : pipeline\n return resultPipeline as T\n}\n"],"names":[],"mappings":";;;;;;AAoBgB,SAAA,qBACd,OACA,QACG;AAEG,QAAA,YAAY,EAAE,GAAG,OAAO;AAG9B,MAAI,MAAM,QAAQ,MAAM,KAAK,SAAS,GAAG;AAE5B,eAAA,aAAa,MAAM,MAAM;AAE9B,UAAA,CAAC,UAAU,IAAI;AACX,cAAA,IAAI,MAAM,uCAAuC;AAAA,MAAA;AAIrD,UAAA,UAAU,UAAU,EAAE,GAAG;AAC3B,cAAM,IAAI,MAAM,kBAAkB,UAAU,EAAE,kBAAkB;AAAA,MAAA;AAIlE,YAAM,uBAAuB,EAAE,GAAG,WAAW,MAAM,OAAU;AAI7D,YAAM,oBAAoB;AAAA,QACxB;AAAA,QACA;AAAA,MACF;AAGU,gBAAA,UAAU,EAAE,IAAI;AAAA,IAAA;AAAA,EAC5B;AAIF,QAAM,SAAsC,CAAC;AAGvC,QAAA,iBAAiB,MAAM,MAAM,MAAM;AAGnC,QAAA,QAAQ,UAAU,MAAM,IAAI;AAClC,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,oBAAoB,MAAM,IAAI,2BAA2B;AAAA,EAAA;AAG3E,SAAO,cAAc,IAAI;AAGzB,MAAI,WAAqC,MAAM;AAAA,IAC7C,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAEZ,YAAA,MAAM,CAAC,KAAK,EAAE,CAAC,cAAc,GAAG,KAAK;AAIpC,aAAA;AAAA,IACR,CAAA;AAAA,EACH;AAGA,MAAI,MAAM,MAAM;AACH,eAAA;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EAAA;AAIF,MAAI,MAAM,OAAO;AACf,eAAW,SAAS;AAAA,MAClB,OAAO,CAAC,CAAC,MAAM,GAAG,MAAM;AACtB,cAAM,SAAS;AAAA,UACb;AAAA,UACA,MAAM;AAAA,UACN;AAAA,QACF;AACO,eAAA;AAAA,MACR,CAAA;AAAA,IACH;AAAA,EAAA;AAIF,MAAI,MAAM,SAAS;AACN,eAAA,eAAe,UAAU,OAAO,cAAc;AAAA,EAAA;AAK3D,MAAI,MAAM,QAAQ;AAChB,eAAW,SAAS;AAAA,MAClB,OAAO,CAAC,CAAC,MAAM,GAAG,MAAM;AAGtB,cAAM,SAAS;AAAA,UACb;AAAA,UACA,MAAM;AAAA,UACN;AAAA,QACF;AACO,eAAA;AAAA,MACR,CAAA;AAAA,IACH;AAAA,EAAA;AAIF,MAAI,MAAM,SAAS;AACN,eAAA,eAAe,UAAU,OAAO,cAAc;AAAA,EAAA,WAChD,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAElE,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EAAA;AAIF,QAAM,iBAAyD,MAAM,SACjE,cAAc,UAAU,OAAO,gBAAgB,SAAS,IACxD,CAAC,MAAM,QAAQ,CAAC,MAAM,UACpB,SAAS;AAAA,IACP,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,cAAc,CAAC,CAAa;AAAA,EAAA,IAE5D;AACC,SAAA;AACT;"}
+ {"version":3,"file":"pipeline-compiler.js","sources":["../../../src/query/pipeline-compiler.ts"],"sourcesContent":["import { filter, map } from \"@electric-sql/d2mini\"\nimport { evaluateWhereOnNamespacedRow } from \"./evaluators.js\"\nimport { processJoinClause } from \"./joins.js\"\nimport { processGroupBy } from \"./group-by.js\"\nimport { processOrderBy } from \"./order-by.js\"\nimport { processSelect } from \"./select.js\"\nimport type { Query } from \"./schema.js\"\nimport type { IStreamBuilder } from \"@electric-sql/d2mini\"\nimport type {\n InputRow,\n KeyedStream,\n NamespacedAndKeyedStream,\n} from \"../types.js\"\n\n/**\n * Compiles a query into a D2 pipeline\n * @param query The query to compile\n * @param inputs Mapping of table names to input streams\n * @returns A stream builder representing the compiled query\n */\nexport function compileQueryPipeline<T extends IStreamBuilder<unknown>>(\n query: Query,\n inputs: Record<string, KeyedStream>\n): T {\n // Create a copy of the inputs map to avoid modifying the original\n const allInputs = { ...inputs }\n\n // Process WITH queries if they exist\n if (query.with && query.with.length > 0) {\n // Process each WITH query in order\n for (const withQuery of query.with) {\n // Ensure the WITH query has an alias\n if (!withQuery.as) {\n throw new Error(`WITH query must have an \"as\" property`)\n }\n\n // Check if this CTE name already exists in the inputs\n if (allInputs[withQuery.as]) {\n throw new Error(`CTE with name \"${withQuery.as}\" already exists`)\n }\n\n // Create a new query without the 'with' property to avoid circular references\n const withQueryWithoutWith = { ...withQuery, with: undefined }\n\n // Compile the WITH query using the current set of inputs\n // (which includes previously compiled WITH queries)\n const compiledWithQuery = compileQueryPipeline(\n withQueryWithoutWith,\n allInputs\n )\n\n // Add the compiled query to the inputs map using its alias\n allInputs[withQuery.as] = compiledWithQuery as KeyedStream\n }\n }\n\n // Create a map of table aliases to inputs\n const tables: Record<string, KeyedStream> = {}\n\n // The main table is the one in the FROM clause\n const mainTableAlias = query.as || query.from\n\n // Get the main input from the inputs map (now including CTEs)\n const input = allInputs[query.from]\n if (!input) {\n throw new Error(`Input for table \"${query.from}\" not found in inputs map`)\n }\n\n tables[mainTableAlias] = input\n\n // Prepare the initial pipeline with the main table wrapped in its alias\n let pipeline: NamespacedAndKeyedStream = input.pipe(\n map(([key, row]) => {\n // Initialize the record with a nested structure\n const ret = [key, { [mainTableAlias]: row }] as [\n string,\n Record<string, typeof row>,\n ]\n return ret\n })\n )\n\n // Process JOIN clauses if they exist\n if (query.join) {\n pipeline = processJoinClause(\n pipeline,\n query,\n tables,\n mainTableAlias,\n allInputs\n )\n }\n\n // Process the WHERE clause if it exists\n if (query.where) {\n pipeline = pipeline.pipe(\n filter(([_key, row]) => {\n const result = evaluateWhereOnNamespacedRow(\n row,\n query.where!,\n mainTableAlias\n )\n return result\n })\n )\n }\n\n // Process the GROUP BY clause if it exists\n if (query.groupBy) {\n pipeline = processGroupBy(pipeline, query, mainTableAlias)\n }\n\n // Process the HAVING clause if it exists\n // This works similarly to WHERE but is applied after any aggregations\n if (query.having) {\n pipeline = pipeline.pipe(\n filter(([_key, row]) => {\n // For HAVING, we're 
working with the flattened row that contains both\n // the group by keys and the aggregate results directly\n const result = evaluateWhereOnNamespacedRow(\n row,\n query.having!,\n mainTableAlias\n )\n return result\n })\n )\n }\n\n // Process orderBy parameter if it exists\n if (query.orderBy) {\n pipeline = processOrderBy(pipeline, query, mainTableAlias)\n } else if (query.limit !== undefined || query.offset !== undefined) {\n // If there's a limit or offset without orderBy, throw an error\n throw new Error(\n `LIMIT and OFFSET require an ORDER BY clause to ensure deterministic results`\n )\n }\n\n // Process the SELECT clause - this is where we flatten the structure\n const resultPipeline: KeyedStream | NamespacedAndKeyedStream = query.select\n ? processSelect(pipeline, query, mainTableAlias, allInputs)\n : !query.join && !query.groupBy\n ? pipeline.pipe(\n map(([key, row]) => [key, row[mainTableAlias]] as InputRow)\n )\n : pipeline\n return resultPipeline as T\n}\n"],"names":[],"mappings":";;;;;;AAoBgB,SAAA,qBACd,OACA,QACG;AAEG,QAAA,YAAY,EAAE,GAAG,OAAO;AAG9B,MAAI,MAAM,QAAQ,MAAM,KAAK,SAAS,GAAG;AAE5B,eAAA,aAAa,MAAM,MAAM;AAE9B,UAAA,CAAC,UAAU,IAAI;AACX,cAAA,IAAI,MAAM,uCAAuC;AAAA,MAAA;AAIrD,UAAA,UAAU,UAAU,EAAE,GAAG;AAC3B,cAAM,IAAI,MAAM,kBAAkB,UAAU,EAAE,kBAAkB;AAAA,MAAA;AAIlE,YAAM,uBAAuB,EAAE,GAAG,WAAW,MAAM,OAAU;AAI7D,YAAM,oBAAoB;AAAA,QACxB;AAAA,QACA;AAAA,MACF;AAGU,gBAAA,UAAU,EAAE,IAAI;AAAA,IAAA;AAAA,EAC5B;AAIF,QAAM,SAAsC,CAAC;AAGvC,QAAA,iBAAiB,MAAM,MAAM,MAAM;AAGnC,QAAA,QAAQ,UAAU,MAAM,IAAI;AAClC,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,oBAAoB,MAAM,IAAI,2BAA2B;AAAA,EAAA;AAG3E,SAAO,cAAc,IAAI;AAGzB,MAAI,WAAqC,MAAM;AAAA,IAC7C,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM;AAEZ,YAAA,MAAM,CAAC,KAAK,EAAE,CAAC,cAAc,GAAG,KAAK;AAIpC,aAAA;AAAA,IACR,CAAA;AAAA,EACH;AAGA,MAAI,MAAM,MAAM;AACH,eAAA;AAAA,MACT;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EAAA;AAIF,MAAI,MAAM,OAAO;AACf,eAAW,SAAS;AAAA,MAClB,OAAO,CAAC,CAAC,MAAM,GAAG,MAAM;AACtB,cAAM,SAAS;AAAA,UACb;AAAA,UACA,MAAM;AAAA,UACN;AAAA,QACF;AACO,eAAA;AAAA,MACR,CAAA;AAAA,IACH;AAAA,EAAA;AAIF,MAAI,MAAM,SAAS;AACN,eAAA,eAAe,UAAU,OAAO,cAAc;AAAA,EAAA;AAK3D,MAAI,MAAM,QAAQ;AAChB,eAAW,SAAS;AAAA,MAClB,OAAO,CAAC,CAAC,MAAM,GAAG,MAAM;AAGtB,cAAM,SAAS;AAAA,UACb;AAAA,UACA,MAAM;AAAA,UACN;AAAA,QACF;AACO,eAAA;AAAA,MACR,CAAA;AAAA,IACH;AAAA,EAAA;AAIF,MAAI,MAAM,SAAS;AACN,eAAA,eAAe,UAAU,OAAO,cAAc;AAAA,EAAA,WAChD,MAAM,UAAU,UAAa,MAAM,WAAW,QAAW;AAElE,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EAAA;AAIF,QAAM,iBAAyD,MAAM,SACjE,cAAc,UAAU,OAAO,gBAAgB,SAAS,IACxD,CAAC,MAAM,QAAQ,CAAC,MAAM,UACpB,SAAS;AAAA,IACP,IAAI,CAAC,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,cAAc,CAAC,CAAa;AAAA,EAAA,IAE5D;AACC,SAAA;AACT;"}
@@ -1,4 +1,4 @@
- import { map } from "@electric-sql/d2ts";
+ import { map } from "@electric-sql/d2mini";
  import { extractValueFromNamespacedRow, evaluateOperandOnNamespacedRow } from "./extractors.js";
  function processSelect(pipeline, query, mainTableAlias, inputs) {
  return pipeline.pipe(
@@ -1 +1 @@
- {"version":3,"file":"select.js","sources":["../../../src/query/select.ts"],"sourcesContent":["import { map } from \"@electric-sql/d2ts\"\nimport {\n evaluateOperandOnNamespacedRow,\n extractValueFromNamespacedRow,\n} from \"./extractors\"\nimport type { ConditionOperand, Query, SelectCallback } from \"./schema\"\nimport type { KeyedStream, NamespacedAndKeyedStream } from \"../types\"\n\nexport function processSelect(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string,\n inputs: Record<string, KeyedStream>\n): KeyedStream {\n return pipeline.pipe(\n map(([key, namespacedRow]) => {\n const result: Record<string, unknown> = {}\n\n // Check if this is a grouped result (has no nested table structure)\n // If it's a grouped result, we need to handle it differently\n const isGroupedResult =\n query.groupBy &&\n Object.keys(namespacedRow).some(\n (namespaceKey) =>\n !Object.keys(inputs).includes(namespaceKey) &&\n typeof namespacedRow[namespaceKey] !== `object`\n )\n\n if (!query.select) {\n throw new Error(`Cannot process missing SELECT clause`)\n }\n\n for (const item of query.select) {\n // Handle callback functions\n if (typeof item === `function`) {\n const callback = item as SelectCallback\n const callbackResult = callback(namespacedRow)\n\n // If the callback returns an object, merge its properties into the result\n if (\n callbackResult &&\n typeof callbackResult === `object` &&\n !Array.isArray(callbackResult)\n ) {\n Object.assign(result, callbackResult)\n } else {\n // If the callback returns a primitive value, we can't merge it\n // This would need a specific key, but since we don't have one, we'll skip it\n // In practice, select callbacks should return objects with keys\n console.warn(\n `SelectCallback returned a non-object value. SelectCallbacks should return objects with key-value pairs.`\n )\n }\n continue\n }\n\n if (typeof item === `string`) {\n // Handle wildcard select - all columns from all tables\n if ((item as string) === `@*`) {\n // For grouped results, just return the row as is\n if (isGroupedResult) {\n Object.assign(result, namespacedRow)\n } else {\n // Extract all columns from all tables\n Object.assign(\n result,\n extractAllColumnsFromAllTables(namespacedRow)\n )\n }\n continue\n }\n\n // Handle @table.* syntax - all columns from a specific table\n if (\n (item as string).startsWith(`@`) &&\n (item as string).endsWith(`.*`)\n ) {\n const tableAlias = (item as string).slice(1, -2) // Remove the '@' and '.*' parts\n\n // For grouped results, check if we have columns from this table\n if (isGroupedResult) {\n // In grouped results, we don't have the nested structure anymore\n // So we can't extract by table. 
Just continue to the next item.\n continue\n } else {\n // Extract all columns from the specified table\n Object.assign(\n result,\n extractAllColumnsFromTable(namespacedRow, tableAlias)\n )\n }\n continue\n }\n\n // Handle simple column references like \"@table.column\" or \"@column\"\n if ((item as string).startsWith(`@`)) {\n const columnRef = (item as string).substring(1)\n const alias = columnRef\n\n // For grouped results, check if the column is directly in the row first\n if (isGroupedResult && columnRef in namespacedRow) {\n result[alias] = namespacedRow[columnRef]\n } else {\n // Extract the value from the nested structure\n result[alias] = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef,\n mainTableAlias,\n undefined\n )\n }\n\n // If the alias contains a dot (table.column),\n // use just the column part as the field name\n if (alias.includes(`.`)) {\n const columnName = alias.split(`.`)[1]\n result[columnName!] = result[alias]\n delete result[alias]\n }\n }\n } else {\n // Handle aliased columns like { alias: \"@column_name\" }\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `string` && (expr as string).startsWith(`@`)) {\n const columnRef = (expr as string).substring(1)\n\n // For grouped results, check if the column is directly in the row first\n if (isGroupedResult && columnRef in namespacedRow) {\n result[alias] = namespacedRow[columnRef]\n } else {\n // Extract the value from the nested structure\n result[alias] = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef,\n mainTableAlias,\n undefined\n )\n }\n } else if (typeof expr === `object`) {\n // For grouped results, the aggregate results are already in the row\n if (isGroupedResult && alias in namespacedRow) {\n result[alias] = namespacedRow[alias]\n } else if ((expr as { ORDER_INDEX: unknown }).ORDER_INDEX) {\n result[alias] = namespacedRow[mainTableAlias]![alias]\n } else {\n // This might be a function call\n result[alias] = evaluateOperandOnNamespacedRow(\n namespacedRow,\n expr as ConditionOperand,\n mainTableAlias,\n undefined\n )\n }\n }\n }\n }\n }\n\n return [key, result] as [string, typeof result]\n })\n )\n}\n\n// Helper function to extract all columns from all tables in a nested row\nfunction extractAllColumnsFromAllTables(\n namespacedRow: Record<string, unknown>\n): Record<string, unknown> {\n const result: Record<string, unknown> = {}\n\n // Process each table in the nested row\n for (const [tableAlias, tableData] of Object.entries(namespacedRow)) {\n if (tableData && typeof tableData === `object`) {\n // Add all columns from this table to the result\n // If there are column name conflicts, the last table's columns will overwrite previous ones\n Object.assign(\n result,\n extractAllColumnsFromTable(namespacedRow, tableAlias)\n )\n }\n }\n\n return result\n}\n\n// Helper function to extract all columns from a table in a nested row\nfunction extractAllColumnsFromTable(\n namespacedRow: Record<string, unknown>,\n tableAlias: string\n): Record<string, unknown> {\n const result: Record<string, unknown> = {}\n\n // Get the table data\n const tableData = namespacedRow[tableAlias] as\n | Record<string, unknown>\n | null\n | undefined\n\n if (!tableData || typeof tableData !== `object`) {\n return result\n }\n\n // Add all columns from the table to the result\n for (const [columnName, value] of Object.entries(tableData)) {\n result[columnName] = value\n }\n\n return 
result\n}\n"],"names":[],"mappings":";;AAQO,SAAS,cACd,UACA,OACA,gBACA,QACa;AACb,SAAO,SAAS;AAAA,IACd,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,YAAM,SAAkC,CAAC;AAIzC,YAAM,kBACJ,MAAM,WACN,OAAO,KAAK,aAAa,EAAE;AAAA,QACzB,CAAC,iBACC,CAAC,OAAO,KAAK,MAAM,EAAE,SAAS,YAAY,KAC1C,OAAO,cAAc,YAAY,MAAM;AAAA,MAC3C;AAEE,UAAA,CAAC,MAAM,QAAQ;AACX,cAAA,IAAI,MAAM,sCAAsC;AAAA,MAAA;AAG7C,iBAAA,QAAQ,MAAM,QAAQ;AAE3B,YAAA,OAAO,SAAS,YAAY;AAC9B,gBAAM,WAAW;AACX,gBAAA,iBAAiB,SAAS,aAAa;AAI3C,cAAA,kBACA,OAAO,mBAAmB,YAC1B,CAAC,MAAM,QAAQ,cAAc,GAC7B;AACO,mBAAA,OAAO,QAAQ,cAAc;AAAA,UAAA,OAC/B;AAIG,oBAAA;AAAA,cACN;AAAA,YACF;AAAA,UAAA;AAEF;AAAA,QAAA;AAGE,YAAA,OAAO,SAAS,UAAU;AAE5B,cAAK,SAAoB,MAAM;AAE7B,gBAAI,iBAAiB;AACZ,qBAAA,OAAO,QAAQ,aAAa;AAAA,YAAA,OAC9B;AAEE,qBAAA;AAAA,gBACL;AAAA,gBACA,+BAA+B,aAAa;AAAA,cAC9C;AAAA,YAAA;AAEF;AAAA,UAAA;AAIF,cACG,KAAgB,WAAW,GAAG,KAC9B,KAAgB,SAAS,IAAI,GAC9B;AACA,kBAAM,aAAc,KAAgB,MAAM,GAAG,EAAE;AAG/C,gBAAI,iBAAiB;AAGnB;AAAA,YAAA,OACK;AAEE,qBAAA;AAAA,gBACL;AAAA,gBACA,2BAA2B,eAAe,UAAU;AAAA,cACtD;AAAA,YAAA;AAEF;AAAA,UAAA;AAIG,cAAA,KAAgB,WAAW,GAAG,GAAG;AAC9B,kBAAA,YAAa,KAAgB,UAAU,CAAC;AAC9C,kBAAM,QAAQ;AAGV,gBAAA,mBAAmB,aAAa,eAAe;AAC1C,qBAAA,KAAK,IAAI,cAAc,SAAS;AAAA,YAAA,OAClC;AAEL,qBAAO,KAAK,IAAI;AAAA,gBACd;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,YAAA;AAKE,gBAAA,MAAM,SAAS,GAAG,GAAG;AACvB,oBAAM,aAAa,MAAM,MAAM,GAAG,EAAE,CAAC;AAC9B,qBAAA,UAAW,IAAI,OAAO,KAAK;AAClC,qBAAO,OAAO,KAAK;AAAA,YAAA;AAAA,UACrB;AAAA,QACF,OACK;AAEL,qBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,gBAAI,OAAO,SAAS,YAAa,KAAgB,WAAW,GAAG,GAAG;AAC1D,oBAAA,YAAa,KAAgB,UAAU,CAAC;AAG1C,kBAAA,mBAAmB,aAAa,eAAe;AAC1C,uBAAA,KAAK,IAAI,cAAc,SAAS;AAAA,cAAA,OAClC;AAEL,uBAAO,KAAK,IAAI;AAAA,kBACd;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AAAA,cAAA;AAAA,YAEJ,WAAW,OAAO,SAAS,UAAU;AAE/B,kBAAA,mBAAmB,SAAS,eAAe;AACtC,uBAAA,KAAK,IAAI,cAAc,KAAK;AAAA,cAAA,WACzB,KAAkC,aAAa;AACzD,uBAAO,KAAK,IAAI,cAAc,cAAc,EAAG,KAAK;AAAA,cAAA,OAC/C;AAEL,uBAAO,KAAK,IAAI;AAAA,kBACd;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AAAA,cAAA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGK,aAAA,CAAC,KAAK,MAAM;AAAA,IACpB,CAAA;AAAA,EACH;AACF;AAGA,SAAS,+BACP,eACyB;AACzB,QAAM,SAAkC,CAAC;AAGzC,aAAW,CAAC,YAAY,SAAS,KAAK,OAAO,QAAQ,aAAa,GAAG;AAC/D,QAAA,aAAa,OAAO,cAAc,UAAU;AAGvC,aAAA;AAAA,QACL;AAAA,QACA,2BAA2B,eAAe,UAAU;AAAA,MACtD;AAAA,IAAA;AAAA,EACF;AAGK,SAAA;AACT;AAGA,SAAS,2BACP,eACA,YACyB;AACzB,QAAM,SAAkC,CAAC;AAGnC,QAAA,YAAY,cAAc,UAAU;AAK1C,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AACxC,WAAA;AAAA,EAAA;AAIT,aAAW,CAAC,YAAY,KAAK,KAAK,OAAO,QAAQ,SAAS,GAAG;AAC3D,WAAO,UAAU,IAAI;AAAA,EAAA;AAGhB,SAAA;AACT;"}
+ {"version":3,"file":"select.js","sources":["../../../src/query/select.ts"],"sourcesContent":["import { map } from \"@electric-sql/d2mini\"\nimport {\n evaluateOperandOnNamespacedRow,\n extractValueFromNamespacedRow,\n} from \"./extractors\"\nimport type { ConditionOperand, Query, SelectCallback } from \"./schema\"\nimport type { KeyedStream, NamespacedAndKeyedStream } from \"../types\"\n\nexport function processSelect(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string,\n inputs: Record<string, KeyedStream>\n): KeyedStream {\n return pipeline.pipe(\n map(([key, namespacedRow]) => {\n const result: Record<string, unknown> = {}\n\n // Check if this is a grouped result (has no nested table structure)\n // If it's a grouped result, we need to handle it differently\n const isGroupedResult =\n query.groupBy &&\n Object.keys(namespacedRow).some(\n (namespaceKey) =>\n !Object.keys(inputs).includes(namespaceKey) &&\n typeof namespacedRow[namespaceKey] !== `object`\n )\n\n if (!query.select) {\n throw new Error(`Cannot process missing SELECT clause`)\n }\n\n for (const item of query.select) {\n // Handle callback functions\n if (typeof item === `function`) {\n const callback = item as SelectCallback\n const callbackResult = callback(namespacedRow)\n\n // If the callback returns an object, merge its properties into the result\n if (\n callbackResult &&\n typeof callbackResult === `object` &&\n !Array.isArray(callbackResult)\n ) {\n Object.assign(result, callbackResult)\n } else {\n // If the callback returns a primitive value, we can't merge it\n // This would need a specific key, but since we don't have one, we'll skip it\n // In practice, select callbacks should return objects with keys\n console.warn(\n `SelectCallback returned a non-object value. SelectCallbacks should return objects with key-value pairs.`\n )\n }\n continue\n }\n\n if (typeof item === `string`) {\n // Handle wildcard select - all columns from all tables\n if ((item as string) === `@*`) {\n // For grouped results, just return the row as is\n if (isGroupedResult) {\n Object.assign(result, namespacedRow)\n } else {\n // Extract all columns from all tables\n Object.assign(\n result,\n extractAllColumnsFromAllTables(namespacedRow)\n )\n }\n continue\n }\n\n // Handle @table.* syntax - all columns from a specific table\n if (\n (item as string).startsWith(`@`) &&\n (item as string).endsWith(`.*`)\n ) {\n const tableAlias = (item as string).slice(1, -2) // Remove the '@' and '.*' parts\n\n // For grouped results, check if we have columns from this table\n if (isGroupedResult) {\n // In grouped results, we don't have the nested structure anymore\n // So we can't extract by table. 
Just continue to the next item.\n continue\n } else {\n // Extract all columns from the specified table\n Object.assign(\n result,\n extractAllColumnsFromTable(namespacedRow, tableAlias)\n )\n }\n continue\n }\n\n // Handle simple column references like \"@table.column\" or \"@column\"\n if ((item as string).startsWith(`@`)) {\n const columnRef = (item as string).substring(1)\n const alias = columnRef\n\n // For grouped results, check if the column is directly in the row first\n if (isGroupedResult && columnRef in namespacedRow) {\n result[alias] = namespacedRow[columnRef]\n } else {\n // Extract the value from the nested structure\n result[alias] = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef,\n mainTableAlias,\n undefined\n )\n }\n\n // If the alias contains a dot (table.column),\n // use just the column part as the field name\n if (alias.includes(`.`)) {\n const columnName = alias.split(`.`)[1]\n result[columnName!] = result[alias]\n delete result[alias]\n }\n }\n } else {\n // Handle aliased columns like { alias: \"@column_name\" }\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `string` && (expr as string).startsWith(`@`)) {\n const columnRef = (expr as string).substring(1)\n\n // For grouped results, check if the column is directly in the row first\n if (isGroupedResult && columnRef in namespacedRow) {\n result[alias] = namespacedRow[columnRef]\n } else {\n // Extract the value from the nested structure\n result[alias] = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef,\n mainTableAlias,\n undefined\n )\n }\n } else if (typeof expr === `object`) {\n // For grouped results, the aggregate results are already in the row\n if (isGroupedResult && alias in namespacedRow) {\n result[alias] = namespacedRow[alias]\n } else if ((expr as { ORDER_INDEX: unknown }).ORDER_INDEX) {\n result[alias] = namespacedRow[mainTableAlias]![alias]\n } else {\n // This might be a function call\n result[alias] = evaluateOperandOnNamespacedRow(\n namespacedRow,\n expr as ConditionOperand,\n mainTableAlias,\n undefined\n )\n }\n }\n }\n }\n }\n\n return [key, result] as [string, typeof result]\n })\n )\n}\n\n// Helper function to extract all columns from all tables in a nested row\nfunction extractAllColumnsFromAllTables(\n namespacedRow: Record<string, unknown>\n): Record<string, unknown> {\n const result: Record<string, unknown> = {}\n\n // Process each table in the nested row\n for (const [tableAlias, tableData] of Object.entries(namespacedRow)) {\n if (tableData && typeof tableData === `object`) {\n // Add all columns from this table to the result\n // If there are column name conflicts, the last table's columns will overwrite previous ones\n Object.assign(\n result,\n extractAllColumnsFromTable(namespacedRow, tableAlias)\n )\n }\n }\n\n return result\n}\n\n// Helper function to extract all columns from a table in a nested row\nfunction extractAllColumnsFromTable(\n namespacedRow: Record<string, unknown>,\n tableAlias: string\n): Record<string, unknown> {\n const result: Record<string, unknown> = {}\n\n // Get the table data\n const tableData = namespacedRow[tableAlias] as\n | Record<string, unknown>\n | null\n | undefined\n\n if (!tableData || typeof tableData !== `object`) {\n return result\n }\n\n // Add all columns from the table to the result\n for (const [columnName, value] of Object.entries(tableData)) {\n result[columnName] = value\n }\n\n return 
result\n}\n"],"names":[],"mappings":";;AAQO,SAAS,cACd,UACA,OACA,gBACA,QACa;AACb,SAAO,SAAS;AAAA,IACd,IAAI,CAAC,CAAC,KAAK,aAAa,MAAM;AAC5B,YAAM,SAAkC,CAAC;AAIzC,YAAM,kBACJ,MAAM,WACN,OAAO,KAAK,aAAa,EAAE;AAAA,QACzB,CAAC,iBACC,CAAC,OAAO,KAAK,MAAM,EAAE,SAAS,YAAY,KAC1C,OAAO,cAAc,YAAY,MAAM;AAAA,MAC3C;AAEE,UAAA,CAAC,MAAM,QAAQ;AACX,cAAA,IAAI,MAAM,sCAAsC;AAAA,MAAA;AAG7C,iBAAA,QAAQ,MAAM,QAAQ;AAE3B,YAAA,OAAO,SAAS,YAAY;AAC9B,gBAAM,WAAW;AACX,gBAAA,iBAAiB,SAAS,aAAa;AAI3C,cAAA,kBACA,OAAO,mBAAmB,YAC1B,CAAC,MAAM,QAAQ,cAAc,GAC7B;AACO,mBAAA,OAAO,QAAQ,cAAc;AAAA,UAAA,OAC/B;AAIG,oBAAA;AAAA,cACN;AAAA,YACF;AAAA,UAAA;AAEF;AAAA,QAAA;AAGE,YAAA,OAAO,SAAS,UAAU;AAE5B,cAAK,SAAoB,MAAM;AAE7B,gBAAI,iBAAiB;AACZ,qBAAA,OAAO,QAAQ,aAAa;AAAA,YAAA,OAC9B;AAEE,qBAAA;AAAA,gBACL;AAAA,gBACA,+BAA+B,aAAa;AAAA,cAC9C;AAAA,YAAA;AAEF;AAAA,UAAA;AAIF,cACG,KAAgB,WAAW,GAAG,KAC9B,KAAgB,SAAS,IAAI,GAC9B;AACA,kBAAM,aAAc,KAAgB,MAAM,GAAG,EAAE;AAG/C,gBAAI,iBAAiB;AAGnB;AAAA,YAAA,OACK;AAEE,qBAAA;AAAA,gBACL;AAAA,gBACA,2BAA2B,eAAe,UAAU;AAAA,cACtD;AAAA,YAAA;AAEF;AAAA,UAAA;AAIG,cAAA,KAAgB,WAAW,GAAG,GAAG;AAC9B,kBAAA,YAAa,KAAgB,UAAU,CAAC;AAC9C,kBAAM,QAAQ;AAGV,gBAAA,mBAAmB,aAAa,eAAe;AAC1C,qBAAA,KAAK,IAAI,cAAc,SAAS;AAAA,YAAA,OAClC;AAEL,qBAAO,KAAK,IAAI;AAAA,gBACd;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,YAAA;AAKE,gBAAA,MAAM,SAAS,GAAG,GAAG;AACvB,oBAAM,aAAa,MAAM,MAAM,GAAG,EAAE,CAAC;AAC9B,qBAAA,UAAW,IAAI,OAAO,KAAK;AAClC,qBAAO,OAAO,KAAK;AAAA,YAAA;AAAA,UACrB;AAAA,QACF,OACK;AAEL,qBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,gBAAI,OAAO,SAAS,YAAa,KAAgB,WAAW,GAAG,GAAG;AAC1D,oBAAA,YAAa,KAAgB,UAAU,CAAC;AAG1C,kBAAA,mBAAmB,aAAa,eAAe;AAC1C,uBAAA,KAAK,IAAI,cAAc,SAAS;AAAA,cAAA,OAClC;AAEL,uBAAO,KAAK,IAAI;AAAA,kBACd;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AAAA,cAAA;AAAA,YAEJ,WAAW,OAAO,SAAS,UAAU;AAE/B,kBAAA,mBAAmB,SAAS,eAAe;AACtC,uBAAA,KAAK,IAAI,cAAc,KAAK;AAAA,cAAA,WACzB,KAAkC,aAAa;AACzD,uBAAO,KAAK,IAAI,cAAc,cAAc,EAAG,KAAK;AAAA,cAAA,OAC/C;AAEL,uBAAO,KAAK,IAAI;AAAA,kBACd;AAAA,kBACA;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF;AAAA,cAAA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGK,aAAA,CAAC,KAAK,MAAM;AAAA,IACpB,CAAA;AAAA,EACH;AACF;AAGA,SAAS,+BACP,eACyB;AACzB,QAAM,SAAkC,CAAC;AAGzC,aAAW,CAAC,YAAY,SAAS,KAAK,OAAO,QAAQ,aAAa,GAAG;AAC/D,QAAA,aAAa,OAAO,cAAc,UAAU;AAGvC,aAAA;AAAA,QACL;AAAA,QACA,2BAA2B,eAAe,UAAU;AAAA,MACtD;AAAA,IAAA;AAAA,EACF;AAGK,SAAA;AACT;AAGA,SAAS,2BACP,eACA,YACyB;AACzB,QAAM,SAAkC,CAAC;AAGnC,QAAA,YAAY,cAAc,UAAU;AAK1C,MAAI,CAAC,aAAa,OAAO,cAAc,UAAU;AACxC,WAAA;AAAA,EAAA;AAIT,aAAW,CAAC,YAAY,KAAK,KAAK,OAAO,QAAQ,SAAS,GAAG;AAC3D,WAAO,UAAU,IAAI;AAAA,EAAA;AAGhB,SAAA;AACT;"}
@@ -1,6 +1,6 @@
  import { Deferred } from './deferred.js';
  import { MutationFn, PendingMutation, TransactionConfig, TransactionState } from './types.js';
- export declare function createTransaction(config: TransactionConfig): Transaction;
+ export declare function createTransaction<TData extends object = Record<string, unknown>>(config: TransactionConfig<TData>): Transaction<TData>;
  export declare function getActiveTransaction(): Transaction | undefined;
  export declare class Transaction<T extends object = Record<string, unknown>> {
  id: string;
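The declaration change above makes createTransaction generic over the mutation data type instead of always widening to Record<string, unknown>. A minimal usage sketch (the Todo shape, the root import path, and the body of mutationFn are assumptions for illustration, not part of this diff):

  import { createTransaction } from "@tanstack/db"

  type Todo = { id: string; text: string; completed: boolean }

  const tx = createTransaction<Todo>({
    autoCommit: false,
    mutationFn: async ({ transaction }) => {
      // persist transaction.mutations to a backend here (hypothetical)
    },
  })

  tx.mutate(() => {
    // apply optimistic collection writes inside the callback
  })

  await tx.commit()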
@@ -1,14 +1,4 @@
  import { createDeferred } from "./deferred.js";
- function generateUUID() {
- if (typeof crypto !== `undefined` && typeof crypto.randomUUID === `function`) {
- return crypto.randomUUID();
- }
- return `xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx`.replace(/[xy]/g, function(c) {
- const r = Math.random() * 16 | 0;
- const v = c === `x` ? r : r & 3 | 8;
- return v.toString(16);
- });
- }
  const transactions = [];
  let transactionStack = [];
  function createTransaction(config) {
@@ -17,9 +7,12 @@ function createTransaction(config) {
  }
  let transactionId = config.id;
  if (!transactionId) {
- transactionId = generateUUID();
+ transactionId = crypto.randomUUID();
  }
- const newTransaction = new Transaction({ ...config, id: transactionId });
+ const newTransaction = new Transaction({
+ ...config,
+ id: transactionId
+ });
  transactions.push(newTransaction);
  return newTransaction;
  }
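With the fallback generator gone, transaction ids now come straight from crypto.randomUUID(), so the library assumes a runtime that exposes the Web Crypto global (recent Node versions and secure browser contexts). A guard an application could run once at startup if it targets older runtimes (this check is a sketch, not part of the package):

  if (typeof crypto === `undefined` || typeof crypto.randomUUID !== `function`) {
    throw new Error(`crypto.randomUUID is unavailable; upgrade the runtime or load a polyfill`)
  }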
@@ -1 +1 @@
- {"version":3,"file":"transactions.js","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nfunction generateUUID() {\n // Check if crypto.randomUUID is available (modern browsers and Node.js 15+)\n if (\n typeof crypto !== `undefined` &&\n typeof crypto.randomUUID === `function`\n ) {\n return crypto.randomUUID()\n }\n\n // Fallback implementation for older environments\n return `xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx`.replace(/[xy]/g, function (c) {\n const r = (Math.random() * 16) | 0\n const v = c === `x` ? r : (r & 0x3) | 0x8\n return v.toString(16)\n })\n}\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nexport function createTransaction(config: TransactionConfig): Transaction {\n if (typeof config.mutationFn === `undefined`) {\n throw `mutationFn is required when creating a transaction`\n }\n\n let transactionId = config.id\n if (!transactionId) {\n transactionId = generateUUID()\n }\n const newTransaction = new Transaction({ ...config, id: transactionId })\n\n transactions.push(newTransaction)\n\n return newTransaction\n}\n\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nexport class Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n this.id = config.id!\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.metadata = config.metadata ?? 
{}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw `You can no longer call .mutate() as the transaction is no longer pending`\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit()\n }\n\n return this\n }\n\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n // Replace existing mutation\n this.mutations[existingIndex] = newMutation\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? false\n if (this.state === `completed`) {\n throw `You can no longer call .rollback() as the transaction is already completed`\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection.onTransactionStateChange()\n mutation.collection.commitPendingTransactions()\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw `You can no longer call .commit() as the transaction is no longer pending`\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Update transaction with error information\n this.error = {\n message: error instanceof Error ? error.message : String(error),\n error: error instanceof Error ? 
error : new Error(String(error)),\n }\n\n // rollback the transaction\n return this.rollback()\n }\n\n return this\n }\n}\n"],"names":[],"mappings":";AAUA,SAAS,eAAe;AAEtB,MACE,OAAO,WAAW,eAClB,OAAO,OAAO,eAAe,YAC7B;AACA,WAAO,OAAO,WAAW;AAAA,EAAA;AAI3B,SAAO,uCAAuC,QAAQ,SAAS,SAAU,GAAG;AAC1E,UAAM,IAAK,KAAK,OAAO,IAAI,KAAM;AACjC,UAAM,IAAI,MAAM,MAAM,IAAK,IAAI,IAAO;AAC/B,WAAA,EAAE,SAAS,EAAE;AAAA,EAAA,CACrB;AACH;AAEA,MAAM,eAAwC,CAAC;AAC/C,IAAI,mBAA4C,CAAC;AAE1C,SAAS,kBAAkB,QAAwC;AACpE,MAAA,OAAO,OAAO,eAAe,aAAa;AACtC,UAAA;AAAA,EAAA;AAGR,MAAI,gBAAgB,OAAO;AAC3B,MAAI,CAAC,eAAe;AAClB,oBAAgB,aAAa;AAAA,EAAA;AAEzB,QAAA,iBAAiB,IAAI,YAAY,EAAE,GAAG,QAAQ,IAAI,eAAe;AAEvE,eAAa,KAAK,cAAc;AAEzB,SAAA;AACT;AAEO,SAAS,uBAAgD;AAC1D,MAAA,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EAAA,OAC9B;AACE,WAAA;AAAA,EAAA;AAEX;AAEA,SAAS,oBAAoB,IAAsB;AACjD,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AACnD,qBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAClE;AAEA,SAAS,sBAAsB,IAAsB;AAC7C,QAAA,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AACH,iBAAA,OAAO,OAAO,CAAC;AAAA,EAAA;AAEhC;AAEO,MAAM,YAAwD;AAAA,EAcnE,YAAY,QAA8B;AACxC,SAAK,KAAK,OAAO;AACjB,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAC;AAClB,SAAK,cAAc,eAA+B;AAC7C,SAAA,aAAa,OAAO,cAAc;AAClC,SAAA,gCAAgB,KAAK;AACrB,SAAA,WAAW,OAAO,YAAY,CAAC;AAAA,EAAA;AAAA,EAGtC,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAET,QAAA,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAAA;AAAA,EAC5B;AAAA,EAGF,OAAO,UAAsC;AACvC,QAAA,KAAK,UAAU,WAAW;AACtB,YAAA;AAAA,IAAA;AAGR,wBAAoB,IAAI;AACpB,QAAA;AACO,eAAA;AAAA,IAAA,UACT;AACA,4BAAsB,IAAI;AAAA,IAAA;AAG5B,QAAI,KAAK,YAAY;AACnB,WAAK,OAAO;AAAA,IAAA;AAGP,WAAA;AAAA,EAAA;AAAA,EAGT,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AAC7B,YAAA,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MACrC;AAEA,UAAI,iBAAiB,GAAG;AAEjB,aAAA,UAAU,aAAa,IAAI;AAAA,MAAA,OAC3B;AAEA,aAAA,UAAU,KAAK,WAAW;AAAA,MAAA;AAAA,IACjC;AAAA,EACF;AAAA,EAGF,SAAS,QAA4D;;AAC7D,UAAA,uBAAsB,iCAAQ,wBAAuB;AACvD,QAAA,KAAK,UAAU,aAAa;AACxB,YAAA;AAAA,IAAA;AAGR,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AAClB,YAAA,kCAAkB,IAAI;AACvB,WAAA,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAAA;AAAA,IAC5C;AAIF,SAAK,YAAY,QAAO,UAAK,UAAL,mBAAY,KAAK;AACzC,SAAK,gBAAgB;AAEd,WAAA;AAAA,EAAA;AAAA;AAAA,EAIT,kBAAwB;AAChB,UAAA,gCAAgB,IAAI;AACf,eAAA,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,yBAAyB;AAC7C,iBAAS,WAAW,0BAA0B;AACpC,kBAAA,IAAI,SAAS,WAAW,EAAE;AAAA,MAAA;AAAA,IACtC;AAAA,EACF;AAAA,EAGF,MAAM,SAAkC;AAClC,QAAA,KAAK,UAAU,WAAW;AACtB,YAAA;AAAA,IAAA;AAGR,SAAK,SAAS,YAAY;AAEtB,QAAA,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AAElB,aAAA;AAAA,IAAA;AAIL,QAAA;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAgB;AAEhB,WAAA,YAAY,QAAQ,IAAI;AAAA,aACtB,OAAO;AAEd,WAAK,QAAQ;AAAA,QACX,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MACjE;AAGA,aAAO,KAAK,SAAS;AAAA,IAAA;AAGhB,WAAA;AAAA,EAAA;AAEX;"}
+ {"version":3,"file":"transactions.js","sources":["../../src/transactions.ts"],"sourcesContent":["import { createDeferred } from \"./deferred\"\nimport type { Deferred } from \"./deferred\"\nimport type {\n MutationFn,\n PendingMutation,\n TransactionConfig,\n TransactionState,\n TransactionWithMutations,\n} from \"./types\"\n\nconst transactions: Array<Transaction<any>> = []\nlet transactionStack: Array<Transaction<any>> = []\n\nexport function createTransaction<\n TData extends object = Record<string, unknown>,\n>(config: TransactionConfig<TData>): Transaction<TData> {\n if (typeof config.mutationFn === `undefined`) {\n throw `mutationFn is required when creating a transaction`\n }\n\n let transactionId = config.id\n if (!transactionId) {\n transactionId = crypto.randomUUID()\n }\n const newTransaction = new Transaction<TData>({\n ...config,\n id: transactionId,\n })\n\n transactions.push(newTransaction)\n\n return newTransaction\n}\n\nexport function getActiveTransaction(): Transaction | undefined {\n if (transactionStack.length > 0) {\n return transactionStack.slice(-1)[0]\n } else {\n return undefined\n }\n}\n\nfunction registerTransaction(tx: Transaction<any>) {\n transactionStack.push(tx)\n}\n\nfunction unregisterTransaction(tx: Transaction<any>) {\n transactionStack = transactionStack.filter((t) => t.id !== tx.id)\n}\n\nfunction removeFromPendingList(tx: Transaction<any>) {\n const index = transactions.findIndex((t) => t.id === tx.id)\n if (index !== -1) {\n transactions.splice(index, 1)\n }\n}\n\nexport class Transaction<T extends object = Record<string, unknown>> {\n public id: string\n public state: TransactionState\n public mutationFn: MutationFn<T>\n public mutations: Array<PendingMutation<T>>\n public isPersisted: Deferred<Transaction<T>>\n public autoCommit: boolean\n public createdAt: Date\n public metadata: Record<string, unknown>\n public error?: {\n message: string\n error: Error\n }\n\n constructor(config: TransactionConfig<T>) {\n this.id = config.id!\n this.mutationFn = config.mutationFn\n this.state = `pending`\n this.mutations = []\n this.isPersisted = createDeferred<Transaction<T>>()\n this.autoCommit = config.autoCommit ?? true\n this.createdAt = new Date()\n this.metadata = config.metadata ?? {}\n }\n\n setState(newState: TransactionState) {\n this.state = newState\n\n if (newState === `completed` || newState === `failed`) {\n removeFromPendingList(this)\n }\n }\n\n mutate(callback: () => void): Transaction<T> {\n if (this.state !== `pending`) {\n throw `You can no longer call .mutate() as the transaction is no longer pending`\n }\n\n registerTransaction(this)\n try {\n callback()\n } finally {\n unregisterTransaction(this)\n }\n\n if (this.autoCommit) {\n this.commit()\n }\n\n return this\n }\n\n applyMutations(mutations: Array<PendingMutation<any>>): void {\n for (const newMutation of mutations) {\n const existingIndex = this.mutations.findIndex(\n (m) => m.globalKey === newMutation.globalKey\n )\n\n if (existingIndex >= 0) {\n // Replace existing mutation\n this.mutations[existingIndex] = newMutation\n } else {\n // Insert new mutation\n this.mutations.push(newMutation)\n }\n }\n }\n\n rollback(config?: { isSecondaryRollback?: boolean }): Transaction<T> {\n const isSecondaryRollback = config?.isSecondaryRollback ?? 
false\n if (this.state === `completed`) {\n throw `You can no longer call .rollback() as the transaction is already completed`\n }\n\n this.setState(`failed`)\n\n // See if there's any other transactions w/ mutations on the same ids\n // and roll them back as well.\n if (!isSecondaryRollback) {\n const mutationIds = new Set()\n this.mutations.forEach((m) => mutationIds.add(m.globalKey))\n for (const t of transactions) {\n t.state === `pending` &&\n t.mutations.some((m) => mutationIds.has(m.globalKey)) &&\n t.rollback({ isSecondaryRollback: true })\n }\n }\n\n // Reject the promise\n this.isPersisted.reject(this.error?.error)\n this.touchCollection()\n\n return this\n }\n\n // Tell collection that something has changed with the transaction\n touchCollection(): void {\n const hasCalled = new Set()\n for (const mutation of this.mutations) {\n if (!hasCalled.has(mutation.collection.id)) {\n mutation.collection.onTransactionStateChange()\n mutation.collection.commitPendingTransactions()\n hasCalled.add(mutation.collection.id)\n }\n }\n }\n\n async commit(): Promise<Transaction<T>> {\n if (this.state !== `pending`) {\n throw `You can no longer call .commit() as the transaction is no longer pending`\n }\n\n this.setState(`persisting`)\n\n if (this.mutations.length === 0) {\n this.setState(`completed`)\n\n return this\n }\n\n // Run mutationFn\n try {\n // At this point we know there's at least one mutation\n // We've already verified mutations is non-empty, so this cast is safe\n // Use a direct type assertion instead of object spreading to preserve the original type\n await this.mutationFn({\n transaction: this as unknown as TransactionWithMutations<T>,\n })\n\n this.setState(`completed`)\n this.touchCollection()\n\n this.isPersisted.resolve(this)\n } catch (error) {\n // Update transaction with error information\n this.error = {\n message: error instanceof Error ? error.message : String(error),\n error: error instanceof Error ? 
error : new Error(String(error)),\n }\n\n // rollback the transaction\n return this.rollback()\n }\n\n return this\n }\n}\n"],"names":[],"mappings":";AAUA,MAAM,eAAwC,CAAC;AAC/C,IAAI,mBAA4C,CAAC;AAE1C,SAAS,kBAEd,QAAsD;AAClD,MAAA,OAAO,OAAO,eAAe,aAAa;AACtC,UAAA;AAAA,EAAA;AAGR,MAAI,gBAAgB,OAAO;AAC3B,MAAI,CAAC,eAAe;AAClB,oBAAgB,OAAO,WAAW;AAAA,EAAA;AAE9B,QAAA,iBAAiB,IAAI,YAAmB;AAAA,IAC5C,GAAG;AAAA,IACH,IAAI;AAAA,EAAA,CACL;AAED,eAAa,KAAK,cAAc;AAEzB,SAAA;AACT;AAEO,SAAS,uBAAgD;AAC1D,MAAA,iBAAiB,SAAS,GAAG;AAC/B,WAAO,iBAAiB,MAAM,EAAE,EAAE,CAAC;AAAA,EAAA,OAC9B;AACE,WAAA;AAAA,EAAA;AAEX;AAEA,SAAS,oBAAoB,IAAsB;AACjD,mBAAiB,KAAK,EAAE;AAC1B;AAEA,SAAS,sBAAsB,IAAsB;AACnD,qBAAmB,iBAAiB,OAAO,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAClE;AAEA,SAAS,sBAAsB,IAAsB;AAC7C,QAAA,QAAQ,aAAa,UAAU,CAAC,MAAM,EAAE,OAAO,GAAG,EAAE;AAC1D,MAAI,UAAU,IAAI;AACH,iBAAA,OAAO,OAAO,CAAC;AAAA,EAAA;AAEhC;AAEO,MAAM,YAAwD;AAAA,EAcnE,YAAY,QAA8B;AACxC,SAAK,KAAK,OAAO;AACjB,SAAK,aAAa,OAAO;AACzB,SAAK,QAAQ;AACb,SAAK,YAAY,CAAC;AAClB,SAAK,cAAc,eAA+B;AAC7C,SAAA,aAAa,OAAO,cAAc;AAClC,SAAA,gCAAgB,KAAK;AACrB,SAAA,WAAW,OAAO,YAAY,CAAC;AAAA,EAAA;AAAA,EAGtC,SAAS,UAA4B;AACnC,SAAK,QAAQ;AAET,QAAA,aAAa,eAAe,aAAa,UAAU;AACrD,4BAAsB,IAAI;AAAA,IAAA;AAAA,EAC5B;AAAA,EAGF,OAAO,UAAsC;AACvC,QAAA,KAAK,UAAU,WAAW;AACtB,YAAA;AAAA,IAAA;AAGR,wBAAoB,IAAI;AACpB,QAAA;AACO,eAAA;AAAA,IAAA,UACT;AACA,4BAAsB,IAAI;AAAA,IAAA;AAG5B,QAAI,KAAK,YAAY;AACnB,WAAK,OAAO;AAAA,IAAA;AAGP,WAAA;AAAA,EAAA;AAAA,EAGT,eAAe,WAA8C;AAC3D,eAAW,eAAe,WAAW;AAC7B,YAAA,gBAAgB,KAAK,UAAU;AAAA,QACnC,CAAC,MAAM,EAAE,cAAc,YAAY;AAAA,MACrC;AAEA,UAAI,iBAAiB,GAAG;AAEjB,aAAA,UAAU,aAAa,IAAI;AAAA,MAAA,OAC3B;AAEA,aAAA,UAAU,KAAK,WAAW;AAAA,MAAA;AAAA,IACjC;AAAA,EACF;AAAA,EAGF,SAAS,QAA4D;;AAC7D,UAAA,uBAAsB,iCAAQ,wBAAuB;AACvD,QAAA,KAAK,UAAU,aAAa;AACxB,YAAA;AAAA,IAAA;AAGR,SAAK,SAAS,QAAQ;AAItB,QAAI,CAAC,qBAAqB;AAClB,YAAA,kCAAkB,IAAI;AACvB,WAAA,UAAU,QAAQ,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC;AAC1D,iBAAW,KAAK,cAAc;AAC5B,UAAE,UAAU,aACV,EAAE,UAAU,KAAK,CAAC,MAAM,YAAY,IAAI,EAAE,SAAS,CAAC,KACpD,EAAE,SAAS,EAAE,qBAAqB,MAAM;AAAA,MAAA;AAAA,IAC5C;AAIF,SAAK,YAAY,QAAO,UAAK,UAAL,mBAAY,KAAK;AACzC,SAAK,gBAAgB;AAEd,WAAA;AAAA,EAAA;AAAA;AAAA,EAIT,kBAAwB;AAChB,UAAA,gCAAgB,IAAI;AACf,eAAA,YAAY,KAAK,WAAW;AACrC,UAAI,CAAC,UAAU,IAAI,SAAS,WAAW,EAAE,GAAG;AAC1C,iBAAS,WAAW,yBAAyB;AAC7C,iBAAS,WAAW,0BAA0B;AACpC,kBAAA,IAAI,SAAS,WAAW,EAAE;AAAA,MAAA;AAAA,IACtC;AAAA,EACF;AAAA,EAGF,MAAM,SAAkC;AAClC,QAAA,KAAK,UAAU,WAAW;AACtB,YAAA;AAAA,IAAA;AAGR,SAAK,SAAS,YAAY;AAEtB,QAAA,KAAK,UAAU,WAAW,GAAG;AAC/B,WAAK,SAAS,WAAW;AAElB,aAAA;AAAA,IAAA;AAIL,QAAA;AAIF,YAAM,KAAK,WAAW;AAAA,QACpB,aAAa;AAAA,MAAA,CACd;AAED,WAAK,SAAS,WAAW;AACzB,WAAK,gBAAgB;AAEhB,WAAA,YAAY,QAAQ,IAAI;AAAA,aACtB,OAAO;AAEd,WAAK,QAAQ;AAAA,QACX,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAC9D,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MACjE;AAGA,aAAO,KAAK,SAAS;AAAA,IAAA;AAGhB,WAAA;AAAA,EAAA;AAEX;"}
@@ -1,4 +1,4 @@
- import { IStreamBuilder } from '@electric-sql/d2ts';
+ import { IStreamBuilder } from '@electric-sql/d2mini';
  import { Collection } from './collection.js';
  import { StandardSchemaV1 } from '@standard-schema/spec';
  import { Transaction } from './transactions.js';
package/package.json CHANGED
@@ -1,9 +1,9 @@
  {
  "name": "@tanstack/db",
  "description": "A reactive client store for building super fast apps on sync",
- "version": "0.0.9",
+ "version": "0.0.11",
  "dependencies": {
- "@electric-sql/d2ts": "^0.1.7",
+ "@electric-sql/d2mini": "^0.1.2",
  "@standard-schema/spec": "^1.0.0",
  "@tanstack/store": "^0.7.0"
  },
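The dependency swap above replaces @electric-sql/d2ts with the lighter @electric-sql/d2mini runtime. Applications that imported D2 operators directly alongside @tanstack/db would make the same one-line change (sketch; filter and map are exports shown in the hunks below):

  // before
  import { filter, map } from "@electric-sql/d2ts"
  // after
  import { filter, map } from "@electric-sql/d2mini"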
package/src/collection.ts CHANGED
@@ -1043,9 +1043,13 @@ export class CollectionImpl<
  })
  .filter(Boolean) as Array<PendingMutation<T>>

- // If no changes were made, return early
+ // If no changes were made, return an empty transaction early
  if (mutations.length === 0) {
- throw new Error(`No changes were made to any of the objects`)
+ const emptyTransaction = new Transaction({
+ mutationFn: async () => {},
+ })
+ emptyTransaction.commit()
+ return emptyTransaction
  }

  // If an ambient transaction exists, use it
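The collection.ts change above alters the behaviour of no-op mutations: instead of throwing `No changes were made to any of the objects`, the collection now hands back an already-committed transaction whose mutations array is empty. A rough sketch of the consumer-visible effect (the update call shape is illustrative only; it is not shown in this diff):

  const tx = collection.update(todo.id, (draft) => {
    // callback changes nothing
  })
  // 0.0.9 threw here; 0.0.11 returns an empty transaction that has already committed
  console.log(tx.mutations.length) // 0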
@@ -1,4 +1,4 @@
- import { D2, MessageType, MultiSet, output } from "@electric-sql/d2ts"
+ import { D2, MultiSet, output } from "@electric-sql/d2mini"
  import { createCollection } from "../collection.js"
  import { compileQueryPipeline } from "./pipeline-compiler.js"
  import type { Collection } from "../collection.js"
@@ -7,7 +7,7 @@ import type {
  IStreamBuilder,
  MultiSetArray,
  RootStreamBuilder,
- } from "@electric-sql/d2ts"
+ } from "@electric-sql/d2mini"
  import type { QueryBuilder, ResultsFromContext } from "./query-builder.js"
  import type { Context, Schema } from "./types.js"

@@ -25,7 +25,6 @@ export class CompiledQuery<TResults extends object = Record<string, unknown>> {
  private inputCollections: Record<string, Collection<any>>
  private resultCollection: Collection<TResults>
  public state: `compiled` | `running` | `stopped` = `compiled`
- private version = 0
  private unsubscribeCallbacks: Array<() => void> = []

  constructor(queryBuilder: QueryBuilder<Context<Schema>>) {
@@ -38,7 +37,7 @@ export class CompiledQuery<TResults extends object = Record<string, unknown>> {

  this.inputCollections = collections

- const graph = new D2({ initialFrontier: this.version })
+ const graph = new D2()
  const inputs = Object.fromEntries(
  Object.entries(collections).map(([key]) => [key, graph.newInput<any>()])
  )
@@ -48,48 +47,46 @@ export class CompiledQuery<TResults extends object = Record<string, unknown>> {
  query,
  inputs
  ).pipe(
- output(({ type, data }) => {
- if (type === MessageType.DATA) {
- begin()
- data.collection
- .getInner()
- .reduce((acc, [[key, value], multiplicity]) => {
- const changes = acc.get(key) || {
- deletes: 0,
- inserts: 0,
- value,
- }
- if (multiplicity < 0) {
- changes.deletes += Math.abs(multiplicity)
- } else if (multiplicity > 0) {
- changes.inserts += multiplicity
- changes.value = value
- }
- acc.set(key, changes)
- return acc
- }, new Map<unknown, { deletes: number; inserts: number; value: TResults }>())
- .forEach((changes, rawKey) => {
- const { deletes, inserts, value } = changes
- const valueWithKey = { ...value, _key: rawKey }
- if (inserts && !deletes) {
- write({
- value: valueWithKey,
- type: `insert`,
- })
- } else if (inserts >= deletes) {
- write({
- value: valueWithKey,
- type: `update`,
- })
- } else if (deletes > 0) {
- write({
- value: valueWithKey,
- type: `delete`,
- })
- }
- })
- commit()
- }
+ output((data) => {
+ begin()
+ data
+ .getInner()
+ .reduce((acc, [[key, value], multiplicity]) => {
+ const changes = acc.get(key) || {
+ deletes: 0,
+ inserts: 0,
+ value,
+ }
+ if (multiplicity < 0) {
+ changes.deletes += Math.abs(multiplicity)
+ } else if (multiplicity > 0) {
+ changes.inserts += multiplicity
+ changes.value = value
+ }
+ acc.set(key, changes)
+ return acc
+ }, new Map<unknown, { deletes: number; inserts: number; value: TResults }>())
+ .forEach((changes, rawKey) => {
+ const { deletes, inserts, value } = changes
+ const valueWithKey = { ...value, _key: rawKey }
+ if (inserts && !deletes) {
+ write({
+ value: valueWithKey,
+ type: `insert`,
+ })
+ } else if (inserts >= deletes) {
+ write({
+ value: valueWithKey,
+ type: `update`,
+ })
+ } else if (deletes > 0) {
+ write({
+ value: valueWithKey,
+ type: `delete`,
+ })
+ }
+ })
+ commit()
  })
  )
  graph.finalize()
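The hunk above tracks d2mini's simpler output contract: the callback now receives the changed multiset directly instead of a typed message that had to be filtered on MessageType.DATA, so the begin/write/commit bookkeeping runs on every invocation and the nested data.collection indirection disappears. The per-key diffing logic, which nets inserts against deletes before emitting an insert, update, or delete, is unchanged. In outline (comment sketch only, mirroring the lines above):

  // d2ts (0.0.9):   output(({ type, data }) => { if (type === MessageType.DATA) { /* data.collection.getInner() ... */ } })
  // d2mini (0.0.11): output((data) => { /* data.getInner() ... */ })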
@@ -131,22 +128,7 @@ export class CompiledQuery<TResults extends object = Record<string, unknown>> {
  multiSetArray.push([[key, change.value], -1])
  }
  }
- input.sendData(this.version, new MultiSet(multiSetArray))
- }
-
- private sendFrontierToInput(inputKey: string) {
- const input = this.inputs[inputKey]!
- input.sendFrontier(this.version)
- }
-
- private sendFrontierToAllInputs() {
- Object.entries(this.inputs).forEach(([key]) => {
- this.sendFrontierToInput(key)
- })
- }
-
- private incrementVersion() {
- this.version++
+ input.sendData(new MultiSet(multiSetArray))
  }

  private runGraph() {
@@ -168,16 +150,12 @@ export class CompiledQuery<TResults extends object = Record<string, unknown>> {
  collection.config.getKey
  )
  })
- this.incrementVersion()
- this.sendFrontierToAllInputs()
  this.runGraph()

  // Subscribe to changes
  Object.entries(this.inputCollections).forEach(([key, collection]) => {
  const unsubscribe = collection.subscribeChanges((changes) => {
  this.sendChangesToInput(key, changes, collection.config.getKey)
- this.incrementVersion()
- this.sendFrontierToAllInputs()
  this.runGraph()
  })

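The two hunks above strip the manual version and frontier bookkeeping out of CompiledQuery: d2mini's sendData no longer takes a version argument and there is nothing to advance before running the graph, so incrementVersion, sendFrontierToInput, and sendFrontierToAllInputs are deleted outright. The feeding protocol shrinks accordingly (comment sketch based on the lines above; runGraph's body is not shown in this diff):

  // d2ts (0.0.9)
  //   input.sendData(this.version, new MultiSet(changes))
  //   input.sendFrontier(this.version)
  //   this.runGraph()
  // d2mini (0.0.11)
  //   input.sendData(new MultiSet(changes))
  //   this.runGraph()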
@@ -1,4 +1,4 @@
- import { groupBy, groupByOperators } from "@electric-sql/d2ts"
+ import { groupBy, groupByOperators } from "@electric-sql/d2mini"
  import {
  evaluateOperandOnNamespacedRow,
  extractValueFromNamespacedRow,
@@ -3,11 +3,11 @@ import {
  filter,
  join as joinOperator,
  map,
- } from "@electric-sql/d2ts"
+ } from "@electric-sql/d2mini"
  import { evaluateConditionOnNamespacedRow } from "./evaluators.js"
  import { extractJoinKey } from "./extractors.js"
  import type { Query } from "./index.js"
- import type { IStreamBuilder, JoinType } from "@electric-sql/d2ts"
+ import type { IStreamBuilder, JoinType } from "@electric-sql/d2mini"
  import type {
  KeyedStream,
  NamespacedAndKeyedStream,
@@ -3,7 +3,7 @@ import {
  orderBy,
  orderByWithFractionalIndex,
  orderByWithIndex,
- } from "@electric-sql/d2ts"
+ } from "@electric-sql/d2mini"
  import { evaluateOperandOnNamespacedRow } from "./extractors"
  import { isOrderIndexFunctionCall } from "./utils"
  import type { ConditionOperand, Query } from "./schema"
@@ -1,11 +1,11 @@
- import { filter, map } from "@electric-sql/d2ts"
+ import { filter, map } from "@electric-sql/d2mini"
  import { evaluateWhereOnNamespacedRow } from "./evaluators.js"
  import { processJoinClause } from "./joins.js"
  import { processGroupBy } from "./group-by.js"
  import { processOrderBy } from "./order-by.js"
  import { processSelect } from "./select.js"
  import type { Query } from "./schema.js"
- import type { IStreamBuilder } from "@electric-sql/d2ts"
+ import type { IStreamBuilder } from "@electric-sql/d2mini"
  import type {
  InputRow,
  KeyedStream,
@@ -1,4 +1,4 @@
- import { map } from "@electric-sql/d2ts"
+ import { map } from "@electric-sql/d2mini"
  import {
  evaluateOperandOnNamespacedRow,
  extractValueFromNamespacedRow,