@tanstack/db 0.0.10 → 0.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/cjs/query/compiled-query.cjs +40 -59
  2. package/dist/cjs/query/compiled-query.cjs.map +1 -1
  3. package/dist/cjs/query/compiled-query.d.cts +0 -4
  4. package/dist/cjs/query/group-by.cjs +3 -3
  5. package/dist/cjs/query/group-by.cjs.map +1 -1
  6. package/dist/cjs/query/group-by.d.cts +1 -1
  7. package/dist/cjs/query/joins.cjs +16 -16
  8. package/dist/cjs/query/joins.cjs.map +1 -1
  9. package/dist/cjs/query/joins.d.cts +1 -1
  10. package/dist/cjs/query/order-by.cjs +6 -6
  11. package/dist/cjs/query/order-by.cjs.map +1 -1
  12. package/dist/cjs/query/pipeline-compiler.cjs +5 -5
  13. package/dist/cjs/query/pipeline-compiler.cjs.map +1 -1
  14. package/dist/cjs/query/pipeline-compiler.d.cts +1 -1
  15. package/dist/cjs/query/select.cjs +2 -2
  16. package/dist/cjs/query/select.cjs.map +1 -1
  17. package/dist/cjs/transactions.cjs +5 -12
  18. package/dist/cjs/transactions.cjs.map +1 -1
  19. package/dist/cjs/transactions.d.cts +1 -1
  20. package/dist/cjs/types.d.cts +1 -1
  21. package/dist/esm/query/compiled-query.d.ts +0 -4
  22. package/dist/esm/query/compiled-query.js +40 -59
  23. package/dist/esm/query/compiled-query.js.map +1 -1
  24. package/dist/esm/query/group-by.d.ts +1 -1
  25. package/dist/esm/query/group-by.js +1 -1
  26. package/dist/esm/query/group-by.js.map +1 -1
  27. package/dist/esm/query/joins.d.ts +1 -1
  28. package/dist/esm/query/joins.js +1 -1
  29. package/dist/esm/query/joins.js.map +1 -1
  30. package/dist/esm/query/order-by.js +1 -1
  31. package/dist/esm/query/order-by.js.map +1 -1
  32. package/dist/esm/query/pipeline-compiler.d.ts +1 -1
  33. package/dist/esm/query/pipeline-compiler.js +1 -1
  34. package/dist/esm/query/pipeline-compiler.js.map +1 -1
  35. package/dist/esm/query/select.js +1 -1
  36. package/dist/esm/query/select.js.map +1 -1
  37. package/dist/esm/transactions.d.ts +1 -1
  38. package/dist/esm/transactions.js +5 -12
  39. package/dist/esm/transactions.js.map +1 -1
  40. package/dist/esm/types.d.ts +1 -1
  41. package/package.json +2 -2
  42. package/src/query/compiled-query.ts +44 -66
  43. package/src/query/group-by.ts +1 -1
  44. package/src/query/joins.ts +2 -2
  45. package/src/query/order-by.ts +1 -1
  46. package/src/query/pipeline-compiler.ts +2 -2
  47. package/src/query/select.ts +1 -1
  48. package/src/transactions.ts +8 -20
  49. package/src/types.ts +1 -1
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const d2ts = require("@electric-sql/d2ts");
+ const d2mini = require("@electric-sql/d2mini");
  const collection = require("../collection.cjs");
  const pipelineCompiler = require("./pipeline-compiler.cjs");
  function compileQuery(queryBuilder) {
@@ -9,7 +9,6 @@ function compileQuery(queryBuilder) {
  class CompiledQuery {
  constructor(queryBuilder) {
  this.state = `compiled`;
- this.version = 0;
  this.unsubscribeCallbacks = [];
  const query = queryBuilder._query;
  const collections = query.collections;
@@ -17,7 +16,7 @@ class CompiledQuery {
  throw new Error(`No collections provided`);
  }
  this.inputCollections = collections;
- const graph = new d2ts.D2({ initialFrontier: this.version });
+ const graph = new d2mini.D2();
  const inputs = Object.fromEntries(
  Object.entries(collections).map(([key]) => [key, graph.newInput()])
  );
@@ -26,45 +25,43 @@ class CompiledQuery {
  query,
  inputs
  ).pipe(
- d2ts.output(({ type, data }) => {
- if (type === d2ts.MessageType.DATA) {
- begin();
- data.collection.getInner().reduce((acc, [[key, value], multiplicity]) => {
- const changes = acc.get(key) || {
- deletes: 0,
- inserts: 0,
- value
- };
- if (multiplicity < 0) {
- changes.deletes += Math.abs(multiplicity);
- } else if (multiplicity > 0) {
- changes.inserts += multiplicity;
- changes.value = value;
- }
- acc.set(key, changes);
- return acc;
- }, /* @__PURE__ */ new Map()).forEach((changes, rawKey) => {
- const { deletes, inserts, value } = changes;
- const valueWithKey = { ...value, _key: rawKey };
- if (inserts && !deletes) {
- write({
- value: valueWithKey,
- type: `insert`
- });
- } else if (inserts >= deletes) {
- write({
- value: valueWithKey,
- type: `update`
- });
- } else if (deletes > 0) {
- write({
- value: valueWithKey,
- type: `delete`
- });
- }
- });
- commit();
- }
+ d2mini.output((data) => {
+ begin();
+ data.getInner().reduce((acc, [[key, value], multiplicity]) => {
+ const changes = acc.get(key) || {
+ deletes: 0,
+ inserts: 0,
+ value
+ };
+ if (multiplicity < 0) {
+ changes.deletes += Math.abs(multiplicity);
+ } else if (multiplicity > 0) {
+ changes.inserts += multiplicity;
+ changes.value = value;
+ }
+ acc.set(key, changes);
+ return acc;
+ }, /* @__PURE__ */ new Map()).forEach((changes, rawKey) => {
+ const { deletes, inserts, value } = changes;
+ const valueWithKey = { ...value, _key: rawKey };
+ if (inserts && !deletes) {
+ write({
+ value: valueWithKey,
+ type: `insert`
+ });
+ } else if (inserts >= deletes) {
+ write({
+ value: valueWithKey,
+ type: `update`
+ });
+ } else if (deletes > 0) {
+ write({
+ value: valueWithKey,
+ type: `delete`
+ });
+ }
+ });
+ commit();
  })
  );
  graph.finalize();
@@ -99,19 +96,7 @@ class CompiledQuery {
  multiSetArray.push([[key, change.value], -1]);
  }
  }
- input.sendData(this.version, new d2ts.MultiSet(multiSetArray));
- }
- sendFrontierToInput(inputKey) {
- const input = this.inputs[inputKey];
- input.sendFrontier(this.version);
- }
- sendFrontierToAllInputs() {
- Object.entries(this.inputs).forEach(([key]) => {
- this.sendFrontierToInput(key);
- });
- }
- incrementVersion() {
- this.version++;
+ input.sendData(new d2mini.MultiSet(multiSetArray));
  }
  runGraph() {
  this.graph.run();
@@ -129,14 +114,10 @@ class CompiledQuery {
  collection2.config.getKey
  );
  });
- this.incrementVersion();
- this.sendFrontierToAllInputs();
  this.runGraph();
  Object.entries(this.inputCollections).forEach(([key, collection2]) => {
  const unsubscribe = collection2.subscribeChanges((changes) => {
  this.sendChangesToInput(key, changes, collection2.config.getKey);
- this.incrementVersion();
- this.sendFrontierToAllInputs();
  this.runGraph();
  });
  this.unsubscribeCallbacks.push(unsubscribe);
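Taken together, the compiled-query changes above drop d2ts's version/frontier protocol: the graph is constructed without an initialFrontier, the output callback receives the multiset directly instead of a { type, data } message, and inputs receive plain MultiSets with no version argument or sendFrontier step. The following is a condensed sketch of the new wiring, using only the d2mini calls that appear in this diff (D2, newInput, output, MultiSet, sendData, finalize, run); the input name and row shape are illustrative, not part of the package.

import { D2, MultiSet, output } from "@electric-sql/d2mini"

const graph = new D2() // no { initialFrontier } option anymore
const todos = graph.newInput<[string, { title: string }]>()

todos.pipe(
  output((data) => {
    // the callback receives the multiset directly; no MessageType.DATA check
    for (const [[key, value], multiplicity] of data.getInner()) {
      console.log(key, value, multiplicity)
    }
  })
)
graph.finalize()

// sending changes no longer takes a version, and there is no frontier to advance
todos.sendData(new MultiSet([[[`1`, { title: `write docs` }], 1]]))
graph.run()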
@@ -1 +1 @@
- {"version":3,"file":"compiled-query.cjs","sources":["../../../src/query/compiled-query.ts"],"sourcesContent":["import { D2, MessageType, MultiSet, output } from \"@electric-sql/d2ts\"\nimport { createCollection } from \"../collection.js\"\nimport { compileQueryPipeline } from \"./pipeline-compiler.js\"\nimport type { Collection } from \"../collection.js\"\nimport type { ChangeMessage, SyncConfig } from \"../types.js\"\nimport type {\n IStreamBuilder,\n MultiSetArray,\n RootStreamBuilder,\n} from \"@electric-sql/d2ts\"\nimport type { QueryBuilder, ResultsFromContext } from \"./query-builder.js\"\nimport type { Context, Schema } from \"./types.js\"\n\nexport function compileQuery<TContext extends Context<Schema>>(\n queryBuilder: QueryBuilder<TContext>\n) {\n return new CompiledQuery<\n ResultsFromContext<TContext> & { _key?: string | number }\n >(queryBuilder)\n}\n\nexport class CompiledQuery<TResults extends object = Record<string, unknown>> {\n private graph: D2\n private inputs: Record<string, RootStreamBuilder<any>>\n private inputCollections: Record<string, Collection<any>>\n private resultCollection: Collection<TResults>\n public state: `compiled` | `running` | `stopped` = `compiled`\n private version = 0\n private unsubscribeCallbacks: Array<() => void> = []\n\n constructor(queryBuilder: QueryBuilder<Context<Schema>>) {\n const query = queryBuilder._query\n const collections = query.collections\n\n if (!collections) {\n throw new Error(`No collections provided`)\n }\n\n this.inputCollections = collections\n\n const graph = new D2({ initialFrontier: this.version })\n const inputs = Object.fromEntries(\n Object.entries(collections).map(([key]) => [key, graph.newInput<any>()])\n )\n\n const sync: SyncConfig<TResults>[`sync`] = ({ begin, write, commit }) => {\n compileQueryPipeline<IStreamBuilder<[unknown, TResults]>>(\n query,\n inputs\n ).pipe(\n output(({ type, data }) => {\n if (type === MessageType.DATA) {\n begin()\n data.collection\n .getInner()\n .reduce((acc, [[key, value], multiplicity]) => {\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n }\n acc.set(key, changes)\n return acc\n }, new Map<unknown, { deletes: number; inserts: number; value: TResults }>())\n .forEach((changes, rawKey) => {\n const { deletes, inserts, value } = changes\n const valueWithKey = { ...value, _key: rawKey }\n if (inserts && !deletes) {\n write({\n value: valueWithKey,\n type: `insert`,\n })\n } else if (inserts >= deletes) {\n write({\n value: valueWithKey,\n type: `update`,\n })\n } else if (deletes > 0) {\n write({\n value: valueWithKey,\n type: `delete`,\n })\n }\n })\n commit()\n }\n })\n )\n graph.finalize()\n }\n\n this.graph = graph\n this.inputs = inputs\n this.resultCollection = createCollection<TResults>({\n id: crypto.randomUUID(), // TODO: remove when we don't require any more\n getKey: (val: unknown) => {\n return (val as any)._key\n },\n sync: {\n sync,\n },\n })\n }\n\n get results() {\n return this.resultCollection\n }\n\n private sendChangesToInput(\n inputKey: string,\n changes: Array<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n ) {\n const input = this.inputs[inputKey]!\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } 
else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n input.sendData(this.version, new MultiSet(multiSetArray))\n }\n\n private sendFrontierToInput(inputKey: string) {\n const input = this.inputs[inputKey]!\n input.sendFrontier(this.version)\n }\n\n private sendFrontierToAllInputs() {\n Object.entries(this.inputs).forEach(([key]) => {\n this.sendFrontierToInput(key)\n })\n }\n\n private incrementVersion() {\n this.version++\n }\n\n private runGraph() {\n this.graph.run()\n }\n\n start() {\n if (this.state === `running`) {\n throw new Error(`Query is already running`)\n } else if (this.state === `stopped`) {\n throw new Error(`Query is stopped`)\n }\n\n // Send initial state\n Object.entries(this.inputCollections).forEach(([key, collection]) => {\n this.sendChangesToInput(\n key,\n collection.currentStateAsChanges(),\n collection.config.getKey\n )\n })\n this.incrementVersion()\n this.sendFrontierToAllInputs()\n this.runGraph()\n\n // Subscribe to changes\n Object.entries(this.inputCollections).forEach(([key, collection]) => {\n const unsubscribe = collection.subscribeChanges((changes) => {\n this.sendChangesToInput(key, changes, collection.config.getKey)\n this.incrementVersion()\n this.sendFrontierToAllInputs()\n this.runGraph()\n })\n\n this.unsubscribeCallbacks.push(unsubscribe)\n })\n\n this.state = `running`\n return () => {\n this.stop()\n }\n }\n\n stop() {\n this.unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n this.unsubscribeCallbacks = []\n this.state = `stopped`\n }\n}\n"],"names":["D2","compileQueryPipeline","output","MessageType","createCollection","MultiSet","collection"],"mappings":";;;;;AAaO,SAAS,aACd,cACA;AACO,SAAA,IAAI,cAET,YAAY;AAChB;AAEO,MAAM,cAAiE;AAAA,EAS5E,YAAY,cAA6C;AAJzD,SAAO,QAA4C;AACnD,SAAQ,UAAU;AAClB,SAAQ,uBAA0C,CAAC;AAGjD,UAAM,QAAQ,aAAa;AAC3B,UAAM,cAAc,MAAM;AAE1B,QAAI,CAAC,aAAa;AACV,YAAA,IAAI,MAAM,yBAAyB;AAAA,IAAA;AAG3C,SAAK,mBAAmB;AAExB,UAAM,QAAQ,IAAIA,KAAA,GAAG,EAAE,iBAAiB,KAAK,SAAS;AACtD,UAAM,SAAS,OAAO;AAAA,MACpB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,MAAM,SAAA,CAAe,CAAC;AAAA,IACzE;AAEA,UAAM,OAAqC,CAAC,EAAE,OAAO,OAAO,aAAa;AACvEC,uBAAA;AAAA,QACE;AAAA,QACA;AAAA,MAAA,EACA;AAAA,QACAC,KAAAA,OAAO,CAAC,EAAE,MAAM,WAAW;AACrB,cAAA,SAASC,iBAAY,MAAM;AACvB,kBAAA;AACN,iBAAK,WACF,SACA,EAAA,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,KAAK,GAAG,YAAY,MAAM;AAC7C,oBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,gBAC9B,SAAS;AAAA,gBACT,SAAS;AAAA,gBACT;AAAA,cACF;AACA,kBAAI,eAAe,GAAG;AACZ,wBAAA,WAAW,KAAK,IAAI,YAAY;AAAA,cAAA,WAC/B,eAAe,GAAG;AAC3B,wBAAQ,WAAW;AACnB,wBAAQ,QAAQ;AAAA,cAAA;AAEd,kBAAA,IAAI,KAAK,OAAO;AACb,qBAAA;AAAA,YAAA,uBACF,IAAoE,CAAC,EAC3E,QAAQ,CAAC,SAAS,WAAW;AAC5B,oBAAM,EAAE,SAAS,SAAS,MAAU,IAAA;AACpC,oBAAM,eAAe,EAAE,GAAG,OAAO,MAAM,OAAO;AAC1C,kBAAA,WAAW,CAAC,SAAS;AACjB,sBAAA;AAAA,kBACJ,OAAO;AAAA,kBACP,MAAM;AAAA,gBAAA,CACP;AAAA,cAAA,WACQ,WAAW,SAAS;AACvB,sBAAA;AAAA,kBACJ,OAAO;AAAA,kBACP,MAAM;AAAA,gBAAA,CACP;AAAA,cAAA,WACQ,UAAU,GAAG;AAChB,sBAAA;AAAA,kBACJ,OAAO;AAAA,kBACP,MAAM;AAAA,gBAAA,CACP;AAAA,cAAA;AAAA,YACH,CACD;AACI,mBAAA;AAAA,UAAA;AAAA,QAEV,CAAA;AAAA,MACH;AACA,YAAM,SAAS;AAAA,IACjB;AAEA,SAAK,QAAQ;AACb,SAAK,SAAS;AACd,SAAK,mBAAmBC,4BAA2B;AAAA,MACjD,IAAI,OAAO,WAAW;AAAA;AAAA,MACtB,QAAQ,CAAC,QAAiB;AACxB,eAAQ,IAAY;AAAA,MACtB;AAAA,MACA,MAAM;AAAA,QACJ;AAAA,MAAA;AAAA,IACF,CACD;AAAA,EAAA;AAAA,EAGH,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EAAA;AAAA,EAGN,mBACN,UACA,SACA,QACA;AACM,UAAA,QAAQ,KAAK,OAAO,QAAQ;A
AClC,UAAM,gBAAwC,CAAC;AAC/C,eAAW,UAAU,SAAS;AACtB,YAAA,MAAM,OAAO,OAAO,KAAK;AAC3B,UAAA,OAAO,SAAS,UAAU;AACd,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,MAC7C,WAAW,OAAO,SAAS,UAAU;AACrB,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACtC,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,MAAA,OACtC;AAES,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,MAAA;AAAA,IAC9C;AAEF,UAAM,SAAS,KAAK,SAAS,IAAIC,KAAAA,SAAS,aAAa,CAAC;AAAA,EAAA;AAAA,EAGlD,oBAAoB,UAAkB;AACtC,UAAA,QAAQ,KAAK,OAAO,QAAQ;AAC5B,UAAA,aAAa,KAAK,OAAO;AAAA,EAAA;AAAA,EAGzB,0BAA0B;AACzB,WAAA,QAAQ,KAAK,MAAM,EAAE,QAAQ,CAAC,CAAC,GAAG,MAAM;AAC7C,WAAK,oBAAoB,GAAG;AAAA,IAAA,CAC7B;AAAA,EAAA;AAAA,EAGK,mBAAmB;AACpB,SAAA;AAAA,EAAA;AAAA,EAGC,WAAW;AACjB,SAAK,MAAM,IAAI;AAAA,EAAA;AAAA,EAGjB,QAAQ;AACF,QAAA,KAAK,UAAU,WAAW;AACtB,YAAA,IAAI,MAAM,0BAA0B;AAAA,IAC5C,WAAW,KAAK,UAAU,WAAW;AAC7B,YAAA,IAAI,MAAM,kBAAkB;AAAA,IAAA;AAI7B,WAAA,QAAQ,KAAK,gBAAgB,EAAE,QAAQ,CAAC,CAAC,KAAKC,WAAU,MAAM;AAC9D,WAAA;AAAA,QACH;AAAA,QACAA,YAAW,sBAAsB;AAAA,QACjCA,YAAW,OAAO;AAAA,MACpB;AAAA,IAAA,CACD;AACD,SAAK,iBAAiB;AACtB,SAAK,wBAAwB;AAC7B,SAAK,SAAS;AAGP,WAAA,QAAQ,KAAK,gBAAgB,EAAE,QAAQ,CAAC,CAAC,KAAKA,WAAU,MAAM;AACnE,YAAM,cAAcA,YAAW,iBAAiB,CAAC,YAAY;AAC3D,aAAK,mBAAmB,KAAK,SAASA,YAAW,OAAO,MAAM;AAC9D,aAAK,iBAAiB;AACtB,aAAK,wBAAwB;AAC7B,aAAK,SAAS;AAAA,MAAA,CACf;AAEI,WAAA,qBAAqB,KAAK,WAAW;AAAA,IAAA,CAC3C;AAED,SAAK,QAAQ;AACb,WAAO,MAAM;AACX,WAAK,KAAK;AAAA,IACZ;AAAA,EAAA;AAAA,EAGF,OAAO;AACL,SAAK,qBAAqB,QAAQ,CAAC,gBAAgB,aAAa;AAChE,SAAK,uBAAuB,CAAC;AAC7B,SAAK,QAAQ;AAAA,EAAA;AAEjB;;;"}
+ {"version":3,"file":"compiled-query.cjs","sources":["../../../src/query/compiled-query.ts"],"sourcesContent":["import { D2, MultiSet, output } from \"@electric-sql/d2mini\"\nimport { createCollection } from \"../collection.js\"\nimport { compileQueryPipeline } from \"./pipeline-compiler.js\"\nimport type { Collection } from \"../collection.js\"\nimport type { ChangeMessage, SyncConfig } from \"../types.js\"\nimport type {\n IStreamBuilder,\n MultiSetArray,\n RootStreamBuilder,\n} from \"@electric-sql/d2mini\"\nimport type { QueryBuilder, ResultsFromContext } from \"./query-builder.js\"\nimport type { Context, Schema } from \"./types.js\"\n\nexport function compileQuery<TContext extends Context<Schema>>(\n queryBuilder: QueryBuilder<TContext>\n) {\n return new CompiledQuery<\n ResultsFromContext<TContext> & { _key?: string | number }\n >(queryBuilder)\n}\n\nexport class CompiledQuery<TResults extends object = Record<string, unknown>> {\n private graph: D2\n private inputs: Record<string, RootStreamBuilder<any>>\n private inputCollections: Record<string, Collection<any>>\n private resultCollection: Collection<TResults>\n public state: `compiled` | `running` | `stopped` = `compiled`\n private unsubscribeCallbacks: Array<() => void> = []\n\n constructor(queryBuilder: QueryBuilder<Context<Schema>>) {\n const query = queryBuilder._query\n const collections = query.collections\n\n if (!collections) {\n throw new Error(`No collections provided`)\n }\n\n this.inputCollections = collections\n\n const graph = new D2()\n const inputs = Object.fromEntries(\n Object.entries(collections).map(([key]) => [key, graph.newInput<any>()])\n )\n\n const sync: SyncConfig<TResults>[`sync`] = ({ begin, write, commit }) => {\n compileQueryPipeline<IStreamBuilder<[unknown, TResults]>>(\n query,\n inputs\n ).pipe(\n output((data) => {\n begin()\n data\n .getInner()\n .reduce((acc, [[key, value], multiplicity]) => {\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n }\n acc.set(key, changes)\n return acc\n }, new Map<unknown, { deletes: number; inserts: number; value: TResults }>())\n .forEach((changes, rawKey) => {\n const { deletes, inserts, value } = changes\n const valueWithKey = { ...value, _key: rawKey }\n if (inserts && !deletes) {\n write({\n value: valueWithKey,\n type: `insert`,\n })\n } else if (inserts >= deletes) {\n write({\n value: valueWithKey,\n type: `update`,\n })\n } else if (deletes > 0) {\n write({\n value: valueWithKey,\n type: `delete`,\n })\n }\n })\n commit()\n })\n )\n graph.finalize()\n }\n\n this.graph = graph\n this.inputs = inputs\n this.resultCollection = createCollection<TResults>({\n id: crypto.randomUUID(), // TODO: remove when we don't require any more\n getKey: (val: unknown) => {\n return (val as any)._key\n },\n sync: {\n sync,\n },\n })\n }\n\n get results() {\n return this.resultCollection\n }\n\n private sendChangesToInput(\n inputKey: string,\n changes: Array<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n ) {\n const input = this.inputs[inputKey]!\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, 
change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n input.sendData(new MultiSet(multiSetArray))\n }\n\n private runGraph() {\n this.graph.run()\n }\n\n start() {\n if (this.state === `running`) {\n throw new Error(`Query is already running`)\n } else if (this.state === `stopped`) {\n throw new Error(`Query is stopped`)\n }\n\n // Send initial state\n Object.entries(this.inputCollections).forEach(([key, collection]) => {\n this.sendChangesToInput(\n key,\n collection.currentStateAsChanges(),\n collection.config.getKey\n )\n })\n this.runGraph()\n\n // Subscribe to changes\n Object.entries(this.inputCollections).forEach(([key, collection]) => {\n const unsubscribe = collection.subscribeChanges((changes) => {\n this.sendChangesToInput(key, changes, collection.config.getKey)\n this.runGraph()\n })\n\n this.unsubscribeCallbacks.push(unsubscribe)\n })\n\n this.state = `running`\n return () => {\n this.stop()\n }\n }\n\n stop() {\n this.unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n this.unsubscribeCallbacks = []\n this.state = `stopped`\n }\n}\n"],"names":["D2","compileQueryPipeline","output","createCollection","MultiSet","collection"],"mappings":";;;;;AAaO,SAAS,aACd,cACA;AACO,SAAA,IAAI,cAET,YAAY;AAChB;AAEO,MAAM,cAAiE;AAAA,EAQ5E,YAAY,cAA6C;AAHzD,SAAO,QAA4C;AACnD,SAAQ,uBAA0C,CAAC;AAGjD,UAAM,QAAQ,aAAa;AAC3B,UAAM,cAAc,MAAM;AAE1B,QAAI,CAAC,aAAa;AACV,YAAA,IAAI,MAAM,yBAAyB;AAAA,IAAA;AAG3C,SAAK,mBAAmB;AAElB,UAAA,QAAQ,IAAIA,UAAG;AACrB,UAAM,SAAS,OAAO;AAAA,MACpB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,MAAM,SAAA,CAAe,CAAC;AAAA,IACzE;AAEA,UAAM,OAAqC,CAAC,EAAE,OAAO,OAAO,aAAa;AACvEC,uBAAA;AAAA,QACE;AAAA,QACA;AAAA,MAAA,EACA;AAAA,QACAC,OAAA,OAAO,CAAC,SAAS;AACT,gBAAA;AAEH,eAAA,WACA,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,KAAK,GAAG,YAAY,MAAM;AAC7C,kBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,cAC9B,SAAS;AAAA,cACT,SAAS;AAAA,cACT;AAAA,YACF;AACA,gBAAI,eAAe,GAAG;AACZ,sBAAA,WAAW,KAAK,IAAI,YAAY;AAAA,YAAA,WAC/B,eAAe,GAAG;AAC3B,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ;AAAA,YAAA;AAEd,gBAAA,IAAI,KAAK,OAAO;AACb,mBAAA;AAAA,UAAA,uBACF,IAAoE,CAAC,EAC3E,QAAQ,CAAC,SAAS,WAAW;AAC5B,kBAAM,EAAE,SAAS,SAAS,MAAU,IAAA;AACpC,kBAAM,eAAe,EAAE,GAAG,OAAO,MAAM,OAAO;AAC1C,gBAAA,WAAW,CAAC,SAAS;AACjB,oBAAA;AAAA,gBACJ,OAAO;AAAA,gBACP,MAAM;AAAA,cAAA,CACP;AAAA,YAAA,WACQ,WAAW,SAAS;AACvB,oBAAA;AAAA,gBACJ,OAAO;AAAA,gBACP,MAAM;AAAA,cAAA,CACP;AAAA,YAAA,WACQ,UAAU,GAAG;AAChB,oBAAA;AAAA,gBACJ,OAAO;AAAA,gBACP,MAAM;AAAA,cAAA,CACP;AAAA,YAAA;AAAA,UACH,CACD;AACI,iBAAA;AAAA,QACR,CAAA;AAAA,MACH;AACA,YAAM,SAAS;AAAA,IACjB;AAEA,SAAK,QAAQ;AACb,SAAK,SAAS;AACd,SAAK,mBAAmBC,4BAA2B;AAAA,MACjD,IAAI,OAAO,WAAW;AAAA;AAAA,MACtB,QAAQ,CAAC,QAAiB;AACxB,eAAQ,IAAY;AAAA,MACtB;AAAA,MACA,MAAM;AAAA,QACJ;AAAA,MAAA;AAAA,IACF,CACD;AAAA,EAAA;AAAA,EAGH,IAAI,UAAU;AACZ,WAAO,KAAK;AAAA,EAAA;AAAA,EAGN,mBACN,UACA,SACA,QACA;AACM,UAAA,QAAQ,KAAK,OAAO,QAAQ;AAClC,UAAM,gBAAwC,CAAC;AAC/C,eAAW,UAAU,SAAS;AACtB,YAAA,MAAM,OAAO,OAAO,KAAK;AAC3B,UAAA,OAAO,SAAS,UAAU;AACd,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,MAC7C,WAAW,OAAO,SAAS,UAAU;AACrB,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACtC,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,MAAA,OACtC;AAES,sBAAA,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,EAAE,CAAC;AAAA,MAAA;AAAA,IAC9C;AAEF,UAAM,SAAS,IAAIC,OAAS,SAAA,aAAa,CAAC;AAAA,EAAA;AAAA,EAGpC,WAAW;AACjB,SAAK,MAAM,IAAI;AAAA,EAAA;AAAA,EAGjB,QAAQ;AACF,QAAA,KAAK,UAAU,WAAW;AACtB,YAAA,IAAI,MAAM,0BAA0B;AAAA,IAC5C,WAAW,KAAK,UAAU,WAAW;AAC7B,YAAA,IAAI,MAAM,kBAAkB;AAAA,IAAA;AAI7B,WAAA,QAAQ,KAAK,gBAAgB,EAAE,QAAQ,CAAC,CAAC,KAAKC,WAAU,MAAM
;AAC9D,WAAA;AAAA,QACH;AAAA,QACAA,YAAW,sBAAsB;AAAA,QACjCA,YAAW,OAAO;AAAA,MACpB;AAAA,IAAA,CACD;AACD,SAAK,SAAS;AAGP,WAAA,QAAQ,KAAK,gBAAgB,EAAE,QAAQ,CAAC,CAAC,KAAKA,WAAU,MAAM;AACnE,YAAM,cAAcA,YAAW,iBAAiB,CAAC,YAAY;AAC3D,aAAK,mBAAmB,KAAK,SAASA,YAAW,OAAO,MAAM;AAC9D,aAAK,SAAS;AAAA,MAAA,CACf;AAEI,WAAA,qBAAqB,KAAK,WAAW;AAAA,IAAA,CAC3C;AAED,SAAK,QAAQ;AACb,WAAO,MAAM;AACX,WAAK,KAAK;AAAA,IACZ;AAAA,EAAA;AAAA,EAGF,OAAO;AACL,SAAK,qBAAqB,QAAQ,CAAC,gBAAgB,aAAa;AAChE,SAAK,uBAAuB,CAAC;AAC7B,SAAK,QAAQ;AAAA,EAAA;AAEjB;;;"}
@@ -10,14 +10,10 @@ export declare class CompiledQuery<TResults extends object = Record<string, unkn
  private inputCollections;
  private resultCollection;
  state: `compiled` | `running` | `stopped`;
- private version;
  private unsubscribeCallbacks;
  constructor(queryBuilder: QueryBuilder<Context<Schema>>);
  get results(): Collection<TResults, string | number, {}>;
  private sendChangesToInput;
- private sendFrontierToInput;
- private sendFrontierToAllInputs;
- private incrementVersion;
  private runGraph;
  start(): () => void;
  stop(): void;
@@ -1,9 +1,9 @@
  "use strict";
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const d2ts = require("@electric-sql/d2ts");
+ const d2mini = require("@electric-sql/d2mini");
  const extractors = require("./extractors.cjs");
  const utils = require("./utils.cjs");
- const { sum, count, avg, min, max, median, mode } = d2ts.groupByOperators;
+ const { sum, count, avg, min, max, median, mode } = d2mini.groupByOperators;
  function processGroupBy(pipeline, query, mainTableAlias) {
  const groupByColumns = Array.isArray(query.groupBy) ? query.groupBy : [query.groupBy];
  const keyExtractor = ([_oldKey, namespacedRow]) => {
@@ -41,7 +41,7 @@ function processGroupBy(pipeline, query, mainTableAlias) {
  }
  }
  if (Object.keys(aggregates).length > 0) {
- pipeline = pipeline.pipe(d2ts.groupBy(keyExtractor, aggregates));
+ pipeline = pipeline.pipe(d2mini.groupBy(keyExtractor, aggregates));
  }
  return pipeline;
  }
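The group-by change is the same rename at the call site: the aggregate helpers still come from groupByOperators and the pipeline is still piped through groupBy(keyExtractor, aggregates), only now from @electric-sql/d2mini. A minimal sketch under that assumption follows; the orders input and its columns are made up for illustration and mirror the extractor shapes built in processGroupBy above.

import { D2, MultiSet, groupBy, groupByOperators, output } from "@electric-sql/d2mini"

const { sum, count } = groupByOperators

const graph = new D2()
const orders = graph.newInput<[string, { status: string; amount: number }]>()

orders.pipe(
  // key extractor returns the grouping key object, aggregates map aliases to operators
  groupBy(([_key, order]) => ({ status: order.status }), {
    total: sum(([_key, order]) => order.amount),
    n: count(),
  }),
  output((data) => console.log(data.getInner()))
)
graph.finalize()

orders.sendData(
  new MultiSet([
    [[`a`, { status: `open`, amount: 2 }], 1],
    [[`b`, { status: `open`, amount: 3 }], 1],
  ])
)
graph.run()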
@@ -1 +1 @@
- {"version":3,"file":"group-by.cjs","sources":["../../../src/query/group-by.ts"],"sourcesContent":["import { groupBy, groupByOperators } from \"@electric-sql/d2ts\"\nimport {\n evaluateOperandOnNamespacedRow,\n extractValueFromNamespacedRow,\n} from \"./extractors\"\nimport { isAggregateFunctionCall } from \"./utils\"\nimport type { ConditionOperand, FunctionCall, Query } from \"./schema\"\nimport type { NamespacedAndKeyedStream } from \"../types.js\"\n\nconst { sum, count, avg, min, max, median, mode } = groupByOperators\n\n/**\n * Process the groupBy clause in a D2QL query\n */\nexport function processGroupBy(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string\n) {\n // Normalize groupBy to an array of column references\n const groupByColumns = Array.isArray(query.groupBy)\n ? query.groupBy\n : [query.groupBy]\n\n // Create a key extractor function for the groupBy operator\n const keyExtractor = ([_oldKey, namespacedRow]: [\n string,\n Record<string, unknown>,\n ]) => {\n const key: Record<string, unknown> = {}\n\n // Extract each groupBy column value\n for (const column of groupByColumns) {\n if (typeof column === `string` && (column as string).startsWith(`@`)) {\n const columnRef = (column as string).substring(1)\n const columnName = columnRef.includes(`.`)\n ? columnRef.split(`.`)[1]\n : columnRef\n\n key[columnName!] = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef,\n mainTableAlias\n )\n }\n }\n\n return key\n }\n\n // Create aggregate functions for any aggregated columns in the SELECT clause\n const aggregates: Record<string, any> = {}\n\n if (!query.select) {\n throw new Error(`SELECT clause is required for GROUP BY`)\n }\n\n // Scan the SELECT clause for aggregate functions\n for (const item of query.select) {\n if (typeof item === `object`) {\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `object` && isAggregateFunctionCall(expr)) {\n // Get the function name (the only key in the object)\n const functionName = Object.keys(expr)[0]\n // Get the column reference or expression to aggregate\n const columnRef = (expr as FunctionCall)[\n functionName as keyof FunctionCall\n ]\n\n // Add the aggregate function to our aggregates object\n aggregates[alias] = getAggregateFunction(\n functionName!,\n columnRef,\n mainTableAlias\n )\n }\n }\n }\n }\n\n // Apply the groupBy operator if we have any aggregates\n if (Object.keys(aggregates).length > 0) {\n pipeline = pipeline.pipe(groupBy(keyExtractor, aggregates))\n }\n\n return pipeline\n}\n\n/**\n * Helper function to get an aggregate function based on the function name\n */\nexport function getAggregateFunction(\n functionName: string,\n columnRef: string | ConditionOperand,\n mainTableAlias: string\n) {\n // Create a value extractor function for the column to aggregate\n const valueExtractor = ([_oldKey, namespacedRow]: [\n string,\n Record<string, unknown>,\n ]) => {\n let value: unknown\n if (typeof columnRef === `string` && columnRef.startsWith(`@`)) {\n value = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef.substring(1),\n mainTableAlias\n )\n } else {\n value = evaluateOperandOnNamespacedRow(\n namespacedRow,\n columnRef as ConditionOperand,\n mainTableAlias\n )\n }\n // Ensure we return a number for aggregate functions\n return typeof value === `number` ? 
value : 0\n }\n\n // Return the appropriate aggregate function\n switch (functionName.toUpperCase()) {\n case `SUM`:\n return sum(valueExtractor)\n case `COUNT`:\n return count() // count() doesn't need a value extractor\n case `AVG`:\n return avg(valueExtractor)\n case `MIN`:\n return min(valueExtractor)\n case `MAX`:\n return max(valueExtractor)\n case `MEDIAN`:\n return median(valueExtractor)\n case `MODE`:\n return mode(valueExtractor)\n default:\n throw new Error(`Unsupported aggregate function: ${functionName}`)\n }\n}\n"],"names":["groupByOperators","extractValueFromNamespacedRow","isAggregateFunctionCall","groupBy","evaluateOperandOnNamespacedRow"],"mappings":";;;;;AASA,MAAM,EAAE,KAAK,OAAO,KAAK,KAAK,KAAK,QAAQ,SAASA,KAAA;AAKpC,SAAA,eACd,UACA,OACA,gBACA;AAEM,QAAA,iBAAiB,MAAM,QAAQ,MAAM,OAAO,IAC9C,MAAM,UACN,CAAC,MAAM,OAAO;AAGlB,QAAM,eAAe,CAAC,CAAC,SAAS,aAAa,MAGvC;AACJ,UAAM,MAA+B,CAAC;AAGtC,eAAW,UAAU,gBAAgB;AACnC,UAAI,OAAO,WAAW,YAAa,OAAkB,WAAW,GAAG,GAAG;AAC9D,cAAA,YAAa,OAAkB,UAAU,CAAC;AAC1C,cAAA,aAAa,UAAU,SAAS,GAAG,IACrC,UAAU,MAAM,GAAG,EAAE,CAAC,IACtB;AAEJ,YAAI,UAAW,IAAIC,WAAA;AAAA,UACjB;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MAAA;AAAA,IACF;AAGK,WAAA;AAAA,EACT;AAGA,QAAM,aAAkC,CAAC;AAErC,MAAA,CAAC,MAAM,QAAQ;AACX,UAAA,IAAI,MAAM,wCAAwC;AAAA,EAAA;AAI/C,aAAA,QAAQ,MAAM,QAAQ;AAC3B,QAAA,OAAO,SAAS,UAAU;AAC5B,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAI,OAAO,SAAS,YAAYC,MAAA,wBAAwB,IAAI,GAAG;AAE7D,gBAAM,eAAe,OAAO,KAAK,IAAI,EAAE,CAAC;AAElC,gBAAA,YAAa,KACjB,YACF;AAGA,qBAAW,KAAK,IAAI;AAAA,YAClB;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,QAAA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAIF,MAAI,OAAO,KAAK,UAAU,EAAE,SAAS,GAAG;AACtC,eAAW,SAAS,KAAKC,KAAQ,QAAA,cAAc,UAAU,CAAC;AAAA,EAAA;AAGrD,SAAA;AACT;AAKgB,SAAA,qBACd,cACA,WACA,gBACA;AAEA,QAAM,iBAAiB,CAAC,CAAC,SAAS,aAAa,MAGzC;AACA,QAAA;AACJ,QAAI,OAAO,cAAc,YAAY,UAAU,WAAW,GAAG,GAAG;AACtD,cAAAF,WAAA;AAAA,QACN;AAAA,QACA,UAAU,UAAU,CAAC;AAAA,QACrB;AAAA,MACF;AAAA,IAAA,OACK;AACG,cAAAG,WAAA;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAAA;AAGK,WAAA,OAAO,UAAU,WAAW,QAAQ;AAAA,EAC7C;AAGQ,UAAA,aAAa,YAAe,GAAA;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,MAAM;AAAA;AAAA,IACf,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,OAAO,cAAc;AAAA,IAC9B,KAAK;AACH,aAAO,KAAK,cAAc;AAAA,IAC5B;AACE,YAAM,IAAI,MAAM,mCAAmC,YAAY,EAAE;AAAA,EAAA;AAEvE;;;"}
+ {"version":3,"file":"group-by.cjs","sources":["../../../src/query/group-by.ts"],"sourcesContent":["import { groupBy, groupByOperators } from \"@electric-sql/d2mini\"\nimport {\n evaluateOperandOnNamespacedRow,\n extractValueFromNamespacedRow,\n} from \"./extractors\"\nimport { isAggregateFunctionCall } from \"./utils\"\nimport type { ConditionOperand, FunctionCall, Query } from \"./schema\"\nimport type { NamespacedAndKeyedStream } from \"../types.js\"\n\nconst { sum, count, avg, min, max, median, mode } = groupByOperators\n\n/**\n * Process the groupBy clause in a D2QL query\n */\nexport function processGroupBy(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string\n) {\n // Normalize groupBy to an array of column references\n const groupByColumns = Array.isArray(query.groupBy)\n ? query.groupBy\n : [query.groupBy]\n\n // Create a key extractor function for the groupBy operator\n const keyExtractor = ([_oldKey, namespacedRow]: [\n string,\n Record<string, unknown>,\n ]) => {\n const key: Record<string, unknown> = {}\n\n // Extract each groupBy column value\n for (const column of groupByColumns) {\n if (typeof column === `string` && (column as string).startsWith(`@`)) {\n const columnRef = (column as string).substring(1)\n const columnName = columnRef.includes(`.`)\n ? columnRef.split(`.`)[1]\n : columnRef\n\n key[columnName!] = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef,\n mainTableAlias\n )\n }\n }\n\n return key\n }\n\n // Create aggregate functions for any aggregated columns in the SELECT clause\n const aggregates: Record<string, any> = {}\n\n if (!query.select) {\n throw new Error(`SELECT clause is required for GROUP BY`)\n }\n\n // Scan the SELECT clause for aggregate functions\n for (const item of query.select) {\n if (typeof item === `object`) {\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `object` && isAggregateFunctionCall(expr)) {\n // Get the function name (the only key in the object)\n const functionName = Object.keys(expr)[0]\n // Get the column reference or expression to aggregate\n const columnRef = (expr as FunctionCall)[\n functionName as keyof FunctionCall\n ]\n\n // Add the aggregate function to our aggregates object\n aggregates[alias] = getAggregateFunction(\n functionName!,\n columnRef,\n mainTableAlias\n )\n }\n }\n }\n }\n\n // Apply the groupBy operator if we have any aggregates\n if (Object.keys(aggregates).length > 0) {\n pipeline = pipeline.pipe(groupBy(keyExtractor, aggregates))\n }\n\n return pipeline\n}\n\n/**\n * Helper function to get an aggregate function based on the function name\n */\nexport function getAggregateFunction(\n functionName: string,\n columnRef: string | ConditionOperand,\n mainTableAlias: string\n) {\n // Create a value extractor function for the column to aggregate\n const valueExtractor = ([_oldKey, namespacedRow]: [\n string,\n Record<string, unknown>,\n ]) => {\n let value: unknown\n if (typeof columnRef === `string` && columnRef.startsWith(`@`)) {\n value = extractValueFromNamespacedRow(\n namespacedRow,\n columnRef.substring(1),\n mainTableAlias\n )\n } else {\n value = evaluateOperandOnNamespacedRow(\n namespacedRow,\n columnRef as ConditionOperand,\n mainTableAlias\n )\n }\n // Ensure we return a number for aggregate functions\n return typeof value === `number` ? 
value : 0\n }\n\n // Return the appropriate aggregate function\n switch (functionName.toUpperCase()) {\n case `SUM`:\n return sum(valueExtractor)\n case `COUNT`:\n return count() // count() doesn't need a value extractor\n case `AVG`:\n return avg(valueExtractor)\n case `MIN`:\n return min(valueExtractor)\n case `MAX`:\n return max(valueExtractor)\n case `MEDIAN`:\n return median(valueExtractor)\n case `MODE`:\n return mode(valueExtractor)\n default:\n throw new Error(`Unsupported aggregate function: ${functionName}`)\n }\n}\n"],"names":["groupByOperators","extractValueFromNamespacedRow","isAggregateFunctionCall","groupBy","evaluateOperandOnNamespacedRow"],"mappings":";;;;;AASA,MAAM,EAAE,KAAK,OAAO,KAAK,KAAK,KAAK,QAAQ,SAASA,OAAA;AAKpC,SAAA,eACd,UACA,OACA,gBACA;AAEM,QAAA,iBAAiB,MAAM,QAAQ,MAAM,OAAO,IAC9C,MAAM,UACN,CAAC,MAAM,OAAO;AAGlB,QAAM,eAAe,CAAC,CAAC,SAAS,aAAa,MAGvC;AACJ,UAAM,MAA+B,CAAC;AAGtC,eAAW,UAAU,gBAAgB;AACnC,UAAI,OAAO,WAAW,YAAa,OAAkB,WAAW,GAAG,GAAG;AAC9D,cAAA,YAAa,OAAkB,UAAU,CAAC;AAC1C,cAAA,aAAa,UAAU,SAAS,GAAG,IACrC,UAAU,MAAM,GAAG,EAAE,CAAC,IACtB;AAEJ,YAAI,UAAW,IAAIC,WAAA;AAAA,UACjB;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MAAA;AAAA,IACF;AAGK,WAAA;AAAA,EACT;AAGA,QAAM,aAAkC,CAAC;AAErC,MAAA,CAAC,MAAM,QAAQ;AACX,UAAA,IAAI,MAAM,wCAAwC;AAAA,EAAA;AAI/C,aAAA,QAAQ,MAAM,QAAQ;AAC3B,QAAA,OAAO,SAAS,UAAU;AAC5B,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAI,OAAO,SAAS,YAAYC,MAAA,wBAAwB,IAAI,GAAG;AAE7D,gBAAM,eAAe,OAAO,KAAK,IAAI,EAAE,CAAC;AAElC,gBAAA,YAAa,KACjB,YACF;AAGA,qBAAW,KAAK,IAAI;AAAA,YAClB;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,QAAA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAIF,MAAI,OAAO,KAAK,UAAU,EAAE,SAAS,GAAG;AACtC,eAAW,SAAS,KAAKC,OAAQ,QAAA,cAAc,UAAU,CAAC;AAAA,EAAA;AAGrD,SAAA;AACT;AAKgB,SAAA,qBACd,cACA,WACA,gBACA;AAEA,QAAM,iBAAiB,CAAC,CAAC,SAAS,aAAa,MAGzC;AACA,QAAA;AACJ,QAAI,OAAO,cAAc,YAAY,UAAU,WAAW,GAAG,GAAG;AACtD,cAAAF,WAAA;AAAA,QACN;AAAA,QACA,UAAU,UAAU,CAAC;AAAA,QACrB;AAAA,MACF;AAAA,IAAA,OACK;AACG,cAAAG,WAAA;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAAA;AAGK,WAAA,OAAO,UAAU,WAAW,QAAQ;AAAA,EAC7C;AAGQ,UAAA,aAAa,YAAe,GAAA;AAAA,IAClC,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,MAAM;AAAA;AAAA,IACf,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,IAAI,cAAc;AAAA,IAC3B,KAAK;AACH,aAAO,OAAO,cAAc;AAAA,IAC9B,KAAK;AACH,aAAO,KAAK,cAAc;AAAA,IAC5B;AACE,YAAM,IAAI,MAAM,mCAAmC,YAAY,EAAE;AAAA,EAAA;AAEvE;;;"}
@@ -12,7 +12,7 @@ export declare function getAggregateFunction(functionName: string, columnRef: st
  reduce: (values: [number, number][]) => number;
  postMap?: ((result: number) => number) | undefined;
  } | {
- pipe: (stream: import('@electric-sql/d2ts').IStreamBuilder<[string, Record<string, unknown>]>) => import('@electric-sql/d2ts').IStreamBuilder<import('@electric-sql/d2ts').KeyValue<string, number>>;
+ pipe: (stream: import('@electric-sql/d2mini').IStreamBuilder<[string, Record<string, unknown>]>) => import('@electric-sql/d2mini').IStreamBuilder<import('@electric-sql/d2mini').KeyValue<string, number>>;
  } | {
  preMap: (data: [string, Record<string, unknown>]) => {
  sum: number;
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const d2ts = require("@electric-sql/d2ts");
+ const d2mini = require("@electric-sql/d2mini");
  const evaluators = require("./evaluators.cjs");
  const extractors = require("./extractors.cjs");
  function processJoinClause(pipeline, query, tables, mainTableAlias, allInputs) {
@@ -11,7 +11,7 @@ function processJoinClause(pipeline, query, tables, mainTableAlias, allInputs) {
  const joinType = joinClause.type === `cross` ? `inner` : joinClause.type;
  const [mainKeyRef, , joinedKeyRefs] = joinClause.on;
  const mainPipeline = pipeline.pipe(
- d2ts.map(([currentKey, namespacedRow]) => {
+ d2mini.map(([currentKey, namespacedRow]) => {
  const mainRow = namespacedRow[mainTableAlias];
  const key = extractors.extractJoinKey(mainRow, mainKeyRef, mainTableAlias);
  return [key, [currentKey, namespacedRow]];
@@ -25,7 +25,7 @@ function processJoinClause(pipeline, query, tables, mainTableAlias, allInputs) {
  }
  tables[joinedTableAlias] = joinedTableInput;
  const joinedPipeline = joinedTableInput.pipe(
- d2ts.map(([currentKey, row]) => {
+ d2mini.map(([currentKey, row]) => {
  const namespacedRow = { [joinedTableAlias]: row };
  const key = extractors.extractJoinKey(row, joinedKeyRefs, joinedTableAlias);
  return [key, [currentKey, namespacedRow]];
@@ -34,36 +34,36 @@ function processJoinClause(pipeline, query, tables, mainTableAlias, allInputs) {
  switch (joinType) {
  case `inner`:
  pipeline = mainPipeline.pipe(
- d2ts.join(joinedPipeline, `inner`),
- d2ts.consolidate(),
+ d2mini.join(joinedPipeline, `inner`),
+ d2mini.consolidate(),
  processJoinResults(mainTableAlias, joinedTableAlias, joinClause)
  );
  break;
  case `left`:
  pipeline = mainPipeline.pipe(
- d2ts.join(joinedPipeline, `left`),
- d2ts.consolidate(),
+ d2mini.join(joinedPipeline, `left`),
+ d2mini.consolidate(),
  processJoinResults(mainTableAlias, joinedTableAlias, joinClause)
  );
  break;
  case `right`:
  pipeline = mainPipeline.pipe(
- d2ts.join(joinedPipeline, `right`),
- d2ts.consolidate(),
+ d2mini.join(joinedPipeline, `right`),
+ d2mini.consolidate(),
  processJoinResults(mainTableAlias, joinedTableAlias, joinClause)
  );
  break;
  case `full`:
  pipeline = mainPipeline.pipe(
- d2ts.join(joinedPipeline, `full`),
- d2ts.consolidate(),
+ d2mini.join(joinedPipeline, `full`),
+ d2mini.consolidate(),
  processJoinResults(mainTableAlias, joinedTableAlias, joinClause)
  );
  break;
  default:
  pipeline = mainPipeline.pipe(
- d2ts.join(joinedPipeline, `inner`),
- d2ts.consolidate(),
+ d2mini.join(joinedPipeline, `inner`),
+ d2mini.consolidate(),
  processJoinResults(mainTableAlias, joinedTableAlias, joinClause)
  );
  }
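The join pipeline keeps the same structure as before — re-key each side on the join key, join, then consolidate — with the operators now imported from d2mini. Below is a small sketch of that composition, using only operators visible in this hunk (map, join, consolidate) plus the input/output plumbing from the compiled-query file; the users/posts inputs and their columns are illustrative.

import { D2, MultiSet, consolidate, join, map, output } from "@electric-sql/d2mini"

const graph = new D2()
const users = graph.newInput<[string, { id: number; name: string }]>()
const posts = graph.newInput<[string, { authorId: number; title: string }]>()

// mirror mainPipeline/joinedPipeline: re-key as [joinKey, [originalKey, row]]
const left = users.pipe(
  map(([key, user]) => [user.id, [key, user]] as [number, [string, typeof user]])
)
const right = posts.pipe(
  map(([key, post]) => [post.authorId, [key, post]] as [number, [string, typeof post]])
)

left.pipe(
  join(right, `left`), // the switch above selects `inner`/`left`/`right`/`full`
  consolidate(),
  output((data) => console.log(data.getInner()))
)
graph.finalize()

users.sendData(new MultiSet([[[`u1`, { id: 1, name: `Ada` }], 1]]))
posts.sendData(new MultiSet([[[`p1`, { authorId: 1, title: `Hello` }], 1]]))
graph.run()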
@@ -74,7 +74,7 @@ function processJoinResults(mainTableAlias, joinedTableAlias, joinClause) {
  return function(pipeline) {
  return pipeline.pipe(
  // Process the join result and handle nulls in the same step
- d2ts.map((result) => {
+ d2mini.map((result) => {
  const [_key, [main, joined]] = result;
  const mainKey = main == null ? void 0 : main[0];
  const mainNamespacedRow = main == null ? void 0 : main[1];
@@ -111,9 +111,9 @@ function processJoinResults(mainTableAlias, joinedTableAlias, joinClause) {
  return [newKey, mergedNamespacedRow];
  }),
  // Filter out undefined results
- d2ts.filter((value) => value !== void 0),
+ d2mini.filter((value) => value !== void 0),
  // Process the ON condition
- d2ts.filter(([_key, namespacedRow]) => {
+ d2mini.filter(([_key, namespacedRow]) => {
  if (!joinClause.on || joinClause.type === `cross`) {
  return true;
  }
@@ -1 +1 @@
- {"version":3,"file":"joins.cjs","sources":["../../../src/query/joins.ts"],"sourcesContent":["import {\n consolidate,\n filter,\n join as joinOperator,\n map,\n} from \"@electric-sql/d2ts\"\nimport { evaluateConditionOnNamespacedRow } from \"./evaluators.js\"\nimport { extractJoinKey } from \"./extractors.js\"\nimport type { Query } from \"./index.js\"\nimport type { IStreamBuilder, JoinType } from \"@electric-sql/d2ts\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types.js\"\n\n/**\n * Creates a processing pipeline for join clauses\n */\nexport function processJoinClause(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>\n) {\n if (!query.join) return pipeline\n const input = allInputs[query.from]\n\n for (const joinClause of query.join) {\n // Create a stream for the joined table\n const joinedTableAlias = joinClause.as || joinClause.from\n\n // Get the right join type for the operator\n const joinType: JoinType =\n joinClause.type === `cross` ? `inner` : joinClause.type\n\n // The `in` is formatted as ['@mainKeyRef', '=', '@joinedKeyRef']\n // Destructure the main key reference and the joined key references\n const [mainKeyRef, , joinedKeyRefs] = joinClause.on\n\n // We need to prepare the main pipeline and the joined pipeline\n // to have the correct key format for joining\n const mainPipeline = pipeline.pipe(\n map(([currentKey, namespacedRow]) => {\n // Extract the key from the ON condition left side for the main table\n const mainRow = namespacedRow[mainTableAlias]!\n\n // Extract the join key from the main row\n const key = extractJoinKey(mainRow, mainKeyRef, mainTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Get the joined table input from the inputs map\n let joinedTableInput: KeyedStream\n\n if (allInputs[joinClause.from]) {\n // Use the provided input if available\n joinedTableInput = allInputs[joinClause.from]!\n } else {\n // Create a new input if not provided\n joinedTableInput =\n input!.graph.newInput<[string, Record<string, unknown>]>()\n }\n\n tables[joinedTableAlias] = joinedTableInput\n\n // Create a pipeline for the joined table\n const joinedPipeline = joinedTableInput.pipe(\n map(([currentKey, row]) => {\n // Wrap the row in an object with the table alias as the key\n const namespacedRow: NamespacedRow = { [joinedTableAlias]: row }\n\n // Extract the key from the ON condition right side for the joined table\n const key = extractJoinKey(row, joinedKeyRefs, joinedTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n string,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Apply join with appropriate typings based on join type\n switch (joinType) {\n case `inner`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `left`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `left`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `right`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `right`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `full`:\n pipeline = 
mainPipeline.pipe(\n joinOperator(joinedPipeline, `full`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n default:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n }\n }\n return pipeline\n}\n\n/**\n * Creates a processing pipeline for join results\n */\nexport function processJoinResults(\n mainTableAlias: string,\n joinedTableAlias: string,\n joinClause: { on: any; type: string }\n) {\n return function (\n pipeline: IStreamBuilder<\n [\n key: string,\n [\n [string, NamespacedRow] | undefined,\n [string, NamespacedRow] | undefined,\n ],\n ]\n >\n ): NamespacedAndKeyedStream {\n return pipeline.pipe(\n // Process the join result and handle nulls in the same step\n map((result) => {\n const [_key, [main, joined]] = result\n const mainKey = main?.[0]\n const mainNamespacedRow = main?.[1]\n const joinedKey = joined?.[0]\n const joinedNamespacedRow = joined?.[1]\n\n // For inner joins, both sides should be non-null\n if (joinClause.type === `inner` || joinClause.type === `cross`) {\n if (!mainNamespacedRow || !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n }\n\n // For left joins, the main row must be non-null\n if (joinClause.type === `left` && !mainNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // For right joins, the joined row must be non-null\n if (joinClause.type === `right` && !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // Merge the nested rows\n const mergedNamespacedRow: NamespacedRow = {}\n\n // Add main row data if it exists\n if (mainNamespacedRow) {\n Object.entries(mainNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n }\n\n // If we have a joined row, add it to the merged result\n if (joinedNamespacedRow) {\n Object.entries(joinedNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n } else if (joinClause.type === `left` || joinClause.type === `full`) {\n // For left or full joins, add the joined table with undefined data if missing\n // mergedNamespacedRow[joinedTableAlias] = undefined\n }\n\n // For right or full joins, add the main table with undefined data if missing\n if (\n !mainNamespacedRow &&\n (joinClause.type === `right` || joinClause.type === `full`)\n ) {\n // mergedNamespacedRow[mainTableAlias] = undefined\n }\n\n // New key\n const newKey = `[${mainKey},${joinedKey}]`\n\n return [newKey, mergedNamespacedRow] as [\n string,\n typeof mergedNamespacedRow,\n ]\n }),\n // Filter out undefined results\n filter((value) => value !== undefined),\n // Process the ON condition\n filter(([_key, namespacedRow]: [string, NamespacedRow]) => {\n // If there's no ON condition, or it's a cross join, always return true\n if (!joinClause.on || joinClause.type === `cross`) {\n return true\n }\n\n // For LEFT JOIN, if the right side is null, we should include the row\n if (\n joinClause.type === `left` &&\n namespacedRow[joinedTableAlias] === undefined\n ) {\n return true\n }\n\n // For RIGHT JOIN, if the left side is null, we should include the row\n if (\n joinClause.type === `right` &&\n namespacedRow[mainTableAlias] === undefined\n ) {\n return true\n }\n\n // For FULL JOIN, if either side is null, we should include the row\n if (\n joinClause.type === `full` &&\n (namespacedRow[mainTableAlias] === undefined ||\n 
namespacedRow[joinedTableAlias] === undefined)\n ) {\n return true\n }\n\n return evaluateConditionOnNamespacedRow(\n namespacedRow,\n joinClause.on,\n mainTableAlias,\n joinedTableAlias\n )\n })\n )\n }\n}\n"],"names":["map","extractJoinKey","joinOperator","consolidate","filter","evaluateConditionOnNamespacedRow"],"mappings":";;;;;AAmBO,SAAS,kBACd,UACA,OACA,QACA,gBACA,WACA;AACI,MAAA,CAAC,MAAM,KAAa,QAAA;AAClB,QAAA,QAAQ,UAAU,MAAM,IAAI;AAEvB,aAAA,cAAc,MAAM,MAAM;AAE7B,UAAA,mBAAmB,WAAW,MAAM,WAAW;AAGrD,UAAM,WACJ,WAAW,SAAS,UAAU,UAAU,WAAW;AAIrD,UAAM,CAAC,YAAA,EAAc,aAAa,IAAI,WAAW;AAIjD,UAAM,eAAe,SAAS;AAAA,MAC5BA,KAAAA,IAAI,CAAC,CAAC,YAAY,aAAa,MAAM;AAE7B,cAAA,UAAU,cAAc,cAAc;AAG5C,cAAM,MAAMC,WAAA,eAAe,SAAS,YAAY,cAAc;AAG9D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGI,QAAA;AAEA,QAAA,UAAU,WAAW,IAAI,GAAG;AAEX,yBAAA,UAAU,WAAW,IAAI;AAAA,IAAA,OACvC;AAGH,yBAAA,MAAO,MAAM,SAA4C;AAAA,IAAA;AAG7D,WAAO,gBAAgB,IAAI;AAG3B,UAAM,iBAAiB,iBAAiB;AAAA,MACtCD,KAAAA,IAAI,CAAC,CAAC,YAAY,GAAG,MAAM;AAEzB,cAAM,gBAA+B,EAAE,CAAC,gBAAgB,GAAG,IAAI;AAG/D,cAAM,MAAMC,WAAA,eAAe,KAAK,eAAe,gBAAgB;AAG/D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGA,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBC,UAAa,gBAAgB,OAAO;AAAA,UACpCC,iBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBD,UAAa,gBAAgB,MAAM;AAAA,UACnCC,iBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBD,UAAa,gBAAgB,OAAO;AAAA,UACpCC,iBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBD,UAAa,gBAAgB,MAAM;AAAA,UACnCC,iBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF;AACE,mBAAW,aAAa;AAAA,UACtBD,UAAa,gBAAgB,OAAO;AAAA,UACpCC,iBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AAAA,IAAA;AAAA,EACJ;AAEK,SAAA;AACT;AAKgB,SAAA,mBACd,gBACA,kBACA,YACA;AACA,SAAO,SACL,UAS0B;AAC1B,WAAO,SAAS;AAAA;AAAA,MAEdH,KAAA,IAAI,CAAC,WAAW;AACd,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AACzB,cAAA,UAAU,6BAAO;AACjB,cAAA,oBAAoB,6BAAO;AAC3B,cAAA,YAAY,iCAAS;AACrB,cAAA,sBAAsB,iCAAS;AAGrC,YAAI,WAAW,SAAS,WAAW,WAAW,SAAS,SAAS;AAC1D,cAAA,CAAC,qBAAqB,CAAC,qBAAqB;AACvC,mBAAA;AAAA,UAAA;AAAA,QACT;AAIF,YAAI,WAAW,SAAS,UAAU,CAAC,mBAAmB;AAC7C,iBAAA;AAAA,QAAA;AAIT,YAAI,WAAW,SAAS,WAAW,CAAC,qBAAqB;AAChD,iBAAA;AAAA,QAAA;AAIT,cAAM,sBAAqC,CAAC;AAG5C,YAAI,mBAAmB;AACd,iBAAA,QAAQ,iBAAiB,EAAE;AAAA,YAChC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA;AAIF,YAAI,qBAAqB;AAChB,iBAAA,QAAQ,mBAAmB,EAAE;AAAA,YAClC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA,WACS,WAAW,SAAS,UAAU,WAAW,SAAS,OAAQ;AAMrE,YACE,CAAC,sBACA,WAAW,SAAS,WAAW,WAAW,SAAS,QACpD;AAKF,cAAM,SAAS,IAAI,OAAO,IAAI,SAAS;AAEhC,eAAA,CAAC,QAAQ,mBAAmB;AAAA,MAAA,CAIpC;AAAA;AAAA,MAEDI,KAAAA,OAAO,CAAC,UAAU,UAAU,MAAS;AAAA;AAAA,MAErCA,KAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAA+B;AAEzD,YAAI,CAAC,WAAW,MAAM,WAAW,SAAS,SAAS;AAC1C,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,UACpB,cAAc,gBAAgB,MAAM,QACpC;AACO,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,WACpB,cAAc,cAAc,MAAM,QAClC;AACO,iBAAA;AAAA,QAAA;AAKP,YAAA,WAAW,SAAS,WACnB,cAAc,cAAc,MAAM,UACjC,cAAc,gBAAgB,MAAM,SACtC;AACO,iBAAA;AAAA,QAAA;AAGF,eAAAC,WAAA;AAAA,UACL;AAAA,UACA,WAAW;AAAA,UACX;AAAA,UACA;AAAA,QACF;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EACF;AACF;;;"}
+ {"version":3,"file":"joins.cjs","sources":["../../../src/query/joins.ts"],"sourcesContent":["import {\n consolidate,\n filter,\n join as joinOperator,\n map,\n} from \"@electric-sql/d2mini\"\nimport { evaluateConditionOnNamespacedRow } from \"./evaluators.js\"\nimport { extractJoinKey } from \"./extractors.js\"\nimport type { Query } from \"./index.js\"\nimport type { IStreamBuilder, JoinType } from \"@electric-sql/d2mini\"\nimport type {\n KeyedStream,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types.js\"\n\n/**\n * Creates a processing pipeline for join clauses\n */\nexport function processJoinClause(\n pipeline: NamespacedAndKeyedStream,\n query: Query,\n tables: Record<string, KeyedStream>,\n mainTableAlias: string,\n allInputs: Record<string, KeyedStream>\n) {\n if (!query.join) return pipeline\n const input = allInputs[query.from]\n\n for (const joinClause of query.join) {\n // Create a stream for the joined table\n const joinedTableAlias = joinClause.as || joinClause.from\n\n // Get the right join type for the operator\n const joinType: JoinType =\n joinClause.type === `cross` ? `inner` : joinClause.type\n\n // The `in` is formatted as ['@mainKeyRef', '=', '@joinedKeyRef']\n // Destructure the main key reference and the joined key references\n const [mainKeyRef, , joinedKeyRefs] = joinClause.on\n\n // We need to prepare the main pipeline and the joined pipeline\n // to have the correct key format for joining\n const mainPipeline = pipeline.pipe(\n map(([currentKey, namespacedRow]) => {\n // Extract the key from the ON condition left side for the main table\n const mainRow = namespacedRow[mainTableAlias]!\n\n // Extract the join key from the main row\n const key = extractJoinKey(mainRow, mainKeyRef, mainTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n unknown,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Get the joined table input from the inputs map\n let joinedTableInput: KeyedStream\n\n if (allInputs[joinClause.from]) {\n // Use the provided input if available\n joinedTableInput = allInputs[joinClause.from]!\n } else {\n // Create a new input if not provided\n joinedTableInput =\n input!.graph.newInput<[string, Record<string, unknown>]>()\n }\n\n tables[joinedTableAlias] = joinedTableInput\n\n // Create a pipeline for the joined table\n const joinedPipeline = joinedTableInput.pipe(\n map(([currentKey, row]) => {\n // Wrap the row in an object with the table alias as the key\n const namespacedRow: NamespacedRow = { [joinedTableAlias]: row }\n\n // Extract the key from the ON condition right side for the joined table\n const key = extractJoinKey(row, joinedKeyRefs, joinedTableAlias)\n\n // Return [key, namespacedRow] as a KeyValue type\n return [key, [currentKey, namespacedRow]] as [\n string,\n [string, typeof namespacedRow],\n ]\n })\n )\n\n // Apply join with appropriate typings based on join type\n switch (joinType) {\n case `inner`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `left`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `left`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `right`:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `right`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n case `full`:\n pipeline 
= mainPipeline.pipe(\n joinOperator(joinedPipeline, `full`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n break\n default:\n pipeline = mainPipeline.pipe(\n joinOperator(joinedPipeline, `inner`),\n consolidate(),\n processJoinResults(mainTableAlias, joinedTableAlias, joinClause)\n )\n }\n }\n return pipeline\n}\n\n/**\n * Creates a processing pipeline for join results\n */\nexport function processJoinResults(\n mainTableAlias: string,\n joinedTableAlias: string,\n joinClause: { on: any; type: string }\n) {\n return function (\n pipeline: IStreamBuilder<\n [\n key: string,\n [\n [string, NamespacedRow] | undefined,\n [string, NamespacedRow] | undefined,\n ],\n ]\n >\n ): NamespacedAndKeyedStream {\n return pipeline.pipe(\n // Process the join result and handle nulls in the same step\n map((result) => {\n const [_key, [main, joined]] = result\n const mainKey = main?.[0]\n const mainNamespacedRow = main?.[1]\n const joinedKey = joined?.[0]\n const joinedNamespacedRow = joined?.[1]\n\n // For inner joins, both sides should be non-null\n if (joinClause.type === `inner` || joinClause.type === `cross`) {\n if (!mainNamespacedRow || !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n }\n\n // For left joins, the main row must be non-null\n if (joinClause.type === `left` && !mainNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // For right joins, the joined row must be non-null\n if (joinClause.type === `right` && !joinedNamespacedRow) {\n return undefined // Will be filtered out\n }\n\n // Merge the nested rows\n const mergedNamespacedRow: NamespacedRow = {}\n\n // Add main row data if it exists\n if (mainNamespacedRow) {\n Object.entries(mainNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n }\n\n // If we have a joined row, add it to the merged result\n if (joinedNamespacedRow) {\n Object.entries(joinedNamespacedRow).forEach(\n ([tableAlias, tableData]) => {\n mergedNamespacedRow[tableAlias] = tableData\n }\n )\n } else if (joinClause.type === `left` || joinClause.type === `full`) {\n // For left or full joins, add the joined table with undefined data if missing\n // mergedNamespacedRow[joinedTableAlias] = undefined\n }\n\n // For right or full joins, add the main table with undefined data if missing\n if (\n !mainNamespacedRow &&\n (joinClause.type === `right` || joinClause.type === `full`)\n ) {\n // mergedNamespacedRow[mainTableAlias] = undefined\n }\n\n // New key\n const newKey = `[${mainKey},${joinedKey}]`\n\n return [newKey, mergedNamespacedRow] as [\n string,\n typeof mergedNamespacedRow,\n ]\n }),\n // Filter out undefined results\n filter((value) => value !== undefined),\n // Process the ON condition\n filter(([_key, namespacedRow]: [string, NamespacedRow]) => {\n // If there's no ON condition, or it's a cross join, always return true\n if (!joinClause.on || joinClause.type === `cross`) {\n return true\n }\n\n // For LEFT JOIN, if the right side is null, we should include the row\n if (\n joinClause.type === `left` &&\n namespacedRow[joinedTableAlias] === undefined\n ) {\n return true\n }\n\n // For RIGHT JOIN, if the left side is null, we should include the row\n if (\n joinClause.type === `right` &&\n namespacedRow[mainTableAlias] === undefined\n ) {\n return true\n }\n\n // For FULL JOIN, if either side is null, we should include the row\n if (\n joinClause.type === `full` &&\n (namespacedRow[mainTableAlias] === undefined ||\n 
namespacedRow[joinedTableAlias] === undefined)\n ) {\n return true\n }\n\n return evaluateConditionOnNamespacedRow(\n namespacedRow,\n joinClause.on,\n mainTableAlias,\n joinedTableAlias\n )\n })\n )\n }\n}\n"],"names":["map","extractJoinKey","joinOperator","consolidate","filter","evaluateConditionOnNamespacedRow"],"mappings":";;;;;AAmBO,SAAS,kBACd,UACA,OACA,QACA,gBACA,WACA;AACI,MAAA,CAAC,MAAM,KAAa,QAAA;AAClB,QAAA,QAAQ,UAAU,MAAM,IAAI;AAEvB,aAAA,cAAc,MAAM,MAAM;AAE7B,UAAA,mBAAmB,WAAW,MAAM,WAAW;AAGrD,UAAM,WACJ,WAAW,SAAS,UAAU,UAAU,WAAW;AAIrD,UAAM,CAAC,YAAA,EAAc,aAAa,IAAI,WAAW;AAIjD,UAAM,eAAe,SAAS;AAAA,MAC5BA,OAAAA,IAAI,CAAC,CAAC,YAAY,aAAa,MAAM;AAE7B,cAAA,UAAU,cAAc,cAAc;AAG5C,cAAM,MAAMC,WAAA,eAAe,SAAS,YAAY,cAAc;AAG9D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGI,QAAA;AAEA,QAAA,UAAU,WAAW,IAAI,GAAG;AAEX,yBAAA,UAAU,WAAW,IAAI;AAAA,IAAA,OACvC;AAGH,yBAAA,MAAO,MAAM,SAA4C;AAAA,IAAA;AAG7D,WAAO,gBAAgB,IAAI;AAG3B,UAAM,iBAAiB,iBAAiB;AAAA,MACtCD,OAAAA,IAAI,CAAC,CAAC,YAAY,GAAG,MAAM;AAEzB,cAAM,gBAA+B,EAAE,CAAC,gBAAgB,GAAG,IAAI;AAG/D,cAAM,MAAMC,WAAA,eAAe,KAAK,eAAe,gBAAgB;AAG/D,eAAO,CAAC,KAAK,CAAC,YAAY,aAAa,CAAC;AAAA,MAIzC,CAAA;AAAA,IACH;AAGA,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBC,YAAa,gBAAgB,OAAO;AAAA,UACpCC,mBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBD,YAAa,gBAAgB,MAAM;AAAA,UACnCC,mBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBD,YAAa,gBAAgB,OAAO;AAAA,UACpCC,mBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF,KAAK;AACH,mBAAW,aAAa;AAAA,UACtBD,YAAa,gBAAgB,MAAM;AAAA,UACnCC,mBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AACA;AAAA,MACF;AACE,mBAAW,aAAa;AAAA,UACtBD,YAAa,gBAAgB,OAAO;AAAA,UACpCC,mBAAY;AAAA,UACZ,mBAAmB,gBAAgB,kBAAkB,UAAU;AAAA,QACjE;AAAA,IAAA;AAAA,EACJ;AAEK,SAAA;AACT;AAKgB,SAAA,mBACd,gBACA,kBACA,YACA;AACA,SAAO,SACL,UAS0B;AAC1B,WAAO,SAAS;AAAA;AAAA,MAEdH,OAAA,IAAI,CAAC,WAAW;AACd,cAAM,CAAC,MAAM,CAAC,MAAM,MAAM,CAAC,IAAI;AACzB,cAAA,UAAU,6BAAO;AACjB,cAAA,oBAAoB,6BAAO;AAC3B,cAAA,YAAY,iCAAS;AACrB,cAAA,sBAAsB,iCAAS;AAGrC,YAAI,WAAW,SAAS,WAAW,WAAW,SAAS,SAAS;AAC1D,cAAA,CAAC,qBAAqB,CAAC,qBAAqB;AACvC,mBAAA;AAAA,UAAA;AAAA,QACT;AAIF,YAAI,WAAW,SAAS,UAAU,CAAC,mBAAmB;AAC7C,iBAAA;AAAA,QAAA;AAIT,YAAI,WAAW,SAAS,WAAW,CAAC,qBAAqB;AAChD,iBAAA;AAAA,QAAA;AAIT,cAAM,sBAAqC,CAAC;AAG5C,YAAI,mBAAmB;AACd,iBAAA,QAAQ,iBAAiB,EAAE;AAAA,YAChC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA;AAIF,YAAI,qBAAqB;AAChB,iBAAA,QAAQ,mBAAmB,EAAE;AAAA,YAClC,CAAC,CAAC,YAAY,SAAS,MAAM;AAC3B,kCAAoB,UAAU,IAAI;AAAA,YAAA;AAAA,UAEtC;AAAA,QAAA,WACS,WAAW,SAAS,UAAU,WAAW,SAAS,OAAQ;AAMrE,YACE,CAAC,sBACA,WAAW,SAAS,WAAW,WAAW,SAAS,QACpD;AAKF,cAAM,SAAS,IAAI,OAAO,IAAI,SAAS;AAEhC,eAAA,CAAC,QAAQ,mBAAmB;AAAA,MAAA,CAIpC;AAAA;AAAA,MAEDI,OAAAA,OAAO,CAAC,UAAU,UAAU,MAAS;AAAA;AAAA,MAErCA,OAAAA,OAAO,CAAC,CAAC,MAAM,aAAa,MAA+B;AAEzD,YAAI,CAAC,WAAW,MAAM,WAAW,SAAS,SAAS;AAC1C,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,UACpB,cAAc,gBAAgB,MAAM,QACpC;AACO,iBAAA;AAAA,QAAA;AAIT,YACE,WAAW,SAAS,WACpB,cAAc,cAAc,MAAM,QAClC;AACO,iBAAA;AAAA,QAAA;AAKP,YAAA,WAAW,SAAS,WACnB,cAAc,cAAc,MAAM,UACjC,cAAc,gBAAgB,MAAM,SACtC;AACO,iBAAA;AAAA,QAAA;AAGF,eAAAC,WAAA;AAAA,UACL;AAAA,UACA,WAAW;AAAA,UACX;AAAA,UACA;AAAA,QACF;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EACF;AACF;;;"}
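
Note: the processJoinResults helper captured in the source map above merges the two halves of a d2mini join result and discards pairs that violate the join type. A standalone sketch of that merge step, using simplified stand-in types rather than the package's exports:

    type NamespacedRow = Record<string, Record<string, unknown>>
    type JoinSide = [key: string, row: NamespacedRow] | undefined

    // Merge one join result according to the join type, as in processJoinResults:
    // inner/cross need both sides, left needs the main side, right needs the
    // joined side, full accepts either. Returns undefined for rows to filter out.
    function mergeJoinResult(
      joinType: `inner` | `left` | `right` | `full` | `cross`,
      main: JoinSide,
      joined: JoinSide
    ): [string, NamespacedRow] | undefined {
      const mainRow = main?.[1]
      const joinedRow = joined?.[1]

      if ((joinType === `inner` || joinType === `cross`) && (!mainRow || !joinedRow)) return undefined
      if (joinType === `left` && !mainRow) return undefined
      if (joinType === `right` && !joinedRow) return undefined

      // Combine the nested rows under their table aliases and build the composite key
      const merged: NamespacedRow = { ...(mainRow ?? {}), ...(joinedRow ?? {}) }
      return [`[${main?.[0]},${joined?.[0]}]`, merged]
    }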
@@ -1,5 +1,5 @@
  import { Query } from './index.js';
- import { IStreamBuilder } from '@electric-sql/d2ts';
+ import { IStreamBuilder } from '@electric-sql/d2mini';
  import { KeyedStream, NamespacedAndKeyedStream, NamespacedRow } from '../types.js';
  /**
  * Creates a processing pipeline for join clauses
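
Note: the declaration change above only swaps the source of the type import; anything typed against these streams now resolves IStreamBuilder from @electric-sql/d2mini. A hypothetical consumer-side sketch (the alias name is illustrative):

    // Type-only import now points at d2mini; the generic shape, as in joins.ts
    // above, is a [key, value] tuple stream.
    import type { IStreamBuilder } from "@electric-sql/d2mini"

    type KeyedRowStream = IStreamBuilder<[key: string, row: Record<string, unknown>]>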
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const d2ts = require("@electric-sql/d2ts");
+ const d2mini = require("@electric-sql/d2mini");
  const extractors = require("./extractors.cjs");
  const utils = require("./utils.cjs");
  function processOrderBy(resultPipeline, query, mainTableAlias) {
@@ -122,12 +122,12 @@ function processOrderBy(resultPipeline, query, mainTableAlias) {
  if (hasOrderIndexColumn) {
  if (orderIndexType === `numeric`) {
  resultPipeline = resultPipeline.pipe(
- d2ts.orderByWithIndex(valueExtractor, {
+ d2mini.orderByWithIndex(valueExtractor, {
  limit: query.limit,
  offset: query.offset,
  comparator
  }),
- d2ts.map(([key, [value, index]]) => {
+ d2mini.map(([key, [value, index]]) => {
  const result = {
  ...value,
  [mainTableAlias]: {
@@ -140,12 +140,12 @@ function processOrderBy(resultPipeline, query, mainTableAlias) {
  );
  } else {
  resultPipeline = resultPipeline.pipe(
- d2ts.orderByWithFractionalIndex(valueExtractor, {
+ d2mini.orderByWithFractionalIndex(valueExtractor, {
  limit: query.limit,
  offset: query.offset,
  comparator
  }),
- d2ts.map(([key, [value, index]]) => {
+ d2mini.map(([key, [value, index]]) => {
  const result = {
  ...value,
  [mainTableAlias]: {
@@ -159,7 +159,7 @@ function processOrderBy(resultPipeline, query, mainTableAlias) {
  }
  } else {
  resultPipeline = resultPipeline.pipe(
- d2ts.orderBy(valueExtractor, {
+ d2mini.orderBy(valueExtractor, {
  limit: query.limit,
  offset: query.offset,
  comparator
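
Note: each branch above passes the same { limit, offset, comparator } options to the d2mini ordering operator, and the indexed branches follow it with a map that writes the row's index onto the main table alias. A condensed sketch of the numeric ORDER_INDEX branch; the parameters stand in for locals of processOrderBy and the stream type is left loose for brevity:

    import { map, orderByWithIndex } from "@electric-sql/d2mini"

    function applyNumericOrderIndex(
      resultPipeline: any,
      query: { limit?: number; offset?: number },
      mainTableAlias: string,
      orderIndexAlias: string,
      valueExtractor: (row: Record<string, unknown>) => unknown,
      comparator: (a: unknown, b: unknown) => number
    ) {
      return resultPipeline.pipe(
        orderByWithIndex(valueExtractor, {
          limit: query.limit,
          offset: query.offset,
          comparator,
        }),
        map(([key, [value, index]]: [string, [Record<string, any>, number]]) => {
          // Copy the ordering index onto the main table's namespace
          const result = {
            ...value,
            [mainTableAlias]: { ...value[mainTableAlias], [orderIndexAlias]: index },
          }
          return [key, result] as [string, typeof result]
        })
      )
    }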
@@ -1 +1 @@
- {"version":3,"file":"order-by.cjs","sources":["../../../src/query/order-by.ts"],"sourcesContent":["import {\n map,\n orderBy,\n orderByWithFractionalIndex,\n orderByWithIndex,\n} from \"@electric-sql/d2ts\"\nimport { evaluateOperandOnNamespacedRow } from \"./extractors\"\nimport { isOrderIndexFunctionCall } from \"./utils\"\nimport type { ConditionOperand, Query } from \"./schema\"\nimport type {\n KeyedNamespacedRow,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types\"\n\ntype OrderByItem = {\n operand: ConditionOperand\n direction: `asc` | `desc`\n}\n\ntype OrderByItems = Array<OrderByItem>\n\nexport function processOrderBy(\n resultPipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string\n) {\n // Check if any column in the SELECT clause is an ORDER_INDEX function call\n let hasOrderIndexColumn = false\n let orderIndexType: `numeric` | `fractional` = `numeric`\n let orderIndexAlias = ``\n\n // Scan the SELECT clause for ORDER_INDEX functions\n // TODO: Select is going to be optional in future - we will automatically add an\n // attribute for the index column\n for (const item of query.select!) {\n if (typeof item === `object`) {\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `object` && isOrderIndexFunctionCall(expr)) {\n hasOrderIndexColumn = true\n orderIndexAlias = alias\n orderIndexType = getOrderIndexType(expr)\n break\n }\n }\n }\n if (hasOrderIndexColumn) break\n }\n\n // Normalize orderBy to an array of objects\n const orderByItems: OrderByItems = []\n\n if (typeof query.orderBy === `string`) {\n // Handle string format: '@column'\n orderByItems.push({\n operand: query.orderBy,\n direction: `asc`,\n })\n } else if (Array.isArray(query.orderBy)) {\n // Handle array format: ['@column1', { '@column2': 'desc' }]\n for (const item of query.orderBy) {\n if (typeof item === `string`) {\n orderByItems.push({\n operand: item,\n direction: `asc`,\n })\n } else if (typeof item === `object`) {\n for (const [column, direction] of Object.entries(item)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n }\n } else if (typeof query.orderBy === `object`) {\n // Handle object format: { '@column': 'desc' }\n for (const [column, direction] of Object.entries(query.orderBy)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n\n // Create a value extractor function for the orderBy operator\n // const valueExtractor = ([key, namespacedRow]: [\n const valueExtractor = (namespacedRow: NamespacedRow) => {\n // For multiple orderBy columns, create a composite key\n if (orderByItems.length > 1) {\n return orderByItems.map((item) =>\n evaluateOperandOnNamespacedRow(\n namespacedRow,\n item.operand,\n mainTableAlias\n )\n )\n } else if (orderByItems.length === 1) {\n // For a single orderBy column, use the value directly\n const item = orderByItems[0]\n const val = evaluateOperandOnNamespacedRow(\n namespacedRow,\n item!.operand,\n mainTableAlias\n )\n return val\n }\n\n // Default case - no ordering\n return null\n }\n\n const ascComparator = (a: any, b: any): number => {\n // if a and b are both strings, compare them based on locale\n if (typeof a === `string` && typeof b === `string`) {\n return a.localeCompare(b)\n }\n\n // if a and b are both arrays, compare them element by element\n if (Array.isArray(a) && Array.isArray(b)) {\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n // Compare the values\n const result = ascComparator(a[i], 
b[i])\n\n if (result !== 0) {\n return result\n }\n }\n // All elements are equal up to the minimum length\n return a.length - b.length\n }\n\n // If at least one of the values is an object then we don't really know how to meaningfully compare them\n // therefore we turn them into strings and compare those\n // There are 2 exceptions:\n // 1) if both objects are dates then we can compare them\n // 2) if either object is nullish then we can't call toString on it\n const bothObjects = typeof a === `object` && typeof b === `object`\n const bothDates = a instanceof Date && b instanceof Date\n const notNull = a !== null && b !== null\n if (bothObjects && !bothDates && notNull) {\n // Every object should support `toString`\n return a.toString().localeCompare(b.toString())\n }\n\n if (a < b) return -1\n if (a > b) return 1\n return 0\n }\n\n const descComparator = (a: unknown, b: unknown): number => {\n return ascComparator(b, a)\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const makeComparator = (orderByProps: OrderByItems) => {\n return (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByProps.length > 1) {\n // `a` and `b` must be arrays since `orderByItems.length > 1`\n // hence the extracted values must be arrays\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByProps.length; i++) {\n const direction = orderByProps[i]!.direction\n const compareFn =\n direction === `desc` ? descComparator : ascComparator\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n // should normally always be 0 because\n // both values are extracted based on orderByItems\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByProps.length === 1) {\n const direction = orderByProps[0]!.direction\n return direction === `desc` ? 
descComparator(a, b) : ascComparator(a, b)\n }\n\n return ascComparator(a, b)\n }\n }\n const comparator = makeComparator(orderByItems)\n\n // Apply the appropriate orderBy operator based on whether an ORDER_INDEX column is requested\n if (hasOrderIndexColumn) {\n if (orderIndexType === `numeric`) {\n // Use orderByWithIndex for numeric indices\n resultPipeline = resultPipeline.pipe(\n orderByWithIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n } else {\n // Use orderByWithFractionalIndex for fractional indices\n resultPipeline = resultPipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n }\n } else {\n // Use regular orderBy if no index column is requested\n resultPipeline = resultPipeline.pipe(\n orderBy(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n })\n )\n }\n\n return resultPipeline\n}\n\n// Helper function to extract the ORDER_INDEX type from a function call\nfunction getOrderIndexType(obj: any): `numeric` | `fractional` {\n if (!isOrderIndexFunctionCall(obj)) {\n throw new Error(`Not an ORDER_INDEX function call`)\n }\n\n const arg = obj[`ORDER_INDEX`]\n if (arg === `numeric` || arg === true || arg === `default`) {\n return `numeric`\n } else if (arg === `fractional`) {\n return `fractional`\n } else {\n throw new Error(`Invalid ORDER_INDEX type: ` + arg)\n 
}\n}\n"],"names":["isOrderIndexFunctionCall","evaluateOperandOnNamespacedRow","orderByWithIndex","map","orderByWithFractionalIndex","orderBy"],"mappings":";;;;;AAsBgB,SAAA,eACd,gBACA,OACA,gBACA;AAEA,MAAI,sBAAsB;AAC1B,MAAI,iBAA2C;AAC/C,MAAI,kBAAkB;AAKX,aAAA,QAAQ,MAAM,QAAS;AAC5B,QAAA,OAAO,SAAS,UAAU;AAC5B,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAI,OAAO,SAAS,YAAYA,MAAA,yBAAyB,IAAI,GAAG;AACxC,gCAAA;AACJ,4BAAA;AAClB,2BAAiB,kBAAkB,IAAI;AACvC;AAAA,QAAA;AAAA,MACF;AAAA,IACF;AAEF,QAAI,oBAAqB;AAAA,EAAA;AAI3B,QAAM,eAA6B,CAAC;AAEhC,MAAA,OAAO,MAAM,YAAY,UAAU;AAErC,iBAAa,KAAK;AAAA,MAChB,SAAS,MAAM;AAAA,MACf,WAAW;AAAA,IAAA,CACZ;AAAA,EACQ,WAAA,MAAM,QAAQ,MAAM,OAAO,GAAG;AAE5B,eAAA,QAAQ,MAAM,SAAS;AAC5B,UAAA,OAAO,SAAS,UAAU;AAC5B,qBAAa,KAAK;AAAA,UAChB,SAAS;AAAA,UACT,WAAW;AAAA,QAAA,CACZ;AAAA,MACH,WAAW,OAAO,SAAS,UAAU;AACnC,mBAAW,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,uBAAa,KAAK;AAAA,YAChB,SAAS;AAAA,YACT;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MACH;AAAA,IACF;AAAA,EAEO,WAAA,OAAO,MAAM,YAAY,UAAU;AAEjC,eAAA,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,MAAM,OAAO,GAAG;AAC/D,mBAAa,KAAK;AAAA,QAChB,SAAS;AAAA,QACT;AAAA,MAAA,CACD;AAAA,IAAA;AAAA,EACH;AAKI,QAAA,iBAAiB,CAAC,kBAAiC;AAEnD,QAAA,aAAa,SAAS,GAAG;AAC3B,aAAO,aAAa;AAAA,QAAI,CAAC,SACvBC,WAAA;AAAA,UACE;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAAA,IAAA,WACS,aAAa,WAAW,GAAG;AAE9B,YAAA,OAAO,aAAa,CAAC;AAC3B,YAAM,MAAMA,WAAA;AAAA,QACV;AAAA,QACA,KAAM;AAAA,QACN;AAAA,MACF;AACO,aAAA;AAAA,IAAA;AAIF,WAAA;AAAA,EACT;AAEM,QAAA,gBAAgB,CAAC,GAAQ,MAAmB;AAEhD,QAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAC3C,aAAA,EAAE,cAAc,CAAC;AAAA,IAAA;AAI1B,QAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AAC/B,eAAA,IAAI,GAAG,IAAI,KAAK,IAAI,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK;AAErD,cAAM,SAAS,cAAc,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AAEvC,YAAI,WAAW,GAAG;AACT,iBAAA;AAAA,QAAA;AAAA,MACT;AAGK,aAAA,EAAE,SAAS,EAAE;AAAA,IAAA;AAQtB,UAAM,cAAc,OAAO,MAAM,YAAY,OAAO,MAAM;AACpD,UAAA,YAAY,aAAa,QAAQ,aAAa;AAC9C,UAAA,UAAU,MAAM,QAAQ,MAAM;AAChC,QAAA,eAAe,CAAC,aAAa,SAAS;AAExC,aAAO,EAAE,SAAS,EAAE,cAAc,EAAE,UAAU;AAAA,IAAA;AAG5C,QAAA,IAAI,EAAU,QAAA;AACd,QAAA,IAAI,EAAU,QAAA;AACX,WAAA;AAAA,EACT;AAEM,QAAA,iBAAiB,CAAC,GAAY,MAAuB;AAClD,WAAA,cAAc,GAAG,CAAC;AAAA,EAC3B;AAGM,QAAA,iBAAiB,CAAC,iBAA+B;AAC9C,WAAA,CAAC,GAAY,MAAe;AAE7B,UAAA,aAAa,SAAS,GAAG;AAG3B,cAAM,SAAS;AACf,cAAM,SAAS;AACf,iBAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AACtC,gBAAA,YAAY,aAAa,CAAC,EAAG;AAC7B,gBAAA,YACJ,cAAc,SAAS,iBAAiB;AAC1C,gBAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,cAAI,WAAW,GAAG;AACT,mBAAA;AAAA,UAAA;AAAA,QACT;AAIK,eAAA,OAAO,SAAS,OAAO;AAAA,MAAA;AAI5B,UAAA,aAAa,WAAW,GAAG;AACvB,cAAA,YAAY,aAAa,CAAC,EAAG;AAC5B,eAAA,cAAc,SAAS,eAAe,GAAG,CAAC,IAAI,cAAc,GAAG,CAAC;AAAA,MAAA;AAGlE,aAAA,cAAc,GAAG,CAAC;AAAA,IAC3B;AAAA,EACF;AACM,QAAA,aAAa,eAAe,YAAY;AAG9C,MAAI,qBAAqB;AACvB,QAAI,mBAAmB,WAAW;AAEhC,uBAAiB,eAAe;AAAA,QAC9BC,KAAAA,iBAAiB,gBAAgB;AAAA,UAC/B,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACDC,KAAA,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA,OACK;AAEL,uBAAiB,eAAe;AAAA,QAC9BC,KAAAA,2BAA2B,gBAAgB;AAAA,UACzC,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACDD,KAAA,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF,OACK;AAEL,qBAAiB,eAAe;AAAA,MAC9BE,KAAAA,QAAQ,gBAAgB;AAAA,QACtB,OAAO,MAAM;AAAA
,QACb,QAAQ,MAAM;AAAA,QACd;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EAAA;AAGK,SAAA;AACT;AAGA,SAAS,kBAAkB,KAAoC;AACzD,MAAA,CAACL,MAAAA,yBAAyB,GAAG,GAAG;AAC5B,UAAA,IAAI,MAAM,kCAAkC;AAAA,EAAA;AAG9C,QAAA,MAAM,IAAI,aAAa;AAC7B,MAAI,QAAQ,aAAa,QAAQ,QAAQ,QAAQ,WAAW;AACnD,WAAA;AAAA,EAAA,WACE,QAAQ,cAAc;AACxB,WAAA;AAAA,EAAA,OACF;AACC,UAAA,IAAI,MAAM,+BAA+B,GAAG;AAAA,EAAA;AAEtD;;"}
+ {"version":3,"file":"order-by.cjs","sources":["../../../src/query/order-by.ts"],"sourcesContent":["import {\n map,\n orderBy,\n orderByWithFractionalIndex,\n orderByWithIndex,\n} from \"@electric-sql/d2mini\"\nimport { evaluateOperandOnNamespacedRow } from \"./extractors\"\nimport { isOrderIndexFunctionCall } from \"./utils\"\nimport type { ConditionOperand, Query } from \"./schema\"\nimport type {\n KeyedNamespacedRow,\n NamespacedAndKeyedStream,\n NamespacedRow,\n} from \"../types\"\n\ntype OrderByItem = {\n operand: ConditionOperand\n direction: `asc` | `desc`\n}\n\ntype OrderByItems = Array<OrderByItem>\n\nexport function processOrderBy(\n resultPipeline: NamespacedAndKeyedStream,\n query: Query,\n mainTableAlias: string\n) {\n // Check if any column in the SELECT clause is an ORDER_INDEX function call\n let hasOrderIndexColumn = false\n let orderIndexType: `numeric` | `fractional` = `numeric`\n let orderIndexAlias = ``\n\n // Scan the SELECT clause for ORDER_INDEX functions\n // TODO: Select is going to be optional in future - we will automatically add an\n // attribute for the index column\n for (const item of query.select!) {\n if (typeof item === `object`) {\n for (const [alias, expr] of Object.entries(item)) {\n if (typeof expr === `object` && isOrderIndexFunctionCall(expr)) {\n hasOrderIndexColumn = true\n orderIndexAlias = alias\n orderIndexType = getOrderIndexType(expr)\n break\n }\n }\n }\n if (hasOrderIndexColumn) break\n }\n\n // Normalize orderBy to an array of objects\n const orderByItems: OrderByItems = []\n\n if (typeof query.orderBy === `string`) {\n // Handle string format: '@column'\n orderByItems.push({\n operand: query.orderBy,\n direction: `asc`,\n })\n } else if (Array.isArray(query.orderBy)) {\n // Handle array format: ['@column1', { '@column2': 'desc' }]\n for (const item of query.orderBy) {\n if (typeof item === `string`) {\n orderByItems.push({\n operand: item,\n direction: `asc`,\n })\n } else if (typeof item === `object`) {\n for (const [column, direction] of Object.entries(item)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n }\n } else if (typeof query.orderBy === `object`) {\n // Handle object format: { '@column': 'desc' }\n for (const [column, direction] of Object.entries(query.orderBy)) {\n orderByItems.push({\n operand: column,\n direction: direction as `asc` | `desc`,\n })\n }\n }\n\n // Create a value extractor function for the orderBy operator\n // const valueExtractor = ([key, namespacedRow]: [\n const valueExtractor = (namespacedRow: NamespacedRow) => {\n // For multiple orderBy columns, create a composite key\n if (orderByItems.length > 1) {\n return orderByItems.map((item) =>\n evaluateOperandOnNamespacedRow(\n namespacedRow,\n item.operand,\n mainTableAlias\n )\n )\n } else if (orderByItems.length === 1) {\n // For a single orderBy column, use the value directly\n const item = orderByItems[0]\n const val = evaluateOperandOnNamespacedRow(\n namespacedRow,\n item!.operand,\n mainTableAlias\n )\n return val\n }\n\n // Default case - no ordering\n return null\n }\n\n const ascComparator = (a: any, b: any): number => {\n // if a and b are both strings, compare them based on locale\n if (typeof a === `string` && typeof b === `string`) {\n return a.localeCompare(b)\n }\n\n // if a and b are both arrays, compare them element by element\n if (Array.isArray(a) && Array.isArray(b)) {\n for (let i = 0; i < Math.min(a.length, b.length); i++) {\n // Compare the values\n const result = ascComparator(a[i], 
b[i])\n\n if (result !== 0) {\n return result\n }\n }\n // All elements are equal up to the minimum length\n return a.length - b.length\n }\n\n // If at least one of the values is an object then we don't really know how to meaningfully compare them\n // therefore we turn them into strings and compare those\n // There are 2 exceptions:\n // 1) if both objects are dates then we can compare them\n // 2) if either object is nullish then we can't call toString on it\n const bothObjects = typeof a === `object` && typeof b === `object`\n const bothDates = a instanceof Date && b instanceof Date\n const notNull = a !== null && b !== null\n if (bothObjects && !bothDates && notNull) {\n // Every object should support `toString`\n return a.toString().localeCompare(b.toString())\n }\n\n if (a < b) return -1\n if (a > b) return 1\n return 0\n }\n\n const descComparator = (a: unknown, b: unknown): number => {\n return ascComparator(b, a)\n }\n\n // Create a multi-property comparator that respects the order and direction of each property\n const makeComparator = (orderByProps: OrderByItems) => {\n return (a: unknown, b: unknown) => {\n // If we're comparing arrays (multiple properties), compare each property in order\n if (orderByProps.length > 1) {\n // `a` and `b` must be arrays since `orderByItems.length > 1`\n // hence the extracted values must be arrays\n const arrayA = a as Array<unknown>\n const arrayB = b as Array<unknown>\n for (let i = 0; i < orderByProps.length; i++) {\n const direction = orderByProps[i]!.direction\n const compareFn =\n direction === `desc` ? descComparator : ascComparator\n const result = compareFn(arrayA[i], arrayB[i])\n if (result !== 0) {\n return result\n }\n }\n // should normally always be 0 because\n // both values are extracted based on orderByItems\n return arrayA.length - arrayB.length\n }\n\n // Single property comparison\n if (orderByProps.length === 1) {\n const direction = orderByProps[0]!.direction\n return direction === `desc` ? 
descComparator(a, b) : ascComparator(a, b)\n }\n\n return ascComparator(a, b)\n }\n }\n const comparator = makeComparator(orderByItems)\n\n // Apply the appropriate orderBy operator based on whether an ORDER_INDEX column is requested\n if (hasOrderIndexColumn) {\n if (orderIndexType === `numeric`) {\n // Use orderByWithIndex for numeric indices\n resultPipeline = resultPipeline.pipe(\n orderByWithIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n } else {\n // Use orderByWithFractionalIndex for fractional indices\n resultPipeline = resultPipeline.pipe(\n orderByWithFractionalIndex(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n }),\n map(([key, [value, index]]) => {\n // Add the index to the result\n // We add this to the main table alias for now\n // TODO: re are going to need to refactor the whole order by pipeline\n const result = {\n ...(value as Record<string, unknown>),\n [mainTableAlias]: {\n ...value[mainTableAlias],\n [orderIndexAlias]: index,\n },\n }\n return [key, result] as KeyedNamespacedRow\n })\n )\n }\n } else {\n // Use regular orderBy if no index column is requested\n resultPipeline = resultPipeline.pipe(\n orderBy(valueExtractor, {\n limit: query.limit,\n offset: query.offset,\n comparator,\n })\n )\n }\n\n return resultPipeline\n}\n\n// Helper function to extract the ORDER_INDEX type from a function call\nfunction getOrderIndexType(obj: any): `numeric` | `fractional` {\n if (!isOrderIndexFunctionCall(obj)) {\n throw new Error(`Not an ORDER_INDEX function call`)\n }\n\n const arg = obj[`ORDER_INDEX`]\n if (arg === `numeric` || arg === true || arg === `default`) {\n return `numeric`\n } else if (arg === `fractional`) {\n return `fractional`\n } else {\n throw new Error(`Invalid ORDER_INDEX type: ` + arg)\n 
}\n}\n"],"names":["isOrderIndexFunctionCall","evaluateOperandOnNamespacedRow","orderByWithIndex","map","orderByWithFractionalIndex","orderBy"],"mappings":";;;;;AAsBgB,SAAA,eACd,gBACA,OACA,gBACA;AAEA,MAAI,sBAAsB;AAC1B,MAAI,iBAA2C;AAC/C,MAAI,kBAAkB;AAKX,aAAA,QAAQ,MAAM,QAAS;AAC5B,QAAA,OAAO,SAAS,UAAU;AAC5B,iBAAW,CAAC,OAAO,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG;AAChD,YAAI,OAAO,SAAS,YAAYA,MAAA,yBAAyB,IAAI,GAAG;AACxC,gCAAA;AACJ,4BAAA;AAClB,2BAAiB,kBAAkB,IAAI;AACvC;AAAA,QAAA;AAAA,MACF;AAAA,IACF;AAEF,QAAI,oBAAqB;AAAA,EAAA;AAI3B,QAAM,eAA6B,CAAC;AAEhC,MAAA,OAAO,MAAM,YAAY,UAAU;AAErC,iBAAa,KAAK;AAAA,MAChB,SAAS,MAAM;AAAA,MACf,WAAW;AAAA,IAAA,CACZ;AAAA,EACQ,WAAA,MAAM,QAAQ,MAAM,OAAO,GAAG;AAE5B,eAAA,QAAQ,MAAM,SAAS;AAC5B,UAAA,OAAO,SAAS,UAAU;AAC5B,qBAAa,KAAK;AAAA,UAChB,SAAS;AAAA,UACT,WAAW;AAAA,QAAA,CACZ;AAAA,MACH,WAAW,OAAO,SAAS,UAAU;AACnC,mBAAW,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,IAAI,GAAG;AACtD,uBAAa,KAAK;AAAA,YAChB,SAAS;AAAA,YACT;AAAA,UAAA,CACD;AAAA,QAAA;AAAA,MACH;AAAA,IACF;AAAA,EAEO,WAAA,OAAO,MAAM,YAAY,UAAU;AAEjC,eAAA,CAAC,QAAQ,SAAS,KAAK,OAAO,QAAQ,MAAM,OAAO,GAAG;AAC/D,mBAAa,KAAK;AAAA,QAChB,SAAS;AAAA,QACT;AAAA,MAAA,CACD;AAAA,IAAA;AAAA,EACH;AAKI,QAAA,iBAAiB,CAAC,kBAAiC;AAEnD,QAAA,aAAa,SAAS,GAAG;AAC3B,aAAO,aAAa;AAAA,QAAI,CAAC,SACvBC,WAAA;AAAA,UACE;AAAA,UACA,KAAK;AAAA,UACL;AAAA,QAAA;AAAA,MAEJ;AAAA,IAAA,WACS,aAAa,WAAW,GAAG;AAE9B,YAAA,OAAO,aAAa,CAAC;AAC3B,YAAM,MAAMA,WAAA;AAAA,QACV;AAAA,QACA,KAAM;AAAA,QACN;AAAA,MACF;AACO,aAAA;AAAA,IAAA;AAIF,WAAA;AAAA,EACT;AAEM,QAAA,gBAAgB,CAAC,GAAQ,MAAmB;AAEhD,QAAI,OAAO,MAAM,YAAY,OAAO,MAAM,UAAU;AAC3C,aAAA,EAAE,cAAc,CAAC;AAAA,IAAA;AAI1B,QAAI,MAAM,QAAQ,CAAC,KAAK,MAAM,QAAQ,CAAC,GAAG;AAC/B,eAAA,IAAI,GAAG,IAAI,KAAK,IAAI,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK;AAErD,cAAM,SAAS,cAAc,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC;AAEvC,YAAI,WAAW,GAAG;AACT,iBAAA;AAAA,QAAA;AAAA,MACT;AAGK,aAAA,EAAE,SAAS,EAAE;AAAA,IAAA;AAQtB,UAAM,cAAc,OAAO,MAAM,YAAY,OAAO,MAAM;AACpD,UAAA,YAAY,aAAa,QAAQ,aAAa;AAC9C,UAAA,UAAU,MAAM,QAAQ,MAAM;AAChC,QAAA,eAAe,CAAC,aAAa,SAAS;AAExC,aAAO,EAAE,SAAS,EAAE,cAAc,EAAE,UAAU;AAAA,IAAA;AAG5C,QAAA,IAAI,EAAU,QAAA;AACd,QAAA,IAAI,EAAU,QAAA;AACX,WAAA;AAAA,EACT;AAEM,QAAA,iBAAiB,CAAC,GAAY,MAAuB;AAClD,WAAA,cAAc,GAAG,CAAC;AAAA,EAC3B;AAGM,QAAA,iBAAiB,CAAC,iBAA+B;AAC9C,WAAA,CAAC,GAAY,MAAe;AAE7B,UAAA,aAAa,SAAS,GAAG;AAG3B,cAAM,SAAS;AACf,cAAM,SAAS;AACf,iBAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AACtC,gBAAA,YAAY,aAAa,CAAC,EAAG;AAC7B,gBAAA,YACJ,cAAc,SAAS,iBAAiB;AAC1C,gBAAM,SAAS,UAAU,OAAO,CAAC,GAAG,OAAO,CAAC,CAAC;AAC7C,cAAI,WAAW,GAAG;AACT,mBAAA;AAAA,UAAA;AAAA,QACT;AAIK,eAAA,OAAO,SAAS,OAAO;AAAA,MAAA;AAI5B,UAAA,aAAa,WAAW,GAAG;AACvB,cAAA,YAAY,aAAa,CAAC,EAAG;AAC5B,eAAA,cAAc,SAAS,eAAe,GAAG,CAAC,IAAI,cAAc,GAAG,CAAC;AAAA,MAAA;AAGlE,aAAA,cAAc,GAAG,CAAC;AAAA,IAC3B;AAAA,EACF;AACM,QAAA,aAAa,eAAe,YAAY;AAG9C,MAAI,qBAAqB;AACvB,QAAI,mBAAmB,WAAW;AAEhC,uBAAiB,eAAe;AAAA,QAC9BC,OAAAA,iBAAiB,gBAAgB;AAAA,UAC/B,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACDC,OAAA,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA,OACK;AAEL,uBAAiB,eAAe;AAAA,QAC9BC,OAAAA,2BAA2B,gBAAgB;AAAA,UACzC,OAAO,MAAM;AAAA,UACb,QAAQ,MAAM;AAAA,UACd;AAAA,QAAA,CACD;AAAA,QACDD,OAAA,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,KAAK,CAAC,MAAM;AAI7B,gBAAM,SAAS;AAAA,YACb,GAAI;AAAA,YACJ,CAAC,cAAc,GAAG;AAAA,cAChB,GAAG,MAAM,cAAc;AAAA,cACvB,CAAC,eAAe,GAAG;AAAA,YAAA;AAAA,UAEvB;AACO,iBAAA,CAAC,KAAK,MAAM;AAAA,QACpB,CAAA;AAAA,MACH;AAAA,IAAA;AAAA,EACF,OACK;AAEL,qBAAiB,eAAe;AAAA,MAC9BE,OAAAA,QAAQ,gBAAgB;AAAA,QACtB,OAAO,MAAM;AAAA
,QACb,QAAQ,MAAM;AAAA,QACd;AAAA,MACD,CAAA;AAAA,IACH;AAAA,EAAA;AAGK,SAAA;AACT;AAGA,SAAS,kBAAkB,KAAoC;AACzD,MAAA,CAACL,MAAAA,yBAAyB,GAAG,GAAG;AAC5B,UAAA,IAAI,MAAM,kCAAkC;AAAA,EAAA;AAG9C,QAAA,MAAM,IAAI,aAAa;AAC7B,MAAI,QAAQ,aAAa,QAAQ,QAAQ,QAAQ,WAAW;AACnD,WAAA;AAAA,EAAA,WACE,QAAQ,cAAc;AACxB,WAAA;AAAA,EAAA,OACF;AACC,UAAA,IAAI,MAAM,+BAA+B,GAAG;AAAA,EAAA;AAEtD;;"}
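
Note: the comparator handed to those operators is built in the same file (visible in the source above): a locale-aware, array-recursing ascending comparator wrapped per orderBy direction. A runnable standalone sketch of that logic, with simplified types that are illustrative rather than the package's exports:

    type Direction = `asc` | `desc`

    function ascCompare(a: any, b: any): number {
      // Strings compare by locale
      if (typeof a === `string` && typeof b === `string`) return a.localeCompare(b)
      // Arrays compare element by element, then by length
      if (Array.isArray(a) && Array.isArray(b)) {
        for (let i = 0; i < Math.min(a.length, b.length); i++) {
          const result = ascCompare(a[i], b[i])
          if (result !== 0) return result
        }
        return a.length - b.length
      }
      // Non-date, non-null objects fall back to string comparison
      const bothObjects = typeof a === `object` && typeof b === `object`
      const bothDates = a instanceof Date && b instanceof Date
      if (bothObjects && !bothDates && a !== null && b !== null) {
        return a.toString().localeCompare(b.toString())
      }
      if (a < b) return -1
      if (a > b) return 1
      return 0
    }

    // Respect per-column direction; for multi-column orderBy the extracted values
    // are arrays and are compared position by position.
    function makeComparator(directions: Array<Direction>) {
      return (a: unknown, b: unknown): number => {
        if (directions.length > 1) {
          const arrayA = a as Array<unknown>
          const arrayB = b as Array<unknown>
          for (let i = 0; i < directions.length; i++) {
            const result =
              directions[i] === `desc`
                ? ascCompare(arrayB[i], arrayA[i])
                : ascCompare(arrayA[i], arrayB[i])
            if (result !== 0) return result
          }
          return arrayA.length - arrayB.length
        }
        return directions[0] === `desc` ? ascCompare(b, a) : ascCompare(a, b)
      }
    }

    // e.g. [[`b`, 2], [`a`, 2], [`a`, 1]].sort(makeComparator([`asc`, `desc`]))
    //   -> [[`a`, 2], [`a`, 1], [`b`, 2]]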
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
- const d2ts = require("@electric-sql/d2ts");
+ const d2mini = require("@electric-sql/d2mini");
  const evaluators = require("./evaluators.cjs");
  const joins = require("./joins.cjs");
  const groupBy = require("./group-by.cjs");
@@ -32,7 +32,7 @@ function compileQueryPipeline(query, inputs) {
  }
  tables[mainTableAlias] = input;
  let pipeline = input.pipe(
- d2ts.map(([key, row]) => {
+ d2mini.map(([key, row]) => {
  const ret = [key, { [mainTableAlias]: row }];
  return ret;
  })
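
Note: the renamed map call above is the namespacing step: each [key, row] pair from the main input is rewrapped so the row sits under the main table's alias, which is what the later where/having/select stages expect. A minimal sketch (the example values are illustrative):

    // Sketch of the namespacing step: [key, row] becomes [key, { [alias]: row }].
    function namespaceRow(
      key: string,
      row: Record<string, unknown>,
      mainTableAlias: string
    ): [string, Record<string, Record<string, unknown>>] {
      return [key, { [mainTableAlias]: row }]
    }

    // namespaceRow(`1`, { id: 1, title: `a` }, `todos`)
    //   -> [`1`, { todos: { id: 1, title: `a` } }]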
@@ -48,7 +48,7 @@ function compileQueryPipeline(query, inputs) {
  }
  if (query.where) {
  pipeline = pipeline.pipe(
- d2ts.filter(([_key, row]) => {
+ d2mini.filter(([_key, row]) => {
  const result = evaluators.evaluateWhereOnNamespacedRow(
  row,
  query.where,
@@ -63,7 +63,7 @@ function compileQueryPipeline(query, inputs) {
  }
  if (query.having) {
  pipeline = pipeline.pipe(
- d2ts.filter(([_key, row]) => {
+ d2mini.filter(([_key, row]) => {
  const result = evaluators.evaluateWhereOnNamespacedRow(
  row,
  query.having,
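
Note: the where and having hunks compile to the same shape, a d2mini filter over [key, namespacedRow] pairs that delegates the condition to the evaluator. A sketch with the evaluator injected as a parameter, since its trailing arguments are cut off in the hunks above:

    import { filter } from "@electric-sql/d2mini"

    // Illustrative wrapper: keep a namespaced row when the injected condition
    // evaluator returns true. The callback stands in for the package's
    // evaluateWhereOnNamespacedRow.
    const conditionStage = (
      evaluateCondition: (row: Record<string, Record<string, unknown>>) => boolean
    ) =>
      filter(([_key, row]: [string, Record<string, Record<string, unknown>>]) =>
        evaluateCondition(row)
      )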
@@ -81,7 +81,7 @@ function compileQueryPipeline(query, inputs) {
  );
  }
  const resultPipeline = query.select ? select.processSelect(pipeline, query, mainTableAlias, allInputs) : !query.join && !query.groupBy ? pipeline.pipe(
- d2ts.map(([key, row]) => [key, row[mainTableAlias]])
+ d2mini.map(([key, row]) => [key, row[mainTableAlias]])
  ) : pipeline;
  return resultPipeline;
  }
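
Note: the final hunk decides the result shape: with a select clause the pipeline goes through processSelect, and with no join or groupBy the rows are simply unwrapped from their namespace. A sketch of that unwrap step, with simplified stand-in types:

    // Sketch of the plain-query path above: strip the namespace and return the
    // main table's row directly.
    function unwrapMainTable(
      entry: [key: string, row: Record<string, Record<string, unknown>>],
      mainTableAlias: string
    ): [string, Record<string, unknown>] {
      const [key, row] = entry
      return [key, row[mainTableAlias]!]
    }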