@tanstack/db 0.1.6 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/cjs/collection.cjs.map +1 -1
  2. package/dist/cjs/collection.d.cts +5 -5
  3. package/dist/cjs/query/compiler/group-by.cjs +4 -2
  4. package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
  5. package/dist/cjs/query/compiler/index.cjs +2 -1
  6. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  7. package/dist/cjs/query/index.d.cts +2 -1
  8. package/dist/cjs/query/ir.cjs +16 -0
  9. package/dist/cjs/query/ir.cjs.map +1 -1
  10. package/dist/cjs/query/ir.d.cts +24 -1
  11. package/dist/cjs/query/live/collection-config-builder.cjs +267 -0
  12. package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -0
  13. package/dist/cjs/query/live/collection-config-builder.d.cts +36 -0
  14. package/dist/cjs/query/live/collection-subscriber.cjs +263 -0
  15. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -0
  16. package/dist/cjs/query/live/collection-subscriber.d.cts +28 -0
  17. package/dist/cjs/query/live/types.d.cts +77 -0
  18. package/dist/cjs/query/live-query-collection.cjs +3 -417
  19. package/dist/cjs/query/live-query-collection.cjs.map +1 -1
  20. package/dist/cjs/query/live-query-collection.d.cts +1 -58
  21. package/dist/cjs/query/optimizer.cjs +34 -11
  22. package/dist/cjs/query/optimizer.cjs.map +1 -1
  23. package/dist/cjs/types.d.cts +12 -0
  24. package/dist/esm/collection.d.ts +5 -5
  25. package/dist/esm/collection.js.map +1 -1
  26. package/dist/esm/query/compiler/group-by.js +5 -3
  27. package/dist/esm/query/compiler/group-by.js.map +1 -1
  28. package/dist/esm/query/compiler/index.js +3 -2
  29. package/dist/esm/query/compiler/index.js.map +1 -1
  30. package/dist/esm/query/index.d.ts +2 -1
  31. package/dist/esm/query/ir.d.ts +24 -1
  32. package/dist/esm/query/ir.js +17 -1
  33. package/dist/esm/query/ir.js.map +1 -1
  34. package/dist/esm/query/live/collection-config-builder.d.ts +36 -0
  35. package/dist/esm/query/live/collection-config-builder.js +267 -0
  36. package/dist/esm/query/live/collection-config-builder.js.map +1 -0
  37. package/dist/esm/query/live/collection-subscriber.d.ts +28 -0
  38. package/dist/esm/query/live/collection-subscriber.js +263 -0
  39. package/dist/esm/query/live/collection-subscriber.js.map +1 -0
  40. package/dist/esm/query/live/types.d.ts +77 -0
  41. package/dist/esm/query/live-query-collection.d.ts +1 -58
  42. package/dist/esm/query/live-query-collection.js +3 -417
  43. package/dist/esm/query/live-query-collection.js.map +1 -1
  44. package/dist/esm/query/optimizer.js +35 -12
  45. package/dist/esm/query/optimizer.js.map +1 -1
  46. package/dist/esm/types.d.ts +12 -0
  47. package/package.json +1 -1
  48. package/src/collection.ts +8 -5
  49. package/src/query/compiler/group-by.ts +5 -3
  50. package/src/query/compiler/index.ts +3 -2
  51. package/src/query/index.ts +2 -1
  52. package/src/query/ir.ts +48 -1
  53. package/src/query/live/collection-config-builder.ts +437 -0
  54. package/src/query/live/collection-subscriber.ts +460 -0
  55. package/src/query/live/types.ts +93 -0
  56. package/src/query/live-query-collection.ts +8 -791
  57. package/src/query/optimizer.ts +66 -18
  58. package/src/types.ts +74 -0
package/dist/esm/query/live-query-collection.js
@@ -1,336 +1,8 @@
- import { D2, output, MultiSet } from "@tanstack/db-ivm";
  import { createCollection } from "../collection.js";
- import { createFilterFunctionFromExpression } from "../change-events.js";
- import { compileQuery } from "./compiler/index.js";
- import { buildQuery, getQueryIR } from "./builder/index.js";
- import { convertToBasicExpression } from "./compiler/expressions.js";
- let liveQueryCollectionCounter = 0;
+ import { CollectionConfigBuilder } from "./live/collection-config-builder.js";
  function liveQueryCollectionOptions(config) {
- const id = config.id || `live-query-${++liveQueryCollectionCounter}`;
- const query = typeof config.query === `function` ? buildQuery(config.query) : getQueryIR(config.query);
- const resultKeys = /* @__PURE__ */ new WeakMap();
- const orderByIndices = /* @__PURE__ */ new WeakMap();
- const compare = query.orderBy && query.orderBy.length > 0 ? (val1, val2) => {
- const index1 = orderByIndices.get(val1);
- const index2 = orderByIndices.get(val2);
- if (index1 && index2) {
- if (index1 < index2) {
- return -1;
- } else if (index1 > index2) {
- return 1;
- } else {
- return 0;
- }
- }
- return 0;
- } : void 0;
- const collections = extractCollectionsFromQuery(query);
- const allCollectionsReady = () => {
- return Object.values(collections).every(
- (collection) => collection.isReady()
- );
- };
- const allCollectionsReadyOrInitialCommit = () => {
- return Object.values(collections).every(
- (collection) => collection.status === `ready` || collection.status === `initialCommit`
- );
- };
- let graphCache;
- let inputsCache;
- let pipelineCache;
- let collectionWhereClausesCache;
- const lazyCollectionsCallbacks = {};
- const lazyCollections = /* @__PURE__ */ new Set();
- const optimizableOrderByCollections = {};
- const compileBasePipeline = () => {
- graphCache = new D2();
- inputsCache = Object.fromEntries(
- Object.entries(collections).map(([key]) => [
- key,
- graphCache.newInput()
- ])
- );
- ({
- pipeline: pipelineCache,
- collectionWhereClauses: collectionWhereClausesCache
- } = compileQuery(
- query,
- inputsCache,
- collections,
- lazyCollectionsCallbacks,
- lazyCollections,
- optimizableOrderByCollections
- ));
- };
- const maybeCompileBasePipeline = () => {
- if (!graphCache || !inputsCache || !pipelineCache) {
- compileBasePipeline();
- }
- return {
- graph: graphCache,
- inputs: inputsCache,
- pipeline: pipelineCache
- };
- };
- compileBasePipeline();
- const sync = {
- rowUpdateMode: `full`,
- sync: ({ begin, write, commit, markReady, collection: theCollection }) => {
- const { graph, inputs, pipeline } = maybeCompileBasePipeline();
- let messagesCount = 0;
- pipeline.pipe(
- output((data) => {
- const messages = data.getInner();
- messagesCount += messages.length;
- begin();
- messages.reduce((acc, [[key, tupleData], multiplicity]) => {
- const [value, orderByIndex] = tupleData;
- const changes = acc.get(key) || {
- deletes: 0,
- inserts: 0,
- value,
- orderByIndex
- };
- if (multiplicity < 0) {
- changes.deletes += Math.abs(multiplicity);
- } else if (multiplicity > 0) {
- changes.inserts += multiplicity;
- changes.value = value;
- changes.orderByIndex = orderByIndex;
- }
- acc.set(key, changes);
- return acc;
- }, /* @__PURE__ */ new Map()).forEach((changes, rawKey) => {
- const { deletes, inserts, value, orderByIndex } = changes;
- resultKeys.set(value, rawKey);
- if (orderByIndex !== void 0) {
- orderByIndices.set(value, orderByIndex);
- }
- if (inserts && deletes === 0) {
- write({
- value,
- type: `insert`
- });
- } else if (
- // Insert & update(s) (updates are a delete & insert)
- inserts > deletes || // Just update(s) but the item is already in the collection (so
- // was inserted previously).
- inserts === deletes && theCollection.has(rawKey)
- ) {
- write({
- value,
- type: `update`
- });
- } else if (deletes > 0) {
- write({
- value,
- type: `delete`
- });
- } else {
- throw new Error(
- `This should never happen ${JSON.stringify(changes)}`
- );
- }
- });
- commit();
- })
- );
- graph.finalize();
- let subscribedToAllCollections = false;
- const maybeRunGraph = (callback) => {
- if (allCollectionsReadyOrInitialCommit() && subscribedToAllCollections) {
- graph.run();
- const ready = (callback == null ? void 0 : callback()) ?? true;
- if (messagesCount === 0) {
- begin();
- commit();
- }
- if (ready && allCollectionsReady()) {
- markReady();
- }
- }
- };
- const unsubscribeCallbacks = /* @__PURE__ */ new Set();
- Object.entries(collections).forEach(([collectionId, collection]) => {
- const input = inputs[collectionId];
- const collectionAlias = findCollectionAlias(collectionId, query);
- const whereClause = collectionAlias && collectionWhereClausesCache ? collectionWhereClausesCache.get(collectionAlias) : void 0;
- const sendChangesToPipeline = (changes, callback) => {
- sendChangesToInput(input, changes, collection.config.getKey);
- maybeRunGraph(callback);
- };
- const sendVisibleChangesToPipeline = (changes, loadedInitialState, sentKeys) => {
- if (loadedInitialState) {
- return sendChangesToPipeline(changes);
- }
- const newChanges = [];
- for (const change of changes) {
- let newChange = change;
- if (!sentKeys.has(change.key)) {
- if (change.type === `update`) {
- newChange = { ...change, type: `insert` };
- } else if (change.type === `delete`) {
- continue;
- }
- }
- newChanges.push(newChange);
- }
- return sendChangesToPipeline(newChanges);
- };
- const loadKeys = (keys, sentKeys, filterFn) => {
- for (const key of keys) {
- if (sentKeys.has(key)) continue;
- const value = collection.get(key);
- if (value !== void 0 && filterFn(value)) {
- sentKeys.add(key);
- sendChangesToPipeline([{ type: `insert`, key, value }]);
- }
- }
- };
- const subscribeToAllChanges = (whereExpression) => {
- const unsubscribe = collection.subscribeChanges(
- sendChangesToPipeline,
- {
- includeInitialState: true,
- ...whereExpression ? { whereExpression } : void 0
- }
- );
- return unsubscribe;
- };
- const subscribeToMatchingChanges = (whereExpression) => {
- let loadedInitialState = false;
- const sentKeys = /* @__PURE__ */ new Set();
- const sendVisibleChanges = (changes) => {
- sendVisibleChangesToPipeline(changes, loadedInitialState, sentKeys);
- };
- const unsubscribe = collection.subscribeChanges(sendVisibleChanges, {
- whereExpression
- });
- const filterFn = whereExpression ? createFilterFunctionFromExpression(whereExpression) : () => true;
- const loadKs = (keys) => {
- return loadKeys(keys, sentKeys, filterFn);
- };
- lazyCollectionsCallbacks[collectionId] = {
- loadKeys: loadKs,
- loadInitialState: () => {
- if (loadedInitialState) return;
- loadedInitialState = true;
- const changes = collection.currentStateAsChanges({
- whereExpression
- });
- sendChangesToPipeline(changes);
- }
- };
- return unsubscribe;
- };
- const subscribeToOrderedChanges = (whereExpression) => {
- const {
- offset,
- limit,
- comparator,
- index,
- dataNeeded,
- valueExtractorForRawRow
- } = optimizableOrderByCollections[collectionId];
- if (!dataNeeded) {
- throw new Error(
- `Missing dataNeeded callback for collection ${collectionId}`
- );
- }
- const loadMoreIfNeeded = () => {
- const n = dataNeeded();
- if (n > 0) {
- loadNextItems(n);
- }
- return n === 0;
- };
- const sentValuesInfo = {
- sentKeys: /* @__PURE__ */ new Set(),
- biggest: void 0
- };
- const sendChangesToPipelineWithTracking = (changes) => {
- const trackedChanges = trackSentValues(
- changes,
- comparator,
- sentValuesInfo
- );
- sendChangesToPipeline(trackedChanges, loadMoreIfNeeded);
- };
- const loadNextItems = (n) => {
- const biggestSentRow = sentValuesInfo.biggest;
- const biggestSentValue = biggestSentRow ? valueExtractorForRawRow(biggestSentRow) : biggestSentRow;
- const nextOrderedKeys = index.take(n, biggestSentValue);
- const nextInserts = nextOrderedKeys.map((key) => {
- return { type: `insert`, key, value: collection.get(key) };
- });
- sendChangesToPipelineWithTracking(nextInserts);
- };
- loadNextItems(offset + limit);
- const sendChangesInRange = (changes) => {
- const splittedChanges = splitUpdates(changes);
- const filteredChanges = filterChangesSmallerOrEqualToMax(
- splittedChanges,
- comparator,
- sentValuesInfo.biggest
- );
- sendChangesToPipeline(filteredChanges, loadMoreIfNeeded);
- };
- const unsubscribe = collection.subscribeChanges(sendChangesInRange, {
- whereExpression
- });
- return unsubscribe;
- };
- const subscribeToChanges = (whereExpression) => {
- let unsubscribe;
- if (lazyCollections.has(collectionId)) {
- unsubscribe = subscribeToMatchingChanges(whereExpression);
- } else if (Object.hasOwn(optimizableOrderByCollections, collectionId)) {
- unsubscribe = subscribeToOrderedChanges(whereExpression);
- } else {
- unsubscribe = subscribeToAllChanges(whereExpression);
- }
- unsubscribeCallbacks.add(unsubscribe);
- };
- if (whereClause) {
- const whereExpression = convertToBasicExpression(
- whereClause,
- collectionAlias
- );
- if (whereExpression) {
- subscribeToChanges(whereExpression);
- } else {
- throw new Error(
- `Failed to convert WHERE clause to collection filter for collection '${collectionId}'. This indicates a bug in the query optimization logic.`
- );
- }
- } else {
- subscribeToChanges();
- }
- });
- subscribedToAllCollections = true;
- maybeRunGraph();
- return () => {
- unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe());
- graphCache = void 0;
- inputsCache = void 0;
- pipelineCache = void 0;
- collectionWhereClausesCache = void 0;
- };
- }
- };
- return {
- id,
- getKey: config.getKey || ((item) => resultKeys.get(item)),
- sync,
- compare,
- gcTime: config.gcTime || 5e3,
- // 5 seconds by default for live queries
- schema: config.schema,
- onInsert: config.onInsert,
- onUpdate: config.onUpdate,
- onDelete: config.onDelete,
- startSync: config.startSync
- };
+ const collectionConfigBuilder = new CollectionConfigBuilder(config);
+ return collectionConfigBuilder.getConfig();
  }
  function createLiveQueryCollection(configOrQuery) {
  if (typeof configOrQuery === `function`) {
@@ -351,92 +23,6 @@ function createLiveQueryCollection(configOrQuery) {
  function bridgeToCreateCollection(options) {
  return createCollection(options);
  }
- function sendChangesToInput(input, changes, getKey) {
- const multiSetArray = [];
- for (const change of changes) {
- const key = getKey(change.value);
- if (change.type === `insert`) {
- multiSetArray.push([[key, change.value], 1]);
- } else if (change.type === `update`) {
- multiSetArray.push([[key, change.previousValue], -1]);
- multiSetArray.push([[key, change.value], 1]);
- } else {
- multiSetArray.push([[key, change.value], -1]);
- }
- }
- input.sendData(new MultiSet(multiSetArray));
- }
- function extractCollectionsFromQuery(query) {
- const collections = {};
- function extractFromSource(source) {
- if (source.type === `collectionRef`) {
- collections[source.collection.id] = source.collection;
- } else if (source.type === `queryRef`) {
- extractFromQuery(source.query);
- }
- }
- function extractFromQuery(q) {
- if (q.from) {
- extractFromSource(q.from);
- }
- if (q.join && Array.isArray(q.join)) {
- for (const joinClause of q.join) {
- if (joinClause.from) {
- extractFromSource(joinClause.from);
- }
- }
- }
- }
- extractFromQuery(query);
- return collections;
- }
- function findCollectionAlias(collectionId, query) {
- var _a, _b, _c, _d;
- if (((_a = query.from) == null ? void 0 : _a.type) === `collectionRef` && ((_b = query.from.collection) == null ? void 0 : _b.id) === collectionId) {
- return query.from.alias;
- }
- if (query.join) {
- for (const joinClause of query.join) {
- if (((_c = joinClause.from) == null ? void 0 : _c.type) === `collectionRef` && ((_d = joinClause.from.collection) == null ? void 0 : _d.id) === collectionId) {
- return joinClause.from.alias;
- }
- }
- }
- return void 0;
- }
- function* trackSentValues(changes, comparator, tracker) {
- for (const change of changes) {
- tracker.sentKeys.add(change.key);
- if (!tracker.biggest) {
- tracker.biggest = change.value;
- } else if (comparator(tracker.biggest, change.value) < 0) {
- tracker.biggest = change.value;
- }
- yield change;
- }
- }
- function* splitUpdates(changes) {
- for (const change of changes) {
- if (change.type === `update`) {
- yield { type: `delete`, key: change.key, value: change.previousValue };
- yield { type: `insert`, key: change.key, value: change.value };
- } else {
- yield change;
- }
- }
- }
- function* filterChanges(changes, f) {
- for (const change of changes) {
- if (f(change)) {
- yield change;
- }
- }
- }
- function* filterChangesSmallerOrEqualToMax(changes, comparator, maxValue) {
- yield* filterChanges(changes, (change) => {
- return !maxValue || comparator(change.value, maxValue) <= 0;
- });
- }
  export {
  createLiveQueryCollection,
  liveQueryCollectionOptions
package/dist/esm/query/live-query-collection.js.map
@@ -1 +1 @@
- {"version":3,"file":"live-query-collection.js","sources":["../../../src/query/live-query-collection.ts"],"sourcesContent":["import { D2, MultiSet, output } from \"@tanstack/db-ivm\"\nimport { createCollection } from \"../collection.js\"\nimport { createFilterFunctionFromExpression } from \"../change-events.js\"\nimport { compileQuery } from \"./compiler/index.js\"\nimport { buildQuery, getQueryIR } from \"./builder/index.js\"\nimport { convertToBasicExpression } from \"./compiler/expressions.js\"\nimport type { OrderByOptimizationInfo } from \"./compiler/order-by.js\"\nimport type { InitialQueryBuilder, QueryBuilder } from \"./builder/index.js\"\nimport type { Collection } from \"../collection.js\"\nimport type {\n ChangeMessage,\n CollectionConfig,\n KeyedStream,\n ResultStream,\n SyncConfig,\n UtilsRecord,\n} from \"../types.js\"\nimport type { Context, GetResult } from \"./builder/types.js\"\nimport type { MultiSetArray, RootStreamBuilder } from \"@tanstack/db-ivm\"\nimport type { BasicExpression } from \"./ir.js\"\nimport type { LazyCollectionCallbacks } from \"./compiler/joins.js\"\n\n// Global counter for auto-generated collection IDs\nlet liveQueryCollectionCounter = 0\n\n/**\n * Configuration interface for live query collection options\n *\n * @example\n * ```typescript\n * const config: LiveQueryCollectionConfig<any, any> = {\n * // id is optional - will auto-generate \"live-query-1\", \"live-query-2\", etc.\n * query: (q) => q\n * .from({ comment: commentsCollection })\n * .join(\n * { user: usersCollection },\n * ({ comment, user }) => eq(comment.user_id, user.id)\n * )\n * .where(({ comment }) => eq(comment.active, true))\n * .select(({ comment, user }) => ({\n * id: comment.id,\n * content: comment.content,\n * authorName: user.name,\n * })),\n * // getKey is optional - defaults to using stream key\n * getKey: (item) => item.id,\n * }\n * ```\n */\nexport interface LiveQueryCollectionConfig<\n TContext extends Context,\n TResult extends object = GetResult<TContext> & object,\n> {\n /**\n * Unique identifier for the collection\n * If not provided, defaults to `live-query-${number}` with auto-incrementing number\n */\n id?: string\n\n /**\n * Query builder function that defines the live query\n */\n query:\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n | QueryBuilder<TContext>\n\n /**\n * Function to extract the key from result items\n * If not provided, defaults to using the key from the D2 stream\n */\n getKey?: (item: TResult) => string | number\n\n /**\n * Optional schema for validation\n */\n schema?: CollectionConfig<TResult>[`schema`]\n\n /**\n * Optional mutation handlers\n */\n onInsert?: CollectionConfig<TResult>[`onInsert`]\n onUpdate?: CollectionConfig<TResult>[`onUpdate`]\n onDelete?: CollectionConfig<TResult>[`onDelete`]\n\n /**\n * Start sync / the query immediately\n */\n startSync?: boolean\n\n /**\n * GC time for the collection\n */\n gcTime?: number\n}\n\n/**\n * Creates live query collection options for use with createCollection\n *\n * @example\n * ```typescript\n * const options = liveQueryCollectionOptions({\n * // id is optional - will auto-generate if not provided\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => eq(post.published, true))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * content: post.content,\n * })),\n * // getKey is optional - will use stream key if not provided\n * })\n *\n * const collection = createCollection(options)\n * ```\n *\n * @param config - Configuration 
options for the live query collection\n * @returns Collection options that can be passed to createCollection\n */\nexport function liveQueryCollectionOptions<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n config: LiveQueryCollectionConfig<TContext, TResult>\n): CollectionConfig<TResult> {\n // Generate a unique ID if not provided\n const id = config.id || `live-query-${++liveQueryCollectionCounter}`\n\n // Build the query using the provided query builder function or instance\n const query =\n typeof config.query === `function`\n ? buildQuery<TContext>(config.query)\n : getQueryIR(config.query)\n\n // WeakMap to store the keys of the results so that we can retreve them in the\n // getKey function\n const resultKeys = new WeakMap<object, unknown>()\n\n // WeakMap to store the orderBy index for each result\n const orderByIndices = new WeakMap<object, string>()\n\n // Create compare function for ordering if the query has orderBy\n const compare =\n query.orderBy && query.orderBy.length > 0\n ? (val1: TResult, val2: TResult): number => {\n // Use the orderBy index stored in the WeakMap\n const index1 = orderByIndices.get(val1)\n const index2 = orderByIndices.get(val2)\n\n // Compare fractional indices lexicographically\n if (index1 && index2) {\n if (index1 < index2) {\n return -1\n } else if (index1 > index2) {\n return 1\n } else {\n return 0\n }\n }\n\n // Fallback to no ordering if indices are missing\n return 0\n }\n : undefined\n\n const collections = extractCollectionsFromQuery(query)\n\n const allCollectionsReady = () => {\n return Object.values(collections).every((collection) =>\n collection.isReady()\n )\n }\n\n const allCollectionsReadyOrInitialCommit = () => {\n return Object.values(collections).every(\n (collection) =>\n collection.status === `ready` || collection.status === `initialCommit`\n )\n }\n\n let graphCache: D2 | undefined\n let inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined\n let pipelineCache: ResultStream | undefined\n let collectionWhereClausesCache:\n | Map<string, BasicExpression<boolean>>\n | undefined\n\n // Map of collection IDs to functions that load keys for that lazy collection\n const lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}\n // Set of collection IDs that are lazy collections\n const lazyCollections = new Set<string>()\n // Set of collection IDs that include an optimizable ORDER BY clause\n const optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> =\n {}\n\n const compileBasePipeline = () => {\n graphCache = new D2()\n inputsCache = Object.fromEntries(\n Object.entries(collections).map(([key]) => [\n key,\n graphCache!.newInput<any>(),\n ])\n )\n\n // Compile the query and get both pipeline and collection WHERE clauses\n ;({\n pipeline: pipelineCache,\n collectionWhereClauses: collectionWhereClausesCache,\n } = compileQuery(\n query,\n inputsCache as Record<string, KeyedStream>,\n collections,\n lazyCollectionsCallbacks,\n lazyCollections,\n optimizableOrderByCollections\n ))\n }\n\n const maybeCompileBasePipeline = () => {\n if (!graphCache || !inputsCache || !pipelineCache) {\n compileBasePipeline()\n }\n return {\n graph: graphCache!,\n inputs: inputsCache!,\n pipeline: pipelineCache!,\n }\n }\n\n // Compile the base pipeline once initially\n // This is done to ensure that any errors are thrown immediately and synchronously\n compileBasePipeline()\n\n // Create the sync configuration\n const sync: SyncConfig<TResult> = {\n rowUpdateMode: `full`,\n 
sync: ({ begin, write, commit, markReady, collection: theCollection }) => {\n const { graph, inputs, pipeline } = maybeCompileBasePipeline()\n let messagesCount = 0\n pipeline.pipe(\n output((data) => {\n const messages = data.getInner()\n messagesCount += messages.length\n\n begin()\n messages\n .reduce((acc, [[key, tupleData], multiplicity]) => {\n // All queries now consistently return [value, orderByIndex] format\n // where orderByIndex is undefined for queries without ORDER BY\n const [value, orderByIndex] = tupleData as [\n TResult,\n string | undefined,\n ]\n\n const changes = acc.get(key) || {\n deletes: 0,\n inserts: 0,\n value,\n orderByIndex,\n }\n if (multiplicity < 0) {\n changes.deletes += Math.abs(multiplicity)\n } else if (multiplicity > 0) {\n changes.inserts += multiplicity\n changes.value = value\n changes.orderByIndex = orderByIndex\n }\n acc.set(key, changes)\n return acc\n }, new Map<unknown, { deletes: number; inserts: number; value: TResult; orderByIndex: string | undefined }>())\n .forEach((changes, rawKey) => {\n const { deletes, inserts, value, orderByIndex } = changes\n\n // Store the key of the result so that we can retrieve it in the\n // getKey function\n resultKeys.set(value, rawKey)\n\n // Store the orderBy index if it exists\n if (orderByIndex !== undefined) {\n orderByIndices.set(value, orderByIndex)\n }\n\n // Simple singular insert.\n if (inserts && deletes === 0) {\n write({\n value,\n type: `insert`,\n })\n } else if (\n // Insert & update(s) (updates are a delete & insert)\n inserts > deletes ||\n // Just update(s) but the item is already in the collection (so\n // was inserted previously).\n (inserts === deletes &&\n theCollection.has(rawKey as string | number))\n ) {\n write({\n value,\n type: `update`,\n })\n // Only delete is left as an option\n } else if (deletes > 0) {\n write({\n value,\n type: `delete`,\n })\n } else {\n throw new Error(\n `This should never happen ${JSON.stringify(changes)}`\n )\n }\n })\n commit()\n })\n )\n\n graph.finalize()\n\n let subscribedToAllCollections = false\n\n // The callback function is called after the graph has run.\n // This gives the callback a chance to load more data if needed,\n // that's used to optimize orderBy operators that set a limit,\n // in order to load some more data if we still don't have enough rows after the pipeline has run.\n // That can happend because even though we load N rows, the pipeline might filter some of these rows out\n // causing the orderBy operator to receive less than N rows or even no rows at all.\n // So this callback would notice that it doesn't have enough rows and load some more.\n // The callback returns a boolean, when it's true it's done loading data and we can mark the collection as ready.\n const maybeRunGraph = (callback?: () => boolean) => {\n // We only run the graph if all the collections are ready\n if (\n allCollectionsReadyOrInitialCommit() &&\n subscribedToAllCollections\n ) {\n graph.run()\n const ready = callback?.() ?? 
true\n // On the initial run, we may need to do an empty commit to ensure that\n // the collection is initialized\n if (messagesCount === 0) {\n begin()\n commit()\n }\n // Mark the collection as ready after the first successful run\n if (ready && allCollectionsReady()) {\n markReady()\n }\n }\n }\n\n // Unsubscribe callbacks\n const unsubscribeCallbacks = new Set<() => void>()\n\n // Subscribe to all collections, using WHERE clause optimization when available\n Object.entries(collections).forEach(([collectionId, collection]) => {\n const input = inputs[collectionId]!\n const collectionAlias = findCollectionAlias(collectionId, query)\n const whereClause =\n collectionAlias && collectionWhereClausesCache\n ? collectionWhereClausesCache.get(collectionAlias)\n : undefined\n\n const sendChangesToPipeline = (\n changes: Iterable<ChangeMessage<any, string | number>>,\n callback?: () => boolean\n ) => {\n sendChangesToInput(input, changes, collection.config.getKey)\n maybeRunGraph(callback)\n }\n\n // Wraps the sendChangesToPipeline function\n // in order to turn `update`s into `insert`s\n // for keys that have not been sent to the pipeline yet\n // and filter out deletes for keys that have not been sent\n const sendVisibleChangesToPipeline = (\n changes: Array<ChangeMessage<any, string | number>>,\n loadedInitialState: boolean,\n sentKeys: Set<string | number>\n ) => {\n if (loadedInitialState) {\n // There was no index for the join key\n // so we loaded the initial state\n // so we can safely assume that the pipeline has seen all keys\n return sendChangesToPipeline(changes)\n }\n\n const newChanges = []\n for (const change of changes) {\n let newChange = change\n if (!sentKeys.has(change.key)) {\n if (change.type === `update`) {\n newChange = { ...change, type: `insert` }\n } else if (change.type === `delete`) {\n // filter out deletes for keys that have not been sent\n continue\n }\n }\n newChanges.push(newChange)\n }\n\n return sendChangesToPipeline(newChanges)\n }\n\n const loadKeys = (\n keys: Iterable<string | number>,\n sentKeys: Set<string | number>,\n filterFn: (item: object) => boolean\n ) => {\n for (const key of keys) {\n // Only load the key once\n if (sentKeys.has(key)) continue\n\n const value = collection.get(key)\n if (value !== undefined && filterFn(value)) {\n sentKeys.add(key)\n sendChangesToPipeline([{ type: `insert`, key, value }])\n }\n }\n }\n\n const subscribeToAllChanges = (\n whereExpression: BasicExpression<boolean> | undefined\n ) => {\n const unsubscribe = collection.subscribeChanges(\n sendChangesToPipeline,\n {\n includeInitialState: true,\n ...(whereExpression ? { whereExpression } : undefined),\n }\n )\n return unsubscribe\n }\n\n // Subscribes to all changes but without the initial state\n // such that we can load keys from the initial state on demand\n // based on the matching keys from the main collection in the join\n const subscribeToMatchingChanges = (\n whereExpression: BasicExpression<boolean> | undefined\n ) => {\n let loadedInitialState = false\n const sentKeys = new Set<string | number>()\n\n const sendVisibleChanges = (\n changes: Array<ChangeMessage<any, string | number>>\n ) => {\n sendVisibleChangesToPipeline(changes, loadedInitialState, sentKeys)\n }\n\n const unsubscribe = collection.subscribeChanges(sendVisibleChanges, {\n whereExpression,\n })\n\n // Create a function that loads keys from the collection\n // into the query pipeline on demand\n const filterFn = whereExpression\n ? 
createFilterFunctionFromExpression(whereExpression)\n : () => true\n const loadKs = (keys: Set<string | number>) => {\n return loadKeys(keys, sentKeys, filterFn)\n }\n\n // Store the functions to load keys and load initial state in the `lazyCollectionsCallbacks` map\n // This is used by the join operator to dynamically load matching keys from the lazy collection\n // or to get the full initial state of the collection if there's no index for the join key\n lazyCollectionsCallbacks[collectionId] = {\n loadKeys: loadKs,\n loadInitialState: () => {\n // Make sure we only load the initial state once\n if (loadedInitialState) return\n loadedInitialState = true\n\n const changes = collection.currentStateAsChanges({\n whereExpression,\n })\n sendChangesToPipeline(changes)\n },\n }\n return unsubscribe\n }\n\n const subscribeToOrderedChanges = (\n whereExpression: BasicExpression<boolean> | undefined\n ) => {\n const {\n offset,\n limit,\n comparator,\n index,\n dataNeeded,\n valueExtractorForRawRow,\n } = optimizableOrderByCollections[collectionId]!\n\n if (!dataNeeded) {\n // This should never happen because the topK operator should always set the size callback\n // which in turn should lead to the orderBy operator setting the dataNeeded callback\n throw new Error(\n `Missing dataNeeded callback for collection ${collectionId}`\n )\n }\n\n // This function is called by maybeRunGraph\n // after each iteration of the query pipeline\n // to ensure that the orderBy operator has enough data to work with\n const loadMoreIfNeeded = () => {\n // `dataNeeded` probes the orderBy operator to see if it needs more data\n // if it needs more data, it returns the number of items it needs\n const n = dataNeeded()\n if (n > 0) {\n loadNextItems(n)\n }\n\n // Indicate that we're done loading data if we didn't need to load more data\n return n === 0\n }\n\n // Keep track of the keys we've sent\n // and also the biggest value we've sent so far\n const sentValuesInfo: {\n sentKeys: Set<string | number>\n biggest: any\n } = {\n sentKeys: new Set<string | number>(),\n biggest: undefined,\n }\n\n const sendChangesToPipelineWithTracking = (\n changes: Iterable<ChangeMessage<any, string | number>>\n ) => {\n const trackedChanges = trackSentValues(\n changes,\n comparator,\n sentValuesInfo\n )\n sendChangesToPipeline(trackedChanges, loadMoreIfNeeded)\n }\n\n // Loads the next `n` items from the collection\n // starting from the biggest item it has sent\n const loadNextItems = (n: number) => {\n const biggestSentRow = sentValuesInfo.biggest\n const biggestSentValue = biggestSentRow\n ? valueExtractorForRawRow(biggestSentRow)\n : biggestSentRow\n // Take the `n` items after the biggest sent value\n const nextOrderedKeys = index.take(n, biggestSentValue)\n const nextInserts: Array<ChangeMessage<any, string | number>> =\n nextOrderedKeys.map((key) => {\n return { type: `insert`, key, value: collection.get(key) }\n })\n sendChangesToPipelineWithTracking(nextInserts)\n }\n\n // Load the first `offset + limit` values from the index\n // i.e. 
the K items from the collection that fall into the requested range: [offset, offset + limit[\n loadNextItems(offset + limit)\n\n const sendChangesInRange = (\n changes: Iterable<ChangeMessage<any, string | number>>\n ) => {\n // Split live updates into a delete of the old value and an insert of the new value\n // and filter out changes that are bigger than the biggest value we've sent so far\n // because they can't affect the topK\n const splittedChanges = splitUpdates(changes)\n const filteredChanges = filterChangesSmallerOrEqualToMax(\n splittedChanges,\n comparator,\n sentValuesInfo.biggest\n )\n sendChangesToPipeline(filteredChanges, loadMoreIfNeeded)\n }\n\n // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far\n // values that are bigger don't need to be sent because they can't affect the topK\n const unsubscribe = collection.subscribeChanges(sendChangesInRange, {\n whereExpression,\n })\n\n return unsubscribe\n }\n\n const subscribeToChanges = (\n whereExpression?: BasicExpression<boolean>\n ) => {\n let unsubscribe: () => void\n if (lazyCollections.has(collectionId)) {\n unsubscribe = subscribeToMatchingChanges(whereExpression)\n } else if (\n Object.hasOwn(optimizableOrderByCollections, collectionId)\n ) {\n unsubscribe = subscribeToOrderedChanges(whereExpression)\n } else {\n unsubscribe = subscribeToAllChanges(whereExpression)\n }\n unsubscribeCallbacks.add(unsubscribe)\n }\n\n if (whereClause) {\n // Convert WHERE clause to BasicExpression format for collection subscription\n const whereExpression = convertToBasicExpression(\n whereClause,\n collectionAlias!\n )\n\n if (whereExpression) {\n // Use index optimization for this collection\n subscribeToChanges(whereExpression)\n } else {\n // This should not happen - if we have a whereClause but can't create whereExpression,\n // it indicates a bug in our optimization logic\n throw new Error(\n `Failed to convert WHERE clause to collection filter for collection '${collectionId}'. 
` +\n `This indicates a bug in the query optimization logic.`\n )\n }\n } else {\n // No WHERE clause for this collection, use regular subscription\n subscribeToChanges()\n }\n })\n\n subscribedToAllCollections = true\n\n // Initial run\n maybeRunGraph()\n\n // Return the unsubscribe function\n return () => {\n unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())\n\n // Reset caches so a fresh graph/pipeline is compiled on next start\n // This avoids reusing a finalized D2 graph across GC restarts\n graphCache = undefined\n inputsCache = undefined\n pipelineCache = undefined\n collectionWhereClausesCache = undefined\n }\n },\n }\n\n // Return collection configuration\n return {\n id,\n getKey:\n config.getKey || ((item) => resultKeys.get(item) as string | number),\n sync,\n compare,\n gcTime: config.gcTime || 5000, // 5 seconds by default for live queries\n schema: config.schema,\n onInsert: config.onInsert,\n onUpdate: config.onUpdate,\n onDelete: config.onDelete,\n startSync: config.startSync,\n }\n}\n\n/**\n * Creates a live query collection directly\n *\n * @example\n * ```typescript\n * // Minimal usage - just pass a query function\n * const activeUsers = createLiveQueryCollection(\n * (q) => q\n * .from({ user: usersCollection })\n * .where(({ user }) => eq(user.active, true))\n * .select(({ user }) => ({ id: user.id, name: user.name }))\n * )\n *\n * // Full configuration with custom options\n * const searchResults = createLiveQueryCollection({\n * id: \"search-results\", // Custom ID (auto-generated if omitted)\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => like(post.title, `%${searchTerm}%`))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * excerpt: post.excerpt,\n * })),\n * getKey: (item) => item.id, // Custom key function (uses stream key if omitted)\n * utils: {\n * updateSearchTerm: (newTerm: string) => {\n * // Custom utility functions\n * }\n * }\n * })\n * ```\n */\n\n// Overload 1: Accept just the query function\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n query: (q: InitialQueryBuilder) => QueryBuilder<TContext>\n): Collection<TResult, string | number, {}>\n\n// Overload 2: Accept full config object with optional utilities\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n config: LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils>\n\n// Implementation\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n configOrQuery:\n | (LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils })\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n): Collection<TResult, string | number, TUtils> {\n // Determine if the argument is a function (query) or a config object\n if (typeof configOrQuery === `function`) {\n // Simple query function case\n const config: LiveQueryCollectionConfig<TContext, TResult> = {\n query: configOrQuery as (\n q: InitialQueryBuilder\n ) => QueryBuilder<TContext>,\n }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection(options)\n } else {\n // Config object case\n const config = configOrQuery as LiveQueryCollectionConfig<\n TContext,\n TResult\n > & { utils?: TUtils }\n 
const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection({\n ...options,\n utils: config.utils,\n })\n }\n}\n\n/**\n * Bridge function that handles the type compatibility between query2's TResult\n * and core collection's ResolveType without exposing ugly type assertions to users\n */\nfunction bridgeToCreateCollection<\n TResult extends object,\n TUtils extends UtilsRecord = {},\n>(\n options: CollectionConfig<TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils> {\n // This is the only place we need a type assertion, hidden from user API\n return createCollection(options as any) as unknown as Collection<\n TResult,\n string | number,\n TUtils\n >\n}\n\n/**\n * Helper function to send changes to a D2 input stream\n */\nfunction sendChangesToInput(\n input: RootStreamBuilder<unknown>,\n changes: Iterable<ChangeMessage>,\n getKey: (item: ChangeMessage[`value`]) => any\n) {\n const multiSetArray: MultiSetArray<unknown> = []\n for (const change of changes) {\n const key = getKey(change.value)\n if (change.type === `insert`) {\n multiSetArray.push([[key, change.value], 1])\n } else if (change.type === `update`) {\n multiSetArray.push([[key, change.previousValue], -1])\n multiSetArray.push([[key, change.value], 1])\n } else {\n // change.type === `delete`\n multiSetArray.push([[key, change.value], -1])\n }\n }\n input.sendData(new MultiSet(multiSetArray))\n}\n\n/**\n * Helper function to extract collections from a compiled query\n * Traverses the query IR to find all collection references\n * Maps collections by their ID (not alias) as expected by the compiler\n */\nfunction extractCollectionsFromQuery(\n query: any\n): Record<string, Collection<any, any, any>> {\n const collections: Record<string, any> = {}\n\n // Helper function to recursively extract collections from a query or source\n function extractFromSource(source: any) {\n if (source.type === `collectionRef`) {\n collections[source.collection.id] = source.collection\n } else if (source.type === `queryRef`) {\n // Recursively extract from subquery\n extractFromQuery(source.query)\n }\n }\n\n // Helper function to recursively extract collections from a query\n function extractFromQuery(q: any) {\n // Extract from FROM clause\n if (q.from) {\n extractFromSource(q.from)\n }\n\n // Extract from JOIN clauses\n if (q.join && Array.isArray(q.join)) {\n for (const joinClause of q.join) {\n if (joinClause.from) {\n extractFromSource(joinClause.from)\n }\n }\n }\n }\n\n // Start extraction from the root query\n extractFromQuery(query)\n\n return collections\n}\n\n/**\n * Converts WHERE expressions from the query IR into a BasicExpression for subscribeChanges\n *\n * @param whereExpressions Array of WHERE expressions to convert\n * @param tableAlias The table alias used in the expressions\n * @returns A BasicExpression that can be used with the collection's index system\n */\n\n/**\n * Finds the alias for a collection ID in the query\n */\nfunction findCollectionAlias(\n collectionId: string,\n query: any\n): string | undefined {\n // Check FROM clause\n if (\n query.from?.type === `collectionRef` &&\n query.from.collection?.id === collectionId\n ) {\n return query.from.alias\n }\n\n // Check JOIN clauses\n if (query.join) {\n for (const joinClause of query.join) {\n if (\n joinClause.from?.type === `collectionRef` &&\n joinClause.from.collection?.id === collectionId\n ) {\n return joinClause.from.alias\n }\n }\n }\n\n return undefined\n}\n\nfunction* trackSentValues(\n 
changes: Iterable<ChangeMessage<any, string | number>>,\n comparator: (a: any, b: any) => number,\n tracker: { sentKeys: Set<string | number>; biggest: any }\n) {\n for (const change of changes) {\n tracker.sentKeys.add(change.key)\n\n if (!tracker.biggest) {\n tracker.biggest = change.value\n } else if (comparator(tracker.biggest, change.value) < 0) {\n tracker.biggest = change.value\n }\n\n yield change\n }\n}\n\n/** Splits updates into a delete of the old value and an insert of the new value */\nfunction* splitUpdates<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (change.type === `update`) {\n yield { type: `delete`, key: change.key, value: change.previousValue! }\n yield { type: `insert`, key: change.key, value: change.value }\n } else {\n yield change\n }\n }\n}\n\nfunction* filterChanges<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n f: (change: ChangeMessage<T, TKey>) => boolean\n): Generator<ChangeMessage<T, TKey>> {\n for (const change of changes) {\n if (f(change)) {\n yield change\n }\n }\n}\n\n/**\n * Filters changes to only include those that are smaller than the provided max value\n * @param changes - Iterable of changes to filter\n * @param comparator - Comparator function to use for filtering\n * @param maxValue - Range to filter changes within (range boundaries are exclusive)\n * @returns Iterable of changes that fall within the range\n */\nfunction* filterChangesSmallerOrEqualToMax<\n T extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n>(\n changes: Iterable<ChangeMessage<T, TKey>>,\n comparator: (a: any, b: any) => number,\n maxValue: any\n): Generator<ChangeMessage<T, TKey>> {\n yield* filterChanges(changes, (change) => {\n return !maxValue || comparator(change.value, maxValue) <= 0\n 
})\n}\n"],"names":[],"mappings":";;;;;;AAuBA,IAAI,6BAA6B;AAgG1B,SAAS,2BAId,QAC2B;AAE3B,QAAM,KAAK,OAAO,MAAM,cAAc,EAAE,0BAA0B;AAGlE,QAAM,QACJ,OAAO,OAAO,UAAU,aACpB,WAAqB,OAAO,KAAK,IACjC,WAAW,OAAO,KAAK;AAI7B,QAAM,iCAAiB,QAAA;AAGvB,QAAM,qCAAqB,QAAA;AAG3B,QAAM,UACJ,MAAM,WAAW,MAAM,QAAQ,SAAS,IACpC,CAAC,MAAe,SAA0B;AAExC,UAAM,SAAS,eAAe,IAAI,IAAI;AACtC,UAAM,SAAS,eAAe,IAAI,IAAI;AAGtC,QAAI,UAAU,QAAQ;AACpB,UAAI,SAAS,QAAQ;AACnB,eAAO;AAAA,MACT,WAAW,SAAS,QAAQ;AAC1B,eAAO;AAAA,MACT,OAAO;AACL,eAAO;AAAA,MACT;AAAA,IACF;AAGA,WAAO;AAAA,EACT,IACA;AAEN,QAAM,cAAc,4BAA4B,KAAK;AAErD,QAAM,sBAAsB,MAAM;AAChC,WAAO,OAAO,OAAO,WAAW,EAAE;AAAA,MAAM,CAAC,eACvC,WAAW,QAAA;AAAA,IAAQ;AAAA,EAEvB;AAEA,QAAM,qCAAqC,MAAM;AAC/C,WAAO,OAAO,OAAO,WAAW,EAAE;AAAA,MAChC,CAAC,eACC,WAAW,WAAW,WAAW,WAAW,WAAW;AAAA,IAAA;AAAA,EAE7D;AAEA,MAAI;AACJ,MAAI;AACJ,MAAI;AACJ,MAAI;AAKJ,QAAM,2BAAoE,CAAA;AAE1E,QAAM,sCAAsB,IAAA;AAE5B,QAAM,gCACJ,CAAA;AAEF,QAAM,sBAAsB,MAAM;AAChC,iBAAa,IAAI,GAAA;AACjB,kBAAc,OAAO;AAAA,MACnB,OAAO,QAAQ,WAAW,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM;AAAA,QACzC;AAAA,QACA,WAAY,SAAA;AAAA,MAAc,CAC3B;AAAA,IAAA;AAIF,KAAC;AAAA,MACA,UAAU;AAAA,MACV,wBAAwB;AAAA,IAAA,IACtB;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAAA,EAEJ;AAEA,QAAM,2BAA2B,MAAM;AACrC,QAAI,CAAC,cAAc,CAAC,eAAe,CAAC,eAAe;AACjD,0BAAA;AAAA,IACF;AACA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,UAAU;AAAA,IAAA;AAAA,EAEd;AAIA,sBAAA;AAGA,QAAM,OAA4B;AAAA,IAChC,eAAe;AAAA,IACf,MAAM,CAAC,EAAE,OAAO,OAAO,QAAQ,WAAW,YAAY,oBAAoB;AACxE,YAAM,EAAE,OAAO,QAAQ,SAAA,IAAa,yBAAA;AACpC,UAAI,gBAAgB;AACpB,eAAS;AAAA,QACP,OAAO,CAAC,SAAS;AACf,gBAAM,WAAW,KAAK,SAAA;AACtB,2BAAiB,SAAS;AAE1B,gBAAA;AACA,mBACG,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,SAAS,GAAG,YAAY,MAAM;AAGjD,kBAAM,CAAC,OAAO,YAAY,IAAI;AAK9B,kBAAM,UAAU,IAAI,IAAI,GAAG,KAAK;AAAA,cAC9B,SAAS;AAAA,cACT,SAAS;AAAA,cACT;AAAA,cACA;AAAA,YAAA;AAEF,gBAAI,eAAe,GAAG;AACpB,sBAAQ,WAAW,KAAK,IAAI,YAAY;AAAA,YAC1C,WAAW,eAAe,GAAG;AAC3B,sBAAQ,WAAW;AACnB,sBAAQ,QAAQ;AAChB,sBAAQ,eAAe;AAAA,YACzB;AACA,gBAAI,IAAI,KAAK,OAAO;AACpB,mBAAO;AAAA,UACT,uBAAO,IAAA,CAAsG,EAC5G,QAAQ,CAAC,SAAS,WAAW;AAC5B,kBAAM,EAAE,SAAS,SAAS,OAAO,iBAAiB;AAIlD,uBAAW,IAAI,OAAO,MAAM;AAG5B,gBAAI,iBAAiB,QAAW;AAC9B,6BAAe,IAAI,OAAO,YAAY;AAAA,YACxC;AAGA,gBAAI,WAAW,YAAY,GAAG;AAC5B,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH;AAAA;AAAA,cAEE,UAAU;AAAA;AAAA,cAGT,YAAY,WACX,cAAc,IAAI,MAAyB;AAAA,cAC7C;AACA,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YAEH,WAAW,UAAU,GAAG;AACtB,oBAAM;AAAA,gBACJ;AAAA,gBACA,MAAM;AAAA,cAAA,CACP;AAAA,YACH,OAAO;AACL,oBAAM,IAAI;AAAA,gBACR,4BAA4B,KAAK,UAAU,OAAO,CAAC;AAAA,cAAA;AAAA,YAEvD;AAAA,UACF,CAAC;AACH,iBAAA;AAAA,QACF,CAAC;AAAA,MAAA;AAGH,YAAM,SAAA;AAEN,UAAI,6BAA6B;AAUjC,YAAM,gBAAgB,CAAC,aAA6B;AAElD,YACE,mCAAA,KACA,4BACA;AACA,gBAAM,IAAA;AACN,gBAAM,SAAQ,2CAAgB;AAG9B,cAAI,kBAAkB,GAAG;AACvB,kBAAA;AACA,mBAAA;AAAA,UACF;AAEA,cAAI,SAAS,uBAAuB;AAClC,sBAAA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,YAAM,2CAA2B,IAAA;AAGjC,aAAO,QAAQ,WAAW,EAAE,QAAQ,CAAC,CAAC,cAAc,UAAU,MAAM;AAClE,cAAM,QAAQ,OAAO,YAAY;AACjC,cAAM,kBAAkB,oBAAoB,cAAc,KAAK;AAC/D,cAAM,cACJ,mBAAmB,8BACf,4BAA4B,IAAI,eAAe,IAC/C;AAEN,cAAM,wBAAwB,CAC5B,SACA,aACG;AACH,6BAAmB,OAAO,SAAS,WAAW,OAAO,MAAM;AAC3D,wBAAc,QAAQ;AAAA,QACxB;AAMA,cAAM,+BAA+B,CACnC,SACA,oBACA,aACG;AACH,cAAI,oBAAoB;AAItB,mBAAO,sBAAsB,OAAO;AAAA,UACtC;AAEA,gBAAM,aAAa,CAAA;AACnB,qBAAW,UAAU,SAAS;AAC5B,gBAAI,YAAY;AAChB,gBAAI,CAAC,SAAS,IAAI,OAAO,GAAG,GAAG;AAC7B,kBAAI,OAAO,SAAS,UAAU;AAC5B,4BAAY,EAAE,GAAG,QAAQ,MAAM,SAAA;AAAA,cACjC,WAAW,OAAO,SAAS,UAAU;AAEnC;AAAA,cACF;AAAA,YACF;AACA,uBAAW,KAAK,SAAS;AAAA,UAC3B;AAEA,iBAAO,sBAAsB,UAAU;AAAA,QACzC;AAEA,cAAM,WAAW,CACf,MACA,UACA,aACG;AACH,
qBAAW,OAAO,MAAM;AAEtB,gBAAI,SAAS,IAAI,GAAG,EAAG;AAEvB,kBAAM,QAAQ,WAAW,IAAI,GAAG;AAChC,gBAAI,UAAU,UAAa,SAAS,KAAK,GAAG;AAC1C,uBAAS,IAAI,GAAG;AAChB,oCAAsB,CAAC,EAAE,MAAM,UAAU,KAAK,MAAA,CAAO,CAAC;AAAA,YACxD;AAAA,UACF;AAAA,QACF;AAEA,cAAM,wBAAwB,CAC5B,oBACG;AACH,gBAAM,cAAc,WAAW;AAAA,YAC7B;AAAA,YACA;AAAA,cACE,qBAAqB;AAAA,cACrB,GAAI,kBAAkB,EAAE,oBAAoB;AAAA,YAAA;AAAA,UAC9C;AAEF,iBAAO;AAAA,QACT;AAKA,cAAM,6BAA6B,CACjC,oBACG;AACH,cAAI,qBAAqB;AACzB,gBAAM,+BAAe,IAAA;AAErB,gBAAM,qBAAqB,CACzB,YACG;AACH,yCAA6B,SAAS,oBAAoB,QAAQ;AAAA,UACpE;AAEA,gBAAM,cAAc,WAAW,iBAAiB,oBAAoB;AAAA,YAClE;AAAA,UAAA,CACD;AAID,gBAAM,WAAW,kBACb,mCAAmC,eAAe,IAClD,MAAM;AACV,gBAAM,SAAS,CAAC,SAA+B;AAC7C,mBAAO,SAAS,MAAM,UAAU,QAAQ;AAAA,UAC1C;AAKA,mCAAyB,YAAY,IAAI;AAAA,YACvC,UAAU;AAAA,YACV,kBAAkB,MAAM;AAEtB,kBAAI,mBAAoB;AACxB,mCAAqB;AAErB,oBAAM,UAAU,WAAW,sBAAsB;AAAA,gBAC/C;AAAA,cAAA,CACD;AACD,oCAAsB,OAAO;AAAA,YAC/B;AAAA,UAAA;AAEF,iBAAO;AAAA,QACT;AAEA,cAAM,4BAA4B,CAChC,oBACG;AACH,gBAAM;AAAA,YACJ;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UAAA,IACE,8BAA8B,YAAY;AAE9C,cAAI,CAAC,YAAY;AAGf,kBAAM,IAAI;AAAA,cACR,8CAA8C,YAAY;AAAA,YAAA;AAAA,UAE9D;AAKA,gBAAM,mBAAmB,MAAM;AAG7B,kBAAM,IAAI,WAAA;AACV,gBAAI,IAAI,GAAG;AACT,4BAAc,CAAC;AAAA,YACjB;AAGA,mBAAO,MAAM;AAAA,UACf;AAIA,gBAAM,iBAGF;AAAA,YACF,8BAAc,IAAA;AAAA,YACd,SAAS;AAAA,UAAA;AAGX,gBAAM,oCAAoC,CACxC,YACG;AACH,kBAAM,iBAAiB;AAAA,cACrB;AAAA,cACA;AAAA,cACA;AAAA,YAAA;AAEF,kCAAsB,gBAAgB,gBAAgB;AAAA,UACxD;AAIA,gBAAM,gBAAgB,CAAC,MAAc;AACnC,kBAAM,iBAAiB,eAAe;AACtC,kBAAM,mBAAmB,iBACrB,wBAAwB,cAAc,IACtC;AAEJ,kBAAM,kBAAkB,MAAM,KAAK,GAAG,gBAAgB;AACtD,kBAAM,cACJ,gBAAgB,IAAI,CAAC,QAAQ;AAC3B,qBAAO,EAAE,MAAM,UAAU,KAAK,OAAO,WAAW,IAAI,GAAG,EAAA;AAAA,YACzD,CAAC;AACH,8CAAkC,WAAW;AAAA,UAC/C;AAIA,wBAAc,SAAS,KAAK;AAE5B,gBAAM,qBAAqB,CACzB,YACG;AAIH,kBAAM,kBAAkB,aAAa,OAAO;AAC5C,kBAAM,kBAAkB;AAAA,cACtB;AAAA,cACA;AAAA,cACA,eAAe;AAAA,YAAA;AAEjB,kCAAsB,iBAAiB,gBAAgB;AAAA,UACzD;AAIA,gBAAM,cAAc,WAAW,iBAAiB,oBAAoB;AAAA,YAClE;AAAA,UAAA,CACD;AAED,iBAAO;AAAA,QACT;AAEA,cAAM,qBAAqB,CACzB,oBACG;AACH,cAAI;AACJ,cAAI,gBAAgB,IAAI,YAAY,GAAG;AACrC,0BAAc,2BAA2B,eAAe;AAAA,UAC1D,WACE,OAAO,OAAO,+BAA+B,YAAY,GACzD;AACA,0BAAc,0BAA0B,eAAe;AAAA,UACzD,OAAO;AACL,0BAAc,sBAAsB,eAAe;AAAA,UACrD;AACA,+BAAqB,IAAI,WAAW;AAAA,QACtC;AAEA,YAAI,aAAa;AAEf,gBAAM,kBAAkB;AAAA,YACtB;AAAA,YACA;AAAA,UAAA;AAGF,cAAI,iBAAiB;AAEnB,+BAAmB,eAAe;AAAA,UACpC,OAAO;AAGL,kBAAM,IAAI;AAAA,cACR,uEAAuE,YAAY;AAAA,YAAA;AAAA,UAGvF;AAAA,QACF,OAAO;AAEL,6BAAA;AAAA,QACF;AAAA,MACF,CAAC;AAED,mCAA6B;AAG7B,oBAAA;AAGA,aAAO,MAAM;AACX,6BAAqB,QAAQ,CAAC,gBAAgB,YAAA,CAAa;AAI3D,qBAAa;AACb,sBAAc;AACd,wBAAgB;AAChB,sCAA8B;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAIF,SAAO;AAAA,IACL;AAAA,IACA,QACE,OAAO,WAAW,CAAC,SAAS,WAAW,IAAI,IAAI;AAAA,IACjD;AAAA,IACA;AAAA,IACA,QAAQ,OAAO,UAAU;AAAA;AAAA,IACzB,QAAQ,OAAO;AAAA,IACf,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,UAAU,OAAO;AAAA,IACjB,WAAW,OAAO;AAAA,EAAA;AAEtB;AAsDO,SAAS,0BAKd,eAG8C;AAE9C,MAAI,OAAO,kBAAkB,YAAY;AAEvC,UAAM,SAAuD;AAAA,MAC3D,OAAO;AAAA,IAAA;AAIT,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB,OAAO;AAAA,EACzC,OAAO;AAEL,UAAM,SAAS;AAIf,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,OAAO,OAAO;AAAA,IAAA,CACf;AAAA,EACH;AACF;AAMA,SAAS,yBAIP,SAC8C;AAE9C,SAAO,iBAAiB,OAAc;AAKxC;AAKA,SAAS,mBACP,OACA,SACA,QACA;AACA,QAAM,gBAAwC,CAAA;AAC9C,aAAW,UAAU,SAAS;AAC5B,UAAM,MAAM,OAAO,OAAO,KAAK;AAC/B,QAAI,OAAO,SAAS,UAAU;AAC5B,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,WAAW,OAAO,SAAS,UAAU;AACnC,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,aAAa,GAAG,EAAE,CAAC;AACpD,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,KAAK,GAAG,CAAC,CAAC;AAAA,IAC7C,OAAO;AAEL,oBAAc,KAAK,CAAC,CAAC,KAAK,OAAO,K
AAK,GAAG,EAAE,CAAC;AAAA,IAC9C;AAAA,EACF;AACA,QAAM,SAAS,IAAI,SAAS,aAAa,CAAC;AAC5C;AAOA,SAAS,4BACP,OAC2C;AAC3C,QAAM,cAAmC,CAAA;AAGzC,WAAS,kBAAkB,QAAa;AACtC,QAAI,OAAO,SAAS,iBAAiB;AACnC,kBAAY,OAAO,WAAW,EAAE,IAAI,OAAO;AAAA,IAC7C,WAAW,OAAO,SAAS,YAAY;AAErC,uBAAiB,OAAO,KAAK;AAAA,IAC/B;AAAA,EACF;AAGA,WAAS,iBAAiB,GAAQ;AAEhC,QAAI,EAAE,MAAM;AACV,wBAAkB,EAAE,IAAI;AAAA,IAC1B;AAGA,QAAI,EAAE,QAAQ,MAAM,QAAQ,EAAE,IAAI,GAAG;AACnC,iBAAW,cAAc,EAAE,MAAM;AAC/B,YAAI,WAAW,MAAM;AACnB,4BAAkB,WAAW,IAAI;AAAA,QACnC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,mBAAiB,KAAK;AAEtB,SAAO;AACT;AAaA,SAAS,oBACP,cACA,OACoB;;AAEpB,QACE,WAAM,SAAN,mBAAY,UAAS,qBACrB,WAAM,KAAK,eAAX,mBAAuB,QAAO,cAC9B;AACA,WAAO,MAAM,KAAK;AAAA,EACpB;AAGA,MAAI,MAAM,MAAM;AACd,eAAW,cAAc,MAAM,MAAM;AACnC,YACE,gBAAW,SAAX,mBAAiB,UAAS,qBAC1B,gBAAW,KAAK,eAAhB,mBAA4B,QAAO,cACnC;AACA,eAAO,WAAW,KAAK;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,UAAU,gBACR,SACA,YACA,SACA;AACA,aAAW,UAAU,SAAS;AAC5B,YAAQ,SAAS,IAAI,OAAO,GAAG;AAE/B,QAAI,CAAC,QAAQ,SAAS;AACpB,cAAQ,UAAU,OAAO;AAAA,IAC3B,WAAW,WAAW,QAAQ,SAAS,OAAO,KAAK,IAAI,GAAG;AACxD,cAAQ,UAAU,OAAO;AAAA,IAC3B;AAEA,UAAM;AAAA,EACR;AACF;AAGA,UAAU,aAIR,SACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,cAAA;AACvD,YAAM,EAAE,MAAM,UAAU,KAAK,OAAO,KAAK,OAAO,OAAO,MAAA;AAAA,IACzD,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,UAAU,cAIR,SACA,GACmC;AACnC,aAAW,UAAU,SAAS;AAC5B,QAAI,EAAE,MAAM,GAAG;AACb,YAAM;AAAA,IACR;AAAA,EACF;AACF;AASA,UAAU,iCAIR,SACA,YACA,UACmC;AACnC,SAAO,cAAc,SAAS,CAAC,WAAW;AACxC,WAAO,CAAC,YAAY,WAAW,OAAO,OAAO,QAAQ,KAAK;AAAA,EAC5D,CAAC;AACH;"}
+ {"version":3,"file":"live-query-collection.js","sources":["../../../src/query/live-query-collection.ts"],"sourcesContent":["import { createCollection } from \"../collection.js\"\nimport { CollectionConfigBuilder } from \"./live/collection-config-builder.js\"\nimport type { LiveQueryCollectionConfig } from \"./live/types.js\"\nimport type { InitialQueryBuilder, QueryBuilder } from \"./builder/index.js\"\nimport type { Collection } from \"../collection.js\"\nimport type { CollectionConfig, UtilsRecord } from \"../types.js\"\nimport type { Context, GetResult } from \"./builder/types.js\"\n\n/**\n * Creates live query collection options for use with createCollection\n *\n * @example\n * ```typescript\n * const options = liveQueryCollectionOptions({\n * // id is optional - will auto-generate if not provided\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => eq(post.published, true))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * content: post.content,\n * })),\n * // getKey is optional - will use stream key if not provided\n * })\n *\n * const collection = createCollection(options)\n * ```\n *\n * @param config - Configuration options for the live query collection\n * @returns Collection options that can be passed to createCollection\n */\nexport function liveQueryCollectionOptions<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n config: LiveQueryCollectionConfig<TContext, TResult>\n): CollectionConfig<TResult> {\n const collectionConfigBuilder = new CollectionConfigBuilder<\n TContext,\n TResult\n >(config)\n return collectionConfigBuilder.getConfig()\n}\n\n/**\n * Creates a live query collection directly\n *\n * @example\n * ```typescript\n * // Minimal usage - just pass a query function\n * const activeUsers = createLiveQueryCollection(\n * (q) => q\n * .from({ user: usersCollection })\n * .where(({ user }) => eq(user.active, true))\n * .select(({ user }) => ({ id: user.id, name: user.name }))\n * )\n *\n * // Full configuration with custom options\n * const searchResults = createLiveQueryCollection({\n * id: \"search-results\", // Custom ID (auto-generated if omitted)\n * query: (q) => q\n * .from({ post: postsCollection })\n * .where(({ post }) => like(post.title, `%${searchTerm}%`))\n * .select(({ post }) => ({\n * id: post.id,\n * title: post.title,\n * excerpt: post.excerpt,\n * })),\n * getKey: (item) => item.id, // Custom key function (uses stream key if omitted)\n * utils: {\n * updateSearchTerm: (newTerm: string) => {\n * // Custom utility functions\n * }\n * }\n * })\n * ```\n */\n\n// Overload 1: Accept just the query function\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n>(\n query: (q: InitialQueryBuilder) => QueryBuilder<TContext>\n): Collection<TResult, string | number, {}>\n\n// Overload 2: Accept full config object with optional utilities\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n config: LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils>\n\n// Implementation\nexport function createLiveQueryCollection<\n TContext extends Context,\n TResult extends object = GetResult<TContext>,\n TUtils extends UtilsRecord = {},\n>(\n configOrQuery:\n | (LiveQueryCollectionConfig<TContext, TResult> & { utils?: TUtils })\n | ((q: InitialQueryBuilder) 
=> QueryBuilder<TContext>)\n): Collection<TResult, string | number, TUtils> {\n // Determine if the argument is a function (query) or a config object\n if (typeof configOrQuery === `function`) {\n // Simple query function case\n const config: LiveQueryCollectionConfig<TContext, TResult> = {\n query: configOrQuery as (\n q: InitialQueryBuilder\n ) => QueryBuilder<TContext>,\n }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection(options)\n } else {\n // Config object case\n const config = configOrQuery as LiveQueryCollectionConfig<\n TContext,\n TResult\n > & { utils?: TUtils }\n const options = liveQueryCollectionOptions<TContext, TResult>(config)\n return bridgeToCreateCollection({\n ...options,\n utils: config.utils,\n })\n }\n}\n\n/**\n * Bridge function that handles the type compatibility between query2's TResult\n * and core collection's ResolveType without exposing ugly type assertions to users\n */\nfunction bridgeToCreateCollection<\n TResult extends object,\n TUtils extends UtilsRecord = {},\n>(\n options: CollectionConfig<TResult> & { utils?: TUtils }\n): Collection<TResult, string | number, TUtils> {\n // This is the only place we need a type assertion, hidden from user API\n return createCollection(options as any) as unknown as Collection<\n TResult,\n string | number,\n TUtils\n >\n}\n"],"names":[],"mappings":";;AAgCO,SAAS,2BAId,QAC2B;AAC3B,QAAM,0BAA0B,IAAI,wBAGlC,MAAM;AACR,SAAO,wBAAwB,UAAA;AACjC;AAsDO,SAAS,0BAKd,eAG8C;AAE9C,MAAI,OAAO,kBAAkB,YAAY;AAEvC,UAAM,SAAuD;AAAA,MAC3D,OAAO;AAAA,IAAA;AAIT,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB,OAAO;AAAA,EACzC,OAAO;AAEL,UAAM,SAAS;AAIf,UAAM,UAAU,2BAA8C,MAAM;AACpE,WAAO,yBAAyB;AAAA,MAC9B,GAAG;AAAA,MACH,OAAO,OAAO;AAAA,IAAA,CACf;AAAA,EACH;AACF;AAMA,SAAS,yBAIP,SAC8C;AAE9C,SAAO,iBAAiB,OAAc;AAKxC;"}
package/dist/esm/query/optimizer.js
@@ -1,6 +1,6 @@
  import { deepEquals } from "../utils.js";
  import { CannotCombineEmptyExpressionListError } from "../errors.js";
- import { QueryRef, CollectionRef, Func } from "./ir.js";
+ import { QueryRef, getWhereExpression, isResidualWhere, CollectionRef, Func, createResidualWhere } from "./ir.js";
  import { isConvertibleToCollectionFilter } from "./compiler/expressions.js";
  function optimizeQuery(query) {
  const collectionWhereClauses = extractCollectionWhereClauses(query);
@@ -76,12 +76,25 @@ function applySingleLevelOptimization(query) {
  if (!query.join || query.join.length === 0) {
  return query;
  }
- const splitWhereClauses = splitAndClauses(query.where);
+ const nonResidualWhereClauses = query.where.filter(
+ (where) => !isResidualWhere(where)
+ );
+ const splitWhereClauses = splitAndClauses(nonResidualWhereClauses);
  const analyzedClauses = splitWhereClauses.map(
  (clause) => analyzeWhereClause(clause)
  );
  const groupedClauses = groupWhereClauses(analyzedClauses);
- return applyOptimizations(query, groupedClauses);
+ const optimizedQuery = applyOptimizations(query, groupedClauses);
+ const residualWhereClauses = query.where.filter(
+ (where) => isResidualWhere(where)
+ );
+ if (residualWhereClauses.length > 0) {
+ optimizedQuery.where = [
+ ...optimizedQuery.where || [],
+ ...residualWhereClauses
+ ];
+ }
+ return optimizedQuery;
  }
  function removeRedundantSubqueries(query) {
  var _a;
@@ -114,18 +127,23 @@ function isRedundantSubquery(query) {
  }
  function splitAndClauses(whereClauses) {
  const result = [];
- for (const clause of whereClauses) {
- if (clause.type === `func` && clause.name === `and`) {
- const splitArgs = splitAndClauses(
- clause.args
- );
- result.push(...splitArgs);
- } else {
- result.push(clause);
- }
+ for (const whereClause of whereClauses) {
+ const clause = getWhereExpression(whereClause);
+ result.push(...splitAndClausesRecursive(clause));
  }
  return result;
  }
+ function splitAndClausesRecursive(clause) {
+ if (clause.type === `func` && clause.name === `and`) {
+ const result = [];
+ for (const arg of clause.args) {
+ result.push(...splitAndClausesRecursive(arg));
+ }
+ return result;
+ } else {
+ return [clause];
+ }
+ }
  function analyzeWhereClause(clause) {
  const touchedSources = /* @__PURE__ */ new Set();
  function collectSources(expr) {
@@ -201,9 +219,14 @@ function applyOptimizations(query, groupedClauses) {
  if (groupedClauses.multiSource) {
  remainingWhereClauses.push(groupedClauses.multiSource);
  }
+ const hasOuterJoins = query.join && query.join.some(
+ (join) => join.type === `left` || join.type === `right` || join.type === `full`
+ );
  for (const [source, clause] of groupedClauses.singleSource) {
  if (!actuallyOptimized.has(source)) {
  remainingWhereClauses.push(clause);
+ } else if (hasOuterJoins) {
+ remainingWhereClauses.push(createResidualWhere(clause));
  }
  }
  const optimizedQuery = {
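Context for the optimizer hunks: the likely motivation is that pushing a single-source WHERE clause down into a collection subscription is not sufficient on its own when the query contains a LEFT/RIGHT/FULL join, since the predicate still has to run against the joined rows (including rows with no match on the pushed-down side) to keep outer-join semantics correct. The `createResidualWhere` / `isResidualWhere` / `getWhereExpression` helpers now imported from `./ir.js` mark such pushed-down clauses so they stay on the query but are skipped by later optimization passes. A rough illustrative sketch of that marking scheme, using simplified stand-in types rather than the package's real IR:

```typescript
// Illustrative stand-ins only; the real helpers and IR types live in src/query/ir.ts.
type BasicExpression = { type: string; name: string; args: Array<unknown> }
type Where = BasicExpression | { residual: true; expression: BasicExpression }

// Wrap a pushed-down clause so it is kept on the query as a residual filter.
const createResidualWhere = (expression: BasicExpression): Where => ({
  residual: true,
  expression,
})

// Detect residual clauses so later optimization passes can skip them.
const isResidualWhere = (
  w: Where
): w is { residual: true; expression: BasicExpression } =>
  "residual" in w && w.residual === true

// Unwrap either form back to the underlying expression.
const getWhereExpression = (w: Where): BasicExpression =>
  isResidualWhere(w) ? w.expression : w

// Mirrors the pattern in the diff: optimize only the non-residual clauses,
// then re-attach the residual ones to the optimized query untouched.
const where: Array<Where> = [] // ...clauses collected from the query IR
const toOptimize = where.filter((w) => !isResidualWhere(w)).map(getWhereExpression)
const residual = where.filter((w) => isResidualWhere(w))
```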