@tanstack/db 0.1.5 → 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/dist/cjs/collection.cjs +7 -4
  2. package/dist/cjs/collection.cjs.map +1 -1
  3. package/dist/cjs/collection.d.cts +6 -5
  4. package/dist/cjs/query/compiler/group-by.cjs +4 -2
  5. package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
  6. package/dist/cjs/query/compiler/index.cjs +2 -1
  7. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  8. package/dist/cjs/query/index.d.cts +2 -1
  9. package/dist/cjs/query/ir.cjs +16 -0
  10. package/dist/cjs/query/ir.cjs.map +1 -1
  11. package/dist/cjs/query/ir.d.cts +24 -1
  12. package/dist/cjs/query/live/collection-config-builder.cjs +267 -0
  13. package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -0
  14. package/dist/cjs/query/live/collection-config-builder.d.cts +36 -0
  15. package/dist/cjs/query/live/collection-subscriber.cjs +263 -0
  16. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -0
  17. package/dist/cjs/query/live/collection-subscriber.d.cts +28 -0
  18. package/dist/cjs/query/live/types.d.cts +77 -0
  19. package/dist/cjs/query/live-query-collection.cjs +3 -417
  20. package/dist/cjs/query/live-query-collection.cjs.map +1 -1
  21. package/dist/cjs/query/live-query-collection.d.cts +1 -58
  22. package/dist/cjs/query/optimizer.cjs +34 -11
  23. package/dist/cjs/query/optimizer.cjs.map +1 -1
  24. package/dist/cjs/types.d.cts +12 -0
  25. package/dist/esm/collection.d.ts +6 -5
  26. package/dist/esm/collection.js +7 -4
  27. package/dist/esm/collection.js.map +1 -1
  28. package/dist/esm/query/compiler/group-by.js +5 -3
  29. package/dist/esm/query/compiler/group-by.js.map +1 -1
  30. package/dist/esm/query/compiler/index.js +3 -2
  31. package/dist/esm/query/compiler/index.js.map +1 -1
  32. package/dist/esm/query/index.d.ts +2 -1
  33. package/dist/esm/query/ir.d.ts +24 -1
  34. package/dist/esm/query/ir.js +17 -1
  35. package/dist/esm/query/ir.js.map +1 -1
  36. package/dist/esm/query/live/collection-config-builder.d.ts +36 -0
  37. package/dist/esm/query/live/collection-config-builder.js +267 -0
  38. package/dist/esm/query/live/collection-config-builder.js.map +1 -0
  39. package/dist/esm/query/live/collection-subscriber.d.ts +28 -0
  40. package/dist/esm/query/live/collection-subscriber.js +263 -0
  41. package/dist/esm/query/live/collection-subscriber.js.map +1 -0
  42. package/dist/esm/query/live/types.d.ts +77 -0
  43. package/dist/esm/query/live-query-collection.d.ts +1 -58
  44. package/dist/esm/query/live-query-collection.js +3 -417
  45. package/dist/esm/query/live-query-collection.js.map +1 -1
  46. package/dist/esm/query/optimizer.js +35 -12
  47. package/dist/esm/query/optimizer.js.map +1 -1
  48. package/dist/esm/types.d.ts +12 -0
  49. package/package.json +1 -1
  50. package/src/collection.ts +17 -8
  51. package/src/query/compiler/group-by.ts +5 -3
  52. package/src/query/compiler/index.ts +3 -2
  53. package/src/query/index.ts +2 -1
  54. package/src/query/ir.ts +48 -1
  55. package/src/query/live/collection-config-builder.ts +437 -0
  56. package/src/query/live/collection-subscriber.ts +460 -0
  57. package/src/query/live/types.ts +93 -0
  58. package/src/query/live-query-collection.ts +8 -791
  59. package/src/query/optimizer.ts +66 -18
  60. package/src/types.ts +74 -0
@@ -1,97 +1,10 @@
1
- import { D2, MultiSet, output } from "@tanstack/db-ivm"
2
1
  import { createCollection } from "../collection.js"
3
- import { createFilterFunctionFromExpression } from "../change-events.js"
4
- import { compileQuery } from "./compiler/index.js"
5
- import { buildQuery, getQueryIR } from "./builder/index.js"
6
- import { convertToBasicExpression } from "./compiler/expressions.js"
7
- import type { OrderByOptimizationInfo } from "./compiler/order-by.js"
2
+ import { CollectionConfigBuilder } from "./live/collection-config-builder.js"
3
+ import type { LiveQueryCollectionConfig } from "./live/types.js"
8
4
  import type { InitialQueryBuilder, QueryBuilder } from "./builder/index.js"
9
5
  import type { Collection } from "../collection.js"
10
- import type {
11
- ChangeMessage,
12
- CollectionConfig,
13
- KeyedStream,
14
- ResultStream,
15
- SyncConfig,
16
- UtilsRecord,
17
- } from "../types.js"
6
+ import type { CollectionConfig, UtilsRecord } from "../types.js"
18
7
  import type { Context, GetResult } from "./builder/types.js"
19
- import type { MultiSetArray, RootStreamBuilder } from "@tanstack/db-ivm"
20
- import type { BasicExpression } from "./ir.js"
21
- import type { LazyCollectionCallbacks } from "./compiler/joins.js"
22
-
23
- // Global counter for auto-generated collection IDs
24
- let liveQueryCollectionCounter = 0
25
-
26
- /**
27
- * Configuration interface for live query collection options
28
- *
29
- * @example
30
- * ```typescript
31
- * const config: LiveQueryCollectionConfig<any, any> = {
32
- * // id is optional - will auto-generate "live-query-1", "live-query-2", etc.
33
- * query: (q) => q
34
- * .from({ comment: commentsCollection })
35
- * .join(
36
- * { user: usersCollection },
37
- * ({ comment, user }) => eq(comment.user_id, user.id)
38
- * )
39
- * .where(({ comment }) => eq(comment.active, true))
40
- * .select(({ comment, user }) => ({
41
- * id: comment.id,
42
- * content: comment.content,
43
- * authorName: user.name,
44
- * })),
45
- * // getKey is optional - defaults to using stream key
46
- * getKey: (item) => item.id,
47
- * }
48
- * ```
49
- */
50
- export interface LiveQueryCollectionConfig<
51
- TContext extends Context,
52
- TResult extends object = GetResult<TContext> & object,
53
- > {
54
- /**
55
- * Unique identifier for the collection
56
- * If not provided, defaults to `live-query-${number}` with auto-incrementing number
57
- */
58
- id?: string
59
-
60
- /**
61
- * Query builder function that defines the live query
62
- */
63
- query:
64
- | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)
65
- | QueryBuilder<TContext>
66
-
67
- /**
68
- * Function to extract the key from result items
69
- * If not provided, defaults to using the key from the D2 stream
70
- */
71
- getKey?: (item: TResult) => string | number
72
-
73
- /**
74
- * Optional schema for validation
75
- */
76
- schema?: CollectionConfig<TResult>[`schema`]
77
-
78
- /**
79
- * Optional mutation handlers
80
- */
81
- onInsert?: CollectionConfig<TResult>[`onInsert`]
82
- onUpdate?: CollectionConfig<TResult>[`onUpdate`]
83
- onDelete?: CollectionConfig<TResult>[`onDelete`]
84
-
85
- /**
86
- * Start sync / the query immediately
87
- */
88
- startSync?: boolean
89
-
90
- /**
91
- * GC time for the collection
92
- */
93
- gcTime?: number
94
- }
95
8
 
96
9
  /**
97
10
  * Creates live query collection options for use with createCollection
@@ -123,533 +36,11 @@ export function liveQueryCollectionOptions<
123
36
  >(
124
37
  config: LiveQueryCollectionConfig<TContext, TResult>
125
38
  ): CollectionConfig<TResult> {
126
- // Generate a unique ID if not provided
127
- const id = config.id || `live-query-${++liveQueryCollectionCounter}`
128
-
129
- // Build the query using the provided query builder function or instance
130
- const query =
131
- typeof config.query === `function`
132
- ? buildQuery<TContext>(config.query)
133
- : getQueryIR(config.query)
134
-
135
- // WeakMap to store the keys of the results so that we can retreve them in the
136
- // getKey function
137
- const resultKeys = new WeakMap<object, unknown>()
138
-
139
- // WeakMap to store the orderBy index for each result
140
- const orderByIndices = new WeakMap<object, string>()
141
-
142
- // Create compare function for ordering if the query has orderBy
143
- const compare =
144
- query.orderBy && query.orderBy.length > 0
145
- ? (val1: TResult, val2: TResult): number => {
146
- // Use the orderBy index stored in the WeakMap
147
- const index1 = orderByIndices.get(val1)
148
- const index2 = orderByIndices.get(val2)
149
-
150
- // Compare fractional indices lexicographically
151
- if (index1 && index2) {
152
- if (index1 < index2) {
153
- return -1
154
- } else if (index1 > index2) {
155
- return 1
156
- } else {
157
- return 0
158
- }
159
- }
160
-
161
- // Fallback to no ordering if indices are missing
162
- return 0
163
- }
164
- : undefined
165
-
166
- const collections = extractCollectionsFromQuery(query)
167
-
168
- const allCollectionsReady = () => {
169
- return Object.values(collections).every((collection) =>
170
- collection.isReady()
171
- )
172
- }
173
-
174
- const allCollectionsReadyOrInitialCommit = () => {
175
- return Object.values(collections).every(
176
- (collection) =>
177
- collection.status === `ready` || collection.status === `initialCommit`
178
- )
179
- }
180
-
181
- let graphCache: D2 | undefined
182
- let inputsCache: Record<string, RootStreamBuilder<unknown>> | undefined
183
- let pipelineCache: ResultStream | undefined
184
- let collectionWhereClausesCache:
185
- | Map<string, BasicExpression<boolean>>
186
- | undefined
187
-
188
- // Map of collection IDs to functions that load keys for that lazy collection
189
- const lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}
190
- // Set of collection IDs that are lazy collections
191
- const lazyCollections = new Set<string>()
192
- // Set of collection IDs that include an optimizable ORDER BY clause
193
- const optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> =
194
- {}
195
-
196
- const compileBasePipeline = () => {
197
- graphCache = new D2()
198
- inputsCache = Object.fromEntries(
199
- Object.entries(collections).map(([key]) => [
200
- key,
201
- graphCache!.newInput<any>(),
202
- ])
203
- )
204
-
205
- // Compile the query and get both pipeline and collection WHERE clauses
206
- ;({
207
- pipeline: pipelineCache,
208
- collectionWhereClauses: collectionWhereClausesCache,
209
- } = compileQuery(
210
- query,
211
- inputsCache as Record<string, KeyedStream>,
212
- collections,
213
- lazyCollectionsCallbacks,
214
- lazyCollections,
215
- optimizableOrderByCollections
216
- ))
217
- }
218
-
219
- const maybeCompileBasePipeline = () => {
220
- if (!graphCache || !inputsCache || !pipelineCache) {
221
- compileBasePipeline()
222
- }
223
- return {
224
- graph: graphCache!,
225
- inputs: inputsCache!,
226
- pipeline: pipelineCache!,
227
- }
228
- }
229
-
230
- // Compile the base pipeline once initially
231
- // This is done to ensure that any errors are thrown immediately and synchronously
232
- compileBasePipeline()
233
-
234
- // Create the sync configuration
235
- const sync: SyncConfig<TResult> = {
236
- rowUpdateMode: `full`,
237
- sync: ({ begin, write, commit, markReady, collection: theCollection }) => {
238
- const { graph, inputs, pipeline } = maybeCompileBasePipeline()
239
- let messagesCount = 0
240
- pipeline.pipe(
241
- output((data) => {
242
- const messages = data.getInner()
243
- messagesCount += messages.length
244
-
245
- begin()
246
- messages
247
- .reduce((acc, [[key, tupleData], multiplicity]) => {
248
- // All queries now consistently return [value, orderByIndex] format
249
- // where orderByIndex is undefined for queries without ORDER BY
250
- const [value, orderByIndex] = tupleData as [
251
- TResult,
252
- string | undefined,
253
- ]
254
-
255
- const changes = acc.get(key) || {
256
- deletes: 0,
257
- inserts: 0,
258
- value,
259
- orderByIndex,
260
- }
261
- if (multiplicity < 0) {
262
- changes.deletes += Math.abs(multiplicity)
263
- } else if (multiplicity > 0) {
264
- changes.inserts += multiplicity
265
- changes.value = value
266
- changes.orderByIndex = orderByIndex
267
- }
268
- acc.set(key, changes)
269
- return acc
270
- }, new Map<unknown, { deletes: number; inserts: number; value: TResult; orderByIndex: string | undefined }>())
271
- .forEach((changes, rawKey) => {
272
- const { deletes, inserts, value, orderByIndex } = changes
273
-
274
- // Store the key of the result so that we can retrieve it in the
275
- // getKey function
276
- resultKeys.set(value, rawKey)
277
-
278
- // Store the orderBy index if it exists
279
- if (orderByIndex !== undefined) {
280
- orderByIndices.set(value, orderByIndex)
281
- }
282
-
283
- // Simple singular insert.
284
- if (inserts && deletes === 0) {
285
- write({
286
- value,
287
- type: `insert`,
288
- })
289
- } else if (
290
- // Insert & update(s) (updates are a delete & insert)
291
- inserts > deletes ||
292
- // Just update(s) but the item is already in the collection (so
293
- // was inserted previously).
294
- (inserts === deletes &&
295
- theCollection.has(rawKey as string | number))
296
- ) {
297
- write({
298
- value,
299
- type: `update`,
300
- })
301
- // Only delete is left as an option
302
- } else if (deletes > 0) {
303
- write({
304
- value,
305
- type: `delete`,
306
- })
307
- } else {
308
- throw new Error(
309
- `This should never happen ${JSON.stringify(changes)}`
310
- )
311
- }
312
- })
313
- commit()
314
- })
315
- )
316
-
317
- graph.finalize()
318
-
319
- let subscribedToAllCollections = false
320
-
321
- // The callback function is called after the graph has run.
322
- // This gives the callback a chance to load more data if needed,
323
- // that's used to optimize orderBy operators that set a limit,
324
- // in order to load some more data if we still don't have enough rows after the pipeline has run.
325
- // That can happend because even though we load N rows, the pipeline might filter some of these rows out
326
- // causing the orderBy operator to receive less than N rows or even no rows at all.
327
- // So this callback would notice that it doesn't have enough rows and load some more.
328
- // The callback returns a boolean, when it's true it's done loading data and we can mark the collection as ready.
329
- const maybeRunGraph = (callback?: () => boolean) => {
330
- // We only run the graph if all the collections are ready
331
- if (
332
- allCollectionsReadyOrInitialCommit() &&
333
- subscribedToAllCollections
334
- ) {
335
- graph.run()
336
- const ready = callback?.() ?? true
337
- // On the initial run, we may need to do an empty commit to ensure that
338
- // the collection is initialized
339
- if (messagesCount === 0) {
340
- begin()
341
- commit()
342
- }
343
- // Mark the collection as ready after the first successful run
344
- if (ready && allCollectionsReady()) {
345
- markReady()
346
- }
347
- }
348
- }
349
-
350
- // Unsubscribe callbacks
351
- const unsubscribeCallbacks = new Set<() => void>()
352
-
353
- // Subscribe to all collections, using WHERE clause optimization when available
354
- Object.entries(collections).forEach(([collectionId, collection]) => {
355
- const input = inputs[collectionId]!
356
- const collectionAlias = findCollectionAlias(collectionId, query)
357
- const whereClause =
358
- collectionAlias && collectionWhereClausesCache
359
- ? collectionWhereClausesCache.get(collectionAlias)
360
- : undefined
361
-
362
- const sendChangesToPipeline = (
363
- changes: Iterable<ChangeMessage<any, string | number>>,
364
- callback?: () => boolean
365
- ) => {
366
- sendChangesToInput(input, changes, collection.config.getKey)
367
- maybeRunGraph(callback)
368
- }
369
-
370
- // Wraps the sendChangesToPipeline function
371
- // in order to turn `update`s into `insert`s
372
- // for keys that have not been sent to the pipeline yet
373
- // and filter out deletes for keys that have not been sent
374
- const sendVisibleChangesToPipeline = (
375
- changes: Array<ChangeMessage<any, string | number>>,
376
- loadedInitialState: boolean,
377
- sentKeys: Set<string | number>
378
- ) => {
379
- if (loadedInitialState) {
380
- // There was no index for the join key
381
- // so we loaded the initial state
382
- // so we can safely assume that the pipeline has seen all keys
383
- return sendChangesToPipeline(changes)
384
- }
385
-
386
- const newChanges = []
387
- for (const change of changes) {
388
- let newChange = change
389
- if (!sentKeys.has(change.key)) {
390
- if (change.type === `update`) {
391
- newChange = { ...change, type: `insert` }
392
- } else if (change.type === `delete`) {
393
- // filter out deletes for keys that have not been sent
394
- continue
395
- }
396
- }
397
- newChanges.push(newChange)
398
- }
399
-
400
- return sendChangesToPipeline(newChanges)
401
- }
402
-
403
- const loadKeys = (
404
- keys: Iterable<string | number>,
405
- sentKeys: Set<string | number>,
406
- filterFn: (item: object) => boolean
407
- ) => {
408
- for (const key of keys) {
409
- // Only load the key once
410
- if (sentKeys.has(key)) continue
411
-
412
- const value = collection.get(key)
413
- if (value !== undefined && filterFn(value)) {
414
- sentKeys.add(key)
415
- sendChangesToPipeline([{ type: `insert`, key, value }])
416
- }
417
- }
418
- }
419
-
420
- const subscribeToAllChanges = (
421
- whereExpression: BasicExpression<boolean> | undefined
422
- ) => {
423
- const unsubscribe = collection.subscribeChanges(
424
- sendChangesToPipeline,
425
- {
426
- includeInitialState: true,
427
- ...(whereExpression ? { whereExpression } : undefined),
428
- }
429
- )
430
- return unsubscribe
431
- }
432
-
433
- // Subscribes to all changes but without the initial state
434
- // such that we can load keys from the initial state on demand
435
- // based on the matching keys from the main collection in the join
436
- const subscribeToMatchingChanges = (
437
- whereExpression: BasicExpression<boolean> | undefined
438
- ) => {
439
- let loadedInitialState = false
440
- const sentKeys = new Set<string | number>()
441
-
442
- const sendVisibleChanges = (
443
- changes: Array<ChangeMessage<any, string | number>>
444
- ) => {
445
- sendVisibleChangesToPipeline(changes, loadedInitialState, sentKeys)
446
- }
447
-
448
- const unsubscribe = collection.subscribeChanges(sendVisibleChanges, {
449
- whereExpression,
450
- })
451
-
452
- // Create a function that loads keys from the collection
453
- // into the query pipeline on demand
454
- const filterFn = whereExpression
455
- ? createFilterFunctionFromExpression(whereExpression)
456
- : () => true
457
- const loadKs = (keys: Set<string | number>) => {
458
- return loadKeys(keys, sentKeys, filterFn)
459
- }
460
-
461
- // Store the functions to load keys and load initial state in the `lazyCollectionsCallbacks` map
462
- // This is used by the join operator to dynamically load matching keys from the lazy collection
463
- // or to get the full initial state of the collection if there's no index for the join key
464
- lazyCollectionsCallbacks[collectionId] = {
465
- loadKeys: loadKs,
466
- loadInitialState: () => {
467
- // Make sure we only load the initial state once
468
- if (loadedInitialState) return
469
- loadedInitialState = true
470
-
471
- const changes = collection.currentStateAsChanges({
472
- whereExpression,
473
- })
474
- sendChangesToPipeline(changes)
475
- },
476
- }
477
- return unsubscribe
478
- }
479
-
480
- const subscribeToOrderedChanges = (
481
- whereExpression: BasicExpression<boolean> | undefined
482
- ) => {
483
- const {
484
- offset,
485
- limit,
486
- comparator,
487
- index,
488
- dataNeeded,
489
- valueExtractorForRawRow,
490
- } = optimizableOrderByCollections[collectionId]!
491
-
492
- if (!dataNeeded) {
493
- // This should never happen because the topK operator should always set the size callback
494
- // which in turn should lead to the orderBy operator setting the dataNeeded callback
495
- throw new Error(
496
- `Missing dataNeeded callback for collection ${collectionId}`
497
- )
498
- }
499
-
500
- // This function is called by maybeRunGraph
501
- // after each iteration of the query pipeline
502
- // to ensure that the orderBy operator has enough data to work with
503
- const loadMoreIfNeeded = () => {
504
- // `dataNeeded` probes the orderBy operator to see if it needs more data
505
- // if it needs more data, it returns the number of items it needs
506
- const n = dataNeeded()
507
- if (n > 0) {
508
- loadNextItems(n)
509
- }
510
-
511
- // Indicate that we're done loading data if we didn't need to load more data
512
- return n === 0
513
- }
514
-
515
- // Keep track of the keys we've sent
516
- // and also the biggest value we've sent so far
517
- const sentValuesInfo: {
518
- sentKeys: Set<string | number>
519
- biggest: any
520
- } = {
521
- sentKeys: new Set<string | number>(),
522
- biggest: undefined,
523
- }
524
-
525
- const sendChangesToPipelineWithTracking = (
526
- changes: Iterable<ChangeMessage<any, string | number>>
527
- ) => {
528
- const trackedChanges = trackSentValues(
529
- changes,
530
- comparator,
531
- sentValuesInfo
532
- )
533
- sendChangesToPipeline(trackedChanges, loadMoreIfNeeded)
534
- }
535
-
536
- // Loads the next `n` items from the collection
537
- // starting from the biggest item it has sent
538
- const loadNextItems = (n: number) => {
539
- const biggestSentRow = sentValuesInfo.biggest
540
- const biggestSentValue = biggestSentRow
541
- ? valueExtractorForRawRow(biggestSentRow)
542
- : biggestSentRow
543
- // Take the `n` items after the biggest sent value
544
- const nextOrderedKeys = index.take(n, biggestSentValue)
545
- const nextInserts: Array<ChangeMessage<any, string | number>> =
546
- nextOrderedKeys.map((key) => {
547
- return { type: `insert`, key, value: collection.get(key) }
548
- })
549
- sendChangesToPipelineWithTracking(nextInserts)
550
- }
551
-
552
- // Load the first `offset + limit` values from the index
553
- // i.e. the K items from the collection that fall into the requested range: [offset, offset + limit[
554
- loadNextItems(offset + limit)
555
-
556
- const sendChangesInRange = (
557
- changes: Iterable<ChangeMessage<any, string | number>>
558
- ) => {
559
- // Split live updates into a delete of the old value and an insert of the new value
560
- // and filter out changes that are bigger than the biggest value we've sent so far
561
- // because they can't affect the topK
562
- const splittedChanges = splitUpdates(changes)
563
- const filteredChanges = filterChangesSmallerOrEqualToMax(
564
- splittedChanges,
565
- comparator,
566
- sentValuesInfo.biggest
567
- )
568
- sendChangesToPipeline(filteredChanges, loadMoreIfNeeded)
569
- }
570
-
571
- // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far
572
- // values that are bigger don't need to be sent because they can't affect the topK
573
- const unsubscribe = collection.subscribeChanges(sendChangesInRange, {
574
- whereExpression,
575
- })
576
-
577
- return unsubscribe
578
- }
579
-
580
- const subscribeToChanges = (
581
- whereExpression?: BasicExpression<boolean>
582
- ) => {
583
- let unsubscribe: () => void
584
- if (lazyCollections.has(collectionId)) {
585
- unsubscribe = subscribeToMatchingChanges(whereExpression)
586
- } else if (
587
- Object.hasOwn(optimizableOrderByCollections, collectionId)
588
- ) {
589
- unsubscribe = subscribeToOrderedChanges(whereExpression)
590
- } else {
591
- unsubscribe = subscribeToAllChanges(whereExpression)
592
- }
593
- unsubscribeCallbacks.add(unsubscribe)
594
- }
595
-
596
- if (whereClause) {
597
- // Convert WHERE clause to BasicExpression format for collection subscription
598
- const whereExpression = convertToBasicExpression(
599
- whereClause,
600
- collectionAlias!
601
- )
602
-
603
- if (whereExpression) {
604
- // Use index optimization for this collection
605
- subscribeToChanges(whereExpression)
606
- } else {
607
- // This should not happen - if we have a whereClause but can't create whereExpression,
608
- // it indicates a bug in our optimization logic
609
- throw new Error(
610
- `Failed to convert WHERE clause to collection filter for collection '${collectionId}'. ` +
611
- `This indicates a bug in the query optimization logic.`
612
- )
613
- }
614
- } else {
615
- // No WHERE clause for this collection, use regular subscription
616
- subscribeToChanges()
617
- }
618
- })
619
-
620
- subscribedToAllCollections = true
621
-
622
- // Initial run
623
- maybeRunGraph()
624
-
625
- // Return the unsubscribe function
626
- return () => {
627
- unsubscribeCallbacks.forEach((unsubscribe) => unsubscribe())
628
-
629
- // Reset caches so a fresh graph/pipeline is compiled on next start
630
- // This avoids reusing a finalized D2 graph across GC restarts
631
- graphCache = undefined
632
- inputsCache = undefined
633
- pipelineCache = undefined
634
- collectionWhereClausesCache = undefined
635
- }
636
- },
637
- }
638
-
639
- // Return collection configuration
640
- return {
641
- id,
642
- getKey:
643
- config.getKey || ((item) => resultKeys.get(item) as string | number),
644
- sync,
645
- compare,
646
- gcTime: config.gcTime || 5000, // 5 seconds by default for live queries
647
- schema: config.schema,
648
- onInsert: config.onInsert,
649
- onUpdate: config.onUpdate,
650
- onDelete: config.onDelete,
651
- startSync: config.startSync,
652
- }
39
+ const collectionConfigBuilder = new CollectionConfigBuilder<
40
+ TContext,
41
+ TResult
42
+ >(config)
43
+ return collectionConfigBuilder.getConfig()
653
44
  }
654
45
 
655
46
  /**
@@ -754,177 +145,3 @@ function bridgeToCreateCollection<
754
145
  TUtils
755
146
  >
756
147
  }
757
-
758
- /**
759
- * Helper function to send changes to a D2 input stream
760
- */
761
- function sendChangesToInput(
762
- input: RootStreamBuilder<unknown>,
763
- changes: Iterable<ChangeMessage>,
764
- getKey: (item: ChangeMessage[`value`]) => any
765
- ) {
766
- const multiSetArray: MultiSetArray<unknown> = []
767
- for (const change of changes) {
768
- const key = getKey(change.value)
769
- if (change.type === `insert`) {
770
- multiSetArray.push([[key, change.value], 1])
771
- } else if (change.type === `update`) {
772
- multiSetArray.push([[key, change.previousValue], -1])
773
- multiSetArray.push([[key, change.value], 1])
774
- } else {
775
- // change.type === `delete`
776
- multiSetArray.push([[key, change.value], -1])
777
- }
778
- }
779
- input.sendData(new MultiSet(multiSetArray))
780
- }
781
-
782
- /**
783
- * Helper function to extract collections from a compiled query
784
- * Traverses the query IR to find all collection references
785
- * Maps collections by their ID (not alias) as expected by the compiler
786
- */
787
- function extractCollectionsFromQuery(
788
- query: any
789
- ): Record<string, Collection<any, any, any>> {
790
- const collections: Record<string, any> = {}
791
-
792
- // Helper function to recursively extract collections from a query or source
793
- function extractFromSource(source: any) {
794
- if (source.type === `collectionRef`) {
795
- collections[source.collection.id] = source.collection
796
- } else if (source.type === `queryRef`) {
797
- // Recursively extract from subquery
798
- extractFromQuery(source.query)
799
- }
800
- }
801
-
802
- // Helper function to recursively extract collections from a query
803
- function extractFromQuery(q: any) {
804
- // Extract from FROM clause
805
- if (q.from) {
806
- extractFromSource(q.from)
807
- }
808
-
809
- // Extract from JOIN clauses
810
- if (q.join && Array.isArray(q.join)) {
811
- for (const joinClause of q.join) {
812
- if (joinClause.from) {
813
- extractFromSource(joinClause.from)
814
- }
815
- }
816
- }
817
- }
818
-
819
- // Start extraction from the root query
820
- extractFromQuery(query)
821
-
822
- return collections
823
- }
824
-
825
- /**
826
- * Converts WHERE expressions from the query IR into a BasicExpression for subscribeChanges
827
- *
828
- * @param whereExpressions Array of WHERE expressions to convert
829
- * @param tableAlias The table alias used in the expressions
830
- * @returns A BasicExpression that can be used with the collection's index system
831
- */
832
-
833
- /**
834
- * Finds the alias for a collection ID in the query
835
- */
836
- function findCollectionAlias(
837
- collectionId: string,
838
- query: any
839
- ): string | undefined {
840
- // Check FROM clause
841
- if (
842
- query.from?.type === `collectionRef` &&
843
- query.from.collection?.id === collectionId
844
- ) {
845
- return query.from.alias
846
- }
847
-
848
- // Check JOIN clauses
849
- if (query.join) {
850
- for (const joinClause of query.join) {
851
- if (
852
- joinClause.from?.type === `collectionRef` &&
853
- joinClause.from.collection?.id === collectionId
854
- ) {
855
- return joinClause.from.alias
856
- }
857
- }
858
- }
859
-
860
- return undefined
861
- }
862
-
863
- function* trackSentValues(
864
- changes: Iterable<ChangeMessage<any, string | number>>,
865
- comparator: (a: any, b: any) => number,
866
- tracker: { sentKeys: Set<string | number>; biggest: any }
867
- ) {
868
- for (const change of changes) {
869
- tracker.sentKeys.add(change.key)
870
-
871
- if (!tracker.biggest) {
872
- tracker.biggest = change.value
873
- } else if (comparator(tracker.biggest, change.value) < 0) {
874
- tracker.biggest = change.value
875
- }
876
-
877
- yield change
878
- }
879
- }
880
-
881
- /** Splits updates into a delete of the old value and an insert of the new value */
882
- function* splitUpdates<
883
- T extends object = Record<string, unknown>,
884
- TKey extends string | number = string | number,
885
- >(
886
- changes: Iterable<ChangeMessage<T, TKey>>
887
- ): Generator<ChangeMessage<T, TKey>> {
888
- for (const change of changes) {
889
- if (change.type === `update`) {
890
- yield { type: `delete`, key: change.key, value: change.previousValue! }
891
- yield { type: `insert`, key: change.key, value: change.value }
892
- } else {
893
- yield change
894
- }
895
- }
896
- }
897
-
898
- function* filterChanges<
899
- T extends object = Record<string, unknown>,
900
- TKey extends string | number = string | number,
901
- >(
902
- changes: Iterable<ChangeMessage<T, TKey>>,
903
- f: (change: ChangeMessage<T, TKey>) => boolean
904
- ): Generator<ChangeMessage<T, TKey>> {
905
- for (const change of changes) {
906
- if (f(change)) {
907
- yield change
908
- }
909
- }
910
- }
911
-
912
- /**
913
- * Filters changes to only include those that are smaller than or equal to the provided max value
914
- * @param changes - Iterable of changes to filter
915
- * @param comparator - Comparator function to use for filtering
916
- * @param maxValue - Upper bound to filter changes against (the max boundary is inclusive)
917
- * @returns Iterable of changes that fall within the range
918
- */
919
- function* filterChangesSmallerOrEqualToMax<
920
- T extends object = Record<string, unknown>,
921
- TKey extends string | number = string | number,
922
- >(
923
- changes: Iterable<ChangeMessage<T, TKey>>,
924
- comparator: (a: any, b: any) => number,
925
- maxValue: any
926
- ): Generator<ChangeMessage<T, TKey>> {
927
- yield* filterChanges(changes, (change) => {
928
- return !maxValue || comparator(change.value, maxValue) <= 0
929
- })
930
- }