@tanstack/db 0.1.3 → 0.1.4
This diff compares the contents of package versions that have been publicly released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- package/dist/cjs/collection.cjs +112 -6
- package/dist/cjs/collection.cjs.map +1 -1
- package/dist/cjs/collection.d.cts +3 -2
- package/dist/cjs/errors.cjs +6 -0
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +3 -0
- package/dist/cjs/index.cjs +1 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/indexes/auto-index.cjs +30 -19
- package/dist/cjs/indexes/auto-index.cjs.map +1 -1
- package/dist/cjs/indexes/auto-index.d.cts +1 -0
- package/dist/cjs/indexes/base-index.cjs.map +1 -1
- package/dist/cjs/indexes/base-index.d.cts +2 -1
- package/dist/cjs/indexes/btree-index.cjs +26 -0
- package/dist/cjs/indexes/btree-index.cjs.map +1 -1
- package/dist/cjs/indexes/btree-index.d.cts +7 -0
- package/dist/cjs/indexes/index-options.d.cts +1 -1
- package/dist/cjs/query/compiler/evaluators.cjs +2 -2
- package/dist/cjs/query/compiler/evaluators.cjs.map +1 -1
- package/dist/cjs/query/compiler/evaluators.d.cts +1 -1
- package/dist/cjs/query/compiler/group-by.cjs +3 -1
- package/dist/cjs/query/compiler/group-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.cjs +72 -6
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/compiler/index.d.cts +16 -2
- package/dist/cjs/query/compiler/joins.cjs +111 -12
- package/dist/cjs/query/compiler/joins.cjs.map +1 -1
- package/dist/cjs/query/compiler/joins.d.cts +9 -2
- package/dist/cjs/query/compiler/order-by.cjs +62 -3
- package/dist/cjs/query/compiler/order-by.cjs.map +1 -1
- package/dist/cjs/query/compiler/order-by.d.cts +12 -2
- package/dist/cjs/query/live-query-collection.cjs +196 -23
- package/dist/cjs/query/live-query-collection.cjs.map +1 -1
- package/dist/cjs/types.d.cts +1 -0
- package/dist/cjs/utils/btree.cjs +15 -0
- package/dist/cjs/utils/btree.cjs.map +1 -1
- package/dist/cjs/utils/btree.d.cts +8 -0
- package/dist/esm/collection.d.ts +3 -2
- package/dist/esm/collection.js +113 -7
- package/dist/esm/collection.js.map +1 -1
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.js +6 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.js +2 -1
- package/dist/esm/indexes/auto-index.d.ts +1 -0
- package/dist/esm/indexes/auto-index.js +31 -20
- package/dist/esm/indexes/auto-index.js.map +1 -1
- package/dist/esm/indexes/base-index.d.ts +2 -1
- package/dist/esm/indexes/base-index.js.map +1 -1
- package/dist/esm/indexes/btree-index.d.ts +7 -0
- package/dist/esm/indexes/btree-index.js +26 -0
- package/dist/esm/indexes/btree-index.js.map +1 -1
- package/dist/esm/indexes/index-options.d.ts +1 -1
- package/dist/esm/query/compiler/evaluators.d.ts +1 -1
- package/dist/esm/query/compiler/evaluators.js +2 -2
- package/dist/esm/query/compiler/evaluators.js.map +1 -1
- package/dist/esm/query/compiler/group-by.js +3 -1
- package/dist/esm/query/compiler/group-by.js.map +1 -1
- package/dist/esm/query/compiler/index.d.ts +16 -2
- package/dist/esm/query/compiler/index.js +73 -7
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/compiler/joins.d.ts +9 -2
- package/dist/esm/query/compiler/joins.js +114 -15
- package/dist/esm/query/compiler/joins.js.map +1 -1
- package/dist/esm/query/compiler/order-by.d.ts +12 -2
- package/dist/esm/query/compiler/order-by.js +62 -3
- package/dist/esm/query/compiler/order-by.js.map +1 -1
- package/dist/esm/query/live-query-collection.js +196 -23
- package/dist/esm/query/live-query-collection.js.map +1 -1
- package/dist/esm/types.d.ts +1 -0
- package/dist/esm/utils/btree.d.ts +8 -0
- package/dist/esm/utils/btree.js +15 -0
- package/dist/esm/utils/btree.js.map +1 -1
- package/package.json +2 -2
- package/src/collection.ts +163 -10
- package/src/errors.ts +6 -0
- package/src/indexes/auto-index.ts +53 -31
- package/src/indexes/base-index.ts +6 -1
- package/src/indexes/btree-index.ts +29 -0
- package/src/indexes/index-options.ts +2 -2
- package/src/query/compiler/evaluators.ts +6 -3
- package/src/query/compiler/group-by.ts +3 -1
- package/src/query/compiler/index.ts +112 -5
- package/src/query/compiler/joins.ts +216 -20
- package/src/query/compiler/order-by.ts +98 -3
- package/src/query/live-query-collection.ts +352 -24
- package/src/types.ts +1 -0
- package/src/utils/btree.ts +17 -0
```diff
@@ -1,8 +1,10 @@
 import { D2, MultiSet, output } from "@tanstack/db-ivm"
 import { createCollection } from "../collection.js"
+import { createFilterFunctionFromExpression } from "../change-events.js"
 import { compileQuery } from "./compiler/index.js"
 import { buildQuery, getQueryIR } from "./builder/index.js"
 import { convertToBasicExpression } from "./compiler/expressions.js"
+import type { OrderByOptimizationInfo } from "./compiler/order-by.js"
 import type { InitialQueryBuilder, QueryBuilder } from "./builder/index.js"
 import type { Collection } from "../collection.js"
 import type {
@@ -16,6 +18,7 @@ import type {
 import type { Context, GetResult } from "./builder/types.js"
 import type { MultiSetArray, RootStreamBuilder } from "@tanstack/db-ivm"
 import type { BasicExpression } from "./ir.js"
+import type { LazyCollectionCallbacks } from "./compiler/joins.js"

 // Global counter for auto-generated collection IDs
 let liveQueryCollectionCounter = 0
@@ -163,6 +166,12 @@ export function liveQueryCollectionOptions<
   const collections = extractCollectionsFromQuery(query)

   const allCollectionsReady = () => {
+    return Object.values(collections).every((collection) =>
+      collection.isReady()
+    )
+  }
+
+  const allCollectionsReadyOrInitialCommit = () => {
     return Object.values(collections).every(
       (collection) =>
         collection.status === `ready` || collection.status === `initialCommit`
@@ -176,6 +185,14 @@ export function liveQueryCollectionOptions<
     | Map<string, BasicExpression<boolean>>
     | undefined

+  // Map of collection IDs to functions that load keys for that lazy collection
+  const lazyCollectionsCallbacks: Record<string, LazyCollectionCallbacks> = {}
+  // Set of collection IDs that are lazy collections
+  const lazyCollections = new Set<string>()
+  // Set of collection IDs that include an optimizable ORDER BY clause
+  const optimizableOrderByCollections: Record<string, OrderByOptimizationInfo> =
+    {}
+
   const compileBasePipeline = () => {
     graphCache = new D2()
     inputsCache = Object.fromEntries(
```
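Note: the `LazyCollectionCallbacks` type imported above is declared in `./compiler/joins.js` and is not shown in this hunk. Judging only by how this file populates `lazyCollectionsCallbacks` further down in the diff, an entry holds two functions; the sketch below is an assumption drawn from that usage, not the package's actual type declaration.

```ts
// Sketch (inferred from usage in this file, not the exported type):
// what a lazy collection registers under its collection ID.
interface LazyCollectionCallbacksSketch {
  // Pushes specific keys from the source collection into the query pipeline on demand
  // (used by the join operator when it discovers matching keys).
  loadKeys: (keys: Set<string | number>) => void
  // Fallback when no index exists for the join key: streams the collection's full
  // current state into the pipeline; the implementation guards against running twice.
  loadInitialState: () => void
}
```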
```diff
@@ -189,7 +206,14 @@
     ;({
       pipeline: pipelineCache,
       collectionWhereClauses: collectionWhereClausesCache,
-    } = compileQuery(
+    } = compileQuery(
+      query,
+      inputsCache as Record<string, KeyedStream>,
+      collections,
+      lazyCollectionsCallbacks,
+      lazyCollections,
+      optimizableOrderByCollections
+    ))
   }

   const maybeCompileBasePipeline = () => {
@@ -292,10 +316,24 @@

    graph.finalize()

-
+    let subscribedToAllCollections = false
+
+    // The callback function is called after the graph has run.
+    // This gives the callback a chance to load more data if needed,
+    // that's used to optimize orderBy operators that set a limit,
+    // in order to load some more data if we still don't have enough rows after the pipeline has run.
+    // That can happend because even though we load N rows, the pipeline might filter some of these rows out
+    // causing the orderBy operator to receive less than N rows or even no rows at all.
+    // So this callback would notice that it doesn't have enough rows and load some more.
+    // The callback returns a boolean, when it's true it's done loading data and we can mark the collection as ready.
+    const maybeRunGraph = (callback?: () => boolean) => {
      // We only run the graph if all the collections are ready
-      if (
+      if (
+        allCollectionsReadyOrInitialCommit() &&
+        subscribedToAllCollections
+      ) {
        graph.run()
+        const ready = callback?.() ?? true
        // On the initial run, we may need to do an empty commit to ensure that
        // the collection is initialized
        if (messagesCount === 0) {
@@ -303,7 +341,9 @@
          commit()
        }
        // Mark the collection as ready after the first successful run
-
+        if (ready && allCollectionsReady()) {
+          markReady()
+        }
      }
    }

```
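The comment block above defines the contract between `maybeRunGraph` and its optional callback: the callback runs after each graph execution, may top up the pipeline (this is what the limit/offset ORDER BY optimization relies on), and returns `true` once no more rows are needed. A minimal, self-contained sketch of that contract with stand-in stubs; none of these identifiers are the package's API, and the real `dataNeeded`/`loadNextItems` appear later in this diff:

```ts
// Hypothetical stand-ins for the real dataNeeded/loadNextItems defined further below.
let pending = 3 // pretend the topK operator still wants 3 rows after the first run

const dataNeeded = (): number => pending
const loadNextItems = (n: number): void => {
  // The real implementation reads the next n keys from a range index and feeds the pipeline.
  pending = Math.max(0, pending - n)
}

// Shape of the callback handed to maybeRunGraph: returns true when loading is finished,
// i.e. when the collection may be marked ready.
const loadMoreIfNeeded = (): boolean => {
  const n = dataNeeded()
  if (n > 0) loadNextItems(n) // upstream filters dropped rows; fetch more
  return n === 0
}
```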
```diff
@@ -319,6 +359,240 @@ export function liveQueryCollectionOptions<
        ? collectionWhereClausesCache.get(collectionAlias)
        : undefined

+      const sendChangesToPipeline = (
+        changes: Iterable<ChangeMessage<any, string | number>>,
+        callback?: () => boolean
+      ) => {
+        sendChangesToInput(input, changes, collection.config.getKey)
+        maybeRunGraph(callback)
+      }
+
+      // Wraps the sendChangesToPipeline function
+      // in order to turn `update`s into `insert`s
+      // for keys that have not been sent to the pipeline yet
+      // and filter out deletes for keys that have not been sent
+      const sendVisibleChangesToPipeline = (
+        changes: Array<ChangeMessage<any, string | number>>,
+        loadedInitialState: boolean,
+        sentKeys: Set<string | number>
+      ) => {
+        if (loadedInitialState) {
+          // There was no index for the join key
+          // so we loaded the initial state
+          // so we can safely assume that the pipeline has seen all keys
+          return sendChangesToPipeline(changes)
+        }
+
+        const newChanges = []
+        for (const change of changes) {
+          let newChange = change
+          if (!sentKeys.has(change.key)) {
+            if (change.type === `update`) {
+              newChange = { ...change, type: `insert` }
+            } else if (change.type === `delete`) {
+              // filter out deletes for keys that have not been sent
+              continue
+            }
+          }
+          newChanges.push(newChange)
+        }
+
+        return sendChangesToPipeline(newChanges)
+      }
+
+      const loadKeys = (
+        keys: Iterable<string | number>,
+        sentKeys: Set<string | number>,
+        filterFn: (item: object) => boolean
+      ) => {
+        for (const key of keys) {
+          // Only load the key once
+          if (sentKeys.has(key)) continue
+
+          const value = collection.get(key)
+          if (value !== undefined && filterFn(value)) {
+            sentKeys.add(key)
+            sendChangesToPipeline([{ type: `insert`, key, value }])
+          }
+        }
+      }
+
+      const subscribeToAllChanges = (
+        whereExpression: BasicExpression<boolean> | undefined
+      ) => {
+        const unsubscribe = collection.subscribeChanges(
+          sendChangesToPipeline,
+          {
+            includeInitialState: true,
+            ...(whereExpression ? { whereExpression } : undefined),
+          }
+        )
+        return unsubscribe
+      }
+
+      // Subscribes to all changes but without the initial state
+      // such that we can load keys from the initial state on demand
+      // based on the matching keys from the main collection in the join
+      const subscribeToMatchingChanges = (
+        whereExpression: BasicExpression<boolean> | undefined
+      ) => {
+        let loadedInitialState = false
+        const sentKeys = new Set<string | number>()
+
+        const sendVisibleChanges = (
+          changes: Array<ChangeMessage<any, string | number>>
+        ) => {
+          sendVisibleChangesToPipeline(changes, loadedInitialState, sentKeys)
+        }
+
+        const unsubscribe = collection.subscribeChanges(sendVisibleChanges, {
+          whereExpression,
+        })
+
+        // Create a function that loads keys from the collection
+        // into the query pipeline on demand
+        const filterFn = whereExpression
+          ? createFilterFunctionFromExpression(whereExpression)
+          : () => true
+        const loadKs = (keys: Set<string | number>) => {
+          return loadKeys(keys, sentKeys, filterFn)
+        }
+
+        // Store the functions to load keys and load initial state in the `lazyCollectionsCallbacks` map
+        // This is used by the join operator to dynamically load matching keys from the lazy collection
+        // or to get the full initial state of the collection if there's no index for the join key
+        lazyCollectionsCallbacks[collectionId] = {
+          loadKeys: loadKs,
+          loadInitialState: () => {
+            // Make sure we only load the initial state once
+            if (loadedInitialState) return
+            loadedInitialState = true
+
+            const changes = collection.currentStateAsChanges({
+              whereExpression,
+            })
+            sendChangesToPipeline(changes)
+          },
+        }
+        return unsubscribe
+      }
+
+      const subscribeToOrderedChanges = (
+        whereExpression: BasicExpression<boolean> | undefined
+      ) => {
+        const {
+          offset,
+          limit,
+          comparator,
+          index,
+          dataNeeded,
+          valueExtractorForRawRow,
+        } = optimizableOrderByCollections[collectionId]!
+
+        if (!dataNeeded) {
+          // This should never happen because the topK operator should always set the size callback
+          // which in turn should lead to the orderBy operator setting the dataNeeded callback
+          throw new Error(
+            `Missing dataNeeded callback for collection ${collectionId}`
+          )
+        }
+
+        // This function is called by maybeRunGraph
+        // after each iteration of the query pipeline
+        // to ensure that the orderBy operator has enough data to work with
+        const loadMoreIfNeeded = () => {
+          // `dataNeeded` probes the orderBy operator to see if it needs more data
+          // if it needs more data, it returns the number of items it needs
+          const n = dataNeeded()
+          if (n > 0) {
+            loadNextItems(n)
+          }
+
+          // Indicate that we're done loading data if we didn't need to load more data
+          return n === 0
+        }
+
+        // Keep track of the keys we've sent
+        // and also the biggest value we've sent so far
+        const sentValuesInfo: {
+          sentKeys: Set<string | number>
+          biggest: any
+        } = {
+          sentKeys: new Set<string | number>(),
+          biggest: undefined,
+        }
+
+        const sendChangesToPipelineWithTracking = (
+          changes: Iterable<ChangeMessage<any, string | number>>
+        ) => {
+          const trackedChanges = trackSentValues(
+            changes,
+            comparator,
+            sentValuesInfo
+          )
+          sendChangesToPipeline(trackedChanges, loadMoreIfNeeded)
+        }
+
+        // Loads the next `n` items from the collection
+        // starting from the biggest item it has sent
+        const loadNextItems = (n: number) => {
+          const biggestSentRow = sentValuesInfo.biggest
+          const biggestSentValue = biggestSentRow
+            ? valueExtractorForRawRow(biggestSentRow)
+            : biggestSentRow
+          // Take the `n` items after the biggest sent value
+          const nextOrderedKeys = index.take(n, biggestSentValue)
+          const nextInserts: Array<ChangeMessage<any, string | number>> =
+            nextOrderedKeys.map((key) => {
+              return { type: `insert`, key, value: collection.get(key) }
+            })
+          sendChangesToPipelineWithTracking(nextInserts)
+        }
+
+        // Load the first `offset + limit` values from the index
+        // i.e. the K items from the collection that fall into the requested range: [offset, offset + limit[
+        loadNextItems(offset + limit)
+
+        const sendChangesInRange = (
+          changes: Iterable<ChangeMessage<any, string | number>>
+        ) => {
+          // Split live updates into a delete of the old value and an insert of the new value
+          // and filter out changes that are bigger than the biggest value we've sent so far
+          // because they can't affect the topK
+          const splittedChanges = splitUpdates(changes)
+          const filteredChanges = filterChangesSmallerOrEqualToMax(
+            splittedChanges,
+            comparator,
+            sentValuesInfo.biggest
+          )
+          sendChangesToPipeline(filteredChanges, loadMoreIfNeeded)
+        }
+
+        // Subscribe to changes and only send changes that are smaller than the biggest value we've sent so far
+        // values that are bigger don't need to be sent because they can't affect the topK
+        const unsubscribe = collection.subscribeChanges(sendChangesInRange, {
+          whereExpression,
+        })
+
+        return unsubscribe
+      }
+
+      const subscribeToChanges = (
+        whereExpression?: BasicExpression<boolean>
+      ) => {
+        let unsubscribe: () => void
+        if (lazyCollections.has(collectionId)) {
+          unsubscribe = subscribeToMatchingChanges(whereExpression)
+        } else if (
+          Object.hasOwn(optimizableOrderByCollections, collectionId)
+        ) {
+          unsubscribe = subscribeToOrderedChanges(whereExpression)
+        } else {
+          unsubscribe = subscribeToAllChanges(whereExpression)
+        }
+        unsubscribeCallbacks.add(unsubscribe)
+      }
+
      if (whereClause) {
        // Convert WHERE clause to BasicExpression format for collection subscription
        const whereExpression = convertToBasicExpression(
```
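To make the rewrite in `sendVisibleChangesToPipeline` concrete: for a lazy (joined) collection the pipeline only knows about keys that were explicitly loaded, so a live update for an unseen key must become an insert, and a delete for an unseen key can be dropped. A simplified, self-contained sketch of that rule follows; the `Change` shape is illustrative and not the library's `ChangeMessage` type.

```ts
type Change = { type: "insert" | "update" | "delete"; key: string; value?: unknown }

// Same rule as sendVisibleChangesToPipeline, stripped of the pipeline plumbing.
function rewriteForUnseenKeys(changes: Array<Change>, sentKeys: Set<string>): Array<Change> {
  const out: Array<Change> = []
  for (const change of changes) {
    if (!sentKeys.has(change.key)) {
      if (change.type === "update") {
        out.push({ ...change, type: "insert" }) // pipeline sees this key for the first time
        continue
      }
      if (change.type === "delete") continue // nothing downstream to delete
    }
    out.push(change)
  }
  return out
}

// With sentKeys = new Set(["a"]):
//   update "a" stays an update, update "b" becomes an insert, delete "c" is dropped.
```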
```diff
@@ -328,17 +602,7 @@ export function liveQueryCollectionOptions<

        if (whereExpression) {
          // Use index optimization for this collection
-
-            (changes) => {
-              sendChangesToInput(input, changes, collection.config.getKey)
-              maybeRunGraph()
-            },
-            {
-              includeInitialState: true,
-              whereExpression: whereExpression,
-            }
-          )
-          unsubscribeCallbacks.add(subscription)
+          subscribeToChanges(whereExpression)
        } else {
          // This should not happen - if we have a whereClause but can't create whereExpression,
          // it indicates a bug in our optimization logic
@@ -349,17 +613,12 @@ export function liveQueryCollectionOptions<
        }
      } else {
        // No WHERE clause for this collection, use regular subscription
-
-          (changes) => {
-            sendChangesToInput(input, changes, collection.config.getKey)
-            maybeRunGraph()
-          },
-          { includeInitialState: true }
-        )
-        unsubscribeCallbacks.add(subscription)
+        subscribeToChanges()
      }
    })

+    subscribedToAllCollections = true
+
    // Initial run
    maybeRunGraph()

@@ -494,7 +753,7 @@ function bridgeToCreateCollection<
  */
 function sendChangesToInput(
   input: RootStreamBuilder<unknown>,
-  changes:
+  changes: Iterable<ChangeMessage>,
   getKey: (item: ChangeMessage[`value`]) => any
 ) {
   const multiSetArray: MultiSetArray<unknown> = []
@@ -593,3 +852,72 @@ function findCollectionAlias(

   return undefined
 }
+
+function* trackSentValues(
+  changes: Iterable<ChangeMessage<any, string | number>>,
+  comparator: (a: any, b: any) => number,
+  tracker: { sentKeys: Set<string | number>; biggest: any }
+) {
+  for (const change of changes) {
+    tracker.sentKeys.add(change.key)
+
+    if (!tracker.biggest) {
+      tracker.biggest = change.value
+    } else if (comparator(tracker.biggest, change.value) < 0) {
+      tracker.biggest = change.value
+    }
+
+    yield change
+  }
+}
+
+/** Splits updates into a delete of the old value and an insert of the new value */
+function* splitUpdates<
+  T extends object = Record<string, unknown>,
+  TKey extends string | number = string | number,
+>(
+  changes: Iterable<ChangeMessage<T, TKey>>
+): Generator<ChangeMessage<T, TKey>> {
+  for (const change of changes) {
+    if (change.type === `update`) {
+      yield { type: `delete`, key: change.key, value: change.previousValue! }
+      yield { type: `insert`, key: change.key, value: change.value }
+    } else {
+      yield change
+    }
+  }
+}
+
+function* filterChanges<
+  T extends object = Record<string, unknown>,
+  TKey extends string | number = string | number,
+>(
+  changes: Iterable<ChangeMessage<T, TKey>>,
+  f: (change: ChangeMessage<T, TKey>) => boolean
+): Generator<ChangeMessage<T, TKey>> {
+  for (const change of changes) {
+    if (f(change)) {
+      yield change
+    }
+  }
+}
+
+/**
+ * Filters changes to only include those that are smaller than the provided max value
+ * @param changes - Iterable of changes to filter
+ * @param comparator - Comparator function to use for filtering
+ * @param maxValue - Range to filter changes within (range boundaries are exclusive)
+ * @returns Iterable of changes that fall within the range
+ */
+function* filterChangesSmallerOrEqualToMax<
+  T extends object = Record<string, unknown>,
+  TKey extends string | number = string | number,
+>(
+  changes: Iterable<ChangeMessage<T, TKey>>,
+  comparator: (a: any, b: any) => number,
+  maxValue: any
+): Generator<ChangeMessage<T, TKey>> {
+  yield* filterChanges(changes, (change) => {
+    return !maxValue || comparator(change.value, maxValue) <= 0
+  })
+}
```
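A short worked example of how these generators compose on the ordered-subscription path (`splitUpdates` followed by `filterChangesSmallerOrEqualToMax`). The helpers above are module-private, so the snippet re-sketches them in simplified form purely for illustration; the rows and comparator are made up.

```ts
type Msg = { type: "insert" | "update" | "delete"; key: string; value: any; previousValue?: any }

// Simplified stand-in for splitUpdates: an update becomes delete(old) + insert(new).
function* splitUpdatesSketch(changes: Iterable<Msg>): Generator<Msg> {
  for (const c of changes) {
    if (c.type === "update") {
      yield { type: "delete", key: c.key, value: c.previousValue }
      yield { type: "insert", key: c.key, value: c.value }
    } else {
      yield c
    }
  }
}

// Simplified stand-in for filterChangesSmallerOrEqualToMax.
function* keepSmallerOrEqual(changes: Iterable<Msg>, cmp: (a: any, b: any) => number, max: any): Generator<Msg> {
  for (const c of changes) {
    if (!max || cmp(c.value, max) <= 0) yield c
  }
}

const byScore = (a: any, b: any) => a.score - b.score
const incoming: Array<Msg> = [
  { type: "update", key: "a", previousValue: { score: 10 }, value: { score: 12 } },
  { type: "insert", key: "b", value: { score: 99 } }, // ordered after everything sent so far
]
const outgoing = [...keepSmallerOrEqual(splitUpdatesSketch(incoming), byScore, { score: 42 })]
// outgoing: delete "a" (score 10), insert "a" (score 12).
// The score-99 insert is dropped: it cannot affect a topK whose largest sent row has score 42.
```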
package/src/types.ts CHANGED
package/src/utils/btree.ts CHANGED
```diff
@@ -254,6 +254,14 @@ export class BTree<K = any, V = any> {
     )
   }

+  /** Returns the next key larger than the specified key, or undefined if there is none.
+   * Also, nextHigherKey(undefined) returns the lowest key.
+   */
+  nextHigherKey(key: K | undefined): K | undefined {
+    const p = this.nextHigherPair(key, ReusedArray as [K, V])
+    return p && p[0]
+  }
+
   /** Returns the next pair whose key is smaller than the specified key (or undefined if there is none.
    * If key === undefined, this function returns the highest pair.
    * @param key The key to search for.
@@ -268,6 +276,14 @@
     return this._root.getPairOrNextLower(key, this._compare, false, reusedArray)
   }

+  /** Returns the next key smaller than the specified key, or undefined if there is none.
+   * Also, nextLowerKey(undefined) returns the highest key.
+   */
+  nextLowerKey(key: K | undefined): K | undefined {
+    const p = this.nextLowerPair(key, ReusedArray as [K, V])
+    return p && p[0]
+  }
+
   /** Adds all pairs from a list of key-value pairs.
    * @param pairs Pairs to add to this tree. If there are duplicate keys,
    * later pairs currently overwrite earlier ones (e.g. [[0,1],[0,7]]
@@ -1001,6 +1017,7 @@ const EmptyLeaf = (function () {
   n.isShared = true
   return n
 })()
+const ReusedArray: Array<any> = [] // assumed thread-local

 function check(fact: boolean, ...args: Array<any>) {
   if (!fact) {
```
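The two new key-only accessors mirror the existing `nextHigherPair`/`nextLowerPair` but avoid allocating a pair array per call by reusing the module-level `ReusedArray` scratch buffer. A hedged usage sketch of cursor-style iteration; the import path and the `set` calls assume the tree's usual key/value insertion API and are not part of this diff.

```ts
import { BTree } from "../utils/btree.js" // hypothetical path: the internal class patched above

// Illustrative only: walk keys in ascending order without materializing [key, value] pairs.
const tree = new BTree<number, string>()
tree.set(1, "a")
tree.set(5, "b")
tree.set(9, "c")

let key = tree.nextHigherKey(undefined) // undefined means "start before the lowest key" -> 1
while (key !== undefined) {
  console.log(key) // 1, 5, 9
  key = tree.nextHigherKey(key)
}

// Symmetrically, nextLowerKey(undefined) returns the highest key (9 here).
```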
|