@tanstack/db 0.5.32 → 0.5.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/cjs/index.cjs +2 -0
  2. package/dist/cjs/index.cjs.map +1 -1
  3. package/dist/cjs/index.d.cts +1 -0
  4. package/dist/cjs/query/effect.cjs +602 -0
  5. package/dist/cjs/query/effect.cjs.map +1 -0
  6. package/dist/cjs/query/effect.d.cts +94 -0
  7. package/dist/cjs/query/live/collection-config-builder.cjs +5 -74
  8. package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
  9. package/dist/cjs/query/live/collection-subscriber.cjs +33 -100
  10. package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
  11. package/dist/cjs/query/live/collection-subscriber.d.cts +0 -1
  12. package/dist/cjs/query/live/utils.cjs +179 -0
  13. package/dist/cjs/query/live/utils.cjs.map +1 -0
  14. package/dist/cjs/query/live/utils.d.cts +109 -0
  15. package/dist/esm/index.d.ts +1 -0
  16. package/dist/esm/index.js +2 -0
  17. package/dist/esm/index.js.map +1 -1
  18. package/dist/esm/query/effect.d.ts +94 -0
  19. package/dist/esm/query/effect.js +602 -0
  20. package/dist/esm/query/effect.js.map +1 -0
  21. package/dist/esm/query/live/collection-config-builder.js +1 -70
  22. package/dist/esm/query/live/collection-config-builder.js.map +1 -1
  23. package/dist/esm/query/live/collection-subscriber.d.ts +0 -1
  24. package/dist/esm/query/live/collection-subscriber.js +31 -98
  25. package/dist/esm/query/live/collection-subscriber.js.map +1 -1
  26. package/dist/esm/query/live/utils.d.ts +109 -0
  27. package/dist/esm/query/live/utils.js +179 -0
  28. package/dist/esm/query/live/utils.js.map +1 -0
  29. package/package.json +1 -1
  30. package/src/index.ts +11 -0
  31. package/src/query/effect.ts +1119 -0
  32. package/src/query/live/collection-config-builder.ts +6 -132
  33. package/src/query/live/collection-subscriber.ts +40 -156
  34. package/src/query/live/utils.ts +356 -0
@@ -0,0 +1,356 @@
1
+ import { MultiSet, serializeValue } from '@tanstack/db-ivm'
2
+ import { normalizeOrderByPaths } from '../compiler/expressions.js'
3
+ import { buildQuery, getQueryIR } from '../builder/index.js'
4
+ import type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'
5
+ import type { Collection } from '../../collection/index.js'
6
+ import type { ChangeMessage } from '../../types.js'
7
+ import type { InitialQueryBuilder, QueryBuilder } from '../builder/index.js'
8
+ import type { Context } from '../builder/types.js'
9
+ import type { OrderBy, QueryIR } from '../ir.js'
10
+ import type { OrderByOptimizationInfo } from '../compiler/order-by.js'
11
+
12
+ /**
13
+ * Helper function to extract collections from a compiled query.
14
+ * Traverses the query IR to find all collection references.
15
+ * Maps collections by their ID (not alias) as expected by the compiler.
16
+ */
17
+ export function extractCollectionsFromQuery(
18
+ query: any,
19
+ ): Record<string, Collection<any, any, any>> {
20
+ const collections: Record<string, any> = {}
21
+
22
+ // Helper function to recursively extract collections from a query or source
23
+ function extractFromSource(source: any) {
24
+ if (source.type === `collectionRef`) {
25
+ collections[source.collection.id] = source.collection
26
+ } else if (source.type === `queryRef`) {
27
+ // Recursively extract from subquery
28
+ extractFromQuery(source.query)
29
+ }
30
+ }
31
+
32
+ // Helper function to recursively extract collections from a query
33
+ function extractFromQuery(q: any) {
34
+ // Extract from FROM clause
35
+ if (q.from) {
36
+ extractFromSource(q.from)
37
+ }
38
+
39
+ // Extract from JOIN clauses
40
+ if (q.join && Array.isArray(q.join)) {
41
+ for (const joinClause of q.join) {
42
+ if (joinClause.from) {
43
+ extractFromSource(joinClause.from)
44
+ }
45
+ }
46
+ }
47
+ }
48
+
49
+ // Start extraction from the root query
50
+ extractFromQuery(query)
51
+
52
+ return collections
53
+ }
54
+
55
+ /**
56
+ * Helper function to extract the collection that is referenced in the query's FROM clause.
57
+ * The FROM clause may refer directly to a collection or indirectly to a subquery.
58
+ */
59
+ export function extractCollectionFromSource(
60
+ query: any,
61
+ ): Collection<any, any, any> {
62
+ const from = query.from
63
+
64
+ if (from.type === `collectionRef`) {
65
+ return from.collection
66
+ } else if (from.type === `queryRef`) {
67
+ // Recursively extract from subquery
68
+ return extractCollectionFromSource(from.query)
69
+ }
70
+
71
+ throw new Error(
72
+ `Failed to extract collection. Invalid FROM clause: ${JSON.stringify(query)}`,
73
+ )
74
+ }
75
+
76
+ /**
77
+ * Extracts all aliases used for each collection across the entire query tree.
78
+ *
79
+ * Traverses the QueryIR recursively to build a map from collection ID to all aliases
80
+ * that reference that collection. This is essential for self-join support, where the
81
+ * same collection may be referenced multiple times with different aliases.
82
+ *
83
+ * For example, given a query like:
84
+ * ```ts
85
+ * q.from({ employee: employeesCollection })
86
+ * .join({ manager: employeesCollection }, ({ employee, manager }) =>
87
+ * eq(employee.managerId, manager.id)
88
+ * )
89
+ * ```
90
+ *
91
+ * This function would return:
92
+ * ```
93
+ * Map { "employees" => Set { "employee", "manager" } }
94
+ * ```
95
+ *
96
+ * @param query - The query IR to extract aliases from
97
+ * @returns A map from collection ID to the set of all aliases referencing that collection
98
+ */
99
+ export function extractCollectionAliases(
100
+ query: QueryIR,
101
+ ): Map<string, Set<string>> {
102
+ const aliasesById = new Map<string, Set<string>>()
103
+
104
+ function recordAlias(source: any) {
105
+ if (!source) return
106
+
107
+ if (source.type === `collectionRef`) {
108
+ const { id } = source.collection
109
+ const existing = aliasesById.get(id)
110
+ if (existing) {
111
+ existing.add(source.alias)
112
+ } else {
113
+ aliasesById.set(id, new Set([source.alias]))
114
+ }
115
+ } else if (source.type === `queryRef`) {
116
+ traverse(source.query)
117
+ }
118
+ }
119
+
120
+ function traverse(q?: QueryIR) {
121
+ if (!q) return
122
+
123
+ recordAlias(q.from)
124
+
125
+ if (q.join) {
126
+ for (const joinClause of q.join) {
127
+ recordAlias(joinClause.from)
128
+ }
129
+ }
130
+ }
131
+
132
+ traverse(query)
133
+
134
+ return aliasesById
135
+ }
136
+
137
+ /**
138
+ * Builds a query IR from a config object that contains either a query builder
139
+ * function or a QueryBuilder instance.
140
+ */
141
+ export function buildQueryFromConfig<TContext extends Context>(config: {
142
+ query:
143
+ | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)
144
+ | QueryBuilder<TContext>
145
+ }): QueryIR {
146
+ // Build the query using the provided query builder function or instance
147
+ if (typeof config.query === `function`) {
148
+ return buildQuery<TContext>(config.query)
149
+ }
150
+ return getQueryIR(config.query)
151
+ }
152
+
153
+ /**
154
+ * Helper function to send changes to a D2 input stream.
155
+ * Converts ChangeMessages to D2 MultiSet data and sends to the input.
156
+ *
157
+ * @returns The number of multiset entries sent
158
+ */
159
+ export function sendChangesToInput(
160
+ input: RootStreamBuilder<unknown>,
161
+ changes: Iterable<ChangeMessage>,
162
+ getKey: (item: ChangeMessage[`value`]) => any,
163
+ ): number {
164
+ const multiSetArray: MultiSetArray<unknown> = []
165
+ for (const change of changes) {
166
+ const key = getKey(change.value)
167
+ if (change.type === `insert`) {
168
+ multiSetArray.push([[key, change.value], 1])
169
+ } else if (change.type === `update`) {
170
+ multiSetArray.push([[key, change.previousValue], -1])
171
+ multiSetArray.push([[key, change.value], 1])
172
+ } else {
173
+ // change.type === `delete`
174
+ multiSetArray.push([[key, change.value], -1])
175
+ }
176
+ }
177
+
178
+ if (multiSetArray.length !== 0) {
179
+ input.sendData(new MultiSet(multiSetArray))
180
+ }
181
+
182
+ return multiSetArray.length
183
+ }
184
+
185
+ /** Splits updates into a delete of the old value and an insert of the new value */
186
+ export function* splitUpdates<
187
+ T extends object = Record<string, unknown>,
188
+ TKey extends string | number = string | number,
189
+ >(
190
+ changes: Iterable<ChangeMessage<T, TKey>>,
191
+ ): Generator<ChangeMessage<T, TKey>> {
192
+ for (const change of changes) {
193
+ if (change.type === `update`) {
194
+ yield { type: `delete`, key: change.key, value: change.previousValue! }
195
+ yield { type: `insert`, key: change.key, value: change.value }
196
+ } else {
197
+ yield change
198
+ }
199
+ }
200
+ }
201
+
202
+ /**
203
+ * Filter changes to prevent duplicate inserts to a D2 pipeline.
204
+ * Maintains D2 multiplicity at 1 for visible items so that deletes
205
+ * properly reduce multiplicity to 0.
206
+ *
207
+ * Mutates `sentKeys` in place: adds keys on insert, removes on delete.
208
+ */
209
+ export function filterDuplicateInserts(
210
+ changes: Array<ChangeMessage<any, string | number>>,
211
+ sentKeys: Set<string | number>,
212
+ ): Array<ChangeMessage<any, string | number>> {
213
+ const filtered: Array<ChangeMessage<any, string | number>> = []
214
+ for (const change of changes) {
215
+ if (change.type === `insert`) {
216
+ if (sentKeys.has(change.key)) {
217
+ continue // Skip duplicate
218
+ }
219
+ sentKeys.add(change.key)
220
+ } else if (change.type === `delete`) {
221
+ sentKeys.delete(change.key)
222
+ }
223
+ filtered.push(change)
224
+ }
225
+ return filtered
226
+ }
227
+
228
+ /**
229
+ * Track the biggest value seen in a stream of changes, used for cursor-based
230
+ * pagination in ordered subscriptions. Returns whether the load request key
231
+ * should be reset (allowing another load).
232
+ *
233
+ * @param changes - changes to process (deletes are skipped)
234
+ * @param current - the current biggest value (or undefined if none)
235
+ * @param sentKeys - set of keys already sent to D2 (for new-key detection)
236
+ * @param comparator - orderBy comparator
237
+ * @returns `{ biggest, shouldResetLoadKey }` — the new biggest value and
238
+ * whether the caller should clear its last-load-request-key
239
+ */
240
+ export function trackBiggestSentValue(
241
+ changes: Array<ChangeMessage<any, string | number>>,
242
+ current: unknown | undefined,
243
+ sentKeys: Set<string | number>,
244
+ comparator: (a: any, b: any) => number,
245
+ ): { biggest: unknown; shouldResetLoadKey: boolean } {
246
+ let biggest = current
247
+ let shouldResetLoadKey = false
248
+
249
+ for (const change of changes) {
250
+ if (change.type === `delete`) continue
251
+
252
+ const isNewKey = !sentKeys.has(change.key)
253
+
254
+ if (biggest === undefined) {
255
+ biggest = change.value
256
+ shouldResetLoadKey = true
257
+ } else if (comparator(biggest, change.value) < 0) {
258
+ biggest = change.value
259
+ shouldResetLoadKey = true
260
+ } else if (isNewKey) {
261
+ // New key at same sort position — allow another load if needed
262
+ shouldResetLoadKey = true
263
+ }
264
+ }
265
+
266
+ return { biggest, shouldResetLoadKey }
267
+ }
268
+
269
+ /**
270
+ * Compute orderBy/limit subscription hints for an alias.
271
+ * Returns normalised orderBy and effective limit suitable for passing to
272
+ * `subscribeChanges`, or `undefined` values when the query's orderBy cannot
273
+ * be scoped to the given alias (e.g. cross-collection refs or aggregates).
274
+ */
275
+ export function computeSubscriptionOrderByHints(
276
+ query: { orderBy?: OrderBy; limit?: number; offset?: number },
277
+ alias: string,
278
+ ): { orderBy: OrderBy | undefined; limit: number | undefined } {
279
+ const { orderBy, limit, offset } = query
280
+ const effectiveLimit =
281
+ limit !== undefined && offset !== undefined ? limit + offset : limit
282
+
283
+ const normalizedOrderBy = orderBy
284
+ ? normalizeOrderByPaths(orderBy, alias)
285
+ : undefined
286
+
287
+ // Only pass orderBy when it is scoped to this alias and uses simple refs,
288
+ // to avoid leaking cross-collection paths into backend-specific compilers.
289
+ const canPassOrderBy =
290
+ normalizedOrderBy?.every((clause) => {
291
+ const exp = clause.expression
292
+ if (exp.type !== `ref`) return false
293
+ const path = exp.path
294
+ return Array.isArray(path) && path.length === 1
295
+ }) ?? false
296
+
297
+ return {
298
+ orderBy: canPassOrderBy ? normalizedOrderBy : undefined,
299
+ limit: canPassOrderBy ? effectiveLimit : undefined,
300
+ }
301
+ }
302
+
303
+ /**
304
+ * Compute the cursor for loading the next batch of ordered data.
305
+ * Extracts values from the biggest sent row and builds the `minValues`
306
+ * array and a deduplication key.
307
+ *
308
+ * @returns `undefined` if the load should be skipped (duplicate request),
309
+ * otherwise `{ minValues, normalizedOrderBy, loadRequestKey }`.
310
+ */
311
+ export function computeOrderedLoadCursor(
312
+ orderByInfo: Pick<
313
+ OrderByOptimizationInfo,
314
+ 'orderBy' | 'valueExtractorForRawRow' | 'offset'
315
+ >,
316
+ biggestSentRow: unknown | undefined,
317
+ lastLoadRequestKey: string | undefined,
318
+ alias: string,
319
+ limit: number,
320
+ ):
321
+ | {
322
+ minValues: Array<unknown> | undefined
323
+ normalizedOrderBy: OrderBy
324
+ loadRequestKey: string
325
+ }
326
+ | undefined {
327
+ const { orderBy, valueExtractorForRawRow, offset } = orderByInfo
328
+
329
+ // Extract all orderBy column values from the biggest sent row
330
+ // For single-column: returns single value, for multi-column: returns array
331
+ const extractedValues = biggestSentRow
332
+ ? valueExtractorForRawRow(biggestSentRow as Record<string, unknown>)
333
+ : undefined
334
+
335
+ // Normalize to array format for minValues
336
+ let minValues: Array<unknown> | undefined
337
+ if (extractedValues !== undefined) {
338
+ minValues = Array.isArray(extractedValues)
339
+ ? extractedValues
340
+ : [extractedValues]
341
+ }
342
+
343
+ // Deduplicate: skip if we already issued an identical load request
344
+ const loadRequestKey = serializeValue({
345
+ minValues: minValues ?? null,
346
+ offset,
347
+ limit,
348
+ })
349
+ if (lastLoadRequestKey === loadRequestKey) {
350
+ return undefined
351
+ }
352
+
353
+ const normalizedOrderBy = normalizeOrderByPaths(orderBy, alias)
354
+
355
+ return { minValues, normalizedOrderBy, loadRequestKey }
356
+ }