@tanstack/db 0.5.31 → 0.5.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/collection/subscription.cjs +6 -6
- package/dist/cjs/collection/subscription.cjs.map +1 -1
- package/dist/cjs/errors.cjs +8 -0
- package/dist/cjs/errors.cjs.map +1 -1
- package/dist/cjs/errors.d.cts +3 -0
- package/dist/cjs/index.cjs +5 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +1 -0
- package/dist/cjs/query/builder/types.d.cts +28 -31
- package/dist/cjs/query/compiler/index.cjs +3 -0
- package/dist/cjs/query/compiler/index.cjs.map +1 -1
- package/dist/cjs/query/effect.cjs +602 -0
- package/dist/cjs/query/effect.cjs.map +1 -0
- package/dist/cjs/query/effect.d.cts +94 -0
- package/dist/cjs/query/index.d.cts +1 -0
- package/dist/cjs/query/live/collection-config-builder.cjs +5 -74
- package/dist/cjs/query/live/collection-config-builder.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.cjs +33 -100
- package/dist/cjs/query/live/collection-subscriber.cjs.map +1 -1
- package/dist/cjs/query/live/collection-subscriber.d.cts +0 -1
- package/dist/cjs/query/live/utils.cjs +179 -0
- package/dist/cjs/query/live/utils.cjs.map +1 -0
- package/dist/cjs/query/live/utils.d.cts +109 -0
- package/dist/cjs/query/query-once.cjs +28 -0
- package/dist/cjs/query/query-once.cjs.map +1 -0
- package/dist/cjs/query/query-once.d.cts +57 -0
- package/dist/cjs/query/subset-dedupe.cjs +8 -7
- package/dist/cjs/query/subset-dedupe.cjs.map +1 -1
- package/dist/esm/collection/subscription.js +6 -6
- package/dist/esm/collection/subscription.js.map +1 -1
- package/dist/esm/errors.d.ts +3 -0
- package/dist/esm/errors.js +8 -0
- package/dist/esm/errors.js.map +1 -1
- package/dist/esm/index.d.ts +1 -0
- package/dist/esm/index.js +6 -1
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/query/builder/types.d.ts +28 -31
- package/dist/esm/query/compiler/index.js +4 -1
- package/dist/esm/query/compiler/index.js.map +1 -1
- package/dist/esm/query/effect.d.ts +94 -0
- package/dist/esm/query/effect.js +602 -0
- package/dist/esm/query/effect.js.map +1 -0
- package/dist/esm/query/index.d.ts +1 -0
- package/dist/esm/query/live/collection-config-builder.js +1 -70
- package/dist/esm/query/live/collection-config-builder.js.map +1 -1
- package/dist/esm/query/live/collection-subscriber.d.ts +0 -1
- package/dist/esm/query/live/collection-subscriber.js +31 -98
- package/dist/esm/query/live/collection-subscriber.js.map +1 -1
- package/dist/esm/query/live/utils.d.ts +109 -0
- package/dist/esm/query/live/utils.js +179 -0
- package/dist/esm/query/live/utils.js.map +1 -0
- package/dist/esm/query/query-once.d.ts +57 -0
- package/dist/esm/query/query-once.js +28 -0
- package/dist/esm/query/query-once.js.map +1 -0
- package/dist/esm/query/subset-dedupe.js +8 -7
- package/dist/esm/query/subset-dedupe.js.map +1 -1
- package/package.json +1 -1
- package/src/collection/subscription.ts +6 -6
- package/src/errors.ts +11 -0
- package/src/index.ts +11 -0
- package/src/query/builder/types.ts +64 -50
- package/src/query/compiler/index.ts +5 -0
- package/src/query/effect.ts +1119 -0
- package/src/query/index.ts +3 -0
- package/src/query/live/collection-config-builder.ts +6 -132
- package/src/query/live/collection-subscriber.ts +40 -156
- package/src/query/live/utils.ts +356 -0
- package/src/query/query-once.ts +115 -0
- package/src/query/subset-dedupe.ts +14 -15
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
import { MultiSet, serializeValue } from '@tanstack/db-ivm'
|
|
2
|
+
import { normalizeOrderByPaths } from '../compiler/expressions.js'
|
|
3
|
+
import { buildQuery, getQueryIR } from '../builder/index.js'
|
|
4
|
+
import type { MultiSetArray, RootStreamBuilder } from '@tanstack/db-ivm'
|
|
5
|
+
import type { Collection } from '../../collection/index.js'
|
|
6
|
+
import type { ChangeMessage } from '../../types.js'
|
|
7
|
+
import type { InitialQueryBuilder, QueryBuilder } from '../builder/index.js'
|
|
8
|
+
import type { Context } from '../builder/types.js'
|
|
9
|
+
import type { OrderBy, QueryIR } from '../ir.js'
|
|
10
|
+
import type { OrderByOptimizationInfo } from '../compiler/order-by.js'
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* Helper function to extract collections from a compiled query.
|
|
14
|
+
* Traverses the query IR to find all collection references.
|
|
15
|
+
* Maps collections by their ID (not alias) as expected by the compiler.
|
|
16
|
+
*/
|
|
17
|
+
export function extractCollectionsFromQuery(
|
|
18
|
+
query: any,
|
|
19
|
+
): Record<string, Collection<any, any, any>> {
|
|
20
|
+
const collections: Record<string, any> = {}
|
|
21
|
+
|
|
22
|
+
// Helper function to recursively extract collections from a query or source
|
|
23
|
+
function extractFromSource(source: any) {
|
|
24
|
+
if (source.type === `collectionRef`) {
|
|
25
|
+
collections[source.collection.id] = source.collection
|
|
26
|
+
} else if (source.type === `queryRef`) {
|
|
27
|
+
// Recursively extract from subquery
|
|
28
|
+
extractFromQuery(source.query)
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// Helper function to recursively extract collections from a query
|
|
33
|
+
function extractFromQuery(q: any) {
|
|
34
|
+
// Extract from FROM clause
|
|
35
|
+
if (q.from) {
|
|
36
|
+
extractFromSource(q.from)
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
// Extract from JOIN clauses
|
|
40
|
+
if (q.join && Array.isArray(q.join)) {
|
|
41
|
+
for (const joinClause of q.join) {
|
|
42
|
+
if (joinClause.from) {
|
|
43
|
+
extractFromSource(joinClause.from)
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// Start extraction from the root query
|
|
50
|
+
extractFromQuery(query)
|
|
51
|
+
|
|
52
|
+
return collections
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Helper function to extract the collection that is referenced in the query's FROM clause.
|
|
57
|
+
* The FROM clause may refer directly to a collection or indirectly to a subquery.
|
|
58
|
+
*/
|
|
59
|
+
export function extractCollectionFromSource(
|
|
60
|
+
query: any,
|
|
61
|
+
): Collection<any, any, any> {
|
|
62
|
+
const from = query.from
|
|
63
|
+
|
|
64
|
+
if (from.type === `collectionRef`) {
|
|
65
|
+
return from.collection
|
|
66
|
+
} else if (from.type === `queryRef`) {
|
|
67
|
+
// Recursively extract from subquery
|
|
68
|
+
return extractCollectionFromSource(from.query)
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
throw new Error(
|
|
72
|
+
`Failed to extract collection. Invalid FROM clause: ${JSON.stringify(query)}`,
|
|
73
|
+
)
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Extracts all aliases used for each collection across the entire query tree.
|
|
78
|
+
*
|
|
79
|
+
* Traverses the QueryIR recursively to build a map from collection ID to all aliases
|
|
80
|
+
* that reference that collection. This is essential for self-join support, where the
|
|
81
|
+
* same collection may be referenced multiple times with different aliases.
|
|
82
|
+
*
|
|
83
|
+
* For example, given a query like:
|
|
84
|
+
* ```ts
|
|
85
|
+
* q.from({ employee: employeesCollection })
|
|
86
|
+
* .join({ manager: employeesCollection }, ({ employee, manager }) =>
|
|
87
|
+
* eq(employee.managerId, manager.id)
|
|
88
|
+
* )
|
|
89
|
+
* ```
|
|
90
|
+
*
|
|
91
|
+
* This function would return:
|
|
92
|
+
* ```
|
|
93
|
+
* Map { "employees" => Set { "employee", "manager" } }
|
|
94
|
+
* ```
|
|
95
|
+
*
|
|
96
|
+
* @param query - The query IR to extract aliases from
|
|
97
|
+
* @returns A map from collection ID to the set of all aliases referencing that collection
|
|
98
|
+
*/
|
|
99
|
+
export function extractCollectionAliases(
|
|
100
|
+
query: QueryIR,
|
|
101
|
+
): Map<string, Set<string>> {
|
|
102
|
+
const aliasesById = new Map<string, Set<string>>()
|
|
103
|
+
|
|
104
|
+
function recordAlias(source: any) {
|
|
105
|
+
if (!source) return
|
|
106
|
+
|
|
107
|
+
if (source.type === `collectionRef`) {
|
|
108
|
+
const { id } = source.collection
|
|
109
|
+
const existing = aliasesById.get(id)
|
|
110
|
+
if (existing) {
|
|
111
|
+
existing.add(source.alias)
|
|
112
|
+
} else {
|
|
113
|
+
aliasesById.set(id, new Set([source.alias]))
|
|
114
|
+
}
|
|
115
|
+
} else if (source.type === `queryRef`) {
|
|
116
|
+
traverse(source.query)
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
function traverse(q?: QueryIR) {
|
|
121
|
+
if (!q) return
|
|
122
|
+
|
|
123
|
+
recordAlias(q.from)
|
|
124
|
+
|
|
125
|
+
if (q.join) {
|
|
126
|
+
for (const joinClause of q.join) {
|
|
127
|
+
recordAlias(joinClause.from)
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
traverse(query)
|
|
133
|
+
|
|
134
|
+
return aliasesById
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
/**
|
|
138
|
+
* Builds a query IR from a config object that contains either a query builder
|
|
139
|
+
* function or a QueryBuilder instance.
|
|
140
|
+
*/
|
|
141
|
+
export function buildQueryFromConfig<TContext extends Context>(config: {
|
|
142
|
+
query:
|
|
143
|
+
| ((q: InitialQueryBuilder) => QueryBuilder<TContext>)
|
|
144
|
+
| QueryBuilder<TContext>
|
|
145
|
+
}): QueryIR {
|
|
146
|
+
// Build the query using the provided query builder function or instance
|
|
147
|
+
if (typeof config.query === `function`) {
|
|
148
|
+
return buildQuery<TContext>(config.query)
|
|
149
|
+
}
|
|
150
|
+
return getQueryIR(config.query)
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
/**
|
|
154
|
+
* Helper function to send changes to a D2 input stream.
|
|
155
|
+
* Converts ChangeMessages to D2 MultiSet data and sends to the input.
|
|
156
|
+
*
|
|
157
|
+
* @returns The number of multiset entries sent
|
|
158
|
+
*/
|
|
159
|
+
export function sendChangesToInput(
|
|
160
|
+
input: RootStreamBuilder<unknown>,
|
|
161
|
+
changes: Iterable<ChangeMessage>,
|
|
162
|
+
getKey: (item: ChangeMessage[`value`]) => any,
|
|
163
|
+
): number {
|
|
164
|
+
const multiSetArray: MultiSetArray<unknown> = []
|
|
165
|
+
for (const change of changes) {
|
|
166
|
+
const key = getKey(change.value)
|
|
167
|
+
if (change.type === `insert`) {
|
|
168
|
+
multiSetArray.push([[key, change.value], 1])
|
|
169
|
+
} else if (change.type === `update`) {
|
|
170
|
+
multiSetArray.push([[key, change.previousValue], -1])
|
|
171
|
+
multiSetArray.push([[key, change.value], 1])
|
|
172
|
+
} else {
|
|
173
|
+
// change.type === `delete`
|
|
174
|
+
multiSetArray.push([[key, change.value], -1])
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
if (multiSetArray.length !== 0) {
|
|
179
|
+
input.sendData(new MultiSet(multiSetArray))
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
return multiSetArray.length
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
/** Splits updates into a delete of the old value and an insert of the new value */
|
|
186
|
+
export function* splitUpdates<
|
|
187
|
+
T extends object = Record<string, unknown>,
|
|
188
|
+
TKey extends string | number = string | number,
|
|
189
|
+
>(
|
|
190
|
+
changes: Iterable<ChangeMessage<T, TKey>>,
|
|
191
|
+
): Generator<ChangeMessage<T, TKey>> {
|
|
192
|
+
for (const change of changes) {
|
|
193
|
+
if (change.type === `update`) {
|
|
194
|
+
yield { type: `delete`, key: change.key, value: change.previousValue! }
|
|
195
|
+
yield { type: `insert`, key: change.key, value: change.value }
|
|
196
|
+
} else {
|
|
197
|
+
yield change
|
|
198
|
+
}
|
|
199
|
+
}
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
/**
|
|
203
|
+
* Filter changes to prevent duplicate inserts to a D2 pipeline.
|
|
204
|
+
* Maintains D2 multiplicity at 1 for visible items so that deletes
|
|
205
|
+
* properly reduce multiplicity to 0.
|
|
206
|
+
*
|
|
207
|
+
* Mutates `sentKeys` in place: adds keys on insert, removes on delete.
|
|
208
|
+
*/
|
|
209
|
+
export function filterDuplicateInserts(
|
|
210
|
+
changes: Array<ChangeMessage<any, string | number>>,
|
|
211
|
+
sentKeys: Set<string | number>,
|
|
212
|
+
): Array<ChangeMessage<any, string | number>> {
|
|
213
|
+
const filtered: Array<ChangeMessage<any, string | number>> = []
|
|
214
|
+
for (const change of changes) {
|
|
215
|
+
if (change.type === `insert`) {
|
|
216
|
+
if (sentKeys.has(change.key)) {
|
|
217
|
+
continue // Skip duplicate
|
|
218
|
+
}
|
|
219
|
+
sentKeys.add(change.key)
|
|
220
|
+
} else if (change.type === `delete`) {
|
|
221
|
+
sentKeys.delete(change.key)
|
|
222
|
+
}
|
|
223
|
+
filtered.push(change)
|
|
224
|
+
}
|
|
225
|
+
return filtered
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
/**
|
|
229
|
+
* Track the biggest value seen in a stream of changes, used for cursor-based
|
|
230
|
+
* pagination in ordered subscriptions. Returns whether the load request key
|
|
231
|
+
* should be reset (allowing another load).
|
|
232
|
+
*
|
|
233
|
+
* @param changes - changes to process (deletes are skipped)
|
|
234
|
+
* @param current - the current biggest value (or undefined if none)
|
|
235
|
+
* @param sentKeys - set of keys already sent to D2 (for new-key detection)
|
|
236
|
+
* @param comparator - orderBy comparator
|
|
237
|
+
* @returns `{ biggest, shouldResetLoadKey }` — the new biggest value and
|
|
238
|
+
* whether the caller should clear its last-load-request-key
|
|
239
|
+
*/
|
|
240
|
+
export function trackBiggestSentValue(
|
|
241
|
+
changes: Array<ChangeMessage<any, string | number>>,
|
|
242
|
+
current: unknown | undefined,
|
|
243
|
+
sentKeys: Set<string | number>,
|
|
244
|
+
comparator: (a: any, b: any) => number,
|
|
245
|
+
): { biggest: unknown; shouldResetLoadKey: boolean } {
|
|
246
|
+
let biggest = current
|
|
247
|
+
let shouldResetLoadKey = false
|
|
248
|
+
|
|
249
|
+
for (const change of changes) {
|
|
250
|
+
if (change.type === `delete`) continue
|
|
251
|
+
|
|
252
|
+
const isNewKey = !sentKeys.has(change.key)
|
|
253
|
+
|
|
254
|
+
if (biggest === undefined) {
|
|
255
|
+
biggest = change.value
|
|
256
|
+
shouldResetLoadKey = true
|
|
257
|
+
} else if (comparator(biggest, change.value) < 0) {
|
|
258
|
+
biggest = change.value
|
|
259
|
+
shouldResetLoadKey = true
|
|
260
|
+
} else if (isNewKey) {
|
|
261
|
+
// New key at same sort position — allow another load if needed
|
|
262
|
+
shouldResetLoadKey = true
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
|
|
266
|
+
return { biggest, shouldResetLoadKey }
|
|
267
|
+
}
|
|
268
|
+
|
|
269
|
+
/**
|
|
270
|
+
* Compute orderBy/limit subscription hints for an alias.
|
|
271
|
+
* Returns normalised orderBy and effective limit suitable for passing to
|
|
272
|
+
* `subscribeChanges`, or `undefined` values when the query's orderBy cannot
|
|
273
|
+
* be scoped to the given alias (e.g. cross-collection refs or aggregates).
|
|
274
|
+
*/
|
|
275
|
+
export function computeSubscriptionOrderByHints(
|
|
276
|
+
query: { orderBy?: OrderBy; limit?: number; offset?: number },
|
|
277
|
+
alias: string,
|
|
278
|
+
): { orderBy: OrderBy | undefined; limit: number | undefined } {
|
|
279
|
+
const { orderBy, limit, offset } = query
|
|
280
|
+
const effectiveLimit =
|
|
281
|
+
limit !== undefined && offset !== undefined ? limit + offset : limit
|
|
282
|
+
|
|
283
|
+
const normalizedOrderBy = orderBy
|
|
284
|
+
? normalizeOrderByPaths(orderBy, alias)
|
|
285
|
+
: undefined
|
|
286
|
+
|
|
287
|
+
// Only pass orderBy when it is scoped to this alias and uses simple refs,
|
|
288
|
+
// to avoid leaking cross-collection paths into backend-specific compilers.
|
|
289
|
+
const canPassOrderBy =
|
|
290
|
+
normalizedOrderBy?.every((clause) => {
|
|
291
|
+
const exp = clause.expression
|
|
292
|
+
if (exp.type !== `ref`) return false
|
|
293
|
+
const path = exp.path
|
|
294
|
+
return Array.isArray(path) && path.length === 1
|
|
295
|
+
}) ?? false
|
|
296
|
+
|
|
297
|
+
return {
|
|
298
|
+
orderBy: canPassOrderBy ? normalizedOrderBy : undefined,
|
|
299
|
+
limit: canPassOrderBy ? effectiveLimit : undefined,
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
|
+
|
|
303
|
+
/**
|
|
304
|
+
* Compute the cursor for loading the next batch of ordered data.
|
|
305
|
+
* Extracts values from the biggest sent row and builds the `minValues`
|
|
306
|
+
* array and a deduplication key.
|
|
307
|
+
*
|
|
308
|
+
* @returns `undefined` if the load should be skipped (duplicate request),
|
|
309
|
+
* otherwise `{ minValues, normalizedOrderBy, loadRequestKey }`.
|
|
310
|
+
*/
|
|
311
|
+
export function computeOrderedLoadCursor(
|
|
312
|
+
orderByInfo: Pick<
|
|
313
|
+
OrderByOptimizationInfo,
|
|
314
|
+
'orderBy' | 'valueExtractorForRawRow' | 'offset'
|
|
315
|
+
>,
|
|
316
|
+
biggestSentRow: unknown | undefined,
|
|
317
|
+
lastLoadRequestKey: string | undefined,
|
|
318
|
+
alias: string,
|
|
319
|
+
limit: number,
|
|
320
|
+
):
|
|
321
|
+
| {
|
|
322
|
+
minValues: Array<unknown> | undefined
|
|
323
|
+
normalizedOrderBy: OrderBy
|
|
324
|
+
loadRequestKey: string
|
|
325
|
+
}
|
|
326
|
+
| undefined {
|
|
327
|
+
const { orderBy, valueExtractorForRawRow, offset } = orderByInfo
|
|
328
|
+
|
|
329
|
+
// Extract all orderBy column values from the biggest sent row
|
|
330
|
+
// For single-column: returns single value, for multi-column: returns array
|
|
331
|
+
const extractedValues = biggestSentRow
|
|
332
|
+
? valueExtractorForRawRow(biggestSentRow as Record<string, unknown>)
|
|
333
|
+
: undefined
|
|
334
|
+
|
|
335
|
+
// Normalize to array format for minValues
|
|
336
|
+
let minValues: Array<unknown> | undefined
|
|
337
|
+
if (extractedValues !== undefined) {
|
|
338
|
+
minValues = Array.isArray(extractedValues)
|
|
339
|
+
? extractedValues
|
|
340
|
+
: [extractedValues]
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
// Deduplicate: skip if we already issued an identical load request
|
|
344
|
+
const loadRequestKey = serializeValue({
|
|
345
|
+
minValues: minValues ?? null,
|
|
346
|
+
offset,
|
|
347
|
+
limit,
|
|
348
|
+
})
|
|
349
|
+
if (lastLoadRequestKey === loadRequestKey) {
|
|
350
|
+
return undefined
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
const normalizedOrderBy = normalizeOrderByPaths(orderBy, alias)
|
|
354
|
+
|
|
355
|
+
return { minValues, normalizedOrderBy, loadRequestKey }
|
|
356
|
+
}
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
import { createLiveQueryCollection } from './live-query-collection.js'
|
|
2
|
+
import type { InitialQueryBuilder, QueryBuilder } from './builder/index.js'
|
|
3
|
+
import type { Context, InferResultType } from './builder/types.js'
|
|
4
|
+
|
|
5
|
+
/**
 * Configuration options for the one-shot `queryOnce` helper.
 */
export interface QueryOnceConfig<TContext extends Context> {
  /**
   * The query to execute once: either a builder callback that receives the
   * initial query builder, or an already-constructed QueryBuilder instance.
   */
  query:
    | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)
    | QueryBuilder<TContext>
  // Future: timeout, signal, etc.
}
|
|
17
|
+
|
|
18
|
+
// Overload 1: Simple query function returning array (non-single result)
|
|
19
|
+
/**
|
|
20
|
+
* Executes a one-shot query and returns the results as an array.
|
|
21
|
+
*
|
|
22
|
+
* This function creates a live query collection, preloads it, extracts the results,
|
|
23
|
+
* and automatically cleans up the collection. It's ideal for:
|
|
24
|
+
* - AI/LLM context building
|
|
25
|
+
* - Data export
|
|
26
|
+
* - Background processing
|
|
27
|
+
* - Testing
|
|
28
|
+
*
|
|
29
|
+
* @param queryFn - A function that receives the query builder and returns a query
|
|
30
|
+
* @returns A promise that resolves to an array of query results
|
|
31
|
+
*
|
|
32
|
+
* @example
|
|
33
|
+
* ```typescript
|
|
34
|
+
* // Basic query
|
|
35
|
+
* const users = await queryOnce((q) =>
|
|
36
|
+
* q.from({ user: usersCollection })
|
|
37
|
+
* )
|
|
38
|
+
*
|
|
39
|
+
* // With filtering and projection
|
|
40
|
+
* const activeUserNames = await queryOnce((q) =>
|
|
41
|
+
* q.from({ user: usersCollection })
|
|
42
|
+
* .where(({ user }) => eq(user.active, true))
|
|
43
|
+
* .select(({ user }) => ({ name: user.name }))
|
|
44
|
+
* )
|
|
45
|
+
* ```
|
|
46
|
+
*/
|
|
47
|
+
export function queryOnce<TContext extends Context>(
|
|
48
|
+
queryFn: (q: InitialQueryBuilder) => QueryBuilder<TContext>,
|
|
49
|
+
): Promise<InferResultType<TContext>>
|
|
50
|
+
|
|
51
|
+
// Overload 2: Config object form returning array (non-single result)
|
|
52
|
+
/**
|
|
53
|
+
* Executes a one-shot query using a configuration object.
|
|
54
|
+
*
|
|
55
|
+
* @param config - Configuration object with the query function
|
|
56
|
+
* @returns A promise that resolves to an array of query results
|
|
57
|
+
*
|
|
58
|
+
* @example
|
|
59
|
+
* ```typescript
|
|
60
|
+
* const recentOrders = await queryOnce({
|
|
61
|
+
* query: (q) =>
|
|
62
|
+
* q.from({ order: ordersCollection })
|
|
63
|
+
* .orderBy(({ order }) => desc(order.createdAt))
|
|
64
|
+
* .limit(100),
|
|
65
|
+
* })
|
|
66
|
+
* ```
|
|
67
|
+
*/
|
|
68
|
+
export function queryOnce<TContext extends Context>(
|
|
69
|
+
config: QueryOnceConfig<TContext>,
|
|
70
|
+
): Promise<InferResultType<TContext>>
|
|
71
|
+
|
|
72
|
+
// Implementation
|
|
73
|
+
export async function queryOnce<TContext extends Context>(
|
|
74
|
+
configOrQuery:
|
|
75
|
+
| QueryOnceConfig<TContext>
|
|
76
|
+
| ((q: InitialQueryBuilder) => QueryBuilder<TContext>),
|
|
77
|
+
): Promise<InferResultType<TContext>> {
|
|
78
|
+
// Normalize input
|
|
79
|
+
const config: QueryOnceConfig<TContext> =
|
|
80
|
+
typeof configOrQuery === `function`
|
|
81
|
+
? { query: configOrQuery }
|
|
82
|
+
: configOrQuery
|
|
83
|
+
|
|
84
|
+
const query = (q: InitialQueryBuilder) => {
|
|
85
|
+
const queryConfig = config.query
|
|
86
|
+
return typeof queryConfig === `function` ? queryConfig(q) : queryConfig
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Create collection with minimal GC time; preload handles sync start
|
|
90
|
+
const collection = createLiveQueryCollection({
|
|
91
|
+
query,
|
|
92
|
+
gcTime: 1, // Cleanup in next tick when no subscribers (0 disables GC)
|
|
93
|
+
})
|
|
94
|
+
|
|
95
|
+
try {
|
|
96
|
+
// Wait for initial data load
|
|
97
|
+
await collection.preload()
|
|
98
|
+
|
|
99
|
+
// Check if this is a single-result query (findOne was called)
|
|
100
|
+
const isSingleResult =
|
|
101
|
+
(collection.config as { singleResult?: boolean }).singleResult === true
|
|
102
|
+
|
|
103
|
+
// Extract and return results
|
|
104
|
+
if (isSingleResult) {
|
|
105
|
+
const first = collection.values().next().value as
|
|
106
|
+
| InferResultType<TContext>
|
|
107
|
+
| undefined
|
|
108
|
+
return first as InferResultType<TContext>
|
|
109
|
+
}
|
|
110
|
+
return collection.toArray as InferResultType<TContext>
|
|
111
|
+
} finally {
|
|
112
|
+
// Always cleanup, even on error
|
|
113
|
+
await collection.cleanup()
|
|
114
|
+
}
|
|
115
|
+
}
|
|
@@ -126,28 +126,29 @@ export class DeduplicatedLoadSubset {
|
|
|
126
126
|
return prom
|
|
127
127
|
}
|
|
128
128
|
|
|
129
|
-
// Not fully covered by existing data
|
|
130
|
-
//
|
|
131
|
-
//
|
|
132
|
-
|
|
129
|
+
// Not fully covered by existing data — load the missing subset.
|
|
130
|
+
// We need two clones: trackingOptions preserves the original predicate for
|
|
131
|
+
// accurate tracking (e.g., where=undefined means "all data"), while loadOptions
|
|
132
|
+
// may be narrowed with a difference expression for the actual backend request.
|
|
133
|
+
const trackingOptions = cloneOptions(options)
|
|
134
|
+
const loadOptions = cloneOptions(options)
|
|
133
135
|
if (this.unlimitedWhere !== undefined && options.limit === undefined) {
|
|
134
136
|
// Compute difference to get only the missing data
|
|
135
137
|
// We can only do this for unlimited queries
|
|
136
138
|
// and we can only remove data that was loaded from unlimited queries
|
|
137
139
|
// because with limited queries we have no way to express that we already loaded part of the matching data
|
|
138
|
-
|
|
139
|
-
minusWherePredicates(
|
|
140
|
-
|
|
140
|
+
loadOptions.where =
|
|
141
|
+
minusWherePredicates(loadOptions.where, this.unlimitedWhere) ??
|
|
142
|
+
loadOptions.where
|
|
141
143
|
}
|
|
142
144
|
|
|
143
145
|
// Call underlying loadSubset to load the missing data
|
|
144
|
-
const resultPromise = this._loadSubset(
|
|
146
|
+
const resultPromise = this._loadSubset(loadOptions)
|
|
145
147
|
|
|
146
148
|
// Handle both sync (true) and async (Promise<void>) return values
|
|
147
149
|
if (resultPromise === true) {
|
|
148
|
-
// Sync return - update tracking
|
|
149
|
-
|
|
150
|
-
this.updateTracking(clonedOptions)
|
|
150
|
+
// Sync return - update tracking with the original predicate
|
|
151
|
+
this.updateTracking(trackingOptions)
|
|
151
152
|
return true
|
|
152
153
|
} else {
|
|
153
154
|
// Async return - track the promise and update tracking after it resolves
|
|
@@ -158,16 +159,14 @@ export class DeduplicatedLoadSubset {
|
|
|
158
159
|
|
|
159
160
|
// We need to create a reference to the in-flight entry so we can remove it later
|
|
160
161
|
const inflightEntry = {
|
|
161
|
-
options:
|
|
162
|
+
options: loadOptions, // Store load options for subset matching of in-flight requests
|
|
162
163
|
promise: resultPromise
|
|
163
164
|
.then((result) => {
|
|
164
165
|
// Only update tracking if this request is still from the current generation
|
|
165
166
|
// If reset() was called, the generation will have incremented and we should
|
|
166
167
|
// not repopulate the state that was just cleared
|
|
167
168
|
if (capturedGeneration === this.generation) {
|
|
168
|
-
|
|
169
|
-
// This ensures we track exactly what was loaded, not what the caller changed
|
|
170
|
-
this.updateTracking(clonedOptions)
|
|
169
|
+
this.updateTracking(trackingOptions)
|
|
171
170
|
}
|
|
172
171
|
return result
|
|
173
172
|
})
|