@tanstack/db 0.0.22 → 0.0.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/dist/cjs/collection.cjs +14 -6
  2. package/dist/cjs/collection.cjs.map +1 -1
  3. package/dist/cjs/collection.d.cts +10 -9
  4. package/dist/cjs/local-storage.cjs +1 -1
  5. package/dist/cjs/local-storage.cjs.map +1 -1
  6. package/dist/cjs/proxy.cjs +21 -0
  7. package/dist/cjs/proxy.cjs.map +1 -1
  8. package/dist/cjs/query/builder/index.cjs +72 -0
  9. package/dist/cjs/query/builder/index.cjs.map +1 -1
  10. package/dist/cjs/query/builder/index.d.cts +64 -0
  11. package/dist/cjs/query/compiler/index.cjs +44 -8
  12. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  13. package/dist/cjs/query/compiler/index.d.cts +4 -7
  14. package/dist/cjs/query/compiler/joins.cjs +14 -6
  15. package/dist/cjs/query/compiler/joins.cjs.map +1 -1
  16. package/dist/cjs/query/compiler/joins.d.cts +4 -8
  17. package/dist/cjs/query/compiler/types.d.cts +10 -0
  18. package/dist/cjs/query/optimizer.cjs +283 -0
  19. package/dist/cjs/query/optimizer.cjs.map +1 -0
  20. package/dist/cjs/query/optimizer.d.cts +42 -0
  21. package/dist/cjs/transactions.cjs.map +1 -1
  22. package/dist/cjs/transactions.d.cts +5 -5
  23. package/dist/cjs/types.d.cts +35 -10
  24. package/dist/cjs/utils.cjs +42 -0
  25. package/dist/cjs/utils.cjs.map +1 -0
  26. package/dist/cjs/utils.d.cts +18 -0
  27. package/dist/esm/collection.d.ts +10 -9
  28. package/dist/esm/collection.js +14 -6
  29. package/dist/esm/collection.js.map +1 -1
  30. package/dist/esm/local-storage.js +1 -1
  31. package/dist/esm/local-storage.js.map +1 -1
  32. package/dist/esm/proxy.js +21 -0
  33. package/dist/esm/proxy.js.map +1 -1
  34. package/dist/esm/query/builder/index.d.ts +64 -0
  35. package/dist/esm/query/builder/index.js +72 -0
  36. package/dist/esm/query/builder/index.js.map +1 -1
  37. package/dist/esm/query/compiler/index.d.ts +4 -7
  38. package/dist/esm/query/compiler/index.js +44 -8
  39. package/dist/esm/query/compiler/index.js.map +1 -1
  40. package/dist/esm/query/compiler/joins.d.ts +4 -8
  41. package/dist/esm/query/compiler/joins.js +14 -6
  42. package/dist/esm/query/compiler/joins.js.map +1 -1
  43. package/dist/esm/query/compiler/types.d.ts +10 -0
  44. package/dist/esm/query/optimizer.d.ts +42 -0
  45. package/dist/esm/query/optimizer.js +283 -0
  46. package/dist/esm/query/optimizer.js.map +1 -0
  47. package/dist/esm/transactions.d.ts +5 -5
  48. package/dist/esm/transactions.js.map +1 -1
  49. package/dist/esm/types.d.ts +35 -10
  50. package/dist/esm/utils.d.ts +18 -0
  51. package/dist/esm/utils.js +42 -0
  52. package/dist/esm/utils.js.map +1 -0
  53. package/package.json +1 -1
  54. package/src/collection.ts +62 -21
  55. package/src/local-storage.ts +2 -2
  56. package/src/proxy.ts +24 -0
  57. package/src/query/builder/index.ts +104 -0
  58. package/src/query/compiler/index.ts +85 -18
  59. package/src/query/compiler/joins.ts +21 -13
  60. package/src/query/compiler/types.ts +12 -0
  61. package/src/query/optimizer.ts +738 -0
  62. package/src/transactions.ts +8 -12
  63. package/src/types.ts +69 -14
  64. package/src/utils.ts +86 -0
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@tanstack/db",
  "description": "A reactive client store for building super fast apps on sync",
- "version": "0.0.22",
+ "version": "0.0.24",
  "dependencies": {
  "@electric-sql/d2mini": "^0.1.7",
  "@standard-schema/spec": "^1.0.0"
package/src/collection.ts CHANGED
@@ -12,15 +12,17 @@ import type {
  OperationConfig,
  OptimisticChangeMessage,
  PendingMutation,
+ ResolveInsertInput,
  ResolveType,
  StandardSchema,
  Transaction as TransactionType,
+ TransactionWithMutations,
  UtilsRecord,
  } from "./types"
  import type { StandardSchemaV1 } from "@standard-schema/spec"

  // Store collections in memory
- export const collectionsStore = new Map<string, CollectionImpl<any, any>>()
+ export const collectionsStore = new Map<string, CollectionImpl<any, any, any>>()

  interface PendingSyncedTransaction<T extends object = Record<string, unknown>> {
  committed: boolean
@@ -32,12 +34,15 @@ interface PendingSyncedTransaction<T extends object = Record<string, unknown>> {
  * @template T - The type of items in the collection
  * @template TKey - The type of the key for the collection
  * @template TUtils - The utilities record type
+ * @template TInsertInput - The type for insert operations (can be different from T for schemas with defaults)
  */
  export interface Collection<
  T extends object = Record<string, unknown>,
  TKey extends string | number = string | number,
  TUtils extends UtilsRecord = {},
- > extends CollectionImpl<T, TKey> {
+ TSchema extends StandardSchemaV1 = StandardSchemaV1,
+ TInsertInput extends object = T,
+ > extends CollectionImpl<T, TKey, TUtils, TSchema, TInsertInput> {
  readonly utils: TUtils
  }

@@ -124,12 +129,22 @@ export function createCollection<
  options: CollectionConfig<
  ResolveType<TExplicit, TSchema, TFallback>,
  TKey,
- TSchema
+ TSchema,
+ ResolveInsertInput<TExplicit, TSchema, TFallback>
  > & { utils?: TUtils }
- ): Collection<ResolveType<TExplicit, TSchema, TFallback>, TKey, TUtils> {
+ ): Collection<
+ ResolveType<TExplicit, TSchema, TFallback>,
+ TKey,
+ TUtils,
+ TSchema,
+ ResolveInsertInput<TExplicit, TSchema, TFallback>
+ > {
  const collection = new CollectionImpl<
  ResolveType<TExplicit, TSchema, TFallback>,
- TKey
+ TKey,
+ TUtils,
+ TSchema,
+ ResolveInsertInput<TExplicit, TSchema, TFallback>
  >(options)

  // Copy utils to both top level and .utils namespace
@@ -142,7 +157,9 @@ export function createCollection<
  return collection as Collection<
  ResolveType<TExplicit, TSchema, TFallback>,
  TKey,
- TUtils
+ TUtils,
+ TSchema,
+ ResolveInsertInput<TExplicit, TSchema, TFallback>
  >
  }

@@ -179,8 +196,10 @@ export class CollectionImpl<
  T extends object = Record<string, unknown>,
  TKey extends string | number = string | number,
  TUtils extends UtilsRecord = {},
+ TSchema extends StandardSchemaV1 = StandardSchemaV1,
+ TInsertInput extends object = T,
  > {
- public config: CollectionConfig<T, TKey, any>
+ public config: CollectionConfig<T, TKey, TSchema, TInsertInput>

  // Core state - make public for testing
  public transactions: SortedMap<string, Transaction<any>>
@@ -312,7 +331,7 @@ export class CollectionImpl<
  * @param config - Configuration object for the collection
  * @throws Error if sync config is missing
  */
- constructor(config: CollectionConfig<T, TKey, any>) {
+ constructor(config: CollectionConfig<T, TKey, TSchema, TInsertInput>) {
  // eslint-disable-next-line
  if (!config) {
  throw new Error(`Collection requires a config`)
@@ -1322,9 +1341,11 @@ export class CollectionImpl<
  * console.log('Insert failed:', error)
  * }
  */
- insert = (data: T | Array<T>, config?: InsertConfig) => {
+ insert = (
+ data: TInsertInput | Array<TInsertInput>,
+ config?: InsertConfig
+ ) => {
  this.validateCollectionUsable(`insert`)
-
  const ambientTransaction = getActiveTransaction()

  // If no ambient transaction exists, check for an onInsert handler early
@@ -1335,7 +1356,7 @@
  }

  const items = Array.isArray(data) ? data : [data]
- const mutations: Array<PendingMutation<T, `insert`>> = []
+ const mutations: Array<PendingMutation<T>> = []

  // Create mutations for each item
  items.forEach((item) => {
@@ -1343,7 +1364,7 @@
  const validatedData = this.validateData(item, `insert`)

  // Check if an item with this ID already exists in the collection
- const key = this.getKeyFromItem(item)
+ const key = this.getKeyFromItem(validatedData)
  if (this.has(key)) {
  throw `Cannot insert document with ID "${key}" because it already exists in the collection`
  }
@@ -1353,7 +1374,15 @@
  mutationId: crypto.randomUUID(),
  original: {},
  modified: validatedData,
- changes: validatedData,
+ // Pick the values from validatedData based on what's passed in - this is for cases
+ // where a schema has default values. The validated data has the extra default
+ // values but for changes, we just want to show the data that was actually passed in.
+ changes: Object.fromEntries(
+ Object.keys(item).map((k) => [
+ k,
+ validatedData[k as keyof typeof validatedData],
+ ])
+ ) as TInsertInput,
  globalKey,
  key,
  metadata: config?.metadata as unknown,
@@ -1381,8 +1410,12 @@
  const directOpTransaction = createTransaction<T>({
  mutationFn: async (params) => {
  // Call the onInsert handler with the transaction and collection
- return this.config.onInsert!({
- ...params,
+ return await this.config.onInsert!({
+ transaction:
+ params.transaction as unknown as TransactionWithMutations<
+ TInsertInput,
+ `insert`
+ >,
  collection: this as unknown as Collection<T, TKey, TUtils>,
  })
  },
@@ -1526,7 +1559,7 @@
  }

  // Create mutations for each object that has changes
- const mutations: Array<PendingMutation<T, `update`>> = keysArray
+ const mutations: Array<PendingMutation<T, `update`, this>> = keysArray
  .map((key, index) => {
  const itemChanges = changesArray[index] // User-provided changes for this specific item

@@ -1581,7 +1614,7 @@
  collection: this,
  }
  })
- .filter(Boolean) as Array<PendingMutation<T, `update`>>
+ .filter(Boolean) as Array<PendingMutation<T, `update`, this>>

  // If no changes were made, return an empty transaction early
  if (mutations.length === 0) {
@@ -1609,7 +1642,11 @@
  mutationFn: async (params) => {
  // Call the onUpdate handler with the transaction and collection
  return this.config.onUpdate!({
- ...params,
+ transaction:
+ params.transaction as unknown as TransactionWithMutations<
+ T,
+ `update`
+ >,
  collection: this as unknown as Collection<T, TKey, TUtils>,
  })
  },
@@ -1677,7 +1714,7 @@
  }

  const keysArray = Array.isArray(keys) ? keys : [keys]
- const mutations: Array<PendingMutation<T, `delete`>> = []
+ const mutations: Array<PendingMutation<T, `delete`, this>> = []

  for (const key of keysArray) {
  if (!this.has(key)) {
@@ -1686,7 +1723,7 @@
  )
  }
  const globalKey = this.generateGlobalKey(key, this.get(key)!)
- const mutation: PendingMutation<T, `delete`> = {
+ const mutation: PendingMutation<T, `delete`, this> = {
  mutationId: crypto.randomUUID(),
  original: this.get(key)!,
  modified: this.get(key)!,
@@ -1724,7 +1761,11 @@
  mutationFn: async (params) => {
  // Call the onDelete handler with the transaction and collection
  return this.config.onDelete!({
- ...params,
+ transaction:
+ params.transaction as unknown as TransactionWithMutations<
+ T,
+ `delete`
+ >,
  collection: this as unknown as Collection<T, TKey, TUtils>,
  })
  },
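The new `TInsertInput` parameter is easiest to see with a schema that supplies defaults: `insert()` accepts the schema's input type, reads still return the fully resolved row, and `changes` on the resulting mutation echoes only the fields the caller actually passed. A minimal sketch, assuming a Standard Schema compatible validator (zod is only illustrative here) and a placeholder sync config:

```ts
import { z } from "zod"
import { createCollection } from "@tanstack/db"

// Hypothetical schema: `createdAt` and `tags` are defaulted, so they are
// optional in the insert input type but always present on the resolved row.
const todoSchema = z.object({
  id: z.string(),
  title: z.string(),
  tags: z.array(z.string()).default([]),
  createdAt: z.date().default(() => new Date()),
})

const todos = createCollection({
  schema: todoSchema,
  getKey: (todo) => todo.id,
  sync: { sync: () => {} }, // placeholder sync config, just for this sketch
  onInsert: async ({ transaction }) => {
    const mutation = transaction.mutations[0]
    // mutation.changes  -> only the fields passed to insert()
    // mutation.modified -> the validated row, defaults included
    console.log(mutation.changes, mutation.modified)
  },
})

// `createdAt` and `tags` can be omitted on insert (TInsertInput),
// while rows read back from the collection carry them (T).
todos.insert({ id: "1", title: "write docs" })
```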
package/src/local-storage.ts CHANGED
@@ -393,7 +393,7 @@ export function localStorageCollectionOptions<
  // Remove items
  params.transaction.mutations.forEach((mutation) => {
  // For delete operations, mutation.original contains the full object
- const key = config.getKey(mutation.original)
+ const key = config.getKey(mutation.original as ResolvedType)
  currentData.delete(key)
  })

@@ -506,7 +506,7 @@ function createLocalStorageSync<T extends object>(
  storageKey: string,
  storage: StorageApi,
  storageEventApi: StorageEventApi,
- getKey: (item: T) => string | number,
+ _getKey: (item: T) => string | number,
  lastKnownData: Map<string | number, StoredItem<T>>
  ): SyncConfig<T> & { manualTrigger?: () => void } {
  let syncParams: Parameters<SyncConfig<T>[`sync`]>[0] | null = null
package/src/proxy.ts CHANGED
@@ -461,6 +461,30 @@ export function createChangeProxy<

  // If the value is a function, bind it to the ptarget
  if (typeof value === `function`) {
+ // For Array methods that modify the array
+ if (Array.isArray(ptarget)) {
+ const methodName = prop.toString()
+ const modifyingMethods = new Set([
+ `pop`,
+ `push`,
+ `shift`,
+ `unshift`,
+ `splice`,
+ `sort`,
+ `reverse`,
+ `fill`,
+ `copyWithin`,
+ ])
+
+ if (modifyingMethods.has(methodName)) {
+ return function (...args: Array<unknown>) {
+ const result = value.apply(changeTracker.copy_, args)
+ markChanged(changeTracker)
+ return result
+ }
+ }
+ }
+
  // For Map and Set methods that modify the collection
  if (ptarget instanceof Map || ptarget instanceof Set) {
  const methodName = prop.toString()
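For callers, the practical effect is that in-place array mutations made inside a draft callback are now tracked like any other change. A brief sketch, reusing the hypothetical `todos` collection from the earlier example and assuming the draft-callback form of `update`:

```ts
// Previously, mutating an array in place via push()/sort()/splice() updated the
// draft's copy without flagging the parent object as changed, so the edit could
// be dropped; these methods now mark the change tracker before returning.
todos.update("1", (draft) => {
  draft.tags.push("urgent")
  draft.tags.sort()
})
```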
package/src/query/builder/index.ts CHANGED
@@ -184,6 +184,110 @@ export class BaseQueryBuilder<TContext extends Context = Context> {
  }) as any
  }

+ /**
+ * Perform a LEFT JOIN with another table or subquery
+ *
+ * @param source - An object with a single key-value pair where the key is the table alias and the value is a Collection or subquery
+ * @param onCallback - A function that receives table references and returns the join condition
+ * @returns A QueryBuilder with the left joined table available
+ *
+ * @example
+ * ```ts
+ * // Left join users with posts
+ * query
+ * .from({ users: usersCollection })
+ * .leftJoin({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.userId))
+ * ```
+ */
+ leftJoin<TSource extends Source>(
+ source: TSource,
+ onCallback: JoinOnCallback<
+ MergeContext<TContext, SchemaFromSource<TSource>>
+ >
+ ): QueryBuilder<
+ MergeContextWithJoinType<TContext, SchemaFromSource<TSource>, `left`>
+ > {
+ return this.join(source, onCallback, `left`)
+ }
+
+ /**
+ * Perform a RIGHT JOIN with another table or subquery
+ *
+ * @param source - An object with a single key-value pair where the key is the table alias and the value is a Collection or subquery
+ * @param onCallback - A function that receives table references and returns the join condition
+ * @returns A QueryBuilder with the right joined table available
+ *
+ * @example
+ * ```ts
+ * // Right join users with posts
+ * query
+ * .from({ users: usersCollection })
+ * .rightJoin({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.userId))
+ * ```
+ */
+ rightJoin<TSource extends Source>(
+ source: TSource,
+ onCallback: JoinOnCallback<
+ MergeContext<TContext, SchemaFromSource<TSource>>
+ >
+ ): QueryBuilder<
+ MergeContextWithJoinType<TContext, SchemaFromSource<TSource>, `right`>
+ > {
+ return this.join(source, onCallback, `right`)
+ }
+
+ /**
+ * Perform an INNER JOIN with another table or subquery
+ *
+ * @param source - An object with a single key-value pair where the key is the table alias and the value is a Collection or subquery
+ * @param onCallback - A function that receives table references and returns the join condition
+ * @returns A QueryBuilder with the inner joined table available
+ *
+ * @example
+ * ```ts
+ * // Inner join users with posts
+ * query
+ * .from({ users: usersCollection })
+ * .innerJoin({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.userId))
+ * ```
+ */
+ innerJoin<TSource extends Source>(
+ source: TSource,
+ onCallback: JoinOnCallback<
+ MergeContext<TContext, SchemaFromSource<TSource>>
+ >
+ ): QueryBuilder<
+ MergeContextWithJoinType<TContext, SchemaFromSource<TSource>, `inner`>
+ > {
+ return this.join(source, onCallback, `inner`)
+ }
+
+ /**
+ * Perform a FULL JOIN with another table or subquery
+ *
+ * @param source - An object with a single key-value pair where the key is the table alias and the value is a Collection or subquery
+ * @param onCallback - A function that receives table references and returns the join condition
+ * @returns A QueryBuilder with the full joined table available
+ *
+ * @example
+ * ```ts
+ * // Full join users with posts
+ * query
+ * .from({ users: usersCollection })
+ * .fullJoin({ posts: postsCollection }, ({users, posts}) => eq(users.id, posts.userId))
+ * ```
+ */
+ fullJoin<TSource extends Source>(
+ source: TSource,
+ onCallback: JoinOnCallback<
+ MergeContext<TContext, SchemaFromSource<TSource>>
+ >
+ ): QueryBuilder<
+ MergeContextWithJoinType<TContext, SchemaFromSource<TSource>, `full`>
+ > {
+ return this.join(source, onCallback, `full`)
+ }
+
  /**
  * Filter rows based on a condition
  *
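Each of the four variants simply forwards to `join()` with the join type fixed, so they chain like any other builder call. A sketch in the style of the doc comments above (`usersCollection`, `postsCollection`, and `commentsCollection` are assumed placeholders):

```ts
// Chain two of the new convenience joins; eq() is the same expression helper
// used in the @example blocks above.
query
  .from({ users: usersCollection })
  .leftJoin({ posts: postsCollection }, ({ users, posts }) =>
    eq(users.id, posts.userId)
  )
  .innerJoin({ comments: commentsCollection }, ({ posts, comments }) =>
    eq(posts.id, comments.postId)
  )
```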
package/src/query/compiler/index.ts CHANGED
@@ -1,4 +1,5 @@
  import { distinct, filter, map } from "@electric-sql/d2mini"
+ import { optimizeQuery } from "../optimizer.js"
  import { compileExpression } from "./evaluators.js"
  import { processJoins } from "./joins.js"
  import { processGroupBy } from "./group-by.js"
@@ -10,30 +11,35 @@ import type {
  NamespacedAndKeyedStream,
  ResultStream,
  } from "../../types.js"
-
- /**
- * Cache for compiled subqueries to avoid duplicate compilation
- */
- type QueryCache = WeakMap<QueryIR, ResultStream>
+ import type { QueryCache, QueryMapping } from "./types.js"

  /**
  * Compiles a query2 IR into a D2 pipeline
- * @param query The query IR to compile
+ * @param rawQuery The query IR to compile
  * @param inputs Mapping of collection names to input streams
  * @param cache Optional cache for compiled subqueries (used internally for recursion)
+ * @param queryMapping Optional mapping from optimized queries to original queries
  * @returns A stream builder representing the compiled query
  */
  export function compileQuery(
- query: QueryIR,
+ rawQuery: QueryIR,
  inputs: Record<string, KeyedStream>,
- cache: QueryCache = new WeakMap()
+ cache: QueryCache = new WeakMap(),
+ queryMapping: QueryMapping = new WeakMap()
  ): ResultStream {
- // Check if this query has already been compiled
- const cachedResult = cache.get(query)
+ // Check if the original raw query has already been compiled
+ const cachedResult = cache.get(rawQuery)
  if (cachedResult) {
  return cachedResult
  }

+ // Optimize the query before compilation
+ const query = optimizeQuery(rawQuery)
+
+ // Create mapping from optimized query to original for caching
+ queryMapping.set(query, rawQuery)
+ mapNestedQueries(query, rawQuery, queryMapping)
+
  // Create a copy of the inputs map to avoid modifying the original
  const allInputs = { ...inputs }

@@ -44,7 +50,8 @@ export function compileQuery(
  const { alias: mainTableAlias, input: mainInput } = processFrom(
  query.from,
  allInputs,
- cache
+ cache,
+ queryMapping
  )
  tables[mainTableAlias] = mainInput

@@ -68,7 +75,8 @@ export function compileQuery(
  tables,
  mainTableAlias,
  allInputs,
- cache
+ cache,
+ queryMapping
  )
  }

@@ -218,8 +226,8 @@
  )

  const result = resultPipeline
- // Cache the result before returning
- cache.set(query, result)
+ // Cache the result before returning (use original query as key)
+ cache.set(rawQuery, result)
  return result
  } else if (query.limit !== undefined || query.offset !== undefined) {
  // If there's a limit or offset without orderBy, throw an error
@@ -241,8 +249,8 @@
  )

  const result = resultPipeline
- // Cache the result before returning
- cache.set(query, result)
+ // Cache the result before returning (use original query as key)
+ cache.set(rawQuery, result)
  return result
  }

@@ -252,7 +260,8 @@
  function processFrom(
  from: CollectionRef | QueryRef,
  allInputs: Record<string, KeyedStream>,
- cache: QueryCache
+ cache: QueryCache,
+ queryMapping: QueryMapping
  ): { alias: string; input: KeyedStream } {
  switch (from.type) {
  case `collectionRef`: {
@@ -265,8 +274,16 @@
  return { alias: from.alias, input }
  }
  case `queryRef`: {
+ // Find the original query for caching purposes
+ const originalQuery = queryMapping.get(from.query) || from.query
+
  // Recursively compile the sub-query with cache
- const subQueryInput = compileQuery(from.query, allInputs, cache)
+ const subQueryInput = compileQuery(
+ originalQuery,
+ allInputs,
+ cache,
+ queryMapping
+ )

  // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)
  // We need to extract just the value for use in parent queries
@@ -283,3 +300,53 @@
  throw new Error(`Unsupported FROM type: ${(from as any).type}`)
  }
  }
+
+ /**
+ * Recursively maps optimized subqueries to their original queries for proper caching.
+ * This ensures that when we encounter the same QueryRef object in different contexts,
+ * we can find the original query to check the cache.
+ */
+ function mapNestedQueries(
+ optimizedQuery: QueryIR,
+ originalQuery: QueryIR,
+ queryMapping: QueryMapping
+ ): void {
+ // Map the FROM clause if it's a QueryRef
+ if (
+ optimizedQuery.from.type === `queryRef` &&
+ originalQuery.from.type === `queryRef`
+ ) {
+ queryMapping.set(optimizedQuery.from.query, originalQuery.from.query)
+ // Recursively map nested queries
+ mapNestedQueries(
+ optimizedQuery.from.query,
+ originalQuery.from.query,
+ queryMapping
+ )
+ }
+
+ // Map JOIN clauses if they exist
+ if (optimizedQuery.join && originalQuery.join) {
+ for (
+ let i = 0;
+ i < optimizedQuery.join.length && i < originalQuery.join.length;
+ i++
+ ) {
+ const optimizedJoin = optimizedQuery.join[i]!
+ const originalJoin = originalQuery.join[i]!
+
+ if (
+ optimizedJoin.from.type === `queryRef` &&
+ originalJoin.from.type === `queryRef`
+ ) {
+ queryMapping.set(optimizedJoin.from.query, originalJoin.from.query)
+ // Recursively map nested queries in joins
+ mapNestedQueries(
+ optimizedJoin.from.query,
+ originalJoin.from.query,
+ queryMapping
+ )
+ }
+ }
+ }
+ }
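The reason for the extra `queryMapping` is that `optimizeQuery` returns rewritten query objects, so an optimized sub-query no longer has the object identity the WeakMap cache was keyed on. A reduced sketch of the idea using plain objects (not the library's real `QueryIR` or stream types):

```ts
type Q = { name: string; sub?: Q }

const cache = new WeakMap<Q, string>() // raw query -> compiled result
const mapping = new WeakMap<Q, Q>()    // optimized copy -> raw original

// Stand-in for optimizeQuery: it produces new objects, losing identity.
function optimize(q: Q): Q {
  return { ...q, sub: q.sub ? { ...q.sub } : undefined }
}

function compile(raw: Q): string {
  const hit = cache.get(raw)
  if (hit) return hit

  const optimized = optimize(raw)
  mapping.set(optimized, raw)
  if (optimized.sub && raw.sub) mapping.set(optimized.sub, raw.sub)

  // Resolve sub-queries back to their raw originals so the cache key stays stable.
  const inner = optimized.sub
    ? compile(mapping.get(optimized.sub) ?? optimized.sub)
    : ""

  const result = `compiled(${optimized.name}${inner ? ` <- ${inner}` : ""})`
  cache.set(raw, result)
  return result
}

// A sub-query object shared by two parents is compiled once and reused.
const base: Q = { name: "base" }
compile({ name: "a", sub: base })
compile({ name: "b", sub: base }) // hits the cache entry created for `base`
```

In the real compiler the same two WeakMaps are threaded through processFrom, processJoins, and processJoinSource, which is what the added cache and queryMapping parameters above carry.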
package/src/query/compiler/joins.ts CHANGED
@@ -7,18 +7,13 @@
  import { compileExpression } from "./evaluators.js"
  import { compileQuery } from "./index.js"
  import type { IStreamBuilder, JoinType } from "@electric-sql/d2mini"
- import type { CollectionRef, JoinClause, QueryIR, QueryRef } from "../ir.js"
+ import type { CollectionRef, JoinClause, QueryRef } from "../ir.js"
  import type {
  KeyedStream,
  NamespacedAndKeyedStream,
  NamespacedRow,
- ResultStream,
  } from "../../types.js"
-
- /**
- * Cache for compiled subqueries to avoid duplicate compilation
- */
- type QueryCache = WeakMap<QueryIR, ResultStream>
+ import type { QueryCache, QueryMapping } from "./types.js"

  /**
  * Processes all join clauses in a query
@@ -29,7 +24,8 @@ export function processJoins(
  tables: Record<string, KeyedStream>,
  mainTableAlias: string,
  allInputs: Record<string, KeyedStream>,
- cache: QueryCache
+ cache: QueryCache,
+ queryMapping: QueryMapping
  ): NamespacedAndKeyedStream {
  let resultPipeline = pipeline

@@ -40,7 +36,8 @@
  tables,
  mainTableAlias,
  allInputs,
- cache
+ cache,
+ queryMapping
  )
  }

@@ -56,13 +53,15 @@ function processJoin(
  tables: Record<string, KeyedStream>,
  mainTableAlias: string,
  allInputs: Record<string, KeyedStream>,
- cache: QueryCache
+ cache: QueryCache,
+ queryMapping: QueryMapping
  ): NamespacedAndKeyedStream {
  // Get the joined table alias and input stream
  const { alias: joinedTableAlias, input: joinedInput } = processJoinSource(
  joinClause.from,
  allInputs,
- cache
+ cache,
+ queryMapping
  )

  // Add the joined table to the tables map
@@ -128,7 +127,8 @@ function processJoin(
  function processJoinSource(
  from: CollectionRef | QueryRef,
  allInputs: Record<string, KeyedStream>,
- cache: QueryCache
+ cache: QueryCache,
+ queryMapping: QueryMapping
  ): { alias: string; input: KeyedStream } {
  switch (from.type) {
  case `collectionRef`: {
@@ -141,8 +141,16 @@
  return { alias: from.alias, input }
  }
  case `queryRef`: {
+ // Find the original query for caching purposes
+ const originalQuery = queryMapping.get(from.query) || from.query
+
  // Recursively compile the sub-query with cache
- const subQueryInput = compileQuery(from.query, allInputs, cache)
+ const subQueryInput = compileQuery(
+ originalQuery,
+ allInputs,
+ cache,
+ queryMapping
+ )

  // Subqueries may return [key, [value, orderByIndex]] (with ORDER BY) or [key, value] (without ORDER BY)
  // We need to extract just the value for use in parent queries
package/src/query/compiler/types.ts ADDED
@@ -0,0 +1,12 @@
+ import type { QueryIR } from "../ir.js"
+ import type { ResultStream } from "../../types.js"
+
+ /**
+ * Cache for compiled subqueries to avoid duplicate compilation
+ */
+ export type QueryCache = WeakMap<QueryIR, ResultStream>
+
+ /**
+ * Mapping from optimized queries back to their original queries for caching
+ */
+ export type QueryMapping = WeakMap<QueryIR, QueryIR>