@tanstack/db 0.5.30 → 0.5.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. package/dist/cjs/collection/subscription.cjs +6 -6
  2. package/dist/cjs/collection/subscription.cjs.map +1 -1
  3. package/dist/cjs/errors.cjs +8 -0
  4. package/dist/cjs/errors.cjs.map +1 -1
  5. package/dist/cjs/errors.d.cts +3 -0
  6. package/dist/cjs/index.cjs +13 -10
  7. package/dist/cjs/index.cjs.map +1 -1
  8. package/dist/cjs/query/builder/types.d.cts +28 -31
  9. package/dist/cjs/query/compiler/index.cjs +3 -0
  10. package/dist/cjs/query/compiler/index.cjs.map +1 -1
  11. package/dist/cjs/query/index.d.cts +1 -0
  12. package/dist/cjs/query/query-once.cjs +28 -0
  13. package/dist/cjs/query/query-once.cjs.map +1 -0
  14. package/dist/cjs/query/query-once.d.cts +57 -0
  15. package/dist/cjs/query/subset-dedupe.cjs +8 -7
  16. package/dist/cjs/query/subset-dedupe.cjs.map +1 -1
  17. package/dist/esm/collection/subscription.js +6 -6
  18. package/dist/esm/collection/subscription.js.map +1 -1
  19. package/dist/esm/errors.d.ts +3 -0
  20. package/dist/esm/errors.js +8 -0
  21. package/dist/esm/errors.js.map +1 -1
  22. package/dist/esm/index.js +6 -3
  23. package/dist/esm/index.js.map +1 -1
  24. package/dist/esm/query/builder/types.d.ts +28 -31
  25. package/dist/esm/query/compiler/index.js +4 -1
  26. package/dist/esm/query/compiler/index.js.map +1 -1
  27. package/dist/esm/query/index.d.ts +1 -0
  28. package/dist/esm/query/query-once.d.ts +57 -0
  29. package/dist/esm/query/query-once.js +28 -0
  30. package/dist/esm/query/query-once.js.map +1 -0
  31. package/dist/esm/query/subset-dedupe.js +8 -7
  32. package/dist/esm/query/subset-dedupe.js.map +1 -1
  33. package/package.json +3 -2
  34. package/skills/db-core/SKILL.md +61 -0
  35. package/skills/db-core/collection-setup/SKILL.md +427 -0
  36. package/skills/db-core/collection-setup/references/electric-adapter.md +238 -0
  37. package/skills/db-core/collection-setup/references/local-adapters.md +220 -0
  38. package/skills/db-core/collection-setup/references/powersync-adapter.md +241 -0
  39. package/skills/db-core/collection-setup/references/query-adapter.md +183 -0
  40. package/skills/db-core/collection-setup/references/rxdb-adapter.md +152 -0
  41. package/skills/db-core/collection-setup/references/schema-patterns.md +215 -0
  42. package/skills/db-core/collection-setup/references/trailbase-adapter.md +147 -0
  43. package/skills/db-core/custom-adapter/SKILL.md +285 -0
  44. package/skills/db-core/live-queries/SKILL.md +332 -0
  45. package/skills/db-core/live-queries/references/operators.md +302 -0
  46. package/skills/db-core/mutations-optimistic/SKILL.md +375 -0
  47. package/skills/db-core/mutations-optimistic/references/transaction-api.md +207 -0
  48. package/skills/meta-framework/SKILL.md +361 -0
  49. package/src/collection/subscription.ts +6 -6
  50. package/src/errors.ts +11 -0
  51. package/src/query/builder/types.ts +64 -50
  52. package/src/query/compiler/index.ts +5 -0
  53. package/src/query/index.ts +3 -0
  54. package/src/query/query-once.ts +115 -0
  55. package/src/query/subset-dedupe.ts +14 -15
@@ -0,0 +1 @@
1
+ {"version":3,"file":"query-once.js","sources":["../../../src/query/query-once.ts"],"sourcesContent":["import { createLiveQueryCollection } from './live-query-collection.js'\nimport type { InitialQueryBuilder, QueryBuilder } from './builder/index.js'\nimport type { Context, InferResultType } from './builder/types.js'\n\n/**\n * Configuration options for queryOnce\n */\nexport interface QueryOnceConfig<TContext extends Context> {\n /**\n * Query builder function that defines the query\n */\n query:\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>)\n | QueryBuilder<TContext>\n // Future: timeout, signal, etc.\n}\n\n// Overload 1: Simple query function returning array (non-single result)\n/**\n * Executes a one-shot query and returns the results as an array.\n *\n * This function creates a live query collection, preloads it, extracts the results,\n * and automatically cleans up the collection. It's ideal for:\n * - AI/LLM context building\n * - Data export\n * - Background processing\n * - Testing\n *\n * @param queryFn - A function that receives the query builder and returns a query\n * @returns A promise that resolves to an array of query results\n *\n * @example\n * ```typescript\n * // Basic query\n * const users = await queryOnce((q) =>\n * q.from({ user: usersCollection })\n * )\n *\n * // With filtering and projection\n * const activeUserNames = await queryOnce((q) =>\n * q.from({ user: usersCollection })\n * .where(({ user }) => eq(user.active, true))\n * .select(({ user }) => ({ name: user.name }))\n * )\n * ```\n */\nexport function queryOnce<TContext extends Context>(\n queryFn: (q: InitialQueryBuilder) => QueryBuilder<TContext>,\n): Promise<InferResultType<TContext>>\n\n// Overload 2: Config object form returning array (non-single result)\n/**\n * Executes a one-shot query using a configuration object.\n *\n * @param config - Configuration object with the query function\n * @returns A promise that resolves to an array of query results\n *\n * 
@example\n * ```typescript\n * const recentOrders = await queryOnce({\n * query: (q) =>\n * q.from({ order: ordersCollection })\n * .orderBy(({ order }) => desc(order.createdAt))\n * .limit(100),\n * })\n * ```\n */\nexport function queryOnce<TContext extends Context>(\n config: QueryOnceConfig<TContext>,\n): Promise<InferResultType<TContext>>\n\n// Implementation\nexport async function queryOnce<TContext extends Context>(\n configOrQuery:\n | QueryOnceConfig<TContext>\n | ((q: InitialQueryBuilder) => QueryBuilder<TContext>),\n): Promise<InferResultType<TContext>> {\n // Normalize input\n const config: QueryOnceConfig<TContext> =\n typeof configOrQuery === `function`\n ? { query: configOrQuery }\n : configOrQuery\n\n const query = (q: InitialQueryBuilder) => {\n const queryConfig = config.query\n return typeof queryConfig === `function` ? queryConfig(q) : queryConfig\n }\n\n // Create collection with minimal GC time; preload handles sync start\n const collection = createLiveQueryCollection({\n query,\n gcTime: 1, // Cleanup in next tick when no subscribers (0 disables GC)\n })\n\n try {\n // Wait for initial data load\n await collection.preload()\n\n // Check if this is a single-result query (findOne was called)\n const isSingleResult =\n (collection.config as { singleResult?: boolean }).singleResult === true\n\n // Extract and return results\n if (isSingleResult) {\n const first = collection.values().next().value as\n | InferResultType<TContext>\n | undefined\n return first as InferResultType<TContext>\n }\n return collection.toArray as InferResultType<TContext>\n } finally {\n // Always cleanup, even on error\n await collection.cleanup()\n 
}\n}\n"],"names":[],"mappings":";AAwEA,eAAsB,UACpB,eAGoC;AAEpC,QAAM,SACJ,OAAO,kBAAkB,aACrB,EAAE,OAAO,kBACT;AAEN,QAAM,QAAQ,CAAC,MAA2B;AACxC,UAAM,cAAc,OAAO;AAC3B,WAAO,OAAO,gBAAgB,aAAa,YAAY,CAAC,IAAI;AAAA,EAC9D;AAGA,QAAM,aAAa,0BAA0B;AAAA,IAC3C;AAAA,IACA,QAAQ;AAAA;AAAA,EAAA,CACT;AAED,MAAI;AAEF,UAAM,WAAW,QAAA;AAGjB,UAAM,iBACH,WAAW,OAAsC,iBAAiB;AAGrE,QAAI,gBAAgB;AAClB,YAAM,QAAQ,WAAW,OAAA,EAAS,OAAO;AAGzC,aAAO;AAAA,IACT;AACA,WAAO,WAAW;AAAA,EACpB,UAAA;AAEE,UAAM,WAAW,QAAA;AAAA,EACnB;AACF;"}
@@ -34,22 +34,23 @@ class DeduplicatedLoadSubset {
34
34
  prom.then(() => this.onDeduplicate?.(options)).catch();
35
35
  return prom;
36
36
  }
37
- const clonedOptions = cloneOptions(options);
37
+ const trackingOptions = cloneOptions(options);
38
+ const loadOptions = cloneOptions(options);
38
39
  if (this.unlimitedWhere !== void 0 && options.limit === void 0) {
39
- clonedOptions.where = minusWherePredicates(clonedOptions.where, this.unlimitedWhere) ?? clonedOptions.where;
40
+ loadOptions.where = minusWherePredicates(loadOptions.where, this.unlimitedWhere) ?? loadOptions.where;
40
41
  }
41
- const resultPromise = this._loadSubset(clonedOptions);
42
+ const resultPromise = this._loadSubset(loadOptions);
42
43
  if (resultPromise === true) {
43
- this.updateTracking(clonedOptions);
44
+ this.updateTracking(trackingOptions);
44
45
  return true;
45
46
  } else {
46
47
  const capturedGeneration = this.generation;
47
48
  const inflightEntry = {
48
- options: clonedOptions,
49
- // Store cloned options for subset matching
49
+ options: loadOptions,
50
+ // Store load options for subset matching of in-flight requests
50
51
  promise: resultPromise.then((result) => {
51
52
  if (capturedGeneration === this.generation) {
52
- this.updateTracking(clonedOptions);
53
+ this.updateTracking(trackingOptions);
53
54
  }
54
55
  return result;
55
56
  }).finally(() => {
@@ -1 +1 @@
1
- {"version":3,"file":"subset-dedupe.js","sources":["../../../src/query/subset-dedupe.ts"],"sourcesContent":["import {\n isPredicateSubset,\n isWhereSubset,\n minusWherePredicates,\n unionWherePredicates,\n} from './predicate-utils.js'\nimport type { BasicExpression } from './ir.js'\nimport type { LoadSubsetOptions } from '../types.js'\n\n/**\n * Deduplicated wrapper for a loadSubset function.\n * Tracks what data has been loaded and avoids redundant calls by applying\n * subset logic to predicates.\n *\n * @param opts - The options for the DeduplicatedLoadSubset\n * @param opts.loadSubset - The underlying loadSubset function to wrap\n * @param opts.onDeduplicate - An optional callback function that is invoked when a loadSubset call is deduplicated.\n * If the call is deduplicated because the requested data is being loaded by an inflight request,\n * then this callback is invoked when the inflight request completes successfully and the data is fully loaded.\n * This callback is useful if you need to track rows per query, in which case you can't ignore deduplicated calls\n * because you need to know which rows were loaded for each query.\n * @example\n * const dedupe = new DeduplicatedLoadSubset({ loadSubset: myLoadSubset, onDeduplicate: (opts) => console.log(`Call was deduplicated:`, opts) })\n *\n * // First call - fetches data\n * await dedupe.loadSubset({ where: gt(ref('age'), val(10)) })\n *\n * // Second call - subset of first, returns true immediately\n * await dedupe.loadSubset({ where: gt(ref('age'), val(20)) })\n *\n * // Clear state to start fresh\n * dedupe.reset()\n */\nexport class DeduplicatedLoadSubset {\n // The underlying loadSubset function to wrap\n private readonly _loadSubset: (\n options: LoadSubsetOptions,\n ) => true | Promise<void>\n\n // An optional callback function that is invoked when a loadSubset call is deduplicated.\n private readonly onDeduplicate:\n | ((options: LoadSubsetOptions) => void)\n | undefined\n\n // Combined where 
predicate for all unlimited calls (no limit)\n private unlimitedWhere: BasicExpression<boolean> | undefined = undefined\n\n // Flag to track if we've loaded all data (unlimited call with no where clause)\n private hasLoadedAllData = false\n\n // List of all limited calls (with limit, possibly with orderBy)\n // We clone options before storing to prevent mutation of stored predicates\n private limitedCalls: Array<LoadSubsetOptions> = []\n\n // Track in-flight calls to prevent concurrent duplicate requests\n // We store both the options and the promise so we can apply subset logic\n private inflightCalls: Array<{\n options: LoadSubsetOptions\n promise: Promise<void>\n }> = []\n\n // Generation counter to invalidate in-flight requests after reset()\n // When reset() is called, this increments, and any in-flight completion handlers\n // check if their captured generation matches before updating tracking state\n private generation = 0\n\n constructor(opts: {\n loadSubset: (options: LoadSubsetOptions) => true | Promise<void>\n onDeduplicate?: (options: LoadSubsetOptions) => void\n }) {\n this._loadSubset = opts.loadSubset\n this.onDeduplicate = opts.onDeduplicate\n }\n\n /**\n * Load a subset of data, with automatic deduplication based on previously\n * loaded predicates and in-flight requests.\n *\n * This method is auto-bound, so it can be safely passed as a callback without\n * losing its `this` context (e.g., `loadSubset: dedupe.loadSubset` in a sync config).\n *\n * @param options - The predicate options (where, orderBy, limit)\n * @returns true if data is already loaded, or a Promise that resolves when data is loaded\n */\n loadSubset = (options: LoadSubsetOptions): true | Promise<void> => {\n // If we've loaded all data, everything is covered\n if (this.hasLoadedAllData) {\n this.onDeduplicate?.(options)\n return true\n }\n\n // Check against unlimited combined predicate\n // If we've loaded all data matching a where clause, we don't need to refetch subsets\n if 
(this.unlimitedWhere !== undefined && options.where !== undefined) {\n if (isWhereSubset(options.where, this.unlimitedWhere)) {\n this.onDeduplicate?.(options)\n return true // Data already loaded via unlimited call\n }\n }\n\n // Check against limited calls\n if (options.limit !== undefined) {\n const alreadyLoaded = this.limitedCalls.some((loaded) =>\n isPredicateSubset(options, loaded),\n )\n\n if (alreadyLoaded) {\n this.onDeduplicate?.(options)\n return true // Already loaded\n }\n }\n\n // Check against in-flight calls using the same subset logic as resolved calls\n // This prevents duplicate requests when concurrent calls have subset relationships\n const matchingInflight = this.inflightCalls.find((inflight) =>\n isPredicateSubset(options, inflight.options),\n )\n\n if (matchingInflight !== undefined) {\n // An in-flight call will load data that covers this request\n // Return the same promise so this caller waits for the data to load\n // The in-flight promise already handles tracking updates when it completes\n const prom = matchingInflight.promise\n // Call `onDeduplicate` when the inflight request has loaded the data\n prom.then(() => this.onDeduplicate?.(options)).catch() // ignore errors\n return prom\n }\n\n // Not fully covered by existing data\n // Compute the subset of data that is not covered by the existing data\n // such that we only have to load that subset of missing data\n const clonedOptions = cloneOptions(options)\n if (this.unlimitedWhere !== undefined && options.limit === undefined) {\n // Compute difference to get only the missing data\n // We can only do this for unlimited queries\n // and we can only remove data that was loaded from unlimited queries\n // because with limited queries we have no way to express that we already loaded part of the matching data\n clonedOptions.where =\n minusWherePredicates(clonedOptions.where, this.unlimitedWhere) ??\n clonedOptions.where\n }\n\n // Call underlying loadSubset to load the missing data\n 
const resultPromise = this._loadSubset(clonedOptions)\n\n // Handle both sync (true) and async (Promise<void>) return values\n if (resultPromise === true) {\n // Sync return - update tracking synchronously\n // Clone options before storing to protect against caller mutation\n this.updateTracking(clonedOptions)\n return true\n } else {\n // Async return - track the promise and update tracking after it resolves\n\n // Capture the current generation - this lets us detect if reset() was called\n // while this request was in-flight, so we can skip updating tracking state\n const capturedGeneration = this.generation\n\n // We need to create a reference to the in-flight entry so we can remove it later\n const inflightEntry = {\n options: clonedOptions, // Store cloned options for subset matching\n promise: resultPromise\n .then((result) => {\n // Only update tracking if this request is still from the current generation\n // If reset() was called, the generation will have incremented and we should\n // not repopulate the state that was just cleared\n if (capturedGeneration === this.generation) {\n // Use the cloned options that we captured before any caller mutations\n // This ensures we track exactly what was loaded, not what the caller changed\n this.updateTracking(clonedOptions)\n }\n return result\n })\n .finally(() => {\n // Always remove from in-flight array on completion OR rejection\n // This ensures failed requests can be retried instead of being cached forever\n const index = this.inflightCalls.indexOf(inflightEntry)\n if (index !== -1) {\n this.inflightCalls.splice(index, 1)\n }\n }),\n }\n\n // Store the in-flight entry so concurrent subset calls can wait for it\n this.inflightCalls.push(inflightEntry)\n return inflightEntry.promise\n }\n }\n\n /**\n * Reset all tracking state.\n * Clears the history of loaded predicates and in-flight calls.\n * Use this when you want to start fresh, for example after clearing the underlying data store.\n *\n * Note: Any 
in-flight requests will still complete, but they will not update the tracking\n * state after the reset. This prevents old requests from repopulating cleared state.\n */\n reset(): void {\n this.unlimitedWhere = undefined\n this.hasLoadedAllData = false\n this.limitedCalls = []\n this.inflightCalls = []\n // Increment generation to invalidate any in-flight completion handlers\n // This ensures requests that were started before reset() don't repopulate the state\n this.generation++\n }\n\n private updateTracking(options: LoadSubsetOptions): void {\n // Update tracking based on whether this was a limited or unlimited call\n if (options.limit === undefined) {\n // Unlimited call - update combined where predicate\n // We ignore orderBy for unlimited calls as mentioned in requirements\n if (options.where === undefined) {\n // No where clause = all data loaded\n this.hasLoadedAllData = true\n this.unlimitedWhere = undefined\n this.limitedCalls = []\n this.inflightCalls = []\n } else if (this.unlimitedWhere === undefined) {\n this.unlimitedWhere = options.where\n } else {\n this.unlimitedWhere = unionWherePredicates([\n this.unlimitedWhere,\n options.where,\n ])\n }\n } else {\n // Limited call - add to list for future subset checks\n // Options are already cloned by caller to prevent mutation issues\n this.limitedCalls.push(options)\n }\n }\n}\n\n/**\n * Clones a LoadSubsetOptions object to prevent mutation of stored predicates.\n * This is crucial because callers often reuse the same options object and mutate\n * properties like limit or where between calls. 
Without cloning, our stored history\n * would reflect the mutated values rather than what was actually loaded.\n */\nexport function cloneOptions(options: LoadSubsetOptions): LoadSubsetOptions {\n return { ...options }\n}\n"],"names":[],"mappings":";AAiCO,MAAM,uBAAuB;AAAA,EAiClC,YAAY,MAGT;AAxBH,SAAQ,iBAAuD;AAG/D,SAAQ,mBAAmB;AAI3B,SAAQ,eAAyC,CAAA;AAIjD,SAAQ,gBAGH,CAAA;AAKL,SAAQ,aAAa;AAoBrB,SAAA,aAAa,CAAC,YAAqD;AAEjE,UAAI,KAAK,kBAAkB;AACzB,aAAK,gBAAgB,OAAO;AAC5B,eAAO;AAAA,MACT;AAIA,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AACpE,YAAI,cAAc,QAAQ,OAAO,KAAK,cAAc,GAAG;AACrD,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAGA,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,gBAAgB,KAAK,aAAa;AAAA,UAAK,CAAC,WAC5C,kBAAkB,SAAS,MAAM;AAAA,QAAA;AAGnC,YAAI,eAAe;AACjB,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAIA,YAAM,mBAAmB,KAAK,cAAc;AAAA,QAAK,CAAC,aAChD,kBAAkB,SAAS,SAAS,OAAO;AAAA,MAAA;AAG7C,UAAI,qBAAqB,QAAW;AAIlC,cAAM,OAAO,iBAAiB;AAE9B,aAAK,KAAK,MAAM,KAAK,gBAAgB,OAAO,CAAC,EAAE,MAAA;AAC/C,eAAO;AAAA,MACT;AAKA,YAAM,gBAAgB,aAAa,OAAO;AAC1C,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AAKpE,sBAAc,QACZ,qBAAqB,cAAc,OAAO,KAAK,cAAc,KAC7D,cAAc;AAAA,MAClB;AAGA,YAAM,gBAAgB,KAAK,YAAY,aAAa;AAGpD,UAAI,kBAAkB,MAAM;AAG1B,aAAK,eAAe,aAAa;AACjC,eAAO;AAAA,MACT,OAAO;AAKL,cAAM,qBAAqB,KAAK;AAGhC,cAAM,gBAAgB;AAAA,UACpB,SAAS;AAAA;AAAA,UACT,SAAS,cACN,KAAK,CAAC,WAAW;AAIhB,gBAAI,uBAAuB,KAAK,YAAY;AAG1C,mBAAK,eAAe,aAAa;AAAA,YACnC;AACA,mBAAO;AAAA,UACT,CAAC,EACA,QAAQ,MAAM;AAGb,kBAAM,QAAQ,KAAK,cAAc,QAAQ,aAAa;AACtD,gBAAI,UAAU,IAAI;AAChB,mBAAK,cAAc,OAAO,OAAO,CAAC;AAAA,YACpC;AAAA,UACF,CAAC;AAAA,QAAA;AAIL,aAAK,cAAc,KAAK,aAAa;AACrC,eAAO,cAAc;AAAA,MACvB;AAAA,IACF;AArHE,SAAK,cAAc,KAAK;AACxB,SAAK,gBAAgB,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA6HA,QAAc;AACZ,SAAK,iBAAiB;AACtB,SAAK,mBAAmB;AACxB,SAAK,eAAe,CAAA;AACpB,SAAK,gBAAgB,CAAA;AAGrB,SAAK;AAAA,EACP;AAAA,EAEQ,eAAe,SAAkC;AAEvD,QAAI,QAAQ,UAAU,QAAW;AAG/B,UAAI,QAAQ,UAAU,QAAW;AAE/B,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AACtB,aAAK,eAAe,CAAA;AACpB,aAAK,gBAAgB,CAAA;AAAA,MACvB,WAAW,KAAK,
mBAAmB,QAAW;AAC5C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,OAAO;AACL,aAAK,iBAAiB,qBAAqB;AAAA,UACzC,KAAK;AAAA,UACL,QAAQ;AAAA,QAAA,CACT;AAAA,MACH;AAAA,IACF,OAAO;AAGL,WAAK,aAAa,KAAK,OAAO;AAAA,IAChC;AAAA,EACF;AACF;AAQO,SAAS,aAAa,SAA+C;AAC1E,SAAO,EAAE,GAAG,QAAA;AACd;"}
1
+ {"version":3,"file":"subset-dedupe.js","sources":["../../../src/query/subset-dedupe.ts"],"sourcesContent":["import {\n isPredicateSubset,\n isWhereSubset,\n minusWherePredicates,\n unionWherePredicates,\n} from './predicate-utils.js'\nimport type { BasicExpression } from './ir.js'\nimport type { LoadSubsetOptions } from '../types.js'\n\n/**\n * Deduplicated wrapper for a loadSubset function.\n * Tracks what data has been loaded and avoids redundant calls by applying\n * subset logic to predicates.\n *\n * @param opts - The options for the DeduplicatedLoadSubset\n * @param opts.loadSubset - The underlying loadSubset function to wrap\n * @param opts.onDeduplicate - An optional callback function that is invoked when a loadSubset call is deduplicated.\n * If the call is deduplicated because the requested data is being loaded by an inflight request,\n * then this callback is invoked when the inflight request completes successfully and the data is fully loaded.\n * This callback is useful if you need to track rows per query, in which case you can't ignore deduplicated calls\n * because you need to know which rows were loaded for each query.\n * @example\n * const dedupe = new DeduplicatedLoadSubset({ loadSubset: myLoadSubset, onDeduplicate: (opts) => console.log(`Call was deduplicated:`, opts) })\n *\n * // First call - fetches data\n * await dedupe.loadSubset({ where: gt(ref('age'), val(10)) })\n *\n * // Second call - subset of first, returns true immediately\n * await dedupe.loadSubset({ where: gt(ref('age'), val(20)) })\n *\n * // Clear state to start fresh\n * dedupe.reset()\n */\nexport class DeduplicatedLoadSubset {\n // The underlying loadSubset function to wrap\n private readonly _loadSubset: (\n options: LoadSubsetOptions,\n ) => true | Promise<void>\n\n // An optional callback function that is invoked when a loadSubset call is deduplicated.\n private readonly onDeduplicate:\n | ((options: LoadSubsetOptions) => void)\n | undefined\n\n // Combined where 
predicate for all unlimited calls (no limit)\n private unlimitedWhere: BasicExpression<boolean> | undefined = undefined\n\n // Flag to track if we've loaded all data (unlimited call with no where clause)\n private hasLoadedAllData = false\n\n // List of all limited calls (with limit, possibly with orderBy)\n // We clone options before storing to prevent mutation of stored predicates\n private limitedCalls: Array<LoadSubsetOptions> = []\n\n // Track in-flight calls to prevent concurrent duplicate requests\n // We store both the options and the promise so we can apply subset logic\n private inflightCalls: Array<{\n options: LoadSubsetOptions\n promise: Promise<void>\n }> = []\n\n // Generation counter to invalidate in-flight requests after reset()\n // When reset() is called, this increments, and any in-flight completion handlers\n // check if their captured generation matches before updating tracking state\n private generation = 0\n\n constructor(opts: {\n loadSubset: (options: LoadSubsetOptions) => true | Promise<void>\n onDeduplicate?: (options: LoadSubsetOptions) => void\n }) {\n this._loadSubset = opts.loadSubset\n this.onDeduplicate = opts.onDeduplicate\n }\n\n /**\n * Load a subset of data, with automatic deduplication based on previously\n * loaded predicates and in-flight requests.\n *\n * This method is auto-bound, so it can be safely passed as a callback without\n * losing its `this` context (e.g., `loadSubset: dedupe.loadSubset` in a sync config).\n *\n * @param options - The predicate options (where, orderBy, limit)\n * @returns true if data is already loaded, or a Promise that resolves when data is loaded\n */\n loadSubset = (options: LoadSubsetOptions): true | Promise<void> => {\n // If we've loaded all data, everything is covered\n if (this.hasLoadedAllData) {\n this.onDeduplicate?.(options)\n return true\n }\n\n // Check against unlimited combined predicate\n // If we've loaded all data matching a where clause, we don't need to refetch subsets\n if 
(this.unlimitedWhere !== undefined && options.where !== undefined) {\n if (isWhereSubset(options.where, this.unlimitedWhere)) {\n this.onDeduplicate?.(options)\n return true // Data already loaded via unlimited call\n }\n }\n\n // Check against limited calls\n if (options.limit !== undefined) {\n const alreadyLoaded = this.limitedCalls.some((loaded) =>\n isPredicateSubset(options, loaded),\n )\n\n if (alreadyLoaded) {\n this.onDeduplicate?.(options)\n return true // Already loaded\n }\n }\n\n // Check against in-flight calls using the same subset logic as resolved calls\n // This prevents duplicate requests when concurrent calls have subset relationships\n const matchingInflight = this.inflightCalls.find((inflight) =>\n isPredicateSubset(options, inflight.options),\n )\n\n if (matchingInflight !== undefined) {\n // An in-flight call will load data that covers this request\n // Return the same promise so this caller waits for the data to load\n // The in-flight promise already handles tracking updates when it completes\n const prom = matchingInflight.promise\n // Call `onDeduplicate` when the inflight request has loaded the data\n prom.then(() => this.onDeduplicate?.(options)).catch() // ignore errors\n return prom\n }\n\n // Not fully covered by existing data — load the missing subset.\n // We need two clones: trackingOptions preserves the original predicate for\n // accurate tracking (e.g., where=undefined means \"all data\"), while loadOptions\n // may be narrowed with a difference expression for the actual backend request.\n const trackingOptions = cloneOptions(options)\n const loadOptions = cloneOptions(options)\n if (this.unlimitedWhere !== undefined && options.limit === undefined) {\n // Compute difference to get only the missing data\n // We can only do this for unlimited queries\n // and we can only remove data that was loaded from unlimited queries\n // because with limited queries we have no way to express that we already loaded part of the matching 
data\n loadOptions.where =\n minusWherePredicates(loadOptions.where, this.unlimitedWhere) ??\n loadOptions.where\n }\n\n // Call underlying loadSubset to load the missing data\n const resultPromise = this._loadSubset(loadOptions)\n\n // Handle both sync (true) and async (Promise<void>) return values\n if (resultPromise === true) {\n // Sync return - update tracking with the original predicate\n this.updateTracking(trackingOptions)\n return true\n } else {\n // Async return - track the promise and update tracking after it resolves\n\n // Capture the current generation - this lets us detect if reset() was called\n // while this request was in-flight, so we can skip updating tracking state\n const capturedGeneration = this.generation\n\n // We need to create a reference to the in-flight entry so we can remove it later\n const inflightEntry = {\n options: loadOptions, // Store load options for subset matching of in-flight requests\n promise: resultPromise\n .then((result) => {\n // Only update tracking if this request is still from the current generation\n // If reset() was called, the generation will have incremented and we should\n // not repopulate the state that was just cleared\n if (capturedGeneration === this.generation) {\n this.updateTracking(trackingOptions)\n }\n return result\n })\n .finally(() => {\n // Always remove from in-flight array on completion OR rejection\n // This ensures failed requests can be retried instead of being cached forever\n const index = this.inflightCalls.indexOf(inflightEntry)\n if (index !== -1) {\n this.inflightCalls.splice(index, 1)\n }\n }),\n }\n\n // Store the in-flight entry so concurrent subset calls can wait for it\n this.inflightCalls.push(inflightEntry)\n return inflightEntry.promise\n }\n }\n\n /**\n * Reset all tracking state.\n * Clears the history of loaded predicates and in-flight calls.\n * Use this when you want to start fresh, for example after clearing the underlying data store.\n *\n * Note: Any in-flight 
requests will still complete, but they will not update the tracking\n * state after the reset. This prevents old requests from repopulating cleared state.\n */\n reset(): void {\n this.unlimitedWhere = undefined\n this.hasLoadedAllData = false\n this.limitedCalls = []\n this.inflightCalls = []\n // Increment generation to invalidate any in-flight completion handlers\n // This ensures requests that were started before reset() don't repopulate the state\n this.generation++\n }\n\n private updateTracking(options: LoadSubsetOptions): void {\n // Update tracking based on whether this was a limited or unlimited call\n if (options.limit === undefined) {\n // Unlimited call - update combined where predicate\n // We ignore orderBy for unlimited calls as mentioned in requirements\n if (options.where === undefined) {\n // No where clause = all data loaded\n this.hasLoadedAllData = true\n this.unlimitedWhere = undefined\n this.limitedCalls = []\n this.inflightCalls = []\n } else if (this.unlimitedWhere === undefined) {\n this.unlimitedWhere = options.where\n } else {\n this.unlimitedWhere = unionWherePredicates([\n this.unlimitedWhere,\n options.where,\n ])\n }\n } else {\n // Limited call - add to list for future subset checks\n // Options are already cloned by caller to prevent mutation issues\n this.limitedCalls.push(options)\n }\n }\n}\n\n/**\n * Clones a LoadSubsetOptions object to prevent mutation of stored predicates.\n * This is crucial because callers often reuse the same options object and mutate\n * properties like limit or where between calls. 
Without cloning, our stored history\n * would reflect the mutated values rather than what was actually loaded.\n */\nexport function cloneOptions(options: LoadSubsetOptions): LoadSubsetOptions {\n return { ...options }\n}\n"],"names":[],"mappings":";AAiCO,MAAM,uBAAuB;AAAA,EAiClC,YAAY,MAGT;AAxBH,SAAQ,iBAAuD;AAG/D,SAAQ,mBAAmB;AAI3B,SAAQ,eAAyC,CAAA;AAIjD,SAAQ,gBAGH,CAAA;AAKL,SAAQ,aAAa;AAoBrB,SAAA,aAAa,CAAC,YAAqD;AAEjE,UAAI,KAAK,kBAAkB;AACzB,aAAK,gBAAgB,OAAO;AAC5B,eAAO;AAAA,MACT;AAIA,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AACpE,YAAI,cAAc,QAAQ,OAAO,KAAK,cAAc,GAAG;AACrD,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAGA,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,gBAAgB,KAAK,aAAa;AAAA,UAAK,CAAC,WAC5C,kBAAkB,SAAS,MAAM;AAAA,QAAA;AAGnC,YAAI,eAAe;AACjB,eAAK,gBAAgB,OAAO;AAC5B,iBAAO;AAAA,QACT;AAAA,MACF;AAIA,YAAM,mBAAmB,KAAK,cAAc;AAAA,QAAK,CAAC,aAChD,kBAAkB,SAAS,SAAS,OAAO;AAAA,MAAA;AAG7C,UAAI,qBAAqB,QAAW;AAIlC,cAAM,OAAO,iBAAiB;AAE9B,aAAK,KAAK,MAAM,KAAK,gBAAgB,OAAO,CAAC,EAAE,MAAA;AAC/C,eAAO;AAAA,MACT;AAMA,YAAM,kBAAkB,aAAa,OAAO;AAC5C,YAAM,cAAc,aAAa,OAAO;AACxC,UAAI,KAAK,mBAAmB,UAAa,QAAQ,UAAU,QAAW;AAKpE,oBAAY,QACV,qBAAqB,YAAY,OAAO,KAAK,cAAc,KAC3D,YAAY;AAAA,MAChB;AAGA,YAAM,gBAAgB,KAAK,YAAY,WAAW;AAGlD,UAAI,kBAAkB,MAAM;AAE1B,aAAK,eAAe,eAAe;AACnC,eAAO;AAAA,MACT,OAAO;AAKL,cAAM,qBAAqB,KAAK;AAGhC,cAAM,gBAAgB;AAAA,UACpB,SAAS;AAAA;AAAA,UACT,SAAS,cACN,KAAK,CAAC,WAAW;AAIhB,gBAAI,uBAAuB,KAAK,YAAY;AAC1C,mBAAK,eAAe,eAAe;AAAA,YACrC;AACA,mBAAO;AAAA,UACT,CAAC,EACA,QAAQ,MAAM;AAGb,kBAAM,QAAQ,KAAK,cAAc,QAAQ,aAAa;AACtD,gBAAI,UAAU,IAAI;AAChB,mBAAK,cAAc,OAAO,OAAO,CAAC;AAAA,YACpC;AAAA,UACF,CAAC;AAAA,QAAA;AAIL,aAAK,cAAc,KAAK,aAAa;AACrC,eAAO,cAAc;AAAA,MACvB;AAAA,IACF;AApHE,SAAK,cAAc,KAAK;AACxB,SAAK,gBAAgB,KAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA4HA,QAAc;AACZ,SAAK,iBAAiB;AACtB,SAAK,mBAAmB;AACxB,SAAK,eAAe,CAAA;AACpB,SAAK,gBAAgB,CAAA;AAGrB,SAAK;AAAA,EACP;AAAA,EAEQ,eAAe,SAAkC;AAEvD,QAAI,QAAQ,UAAU,QAAW;AAG/B,UAAI,QAAQ,UAAU,QAAW;AAE/B,aAAK,mBAAmB;AACxB,aAAK,iBAAiB;AACtB,aAAK,eAAe,CAAA;AACpB,aAAK,gBAAgB,
CAAA;AAAA,MACvB,WAAW,KAAK,mBAAmB,QAAW;AAC5C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,OAAO;AACL,aAAK,iBAAiB,qBAAqB;AAAA,UACzC,KAAK;AAAA,UACL,QAAQ;AAAA,QAAA,CACT;AAAA,MACH;AAAA,IACF,OAAO;AAGL,WAAK,aAAa,KAAK,OAAO;AAAA,IAChC;AAAA,EACF;AACF;AAQO,SAAS,aAAa,SAA+C;AAC1E,SAAO,EAAE,GAAG,QAAA;AACd;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@tanstack/db",
3
- "version": "0.5.30",
3
+ "version": "0.5.32",
4
4
  "description": "A reactive client store for building super fast apps on sync",
5
5
  "author": "Kyle Mathews",
6
6
  "license": "MIT",
@@ -34,7 +34,8 @@
34
34
  "sideEffects": false,
35
35
  "files": [
36
36
  "dist",
37
- "src"
37
+ "src",
38
+ "skills"
38
39
  ],
39
40
  "dependencies": {
40
41
  "@standard-schema/spec": "^1.1.0",
@@ -0,0 +1,61 @@
1
+ ---
2
+ name: db-core
3
+ description: >
4
+ TanStack DB core concepts: createCollection with queryCollectionOptions,
5
+ electricCollectionOptions, powerSyncCollectionOptions, rxdbCollectionOptions,
6
+ trailbaseCollectionOptions, localOnlyCollectionOptions. Live queries via
7
+ query builder (from, where, join, select, groupBy, orderBy, limit). Optimistic
8
+ mutations with draft proxy (collection.insert, collection.update,
9
+ collection.delete). createOptimisticAction, createTransaction,
10
+ createPacedMutations. Entry point for all TanStack DB skills.
11
+ type: core
12
+ library: db
13
+ library_version: '0.5.30'
14
+ ---
15
+
16
+ # TanStack DB — Core Concepts
17
+
18
+ TanStack DB is a reactive client-side data store. It loads data into typed
19
+ collections from any backend (REST APIs, sync engines, local storage), provides
20
+ sub-millisecond live queries via differential dataflow, and supports instant
21
+ optimistic mutations with automatic rollback.
22
+
23
+ Framework packages (`@tanstack/react-db`, `@tanstack/vue-db`, `@tanstack/svelte-db`,
24
+ `@tanstack/solid-db`) re-export everything from `@tanstack/db` plus framework-specific
25
+ hooks. In framework projects, import from the framework package directly.
26
+ `@tanstack/angular-db` is the exception — import operators from `@tanstack/db` separately.
27
+
28
+ ## Sub-Skills
29
+
30
+ | Need to... | Read |
31
+ | ------------------------------------------------ | ---------------------------------------------------- |
32
+ | Create a collection, pick an adapter, add schema | db-core/collection-setup/SKILL.md |
33
+ | Query data with where, join, groupBy, select | db-core/live-queries/SKILL.md |
34
+ | Insert, update, delete with optimistic UI | db-core/mutations-optimistic/SKILL.md |
35
+ | Build a custom sync adapter | db-core/custom-adapter/SKILL.md |
36
+ | Preload collections in route loaders | meta-framework/SKILL.md |
37
+ | Add offline transaction queueing | offline/SKILL.md (in @tanstack/offline-transactions) |
38
+
39
+ For framework-specific hooks:
40
+
41
+ | Framework | Read |
42
+ | --------- | ------------------- |
43
+ | React | react-db/SKILL.md |
44
+ | Vue | vue-db/SKILL.md |
45
+ | Svelte | svelte-db/SKILL.md |
46
+ | Solid | solid-db/SKILL.md |
47
+ | Angular | angular-db/SKILL.md |
48
+
49
+ ## Quick Decision Tree
50
+
51
+ - Setting up for the first time? → db-core/collection-setup
52
+ - Building queries on collection data? → db-core/live-queries
53
+ - Writing data / handling optimistic state? → db-core/mutations-optimistic
54
+ - Using React hooks? → react-db
55
+ - Preloading in route loaders (Start, Next, Remix)? → meta-framework
56
+ - Building an adapter for a new backend? → db-core/custom-adapter
57
+ - Need offline transaction persistence? → offline
58
+
59
+ ## Version
60
+
61
+ Targets @tanstack/db v0.5.30.
@@ -0,0 +1,427 @@
1
+ ---
2
+ name: db-core/collection-setup
3
+ description: >
4
+ Creating typed collections with createCollection. Adapter selection:
5
+ queryCollectionOptions (REST/TanStack Query), electricCollectionOptions
6
+ (ElectricSQL real-time sync), powerSyncCollectionOptions (PowerSync SQLite),
7
+ rxdbCollectionOptions (RxDB), trailbaseCollectionOptions (TrailBase),
8
+ localOnlyCollectionOptions, localStorageCollectionOptions. CollectionConfig
9
+ options: getKey, schema, sync, gcTime, autoIndex, syncMode (eager/on-demand/
10
+ progressive). StandardSchema validation with Zod/Valibot/ArkType. Collection
11
+ lifecycle (idle/loading/ready/error). Adapter-specific sync patterns including
12
+ Electric txid tracking and Query direct writes.
13
+ type: sub-skill
14
+ library: db
15
+ library_version: '0.5.30'
16
+ sources:
17
+ - 'TanStack/db:docs/overview.md'
18
+ - 'TanStack/db:docs/guides/schemas.md'
19
+ - 'TanStack/db:docs/collections/query-collection.md'
20
+ - 'TanStack/db:docs/collections/electric-collection.md'
21
+ - 'TanStack/db:docs/collections/powersync-collection.md'
22
+ - 'TanStack/db:docs/collections/rxdb-collection.md'
23
+ - 'TanStack/db:docs/collections/trailbase-collection.md'
24
+ - 'TanStack/db:packages/db/src/collection/index.ts'
25
+ ---
26
+
27
+ This skill builds on db-core. Read it first for the overall mental model.
28
+
29
+ # Collection Setup & Schema
30
+
31
+ ## Setup
32
+
33
+ ```ts
34
+ import { createCollection } from '@tanstack/react-db'
35
+ import { queryCollectionOptions } from '@tanstack/query-db-collection'
36
+ import { QueryClient } from '@tanstack/query-core'
37
+ import { z } from 'zod'
38
+
39
+ const queryClient = new QueryClient()
40
+
41
+ const todoSchema = z.object({
42
+ id: z.number(),
43
+ text: z.string(),
44
+ completed: z.boolean().default(false),
45
+ created_at: z
46
+ .union([z.string(), z.date()])
47
+ .transform((val) => (typeof val === 'string' ? new Date(val) : val)),
48
+ })
49
+
50
+ const todoCollection = createCollection(
51
+ queryCollectionOptions({
52
+ queryKey: ['todos'],
53
+ queryFn: async () => {
54
+ const res = await fetch('/api/todos')
55
+ return res.json()
56
+ },
57
+ queryClient,
58
+ getKey: (item) => item.id,
59
+ schema: todoSchema,
60
+ onInsert: async ({ transaction }) => {
61
+ await api.todos.create(transaction.mutations[0].modified)
62
+ await todoCollection.utils.refetch()
63
+ },
64
+ onUpdate: async ({ transaction }) => {
65
+ const mut = transaction.mutations[0]
66
+ await api.todos.update(mut.key, mut.changes)
67
+ await todoCollection.utils.refetch()
68
+ },
69
+ onDelete: async ({ transaction }) => {
70
+ await api.todos.delete(transaction.mutations[0].key)
71
+ await todoCollection.utils.refetch()
72
+ },
73
+ }),
74
+ )
75
+ ```
76
+
77
+ ## Choosing an Adapter
78
+
79
+ | Backend | Adapter | Package |
80
+ | -------------------------------- | ------------------------------- | ----------------------------------- |
81
+ | REST API / TanStack Query | `queryCollectionOptions` | `@tanstack/query-db-collection` |
82
+ | ElectricSQL (real-time Postgres) | `electricCollectionOptions` | `@tanstack/electric-db-collection` |
83
+ | PowerSync (SQLite offline) | `powerSyncCollectionOptions` | `@tanstack/powersync-db-collection` |
84
+ | RxDB (reactive database) | `rxdbCollectionOptions` | `@tanstack/rxdb-db-collection` |
85
+ | TrailBase (event streaming) | `trailbaseCollectionOptions` | `@tanstack/trailbase-db-collection` |
86
+ | No backend (UI state) | `localOnlyCollectionOptions` | `@tanstack/db` |
87
+ | Browser localStorage | `localStorageCollectionOptions` | `@tanstack/db` |
88
+
89
+ If the user specifies a backend (e.g. Electric, PowerSync), use that adapter directly. Only use `localOnlyCollectionOptions` when there is no backend yet — the collection API is uniform, so swapping to a real adapter later only changes the options creator.
90
+
91
+ ## Sync Modes
92
+
93
+ ```ts
94
+ queryCollectionOptions({
95
+ syncMode: 'eager', // default — loads all data upfront
96
+ // syncMode: 'on-demand', // loads only what live queries request
97
+ // syncMode: 'progressive', // (Electric only) query subset first, full sync in background
98
+ })
99
+ ```
100
+
101
+ | Mode | Best for | Data size |
102
+ | ------------- | ---------------------------------------------- | --------- |
103
+ | `eager` | Mostly-static datasets | <10k rows |
104
+ | `on-demand` | Search, catalogs, large tables | >50k rows |
105
+ | `progressive` | Collaborative apps needing instant first paint | Any |
106
+
107
+ ## Core Patterns
108
+
109
+ ### Local-only collection for prototyping
110
+
111
+ ```ts
112
+ import {
113
+ createCollection,
114
+ localOnlyCollectionOptions,
115
+ } from '@tanstack/react-db'
116
+
117
+ const todoCollection = createCollection(
118
+ localOnlyCollectionOptions({
119
+ getKey: (item) => item.id,
120
+ initialData: [{ id: 1, text: 'Learn TanStack DB', completed: false }],
121
+ }),
122
+ )
123
+ ```
124
+
125
+ ### Schema with type transformations
126
+
127
+ ```ts
128
+ const schema = z.object({
129
+ id: z.number(),
130
+ title: z.string(),
131
+ due_date: z
132
+ .union([z.string(), z.date()])
133
+ .transform((val) => (typeof val === 'string' ? new Date(val) : val)),
134
+ priority: z.number().default(0),
135
+ })
136
+ ```
137
+
138
+ Use `z.union([z.string(), z.date()])` for transformed fields — this ensures `TInput` is a superset of `TOutput` so that `update()` works correctly with the draft proxy.
139
+
140
+ ### ElectricSQL with txid tracking
141
+
142
+ Always use a schema with Electric — without one, the collection is typed as `Record<string, unknown>`.
143
+
144
+ ```ts
145
+ import { electricCollectionOptions } from '@tanstack/electric-db-collection'
146
+ import { z } from 'zod'
147
+
148
+ const todoSchema = z.object({
149
+ id: z.string(),
150
+ text: z.string(),
151
+ completed: z.boolean(),
152
+ created_at: z.coerce.date(),
153
+ })
154
+
155
+ const todoCollection = createCollection(
156
+ electricCollectionOptions({
157
+ schema: todoSchema,
158
+ shapeOptions: { url: '/api/electric/todos' },
159
+ getKey: (item) => item.id,
160
+ onInsert: async ({ transaction }) => {
161
+ const res = await api.todos.create(transaction.mutations[0].modified)
162
+ return { txid: res.txid }
163
+ },
164
+ }),
165
+ )
166
+ ```
167
+
168
+ The returned `txid` tells the collection to hold optimistic state until Electric streams back that transaction. See the [Electric adapter reference](references/electric-adapter.md) for the full dual-path pattern (schema + parser).
169
+
170
+ ## Common Mistakes
171
+
172
+ ### CRITICAL queryFn returning empty array deletes all data
173
+
174
+ Wrong:
175
+
176
+ ```ts
177
+ queryCollectionOptions({
178
+ queryFn: async () => {
179
+ const res = await fetch('/api/todos?status=active')
180
+ return res.json() // returns [] when no active todos — deletes everything
181
+ },
182
+ })
183
+ ```
184
+
185
+ Correct:
186
+
187
+ ```ts
188
+ queryCollectionOptions({
189
+ queryFn: async () => {
190
+ const res = await fetch('/api/todos') // fetch complete state
191
+ return res.json()
192
+ },
193
+ // Use on-demand mode + live query where() for filtering
194
+ syncMode: 'on-demand',
195
+ })
196
+ ```
197
+
198
+ `queryFn` result is treated as complete server state. Returning `[]` means "server has no items", deleting all existing collection data.
199
+
200
+ Source: docs/collections/query-collection.md
201
+
202
+ ### CRITICAL Not using the correct adapter for your backend
203
+
204
+ Wrong:
205
+
206
+ ```ts
207
+ const todoCollection = createCollection(
208
+ localOnlyCollectionOptions({
209
+ getKey: (item) => item.id,
210
+ }),
211
+ )
212
+ // Manually fetching and inserting...
213
+ ```
214
+
215
+ Correct:
216
+
217
+ ```ts
218
+ const todoCollection = createCollection(
219
+ queryCollectionOptions({
220
+ queryKey: ['todos'],
221
+ queryFn: async () => fetch('/api/todos').then((r) => r.json()),
222
+ queryClient,
223
+ getKey: (item) => item.id,
224
+ }),
225
+ )
226
+ ```
227
+
228
+ Each backend has a dedicated adapter that handles sync, mutation handlers, and utilities. Using `localOnlyCollectionOptions` or bare `createCollection` for a real backend bypasses all of this.
229
+
230
+ Source: docs/overview.md
231
+
232
+ ### CRITICAL Electric txid queried outside mutation transaction
233
+
234
+ Wrong:
235
+
236
+ ```ts
237
+ // Backend handler
238
+ app.post('/api/todos', async (req, res) => {
239
+ const txid = await generateTxId(sql) // WRONG: separate transaction
240
+ await sql`INSERT INTO todos ${sql(req.body)}`
241
+ res.json({ txid })
242
+ })
243
+ ```
244
+
245
+ Correct:
246
+
247
+ ```ts
248
+ app.post('/api/todos', async (req, res) => {
249
+ let txid
250
+ await sql.begin(async (tx) => {
251
+ txid = await generateTxId(tx) // CORRECT: same transaction
252
+ await tx`INSERT INTO todos ${tx(req.body)}`
253
+ })
254
+ res.json({ txid })
255
+ })
256
+ ```
257
+
258
+ `pg_current_xact_id()` must be queried inside the same SQL transaction as the mutation. Otherwise the txid doesn't match and `awaitTxId` stalls forever.
259
+
260
+ Source: docs/collections/electric-collection.md
261
+
262
+ ### CRITICAL queryFn returning partial data without merging
263
+
264
+ Wrong:
265
+
266
+ ```ts
267
+ queryCollectionOptions({
268
+ queryFn: async () => {
269
+ const newItems = await fetch('/api/todos?since=' + lastSync)
270
+ return newItems.json() // only new items — everything else deleted
271
+ },
272
+ })
273
+ ```
274
+
275
+ Correct:
276
+
277
+ ```ts
278
+ queryCollectionOptions({
279
+ queryFn: async (ctx) => {
280
+ const existing = ctx.queryClient.getQueryData(['todos']) ?? []
281
+ const newItems = await fetch('/api/todos?since=' + lastSync).then((r) =>
282
+ r.json(),
283
+ )
284
+ return [...existing, ...newItems]
285
+ },
286
+ })
287
+ ```
288
+
289
+ `queryFn` result replaces all collection data. For incremental fetches, merge with existing data.
290
+
291
+ Source: docs/collections/query-collection.md
292
+
293
+ ### HIGH Using async schema validation
294
+
295
+ Wrong:
296
+
297
+ ```ts
298
+ const schema = z.object({
299
+ email: z.string().refine(async (val) => {
300
+ const exists = await checkEmail(val)
301
+ return !exists
302
+ }),
303
+ })
304
+ ```
305
+
306
+ Correct:
307
+
308
+ ```ts
309
+ const schema = z.object({
310
+ email: z.string().email(),
311
+ })
312
+ // Do async validation in the mutation handler instead
313
+ ```
314
+
315
+ Schema validation must be synchronous. Async validation throws `SchemaMustBeSynchronousError` at mutation time.
316
+
317
+ Source: packages/db/src/collection/mutations.ts:101
318
+
319
+ ### HIGH getKey returning undefined for some items
320
+
321
+ Wrong:
322
+
323
+ ```ts
324
+ createCollection(
325
+ queryCollectionOptions({
326
+ getKey: (item) => item.metadata.id, // undefined if metadata missing
327
+ }),
328
+ )
329
+ ```
330
+
331
+ Correct:
332
+
333
+ ```ts
334
+ createCollection(
335
+ queryCollectionOptions({
336
+ getKey: (item) => item.id, // always present
337
+ }),
338
+ )
339
+ ```
340
+
341
+ `getKey` must return a defined value for every item. Throws `UndefinedKeyError` otherwise.
342
+
343
+ Source: packages/db/src/collection/mutations.ts:148
344
+
345
+ ### HIGH TInput not a superset of TOutput with schema transforms
346
+
347
+ Wrong:
348
+
349
+ ```ts
350
+ const schema = z.object({
351
+ created_at: z.string().transform((val) => new Date(val)),
352
+ })
353
+ // update() fails — draft.created_at is Date but schema only accepts string
354
+ ```
355
+
356
+ Correct:
357
+
358
+ ```ts
359
+ const schema = z.object({
360
+ created_at: z
361
+ .union([z.string(), z.date()])
362
+ .transform((val) => (typeof val === 'string' ? new Date(val) : val)),
363
+ })
364
+ ```
365
+
366
+ When a schema transforms types, `TInput` must accept both the pre-transform and post-transform types for `update()` to work with the draft proxy.
367
+
368
+ Source: docs/guides/schemas.md
369
+
370
+ ### HIGH React Native missing crypto.randomUUID polyfill
371
+
372
+ TanStack DB uses `crypto.randomUUID()` internally. React Native doesn't provide this. Install `react-native-random-uuid` and import it at your app entry point.
373
+
374
+ Source: docs/overview.md
375
+
376
+ ### MEDIUM Providing both explicit type parameter and schema
377
+
378
+ Wrong:
379
+
380
+ ```ts
381
+ createCollection<Todo>(queryCollectionOptions({ schema: todoSchema, ... }))
382
+ ```
383
+
384
+ Correct:
385
+
386
+ ```ts
387
+ createCollection(queryCollectionOptions({ schema: todoSchema, ... }))
388
+ ```
389
+
390
+ When a schema is provided, the collection infers types from it. An explicit generic creates conflicting type constraints.
391
+
392
+ Source: docs/overview.md
393
+
394
+ ### MEDIUM Direct writes overridden by next query sync
395
+
396
+ Wrong:
397
+
398
+ ```ts
399
+ todoCollection.utils.writeInsert(newItem)
400
+ // Next queryFn execution replaces all data, losing the direct write
401
+ ```
402
+
403
+ Correct:
404
+
405
+ ```ts
406
+ todoCollection.utils.writeInsert(newItem)
407
+ // Use staleTime to prevent immediate refetch
408
+ // Or return { refetch: false } from mutation handlers
409
+ ```
410
+
411
+ Direct writes update the collection immediately, but the next `queryFn` returns complete server state which overwrites them.
412
+
413
+ Source: docs/collections/query-collection.md
414
+
415
+ ## References
416
+
417
+ - [TanStack Query adapter](references/query-adapter.md)
418
+ - [ElectricSQL adapter](references/electric-adapter.md)
419
+ - [PowerSync adapter](references/powersync-adapter.md)
420
+ - [RxDB adapter](references/rxdb-adapter.md)
421
+ - [TrailBase adapter](references/trailbase-adapter.md)
422
+ - [Local adapters (local-only, localStorage)](references/local-adapters.md)
423
+ - [Schema validation patterns](references/schema-patterns.md)
424
+
425
+ See also: db-core/mutations-optimistic/SKILL.md — mutation handlers configured here execute during mutations.
426
+
427
+ See also: db-core/custom-adapter/SKILL.md — for building your own adapter.