@tanstack/query-db-collection 0.2.32 → 0.2.34

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -38,11 +38,11 @@ function validateOperations(operations, ctx) {
  }
  seenKeys.add(op.key);
  if (op.type === `update`) {
- if (!ctx.collection.has(op.key)) {
+ if (!ctx.collection._state.syncedData.has(op.key)) {
  throw new errors.UpdateOperationItemNotFoundError(op.key);
  }
  } else if (op.type === `delete`) {
- if (!ctx.collection.has(op.key)) {
+ if (!ctx.collection._state.syncedData.has(op.key)) {
  throw new errors.DeleteOperationItemNotFoundError(op.key);
  }
  }
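The hunk above is the crux of this release: the `writeUpdate`/`writeDelete` existence checks now consult the collection's synced store (`_state.syncedData`) instead of the combined synced + optimistic view. A minimal sketch of what that means at the call site, assuming the write utilities are exposed on `collection.utils` and that the collection's sync has started; `Todo`, `fetchTodos`, and the ids are illustrative, not part of the package:

```ts
import { QueryClient } from "@tanstack/query-core"
import { createCollection } from "@tanstack/db"
import { queryCollectionOptions } from "@tanstack/query-db-collection"

type Todo = { id: string; title: string; done: boolean }

const queryClient = new QueryClient()

// Illustrative fetcher: after the first successful query, the synced store
// contains only todo `1`.
const fetchTodos = async (): Promise<Array<Todo>> => [
  { id: `1`, title: `write docs`, done: false },
]

const todos = createCollection(
  queryCollectionOptions({
    queryKey: [`todos`],
    queryFn: fetchTodos,
    queryClient,
    getKey: (item) => item.id,
  })
)

function markDone(id: string): void {
  // 0.2.32 validated against collection.has(id) (synced + optimistic view);
  // 0.2.34 validates against collection._state.syncedData.has(id), so a key
  // that exists only as a pending optimistic insert is rejected here with
  // UpdateOperationItemNotFoundError.
  todos.utils.writeUpdate({ id, done: true })
}

markDone(`1`) // ok once sync has started and the query above has resolved
```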
@@ -63,7 +63,7 @@ function performWriteOperations(operations, ctx) {
  break;
  }
  case `update`: {
- const currentItem = ctx.collection.get(op.key);
+ const currentItem = ctx.collection._state.syncedData.get(op.key);
  const updatedItem = {
  ...currentItem,
  ...op.data
@@ -80,7 +80,7 @@ function performWriteOperations(operations, ctx) {
  break;
  }
  case `delete`: {
- const currentItem = ctx.collection.get(op.key);
+ const currentItem = ctx.collection._state.syncedData.get(op.key);
  ctx.write({
  type: `delete`,
  value: currentItem
@@ -88,12 +88,13 @@ function performWriteOperations(operations, ctx) {
  break;
  }
  case `upsert`: {
+ const existsInSyncedStore = ctx.collection._state.syncedData.has(op.key);
  const resolved = ctx.collection.validateData(
  op.data,
- ctx.collection.has(op.key) ? `update` : `insert`,
+ existsInSyncedStore ? `update` : `insert`,
  op.key
  );
- if (ctx.collection.has(op.key)) {
+ if (existsInSyncedStore) {
  ctx.write({
  type: `update`,
  value: resolved
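The upsert branch gets the same treatment and additionally hoists the check into `existsInSyncedStore`, so the validation mode and the written change type come from a single answer. A sketch of that decision in isolation, mirroring the compiled `ctx` shape used above rather than any exported API (`syncedData` is assumed to be a `Map`, which matches how `query.cjs` iterates it with `.entries()`):

```ts
// Minimal shape mirroring the compiled `ctx` above; not an exported type.
interface UpsertCtx<TRow extends object, TKey extends string | number> {
  collection: {
    _state: { syncedData: Map<TKey, TRow> }
    validateData: (data: unknown, type: `insert` | `update`, key?: TKey) => TRow
  }
  write: (message: { type: `insert` | `update`; value: TRow }) => void
}

function applyUpsert<TRow extends object, TKey extends string | number>(
  ctx: UpsertCtx<TRow, TKey>,
  key: TKey,
  data: Partial<TRow>
): void {
  // 0.2.34: insert-vs-update is decided from the synced store only, so a
  // pending optimistic row with the same key no longer turns the upsert
  // into an update of data that was never synced.
  const existsInSyncedStore = ctx.collection._state.syncedData.has(key)
  const resolved = ctx.collection.validateData(
    data,
    existsInSyncedStore ? `update` : `insert`,
    key
  )
  ctx.write({
    type: existsInSyncedStore ? `update` : `insert`,
    value: resolved,
  })
}
```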
@@ -1 +1 @@
- {"version":3,"file":"manual-sync.cjs","sources":["../../src/manual-sync.ts"],"sourcesContent":["import {\n DeleteOperationItemNotFoundError,\n DuplicateKeyInBatchError,\n SyncNotInitializedError,\n UpdateOperationItemNotFoundError,\n} from \"./errors\"\nimport type { QueryClient } from \"@tanstack/query-core\"\nimport type { ChangeMessage, Collection } from \"@tanstack/db\"\n\n// Track active batch operations per context to prevent cross-collection contamination\nconst activeBatchContexts = new WeakMap<\n SyncContext<any, any>,\n {\n operations: Array<SyncOperation<any, any, any>>\n isActive: boolean\n }\n>()\n\n// Types for sync operations\nexport type SyncOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n> =\n | { type: `insert`; data: TInsertInput | Array<TInsertInput> }\n | { type: `update`; data: Partial<TRow> | Array<Partial<TRow>> }\n | { type: `delete`; key: TKey | Array<TKey> }\n | { type: `upsert`; data: Partial<TRow> | Array<Partial<TRow>> }\n\nexport interface SyncContext<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n collection: Collection<TRow>\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: TRow) => TKey\n begin: () => void\n write: (message: Omit<ChangeMessage<TRow>, `key`>) => void\n commit: () => void\n}\n\ninterface NormalizedOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n type: `insert` | `update` | `delete` | `upsert`\n key: TKey\n data?: TRow | Partial<TRow>\n}\n\n// Normalize operations into a consistent format\nfunction normalizeOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n ops:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): Array<NormalizedOperation<TRow, TKey>> {\n const operations = Array.isArray(ops) ? ops : [ops]\n const normalized: Array<NormalizedOperation<TRow, TKey>> = []\n\n for (const op of operations) {\n if (op.type === `delete`) {\n const keys = Array.isArray(op.key) ? op.key : [op.key]\n for (const key of keys) {\n normalized.push({ type: `delete`, key })\n }\n } else {\n const items = Array.isArray(op.data) ? op.data : [op.data]\n for (const item of items) {\n let key: TKey\n if (op.type === `update`) {\n // For updates, we need to get the key from the partial data\n key = ctx.getKey(item as TRow)\n } else {\n // For insert/upsert, validate and resolve the full item first\n const resolved = ctx.collection.validateData(\n item,\n op.type === `upsert` ? 
`insert` : op.type\n )\n key = ctx.getKey(resolved)\n }\n normalized.push({ type: op.type, key, data: item })\n }\n }\n }\n\n return normalized\n}\n\n// Validate operations before executing\nfunction validateOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n>(\n operations: Array<NormalizedOperation<TRow, TKey>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const seenKeys = new Set<TKey>()\n\n for (const op of operations) {\n // Check for duplicate keys within the batch\n if (seenKeys.has(op.key)) {\n throw new DuplicateKeyInBatchError(op.key)\n }\n seenKeys.add(op.key)\n\n // Validate operation-specific requirements\n if (op.type === `update`) {\n if (!ctx.collection.has(op.key)) {\n throw new UpdateOperationItemNotFoundError(op.key)\n }\n } else if (op.type === `delete`) {\n if (!ctx.collection.has(op.key)) {\n throw new DeleteOperationItemNotFoundError(op.key)\n }\n }\n }\n}\n\n// Execute a batch of operations\nexport function performWriteOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n operations:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const normalized = normalizeOperations(operations, ctx)\n validateOperations(normalized, ctx)\n\n ctx.begin()\n\n for (const op of normalized) {\n switch (op.type) {\n case `insert`: {\n const resolved = ctx.collection.validateData(op.data, `insert`)\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n break\n }\n case `update`: {\n const currentItem = ctx.collection.get(op.key)!\n const updatedItem = {\n ...currentItem,\n ...op.data,\n }\n const resolved = ctx.collection.validateData(\n updatedItem,\n `update`,\n op.key\n )\n ctx.write({\n type: `update`,\n value: resolved,\n })\n break\n }\n case `delete`: {\n const currentItem = ctx.collection.get(op.key)!\n ctx.write({\n type: `delete`,\n value: currentItem,\n })\n break\n }\n case `upsert`: {\n const resolved = ctx.collection.validateData(\n op.data,\n ctx.collection.has(op.key) ? 
`update` : `insert`,\n op.key\n )\n if (ctx.collection.has(op.key)) {\n ctx.write({\n type: `update`,\n value: resolved,\n })\n } else {\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n }\n break\n }\n }\n }\n\n ctx.commit()\n\n // Update query cache after successful commit\n const updatedData = ctx.collection.toArray\n ctx.queryClient.setQueryData(ctx.queryKey, updatedData)\n}\n\n// Factory function to create write utils\nexport function createWriteUtils<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(getContext: () => SyncContext<TRow, TKey> | null) {\n function ensureContext(): SyncContext<TRow, TKey> {\n const context = getContext()\n if (!context) {\n throw new SyncNotInitializedError()\n }\n return context\n }\n\n return {\n writeInsert(data: TInsertInput | Array<TInsertInput>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `insert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n // If we're in a batch, just add to the batch operations\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n // Otherwise, perform the operation immediately\n performWriteOperations(operation, ctx)\n },\n\n writeUpdate(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `update`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeDelete(key: TKey | Array<TKey>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `delete`,\n key,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeUpsert(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `upsert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeBatch(callback: () => void) {\n const ctx = ensureContext()\n\n // Check if we're already in a batch (nested batch)\n const existingBatch = activeBatchContexts.get(ctx)\n if (existingBatch?.isActive) {\n throw new Error(\n `Cannot nest writeBatch calls. 
Complete the current batch before starting a new one.`\n )\n }\n\n // Set up the batch context for this specific collection\n const batchContext = {\n operations: [] as Array<SyncOperation<TRow, TKey, TInsertInput>>,\n isActive: true,\n }\n activeBatchContexts.set(ctx, batchContext)\n\n try {\n // Execute the callback - any write operations will be collected\n const result = callback()\n\n // Check if callback returns a promise (async function)\n if (\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n result &&\n typeof result === `object` &&\n `then` in result &&\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n typeof result.then === `function`\n ) {\n throw new Error(\n `writeBatch does not support async callbacks. The callback must be synchronous.`\n )\n }\n\n // Perform all collected operations\n if (batchContext.operations.length > 0) {\n performWriteOperations(batchContext.operations, ctx)\n }\n } finally {\n // Always clear the batch context\n batchContext.isActive = false\n activeBatchContexts.delete(ctx)\n }\n },\n }\n}\n"],"names":["DuplicateKeyInBatchError","UpdateOperationItemNotFoundError","DeleteOperationItemNotFoundError","SyncNotInitializedError"],"mappings":";;;AAUA,MAAM,0CAA0B,QAAA;AA0ChC,SAAS,oBAKP,KAGA,KACwC;AACxC,QAAM,aAAa,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAClD,QAAM,aAAqD,CAAA;AAE3D,aAAW,MAAM,YAAY;AAC3B,QAAI,GAAG,SAAS,UAAU;AACxB,YAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG;AACrD,iBAAW,OAAO,MAAM;AACtB,mBAAW,KAAK,EAAE,MAAM,UAAU,KAAK;AAAA,MACzC;AAAA,IACF,OAAO;AACL,YAAM,QAAQ,MAAM,QAAQ,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,GAAG,IAAI;AACzD,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACJ,YAAI,GAAG,SAAS,UAAU;AAExB,gBAAM,IAAI,OAAO,IAAY;AAAA,QAC/B,OAAO;AAEL,gBAAM,WAAW,IAAI,WAAW;AAAA,YAC9B;AAAA,YACA,GAAG,SAAS,WAAW,WAAW,GAAG;AAAA,UAAA;AAEvC,gBAAM,IAAI,OAAO,QAAQ;AAAA,QAC3B;AACA,mBAAW,KAAK,EAAE,MAAM,GAAG,MAAM,KAAK,MAAM,MAAM;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,mBAIP,YACA,KACM;AACN,QAAM,+BAAe,IAAA;AAErB,aAAW,MAAM,YAAY;AAE3B,QAAI,SAAS,IAAI,GAAG,GAAG,GAAG;AACxB,YAAM,IAAIA,OAAAA,yBAAyB,GAAG,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,GAAG;AAGnB,QAAI,GAAG,SAAS,UAAU;AACxB,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAIC,OAAAA,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF,WAAW,GAAG,SAAS,UAAU;AAC/B,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAIC,OAAAA,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,uBAKd,YAGA,KACM;AACN,QAAM,aAAa,oBAAoB,YAAY,GAAG;AACtD,qBAAmB,YAAY,GAAG;AAElC,MAAI,MAAA;AAEJ,aAAW,MAAM,YAAY;AAC3B,YAAQ,GAAG,MAAA;AAAA,MACT,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW,aAAa,GAAG,MAAM,QAAQ;AAC9D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,GAAG,GAAG;AAAA,QAAA;AAER,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B;AAAA,UACA;AAAA,UACA,GAAG;AAAA,QAAA;AAEL,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B,GAAG;AAAA,UACH,IAAI,WAAW,IAAI,GAAG,GAAG,IAAI,WAAW;AAAA,UACxC,GAAG;AAAA,QAAA;AAEL,YAAI,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC9B,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,OAAO;AACL,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CA
CR;AAAA,QACH;AACA;AAAA,MACF;AAAA,IAAA;AAAA,EAEJ;AAEA,MAAI,OAAA;AAGJ,QAAM,cAAc,IAAI,WAAW;AACnC,MAAI,YAAY,aAAa,IAAI,UAAU,WAAW;AACxD;AAGO,SAAS,iBAId,YAAkD;AAClD,WAAS,gBAAyC;AAChD,UAAM,UAAU,WAAA;AAChB,QAAI,CAAC,SAAS;AACZ,YAAM,IAAIC,OAAAA,wBAAA;AAAA,IACZ;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,YAAY,MAA0C;AACpD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAGhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAGA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,KAAyB;AACnC,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,WAAW,UAAsB;AAC/B,YAAM,MAAM,cAAA;AAGZ,YAAM,gBAAgB,oBAAoB,IAAI,GAAG;AACjD,UAAI,eAAe,UAAU;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QAAA;AAAA,MAEJ;AAGA,YAAM,eAAe;AAAA,QACnB,YAAY,CAAA;AAAA,QACZ,UAAU;AAAA,MAAA;AAEZ,0BAAoB,IAAI,KAAK,YAAY;AAEzC,UAAI;AAEF,cAAM,SAAS,SAAA;AAGf;AAAA;AAAA;AAAA,UAGE,UACA,OAAO,WAAW,YAClB,UAAU;AAAA,UAEV,OAAO,OAAO,SAAS;AAAA,UACvB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UAAA;AAAA,QAEJ;AAGA,YAAI,aAAa,WAAW,SAAS,GAAG;AACtC,iCAAuB,aAAa,YAAY,GAAG;AAAA,QACrD;AAAA,MACF,UAAA;AAEE,qBAAa,WAAW;AACxB,4BAAoB,OAAO,GAAG;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAEJ;;;"}
+ {"version":3,"file":"manual-sync.cjs","sources":["../../src/manual-sync.ts"],"sourcesContent":["import {\n DeleteOperationItemNotFoundError,\n DuplicateKeyInBatchError,\n SyncNotInitializedError,\n UpdateOperationItemNotFoundError,\n} from \"./errors\"\nimport type { QueryClient } from \"@tanstack/query-core\"\nimport type { ChangeMessage, Collection } from \"@tanstack/db\"\n\n// Track active batch operations per context to prevent cross-collection contamination\nconst activeBatchContexts = new WeakMap<\n SyncContext<any, any>,\n {\n operations: Array<SyncOperation<any, any, any>>\n isActive: boolean\n }\n>()\n\n// Types for sync operations\nexport type SyncOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n> =\n | { type: `insert`; data: TInsertInput | Array<TInsertInput> }\n | { type: `update`; data: Partial<TRow> | Array<Partial<TRow>> }\n | { type: `delete`; key: TKey | Array<TKey> }\n | { type: `upsert`; data: Partial<TRow> | Array<Partial<TRow>> }\n\nexport interface SyncContext<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n collection: Collection<TRow>\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: TRow) => TKey\n begin: () => void\n write: (message: Omit<ChangeMessage<TRow>, `key`>) => void\n commit: () => void\n}\n\ninterface NormalizedOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n type: `insert` | `update` | `delete` | `upsert`\n key: TKey\n data?: TRow | Partial<TRow>\n}\n\n// Normalize operations into a consistent format\nfunction normalizeOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n ops:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): Array<NormalizedOperation<TRow, TKey>> {\n const operations = Array.isArray(ops) ? ops : [ops]\n const normalized: Array<NormalizedOperation<TRow, TKey>> = []\n\n for (const op of operations) {\n if (op.type === `delete`) {\n const keys = Array.isArray(op.key) ? op.key : [op.key]\n for (const key of keys) {\n normalized.push({ type: `delete`, key })\n }\n } else {\n const items = Array.isArray(op.data) ? op.data : [op.data]\n for (const item of items) {\n let key: TKey\n if (op.type === `update`) {\n // For updates, we need to get the key from the partial data\n key = ctx.getKey(item as TRow)\n } else {\n // For insert/upsert, validate and resolve the full item first\n const resolved = ctx.collection.validateData(\n item,\n op.type === `upsert` ? 
`insert` : op.type\n )\n key = ctx.getKey(resolved)\n }\n normalized.push({ type: op.type, key, data: item })\n }\n }\n }\n\n return normalized\n}\n\n// Validate operations before executing\nfunction validateOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n>(\n operations: Array<NormalizedOperation<TRow, TKey>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const seenKeys = new Set<TKey>()\n\n for (const op of operations) {\n // Check for duplicate keys within the batch\n if (seenKeys.has(op.key)) {\n throw new DuplicateKeyInBatchError(op.key)\n }\n seenKeys.add(op.key)\n\n // Validate operation-specific requirements\n // NOTE: These validations check the synced store only, not the combined view (synced + optimistic)\n // This allows write operations to work correctly even when items are optimistically modified\n if (op.type === `update`) {\n if (!ctx.collection._state.syncedData.has(op.key)) {\n throw new UpdateOperationItemNotFoundError(op.key)\n }\n } else if (op.type === `delete`) {\n if (!ctx.collection._state.syncedData.has(op.key)) {\n throw new DeleteOperationItemNotFoundError(op.key)\n }\n }\n }\n}\n\n// Execute a batch of operations\nexport function performWriteOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n operations:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const normalized = normalizeOperations(operations, ctx)\n validateOperations(normalized, ctx)\n\n ctx.begin()\n\n for (const op of normalized) {\n switch (op.type) {\n case `insert`: {\n const resolved = ctx.collection.validateData(op.data, `insert`)\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n break\n }\n case `update`: {\n // Get from synced store only, not the combined view\n const currentItem = ctx.collection._state.syncedData.get(op.key)!\n const updatedItem = {\n ...currentItem,\n ...op.data,\n }\n const resolved = ctx.collection.validateData(\n updatedItem,\n `update`,\n op.key\n )\n ctx.write({\n type: `update`,\n value: resolved,\n })\n break\n }\n case `delete`: {\n // Get from synced store only, not the combined view\n const currentItem = ctx.collection._state.syncedData.get(op.key)!\n ctx.write({\n type: `delete`,\n value: currentItem,\n })\n break\n }\n case `upsert`: {\n // Check synced store only, not the combined view\n const existsInSyncedStore = ctx.collection._state.syncedData.has(op.key)\n const resolved = ctx.collection.validateData(\n op.data,\n existsInSyncedStore ? 
`update` : `insert`,\n op.key\n )\n if (existsInSyncedStore) {\n ctx.write({\n type: `update`,\n value: resolved,\n })\n } else {\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n }\n break\n }\n }\n }\n\n ctx.commit()\n\n // Update query cache after successful commit\n const updatedData = ctx.collection.toArray\n ctx.queryClient.setQueryData(ctx.queryKey, updatedData)\n}\n\n// Factory function to create write utils\nexport function createWriteUtils<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(getContext: () => SyncContext<TRow, TKey> | null) {\n function ensureContext(): SyncContext<TRow, TKey> {\n const context = getContext()\n if (!context) {\n throw new SyncNotInitializedError()\n }\n return context\n }\n\n return {\n writeInsert(data: TInsertInput | Array<TInsertInput>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `insert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n // If we're in a batch, just add to the batch operations\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n // Otherwise, perform the operation immediately\n performWriteOperations(operation, ctx)\n },\n\n writeUpdate(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `update`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeDelete(key: TKey | Array<TKey>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `delete`,\n key,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeUpsert(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `upsert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeBatch(callback: () => void) {\n const ctx = ensureContext()\n\n // Check if we're already in a batch (nested batch)\n const existingBatch = activeBatchContexts.get(ctx)\n if (existingBatch?.isActive) {\n throw new Error(\n `Cannot nest writeBatch calls. 
Complete the current batch before starting a new one.`\n )\n }\n\n // Set up the batch context for this specific collection\n const batchContext = {\n operations: [] as Array<SyncOperation<TRow, TKey, TInsertInput>>,\n isActive: true,\n }\n activeBatchContexts.set(ctx, batchContext)\n\n try {\n // Execute the callback - any write operations will be collected\n const result = callback()\n\n // Check if callback returns a promise (async function)\n if (\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n result &&\n typeof result === `object` &&\n `then` in result &&\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n typeof result.then === `function`\n ) {\n throw new Error(\n `writeBatch does not support async callbacks. The callback must be synchronous.`\n )\n }\n\n // Perform all collected operations\n if (batchContext.operations.length > 0) {\n performWriteOperations(batchContext.operations, ctx)\n }\n } finally {\n // Always clear the batch context\n batchContext.isActive = false\n activeBatchContexts.delete(ctx)\n }\n },\n }\n}\n"],"names":["DuplicateKeyInBatchError","UpdateOperationItemNotFoundError","DeleteOperationItemNotFoundError","SyncNotInitializedError"],"mappings":";;;AAUA,MAAM,0CAA0B,QAAA;AA0ChC,SAAS,oBAKP,KAGA,KACwC;AACxC,QAAM,aAAa,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAClD,QAAM,aAAqD,CAAA;AAE3D,aAAW,MAAM,YAAY;AAC3B,QAAI,GAAG,SAAS,UAAU;AACxB,YAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG;AACrD,iBAAW,OAAO,MAAM;AACtB,mBAAW,KAAK,EAAE,MAAM,UAAU,KAAK;AAAA,MACzC;AAAA,IACF,OAAO;AACL,YAAM,QAAQ,MAAM,QAAQ,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,GAAG,IAAI;AACzD,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACJ,YAAI,GAAG,SAAS,UAAU;AAExB,gBAAM,IAAI,OAAO,IAAY;AAAA,QAC/B,OAAO;AAEL,gBAAM,WAAW,IAAI,WAAW;AAAA,YAC9B;AAAA,YACA,GAAG,SAAS,WAAW,WAAW,GAAG;AAAA,UAAA;AAEvC,gBAAM,IAAI,OAAO,QAAQ;AAAA,QAC3B;AACA,mBAAW,KAAK,EAAE,MAAM,GAAG,MAAM,KAAK,MAAM,MAAM;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,mBAIP,YACA,KACM;AACN,QAAM,+BAAe,IAAA;AAErB,aAAW,MAAM,YAAY;AAE3B,QAAI,SAAS,IAAI,GAAG,GAAG,GAAG;AACxB,YAAM,IAAIA,OAAAA,yBAAyB,GAAG,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,GAAG;AAKnB,QAAI,GAAG,SAAS,UAAU;AACxB,UAAI,CAAC,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG,GAAG;AACjD,cAAM,IAAIC,OAAAA,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF,WAAW,GAAG,SAAS,UAAU;AAC/B,UAAI,CAAC,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG,GAAG;AACjD,cAAM,IAAIC,OAAAA,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,uBAKd,YAGA,KACM;AACN,QAAM,aAAa,oBAAoB,YAAY,GAAG;AACtD,qBAAmB,YAAY,GAAG;AAElC,MAAI,MAAA;AAEJ,aAAW,MAAM,YAAY;AAC3B,YAAQ,GAAG,MAAA;AAAA,MACT,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW,aAAa,GAAG,MAAM,QAAQ;AAC9D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AAEb,cAAM,cAAc,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG;AAC/D,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,GAAG,GAAG;AAAA,QAAA;AAER,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B;AAAA,UACA;AAAA,UACA,GAAG;AAAA,QAAA;AAEL,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AAEb,cAAM,cAAc,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG;AAC/D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AAEb,cAAM,sBAAsB,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG;AACvE,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B,GAAG;AAAA,UACH,sBAAsB,WAAW;AAAA,UACjC,GAAG;AAAA,QAAA;AAEL,YAAI,qBAAqB;AACvB,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,OAAO;AACL,cAAI,
MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH;AACA;AAAA,MACF;AAAA,IAAA;AAAA,EAEJ;AAEA,MAAI,OAAA;AAGJ,QAAM,cAAc,IAAI,WAAW;AACnC,MAAI,YAAY,aAAa,IAAI,UAAU,WAAW;AACxD;AAGO,SAAS,iBAId,YAAkD;AAClD,WAAS,gBAAyC;AAChD,UAAM,UAAU,WAAA;AAChB,QAAI,CAAC,SAAS;AACZ,YAAM,IAAIC,OAAAA,wBAAA;AAAA,IACZ;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,YAAY,MAA0C;AACpD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAGhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAGA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,KAAyB;AACnC,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,WAAW,UAAsB;AAC/B,YAAM,MAAM,cAAA;AAGZ,YAAM,gBAAgB,oBAAoB,IAAI,GAAG;AACjD,UAAI,eAAe,UAAU;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QAAA;AAAA,MAEJ;AAGA,YAAM,eAAe;AAAA,QACnB,YAAY,CAAA;AAAA,QACZ,UAAU;AAAA,MAAA;AAEZ,0BAAoB,IAAI,KAAK,YAAY;AAEzC,UAAI;AAEF,cAAM,SAAS,SAAA;AAGf;AAAA;AAAA;AAAA,UAGE,UACA,OAAO,WAAW,YAClB,UAAU;AAAA,UAEV,OAAO,OAAO,SAAS;AAAA,UACvB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UAAA;AAAA,QAEJ;AAGA,YAAI,aAAa,WAAW,SAAS,GAAG;AACtC,iCAAuB,aAAa,YAAY,GAAG;AAAA,QACrD;AAAA,MACF,UAAA;AAEE,qBAAa,WAAW;AACxB,4BAAoB,OAAO,GAAG;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAEJ;;;"}
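The `manual-sync.ts` source embedded in the map above also spells out the batching contract: `writeBatch` collects `write*` calls, flushes them through a single `performWriteOperations` pass, and throws on nested batches, on duplicate keys within a batch, and on async callbacks. A hedged usage sketch, reusing the illustrative `todos` collection from the earlier sketch:

```ts
// Assumes `todos` from the earlier sketch, with sync initialized and the
// query resolved so that todo `1` is already in the synced store.
todos.utils.writeBatch(() => {
  // Collected here, then validated and committed together on return:
  todos.utils.writeInsert({ id: `2`, title: `review PR`, done: false })
  todos.utils.writeUpdate({ id: `1`, done: true })
  // Repeating a key within the same batch would throw DuplicateKeyInBatchError.
})

// The callback must be synchronous; an async callback throws at runtime:
// todos.utils.writeBatch(async () => { /* ... */ })
```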
@@ -41,14 +41,15 @@ function queryCollectionOptions(config) {
  const observerOptions = {
  queryKey,
  queryFn,
- meta,
- enabled,
- refetchInterval,
- retry,
- retryDelay,
- staleTime,
  structuralSharing: true,
- notifyOnChangeProps: `all`
+ notifyOnChangeProps: `all`,
+ // Only include options that are explicitly defined to allow QueryClient defaultOptions to be used
+ ...meta !== void 0 && { meta },
+ ...enabled !== void 0 && { enabled },
+ ...refetchInterval !== void 0 && { refetchInterval },
+ ...retry !== void 0 && { retry },
+ ...retryDelay !== void 0 && { retryDelay },
+ ...staleTime !== void 0 && { staleTime }
  };
  const localObserver = new queryCore.QueryObserver(queryClient, observerOptions);
  let isSubscribed = false;
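The `query.cjs` change switches the observer options from always-present keys to conditional spreads, so an option the caller never set is simply absent rather than passed as `undefined`, letting `QueryClient` `defaultOptions` fill it in. A sketch of the pattern and its user-facing effect; the values, `Todo`, and `fetchTodos` are illustrative:

```ts
import { QueryClient } from "@tanstack/query-core"
import { createCollection } from "@tanstack/db"
import { queryCollectionOptions } from "@tanstack/query-db-collection"

type Todo = { id: string; title: string; done: boolean }
const fetchTodos = async (): Promise<Array<Todo>> => [] // illustrative fetcher

// The pattern itself: `...(cond && { key })` adds `key` only when defined.
declare const staleTime: number | undefined
declare const retry: number | boolean | undefined

const observerOptions = {
  queryKey: [`todos`],
  structuralSharing: true,
  notifyOnChangeProps: `all` as const,
  ...(staleTime !== undefined && { staleTime }),
  ...(retry !== undefined && { retry }),
}

// User-facing effect: defaults configured on the QueryClient now apply to
// collections that do not set the corresponding option themselves.
const clientWithDefaults = new QueryClient({
  defaultOptions: {
    queries: { staleTime: 60_000, retry: 2 },
  },
})

const todosWithDefaults = createCollection(
  queryCollectionOptions({
    queryKey: [`todos`],
    queryFn: fetchTodos,
    queryClient: clientWithDefaults,
    getKey: (item) => item.id,
    // No staleTime / retry passed here: as of 0.2.34 the observer inherits
    // the client defaults instead of overriding them with `undefined`.
  })
)
```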
@@ -1 +1 @@
- {"version":3,"file":"query.cjs","sources":["../../src/query.ts"],"sourcesContent":["import { QueryObserver } from \"@tanstack/query-core\"\nimport {\n GetKeyRequiredError,\n QueryClientRequiredError,\n QueryFnRequiredError,\n QueryKeyRequiredError,\n} from \"./errors\"\nimport { createWriteUtils } from \"./manual-sync\"\nimport type {\n QueryClient,\n QueryFunctionContext,\n QueryKey,\n QueryObserverOptions,\n} from \"@tanstack/query-core\"\nimport type {\n BaseCollectionConfig,\n ChangeMessage,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n// Re-export for external use\nexport type { SyncOperation } from \"./manual-sync\"\n\n// Schema output type inference helper (matches electric.ts pattern)\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends object\n ? StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n// Schema input type inference helper (matches electric.ts pattern)\ntype InferSchemaInput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferInput<T> extends object\n ? StandardSchemaV1.InferInput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration options for creating a Query Collection\n * @template T - The explicit type of items stored in the collection\n * @template TQueryFn - The queryFn type\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @template TSchema - The schema type for validation\n */\nexport interface QueryCollectionConfig<\n T extends object = object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = never,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /** The query key used by TanStack Query to identify this query */\n queryKey: TQueryKey\n /** Function that fetches data from the server. Must return the complete collection state */\n queryFn: TQueryFn extends (\n context: QueryFunctionContext<TQueryKey>\n ) => Promise<Array<any>>\n ? 
(context: QueryFunctionContext<TQueryKey>) => Promise<Array<T>>\n : TQueryFn\n /* Function that extracts array items from wrapped API responses (e.g metadata, pagination) */\n select?: (data: TQueryData) => Array<T>\n /** The TanStack Query client instance */\n queryClient: QueryClient\n\n // Query-specific options\n /** Whether the query should automatically run (default: true) */\n enabled?: boolean\n refetchInterval?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`refetchInterval`]\n retry?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retry`]\n retryDelay?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retryDelay`]\n staleTime?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`staleTime`]\n\n /**\n * Metadata to pass to the query.\n * Available in queryFn via context.meta\n *\n * @example\n * // Using meta for error context\n * queryFn: async (context) => {\n * try {\n * return await api.getTodos(userId)\n * } catch (error) {\n * // Use meta for better error messages\n * throw new Error(\n * context.meta?.errorMessage || 'Failed to load todos'\n * )\n * }\n * },\n * meta: {\n * errorMessage: `Failed to load todos for user ${userId}`\n * }\n */\n meta?: Record<string, unknown>\n}\n\n/**\n * Type for the refetch utility function\n */\nexport type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise<void>\n\n/**\n * Utility methods available on Query Collections for direct writes and manual operations.\n * Direct writes bypass the normal query/mutation flow and write directly to the synced data store.\n * @template TItem - The type of items stored in the collection\n * @template TKey - The type of the item keys\n * @template TInsertInput - The type accepted for insert operations\n * @template TError - The type of errors that can occur during queries\n */\nexport interface QueryCollectionUtils<\n TItem extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TInsertInput extends object = TItem,\n TError = unknown,\n> extends UtilsRecord {\n /** Manually trigger a refetch of the query */\n refetch: RefetchFn\n /** Insert one or more items directly into the synced data store without triggering a query refetch or optimistic update */\n writeInsert: (data: TInsertInput | Array<TInsertInput>) => void\n /** Update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpdate: (updates: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Delete one or more items directly from the synced data store without triggering a query refetch or optimistic update */\n writeDelete: (keys: TKey | Array<TKey>) => void\n /** Insert or update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpsert: (data: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Execute multiple write operations as a single atomic batch to the synced data store */\n writeBatch: (callback: () => void) => void\n /** Get the last error encountered by the query (if any); reset on success */\n lastError: () => TError | undefined\n /** Check if the collection is in an error state */\n isError: () => boolean\n /**\n * Get the number of consecutive sync failures.\n * Incremented only when query fails completely (not per retry attempt); reset on success.\n */\n errorCount: () => number\n /**\n * Clear the error state 
and trigger a refetch of the query\n * @returns Promise that resolves when the refetch completes successfully\n * @throws Error if the refetch fails\n */\n clearError: () => Promise<void>\n}\n\n/**\n * Creates query collection options for use with a standard Collection.\n * This integrates TanStack Query with TanStack DB for automatic synchronization.\n *\n * Supports automatic type inference following the priority order:\n * 1. Schema inference (highest priority)\n * 2. QueryFn return type inference (second priority)\n *\n * @template T - Type of the schema if a schema is provided otherwise it is the type of the values returned by the queryFn\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @param config - Configuration options for the Query collection\n * @returns Collection options with utilities for direct writes and manual operations\n *\n * @example\n * // Type inferred from queryFn return type (NEW!)\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => {\n * const response = await fetch('/api/todos')\n * return response.json() as Todo[] // Type automatically inferred!\n * },\n * queryClient,\n * getKey: (item) => item.id, // item is typed as Todo\n * })\n * )\n *\n * @example\n * // Explicit type\n * const todosCollection = createCollection<Todo>(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Schema inference\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * schema: todoSchema, // Type inferred from schema\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // With persistence handlers\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: fetchTodos,\n * queryClient,\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * await api.createTodos(transaction.mutations.map(m => m.modified))\n * },\n * onUpdate: async ({ transaction }) => {\n * await api.updateTodos(transaction.mutations)\n * },\n * onDelete: async ({ transaction }) => {\n * await api.deleteTodos(transaction.mutations.map(m => m.key))\n * }\n * })\n * )\n *\n * @example\n * // The select option extracts the items array from a response with metadata\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * select: (data) => data.items, // Extract the array of items\n * queryClient,\n * schema: todoSchema,\n * getKey: (item) => item.id,\n * })\n * )\n */\n// Overload for when schema is provided and select present\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n select: (data: TQueryData) => Array<InferSchemaInput<T>>\n }\n): 
CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided and select present\nexport function queryCollectionOptions<\n T extends object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n T,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n never,\n TQueryData\n > & {\n schema?: never // prohibit schema\n select: (data: TQueryData) => Array<T>\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\n// Overload for when schema is provided\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n (\n context: QueryFunctionContext<any>\n ) => Promise<Array<InferSchemaOutput<T>>>,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided\nexport function queryCollectionOptions<\n T extends object,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n T,\n (context: QueryFunctionContext<any>) => Promise<Array<T>>,\n TError,\n TQueryKey,\n TKey\n > & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\nexport function queryCollectionOptions(\n config: QueryCollectionConfig<Record<string, unknown>>\n): CollectionConfig & {\n utils: QueryCollectionUtils\n} {\n const {\n queryKey,\n queryFn,\n select,\n queryClient,\n enabled,\n refetchInterval,\n retry,\n retryDelay,\n staleTime,\n getKey,\n onInsert,\n onUpdate,\n onDelete,\n meta,\n ...baseCollectionConfig\n } = config\n\n // Validate required parameters\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryKey) {\n throw new QueryKeyRequiredError()\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryFn) {\n throw new QueryFnRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryClient) {\n throw new QueryClientRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!getKey) {\n throw new GetKeyRequiredError()\n }\n\n /** The last error encountered by the query */\n let lastError: any\n /** The number of consecutive sync failures */\n let errorCount = 0\n /** The timestamp for when the query most recently returned the status as \"error\" */\n let lastErrorUpdatedAt = 0\n\n const internalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, markReady, collection } = params\n\n const observerOptions: QueryObserverOptions<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n > = {\n queryKey: queryKey,\n queryFn: queryFn,\n meta: meta,\n 
enabled: enabled,\n refetchInterval: refetchInterval,\n retry: retry,\n retryDelay: retryDelay,\n staleTime: staleTime,\n structuralSharing: true,\n notifyOnChangeProps: `all`,\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)\n // We'll dynamically unsubscribe/resubscribe based 
on subscriber count to maintain staleTime behavior\n subscribeToQuery()\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":["QueryKeyRequiredError","QueryFnRequiredError","QueryClientRequiredError","GetKeyRequiredError","QueryObserver","createWriteUtils"],"mappings":";;;;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAIA,OAAAA,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAIC,OAAAA,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAIC,OAAAA,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAIC,OAAAA,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA,IAAA;AAGvB,UAAM,gBAAgB,IAAIC,wBAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAIA,qBAAA;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAAA,
MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,MAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,uBAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eAAe;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAaC,WAAAA;AAAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;;"}
+ {"version":3,"file":"query.cjs","sources":["../../src/query.ts"],"sourcesContent":["import { QueryObserver } from \"@tanstack/query-core\"\nimport {\n GetKeyRequiredError,\n QueryClientRequiredError,\n QueryFnRequiredError,\n QueryKeyRequiredError,\n} from \"./errors\"\nimport { createWriteUtils } from \"./manual-sync\"\nimport type {\n QueryClient,\n QueryFunctionContext,\n QueryKey,\n QueryObserverOptions,\n} from \"@tanstack/query-core\"\nimport type {\n BaseCollectionConfig,\n ChangeMessage,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n// Re-export for external use\nexport type { SyncOperation } from \"./manual-sync\"\n\n// Schema output type inference helper (matches electric.ts pattern)\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends object\n ? StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n// Schema input type inference helper (matches electric.ts pattern)\ntype InferSchemaInput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferInput<T> extends object\n ? StandardSchemaV1.InferInput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration options for creating a Query Collection\n * @template T - The explicit type of items stored in the collection\n * @template TQueryFn - The queryFn type\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @template TSchema - The schema type for validation\n */\nexport interface QueryCollectionConfig<\n T extends object = object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = never,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /** The query key used by TanStack Query to identify this query */\n queryKey: TQueryKey\n /** Function that fetches data from the server. Must return the complete collection state */\n queryFn: TQueryFn extends (\n context: QueryFunctionContext<TQueryKey>\n ) => Promise<Array<any>>\n ? 
(context: QueryFunctionContext<TQueryKey>) => Promise<Array<T>>\n : TQueryFn\n /* Function that extracts array items from wrapped API responses (e.g metadata, pagination) */\n select?: (data: TQueryData) => Array<T>\n /** The TanStack Query client instance */\n queryClient: QueryClient\n\n // Query-specific options\n /** Whether the query should automatically run (default: true) */\n enabled?: boolean\n refetchInterval?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`refetchInterval`]\n retry?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retry`]\n retryDelay?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retryDelay`]\n staleTime?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`staleTime`]\n\n /**\n * Metadata to pass to the query.\n * Available in queryFn via context.meta\n *\n * @example\n * // Using meta for error context\n * queryFn: async (context) => {\n * try {\n * return await api.getTodos(userId)\n * } catch (error) {\n * // Use meta for better error messages\n * throw new Error(\n * context.meta?.errorMessage || 'Failed to load todos'\n * )\n * }\n * },\n * meta: {\n * errorMessage: `Failed to load todos for user ${userId}`\n * }\n */\n meta?: Record<string, unknown>\n}\n\n/**\n * Type for the refetch utility function\n */\nexport type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise<void>\n\n/**\n * Utility methods available on Query Collections for direct writes and manual operations.\n * Direct writes bypass the normal query/mutation flow and write directly to the synced data store.\n * @template TItem - The type of items stored in the collection\n * @template TKey - The type of the item keys\n * @template TInsertInput - The type accepted for insert operations\n * @template TError - The type of errors that can occur during queries\n */\nexport interface QueryCollectionUtils<\n TItem extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TInsertInput extends object = TItem,\n TError = unknown,\n> extends UtilsRecord {\n /** Manually trigger a refetch of the query */\n refetch: RefetchFn\n /** Insert one or more items directly into the synced data store without triggering a query refetch or optimistic update */\n writeInsert: (data: TInsertInput | Array<TInsertInput>) => void\n /** Update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpdate: (updates: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Delete one or more items directly from the synced data store without triggering a query refetch or optimistic update */\n writeDelete: (keys: TKey | Array<TKey>) => void\n /** Insert or update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpsert: (data: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Execute multiple write operations as a single atomic batch to the synced data store */\n writeBatch: (callback: () => void) => void\n /** Get the last error encountered by the query (if any); reset on success */\n lastError: () => TError | undefined\n /** Check if the collection is in an error state */\n isError: () => boolean\n /**\n * Get the number of consecutive sync failures.\n * Incremented only when query fails completely (not per retry attempt); reset on success.\n */\n errorCount: () => number\n /**\n * Clear the error state 
and trigger a refetch of the query\n * @returns Promise that resolves when the refetch completes successfully\n * @throws Error if the refetch fails\n */\n clearError: () => Promise<void>\n}\n\n/**\n * Creates query collection options for use with a standard Collection.\n * This integrates TanStack Query with TanStack DB for automatic synchronization.\n *\n * Supports automatic type inference following the priority order:\n * 1. Schema inference (highest priority)\n * 2. QueryFn return type inference (second priority)\n *\n * @template T - Type of the schema if a schema is provided otherwise it is the type of the values returned by the queryFn\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @param config - Configuration options for the Query collection\n * @returns Collection options with utilities for direct writes and manual operations\n *\n * @example\n * // Type inferred from queryFn return type (NEW!)\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => {\n * const response = await fetch('/api/todos')\n * return response.json() as Todo[] // Type automatically inferred!\n * },\n * queryClient,\n * getKey: (item) => item.id, // item is typed as Todo\n * })\n * )\n *\n * @example\n * // Explicit type\n * const todosCollection = createCollection<Todo>(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Schema inference\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * schema: todoSchema, // Type inferred from schema\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // With persistence handlers\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: fetchTodos,\n * queryClient,\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * await api.createTodos(transaction.mutations.map(m => m.modified))\n * },\n * onUpdate: async ({ transaction }) => {\n * await api.updateTodos(transaction.mutations)\n * },\n * onDelete: async ({ transaction }) => {\n * await api.deleteTodos(transaction.mutations.map(m => m.key))\n * }\n * })\n * )\n *\n * @example\n * // The select option extracts the items array from a response with metadata\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * select: (data) => data.items, // Extract the array of items\n * queryClient,\n * schema: todoSchema,\n * getKey: (item) => item.id,\n * })\n * )\n */\n// Overload for when schema is provided and select present\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n select: (data: TQueryData) => Array<InferSchemaInput<T>>\n }\n): 
CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided and select present\nexport function queryCollectionOptions<\n T extends object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n T,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n never,\n TQueryData\n > & {\n schema?: never // prohibit schema\n select: (data: TQueryData) => Array<T>\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\n// Overload for when schema is provided\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n (\n context: QueryFunctionContext<any>\n ) => Promise<Array<InferSchemaOutput<T>>>,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided\nexport function queryCollectionOptions<\n T extends object,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n T,\n (context: QueryFunctionContext<any>) => Promise<Array<T>>,\n TError,\n TQueryKey,\n TKey\n > & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\nexport function queryCollectionOptions(\n config: QueryCollectionConfig<Record<string, unknown>>\n): CollectionConfig & {\n utils: QueryCollectionUtils\n} {\n const {\n queryKey,\n queryFn,\n select,\n queryClient,\n enabled,\n refetchInterval,\n retry,\n retryDelay,\n staleTime,\n getKey,\n onInsert,\n onUpdate,\n onDelete,\n meta,\n ...baseCollectionConfig\n } = config\n\n // Validate required parameters\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryKey) {\n throw new QueryKeyRequiredError()\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryFn) {\n throw new QueryFnRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryClient) {\n throw new QueryClientRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!getKey) {\n throw new GetKeyRequiredError()\n }\n\n /** The last error encountered by the query */\n let lastError: any\n /** The number of consecutive sync failures */\n let errorCount = 0\n /** The timestamp for when the query most recently returned the status as \"error\" */\n let lastErrorUpdatedAt = 0\n\n const internalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, markReady, collection } = params\n\n const observerOptions: QueryObserverOptions<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n > = {\n queryKey: queryKey,\n queryFn: queryFn,\n 
structuralSharing: true,\n notifyOnChangeProps: `all`,\n // Only include options that are explicitly defined to allow QueryClient defaultOptions to be used\n ...(meta !== undefined && { meta }),\n ...(enabled !== undefined && { enabled }),\n ...(refetchInterval !== undefined && { refetchInterval }),\n ...(retry !== undefined && { retry }),\n ...(retryDelay !== undefined && { retryDelay }),\n ...(staleTime !== undefined && { staleTime }),\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && 
actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)\n // We'll dynamically unsubscribe/resubscribe based on subscriber count to maintain staleTime behavior\n subscribeToQuery()\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":["QueryKeyRequiredError","QueryFnRequiredError","QueryClientRequiredError","GetKeyRequiredError","QueryObserver","createWriteUtils"],"mappings":";;;;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAIA,OAAAA,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAIC,OAAAA,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAIC,OAAAA,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAIC,OAAAA,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA;AAAA,MAErB,GAAI,SAAS,UAAa,EAAE,KAAA;AAAA,MAC5B,GAAI,YAAY,UAAa,EAAE,QAAA;AAAA,MAC/B,GAAI,oBAAoB,UAAa,EAAE,gBAAA;AAAA,MACvC,GAAI,UAAU,UAAa,EAAE,MAAA;AAAA,MAC7B,GAAI,eAAe,UAAa,EAAE,WAAA;AAAA,MAClC,GAAI,cAAc,UAAa,EAAE,UAAA;AAAA,IAAU;AAG7C,UAAM,gBAAgB,IAAIC,wBAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAIA,qBAAA;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;
AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAAA,MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,MAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,uBAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eAAe;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAaC,WAAAA;AAAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;;"}
package/dist/esm/manual-sync.js CHANGED
@@ -36,11 +36,11 @@ function validateOperations(operations, ctx) {
  }
  seenKeys.add(op.key);
  if (op.type === `update`) {
- if (!ctx.collection.has(op.key)) {
+ if (!ctx.collection._state.syncedData.has(op.key)) {
  throw new UpdateOperationItemNotFoundError(op.key);
  }
  } else if (op.type === `delete`) {
- if (!ctx.collection.has(op.key)) {
+ if (!ctx.collection._state.syncedData.has(op.key)) {
  throw new DeleteOperationItemNotFoundError(op.key);
  }
  }
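The practical effect of this hunk: direct writes are now validated against the synced store (collection._state.syncedData) rather than the combined synced + optimistic view exposed by collection.has(). The sketch below is illustrative only, not code from the package; it assumes a todosCollection created with createCollection(queryCollectionOptions({ ... })), an item with id "1" already delivered by queryFn, and that collection.delete() applies an optimistic delete (as TanStack DB mutations are assumed to do).

// "1" exists in the synced store; remove it optimistically through the normal mutation path.
todosCollection.delete("1")

// Before this change, validateOperations called collection.has("1"), which reflects the
// optimistic overlay, got false, and writeDelete threw DeleteOperationItemNotFoundError.
// Now the check reads collection._state.syncedData, where "1" still exists, so the direct
// write to the synced store is accepted.
todosCollection.utils.writeDelete("1")

// Conversely, a key that exists only as an optimistic insert (never returned by queryFn)
// is absent from the synced store, so writeUpdate/writeDelete on it still throw the
// corresponding *OperationItemNotFoundError.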
@@ -61,7 +61,7 @@ function performWriteOperations(operations, ctx) {
  break;
  }
  case `update`: {
- const currentItem = ctx.collection.get(op.key);
+ const currentItem = ctx.collection._state.syncedData.get(op.key);
  const updatedItem = {
  ...currentItem,
  ...op.data
@@ -78,7 +78,7 @@ function performWriteOperations(operations, ctx) {
  break;
  }
  case `delete`: {
- const currentItem = ctx.collection.get(op.key);
+ const currentItem = ctx.collection._state.syncedData.get(op.key);
  ctx.write({
  type: `delete`,
  value: currentItem
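These two hunks change which row the write path reads as its base: update merges op.data onto the row held in the synced store, and delete emits the synced row, instead of whatever the combined optimistic view returns from collection.get(). A short sketch of the resulting merge, reusing the same hypothetical todosCollection and item shape as above (not package code):

// Assume the synced store holds { id: "1", title: "Buy milk", completed: false } while an
// optimistic update has already flipped completed to true in the combined view.
todosCollection.utils.writeUpdate({ id: "1", title: "Buy oat milk" })

// currentItem now comes from collection._state.syncedData.get("1"), so the row written to
// the synced store is { id: "1", title: "Buy oat milk", completed: false }; the optimistic
// change is not folded into the direct write.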
@@ -86,12 +86,13 @@ function performWriteOperations(operations, ctx) {
  break;
  }
  case `upsert`: {
+ const existsInSyncedStore = ctx.collection._state.syncedData.has(op.key);
  const resolved = ctx.collection.validateData(
  op.data,
- ctx.collection.has(op.key) ? `update` : `insert`,
+ existsInSyncedStore ? `update` : `insert`,
  op.key
  );
- if (ctx.collection.has(op.key)) {
+ if (existsInSyncedStore) {
  ctx.write({
  type: `update`,
  value: resolved
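The upsert case now makes the same decision from the synced store and, by hoisting the check into existsInSyncedStore, guarantees that validateData() and the subsequent ctx.write() take the same branch instead of calling the membership check twice. A hedged sketch of how writeUpsert routes after this change, assuming key "1" is present in the synced store and key "9" is not:

// Key "1" was delivered by queryFn: validated as an update and written as an update.
todosCollection.utils.writeUpsert({ id: "1", completed: true })

// Key "9" is absent from the synced store (even if an optimistic insert for it exists):
// validated as an insert and written as an insert into the synced data.
todosCollection.utils.writeUpsert({ id: "9", title: "New todo", completed: false })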
@@ -1 +1 @@
- {"version":3,"file":"manual-sync.js","sources":["../../src/manual-sync.ts"],"sourcesContent":["import {\n DeleteOperationItemNotFoundError,\n DuplicateKeyInBatchError,\n SyncNotInitializedError,\n UpdateOperationItemNotFoundError,\n} from \"./errors\"\nimport type { QueryClient } from \"@tanstack/query-core\"\nimport type { ChangeMessage, Collection } from \"@tanstack/db\"\n\n// Track active batch operations per context to prevent cross-collection contamination\nconst activeBatchContexts = new WeakMap<\n SyncContext<any, any>,\n {\n operations: Array<SyncOperation<any, any, any>>\n isActive: boolean\n }\n>()\n\n// Types for sync operations\nexport type SyncOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n> =\n | { type: `insert`; data: TInsertInput | Array<TInsertInput> }\n | { type: `update`; data: Partial<TRow> | Array<Partial<TRow>> }\n | { type: `delete`; key: TKey | Array<TKey> }\n | { type: `upsert`; data: Partial<TRow> | Array<Partial<TRow>> }\n\nexport interface SyncContext<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n collection: Collection<TRow>\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: TRow) => TKey\n begin: () => void\n write: (message: Omit<ChangeMessage<TRow>, `key`>) => void\n commit: () => void\n}\n\ninterface NormalizedOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n type: `insert` | `update` | `delete` | `upsert`\n key: TKey\n data?: TRow | Partial<TRow>\n}\n\n// Normalize operations into a consistent format\nfunction normalizeOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n ops:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): Array<NormalizedOperation<TRow, TKey>> {\n const operations = Array.isArray(ops) ? ops : [ops]\n const normalized: Array<NormalizedOperation<TRow, TKey>> = []\n\n for (const op of operations) {\n if (op.type === `delete`) {\n const keys = Array.isArray(op.key) ? op.key : [op.key]\n for (const key of keys) {\n normalized.push({ type: `delete`, key })\n }\n } else {\n const items = Array.isArray(op.data) ? op.data : [op.data]\n for (const item of items) {\n let key: TKey\n if (op.type === `update`) {\n // For updates, we need to get the key from the partial data\n key = ctx.getKey(item as TRow)\n } else {\n // For insert/upsert, validate and resolve the full item first\n const resolved = ctx.collection.validateData(\n item,\n op.type === `upsert` ? 
`insert` : op.type\n )\n key = ctx.getKey(resolved)\n }\n normalized.push({ type: op.type, key, data: item })\n }\n }\n }\n\n return normalized\n}\n\n// Validate operations before executing\nfunction validateOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n>(\n operations: Array<NormalizedOperation<TRow, TKey>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const seenKeys = new Set<TKey>()\n\n for (const op of operations) {\n // Check for duplicate keys within the batch\n if (seenKeys.has(op.key)) {\n throw new DuplicateKeyInBatchError(op.key)\n }\n seenKeys.add(op.key)\n\n // Validate operation-specific requirements\n if (op.type === `update`) {\n if (!ctx.collection.has(op.key)) {\n throw new UpdateOperationItemNotFoundError(op.key)\n }\n } else if (op.type === `delete`) {\n if (!ctx.collection.has(op.key)) {\n throw new DeleteOperationItemNotFoundError(op.key)\n }\n }\n }\n}\n\n// Execute a batch of operations\nexport function performWriteOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n operations:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const normalized = normalizeOperations(operations, ctx)\n validateOperations(normalized, ctx)\n\n ctx.begin()\n\n for (const op of normalized) {\n switch (op.type) {\n case `insert`: {\n const resolved = ctx.collection.validateData(op.data, `insert`)\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n break\n }\n case `update`: {\n const currentItem = ctx.collection.get(op.key)!\n const updatedItem = {\n ...currentItem,\n ...op.data,\n }\n const resolved = ctx.collection.validateData(\n updatedItem,\n `update`,\n op.key\n )\n ctx.write({\n type: `update`,\n value: resolved,\n })\n break\n }\n case `delete`: {\n const currentItem = ctx.collection.get(op.key)!\n ctx.write({\n type: `delete`,\n value: currentItem,\n })\n break\n }\n case `upsert`: {\n const resolved = ctx.collection.validateData(\n op.data,\n ctx.collection.has(op.key) ? 
`update` : `insert`,\n op.key\n )\n if (ctx.collection.has(op.key)) {\n ctx.write({\n type: `update`,\n value: resolved,\n })\n } else {\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n }\n break\n }\n }\n }\n\n ctx.commit()\n\n // Update query cache after successful commit\n const updatedData = ctx.collection.toArray\n ctx.queryClient.setQueryData(ctx.queryKey, updatedData)\n}\n\n// Factory function to create write utils\nexport function createWriteUtils<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(getContext: () => SyncContext<TRow, TKey> | null) {\n function ensureContext(): SyncContext<TRow, TKey> {\n const context = getContext()\n if (!context) {\n throw new SyncNotInitializedError()\n }\n return context\n }\n\n return {\n writeInsert(data: TInsertInput | Array<TInsertInput>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `insert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n // If we're in a batch, just add to the batch operations\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n // Otherwise, perform the operation immediately\n performWriteOperations(operation, ctx)\n },\n\n writeUpdate(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `update`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeDelete(key: TKey | Array<TKey>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `delete`,\n key,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeUpsert(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `upsert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeBatch(callback: () => void) {\n const ctx = ensureContext()\n\n // Check if we're already in a batch (nested batch)\n const existingBatch = activeBatchContexts.get(ctx)\n if (existingBatch?.isActive) {\n throw new Error(\n `Cannot nest writeBatch calls. 
Complete the current batch before starting a new one.`\n )\n }\n\n // Set up the batch context for this specific collection\n const batchContext = {\n operations: [] as Array<SyncOperation<TRow, TKey, TInsertInput>>,\n isActive: true,\n }\n activeBatchContexts.set(ctx, batchContext)\n\n try {\n // Execute the callback - any write operations will be collected\n const result = callback()\n\n // Check if callback returns a promise (async function)\n if (\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n result &&\n typeof result === `object` &&\n `then` in result &&\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n typeof result.then === `function`\n ) {\n throw new Error(\n `writeBatch does not support async callbacks. The callback must be synchronous.`\n )\n }\n\n // Perform all collected operations\n if (batchContext.operations.length > 0) {\n performWriteOperations(batchContext.operations, ctx)\n }\n } finally {\n // Always clear the batch context\n batchContext.isActive = false\n activeBatchContexts.delete(ctx)\n }\n },\n }\n}\n"],"names":[],"mappings":";AAUA,MAAM,0CAA0B,QAAA;AA0ChC,SAAS,oBAKP,KAGA,KACwC;AACxC,QAAM,aAAa,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAClD,QAAM,aAAqD,CAAA;AAE3D,aAAW,MAAM,YAAY;AAC3B,QAAI,GAAG,SAAS,UAAU;AACxB,YAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG;AACrD,iBAAW,OAAO,MAAM;AACtB,mBAAW,KAAK,EAAE,MAAM,UAAU,KAAK;AAAA,MACzC;AAAA,IACF,OAAO;AACL,YAAM,QAAQ,MAAM,QAAQ,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,GAAG,IAAI;AACzD,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACJ,YAAI,GAAG,SAAS,UAAU;AAExB,gBAAM,IAAI,OAAO,IAAY;AAAA,QAC/B,OAAO;AAEL,gBAAM,WAAW,IAAI,WAAW;AAAA,YAC9B;AAAA,YACA,GAAG,SAAS,WAAW,WAAW,GAAG;AAAA,UAAA;AAEvC,gBAAM,IAAI,OAAO,QAAQ;AAAA,QAC3B;AACA,mBAAW,KAAK,EAAE,MAAM,GAAG,MAAM,KAAK,MAAM,MAAM;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,mBAIP,YACA,KACM;AACN,QAAM,+BAAe,IAAA;AAErB,aAAW,MAAM,YAAY;AAE3B,QAAI,SAAS,IAAI,GAAG,GAAG,GAAG;AACxB,YAAM,IAAI,yBAAyB,GAAG,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,GAAG;AAGnB,QAAI,GAAG,SAAS,UAAU;AACxB,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF,WAAW,GAAG,SAAS,UAAU;AAC/B,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,uBAKd,YAGA,KACM;AACN,QAAM,aAAa,oBAAoB,YAAY,GAAG;AACtD,qBAAmB,YAAY,GAAG;AAElC,MAAI,MAAA;AAEJ,aAAW,MAAM,YAAY;AAC3B,YAAQ,GAAG,MAAA;AAAA,MACT,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW,aAAa,GAAG,MAAM,QAAQ;AAC9D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,GAAG,GAAG;AAAA,QAAA;AAER,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B;AAAA,UACA;AAAA,UACA,GAAG;AAAA,QAAA;AAEL,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B,GAAG;AAAA,UACH,IAAI,WAAW,IAAI,GAAG,GAAG,IAAI,WAAW;AAAA,UACxC,GAAG;AAAA,QAAA;AAEL,YAAI,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC9B,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,OAAO;AACL,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH;AACA;AAAA,MACF;AAAA,IAAA;AAAA,EAEJ;AAEA,MAAI,OAAA;AAGJ,QAAM,cAAc,IAAI,WAAW;AACnC,MAAI,YAAY,aAAa,IAAI,UAAU,WAAW;AACxD;AAGO,SAAS,iBAId
,YAAkD;AAClD,WAAS,gBAAyC;AAChD,UAAM,UAAU,WAAA;AAChB,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,wBAAA;AAAA,IACZ;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,YAAY,MAA0C;AACpD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAGhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAGA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,KAAyB;AACnC,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,WAAW,UAAsB;AAC/B,YAAM,MAAM,cAAA;AAGZ,YAAM,gBAAgB,oBAAoB,IAAI,GAAG;AACjD,UAAI,eAAe,UAAU;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QAAA;AAAA,MAEJ;AAGA,YAAM,eAAe;AAAA,QACnB,YAAY,CAAA;AAAA,QACZ,UAAU;AAAA,MAAA;AAEZ,0BAAoB,IAAI,KAAK,YAAY;AAEzC,UAAI;AAEF,cAAM,SAAS,SAAA;AAGf;AAAA;AAAA;AAAA,UAGE,UACA,OAAO,WAAW,YAClB,UAAU;AAAA,UAEV,OAAO,OAAO,SAAS;AAAA,UACvB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UAAA;AAAA,QAEJ;AAGA,YAAI,aAAa,WAAW,SAAS,GAAG;AACtC,iCAAuB,aAAa,YAAY,GAAG;AAAA,QACrD;AAAA,MACF,UAAA;AAEE,qBAAa,WAAW;AACxB,4BAAoB,OAAO,GAAG;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAEJ;"}
+ {"version":3,"file":"manual-sync.js","sources":["../../src/manual-sync.ts"],"sourcesContent":["import {\n DeleteOperationItemNotFoundError,\n DuplicateKeyInBatchError,\n SyncNotInitializedError,\n UpdateOperationItemNotFoundError,\n} from \"./errors\"\nimport type { QueryClient } from \"@tanstack/query-core\"\nimport type { ChangeMessage, Collection } from \"@tanstack/db\"\n\n// Track active batch operations per context to prevent cross-collection contamination\nconst activeBatchContexts = new WeakMap<\n SyncContext<any, any>,\n {\n operations: Array<SyncOperation<any, any, any>>\n isActive: boolean\n }\n>()\n\n// Types for sync operations\nexport type SyncOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n> =\n | { type: `insert`; data: TInsertInput | Array<TInsertInput> }\n | { type: `update`; data: Partial<TRow> | Array<Partial<TRow>> }\n | { type: `delete`; key: TKey | Array<TKey> }\n | { type: `upsert`; data: Partial<TRow> | Array<Partial<TRow>> }\n\nexport interface SyncContext<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n collection: Collection<TRow>\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: TRow) => TKey\n begin: () => void\n write: (message: Omit<ChangeMessage<TRow>, `key`>) => void\n commit: () => void\n}\n\ninterface NormalizedOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n type: `insert` | `update` | `delete` | `upsert`\n key: TKey\n data?: TRow | Partial<TRow>\n}\n\n// Normalize operations into a consistent format\nfunction normalizeOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n ops:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): Array<NormalizedOperation<TRow, TKey>> {\n const operations = Array.isArray(ops) ? ops : [ops]\n const normalized: Array<NormalizedOperation<TRow, TKey>> = []\n\n for (const op of operations) {\n if (op.type === `delete`) {\n const keys = Array.isArray(op.key) ? op.key : [op.key]\n for (const key of keys) {\n normalized.push({ type: `delete`, key })\n }\n } else {\n const items = Array.isArray(op.data) ? op.data : [op.data]\n for (const item of items) {\n let key: TKey\n if (op.type === `update`) {\n // For updates, we need to get the key from the partial data\n key = ctx.getKey(item as TRow)\n } else {\n // For insert/upsert, validate and resolve the full item first\n const resolved = ctx.collection.validateData(\n item,\n op.type === `upsert` ? 
`insert` : op.type\n )\n key = ctx.getKey(resolved)\n }\n normalized.push({ type: op.type, key, data: item })\n }\n }\n }\n\n return normalized\n}\n\n// Validate operations before executing\nfunction validateOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n>(\n operations: Array<NormalizedOperation<TRow, TKey>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const seenKeys = new Set<TKey>()\n\n for (const op of operations) {\n // Check for duplicate keys within the batch\n if (seenKeys.has(op.key)) {\n throw new DuplicateKeyInBatchError(op.key)\n }\n seenKeys.add(op.key)\n\n // Validate operation-specific requirements\n // NOTE: These validations check the synced store only, not the combined view (synced + optimistic)\n // This allows write operations to work correctly even when items are optimistically modified\n if (op.type === `update`) {\n if (!ctx.collection._state.syncedData.has(op.key)) {\n throw new UpdateOperationItemNotFoundError(op.key)\n }\n } else if (op.type === `delete`) {\n if (!ctx.collection._state.syncedData.has(op.key)) {\n throw new DeleteOperationItemNotFoundError(op.key)\n }\n }\n }\n}\n\n// Execute a batch of operations\nexport function performWriteOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n operations:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const normalized = normalizeOperations(operations, ctx)\n validateOperations(normalized, ctx)\n\n ctx.begin()\n\n for (const op of normalized) {\n switch (op.type) {\n case `insert`: {\n const resolved = ctx.collection.validateData(op.data, `insert`)\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n break\n }\n case `update`: {\n // Get from synced store only, not the combined view\n const currentItem = ctx.collection._state.syncedData.get(op.key)!\n const updatedItem = {\n ...currentItem,\n ...op.data,\n }\n const resolved = ctx.collection.validateData(\n updatedItem,\n `update`,\n op.key\n )\n ctx.write({\n type: `update`,\n value: resolved,\n })\n break\n }\n case `delete`: {\n // Get from synced store only, not the combined view\n const currentItem = ctx.collection._state.syncedData.get(op.key)!\n ctx.write({\n type: `delete`,\n value: currentItem,\n })\n break\n }\n case `upsert`: {\n // Check synced store only, not the combined view\n const existsInSyncedStore = ctx.collection._state.syncedData.has(op.key)\n const resolved = ctx.collection.validateData(\n op.data,\n existsInSyncedStore ? 
`update` : `insert`,\n op.key\n )\n if (existsInSyncedStore) {\n ctx.write({\n type: `update`,\n value: resolved,\n })\n } else {\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n }\n break\n }\n }\n }\n\n ctx.commit()\n\n // Update query cache after successful commit\n const updatedData = ctx.collection.toArray\n ctx.queryClient.setQueryData(ctx.queryKey, updatedData)\n}\n\n// Factory function to create write utils\nexport function createWriteUtils<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(getContext: () => SyncContext<TRow, TKey> | null) {\n function ensureContext(): SyncContext<TRow, TKey> {\n const context = getContext()\n if (!context) {\n throw new SyncNotInitializedError()\n }\n return context\n }\n\n return {\n writeInsert(data: TInsertInput | Array<TInsertInput>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `insert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n // If we're in a batch, just add to the batch operations\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n // Otherwise, perform the operation immediately\n performWriteOperations(operation, ctx)\n },\n\n writeUpdate(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `update`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeDelete(key: TKey | Array<TKey>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `delete`,\n key,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeUpsert(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `upsert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeBatch(callback: () => void) {\n const ctx = ensureContext()\n\n // Check if we're already in a batch (nested batch)\n const existingBatch = activeBatchContexts.get(ctx)\n if (existingBatch?.isActive) {\n throw new Error(\n `Cannot nest writeBatch calls. 
Complete the current batch before starting a new one.`\n )\n }\n\n // Set up the batch context for this specific collection\n const batchContext = {\n operations: [] as Array<SyncOperation<TRow, TKey, TInsertInput>>,\n isActive: true,\n }\n activeBatchContexts.set(ctx, batchContext)\n\n try {\n // Execute the callback - any write operations will be collected\n const result = callback()\n\n // Check if callback returns a promise (async function)\n if (\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n result &&\n typeof result === `object` &&\n `then` in result &&\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n typeof result.then === `function`\n ) {\n throw new Error(\n `writeBatch does not support async callbacks. The callback must be synchronous.`\n )\n }\n\n // Perform all collected operations\n if (batchContext.operations.length > 0) {\n performWriteOperations(batchContext.operations, ctx)\n }\n } finally {\n // Always clear the batch context\n batchContext.isActive = false\n activeBatchContexts.delete(ctx)\n }\n },\n }\n}\n"],"names":[],"mappings":";AAUA,MAAM,0CAA0B,QAAA;AA0ChC,SAAS,oBAKP,KAGA,KACwC;AACxC,QAAM,aAAa,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAClD,QAAM,aAAqD,CAAA;AAE3D,aAAW,MAAM,YAAY;AAC3B,QAAI,GAAG,SAAS,UAAU;AACxB,YAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG;AACrD,iBAAW,OAAO,MAAM;AACtB,mBAAW,KAAK,EAAE,MAAM,UAAU,KAAK;AAAA,MACzC;AAAA,IACF,OAAO;AACL,YAAM,QAAQ,MAAM,QAAQ,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,GAAG,IAAI;AACzD,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACJ,YAAI,GAAG,SAAS,UAAU;AAExB,gBAAM,IAAI,OAAO,IAAY;AAAA,QAC/B,OAAO;AAEL,gBAAM,WAAW,IAAI,WAAW;AAAA,YAC9B;AAAA,YACA,GAAG,SAAS,WAAW,WAAW,GAAG;AAAA,UAAA;AAEvC,gBAAM,IAAI,OAAO,QAAQ;AAAA,QAC3B;AACA,mBAAW,KAAK,EAAE,MAAM,GAAG,MAAM,KAAK,MAAM,MAAM;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,mBAIP,YACA,KACM;AACN,QAAM,+BAAe,IAAA;AAErB,aAAW,MAAM,YAAY;AAE3B,QAAI,SAAS,IAAI,GAAG,GAAG,GAAG;AACxB,YAAM,IAAI,yBAAyB,GAAG,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,GAAG;AAKnB,QAAI,GAAG,SAAS,UAAU;AACxB,UAAI,CAAC,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG,GAAG;AACjD,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF,WAAW,GAAG,SAAS,UAAU;AAC/B,UAAI,CAAC,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG,GAAG;AACjD,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,uBAKd,YAGA,KACM;AACN,QAAM,aAAa,oBAAoB,YAAY,GAAG;AACtD,qBAAmB,YAAY,GAAG;AAElC,MAAI,MAAA;AAEJ,aAAW,MAAM,YAAY;AAC3B,YAAQ,GAAG,MAAA;AAAA,MACT,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW,aAAa,GAAG,MAAM,QAAQ;AAC9D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AAEb,cAAM,cAAc,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG;AAC/D,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,GAAG,GAAG;AAAA,QAAA;AAER,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B;AAAA,UACA;AAAA,UACA,GAAG;AAAA,QAAA;AAEL,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AAEb,cAAM,cAAc,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG;AAC/D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AAEb,cAAM,sBAAsB,IAAI,WAAW,OAAO,WAAW,IAAI,GAAG,GAAG;AACvE,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B,GAAG;AAAA,UACH,sBAAsB,WAAW;AAAA,UACjC,GAAG;AAAA,QAAA;AAEL,YAAI,qBAAqB;AACvB,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,OAAO;AACL,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH;AACA;AAAA,MACF;AAAA,IAAA;AAAA,EAEJ;AAEA,MAAI,OAAA;AAGJ,QAAM,cAAc,IAAI,WAAW;AACnC,MAAI
,YAAY,aAAa,IAAI,UAAU,WAAW;AACxD;AAGO,SAAS,iBAId,YAAkD;AAClD,WAAS,gBAAyC;AAChD,UAAM,UAAU,WAAA;AAChB,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,wBAAA;AAAA,IACZ;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,YAAY,MAA0C;AACpD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAGhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAGA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,KAAyB;AACnC,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,WAAW,UAAsB;AAC/B,YAAM,MAAM,cAAA;AAGZ,YAAM,gBAAgB,oBAAoB,IAAI,GAAG;AACjD,UAAI,eAAe,UAAU;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QAAA;AAAA,MAEJ;AAGA,YAAM,eAAe;AAAA,QACnB,YAAY,CAAA;AAAA,QACZ,UAAU;AAAA,MAAA;AAEZ,0BAAoB,IAAI,KAAK,YAAY;AAEzC,UAAI;AAEF,cAAM,SAAS,SAAA;AAGf;AAAA;AAAA;AAAA,UAGE,UACA,OAAO,WAAW,YAClB,UAAU;AAAA,UAEV,OAAO,OAAO,SAAS;AAAA,UACvB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UAAA;AAAA,QAEJ;AAGA,YAAI,aAAa,WAAW,SAAS,GAAG;AACtC,iCAAuB,aAAa,YAAY,GAAG;AAAA,QACrD;AAAA,MACF,UAAA;AAEE,qBAAa,WAAW;AACxB,4BAAoB,OAAO,GAAG;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAEJ;"}
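For orientation, the manual-sync source embedded in the map above also shows how these direct writes compose: writeBatch collects operations synchronously, flushes them through performWriteOperations in one begin/write/commit cycle, and then mirrors the committed collection into the query cache via queryClient.setQueryData. The usage sketch below is illustrative only and reuses the hypothetical todosCollection and item shape from the earlier sketches:

todosCollection.utils.writeBatch(() => {
  // Collected here, then validated (against the synced store, per the hunks above) and
  // applied atomically when the callback returns.
  todosCollection.utils.writeInsert({ id: "3", title: "Water plants", completed: false })
  todosCollection.utils.writeUpdate({ id: "1", completed: true })
  todosCollection.utils.writeDelete("2") // assumes "2" is present in the synced store
})
// Per the embedded source: nesting writeBatch calls throws, and the callback must be
// synchronous; an async callback is rejected at runtime.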
package/dist/esm/query.js CHANGED
@@ -39,14 +39,15 @@ function queryCollectionOptions(config) {
  const observerOptions = {
  queryKey,
  queryFn,
- meta,
- enabled,
- refetchInterval,
- retry,
- retryDelay,
- staleTime,
  structuralSharing: true,
- notifyOnChangeProps: `all`
+ notifyOnChangeProps: `all`,
+ // Only include options that are explicitly defined to allow QueryClient defaultOptions to be used
+ ...meta !== void 0 && { meta },
+ ...enabled !== void 0 && { enabled },
+ ...refetchInterval !== void 0 && { refetchInterval },
+ ...retry !== void 0 && { retry },
+ ...retryDelay !== void 0 && { retryDelay },
+ ...staleTime !== void 0 && { staleTime }
  };
  const localObserver = new QueryObserver(queryClient, observerOptions);
  let isSubscribed = false;
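This hunk stops the collection's internal QueryObserver from receiving explicitly-undefined options. Previously meta, enabled, refetchInterval, retry, retryDelay and staleTime were always set on observerOptions even when the caller never passed them, which shadowed any QueryClient defaultOptions for those keys; as the inline comment states, they are now only included when explicitly defined so the client defaults can take effect. A hedged sketch of the resulting behaviour: QueryClient and defaultOptions are standard TanStack Query APIs, createCollection is assumed to be TanStack DB's collection factory, and Todo/fetchTodos are hypothetical.

import { QueryClient } from "@tanstack/query-core"
import { createCollection } from "@tanstack/db" // assumption: TanStack DB's collection factory
import { queryCollectionOptions } from "@tanstack/query-db-collection"

type Todo = { id: string; title: string; completed: boolean } // hypothetical item shape
declare const fetchTodos: () => Promise<Array<Todo>>          // hypothetical fetcher

const queryClient = new QueryClient({
  defaultOptions: { queries: { staleTime: 30_000, retry: 2 } },
})

// No staleTime or retry is passed below, so after this change the collection's query
// observer inherits staleTime: 30_000 and retry: 2 from defaultOptions instead of
// receiving explicit `undefined` values for those keys.
const todosCollection = createCollection(
  queryCollectionOptions({
    queryKey: [`todos`],
    queryFn: fetchTodos,
    queryClient,
    getKey: (item) => item.id,
  })
)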
@@ -1 +1 @@
- {"version":3,"file":"query.js","sources":["../../src/query.ts"],"sourcesContent":["import { QueryObserver } from \"@tanstack/query-core\"\nimport {\n GetKeyRequiredError,\n QueryClientRequiredError,\n QueryFnRequiredError,\n QueryKeyRequiredError,\n} from \"./errors\"\nimport { createWriteUtils } from \"./manual-sync\"\nimport type {\n QueryClient,\n QueryFunctionContext,\n QueryKey,\n QueryObserverOptions,\n} from \"@tanstack/query-core\"\nimport type {\n BaseCollectionConfig,\n ChangeMessage,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n// Re-export for external use\nexport type { SyncOperation } from \"./manual-sync\"\n\n// Schema output type inference helper (matches electric.ts pattern)\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends object\n ? StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n// Schema input type inference helper (matches electric.ts pattern)\ntype InferSchemaInput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferInput<T> extends object\n ? StandardSchemaV1.InferInput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration options for creating a Query Collection\n * @template T - The explicit type of items stored in the collection\n * @template TQueryFn - The queryFn type\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @template TSchema - The schema type for validation\n */\nexport interface QueryCollectionConfig<\n T extends object = object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = never,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /** The query key used by TanStack Query to identify this query */\n queryKey: TQueryKey\n /** Function that fetches data from the server. Must return the complete collection state */\n queryFn: TQueryFn extends (\n context: QueryFunctionContext<TQueryKey>\n ) => Promise<Array<any>>\n ? 
(context: QueryFunctionContext<TQueryKey>) => Promise<Array<T>>\n : TQueryFn\n /* Function that extracts array items from wrapped API responses (e.g metadata, pagination) */\n select?: (data: TQueryData) => Array<T>\n /** The TanStack Query client instance */\n queryClient: QueryClient\n\n // Query-specific options\n /** Whether the query should automatically run (default: true) */\n enabled?: boolean\n refetchInterval?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`refetchInterval`]\n retry?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retry`]\n retryDelay?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retryDelay`]\n staleTime?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`staleTime`]\n\n /**\n * Metadata to pass to the query.\n * Available in queryFn via context.meta\n *\n * @example\n * // Using meta for error context\n * queryFn: async (context) => {\n * try {\n * return await api.getTodos(userId)\n * } catch (error) {\n * // Use meta for better error messages\n * throw new Error(\n * context.meta?.errorMessage || 'Failed to load todos'\n * )\n * }\n * },\n * meta: {\n * errorMessage: `Failed to load todos for user ${userId}`\n * }\n */\n meta?: Record<string, unknown>\n}\n\n/**\n * Type for the refetch utility function\n */\nexport type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise<void>\n\n/**\n * Utility methods available on Query Collections for direct writes and manual operations.\n * Direct writes bypass the normal query/mutation flow and write directly to the synced data store.\n * @template TItem - The type of items stored in the collection\n * @template TKey - The type of the item keys\n * @template TInsertInput - The type accepted for insert operations\n * @template TError - The type of errors that can occur during queries\n */\nexport interface QueryCollectionUtils<\n TItem extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TInsertInput extends object = TItem,\n TError = unknown,\n> extends UtilsRecord {\n /** Manually trigger a refetch of the query */\n refetch: RefetchFn\n /** Insert one or more items directly into the synced data store without triggering a query refetch or optimistic update */\n writeInsert: (data: TInsertInput | Array<TInsertInput>) => void\n /** Update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpdate: (updates: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Delete one or more items directly from the synced data store without triggering a query refetch or optimistic update */\n writeDelete: (keys: TKey | Array<TKey>) => void\n /** Insert or update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpsert: (data: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Execute multiple write operations as a single atomic batch to the synced data store */\n writeBatch: (callback: () => void) => void\n /** Get the last error encountered by the query (if any); reset on success */\n lastError: () => TError | undefined\n /** Check if the collection is in an error state */\n isError: () => boolean\n /**\n * Get the number of consecutive sync failures.\n * Incremented only when query fails completely (not per retry attempt); reset on success.\n */\n errorCount: () => number\n /**\n * Clear the error state 
and trigger a refetch of the query\n * @returns Promise that resolves when the refetch completes successfully\n * @throws Error if the refetch fails\n */\n clearError: () => Promise<void>\n}\n\n/**\n * Creates query collection options for use with a standard Collection.\n * This integrates TanStack Query with TanStack DB for automatic synchronization.\n *\n * Supports automatic type inference following the priority order:\n * 1. Schema inference (highest priority)\n * 2. QueryFn return type inference (second priority)\n *\n * @template T - Type of the schema if a schema is provided otherwise it is the type of the values returned by the queryFn\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @param config - Configuration options for the Query collection\n * @returns Collection options with utilities for direct writes and manual operations\n *\n * @example\n * // Type inferred from queryFn return type (NEW!)\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => {\n * const response = await fetch('/api/todos')\n * return response.json() as Todo[] // Type automatically inferred!\n * },\n * queryClient,\n * getKey: (item) => item.id, // item is typed as Todo\n * })\n * )\n *\n * @example\n * // Explicit type\n * const todosCollection = createCollection<Todo>(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Schema inference\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * schema: todoSchema, // Type inferred from schema\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // With persistence handlers\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: fetchTodos,\n * queryClient,\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * await api.createTodos(transaction.mutations.map(m => m.modified))\n * },\n * onUpdate: async ({ transaction }) => {\n * await api.updateTodos(transaction.mutations)\n * },\n * onDelete: async ({ transaction }) => {\n * await api.deleteTodos(transaction.mutations.map(m => m.key))\n * }\n * })\n * )\n *\n * @example\n * // The select option extracts the items array from a response with metadata\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * select: (data) => data.items, // Extract the array of items\n * queryClient,\n * schema: todoSchema,\n * getKey: (item) => item.id,\n * })\n * )\n */\n// Overload for when schema is provided and select present\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n select: (data: TQueryData) => Array<InferSchemaInput<T>>\n }\n): 
CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided and select present\nexport function queryCollectionOptions<\n T extends object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n T,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n never,\n TQueryData\n > & {\n schema?: never // prohibit schema\n select: (data: TQueryData) => Array<T>\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\n// Overload for when schema is provided\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n (\n context: QueryFunctionContext<any>\n ) => Promise<Array<InferSchemaOutput<T>>>,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided\nexport function queryCollectionOptions<\n T extends object,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n T,\n (context: QueryFunctionContext<any>) => Promise<Array<T>>,\n TError,\n TQueryKey,\n TKey\n > & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\nexport function queryCollectionOptions(\n config: QueryCollectionConfig<Record<string, unknown>>\n): CollectionConfig & {\n utils: QueryCollectionUtils\n} {\n const {\n queryKey,\n queryFn,\n select,\n queryClient,\n enabled,\n refetchInterval,\n retry,\n retryDelay,\n staleTime,\n getKey,\n onInsert,\n onUpdate,\n onDelete,\n meta,\n ...baseCollectionConfig\n } = config\n\n // Validate required parameters\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryKey) {\n throw new QueryKeyRequiredError()\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryFn) {\n throw new QueryFnRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryClient) {\n throw new QueryClientRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!getKey) {\n throw new GetKeyRequiredError()\n }\n\n /** The last error encountered by the query */\n let lastError: any\n /** The number of consecutive sync failures */\n let errorCount = 0\n /** The timestamp for when the query most recently returned the status as \"error\" */\n let lastErrorUpdatedAt = 0\n\n const internalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, markReady, collection } = params\n\n const observerOptions: QueryObserverOptions<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n > = {\n queryKey: queryKey,\n queryFn: queryFn,\n meta: meta,\n 
enabled: enabled,\n refetchInterval: refetchInterval,\n retry: retry,\n retryDelay: retryDelay,\n staleTime: staleTime,\n structuralSharing: true,\n notifyOnChangeProps: `all`,\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)\n // We'll dynamically unsubscribe/resubscribe based 
on subscriber count to maintain staleTime behavior\n subscribeToQuery()\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":[],"mappings":";;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA,IAAA;AAGvB,UAAM,gBAAgB,IAAI,cAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAIA,qBAAA;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAAA,MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,MAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,uBAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eA
Ae;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAa;AAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;"}
1
+ {"version":3,"file":"query.js","sources":["../../src/query.ts"],"sourcesContent":["import { QueryObserver } from \"@tanstack/query-core\"\nimport {\n GetKeyRequiredError,\n QueryClientRequiredError,\n QueryFnRequiredError,\n QueryKeyRequiredError,\n} from \"./errors\"\nimport { createWriteUtils } from \"./manual-sync\"\nimport type {\n QueryClient,\n QueryFunctionContext,\n QueryKey,\n QueryObserverOptions,\n} from \"@tanstack/query-core\"\nimport type {\n BaseCollectionConfig,\n ChangeMessage,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n// Re-export for external use\nexport type { SyncOperation } from \"./manual-sync\"\n\n// Schema output type inference helper (matches electric.ts pattern)\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends object\n ? StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n// Schema input type inference helper (matches electric.ts pattern)\ntype InferSchemaInput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferInput<T> extends object\n ? StandardSchemaV1.InferInput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration options for creating a Query Collection\n * @template T - The explicit type of items stored in the collection\n * @template TQueryFn - The queryFn type\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @template TSchema - The schema type for validation\n */\nexport interface QueryCollectionConfig<\n T extends object = object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = never,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /** The query key used by TanStack Query to identify this query */\n queryKey: TQueryKey\n /** Function that fetches data from the server. Must return the complete collection state */\n queryFn: TQueryFn extends (\n context: QueryFunctionContext<TQueryKey>\n ) => Promise<Array<any>>\n ? 
(context: QueryFunctionContext<TQueryKey>) => Promise<Array<T>>\n : TQueryFn\n /* Function that extracts array items from wrapped API responses (e.g metadata, pagination) */\n select?: (data: TQueryData) => Array<T>\n /** The TanStack Query client instance */\n queryClient: QueryClient\n\n // Query-specific options\n /** Whether the query should automatically run (default: true) */\n enabled?: boolean\n refetchInterval?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`refetchInterval`]\n retry?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retry`]\n retryDelay?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retryDelay`]\n staleTime?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`staleTime`]\n\n /**\n * Metadata to pass to the query.\n * Available in queryFn via context.meta\n *\n * @example\n * // Using meta for error context\n * queryFn: async (context) => {\n * try {\n * return await api.getTodos(userId)\n * } catch (error) {\n * // Use meta for better error messages\n * throw new Error(\n * context.meta?.errorMessage || 'Failed to load todos'\n * )\n * }\n * },\n * meta: {\n * errorMessage: `Failed to load todos for user ${userId}`\n * }\n */\n meta?: Record<string, unknown>\n}\n\n/**\n * Type for the refetch utility function\n */\nexport type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise<void>\n\n/**\n * Utility methods available on Query Collections for direct writes and manual operations.\n * Direct writes bypass the normal query/mutation flow and write directly to the synced data store.\n * @template TItem - The type of items stored in the collection\n * @template TKey - The type of the item keys\n * @template TInsertInput - The type accepted for insert operations\n * @template TError - The type of errors that can occur during queries\n */\nexport interface QueryCollectionUtils<\n TItem extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TInsertInput extends object = TItem,\n TError = unknown,\n> extends UtilsRecord {\n /** Manually trigger a refetch of the query */\n refetch: RefetchFn\n /** Insert one or more items directly into the synced data store without triggering a query refetch or optimistic update */\n writeInsert: (data: TInsertInput | Array<TInsertInput>) => void\n /** Update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpdate: (updates: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Delete one or more items directly from the synced data store without triggering a query refetch or optimistic update */\n writeDelete: (keys: TKey | Array<TKey>) => void\n /** Insert or update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpsert: (data: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Execute multiple write operations as a single atomic batch to the synced data store */\n writeBatch: (callback: () => void) => void\n /** Get the last error encountered by the query (if any); reset on success */\n lastError: () => TError | undefined\n /** Check if the collection is in an error state */\n isError: () => boolean\n /**\n * Get the number of consecutive sync failures.\n * Incremented only when query fails completely (not per retry attempt); reset on success.\n */\n errorCount: () => number\n /**\n * Clear the error state 
and trigger a refetch of the query\n * @returns Promise that resolves when the refetch completes successfully\n * @throws Error if the refetch fails\n */\n clearError: () => Promise<void>\n}\n\n/**\n * Creates query collection options for use with a standard Collection.\n * This integrates TanStack Query with TanStack DB for automatic synchronization.\n *\n * Supports automatic type inference following the priority order:\n * 1. Schema inference (highest priority)\n * 2. QueryFn return type inference (second priority)\n *\n * @template T - Type of the schema if a schema is provided otherwise it is the type of the values returned by the queryFn\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @param config - Configuration options for the Query collection\n * @returns Collection options with utilities for direct writes and manual operations\n *\n * @example\n * // Type inferred from queryFn return type (NEW!)\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => {\n * const response = await fetch('/api/todos')\n * return response.json() as Todo[] // Type automatically inferred!\n * },\n * queryClient,\n * getKey: (item) => item.id, // item is typed as Todo\n * })\n * )\n *\n * @example\n * // Explicit type\n * const todosCollection = createCollection<Todo>(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Schema inference\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * schema: todoSchema, // Type inferred from schema\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // With persistence handlers\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: fetchTodos,\n * queryClient,\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * await api.createTodos(transaction.mutations.map(m => m.modified))\n * },\n * onUpdate: async ({ transaction }) => {\n * await api.updateTodos(transaction.mutations)\n * },\n * onDelete: async ({ transaction }) => {\n * await api.deleteTodos(transaction.mutations.map(m => m.key))\n * }\n * })\n * )\n *\n * @example\n * // The select option extracts the items array from a response with metadata\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * select: (data) => data.items, // Extract the array of items\n * queryClient,\n * schema: todoSchema,\n * getKey: (item) => item.id,\n * })\n * )\n */\n// Overload for when schema is provided and select present\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n select: (data: TQueryData) => Array<InferSchemaInput<T>>\n }\n): 
CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided and select present\nexport function queryCollectionOptions<\n T extends object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n T,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n never,\n TQueryData\n > & {\n schema?: never // prohibit schema\n select: (data: TQueryData) => Array<T>\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\n// Overload for when schema is provided\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n (\n context: QueryFunctionContext<any>\n ) => Promise<Array<InferSchemaOutput<T>>>,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided\nexport function queryCollectionOptions<\n T extends object,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n T,\n (context: QueryFunctionContext<any>) => Promise<Array<T>>,\n TError,\n TQueryKey,\n TKey\n > & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\nexport function queryCollectionOptions(\n config: QueryCollectionConfig<Record<string, unknown>>\n): CollectionConfig & {\n utils: QueryCollectionUtils\n} {\n const {\n queryKey,\n queryFn,\n select,\n queryClient,\n enabled,\n refetchInterval,\n retry,\n retryDelay,\n staleTime,\n getKey,\n onInsert,\n onUpdate,\n onDelete,\n meta,\n ...baseCollectionConfig\n } = config\n\n // Validate required parameters\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryKey) {\n throw new QueryKeyRequiredError()\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryFn) {\n throw new QueryFnRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryClient) {\n throw new QueryClientRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!getKey) {\n throw new GetKeyRequiredError()\n }\n\n /** The last error encountered by the query */\n let lastError: any\n /** The number of consecutive sync failures */\n let errorCount = 0\n /** The timestamp for when the query most recently returned the status as \"error\" */\n let lastErrorUpdatedAt = 0\n\n const internalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, markReady, collection } = params\n\n const observerOptions: QueryObserverOptions<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n > = {\n queryKey: queryKey,\n queryFn: queryFn,\n 
structuralSharing: true,\n notifyOnChangeProps: `all`,\n // Only include options that are explicitly defined to allow QueryClient defaultOptions to be used\n ...(meta !== undefined && { meta }),\n ...(enabled !== undefined && { enabled }),\n ...(refetchInterval !== undefined && { refetchInterval }),\n ...(retry !== undefined && { retry }),\n ...(retryDelay !== undefined && { retryDelay }),\n ...(staleTime !== undefined && { staleTime }),\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && 
actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)\n // We'll dynamically unsubscribe/resubscribe based on subscriber count to maintain staleTime behavior\n subscribeToQuery()\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":[],"mappings":";;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA;AAAA,MAErB,GAAI,SAAS,UAAa,EAAE,KAAA;AAAA,MAC5B,GAAI,YAAY,UAAa,EAAE,QAAA;AAAA,MAC/B,GAAI,oBAAoB,UAAa,EAAE,gBAAA;AAAA,MACvC,GAAI,UAAU,UAAa,EAAE,MAAA;AAAA,MAC7B,GAAI,eAAe,UAAa,EAAE,WAAA;AAAA,MAClC,GAAI,cAAc,UAAa,EAAE,UAAA;AAAA,IAAU;AAG7C,UAAM,gBAAgB,IAAI,cAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAIA,qBAAA;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAA
A,MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,MAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,uBAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eAAe;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAa;AAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;"}
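The query.ts source embedded in the source map above documents the collection's direct-write utilities (writeInsert, writeUpdate, writeDelete, writeUpsert, writeBatch) and the error helpers (lastError, isError, errorCount, clearError). The sketch below is a minimal illustration of how those utilities are meant to be used; the Todo type and the /api/todos endpoint are assumptions for the example and are not taken from the package.

```ts
import { QueryClient } from "@tanstack/query-core"
import { createCollection } from "@tanstack/db"
import { queryCollectionOptions } from "@tanstack/query-db-collection"

// Hypothetical item shape and endpoint, for illustration only.
type Todo = { id: number; text: string; done: boolean }

const queryClient = new QueryClient()

const todos = createCollection(
  queryCollectionOptions({
    queryKey: ["todos"],
    queryFn: async () => {
      const res = await fetch("/api/todos")
      return (await res.json()) as Array<Todo>
    },
    queryClient,
    getKey: (item) => item.id,
  })
)

// Direct writes bypass the query/mutation flow and mutate the synced store in place;
// there is no refetch and no optimistic layer involved. Useful when another channel
// (e.g. a websocket) has already delivered the authoritative server state.
todos.utils.writeBatch(() => {
  todos.utils.writeInsert({ id: 3, text: "pushed from socket", done: false })
  todos.utils.writeUpdate({ id: 1, done: true }) // key is derived from the partial via getKey
  todos.utils.writeDelete(2)
})

// Error helpers exposed alongside the write utilities:
if (todos.utils.isError()) {
  console.warn(todos.utils.lastError(), "consecutive failures:", todos.utils.errorCount())
  void todos.utils.clearError() // resets error state and refetches (rejects if the refetch fails)
}
```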
package/package.json CHANGED
@@ -1,13 +1,13 @@
1
1
  {
2
2
  "name": "@tanstack/query-db-collection",
3
3
  "description": "TanStack Query collection for TanStack DB",
4
- "version": "0.2.32",
4
+ "version": "0.2.34",
5
5
  "dependencies": {
6
6
  "@standard-schema/spec": "^1.0.0",
7
- "@tanstack/db": "0.4.11"
7
+ "@tanstack/db": "0.4.12"
8
8
  },
9
9
  "devDependencies": {
10
- "@tanstack/query-core": "^5.90.2",
10
+ "@tanstack/query-core": "^5.90.5",
11
11
  "@vitest/coverage-istanbul": "^3.2.4"
12
12
  },
13
13
  "exports": {
package/src/manual-sync.ts CHANGED
@@ -110,12 +110,14 @@ function validateOperations<
110
110
  seenKeys.add(op.key)
111
111
 
112
112
  // Validate operation-specific requirements
113
+ // NOTE: These validations check the synced store only, not the combined view (synced + optimistic)
114
+ // This allows write operations to work correctly even when items are optimistically modified
113
115
  if (op.type === `update`) {
114
- if (!ctx.collection.has(op.key)) {
116
+ if (!ctx.collection._state.syncedData.has(op.key)) {
115
117
  throw new UpdateOperationItemNotFoundError(op.key)
116
118
  }
117
119
  } else if (op.type === `delete`) {
118
- if (!ctx.collection.has(op.key)) {
120
+ if (!ctx.collection._state.syncedData.has(op.key)) {
119
121
  throw new DeleteOperationItemNotFoundError(op.key)
120
122
  }
121
123
  }
@@ -149,7 +151,8 @@ export function performWriteOperations<
149
151
  break
150
152
  }
151
153
  case `update`: {
152
- const currentItem = ctx.collection.get(op.key)!
154
+ // Get from synced store only, not the combined view
155
+ const currentItem = ctx.collection._state.syncedData.get(op.key)!
153
156
  const updatedItem = {
154
157
  ...currentItem,
155
158
  ...op.data,
@@ -166,7 +169,8 @@ export function performWriteOperations<
166
169
  break
167
170
  }
168
171
  case `delete`: {
169
- const currentItem = ctx.collection.get(op.key)!
172
+ // Get from synced store only, not the combined view
173
+ const currentItem = ctx.collection._state.syncedData.get(op.key)!
170
174
  ctx.write({
171
175
  type: `delete`,
172
176
  value: currentItem,
@@ -174,12 +178,14 @@ export function performWriteOperations<
174
178
  break
175
179
  }
176
180
  case `upsert`: {
181
+ // Check synced store only, not the combined view
182
+ const existsInSyncedStore = ctx.collection._state.syncedData.has(op.key)
177
183
  const resolved = ctx.collection.validateData(
178
184
  op.data,
179
- ctx.collection.has(op.key) ? `update` : `insert`,
185
+ existsInSyncedStore ? `update` : `insert`,
180
186
  op.key
181
187
  )
182
- if (ctx.collection.has(op.key)) {
188
+ if (existsInSyncedStore) {
183
189
  ctx.write({
184
190
  type: `update`,
185
191
  value: resolved,
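The hunks above swap collection.has()/get() for collection._state.syncedData in the write-path validation and reads: per the NOTE comment, lookups now target the synced store rather than the combined (synced + optimistic) view. The sketch below illustrates the stated intent, reusing the hypothetical `todos` collection from the earlier example and assuming it is configured with persistence handlers so optimistic insert/delete calls are permitted; it is an illustration of the behavior described in the diff, not package test code.

```ts
// The synced store holds { id: 1, ... } as delivered by queryFn.
// An optimistic delete removes it from the combined view...
todos.delete(1) // collection.has(1) is now false, but _state.syncedData still has key 1

// ...yet the direct write still validates and applies, because 0.2.34 checks the
// synced store instead of the combined view (under the 0.2.32 validation this
// would have thrown UpdateOperationItemNotFoundError):
todos.utils.writeUpdate({ id: 1, done: true })

// Conversely, an item that exists only as an optimistic insert is not in the synced
// store yet, so a direct update to it is rejected by the new validation:
todos.insert({ id: 99, text: "optimistic only", done: false })
todos.utils.writeUpdate({ id: 99, done: true }) // throws UpdateOperationItemNotFoundError
```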
package/src/query.ts CHANGED
@@ -433,14 +433,15 @@ export function queryCollectionOptions(
433
433
  > = {
434
434
  queryKey: queryKey,
435
435
  queryFn: queryFn,
436
- meta: meta,
437
- enabled: enabled,
438
- refetchInterval: refetchInterval,
439
- retry: retry,
440
- retryDelay: retryDelay,
441
- staleTime: staleTime,
442
436
  structuralSharing: true,
443
437
  notifyOnChangeProps: `all`,
438
+ // Only include options that are explicitly defined to allow QueryClient defaultOptions to be used
439
+ ...(meta !== undefined && { meta }),
440
+ ...(enabled !== undefined && { enabled }),
441
+ ...(refetchInterval !== undefined && { refetchInterval }),
442
+ ...(retry !== undefined && { retry }),
443
+ ...(retryDelay !== undefined && { retryDelay }),
444
+ ...(staleTime !== undefined && { staleTime }),
444
445
  }
445
446
 
446
447
  const localObserver = new QueryObserver<
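The query.ts hunk above replaces the unconditional option assignments with conditional spreads, so keys the caller never set are omitted from the QueryObserver options and, per the added comment, QueryClient defaultOptions can take effect. A sketch of the resulting behavior follows; fetchTodos and the option values are assumptions for illustration.

```ts
import { QueryClient } from "@tanstack/query-core"
import { createCollection } from "@tanstack/db"
import { queryCollectionOptions } from "@tanstack/query-db-collection"

// Hypothetical fetcher, for illustration only.
declare function fetchTodos(): Promise<Array<{ id: number; text: string }>>

const queryClient = new QueryClient({
  defaultOptions: {
    queries: { staleTime: 5 * 60 * 1000, retry: 2 }, // client-wide defaults
  },
})

const todos = createCollection(
  queryCollectionOptions({
    queryKey: ["todos"],
    queryFn: fetchTodos,
    queryClient,
    getKey: (t) => t.id,
    // staleTime / retry / enabled / refetchInterval deliberately omitted
  })
)

// 0.2.32 built the observer options with `staleTime: staleTime`, `retry: retry`, etc.,
// passing explicit `undefined` values that shadowed the QueryClient defaults above.
// 0.2.34 only spreads options that are actually defined, so this observer falls back
// to the client's staleTime (5 minutes) and retry (2).
```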