@tanstack/query-db-collection 0.2.24 → 0.2.25
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.
- package/dist/cjs/manual-sync.cjs +5 -5
- package/dist/cjs/manual-sync.cjs.map +1 -1
- package/dist/cjs/query.cjs +2 -4
- package/dist/cjs/query.cjs.map +1 -1
- package/dist/esm/manual-sync.js +5 -5
- package/dist/esm/manual-sync.js.map +1 -1
- package/dist/esm/query.js +2 -4
- package/dist/esm/query.js.map +1 -1
- package/package.json +2 -2
- package/src/query.ts +3 -4
package/dist/cjs/manual-sync.cjs
CHANGED
@@ -128,7 +128,7 @@ function createWriteUtils(getContext) {
       };
       const ctx = ensureContext();
       const batchContext = activeBatchContexts.get(ctx);
-      if (batchContext
+      if (batchContext?.isActive) {
         batchContext.operations.push(operation);
         return;
       }
@@ -141,7 +141,7 @@ function createWriteUtils(getContext) {
       };
       const ctx = ensureContext();
       const batchContext = activeBatchContexts.get(ctx);
-      if (batchContext
+      if (batchContext?.isActive) {
         batchContext.operations.push(operation);
         return;
       }
@@ -154,7 +154,7 @@ function createWriteUtils(getContext) {
       };
       const ctx = ensureContext();
       const batchContext = activeBatchContexts.get(ctx);
-      if (batchContext
+      if (batchContext?.isActive) {
         batchContext.operations.push(operation);
         return;
       }
@@ -167,7 +167,7 @@ function createWriteUtils(getContext) {
       };
       const ctx = ensureContext();
       const batchContext = activeBatchContexts.get(ctx);
-      if (batchContext
+      if (batchContext?.isActive) {
         batchContext.operations.push(operation);
         return;
       }
@@ -176,7 +176,7 @@ function createWriteUtils(getContext) {
     writeBatch(callback) {
       const ctx = ensureContext();
       const existingBatch = activeBatchContexts.get(ctx);
-      if (existingBatch
+      if (existingBatch?.isActive) {
         throw new Error(
           `Cannot nest writeBatch calls. Complete the current batch before starting a new one.`
         );
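All five hunks touch the batching guard inside `createWriteUtils`: the first four queue writes made while a `writeBatch` callback is active, and the fifth rejects nested `writeBatch` calls. For orientation only (not part of the published diff), here is a hedged usage sketch of those utilities; the `Todo` shape and the `/api/todos` endpoint are hypothetical, and it assumes the write utilities are exposed on `collection.utils` as typed by `QueryCollectionUtils`:

```ts
import { createCollection } from "@tanstack/db"
import { QueryClient } from "@tanstack/query-core"
import { queryCollectionOptions } from "@tanstack/query-db-collection"

// Hypothetical item shape used only for this sketch.
type Todo = { id: number; title: string; done: boolean }

const queryClient = new QueryClient()

const todos = createCollection(
  queryCollectionOptions({
    queryKey: ["todos"],
    queryFn: async () => {
      const res = await fetch("/api/todos") // hypothetical endpoint
      return (await res.json()) as Array<Todo>
    },
    queryClient,
    getKey: (item) => item.id,
  })
)

// Outside a batch the `batchContext?.isActive` guard is falsy, so the write is
// validated and committed to the synced store right away.
todos.utils.writeInsert({ id: 1, title: "Buy milk", done: false })

// Inside writeBatch the guard is truthy: operations are only collected here and
// are validated and committed together when the synchronous callback returns.
// Nesting another writeBatch (or passing an async callback) throws.
todos.utils.writeBatch(() => {
  todos.utils.writeInsert({ id: 2, title: "Walk the dog", done: false })
  todos.utils.writeUpdate({ id: 1, done: true })
})
```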
package/dist/cjs/manual-sync.cjs.map
CHANGED
@@ -1 +1 @@
(Full source-map text omitted. The single-line map is regenerated: the embedded `sourcesContent` for `src/manual-sync.ts` is identical in both versions and already uses `batchContext?.isActive`; only the `mappings` string changes to match the new compiled output above.)
package/dist/cjs/query.cjs
CHANGED
@@ -126,9 +126,7 @@ function queryCollectionOptions(config) {
         isSubscribed = false;
       }
     };
-    if (config.startSync || collection.subscriberCount > 0) {
-      subscribeToQuery();
-    }
+    subscribeToQuery();
     const unsubscribeFromCollectionEvents = collection.on(
       `subscribers:change`,
       ({ subscriberCount }) => {
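In 0.2.24 the generated code only called `subscribeToQuery()` here when `config.startSync` was set or the collection already had subscribers; in 0.2.25 the observer is subscribed unconditionally as soon as the sync function runs, while the `subscribers:change` handler shown in the context lines still unsubscribes when the subscriber count drops to zero. `startSync` itself remains an ordinary collection option; a hedged sketch, reusing the hypothetical `Todo`, endpoint, and `queryClient` from the earlier example:

```ts
// Assumption: startSync comes from the base collection config and controls
// whether the collection starts syncing at creation or lazily on first use.
const eagerTodos = createCollection(
  queryCollectionOptions({
    queryKey: ["todos", "eager"],
    queryFn: async (): Promise<Array<Todo>> => {
      const res = await fetch("/api/todos") // hypothetical endpoint
      return (await res.json()) as Array<Todo>
    },
    queryClient,
    getKey: (item) => item.id,
    startSync: true, // begin syncing (and querying) without waiting for a subscriber
  })
)
```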
@@ -153,7 +151,7 @@ function queryCollectionOptions(config) {
         queryKey
       },
       {
-        throwOnError: opts
+        throwOnError: opts?.throwOnError
       }
     );
   };
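The second hunk is inside the `refetch` utility, which forwards an optional `throwOnError` flag to `queryClient.refetchQueries`; `clearError()` reuses the same path with `throwOnError: true`. A small hedged sketch against the hypothetical `todos` collection from the first example:

```ts
async function refreshTodos(): Promise<void> {
  // Default refetch: a failed query is recorded by the observer (lastError,
  // isError, errorCount) rather than rejecting this promise.
  await todos.utils.refetch()

  // Opt into rejection; clearError() uses this mode internally after
  // resetting its error counters.
  try {
    await todos.utils.refetch({ throwOnError: true })
  } catch (err) {
    console.error("todos refetch failed:", err)
  }
}
```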
package/dist/cjs/query.cjs.map
CHANGED
@@ -1 +1 @@
(Full source-map text omitted. The single-line map is regenerated for the updated `src/query.ts`: the embedded `sourcesContent` and the `mappings` string both change to reflect the new source and compiled output.)
enabled: enabled,\n refetchInterval: refetchInterval,\n retry: retry,\n retryDelay: retryDelay,\n staleTime: staleTime,\n structuralSharing: true,\n notifyOnChangeProps: `all`,\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)\n // We'll dynamically unsubscribe/resubscribe based 
on subscriber count to maintain staleTime behavior\n subscribeToQuery()\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":["QueryKeyRequiredError","QueryFnRequiredError","QueryClientRequiredError","GetKeyRequiredError","QueryObserver","createWriteUtils"],"mappings":";;;;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAIA,OAAAA,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAIC,OAAAA,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAIC,OAAAA,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAIC,OAAAA,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA,IAAA;AAGvB,UAAM,gBAAgB,IAAIC,wBAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAIA,qBAAA;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAAA,
MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,MAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,uBAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eAAe;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAaC,WAAAA;AAAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;;"}
|
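The query.ts source embedded in the map above also shows how the persistence handlers are wrapped: onInsert, onUpdate and onDelete each trigger an automatic refetch after they resolve unless the handler returns { refetch: false }. A minimal sketch of opting out of that refetch, assuming the package's documented entry points and a purely hypothetical Todo type and api client:

  import { createCollection } from "@tanstack/db"
  import { QueryClient } from "@tanstack/query-core"
  import { queryCollectionOptions } from "@tanstack/query-db-collection"

  // Hypothetical item type and API client, for illustration only.
  type Todo = { id: string; text: string; completed: boolean }
  declare const api: {
    getTodos: () => Promise<Array<Todo>>
    createTodos: (todos: Array<Todo>) => Promise<void>
  }

  const queryClient = new QueryClient()

  const todosCollection = createCollection(
    queryCollectionOptions({
      queryKey: ["todos"],
      queryFn: () => api.getTodos(),
      queryClient,
      getKey: (item) => item.id,
      onInsert: async ({ transaction }) => {
        await api.createTodos(transaction.mutations.map((m) => m.modified))
        // Returning { refetch: false } makes the wrapper skip the automatic
        // refetch it would otherwise await after this handler resolves.
        return { refetch: false }
      },
    })
  )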
package/dist/esm/manual-sync.js
CHANGED
|
@@ -126,7 +126,7 @@ function createWriteUtils(getContext) {
 126 126     };
 127 127     const ctx = ensureContext();
 128 128     const batchContext = activeBatchContexts.get(ctx);
 129     -   if (batchContext
     129 +   if (batchContext?.isActive) {
 130 130     batchContext.operations.push(operation);
 131 131     return;
 132 132     }
@@ -139,7 +139,7 @@ function createWriteUtils(getContext) {
 139 139     };
 140 140     const ctx = ensureContext();
 141 141     const batchContext = activeBatchContexts.get(ctx);
 142     -   if (batchContext
     142 +   if (batchContext?.isActive) {
 143 143     batchContext.operations.push(operation);
 144 144     return;
 145 145     }
@@ -152,7 +152,7 @@ function createWriteUtils(getContext) {
 152 152     };
 153 153     const ctx = ensureContext();
 154 154     const batchContext = activeBatchContexts.get(ctx);
 155     -   if (batchContext
     155 +   if (batchContext?.isActive) {
 156 156     batchContext.operations.push(operation);
 157 157     return;
 158 158     }
@@ -165,7 +165,7 @@ function createWriteUtils(getContext) {
 165 165     };
 166 166     const ctx = ensureContext();
 167 167     const batchContext = activeBatchContexts.get(ctx);
 168     -   if (batchContext
     168 +   if (batchContext?.isActive) {
 169 169     batchContext.operations.push(operation);
 170 170     return;
 171 171     }
@@ -174,7 +174,7 @@ function createWriteUtils(getContext) {
 174 174     writeBatch(callback) {
 175 175     const ctx = ensureContext();
 176 176     const existingBatch = activeBatchContexts.get(ctx);
 177     -   if (existingBatch
     177 +   if (existingBatch?.isActive) {
 178 178     throw new Error(
 179 179     `Cannot nest writeBatch calls. Complete the current batch before starting a new one.`
 180 180     );
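The line changed in each hunk above is the guard that routes manual writes: every write helper looks up the per-collection batch context and, when batchContext?.isActive, queues the operation instead of applying it right away; writeBatch then commits everything the callback collected in one sync transaction. A rough usage sketch of that batching path, assuming a todosCollection created with queryCollectionOptions (the collection and item shapes are hypothetical):

  todosCollection.utils.writeBatch(() => {
    // While the batch is active, each call below is pushed onto the batch
    // context's operations array rather than being applied immediately.
    todosCollection.utils.writeInsert({ id: "1", text: "buy milk", completed: false })
    // Update/delete targets must already exist in the synced data, and a key
    // may appear only once per batch (duplicates throw DuplicateKeyInBatchError).
    todosCollection.utils.writeUpdate({ id: "2", completed: true })
    todosCollection.utils.writeDelete("3")
  })
  // When the callback returns, the collected operations are validated,
  // committed as a single transaction, and written back to the query cache
  // via queryClient.setQueryData. Nested writeBatch calls and async callbacks throw.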
package/dist/esm/manual-sync.js.map
CHANGED
|
@@ -1 +1 @@
 1       -
{"version":3,"file":"manual-sync.js","sources":["../../src/manual-sync.ts"],"sourcesContent":["import {\n DeleteOperationItemNotFoundError,\n DuplicateKeyInBatchError,\n SyncNotInitializedError,\n UpdateOperationItemNotFoundError,\n} from \"./errors\"\nimport type { QueryClient } from \"@tanstack/query-core\"\nimport type { ChangeMessage, Collection } from \"@tanstack/db\"\n\n// Track active batch operations per context to prevent cross-collection contamination\nconst activeBatchContexts = new WeakMap<\n SyncContext<any, any>,\n {\n operations: Array<SyncOperation<any, any, any>>\n isActive: boolean\n }\n>()\n\n// Types for sync operations\nexport type SyncOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n> =\n | { type: `insert`; data: TInsertInput | Array<TInsertInput> }\n | { type: `update`; data: Partial<TRow> | Array<Partial<TRow>> }\n | { type: `delete`; key: TKey | Array<TKey> }\n | { type: `upsert`; data: Partial<TRow> | Array<Partial<TRow>> }\n\nexport interface SyncContext<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n collection: Collection<TRow>\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: TRow) => TKey\n begin: () => void\n write: (message: Omit<ChangeMessage<TRow>, `key`>) => void\n commit: () => void\n}\n\ninterface NormalizedOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n type: `insert` | `update` | `delete` | `upsert`\n key: TKey\n data?: TRow | Partial<TRow>\n}\n\n// Normalize operations into a consistent format\nfunction normalizeOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n ops:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): Array<NormalizedOperation<TRow, TKey>> {\n const operations = Array.isArray(ops) ? ops : [ops]\n const normalized: Array<NormalizedOperation<TRow, TKey>> = []\n\n for (const op of operations) {\n if (op.type === `delete`) {\n const keys = Array.isArray(op.key) ? op.key : [op.key]\n for (const key of keys) {\n normalized.push({ type: `delete`, key })\n }\n } else {\n const items = Array.isArray(op.data) ? op.data : [op.data]\n for (const item of items) {\n let key: TKey\n if (op.type === `update`) {\n // For updates, we need to get the key from the partial data\n key = ctx.getKey(item as TRow)\n } else {\n // For insert/upsert, validate and resolve the full item first\n const resolved = ctx.collection.validateData(\n item,\n op.type === `upsert` ? 
`insert` : op.type\n )\n key = ctx.getKey(resolved)\n }\n normalized.push({ type: op.type, key, data: item })\n }\n }\n }\n\n return normalized\n}\n\n// Validate operations before executing\nfunction validateOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n>(\n operations: Array<NormalizedOperation<TRow, TKey>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const seenKeys = new Set<TKey>()\n\n for (const op of operations) {\n // Check for duplicate keys within the batch\n if (seenKeys.has(op.key)) {\n throw new DuplicateKeyInBatchError(op.key)\n }\n seenKeys.add(op.key)\n\n // Validate operation-specific requirements\n if (op.type === `update`) {\n if (!ctx.collection.has(op.key)) {\n throw new UpdateOperationItemNotFoundError(op.key)\n }\n } else if (op.type === `delete`) {\n if (!ctx.collection.has(op.key)) {\n throw new DeleteOperationItemNotFoundError(op.key)\n }\n }\n }\n}\n\n// Execute a batch of operations\nexport function performWriteOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n operations:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const normalized = normalizeOperations(operations, ctx)\n validateOperations(normalized, ctx)\n\n ctx.begin()\n\n for (const op of normalized) {\n switch (op.type) {\n case `insert`: {\n const resolved = ctx.collection.validateData(op.data, `insert`)\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n break\n }\n case `update`: {\n const currentItem = ctx.collection.get(op.key)!\n const updatedItem = {\n ...currentItem,\n ...op.data,\n }\n const resolved = ctx.collection.validateData(\n updatedItem,\n `update`,\n op.key\n )\n ctx.write({\n type: `update`,\n value: resolved,\n })\n break\n }\n case `delete`: {\n const currentItem = ctx.collection.get(op.key)!\n ctx.write({\n type: `delete`,\n value: currentItem,\n })\n break\n }\n case `upsert`: {\n const resolved = ctx.collection.validateData(\n op.data,\n ctx.collection.has(op.key) ? 
`update` : `insert`,\n op.key\n )\n if (ctx.collection.has(op.key)) {\n ctx.write({\n type: `update`,\n value: resolved,\n })\n } else {\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n }\n break\n }\n }\n }\n\n ctx.commit()\n\n // Update query cache after successful commit\n const updatedData = ctx.collection.toArray\n ctx.queryClient.setQueryData(ctx.queryKey, updatedData)\n}\n\n// Factory function to create write utils\nexport function createWriteUtils<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(getContext: () => SyncContext<TRow, TKey> | null) {\n function ensureContext(): SyncContext<TRow, TKey> {\n const context = getContext()\n if (!context) {\n throw new SyncNotInitializedError()\n }\n return context\n }\n\n return {\n writeInsert(data: TInsertInput | Array<TInsertInput>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `insert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n // If we're in a batch, just add to the batch operations\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n // Otherwise, perform the operation immediately\n performWriteOperations(operation, ctx)\n },\n\n writeUpdate(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `update`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeDelete(key: TKey | Array<TKey>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `delete`,\n key,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeUpsert(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `upsert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeBatch(callback: () => void) {\n const ctx = ensureContext()\n\n // Check if we're already in a batch (nested batch)\n const existingBatch = activeBatchContexts.get(ctx)\n if (existingBatch?.isActive) {\n throw new Error(\n `Cannot nest writeBatch calls. 
Complete the current batch before starting a new one.`\n )\n }\n\n // Set up the batch context for this specific collection\n const batchContext = {\n operations: [] as Array<SyncOperation<TRow, TKey, TInsertInput>>,\n isActive: true,\n }\n activeBatchContexts.set(ctx, batchContext)\n\n try {\n // Execute the callback - any write operations will be collected\n const result = callback()\n\n // Check if callback returns a promise (async function)\n if (\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n result &&\n typeof result === `object` &&\n `then` in result &&\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n typeof result.then === `function`\n ) {\n throw new Error(\n `writeBatch does not support async callbacks. The callback must be synchronous.`\n )\n }\n\n // Perform all collected operations\n if (batchContext.operations.length > 0) {\n performWriteOperations(batchContext.operations, ctx)\n }\n } finally {\n // Always clear the batch context\n batchContext.isActive = false\n activeBatchContexts.delete(ctx)\n }\n },\n }\n}\n"],"names":[],"mappings":";AAUA,MAAM,0CAA0B,QAAA;AA0ChC,SAAS,oBAKP,KAGA,KACwC;AACxC,QAAM,aAAa,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAClD,QAAM,aAAqD,CAAA;AAE3D,aAAW,MAAM,YAAY;AAC3B,QAAI,GAAG,SAAS,UAAU;AACxB,YAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG;AACrD,iBAAW,OAAO,MAAM;AACtB,mBAAW,KAAK,EAAE,MAAM,UAAU,KAAK;AAAA,MACzC;AAAA,IACF,OAAO;AACL,YAAM,QAAQ,MAAM,QAAQ,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,GAAG,IAAI;AACzD,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACJ,YAAI,GAAG,SAAS,UAAU;AAExB,gBAAM,IAAI,OAAO,IAAY;AAAA,QAC/B,OAAO;AAEL,gBAAM,WAAW,IAAI,WAAW;AAAA,YAC9B;AAAA,YACA,GAAG,SAAS,WAAW,WAAW,GAAG;AAAA,UAAA;AAEvC,gBAAM,IAAI,OAAO,QAAQ;AAAA,QAC3B;AACA,mBAAW,KAAK,EAAE,MAAM,GAAG,MAAM,KAAK,MAAM,MAAM;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,mBAIP,YACA,KACM;AACN,QAAM,+BAAe,IAAA;AAErB,aAAW,MAAM,YAAY;AAE3B,QAAI,SAAS,IAAI,GAAG,GAAG,GAAG;AACxB,YAAM,IAAI,yBAAyB,GAAG,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,GAAG;AAGnB,QAAI,GAAG,SAAS,UAAU;AACxB,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF,WAAW,GAAG,SAAS,UAAU;AAC/B,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,uBAKd,YAGA,KACM;AACN,QAAM,aAAa,oBAAoB,YAAY,GAAG;AACtD,qBAAmB,YAAY,GAAG;AAElC,MAAI,MAAA;AAEJ,aAAW,MAAM,YAAY;AAC3B,YAAQ,GAAG,MAAA;AAAA,MACT,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW,aAAa,GAAG,MAAM,QAAQ;AAC9D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,GAAG,GAAG;AAAA,QAAA;AAER,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B;AAAA,UACA;AAAA,UACA,GAAG;AAAA,QAAA;AAEL,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B,GAAG;AAAA,UACH,IAAI,WAAW,IAAI,GAAG,GAAG,IAAI,WAAW;AAAA,UACxC,GAAG;AAAA,QAAA;AAEL,YAAI,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC9B,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,OAAO;AACL,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH;AACA;AAAA,MACF;AAAA,IAAA;AAAA,EAEJ;AAEA,MAAI,OAAA;AAGJ,QAAM,cAAc,IAAI,WAAW;AACnC,MAAI,YAAY,aAAa,IAAI,UAAU,WAAW;AACxD;AAGO,SAAS,iBAId
,YAAkD;AAClD,WAAS,gBAAyC;AAChD,UAAM,UAAU,WAAA;AAChB,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,wBAAA;AAAA,IACZ;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,YAAY,MAA0C;AACpD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAGhD,UAAI,6CAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAGA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,6CAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,KAAyB;AACnC,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,6CAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,6CAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,WAAW,UAAsB;AAC/B,YAAM,MAAM,cAAA;AAGZ,YAAM,gBAAgB,oBAAoB,IAAI,GAAG;AACjD,UAAI,+CAAe,UAAU;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QAAA;AAAA,MAEJ;AAGA,YAAM,eAAe;AAAA,QACnB,YAAY,CAAA;AAAA,QACZ,UAAU;AAAA,MAAA;AAEZ,0BAAoB,IAAI,KAAK,YAAY;AAEzC,UAAI;AAEF,cAAM,SAAS,SAAA;AAGf;AAAA;AAAA;AAAA,UAGE,UACA,OAAO,WAAW,YAClB,UAAU;AAAA,UAEV,OAAO,OAAO,SAAS;AAAA,UACvB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UAAA;AAAA,QAEJ;AAGA,YAAI,aAAa,WAAW,SAAS,GAAG;AACtC,iCAAuB,aAAa,YAAY,GAAG;AAAA,QACrD;AAAA,MACF,UAAA;AAEE,qBAAa,WAAW;AACxB,4BAAoB,OAAO,GAAG;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAEJ;"}
       1 +
{"version":3,"file":"manual-sync.js","sources":["../../src/manual-sync.ts"],"sourcesContent":["import {\n DeleteOperationItemNotFoundError,\n DuplicateKeyInBatchError,\n SyncNotInitializedError,\n UpdateOperationItemNotFoundError,\n} from \"./errors\"\nimport type { QueryClient } from \"@tanstack/query-core\"\nimport type { ChangeMessage, Collection } from \"@tanstack/db\"\n\n// Track active batch operations per context to prevent cross-collection contamination\nconst activeBatchContexts = new WeakMap<\n SyncContext<any, any>,\n {\n operations: Array<SyncOperation<any, any, any>>\n isActive: boolean\n }\n>()\n\n// Types for sync operations\nexport type SyncOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n> =\n | { type: `insert`; data: TInsertInput | Array<TInsertInput> }\n | { type: `update`; data: Partial<TRow> | Array<Partial<TRow>> }\n | { type: `delete`; key: TKey | Array<TKey> }\n | { type: `upsert`; data: Partial<TRow> | Array<Partial<TRow>> }\n\nexport interface SyncContext<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n collection: Collection<TRow>\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: TRow) => TKey\n begin: () => void\n write: (message: Omit<ChangeMessage<TRow>, `key`>) => void\n commit: () => void\n}\n\ninterface NormalizedOperation<\n TRow extends object,\n TKey extends string | number = string | number,\n> {\n type: `insert` | `update` | `delete` | `upsert`\n key: TKey\n data?: TRow | Partial<TRow>\n}\n\n// Normalize operations into a consistent format\nfunction normalizeOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n ops:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): Array<NormalizedOperation<TRow, TKey>> {\n const operations = Array.isArray(ops) ? ops : [ops]\n const normalized: Array<NormalizedOperation<TRow, TKey>> = []\n\n for (const op of operations) {\n if (op.type === `delete`) {\n const keys = Array.isArray(op.key) ? op.key : [op.key]\n for (const key of keys) {\n normalized.push({ type: `delete`, key })\n }\n } else {\n const items = Array.isArray(op.data) ? op.data : [op.data]\n for (const item of items) {\n let key: TKey\n if (op.type === `update`) {\n // For updates, we need to get the key from the partial data\n key = ctx.getKey(item as TRow)\n } else {\n // For insert/upsert, validate and resolve the full item first\n const resolved = ctx.collection.validateData(\n item,\n op.type === `upsert` ? 
`insert` : op.type\n )\n key = ctx.getKey(resolved)\n }\n normalized.push({ type: op.type, key, data: item })\n }\n }\n }\n\n return normalized\n}\n\n// Validate operations before executing\nfunction validateOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n>(\n operations: Array<NormalizedOperation<TRow, TKey>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const seenKeys = new Set<TKey>()\n\n for (const op of operations) {\n // Check for duplicate keys within the batch\n if (seenKeys.has(op.key)) {\n throw new DuplicateKeyInBatchError(op.key)\n }\n seenKeys.add(op.key)\n\n // Validate operation-specific requirements\n if (op.type === `update`) {\n if (!ctx.collection.has(op.key)) {\n throw new UpdateOperationItemNotFoundError(op.key)\n }\n } else if (op.type === `delete`) {\n if (!ctx.collection.has(op.key)) {\n throw new DeleteOperationItemNotFoundError(op.key)\n }\n }\n }\n}\n\n// Execute a batch of operations\nexport function performWriteOperations<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(\n operations:\n | SyncOperation<TRow, TKey, TInsertInput>\n | Array<SyncOperation<TRow, TKey, TInsertInput>>,\n ctx: SyncContext<TRow, TKey>\n): void {\n const normalized = normalizeOperations(operations, ctx)\n validateOperations(normalized, ctx)\n\n ctx.begin()\n\n for (const op of normalized) {\n switch (op.type) {\n case `insert`: {\n const resolved = ctx.collection.validateData(op.data, `insert`)\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n break\n }\n case `update`: {\n const currentItem = ctx.collection.get(op.key)!\n const updatedItem = {\n ...currentItem,\n ...op.data,\n }\n const resolved = ctx.collection.validateData(\n updatedItem,\n `update`,\n op.key\n )\n ctx.write({\n type: `update`,\n value: resolved,\n })\n break\n }\n case `delete`: {\n const currentItem = ctx.collection.get(op.key)!\n ctx.write({\n type: `delete`,\n value: currentItem,\n })\n break\n }\n case `upsert`: {\n const resolved = ctx.collection.validateData(\n op.data,\n ctx.collection.has(op.key) ? 
`update` : `insert`,\n op.key\n )\n if (ctx.collection.has(op.key)) {\n ctx.write({\n type: `update`,\n value: resolved,\n })\n } else {\n ctx.write({\n type: `insert`,\n value: resolved,\n })\n }\n break\n }\n }\n }\n\n ctx.commit()\n\n // Update query cache after successful commit\n const updatedData = ctx.collection.toArray\n ctx.queryClient.setQueryData(ctx.queryKey, updatedData)\n}\n\n// Factory function to create write utils\nexport function createWriteUtils<\n TRow extends object,\n TKey extends string | number = string | number,\n TInsertInput extends object = TRow,\n>(getContext: () => SyncContext<TRow, TKey> | null) {\n function ensureContext(): SyncContext<TRow, TKey> {\n const context = getContext()\n if (!context) {\n throw new SyncNotInitializedError()\n }\n return context\n }\n\n return {\n writeInsert(data: TInsertInput | Array<TInsertInput>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `insert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n // If we're in a batch, just add to the batch operations\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n // Otherwise, perform the operation immediately\n performWriteOperations(operation, ctx)\n },\n\n writeUpdate(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `update`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeDelete(key: TKey | Array<TKey>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `delete`,\n key,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeUpsert(data: Partial<TRow> | Array<Partial<TRow>>) {\n const operation: SyncOperation<TRow, TKey, TInsertInput> = {\n type: `upsert`,\n data,\n }\n\n const ctx = ensureContext()\n const batchContext = activeBatchContexts.get(ctx)\n\n if (batchContext?.isActive) {\n batchContext.operations.push(operation)\n return\n }\n\n performWriteOperations(operation, ctx)\n },\n\n writeBatch(callback: () => void) {\n const ctx = ensureContext()\n\n // Check if we're already in a batch (nested batch)\n const existingBatch = activeBatchContexts.get(ctx)\n if (existingBatch?.isActive) {\n throw new Error(\n `Cannot nest writeBatch calls. 
Complete the current batch before starting a new one.`\n )\n }\n\n // Set up the batch context for this specific collection\n const batchContext = {\n operations: [] as Array<SyncOperation<TRow, TKey, TInsertInput>>,\n isActive: true,\n }\n activeBatchContexts.set(ctx, batchContext)\n\n try {\n // Execute the callback - any write operations will be collected\n const result = callback()\n\n // Check if callback returns a promise (async function)\n if (\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n result &&\n typeof result === `object` &&\n `then` in result &&\n // @ts-expect-error - Runtime check for async callback, callback is typed as () => void but user might pass async\n typeof result.then === `function`\n ) {\n throw new Error(\n `writeBatch does not support async callbacks. The callback must be synchronous.`\n )\n }\n\n // Perform all collected operations\n if (batchContext.operations.length > 0) {\n performWriteOperations(batchContext.operations, ctx)\n }\n } finally {\n // Always clear the batch context\n batchContext.isActive = false\n activeBatchContexts.delete(ctx)\n }\n },\n }\n}\n"],"names":[],"mappings":";AAUA,MAAM,0CAA0B,QAAA;AA0ChC,SAAS,oBAKP,KAGA,KACwC;AACxC,QAAM,aAAa,MAAM,QAAQ,GAAG,IAAI,MAAM,CAAC,GAAG;AAClD,QAAM,aAAqD,CAAA;AAE3D,aAAW,MAAM,YAAY;AAC3B,QAAI,GAAG,SAAS,UAAU;AACxB,YAAM,OAAO,MAAM,QAAQ,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG;AACrD,iBAAW,OAAO,MAAM;AACtB,mBAAW,KAAK,EAAE,MAAM,UAAU,KAAK;AAAA,MACzC;AAAA,IACF,OAAO;AACL,YAAM,QAAQ,MAAM,QAAQ,GAAG,IAAI,IAAI,GAAG,OAAO,CAAC,GAAG,IAAI;AACzD,iBAAW,QAAQ,OAAO;AACxB,YAAI;AACJ,YAAI,GAAG,SAAS,UAAU;AAExB,gBAAM,IAAI,OAAO,IAAY;AAAA,QAC/B,OAAO;AAEL,gBAAM,WAAW,IAAI,WAAW;AAAA,YAC9B;AAAA,YACA,GAAG,SAAS,WAAW,WAAW,GAAG;AAAA,UAAA;AAEvC,gBAAM,IAAI,OAAO,QAAQ;AAAA,QAC3B;AACA,mBAAW,KAAK,EAAE,MAAM,GAAG,MAAM,KAAK,MAAM,MAAM;AAAA,MACpD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGA,SAAS,mBAIP,YACA,KACM;AACN,QAAM,+BAAe,IAAA;AAErB,aAAW,MAAM,YAAY;AAE3B,QAAI,SAAS,IAAI,GAAG,GAAG,GAAG;AACxB,YAAM,IAAI,yBAAyB,GAAG,GAAG;AAAA,IAC3C;AACA,aAAS,IAAI,GAAG,GAAG;AAGnB,QAAI,GAAG,SAAS,UAAU;AACxB,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF,WAAW,GAAG,SAAS,UAAU;AAC/B,UAAI,CAAC,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC/B,cAAM,IAAI,iCAAiC,GAAG,GAAG;AAAA,MACnD;AAAA,IACF;AAAA,EACF;AACF;AAGO,SAAS,uBAKd,YAGA,KACM;AACN,QAAM,aAAa,oBAAoB,YAAY,GAAG;AACtD,qBAAmB,YAAY,GAAG;AAElC,MAAI,MAAA;AAEJ,aAAW,MAAM,YAAY;AAC3B,YAAQ,GAAG,MAAA;AAAA,MACT,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW,aAAa,GAAG,MAAM,QAAQ;AAC9D,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,cAAM,cAAc;AAAA,UAClB,GAAG;AAAA,UACH,GAAG,GAAG;AAAA,QAAA;AAER,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B;AAAA,UACA;AAAA,UACA,GAAG;AAAA,QAAA;AAEL,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,cAAc,IAAI,WAAW,IAAI,GAAG,GAAG;AAC7C,YAAI,MAAM;AAAA,UACR,MAAM;AAAA,UACN,OAAO;AAAA,QAAA,CACR;AACD;AAAA,MACF;AAAA,MACA,KAAK,UAAU;AACb,cAAM,WAAW,IAAI,WAAW;AAAA,UAC9B,GAAG;AAAA,UACH,IAAI,WAAW,IAAI,GAAG,GAAG,IAAI,WAAW;AAAA,UACxC,GAAG;AAAA,QAAA;AAEL,YAAI,IAAI,WAAW,IAAI,GAAG,GAAG,GAAG;AAC9B,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH,OAAO;AACL,cAAI,MAAM;AAAA,YACR,MAAM;AAAA,YACN,OAAO;AAAA,UAAA,CACR;AAAA,QACH;AACA;AAAA,MACF;AAAA,IAAA;AAAA,EAEJ;AAEA,MAAI,OAAA;AAGJ,QAAM,cAAc,IAAI,WAAW;AACnC,MAAI,YAAY,aAAa,IAAI,UAAU,WAAW;AACxD;AAGO,SAAS,iBAId
,YAAkD;AAClD,WAAS,gBAAyC;AAChD,UAAM,UAAU,WAAA;AAChB,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,wBAAA;AAAA,IACZ;AACA,WAAO;AAAA,EACT;AAEA,SAAO;AAAA,IACL,YAAY,MAA0C;AACpD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAGhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAGA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,KAAyB;AACnC,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,YAAY,MAA4C;AACtD,YAAM,YAAqD;AAAA,QACzD,MAAM;AAAA,QACN;AAAA,MAAA;AAGF,YAAM,MAAM,cAAA;AACZ,YAAM,eAAe,oBAAoB,IAAI,GAAG;AAEhD,UAAI,cAAc,UAAU;AAC1B,qBAAa,WAAW,KAAK,SAAS;AACtC;AAAA,MACF;AAEA,6BAAuB,WAAW,GAAG;AAAA,IACvC;AAAA,IAEA,WAAW,UAAsB;AAC/B,YAAM,MAAM,cAAA;AAGZ,YAAM,gBAAgB,oBAAoB,IAAI,GAAG;AACjD,UAAI,eAAe,UAAU;AAC3B,cAAM,IAAI;AAAA,UACR;AAAA,QAAA;AAAA,MAEJ;AAGA,YAAM,eAAe;AAAA,QACnB,YAAY,CAAA;AAAA,QACZ,UAAU;AAAA,MAAA;AAEZ,0BAAoB,IAAI,KAAK,YAAY;AAEzC,UAAI;AAEF,cAAM,SAAS,SAAA;AAGf;AAAA;AAAA;AAAA,UAGE,UACA,OAAO,WAAW,YAClB,UAAU;AAAA,UAEV,OAAO,OAAO,SAAS;AAAA,UACvB;AACA,gBAAM,IAAI;AAAA,YACR;AAAA,UAAA;AAAA,QAEJ;AAGA,YAAI,aAAa,WAAW,SAAS,GAAG;AACtC,iCAAuB,aAAa,YAAY,GAAG;AAAA,QACrD;AAAA,MACF,UAAA;AAEE,qBAAa,WAAW;AACxB,4BAAoB,OAAO,GAAG;AAAA,MAChC;AAAA,IACF;AAAA,EAAA;AAEJ;"}
|
package/dist/esm/query.js
CHANGED
|
@@ -124,9 +124,7 @@ function queryCollectionOptions(config) {
 124 124     isSubscribed = false;
 125 125     }
 126 126     };
 127     -
 128     -   subscribeToQuery();
 129     -   }
     127 +   subscribeToQuery();
 130 128     const unsubscribeFromCollectionEvents = collection.on(
 131 129     `subscribers:change`,
 132 130     ({ subscriberCount }) => {
@@ -151,7 +149,7 @@ function queryCollectionOptions(config) {
 151 149     queryKey
 152 150     },
 153 151     {
 154     -   throwOnError: opts
     152 +   throwOnError: opts?.throwOnError
 155 153     }
 156 154     );
 157 155     };
|
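The first hunk above is the behavioral change in this release. The old source, still visible in the removed sourcesContent of the map below, only subscribed when startSync was set or the collection already had subscribers:

  // old query.ts
  if (config.startSync || collection.subscriberCount > 0) {
    subscribeToQuery()
  }

In 0.2.25 the guard is gone: sync start always subscribes the QueryObserver (whether it was triggered by preload(), the startSync option, or the first subscriber, per the new source comment), and the subscribers:change listener still unsubscribes when the count drops back to 0 to preserve staleTime behavior. A hedged sketch of the effect, reusing the hypothetical todosCollection from the earlier sketches:

  // Triggers sync; with 0.2.25 the query subscription starts immediately,
  // so the first result is processed without needing a live subscriber.
  await todosCollection.preload()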
package/dist/esm/query.js.map
CHANGED
|
@@ -1 +1 @@
 1       -
{"version":3,"file":"query.js","sources":["../../src/query.ts"],"sourcesContent":["import { QueryObserver } from \"@tanstack/query-core\"\nimport {\n GetKeyRequiredError,\n QueryClientRequiredError,\n QueryFnRequiredError,\n QueryKeyRequiredError,\n} from \"./errors\"\nimport { createWriteUtils } from \"./manual-sync\"\nimport type {\n QueryClient,\n QueryFunctionContext,\n QueryKey,\n QueryObserverOptions,\n} from \"@tanstack/query-core\"\nimport type {\n BaseCollectionConfig,\n ChangeMessage,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n// Re-export for external use\nexport type { SyncOperation } from \"./manual-sync\"\n\n// Schema output type inference helper (matches electric.ts pattern)\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends object\n ? StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n// Schema input type inference helper (matches electric.ts pattern)\ntype InferSchemaInput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferInput<T> extends object\n ? StandardSchemaV1.InferInput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration options for creating a Query Collection\n * @template T - The explicit type of items stored in the collection\n * @template TQueryFn - The queryFn type\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @template TSchema - The schema type for validation\n */\nexport interface QueryCollectionConfig<\n T extends object = object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = never,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /** The query key used by TanStack Query to identify this query */\n queryKey: TQueryKey\n /** Function that fetches data from the server. Must return the complete collection state */\n queryFn: TQueryFn extends (\n context: QueryFunctionContext<TQueryKey>\n ) => Promise<Array<any>>\n ? 
(context: QueryFunctionContext<TQueryKey>) => Promise<Array<T>>\n : TQueryFn\n /* Function that extracts array items from wrapped API responses (e.g metadata, pagination) */\n select?: (data: TQueryData) => Array<T>\n /** The TanStack Query client instance */\n queryClient: QueryClient\n\n // Query-specific options\n /** Whether the query should automatically run (default: true) */\n enabled?: boolean\n refetchInterval?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`refetchInterval`]\n retry?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retry`]\n retryDelay?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retryDelay`]\n staleTime?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`staleTime`]\n\n /**\n * Metadata to pass to the query.\n * Available in queryFn via context.meta\n *\n * @example\n * // Using meta for error context\n * queryFn: async (context) => {\n * try {\n * return await api.getTodos(userId)\n * } catch (error) {\n * // Use meta for better error messages\n * throw new Error(\n * context.meta?.errorMessage || 'Failed to load todos'\n * )\n * }\n * },\n * meta: {\n * errorMessage: `Failed to load todos for user ${userId}`\n * }\n */\n meta?: Record<string, unknown>\n}\n\n/**\n * Type for the refetch utility function\n */\nexport type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise<void>\n\n/**\n * Utility methods available on Query Collections for direct writes and manual operations.\n * Direct writes bypass the normal query/mutation flow and write directly to the synced data store.\n * @template TItem - The type of items stored in the collection\n * @template TKey - The type of the item keys\n * @template TInsertInput - The type accepted for insert operations\n * @template TError - The type of errors that can occur during queries\n */\nexport interface QueryCollectionUtils<\n TItem extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TInsertInput extends object = TItem,\n TError = unknown,\n> extends UtilsRecord {\n /** Manually trigger a refetch of the query */\n refetch: RefetchFn\n /** Insert one or more items directly into the synced data store without triggering a query refetch or optimistic update */\n writeInsert: (data: TInsertInput | Array<TInsertInput>) => void\n /** Update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpdate: (updates: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Delete one or more items directly from the synced data store without triggering a query refetch or optimistic update */\n writeDelete: (keys: TKey | Array<TKey>) => void\n /** Insert or update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpsert: (data: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Execute multiple write operations as a single atomic batch to the synced data store */\n writeBatch: (callback: () => void) => void\n /** Get the last error encountered by the query (if any); reset on success */\n lastError: () => TError | undefined\n /** Check if the collection is in an error state */\n isError: () => boolean\n /**\n * Get the number of consecutive sync failures.\n * Incremented only when query fails completely (not per retry attempt); reset on success.\n */\n errorCount: () => number\n /**\n * Clear the error state 
and trigger a refetch of the query\n * @returns Promise that resolves when the refetch completes successfully\n * @throws Error if the refetch fails\n */\n clearError: () => Promise<void>\n}\n\n/**\n * Creates query collection options for use with a standard Collection.\n * This integrates TanStack Query with TanStack DB for automatic synchronization.\n *\n * Supports automatic type inference following the priority order:\n * 1. Schema inference (highest priority)\n * 2. QueryFn return type inference (second priority)\n *\n * @template T - Type of the schema if a schema is provided otherwise it is the type of the values returned by the queryFn\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @param config - Configuration options for the Query collection\n * @returns Collection options with utilities for direct writes and manual operations\n *\n * @example\n * // Type inferred from queryFn return type (NEW!)\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => {\n * const response = await fetch('/api/todos')\n * return response.json() as Todo[] // Type automatically inferred!\n * },\n * queryClient,\n * getKey: (item) => item.id, // item is typed as Todo\n * })\n * )\n *\n * @example\n * // Explicit type\n * const todosCollection = createCollection<Todo>(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Schema inference\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * schema: todoSchema, // Type inferred from schema\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // With persistence handlers\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: fetchTodos,\n * queryClient,\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * await api.createTodos(transaction.mutations.map(m => m.modified))\n * },\n * onUpdate: async ({ transaction }) => {\n * await api.updateTodos(transaction.mutations)\n * },\n * onDelete: async ({ transaction }) => {\n * await api.deleteTodos(transaction.mutations.map(m => m.key))\n * }\n * })\n * )\n *\n * @example\n * // The select option extracts the items array from a response with metadata\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * select: (data) => data.items, // Extract the array of items\n * queryClient,\n * schema: todoSchema,\n * getKey: (item) => item.id,\n * })\n * )\n */\n// Overload for when schema is provided and select present\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n select: (data: TQueryData) => Array<InferSchemaInput<T>>\n }\n): 
CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided and select present\nexport function queryCollectionOptions<\n T extends object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n T,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n never,\n TQueryData\n > & {\n schema?: never // prohibit schema\n select: (data: TQueryData) => Array<T>\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\n// Overload for when schema is provided\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n (\n context: QueryFunctionContext<any>\n ) => Promise<Array<InferSchemaOutput<T>>>,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided\nexport function queryCollectionOptions<\n T extends object,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n T,\n (context: QueryFunctionContext<any>) => Promise<Array<T>>,\n TError,\n TQueryKey,\n TKey\n > & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\nexport function queryCollectionOptions(\n config: QueryCollectionConfig<Record<string, unknown>>\n): CollectionConfig & {\n utils: QueryCollectionUtils\n} {\n const {\n queryKey,\n queryFn,\n select,\n queryClient,\n enabled,\n refetchInterval,\n retry,\n retryDelay,\n staleTime,\n getKey,\n onInsert,\n onUpdate,\n onDelete,\n meta,\n ...baseCollectionConfig\n } = config\n\n // Validate required parameters\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryKey) {\n throw new QueryKeyRequiredError()\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryFn) {\n throw new QueryFnRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryClient) {\n throw new QueryClientRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!getKey) {\n throw new GetKeyRequiredError()\n }\n\n /** The last error encountered by the query */\n let lastError: any\n /** The number of consecutive sync failures */\n let errorCount = 0\n /** The timestamp for when the query most recently returned the status as \"error\" */\n let lastErrorUpdatedAt = 0\n\n const internalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, markReady, collection } = params\n\n const observerOptions: QueryObserverOptions<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n > = {\n queryKey: queryKey,\n queryFn: queryFn,\n meta: meta,\n 
enabled: enabled,\n refetchInterval: refetchInterval,\n retry: retry,\n retryDelay: retryDelay,\n staleTime: staleTime,\n structuralSharing: true,\n notifyOnChangeProps: `all`,\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // If startSync=true or there are subscribers to the collection, subscribe to the query straight away\n if (config.startSync || collection.subscriberCount > 0) 
{\n subscribeToQuery()\n }\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":[],"mappings":";;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA,IAAA;AAGvB,UAAM,gBAAgB,IAAI,cAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,WAAW,kBAAkB,GAAG;AACtD,uBAAA;AAAA,IACF;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAAA,MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,6BAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,u
BAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eAAe;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAa;AAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;"}
|
|
1
|
+
{"version":3,"file":"query.js","sources":["../../src/query.ts"],"sourcesContent":["import { QueryObserver } from \"@tanstack/query-core\"\nimport {\n GetKeyRequiredError,\n QueryClientRequiredError,\n QueryFnRequiredError,\n QueryKeyRequiredError,\n} from \"./errors\"\nimport { createWriteUtils } from \"./manual-sync\"\nimport type {\n QueryClient,\n QueryFunctionContext,\n QueryKey,\n QueryObserverOptions,\n} from \"@tanstack/query-core\"\nimport type {\n BaseCollectionConfig,\n ChangeMessage,\n CollectionConfig,\n DeleteMutationFnParams,\n InsertMutationFnParams,\n SyncConfig,\n UpdateMutationFnParams,\n UtilsRecord,\n} from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\n\n// Re-export for external use\nexport type { SyncOperation } from \"./manual-sync\"\n\n// Schema output type inference helper (matches electric.ts pattern)\ntype InferSchemaOutput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferOutput<T> extends object\n ? StandardSchemaV1.InferOutput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n// Schema input type inference helper (matches electric.ts pattern)\ntype InferSchemaInput<T> = T extends StandardSchemaV1\n ? StandardSchemaV1.InferInput<T> extends object\n ? StandardSchemaV1.InferInput<T>\n : Record<string, unknown>\n : Record<string, unknown>\n\n/**\n * Configuration options for creating a Query Collection\n * @template T - The explicit type of items stored in the collection\n * @template TQueryFn - The queryFn type\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @template TSchema - The schema type for validation\n */\nexport interface QueryCollectionConfig<\n T extends object = object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TSchema extends StandardSchemaV1 = never,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n> extends BaseCollectionConfig<T, TKey, TSchema> {\n /** The query key used by TanStack Query to identify this query */\n queryKey: TQueryKey\n /** Function that fetches data from the server. Must return the complete collection state */\n queryFn: TQueryFn extends (\n context: QueryFunctionContext<TQueryKey>\n ) => Promise<Array<any>>\n ? 
(context: QueryFunctionContext<TQueryKey>) => Promise<Array<T>>\n : TQueryFn\n /* Function that extracts array items from wrapped API responses (e.g metadata, pagination) */\n select?: (data: TQueryData) => Array<T>\n /** The TanStack Query client instance */\n queryClient: QueryClient\n\n // Query-specific options\n /** Whether the query should automatically run (default: true) */\n enabled?: boolean\n refetchInterval?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`refetchInterval`]\n retry?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retry`]\n retryDelay?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`retryDelay`]\n staleTime?: QueryObserverOptions<\n Array<T>,\n TError,\n Array<T>,\n Array<T>,\n TQueryKey\n >[`staleTime`]\n\n /**\n * Metadata to pass to the query.\n * Available in queryFn via context.meta\n *\n * @example\n * // Using meta for error context\n * queryFn: async (context) => {\n * try {\n * return await api.getTodos(userId)\n * } catch (error) {\n * // Use meta for better error messages\n * throw new Error(\n * context.meta?.errorMessage || 'Failed to load todos'\n * )\n * }\n * },\n * meta: {\n * errorMessage: `Failed to load todos for user ${userId}`\n * }\n */\n meta?: Record<string, unknown>\n}\n\n/**\n * Type for the refetch utility function\n */\nexport type RefetchFn = (opts?: { throwOnError?: boolean }) => Promise<void>\n\n/**\n * Utility methods available on Query Collections for direct writes and manual operations.\n * Direct writes bypass the normal query/mutation flow and write directly to the synced data store.\n * @template TItem - The type of items stored in the collection\n * @template TKey - The type of the item keys\n * @template TInsertInput - The type accepted for insert operations\n * @template TError - The type of errors that can occur during queries\n */\nexport interface QueryCollectionUtils<\n TItem extends object = Record<string, unknown>,\n TKey extends string | number = string | number,\n TInsertInput extends object = TItem,\n TError = unknown,\n> extends UtilsRecord {\n /** Manually trigger a refetch of the query */\n refetch: RefetchFn\n /** Insert one or more items directly into the synced data store without triggering a query refetch or optimistic update */\n writeInsert: (data: TInsertInput | Array<TInsertInput>) => void\n /** Update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpdate: (updates: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Delete one or more items directly from the synced data store without triggering a query refetch or optimistic update */\n writeDelete: (keys: TKey | Array<TKey>) => void\n /** Insert or update one or more items directly in the synced data store without triggering a query refetch or optimistic update */\n writeUpsert: (data: Partial<TItem> | Array<Partial<TItem>>) => void\n /** Execute multiple write operations as a single atomic batch to the synced data store */\n writeBatch: (callback: () => void) => void\n /** Get the last error encountered by the query (if any); reset on success */\n lastError: () => TError | undefined\n /** Check if the collection is in an error state */\n isError: () => boolean\n /**\n * Get the number of consecutive sync failures.\n * Incremented only when query fails completely (not per retry attempt); reset on success.\n */\n errorCount: () => number\n /**\n * Clear the error state 
and trigger a refetch of the query\n * @returns Promise that resolves when the refetch completes successfully\n * @throws Error if the refetch fails\n */\n clearError: () => Promise<void>\n}\n\n/**\n * Creates query collection options for use with a standard Collection.\n * This integrates TanStack Query with TanStack DB for automatic synchronization.\n *\n * Supports automatic type inference following the priority order:\n * 1. Schema inference (highest priority)\n * 2. QueryFn return type inference (second priority)\n *\n * @template T - Type of the schema if a schema is provided otherwise it is the type of the values returned by the queryFn\n * @template TError - The type of errors that can occur during queries\n * @template TQueryKey - The type of the query key\n * @template TKey - The type of the item keys\n * @param config - Configuration options for the Query collection\n * @returns Collection options with utilities for direct writes and manual operations\n *\n * @example\n * // Type inferred from queryFn return type (NEW!)\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => {\n * const response = await fetch('/api/todos')\n * return response.json() as Todo[] // Type automatically inferred!\n * },\n * queryClient,\n * getKey: (item) => item.id, // item is typed as Todo\n * })\n * )\n *\n * @example\n * // Explicit type\n * const todosCollection = createCollection<Todo>(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // Schema inference\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * queryClient,\n * schema: todoSchema, // Type inferred from schema\n * getKey: (item) => item.id,\n * })\n * )\n *\n * @example\n * // With persistence handlers\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: fetchTodos,\n * queryClient,\n * getKey: (item) => item.id,\n * onInsert: async ({ transaction }) => {\n * await api.createTodos(transaction.mutations.map(m => m.modified))\n * },\n * onUpdate: async ({ transaction }) => {\n * await api.updateTodos(transaction.mutations)\n * },\n * onDelete: async ({ transaction }) => {\n * await api.deleteTodos(transaction.mutations.map(m => m.key))\n * }\n * })\n * )\n *\n * @example\n * // The select option extracts the items array from a response with metadata\n * const todosCollection = createCollection(\n * queryCollectionOptions({\n * queryKey: ['todos'],\n * queryFn: async () => fetch('/api/todos').then(r => r.json()),\n * select: (data) => data.items, // Extract the array of items\n * queryClient,\n * schema: todoSchema,\n * getKey: (item) => item.id,\n * })\n * )\n */\n// Overload for when schema is provided and select present\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n select: (data: TQueryData) => Array<InferSchemaInput<T>>\n }\n): 
CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided and select present\nexport function queryCollectionOptions<\n T extends object,\n TQueryFn extends (context: QueryFunctionContext<any>) => Promise<any> = (\n context: QueryFunctionContext<any>\n ) => Promise<any>,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n TQueryData = Awaited<ReturnType<TQueryFn>>,\n>(\n config: QueryCollectionConfig<\n T,\n TQueryFn,\n TError,\n TQueryKey,\n TKey,\n never,\n TQueryData\n > & {\n schema?: never // prohibit schema\n select: (data: TQueryData) => Array<T>\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\n// Overload for when schema is provided\nexport function queryCollectionOptions<\n T extends StandardSchemaV1,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n InferSchemaOutput<T>,\n (\n context: QueryFunctionContext<any>\n ) => Promise<Array<InferSchemaOutput<T>>>,\n TError,\n TQueryKey,\n TKey,\n T\n > & {\n schema: T\n }\n): CollectionConfig<InferSchemaOutput<T>, TKey, T> & {\n schema: T\n utils: QueryCollectionUtils<\n InferSchemaOutput<T>,\n TKey,\n InferSchemaInput<T>,\n TError\n >\n}\n\n// Overload for when no schema is provided\nexport function queryCollectionOptions<\n T extends object,\n TError = unknown,\n TQueryKey extends QueryKey = QueryKey,\n TKey extends string | number = string | number,\n>(\n config: QueryCollectionConfig<\n T,\n (context: QueryFunctionContext<any>) => Promise<Array<T>>,\n TError,\n TQueryKey,\n TKey\n > & {\n schema?: never // prohibit schema\n }\n): CollectionConfig<T, TKey> & {\n schema?: never // no schema in the result\n utils: QueryCollectionUtils<T, TKey, T, TError>\n}\n\nexport function queryCollectionOptions(\n config: QueryCollectionConfig<Record<string, unknown>>\n): CollectionConfig & {\n utils: QueryCollectionUtils\n} {\n const {\n queryKey,\n queryFn,\n select,\n queryClient,\n enabled,\n refetchInterval,\n retry,\n retryDelay,\n staleTime,\n getKey,\n onInsert,\n onUpdate,\n onDelete,\n meta,\n ...baseCollectionConfig\n } = config\n\n // Validate required parameters\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryKey) {\n throw new QueryKeyRequiredError()\n }\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryFn) {\n throw new QueryFnRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!queryClient) {\n throw new QueryClientRequiredError()\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n if (!getKey) {\n throw new GetKeyRequiredError()\n }\n\n /** The last error encountered by the query */\n let lastError: any\n /** The number of consecutive sync failures */\n let errorCount = 0\n /** The timestamp for when the query most recently returned the status as \"error\" */\n let lastErrorUpdatedAt = 0\n\n const internalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, markReady, collection } = params\n\n const observerOptions: QueryObserverOptions<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n > = {\n queryKey: queryKey,\n queryFn: queryFn,\n meta: meta,\n 
enabled: enabled,\n refetchInterval: refetchInterval,\n retry: retry,\n retryDelay: retryDelay,\n staleTime: staleTime,\n structuralSharing: true,\n notifyOnChangeProps: `all`,\n }\n\n const localObserver = new QueryObserver<\n Array<any>,\n any,\n Array<any>,\n Array<any>,\n any\n >(queryClient, observerOptions)\n\n let isSubscribed = false\n let actualUnsubscribeFn: (() => void) | null = null\n\n type UpdateHandler = Parameters<typeof localObserver.subscribe>[0]\n const handleQueryResult: UpdateHandler = (result) => {\n if (result.isSuccess) {\n // Clear error state\n lastError = undefined\n errorCount = 0\n\n const rawData = result.data\n const newItemsArray = select ? select(rawData) : rawData\n\n if (\n !Array.isArray(newItemsArray) ||\n newItemsArray.some((item) => typeof item !== `object`)\n ) {\n const errorMessage = select\n ? `@tanstack/query-db-collection: select() must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n : `@tanstack/query-db-collection: queryFn must return an array of objects. Got: ${typeof newItemsArray} for queryKey ${JSON.stringify(queryKey)}`\n\n console.error(errorMessage)\n return\n }\n\n const currentSyncedItems: Map<string | number, any> = new Map(\n collection._state.syncedData.entries()\n )\n const newItemsMap = new Map<string | number, any>()\n newItemsArray.forEach((item) => {\n const key = getKey(item)\n newItemsMap.set(key, item)\n })\n\n begin()\n\n // Helper function for shallow equality check of objects\n const shallowEqual = (\n obj1: Record<string, any>,\n obj2: Record<string, any>\n ): boolean => {\n // Get all keys from both objects\n const keys1 = Object.keys(obj1)\n const keys2 = Object.keys(obj2)\n\n // If number of keys is different, objects are not equal\n if (keys1.length !== keys2.length) return false\n\n // Check if all keys in obj1 have the same values in obj2\n return keys1.every((key) => {\n // Skip comparing functions and complex objects deeply\n if (typeof obj1[key] === `function`) return true\n return obj1[key] === obj2[key]\n })\n }\n\n currentSyncedItems.forEach((oldItem, key) => {\n const newItem = newItemsMap.get(key)\n if (!newItem) {\n write({ type: `delete`, value: oldItem })\n } else if (\n !shallowEqual(\n oldItem as Record<string, any>,\n newItem as Record<string, any>\n )\n ) {\n // Only update if there are actual differences in the properties\n write({ type: `update`, value: newItem })\n }\n })\n\n newItemsMap.forEach((newItem, key) => {\n if (!currentSyncedItems.has(key)) {\n write({ type: `insert`, value: newItem })\n }\n })\n\n commit()\n\n // Mark collection as ready after first successful query result\n markReady()\n } else if (result.isError) {\n if (result.errorUpdatedAt !== lastErrorUpdatedAt) {\n lastError = result.error\n errorCount++\n lastErrorUpdatedAt = result.errorUpdatedAt\n }\n\n console.error(\n `[QueryCollection] Error observing query ${String(queryKey)}:`,\n result.error\n )\n\n // Mark collection as ready even on error to avoid blocking apps\n markReady()\n }\n }\n\n const subscribeToQuery = () => {\n if (!isSubscribed) {\n actualUnsubscribeFn = localObserver.subscribe(handleQueryResult)\n isSubscribed = true\n }\n }\n\n const unsubscribeFromQuery = () => {\n if (isSubscribed && actualUnsubscribeFn) {\n actualUnsubscribeFn()\n actualUnsubscribeFn = null\n isSubscribed = false\n }\n }\n\n // Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)\n // We'll dynamically unsubscribe/resubscribe based 
on subscriber count to maintain staleTime behavior\n subscribeToQuery()\n\n // Set up event listener for subscriber changes\n const unsubscribeFromCollectionEvents = collection.on(\n `subscribers:change`,\n ({ subscriberCount }) => {\n if (subscriberCount > 0) {\n subscribeToQuery()\n } else if (subscriberCount === 0) {\n unsubscribeFromQuery()\n }\n }\n )\n\n // Ensure we process any existing query data (QueryObserver doesn't invoke its callback automatically with initial\n // state)\n handleQueryResult(localObserver.getCurrentResult())\n\n return async () => {\n unsubscribeFromCollectionEvents()\n unsubscribeFromQuery()\n await queryClient.cancelQueries({ queryKey })\n queryClient.removeQueries({ queryKey })\n }\n }\n\n /**\n * Refetch the query data\n * @returns Promise that resolves when the refetch is complete\n */\n const refetch: RefetchFn = (opts) => {\n return queryClient.refetchQueries(\n {\n queryKey: queryKey,\n },\n {\n throwOnError: opts?.throwOnError,\n }\n )\n }\n\n // Create write context for manual write operations\n let writeContext: {\n collection: any\n queryClient: QueryClient\n queryKey: Array<unknown>\n getKey: (item: any) => string | number\n begin: () => void\n write: (message: Omit<ChangeMessage<any>, `key`>) => void\n commit: () => void\n } | null = null\n\n // Enhanced internalSync that captures write functions for manual use\n const enhancedInternalSync: SyncConfig<any>[`sync`] = (params) => {\n const { begin, write, commit, collection } = params\n\n // Store references for manual write operations\n writeContext = {\n collection,\n queryClient,\n queryKey: queryKey as unknown as Array<unknown>,\n getKey: getKey as (item: any) => string | number,\n begin,\n write,\n commit,\n }\n\n // Call the original internalSync logic\n return internalSync(params)\n }\n\n // Create write utils using the manual-sync module\n const writeUtils = createWriteUtils<any, string | number, any>(\n () => writeContext\n )\n\n // Create wrapper handlers for direct persistence operations that handle refetching\n const wrappedOnInsert = onInsert\n ? async (params: InsertMutationFnParams<any>) => {\n const handlerResult = (await onInsert(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnUpdate = onUpdate\n ? async (params: UpdateMutationFnParams<any>) => {\n const handlerResult = (await onUpdate(params)) ?? {}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n const wrappedOnDelete = onDelete\n ? async (params: DeleteMutationFnParams<any>) => {\n const handlerResult = (await onDelete(params)) ?? 
{}\n const shouldRefetch =\n (handlerResult as { refetch?: boolean }).refetch !== false\n\n if (shouldRefetch) {\n await refetch()\n }\n\n return handlerResult\n }\n : undefined\n\n return {\n ...baseCollectionConfig,\n getKey,\n sync: { sync: enhancedInternalSync },\n onInsert: wrappedOnInsert,\n onUpdate: wrappedOnUpdate,\n onDelete: wrappedOnDelete,\n utils: {\n refetch,\n ...writeUtils,\n lastError: () => lastError,\n isError: () => !!lastError,\n errorCount: () => errorCount,\n clearError: () => {\n lastError = undefined\n errorCount = 0\n lastErrorUpdatedAt = 0\n return refetch({ throwOnError: true })\n },\n },\n }\n}\n"],"names":[],"mappings":";;;AAoXO,SAAS,uBACd,QAGA;AACA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EAAA,IACD;AAKJ,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,sBAAA;AAAA,EACZ;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,qBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,yBAAA;AAAA,EACZ;AAGA,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,oBAAA;AAAA,EACZ;AAGA,MAAI;AAEJ,MAAI,aAAa;AAEjB,MAAI,qBAAqB;AAEzB,QAAM,eAAwC,CAAC,WAAW;AACxD,UAAM,EAAE,OAAO,OAAO,QAAQ,WAAW,eAAe;AAExD,UAAM,kBAMF;AAAA,MACF;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,mBAAmB;AAAA,MACnB,qBAAqB;AAAA,IAAA;AAGvB,UAAM,gBAAgB,IAAI,cAMxB,aAAa,eAAe;AAE9B,QAAI,eAAe;AACnB,QAAI,sBAA2C;AAG/C,UAAM,oBAAmC,CAAC,WAAW;AACnD,UAAI,OAAO,WAAW;AAEpB,oBAAY;AACZ,qBAAa;AAEb,cAAM,UAAU,OAAO;AACvB,cAAM,gBAAgB,SAAS,OAAO,OAAO,IAAI;AAEjD,YACE,CAAC,MAAM,QAAQ,aAAa,KAC5B,cAAc,KAAK,CAAC,SAAS,OAAO,SAAS,QAAQ,GACrD;AACA,gBAAM,eAAe,SACjB,iFAAiF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC,KAC9I,gFAAgF,OAAO,aAAa,iBAAiB,KAAK,UAAU,QAAQ,CAAC;AAEjJ,kBAAQ,MAAM,YAAY;AAC1B;AAAA,QACF;AAEA,cAAM,qBAAgD,IAAI;AAAA,UACxD,WAAW,OAAO,WAAW,QAAA;AAAA,QAAQ;AAEvC,cAAM,kCAAkB,IAAA;AACxB,sBAAc,QAAQ,CAAC,SAAS;AAC9B,gBAAM,MAAM,OAAO,IAAI;AACvB,sBAAY,IAAI,KAAK,IAAI;AAAA,QAC3B,CAAC;AAED,cAAA;AAGA,cAAM,eAAe,CACnB,MACA,SACY;AAEZ,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAC9B,gBAAM,QAAQ,OAAO,KAAK,IAAI;AAG9B,cAAI,MAAM,WAAW,MAAM,OAAQ,QAAO;AAG1C,iBAAO,MAAM,MAAM,CAAC,QAAQ;AAE1B,gBAAI,OAAO,KAAK,GAAG,MAAM,WAAY,QAAO;AAC5C,mBAAO,KAAK,GAAG,MAAM,KAAK,GAAG;AAAA,UAC/B,CAAC;AAAA,QACH;AAEA,2BAAmB,QAAQ,CAAC,SAAS,QAAQ;AAC3C,gBAAM,UAAU,YAAY,IAAI,GAAG;AACnC,cAAI,CAAC,SAAS;AACZ,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C,WACE,CAAC;AAAA,YACC;AAAA,YACA;AAAA,UAAA,GAEF;AAEA,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,oBAAY,QAAQ,CAAC,SAAS,QAAQ;AACpC,cAAI,CAAC,mBAAmB,IAAI,GAAG,GAAG;AAChC,kBAAM,EAAE,MAAM,UAAU,OAAO,SAAS;AAAA,UAC1C;AAAA,QACF,CAAC;AAED,eAAA;AAGA,kBAAA;AAAA,MACF,WAAW,OAAO,SAAS;AACzB,YAAI,OAAO,mBAAmB,oBAAoB;AAChD,sBAAY,OAAO;AACnB;AACA,+BAAqB,OAAO;AAAA,QAC9B;AAEA,gBAAQ;AAAA,UACN,2CAA2C,OAAO,QAAQ,CAAC;AAAA,UAC3D,OAAO;AAAA,QAAA;AAIT,kBAAA;AAAA,MACF;AAAA,IACF;AAEA,UAAM,mBAAmB,MAAM;AAC7B,UAAI,CAAC,cAAc;AACjB,8BAAsB,cAAc,UAAU,iBAAiB;AAC/D,uBAAe;AAAA,MACjB;AAAA,IACF;AAEA,UAAM,uBAAuB,MAAM;AACjC,UAAI,gBAAgB,qBAAqB;AACvC,4BAAA;AACA,8BAAsB;AACtB,uBAAe;AAAA,MACjB;AAAA,IACF;AAIA,qBAAA;AAGA,UAAM,kCAAkC,WAAW;AAAA,MACjD;AAAA,MACA,CAAC,EAAE,gBAAA,MAAsB;AACvB,YAAI,kBAAkB,GAAG;AACvB,2BAAA;AAAA,QACF,WAAW,oBAAoB,GAAG;AAChC,+BAAA;AAAA,QACF;AAAA,MACF;AAAA,IAAA;AAKF,sBAAkB,cAAc,kBAAkB;AAElD,WAAO,YAAY;AACjB,sCAAA;AACA,2BAAA;AACA,YAAM,YAAY,cAAc,EAAE,UAAU;AAC5C,kBAAY,cAAc,EAAE,UAAU;AAAA,IACxC;AAAA,EACF;AAMA,QAAM,UAAqB,CAAC,SAAS;AACnC,WAAO,YAAY;AAAA,MACjB;AAAA,QACE;AAAA,MAAA;AAAA,MAEF;AAAA,QACE,cAAc,MAAM;AAAA,MAAA;AAAA,IACtB;AAAA,EAEJ;AAGA,MAAI,eAQO;AAGX,QAAM,uBAAgD,CAAC,WAAW;AAChE,UAAM,EAAE,OAAO,OAAO,QAAQ,eA
Ae;AAG7C,mBAAe;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IAAA;AAIF,WAAO,aAAa,MAAM;AAAA,EAC5B;AAGA,QAAM,aAAa;AAAA,IACjB,MAAM;AAAA,EAAA;AAIR,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,QAAM,kBAAkB,WACpB,OAAO,WAAwC;AAC7C,UAAM,gBAAiB,MAAM,SAAS,MAAM,KAAM,CAAA;AAClD,UAAM,gBACH,cAAwC,YAAY;AAEvD,QAAI,eAAe;AACjB,YAAM,QAAA;AAAA,IACR;AAEA,WAAO;AAAA,EACT,IACA;AAEJ,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA,MAAM,EAAE,MAAM,qBAAA;AAAA,IACd,UAAU;AAAA,IACV,UAAU;AAAA,IACV,UAAU;AAAA,IACV,OAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM,CAAC,CAAC;AAAA,MACjB,YAAY,MAAM;AAAA,MAClB,YAAY,MAAM;AAChB,oBAAY;AACZ,qBAAa;AACb,6BAAqB;AACrB,eAAO,QAAQ,EAAE,cAAc,MAAM;AAAA,MACvC;AAAA,IAAA;AAAA,EACF;AAEJ;"}
|
package/package.json
CHANGED
|
@@ -1,10 +1,10 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tanstack/query-db-collection",
|
|
3
3
|
"description": "TanStack Query collection for TanStack DB",
|
|
4
|
-
"version": "0.2.
|
|
4
|
+
"version": "0.2.25",
|
|
5
5
|
"dependencies": {
|
|
6
6
|
"@standard-schema/spec": "^1.0.0",
|
|
7
|
-
"@tanstack/db": "0.4.
|
|
7
|
+
"@tanstack/db": "0.4.4"
|
|
8
8
|
},
|
|
9
9
|
"devDependencies": {
|
|
10
10
|
"@tanstack/query-core": "^5.90.2",
|
package/src/query.ts
CHANGED
|
@@ -564,10 +564,9 @@ export function queryCollectionOptions(
|
|
|
564
564
|
}
|
|
565
565
|
}
|
|
566
566
|
|
|
567
|
-
// If startSync=true or there are subscribers to the collection, subscribe to the query straight away
|
|
568
|
-
if (config.startSync || collection.subscriberCount > 0) {
|
|
569
|
-
subscribeToQuery()
|
|
570
|
-
}
|
|
567
|
+
// Always subscribe when sync starts (this could be from preload(), startSync config, or first subscriber)
|
|
568
|
+
// We'll dynamically unsubscribe/resubscribe based on subscriber count to maintain staleTime behavior
|
|
569
|
+
subscribeToQuery()
|
|
571
570
|
|
|
572
571
|
// Set up event listener for subscriber changes
|
|
573
572
|
const unsubscribeFromCollectionEvents = collection.on(
|