@firtoz/drizzle-indexeddb 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +139 -0
- package/README.md +477 -0
- package/package.json +71 -0
- package/src/collections/indexeddb-collection.ts +915 -0
- package/src/context/useDrizzleIndexedDB.ts +36 -0
- package/src/function-migrator.ts +234 -0
- package/src/index.ts +30 -0
- package/src/snapshot-migrator.ts +420 -0
- package/src/utils.ts +15 -0
|
@@ -0,0 +1,915 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
CollectionConfig,
|
|
3
|
+
InferSchemaOutput,
|
|
4
|
+
LoadSubsetOptions,
|
|
5
|
+
SyncConfig,
|
|
6
|
+
SyncConfigRes,
|
|
7
|
+
SyncMode,
|
|
8
|
+
} from "@tanstack/db";
|
|
9
|
+
import type { IR } from "@tanstack/db";
|
|
10
|
+
import {
|
|
11
|
+
extractSimpleComparisons,
|
|
12
|
+
parseOrderByExpression,
|
|
13
|
+
DeduplicatedLoadSubset,
|
|
14
|
+
} from "@tanstack/db";
|
|
15
|
+
import { getTableColumns, SQL, type Table } from "drizzle-orm";
|
|
16
|
+
import { createInsertSchema } from "drizzle-valibot";
|
|
17
|
+
import * as v from "valibot";
|
|
18
|
+
|
|
19
|
+
import type { IdOf, SelectSchema } from "@firtoz/drizzle-utils";
|
|
20
|
+
|
|
21
|
+
// biome-ignore lint/suspicious/noExplicitAny: intentional
|
|
22
|
+
type AnyId = IdOf<any>;
|
|
23
|
+
|
|
24
|
+
/**
 * Type for items stored in IndexedDB (must have required sync fields)
 *
 * Every synced record carries an id plus timestamp columns; all remaining
 * table columns come through the index signature.
 */
export type IndexedDBSyncItem = {
	// Primary key of the record (IdOf<any>, see AnyId above)
	id: AnyId;
	// Creation timestamp
	createdAt: Date;
	// Last-modified timestamp (bumped by the collection's update handler)
	updatedAt: Date;
	// NOTE(review): presumably a soft-delete marker (null = live) — confirm with the sync layer
	deletedAt: Date | null;
	// All other table columns
	[key: string]: unknown;
};
|
|
34
|
+
|
|
35
|
+
// WORKAROUND: DeduplicatedLoadSubset has a bug where toggling queries (e.g., isNull/isNotNull)
// creates invalid expressions like not(or(isNull(...), not(isNull(...))))
// See: https://github.com/TanStack/db/issues/828
// TODO: Re-enable once the bug is fixed
// (typed `as boolean` so the `if (useDedupe)` branch below is not narrowed to `false`)
const useDedupe = false as boolean;
|
|
40
|
+
|
|
41
|
+
/**
 * Configuration for {@link indexedDBCollectionOptions}.
 */
export interface IndexedDBCollectionConfig<TTable extends Table> {
	/**
	 * Ref to the IndexedDB database instance
	 */
	indexedDBRef: React.RefObject<IDBDatabase | null>;
	/**
	 * The database name (for perf markers)
	 */
	dbName: string;
	/**
	 * The Drizzle table definition (used for schema and type inference only)
	 */
	table: TTable;
	/**
	 * The name of the IndexedDB object store (should match the table name)
	 */
	storeName: string;
	/**
	 * Promise that resolves when the database is ready
	 */
	readyPromise: Promise<void>;
	/**
	 * Sync mode: 'eager' (immediate) or 'lazy' (on-demand)
	 */
	syncMode?: SyncMode;
	/**
	 * Enable debug logging for index discovery and query optimization
	 */
	debug?: boolean;
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Evaluates a TanStack DB IR expression against an IndexedDB item
|
|
74
|
+
*/
|
|
75
|
+
function evaluateExpression(
|
|
76
|
+
expression: IR.BasicExpression,
|
|
77
|
+
item: Record<string, unknown>,
|
|
78
|
+
): boolean {
|
|
79
|
+
if (expression.type === "ref") {
|
|
80
|
+
const propRef = expression as IR.PropRef;
|
|
81
|
+
const columnName = propRef.path[propRef.path.length - 1];
|
|
82
|
+
return item[columnName as string] !== undefined;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
if (expression.type === "val") {
|
|
86
|
+
const value = expression as IR.Value;
|
|
87
|
+
return !!value.value;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
if (expression.type === "func") {
|
|
91
|
+
const func = expression as IR.Func;
|
|
92
|
+
|
|
93
|
+
switch (func.name) {
|
|
94
|
+
case "eq": {
|
|
95
|
+
const left = getExpressionValue(func.args[0], item);
|
|
96
|
+
const right = getExpressionValue(func.args[1], item);
|
|
97
|
+
return left === right;
|
|
98
|
+
}
|
|
99
|
+
case "ne": {
|
|
100
|
+
const left = getExpressionValue(func.args[0], item);
|
|
101
|
+
const right = getExpressionValue(func.args[1], item);
|
|
102
|
+
return left !== right;
|
|
103
|
+
}
|
|
104
|
+
case "gt": {
|
|
105
|
+
const left = getExpressionValue(func.args[0], item);
|
|
106
|
+
const right = getExpressionValue(func.args[1], item);
|
|
107
|
+
return left > right;
|
|
108
|
+
}
|
|
109
|
+
case "gte": {
|
|
110
|
+
const left = getExpressionValue(func.args[0], item);
|
|
111
|
+
const right = getExpressionValue(func.args[1], item);
|
|
112
|
+
return left >= right;
|
|
113
|
+
}
|
|
114
|
+
case "lt": {
|
|
115
|
+
const left = getExpressionValue(func.args[0], item);
|
|
116
|
+
const right = getExpressionValue(func.args[1], item);
|
|
117
|
+
return left < right;
|
|
118
|
+
}
|
|
119
|
+
case "lte": {
|
|
120
|
+
const left = getExpressionValue(func.args[0], item);
|
|
121
|
+
const right = getExpressionValue(func.args[1], item);
|
|
122
|
+
return left <= right;
|
|
123
|
+
}
|
|
124
|
+
case "and": {
|
|
125
|
+
return func.args.every((arg) => evaluateExpression(arg, item));
|
|
126
|
+
}
|
|
127
|
+
case "or": {
|
|
128
|
+
return func.args.some((arg) => evaluateExpression(arg, item));
|
|
129
|
+
}
|
|
130
|
+
case "not": {
|
|
131
|
+
return !evaluateExpression(func.args[0], item);
|
|
132
|
+
}
|
|
133
|
+
case "isNull": {
|
|
134
|
+
const value = getExpressionValue(func.args[0], item);
|
|
135
|
+
return value === null || value === undefined;
|
|
136
|
+
}
|
|
137
|
+
case "isNotNull": {
|
|
138
|
+
const value = getExpressionValue(func.args[0], item);
|
|
139
|
+
return value !== null && value !== undefined;
|
|
140
|
+
}
|
|
141
|
+
case "like": {
|
|
142
|
+
const left = String(getExpressionValue(func.args[0], item));
|
|
143
|
+
const right = String(getExpressionValue(func.args[1], item));
|
|
144
|
+
// Convert SQL LIKE pattern to regex (case-sensitive)
|
|
145
|
+
const pattern = right.replace(/%/g, ".*").replace(/_/g, ".");
|
|
146
|
+
return new RegExp(`^${pattern}$`).test(left);
|
|
147
|
+
}
|
|
148
|
+
case "ilike": {
|
|
149
|
+
const left = String(getExpressionValue(func.args[0], item));
|
|
150
|
+
const right = String(getExpressionValue(func.args[1], item));
|
|
151
|
+
// Convert SQL ILIKE pattern to regex (case-insensitive)
|
|
152
|
+
const pattern = right.replace(/%/g, ".*").replace(/_/g, ".");
|
|
153
|
+
return new RegExp(`^${pattern}$`, "i").test(left);
|
|
154
|
+
}
|
|
155
|
+
case "in": {
|
|
156
|
+
const left = getExpressionValue(func.args[0], item);
|
|
157
|
+
const right = getExpressionValue(func.args[1], item);
|
|
158
|
+
// Check if left value is in the right array
|
|
159
|
+
return Array.isArray(right) && right.includes(left);
|
|
160
|
+
}
|
|
161
|
+
case "isUndefined": {
|
|
162
|
+
const value = getExpressionValue(func.args[0], item);
|
|
163
|
+
return value === null || value === undefined;
|
|
164
|
+
}
|
|
165
|
+
default:
|
|
166
|
+
throw new Error(`Unsupported function: ${func.name}`);
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
throw new Error(
|
|
171
|
+
`Unsupported expression type: ${(expression as { type: string }).type}`,
|
|
172
|
+
);
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
/**
|
|
176
|
+
* Gets the value from an IR expression
|
|
177
|
+
*/
|
|
178
|
+
function getExpressionValue(
|
|
179
|
+
expression: IR.BasicExpression,
|
|
180
|
+
item: Record<string, unknown>,
|
|
181
|
+
// biome-ignore lint/suspicious/noExplicitAny: We need any here for dynamic values
|
|
182
|
+
): any {
|
|
183
|
+
if (expression.type === "ref") {
|
|
184
|
+
const propRef = expression as IR.PropRef;
|
|
185
|
+
const columnName = propRef.path[propRef.path.length - 1];
|
|
186
|
+
return item[columnName as string];
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
if (expression.type === "val") {
|
|
190
|
+
const value = expression as IR.Value;
|
|
191
|
+
return value.value;
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
throw new Error(`Cannot get value from expression type: ${expression.type}`);
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
/**
|
|
198
|
+
* Reads all items from an IndexedDB object store
|
|
199
|
+
*/
|
|
200
|
+
function getAllFromStore(
|
|
201
|
+
db: IDBDatabase,
|
|
202
|
+
storeName: string,
|
|
203
|
+
): Promise<IndexedDBSyncItem[]> {
|
|
204
|
+
return new Promise((resolve, reject) => {
|
|
205
|
+
if (!db.objectStoreNames.contains(storeName)) {
|
|
206
|
+
resolve([]);
|
|
207
|
+
return;
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
const transaction = db.transaction(storeName, "readonly");
|
|
211
|
+
|
|
212
|
+
const store = transaction.objectStore(storeName);
|
|
213
|
+
|
|
214
|
+
const request = store.getAll();
|
|
215
|
+
|
|
216
|
+
request.onsuccess = () => {
|
|
217
|
+
resolve(request.result as IndexedDBSyncItem[]);
|
|
218
|
+
};
|
|
219
|
+
|
|
220
|
+
request.onerror = () => {
|
|
221
|
+
reject(request.error);
|
|
222
|
+
};
|
|
223
|
+
});
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
/**
|
|
227
|
+
* Reads items from an IndexedDB index with an optional key range
|
|
228
|
+
* Note: Index existence is validated at collection creation time
|
|
229
|
+
*/
|
|
230
|
+
function getAllFromIndex(
|
|
231
|
+
db: IDBDatabase,
|
|
232
|
+
storeName: string,
|
|
233
|
+
indexName: string,
|
|
234
|
+
keyRange?: IDBKeyRange,
|
|
235
|
+
): Promise<IndexedDBSyncItem[]> {
|
|
236
|
+
return new Promise((resolve, reject) => {
|
|
237
|
+
const transaction = db.transaction(storeName, "readonly");
|
|
238
|
+
|
|
239
|
+
const store = transaction.objectStore(storeName);
|
|
240
|
+
|
|
241
|
+
const index = store.index(indexName);
|
|
242
|
+
|
|
243
|
+
const request = keyRange ? index.getAll(keyRange) : index.getAll();
|
|
244
|
+
|
|
245
|
+
request.onsuccess = () => {
|
|
246
|
+
resolve(request.result as IndexedDBSyncItem[]);
|
|
247
|
+
};
|
|
248
|
+
|
|
249
|
+
request.onerror = () => {
|
|
250
|
+
reject(request.error);
|
|
251
|
+
};
|
|
252
|
+
});
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
/**
|
|
256
|
+
* Attempts to extract a simple indexed query from an IR expression
|
|
257
|
+
* Returns the field name and key range if the query can be optimized
|
|
258
|
+
*
|
|
259
|
+
* NOTE: IndexedDB indexes are much more limited than SQL WHERE clauses:
|
|
260
|
+
* - Only supports simple comparisons on a SINGLE indexed field
|
|
261
|
+
* - Supported operators: eq, gt, gte, lt, lte
|
|
262
|
+
* - Complex queries (AND, OR, NOT, multiple fields) fall back to in-memory filtering
|
|
263
|
+
*
|
|
264
|
+
* Indexes are auto-discovered from your Drizzle schema:
|
|
265
|
+
* - Define indexes using index().on() in your schema
|
|
266
|
+
* - Run migrations to create them in IndexedDB
|
|
267
|
+
* - This collection automatically detects and uses them
|
|
268
|
+
*/
|
|
269
|
+
function tryExtractIndexedQuery(
|
|
270
|
+
expression: IR.BasicExpression,
|
|
271
|
+
indexes?: Record<string, string>,
|
|
272
|
+
debug?: boolean,
|
|
273
|
+
): { fieldName: string; indexName: string; keyRange: IDBKeyRange } | null {
|
|
274
|
+
if (!indexes) {
|
|
275
|
+
return null;
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
try {
|
|
279
|
+
// Use TanStack DB helper to extract simple comparisons
|
|
280
|
+
const comparisons = extractSimpleComparisons(expression);
|
|
281
|
+
|
|
282
|
+
// We can only use an index for a single field
|
|
283
|
+
if (comparisons.length !== 1) {
|
|
284
|
+
return null;
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
const comparison = comparisons[0];
|
|
288
|
+
const fieldName = comparison.field.join(".");
|
|
289
|
+
const indexName = indexes[fieldName];
|
|
290
|
+
|
|
291
|
+
if (!indexName) {
|
|
292
|
+
return null;
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
// Convert operator to IndexedDB key range
|
|
296
|
+
|
|
297
|
+
let keyRange: IDBKeyRange | null = null;
|
|
298
|
+
|
|
299
|
+
switch (comparison.operator) {
|
|
300
|
+
case "eq":
|
|
301
|
+
keyRange = IDBKeyRange.only(comparison.value);
|
|
302
|
+
break;
|
|
303
|
+
case "gt":
|
|
304
|
+
keyRange = IDBKeyRange.lowerBound(comparison.value, true);
|
|
305
|
+
break;
|
|
306
|
+
case "gte":
|
|
307
|
+
keyRange = IDBKeyRange.lowerBound(comparison.value, false);
|
|
308
|
+
break;
|
|
309
|
+
case "lt":
|
|
310
|
+
keyRange = IDBKeyRange.upperBound(comparison.value, true);
|
|
311
|
+
break;
|
|
312
|
+
case "lte":
|
|
313
|
+
keyRange = IDBKeyRange.upperBound(comparison.value, false);
|
|
314
|
+
break;
|
|
315
|
+
default:
|
|
316
|
+
if (debug) {
|
|
317
|
+
console.warn(
|
|
318
|
+
`Skipping indexed query extraction for unsupported operator: ${comparison.operator}`,
|
|
319
|
+
);
|
|
320
|
+
}
|
|
321
|
+
return null;
|
|
322
|
+
}
|
|
323
|
+
|
|
324
|
+
if (!keyRange) {
|
|
325
|
+
return null;
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
return { fieldName, indexName, keyRange };
|
|
329
|
+
} catch (error) {
|
|
330
|
+
console.error("Error extracting indexed query", error, expression);
|
|
331
|
+
// If extractSimpleComparisons fails, it's a complex query
|
|
332
|
+
|
|
333
|
+
return null;
|
|
334
|
+
}
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
/**
|
|
338
|
+
* Adds an item to an IndexedDB object store using an existing transaction
|
|
339
|
+
*/
|
|
340
|
+
function addToStoreInTransaction(
|
|
341
|
+
store: IDBObjectStore,
|
|
342
|
+
item: IndexedDBSyncItem,
|
|
343
|
+
): Promise<void> {
|
|
344
|
+
return new Promise((resolve, reject) => {
|
|
345
|
+
const request = store.add(item);
|
|
346
|
+
|
|
347
|
+
request.onsuccess = () => {
|
|
348
|
+
resolve();
|
|
349
|
+
};
|
|
350
|
+
|
|
351
|
+
request.onerror = () => {
|
|
352
|
+
reject(request.error);
|
|
353
|
+
};
|
|
354
|
+
});
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
/**
|
|
358
|
+
* Updates an item in an IndexedDB object store using an existing transaction
|
|
359
|
+
*/
|
|
360
|
+
function updateInStoreInTransaction(
|
|
361
|
+
store: IDBObjectStore,
|
|
362
|
+
item: IndexedDBSyncItem,
|
|
363
|
+
): Promise<void> {
|
|
364
|
+
return new Promise((resolve, reject) => {
|
|
365
|
+
const request = store.put(item);
|
|
366
|
+
|
|
367
|
+
request.onsuccess = () => {
|
|
368
|
+
resolve();
|
|
369
|
+
};
|
|
370
|
+
|
|
371
|
+
request.onerror = () => {
|
|
372
|
+
reject(request.error);
|
|
373
|
+
};
|
|
374
|
+
});
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
/**
|
|
378
|
+
* Deletes an item from an IndexedDB object store using an existing transaction
|
|
379
|
+
*/
|
|
380
|
+
function deleteFromStoreInTransaction(
|
|
381
|
+
store: IDBObjectStore,
|
|
382
|
+
id: string,
|
|
383
|
+
): Promise<void> {
|
|
384
|
+
return new Promise((resolve, reject) => {
|
|
385
|
+
const request = store.delete(id);
|
|
386
|
+
|
|
387
|
+
request.onsuccess = () => {
|
|
388
|
+
resolve();
|
|
389
|
+
};
|
|
390
|
+
|
|
391
|
+
request.onerror = () => {
|
|
392
|
+
reject(request.error);
|
|
393
|
+
};
|
|
394
|
+
});
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
/**
|
|
398
|
+
* Gets a single item from an IndexedDB object store by ID using an existing transaction
|
|
399
|
+
*/
|
|
400
|
+
function getFromStoreInTransaction(
|
|
401
|
+
store: IDBObjectStore,
|
|
402
|
+
id: AnyId,
|
|
403
|
+
): Promise<IndexedDBSyncItem | undefined> {
|
|
404
|
+
return new Promise((resolve, reject) => {
|
|
405
|
+
const request = store.get(id);
|
|
406
|
+
|
|
407
|
+
request.onsuccess = () => {
|
|
408
|
+
resolve(request.result as IndexedDBSyncItem | undefined);
|
|
409
|
+
};
|
|
410
|
+
|
|
411
|
+
request.onerror = () => {
|
|
412
|
+
reject(request.error);
|
|
413
|
+
};
|
|
414
|
+
});
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
/**
|
|
418
|
+
* Executes a transaction and returns a promise that resolves when the transaction completes
|
|
419
|
+
*/
|
|
420
|
+
function commitTransaction(transaction: IDBTransaction): Promise<void> {
|
|
421
|
+
return new Promise((resolve, reject) => {
|
|
422
|
+
transaction.oncomplete = () => {
|
|
423
|
+
resolve();
|
|
424
|
+
};
|
|
425
|
+
|
|
426
|
+
transaction.onerror = () => {
|
|
427
|
+
reject(transaction.error);
|
|
428
|
+
};
|
|
429
|
+
|
|
430
|
+
transaction.onabort = () => {
|
|
431
|
+
reject(new Error("Transaction aborted"));
|
|
432
|
+
};
|
|
433
|
+
});
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
/**
|
|
437
|
+
* Auto-discovers indexes from the IndexedDB store
|
|
438
|
+
* Returns a map of field names to index names for single-column indexes
|
|
439
|
+
*
|
|
440
|
+
* NOTE: Indexes are created automatically by Drizzle migrations based on your schema:
|
|
441
|
+
*
|
|
442
|
+
* @example
|
|
443
|
+
* // In your schema.ts:
|
|
444
|
+
* export const todoTable = syncableTable(
|
|
445
|
+
* "todo",
|
|
446
|
+
* { title: text("title"), userId: text("userId") },
|
|
447
|
+
* (t) => [
|
|
448
|
+
* index("todo_user_id_index").on(t.userId),
|
|
449
|
+
* index("todo_created_at_index").on(t.createdAt),
|
|
450
|
+
* ]
|
|
451
|
+
* );
|
|
452
|
+
*
|
|
453
|
+
* // The migrator will automatically create these indexes in IndexedDB
|
|
454
|
+
* // This collection will auto-detect and use them for optimized queries
|
|
455
|
+
*/
|
|
456
|
+
function discoverIndexes(
|
|
457
|
+
db: IDBDatabase,
|
|
458
|
+
storeName: string,
|
|
459
|
+
): Record<string, string> {
|
|
460
|
+
if (!db.objectStoreNames.contains(storeName)) {
|
|
461
|
+
return {};
|
|
462
|
+
}
|
|
463
|
+
|
|
464
|
+
const transaction = db.transaction(storeName, "readonly");
|
|
465
|
+
|
|
466
|
+
const store = transaction.objectStore(storeName);
|
|
467
|
+
|
|
468
|
+
const indexMap: Record<string, string> = {};
|
|
469
|
+
|
|
470
|
+
// Iterate through all indexes in the store
|
|
471
|
+
const indexNames = Array.from(store.indexNames);
|
|
472
|
+
|
|
473
|
+
for (const indexName of indexNames) {
|
|
474
|
+
const index = store.index(indexName);
|
|
475
|
+
const keyPath = index.keyPath;
|
|
476
|
+
|
|
477
|
+
// Only map single-column indexes (string keyPath)
|
|
478
|
+
// Compound indexes (array keyPath) are more complex and not currently optimized
|
|
479
|
+
if (typeof keyPath === "string") {
|
|
480
|
+
indexMap[keyPath] = indexName;
|
|
481
|
+
}
|
|
482
|
+
}
|
|
483
|
+
|
|
484
|
+
return indexMap;
|
|
485
|
+
}
|
|
486
|
+
|
|
487
|
+
/**
 * Creates a TanStack DB collection config for IndexedDB
 *
 * Builds the sync pipeline for the object store named by `config.storeName`:
 * initial load (eager mode), optimistic insert/update/delete handlers with
 * rollback, an on-demand `loadSubset` for lazy mode, and an insert schema
 * derived from the Drizzle table with column defaults applied client-side.
 */
export function indexedDBCollectionOptions<const TTable extends Table>(
	config: IndexedDBCollectionConfig<TTable>,
) {
	// Defer index discovery until the database is ready
	let discoveredIndexes: Record<string, string> = {};
	let indexesDiscovered = false;

	type CollectionType = CollectionConfig<
		InferSchemaOutput<SelectSchema<TTable>>,
		string,
		// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
		any
	>;

	const table = config.table;

	// Mutation listeners are installed by `sync` and cleared in `cleanup`.
	let insertListener: CollectionType["onInsert"] | null = null;
	let updateListener: CollectionType["onUpdate"] | null = null;
	let deleteListener: CollectionType["onDelete"] | null = null;

	const sync: SyncConfig<
		InferSchemaOutput<SelectSchema<TTable>>,
		string
	>["sync"] = (params) => {
		const { begin, write, commit, markReady } = params;

		// Discover indexes once when the database is ready, regardless of sync mode
		const discoverIndexesOnce = async () => {
			await config.readyPromise;

			if (!indexesDiscovered) {
				discoveredIndexes = discoverIndexes(
					// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
					config.indexedDBRef.current!,
					config.storeName,
				);

				indexesDiscovered = true;
			}
		};

		// Eager mode: load the whole store into the reactive collection up front.
		const initialSync = async () => {
			await discoverIndexesOnce();

			try {
				begin();

				const items = await getAllFromStore(
					// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
					config.indexedDBRef.current!,
					config.storeName,
				);

				for (const item of items) {
					write({
						type: "insert",
						value: item as unknown as InferSchemaOutput<SelectSchema<TTable>>,
					});
				}

				commit();
			} finally {
				// Mark ready even on failure so consumers are not blocked forever.
				markReady();
			}
		};

		if (config.syncMode === "eager" || !config.syncMode) {
			initialSync();
		} else {
			// For non-eager sync modes, still discover indexes but don't load data
			discoverIndexesOnce().then(() => markReady());
		}

		insertListener = async (params) => {
			try {
				// Use a single transaction for all inserts
				// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
				const transaction = config.indexedDBRef.current!.transaction(
					config.storeName,
					"readwrite",
				);
				const store = transaction.objectStore(config.storeName);

				// Optimistically update the reactive store while parallelizing IndexedDB writes
				begin();
				const addPromises: Promise<void>[] = [];

				for (const item of params.transaction.mutations) {
					// Parse and apply defaults using valibot
					// const itemToInsert = v.parse(insertSchemaWithDefaults, item.modified);
					const itemToInsert = item.modified;

					// Write to reactive store immediately (optimistic)
					write({
						type: "insert",
						value: itemToInsert as unknown as InferSchemaOutput<
							SelectSchema<TTable>
						>,
					});

					// Add to IndexedDB in parallel (don't await yet)
					addPromises.push(
						addToStoreInTransaction(store, itemToInsert as IndexedDBSyncItem),
					);
				}

				commit();

				// Wait for all IndexedDB writes to complete
				await Promise.all(addPromises);
				await commitTransaction(transaction);
			} catch (error) {
				// Roll back the optimistic inserts by deleting them again.
				begin();
				for (const item of params.transaction.mutations) {
					write({
						type: "delete",
						value: item.modified,
					});
				}
				commit();

				throw error;
			}
		};

		updateListener = async (params) => {
			// Optimistically apply the updates to the reactive store first.
			begin();
			for (const item of params.transaction.mutations) {
				write({
					type: "update",
					value: item.modified,
				});
			}
			commit();

			try {
				// Use a single transaction for all updates
				// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
				const transaction = config.indexedDBRef.current!.transaction(
					config.storeName,
					"readwrite",
				);
				const store = transaction.objectStore(config.storeName);

				for (const item of params.transaction.mutations) {
					const existing = await getFromStoreInTransaction(store, item.key);

					if (existing) {
						const updateTime = new Date();
						// Merge changes over the stored record and bump updatedAt.
						const updatedItem = {
							...existing,
							...item.changes,
							updatedAt: updateTime,
						} as IndexedDBSyncItem;

						await updateInStoreInTransaction(store, updatedItem);
					}
				}

				// Wait for transaction to complete
				await commitTransaction(transaction);
			} catch (error) {
				// Roll back: restore the original values in the reactive store.
				begin();
				for (const item of params.transaction.mutations) {
					const original = item.original;
					write({
						type: "update",
						value: original,
					});
				}
				commit();

				throw error;
			}
		};

		deleteListener = async (params) => {
			// Optimistically remove the items from the reactive store first.
			begin();
			for (const item of params.transaction.mutations) {
				write({
					type: "delete",
					value: item.modified,
				});
			}
			commit();

			try {
				// Use a single transaction for all deletes
				// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
				const transaction = config.indexedDBRef.current!.transaction(
					config.storeName,
					"readwrite",
				);
				const store = transaction.objectStore(config.storeName);

				for (const item of params.transaction.mutations) {
					await deleteFromStoreInTransaction(store, item.key);
				}

				// Wait for transaction to complete
				await commitTransaction(transaction);
			} catch (error) {
				// Roll back: re-insert the originals into the reactive store.
				begin();
				for (const item of params.transaction.mutations) {
					const original = item.original;
					write({
						type: "insert",
						value: original,
					});
				}
				commit();

				throw error;
			}
		};

		// On-demand loader: answers where/orderBy/limit queries, using a
		// single-field IndexedDB index when one matches, otherwise a full scan
		// filtered in memory via evaluateExpression.
		const loadSubset = async (options: LoadSubsetOptions) => {
			await config.readyPromise;

			// Ensure indexes are discovered before we try to use them
			if (!indexesDiscovered) {
				discoveredIndexes = discoverIndexes(
					// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
					config.indexedDBRef.current!,
					config.storeName,
				);
				indexesDiscovered = true;
			}

			begin();

			try {
				let items: IndexedDBSyncItem[];

				// Try to use an index for efficient querying
				const indexedQuery = options.where
					? tryExtractIndexedQuery(
							options.where,
							discoveredIndexes,
							config.debug,
						)
					: null;

				if (indexedQuery) {
					// Use indexed query for better performance

					items = await getAllFromIndex(
						// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
						config.indexedDBRef.current!,
						config.storeName,
						indexedQuery.indexName,
						indexedQuery.keyRange,
					);
				} else {
					// Fall back to getting all items

					items = await getAllFromStore(
						// biome-ignore lint/style/noNonNullAssertion: DB is guaranteed to be ready after readyPromise resolves
						config.indexedDBRef.current!,
						config.storeName,
					);

					// Apply where filter in memory
					if (options.where) {
						const whereExpression = options.where;
						items = items.filter((item) =>
							evaluateExpression(
								whereExpression,
								item as Record<string, unknown>,
							),
						);
					}
				}

				// Apply orderBy
				if (options.orderBy) {
					const sorts = parseOrderByExpression(options.orderBy);
					items.sort((a, b) => {
						for (const sort of sorts) {
							// Access nested field (though typically will be single level)
							// biome-ignore lint/suspicious/noExplicitAny: Need any for dynamic field access
							let aValue: any = a;
							// biome-ignore lint/suspicious/noExplicitAny: Need any for dynamic field access
							let bValue: any = b;
							for (const fieldName of sort.field) {
								aValue = aValue?.[fieldName];
								bValue = bValue?.[fieldName];
							}

							if (aValue < bValue) {
								return sort.direction === "asc" ? -1 : 1;
							}
							if (aValue > bValue) {
								return sort.direction === "asc" ? 1 : -1;
							}
						}
						return 0;
					});
				}

				// Apply limit
				if (options.limit !== undefined) {
					items = items.slice(0, options.limit);
				}

				for (const item of items) {
					write({
						type: "insert",
						value: item as unknown as InferSchemaOutput<SelectSchema<TTable>>,
					});
				}

				commit();
			} catch (error) {
				// NOTE(review): commit() on the error path closes the batch opened
				// by begin() so the collection is not left mid-batch.
				commit();
				throw error;
			}
		};

		// Create deduplicated loadSubset wrapper to avoid redundant queries
		let loadSubsetDedupe: DeduplicatedLoadSubset | null = null;
		if (useDedupe) {
			loadSubsetDedupe = new DeduplicatedLoadSubset({
				loadSubset,
			});
		}

		return {
			cleanup: () => {
				insertListener = null;
				updateListener = null;
				deleteListener = null;
				loadSubsetDedupe?.reset();
			},
			loadSubset: loadSubsetDedupe?.loadSubset ?? loadSubset,
		} satisfies SyncConfigRes;
	};

	const insertSchema = createInsertSchema(table);
	const columns = getTableColumns(table);

	// Fail fast at collection-creation time if any column default is a SQL
	// expression — those cannot be evaluated client-side for IndexedDB.
	for (const columnName in columns) {
		const column = columns[columnName];

		let defaultValue: unknown | undefined;
		if (column.defaultFn) {
			defaultValue = column.defaultFn();
		} else if (column.default !== undefined) {
			defaultValue = column.default;
		}

		if (defaultValue instanceof SQL) {
			throw new Error(
				`Default value for column ${columnName} is a SQL expression, which is not supported for IndexedDB`,
			);
		}
	}

	// Augment the schema to handle defaultFn and defaults
	const insertSchemaWithDefaults = v.pipe(
		insertSchema,
		v.transform((input) => {
			const result = { ...input } as Record<string, unknown>;

			for (const columnName in columns) {
				const column = columns[columnName];
				if (result[columnName] !== undefined) continue;

				let defaultValue: unknown | undefined;
				if (column.defaultFn) {
					defaultValue = column.defaultFn();
				} else if (column.default !== undefined) {
					defaultValue = column.default;
				}

				if (defaultValue instanceof SQL) {
					throw new Error(
						`Default value for column ${columnName} is a SQL expression, which is not supported for IndexedDB`,
					);
				}

				if (defaultValue !== undefined) {
					result[columnName] = defaultValue;
					continue;
				}

				if (column.notNull) {
					throw new Error(`Column ${columnName} is not nullable`);
				}

				// Nullable column with no default: store an explicit null.
				result[columnName] = null;
			}

			return result;
		}),
	);

	const result = {
		schema: insertSchemaWithDefaults,
		getKey: (item: InferSchemaOutput<SelectSchema<TTable>>) => {
			const id = (item as { id: string }).id;
			return id;
		},
		sync: {
			sync,
		},
		onInsert: async (
			params: Parameters<NonNullable<CollectionType["onInsert"]>>[0],
		) => {
			await insertListener?.(params);
		},
		onUpdate: async (
			params: Parameters<NonNullable<CollectionType["onUpdate"]>>[0],
		) => {
			await updateListener?.(params);
		},
		onDelete: async (
			params: Parameters<NonNullable<CollectionType["onDelete"]>>[0],
		) => {
			await deleteListener?.(params);
		},
		syncMode: config.syncMode,
	} satisfies CollectionType;

	return result;
}
|