@firtoz/db-helpers 1.0.0 → 2.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +7 -4
- package/src/deferred-write-queue.ts +202 -0
- package/src/generic-sync.ts +653 -0
- package/src/index.ts +17 -0
- package/src/ir-evaluator.ts +136 -0
- package/src/memoryCollection.ts +43 -8
- package/src/sync-types.ts +16 -0
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@firtoz/db-helpers",
|
|
3
|
-
"version": "1.0
|
|
3
|
+
"version": "2.1.0",
|
|
4
4
|
"description": "TanStack DB helpers and utilities",
|
|
5
5
|
"main": "./src/index.ts",
|
|
6
6
|
"module": "./src/index.ts",
|
|
@@ -18,7 +18,8 @@
|
|
|
18
18
|
"README.md"
|
|
19
19
|
],
|
|
20
20
|
"scripts": {
|
|
21
|
-
"typecheck": "
|
|
21
|
+
"typecheck": "tsgo --noEmit -p ./tsconfig.json",
|
|
22
|
+
"test": "bun test",
|
|
22
23
|
"lint": "biome check --write src",
|
|
23
24
|
"lint:ci": "biome ci src",
|
|
24
25
|
"format": "biome format src --write"
|
|
@@ -48,11 +49,13 @@
|
|
|
48
49
|
},
|
|
49
50
|
"peerDependencies": {
|
|
50
51
|
"@standard-schema/spec": ">=1.1.0",
|
|
51
|
-
"@tanstack/db": ">=0.
|
|
52
|
+
"@tanstack/db": ">=0.6.1"
|
|
52
53
|
},
|
|
53
54
|
"devDependencies": {
|
|
54
55
|
"@standard-schema/spec": "^1.1.0",
|
|
55
|
-
"@tanstack/db": "^0.
|
|
56
|
+
"@tanstack/db": "^0.6.1",
|
|
57
|
+
"bun-types": "^1.3.11",
|
|
58
|
+
"zod": "^4.3.6"
|
|
56
59
|
},
|
|
57
60
|
"dependencies": {
|
|
58
61
|
"@firtoz/maybe-error": "^1.5.2"
|
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
import type { GenericSyncBackend } from "./generic-sync";
|
|
2
|
+
|
|
3
|
+
export type DeferredUpdateMutation<TItem extends object> = {
|
|
4
|
+
key: string;
|
|
5
|
+
changes: Partial<TItem>;
|
|
6
|
+
original: TItem;
|
|
7
|
+
};
|
|
8
|
+
|
|
9
|
+
export type DeferredDeleteMutation<TItem extends object> = {
|
|
10
|
+
key: string;
|
|
11
|
+
modified: TItem;
|
|
12
|
+
original: TItem;
|
|
13
|
+
};
|
|
14
|
+
|
|
15
|
+
type PendingRow<TItem extends object> =
|
|
16
|
+
| { kind: "row"; value: TItem; insertedOnly: boolean }
|
|
17
|
+
| { kind: "delete" };
|
|
18
|
+
|
|
19
|
+
function mergeUpdate<TItem extends object>(
|
|
20
|
+
m: DeferredUpdateMutation<TItem>,
|
|
21
|
+
): TItem {
|
|
22
|
+
return { ...m.original, ...m.changes } as TItem;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Write-behind queue for local mutations: coalesces by persist key and flushes to a
|
|
27
|
+
* {@link GenericSyncBackend} on an interval or when {@link flush} is called explicitly.
|
|
28
|
+
*/
|
|
29
|
+
export class DeferredWriteQueue<TItem extends object> {
|
|
30
|
+
readonly #backend: GenericSyncBackend<TItem>;
|
|
31
|
+
readonly #getPersistKey: (item: TItem) => string;
|
|
32
|
+
readonly #flushIntervalMs: number;
|
|
33
|
+
#pending = new Map<string, PendingRow<TItem>>();
|
|
34
|
+
#intervalId: ReturnType<typeof setInterval> | null = null;
|
|
35
|
+
#flushTail: Promise<void> = Promise.resolve();
|
|
36
|
+
#disposed = false;
|
|
37
|
+
|
|
38
|
+
constructor(options: {
|
|
39
|
+
backend: GenericSyncBackend<TItem>;
|
|
40
|
+
getPersistKey: (item: TItem) => string;
|
|
41
|
+
flushIntervalMs?: number;
|
|
42
|
+
}) {
|
|
43
|
+
this.#backend = options.backend;
|
|
44
|
+
this.#getPersistKey = options.getPersistKey;
|
|
45
|
+
this.#flushIntervalMs = options.flushIntervalMs ?? 100;
|
|
46
|
+
|
|
47
|
+
if (typeof globalThis !== "undefined") {
|
|
48
|
+
globalThis.addEventListener?.("beforeunload", this.#onBeforeUnload);
|
|
49
|
+
globalThis.addEventListener?.(
|
|
50
|
+
"visibilitychange",
|
|
51
|
+
this.#onVisibilityChange,
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
this.#intervalId = setInterval(() => {
|
|
56
|
+
void this.flush();
|
|
57
|
+
}, this.#flushIntervalMs);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
#onBeforeUnload = (): void => {
|
|
61
|
+
void this.flush();
|
|
62
|
+
};
|
|
63
|
+
|
|
64
|
+
#onVisibilityChange = (): void => {
|
|
65
|
+
const doc = (
|
|
66
|
+
globalThis as typeof globalThis & {
|
|
67
|
+
document?: { visibilityState?: string };
|
|
68
|
+
}
|
|
69
|
+
).document;
|
|
70
|
+
if (doc?.visibilityState === "hidden") {
|
|
71
|
+
void this.flush();
|
|
72
|
+
}
|
|
73
|
+
};
|
|
74
|
+
|
|
75
|
+
enqueueInsert(items: TItem[]): void {
|
|
76
|
+
if (this.#disposed || items.length === 0) return;
|
|
77
|
+
for (const item of items) {
|
|
78
|
+
const key = this.#getPersistKey(item);
|
|
79
|
+
const cur = this.#pending.get(key);
|
|
80
|
+
if (cur?.kind === "delete") {
|
|
81
|
+
this.#pending.set(key, {
|
|
82
|
+
kind: "row",
|
|
83
|
+
value: item,
|
|
84
|
+
insertedOnly: true,
|
|
85
|
+
});
|
|
86
|
+
continue;
|
|
87
|
+
}
|
|
88
|
+
if (cur?.kind === "row" && !cur.insertedOnly) {
|
|
89
|
+
this.#pending.set(key, {
|
|
90
|
+
kind: "row",
|
|
91
|
+
value: item,
|
|
92
|
+
insertedOnly: false,
|
|
93
|
+
});
|
|
94
|
+
continue;
|
|
95
|
+
}
|
|
96
|
+
this.#pending.set(key, { kind: "row", value: item, insertedOnly: true });
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
enqueueUpdate(mutations: DeferredUpdateMutation<TItem>[]): void {
|
|
101
|
+
if (this.#disposed || mutations.length === 0) return;
|
|
102
|
+
for (const m of mutations) {
|
|
103
|
+
const key = m.key;
|
|
104
|
+
const value = mergeUpdate(m);
|
|
105
|
+
const cur = this.#pending.get(key);
|
|
106
|
+
if (cur?.kind === "delete") {
|
|
107
|
+
this.#pending.set(key, { kind: "row", value, insertedOnly: false });
|
|
108
|
+
continue;
|
|
109
|
+
}
|
|
110
|
+
if (cur?.kind === "row") {
|
|
111
|
+
this.#pending.set(key, {
|
|
112
|
+
kind: "row",
|
|
113
|
+
value,
|
|
114
|
+
insertedOnly: cur.insertedOnly,
|
|
115
|
+
});
|
|
116
|
+
continue;
|
|
117
|
+
}
|
|
118
|
+
this.#pending.set(key, { kind: "row", value, insertedOnly: false });
|
|
119
|
+
}
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
enqueueDelete(mutations: DeferredDeleteMutation<TItem>[]): void {
|
|
123
|
+
if (this.#disposed || mutations.length === 0) return;
|
|
124
|
+
for (const m of mutations) {
|
|
125
|
+
this.#pending.set(m.key, { kind: "delete" });
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Drains pending ops into the backend. Serialized so concurrent flushes chain.
|
|
131
|
+
*/
|
|
132
|
+
flush(): Promise<void> {
|
|
133
|
+
this.#flushTail = this.#flushTail
|
|
134
|
+
.catch(() => {})
|
|
135
|
+
.then(() => this.#flushImpl());
|
|
136
|
+
return this.#flushTail;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
async #flushImpl(): Promise<void> {
|
|
140
|
+
if (this.#pending.size === 0) return;
|
|
141
|
+
const entries = [...this.#pending.entries()];
|
|
142
|
+
this.#pending.clear();
|
|
143
|
+
|
|
144
|
+
const deletePayload: DeferredDeleteMutation<TItem>[] = [];
|
|
145
|
+
const toInsert: TItem[] = [];
|
|
146
|
+
const toUpsert: TItem[] = [];
|
|
147
|
+
|
|
148
|
+
for (const [key, op] of entries) {
|
|
149
|
+
if (op.kind === "delete") {
|
|
150
|
+
const id =
|
|
151
|
+
Number.isFinite(Number(key)) && String(Number(key)) === key
|
|
152
|
+
? Number(key)
|
|
153
|
+
: key;
|
|
154
|
+
const stub = { id } as TItem;
|
|
155
|
+
deletePayload.push({
|
|
156
|
+
key,
|
|
157
|
+
modified: stub,
|
|
158
|
+
original: stub,
|
|
159
|
+
});
|
|
160
|
+
} else if (op.insertedOnly) {
|
|
161
|
+
toInsert.push(op.value);
|
|
162
|
+
} else {
|
|
163
|
+
toUpsert.push(op.value);
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
if (deletePayload.length > 0) {
|
|
168
|
+
await this.#backend.handleDelete(deletePayload);
|
|
169
|
+
}
|
|
170
|
+
if (toInsert.length > 0) {
|
|
171
|
+
await this.#backend.handleInsert(toInsert);
|
|
172
|
+
}
|
|
173
|
+
if (toUpsert.length > 0) {
|
|
174
|
+
if (this.#backend.handleBatchPut !== undefined) {
|
|
175
|
+
await this.#backend.handleBatchPut(toUpsert);
|
|
176
|
+
} else {
|
|
177
|
+
await this.#backend.handleUpdate(
|
|
178
|
+
toUpsert.map((value) => ({
|
|
179
|
+
key: this.#getPersistKey(value),
|
|
180
|
+
changes: value as Partial<TItem>,
|
|
181
|
+
original: value,
|
|
182
|
+
})),
|
|
183
|
+
);
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
dispose(): void {
|
|
189
|
+
if (this.#disposed) return;
|
|
190
|
+
this.#disposed = true;
|
|
191
|
+
if (this.#intervalId !== null) {
|
|
192
|
+
clearInterval(this.#intervalId);
|
|
193
|
+
this.#intervalId = null;
|
|
194
|
+
}
|
|
195
|
+
globalThis.removeEventListener?.("beforeunload", this.#onBeforeUnload);
|
|
196
|
+
globalThis.removeEventListener?.(
|
|
197
|
+
"visibilitychange",
|
|
198
|
+
this.#onVisibilityChange,
|
|
199
|
+
);
|
|
200
|
+
void this.flush();
|
|
201
|
+
}
|
|
202
|
+
}
|
|
@@ -0,0 +1,653 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
CollectionUtils,
|
|
3
|
+
ReceiveSyncDurableOp,
|
|
4
|
+
SyncMessage,
|
|
5
|
+
} from "./sync-types";
|
|
6
|
+
import { DeferredWriteQueue } from "./deferred-write-queue";
|
|
7
|
+
import { exhaustiveGuard } from "@firtoz/maybe-error";
|
|
8
|
+
import type { StandardSchemaV1 } from "@standard-schema/spec";
|
|
9
|
+
import type {
|
|
10
|
+
CollectionConfig,
|
|
11
|
+
InferSchemaOutput,
|
|
12
|
+
SyncConfig,
|
|
13
|
+
SyncConfigRes,
|
|
14
|
+
SyncMode,
|
|
15
|
+
LoadSubsetOptions,
|
|
16
|
+
} from "@tanstack/db";
|
|
17
|
+
import { DeduplicatedLoadSubset } from "@tanstack/db";
|
|
18
|
+
|
|
19
|
+
// WORKAROUND: DeduplicatedLoadSubset has a bug where toggling queries (e.g., isNull/isNotNull)
|
|
20
|
+
// creates invalid expressions like not(or(isNull(...), not(isNull(...))))
|
|
21
|
+
// See: https://github.com/TanStack/db/issues/828
|
|
22
|
+
// TODO: Re-enable once the bug is fixed
|
|
23
|
+
export const USE_DEDUPE = false as boolean;
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Base configuration for sync lifecycle management (generic, no Drizzle dependency).
|
|
27
|
+
*/
|
|
28
|
+
export interface GenericBaseSyncConfig<TItem extends object = object> {
|
|
29
|
+
readyPromise: Promise<void>;
|
|
30
|
+
syncMode?: SyncMode;
|
|
31
|
+
debug?: boolean;
|
|
32
|
+
/**
|
|
33
|
+
* Row key for durable storage when applying {@link CollectionUtils.receiveSync} updates.
|
|
34
|
+
* If omitted, `id` on the item (string or number) is used.
|
|
35
|
+
*/
|
|
36
|
+
getSyncPersistKey?: (item: TItem) => string;
|
|
37
|
+
/**
|
|
38
|
+
* When set, local `onInsert` / `onUpdate` / `onDelete` confirm TanStack sync state immediately
|
|
39
|
+
* and enqueue durable backend writes (coalesced, flushed on an interval). `receiveSync`,
|
|
40
|
+
* `loadSubset`, and `truncate` flush the queue first so reads stay consistent.
|
|
41
|
+
*/
|
|
42
|
+
deferLocalPersistence?: boolean | { flushIntervalMs?: number };
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* Backend-specific implementations required for sync (generic, no Drizzle dependency).
|
|
47
|
+
*/
|
|
48
|
+
export interface GenericSyncBackend<TItem extends object> {
|
|
49
|
+
initialLoad: () => Promise<Array<TItem>>;
|
|
50
|
+
loadSubset: (options: LoadSubsetOptions) => Promise<Array<TItem>>;
|
|
51
|
+
handleInsert: (items: Array<TItem>) => Promise<Array<TItem>>;
|
|
52
|
+
handleUpdate: (
|
|
53
|
+
mutations: Array<{
|
|
54
|
+
key: string;
|
|
55
|
+
changes: Partial<TItem>;
|
|
56
|
+
original: TItem;
|
|
57
|
+
}>,
|
|
58
|
+
) => Promise<Array<TItem>>;
|
|
59
|
+
handleDelete: (
|
|
60
|
+
mutations: Array<{
|
|
61
|
+
key: string;
|
|
62
|
+
modified: TItem;
|
|
63
|
+
original: TItem;
|
|
64
|
+
}>,
|
|
65
|
+
) => Promise<void>;
|
|
66
|
+
handleTruncate?: () => Promise<void>;
|
|
67
|
+
/**
|
|
68
|
+
* When set, {@link CollectionUtils.receiveSync} persists an entire message batch with one call
|
|
69
|
+
* (e.g. one SQLite transaction) instead of awaiting {@link handleInsert}/handleUpdate per
|
|
70
|
+
* message. TanStack `syncWrite`/`syncTruncate` still run once per message in order.
|
|
71
|
+
*/
|
|
72
|
+
applyReceiveSyncDurableWrites?: (
|
|
73
|
+
ops: ReceiveSyncDurableOp<TItem>[],
|
|
74
|
+
) => Promise<void>;
|
|
75
|
+
/**
|
|
76
|
+
* Optional batch upsert for deferred local persistence flushes (e.g. IndexedDB `put` in one tx).
|
|
77
|
+
*/
|
|
78
|
+
handleBatchPut?: (items: Array<TItem>) => Promise<void>;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
/**
|
|
82
|
+
* Return type for createGenericSyncFunction.
|
|
83
|
+
*/
|
|
84
|
+
export type GenericSyncFunctionResult<TItem extends object> = {
|
|
85
|
+
sync: SyncConfig<TItem, string>["sync"];
|
|
86
|
+
onInsert: CollectionConfig<
|
|
87
|
+
TItem,
|
|
88
|
+
string,
|
|
89
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
90
|
+
any
|
|
91
|
+
>["onInsert"];
|
|
92
|
+
onUpdate: CollectionConfig<
|
|
93
|
+
TItem,
|
|
94
|
+
string,
|
|
95
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
96
|
+
any
|
|
97
|
+
>["onUpdate"];
|
|
98
|
+
onDelete: CollectionConfig<
|
|
99
|
+
TItem,
|
|
100
|
+
string,
|
|
101
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
102
|
+
any
|
|
103
|
+
>["onDelete"];
|
|
104
|
+
utils: CollectionUtils<TItem>;
|
|
105
|
+
};
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Creates the sync function with common lifecycle management.
|
|
109
|
+
* Generic version -- no Drizzle dependency.
|
|
110
|
+
*/
|
|
111
|
+
export function createGenericSyncFunction<TItem extends object>(
|
|
112
|
+
config: GenericBaseSyncConfig<TItem>,
|
|
113
|
+
backend: GenericSyncBackend<TItem>,
|
|
114
|
+
): GenericSyncFunctionResult<TItem> {
|
|
115
|
+
type CollectionType = CollectionConfig<
|
|
116
|
+
TItem,
|
|
117
|
+
string,
|
|
118
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
119
|
+
any
|
|
120
|
+
>;
|
|
121
|
+
|
|
122
|
+
let insertListener: CollectionType["onInsert"];
|
|
123
|
+
let updateListener: CollectionType["onUpdate"];
|
|
124
|
+
let deleteListener: CollectionType["onDelete"];
|
|
125
|
+
|
|
126
|
+
let syncBegin: (() => void) | null = null;
|
|
127
|
+
let syncWrite:
|
|
128
|
+
| ((op: { type: "insert" | "update" | "delete"; value: TItem }) => void)
|
|
129
|
+
| null = null;
|
|
130
|
+
let syncCommit: (() => void) | null = null;
|
|
131
|
+
let syncTruncate: (() => void) | null = null;
|
|
132
|
+
/** Resolves when eager `initialSync` has finished (or immediately in on-demand mode). Used so `receiveSync` cannot interleave with initial inserts. */
|
|
133
|
+
let initialSyncDone: Promise<void> | null = null;
|
|
134
|
+
/**
|
|
135
|
+
* TanStack DB allows only one pending sync transaction per collection. Every path that calls
|
|
136
|
+
* `begin`/`commit` — `initialSync`, `loadSubset`, `onInsert`/`onUpdate`/`onDelete`, `receiveSync`,
|
|
137
|
+
* and `truncate` — must run through this queue so async backends (e.g. SQLite WASM) cannot
|
|
138
|
+
* leave a transaction open across an `await` while another path starts a second transaction.
|
|
139
|
+
*/
|
|
140
|
+
let syncLayerSerial: Promise<void> = Promise.resolve();
|
|
141
|
+
|
|
142
|
+
const enqueueSyncLayer = (run: () => void | Promise<void>): Promise<void> => {
|
|
143
|
+
const next = syncLayerSerial.catch(() => {}).then(run);
|
|
144
|
+
syncLayerSerial = next;
|
|
145
|
+
return next;
|
|
146
|
+
};
|
|
147
|
+
|
|
148
|
+
function resolveDeferLocalPersistence(
|
|
149
|
+
opts: GenericBaseSyncConfig<TItem>["deferLocalPersistence"],
|
|
150
|
+
): { enabled: boolean; flushIntervalMs: number } {
|
|
151
|
+
if (opts === true) return { enabled: true, flushIntervalMs: 100 };
|
|
152
|
+
if (typeof opts === "object" && opts !== null) {
|
|
153
|
+
return { enabled: true, flushIntervalMs: opts.flushIntervalMs ?? 100 };
|
|
154
|
+
}
|
|
155
|
+
return { enabled: false, flushIntervalMs: 100 };
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
const deferOpts = resolveDeferLocalPersistence(config.deferLocalPersistence);
|
|
159
|
+
|
|
160
|
+
const resolveDeferredPersistKey = (item: TItem): string => {
|
|
161
|
+
if (config.getSyncPersistKey !== undefined) {
|
|
162
|
+
return config.getSyncPersistKey(item);
|
|
163
|
+
}
|
|
164
|
+
if (item !== null && typeof item === "object" && "id" in item) {
|
|
165
|
+
const id = (item as { id: unknown }).id;
|
|
166
|
+
if (typeof id === "string" || typeof id === "number") {
|
|
167
|
+
return String(id);
|
|
168
|
+
}
|
|
169
|
+
}
|
|
170
|
+
throw new Error(
|
|
171
|
+
"[deferLocalPersistence] Persist key missing: set GenericBaseSyncConfig.getSyncPersistKey or use items with string/number `id`",
|
|
172
|
+
);
|
|
173
|
+
};
|
|
174
|
+
|
|
175
|
+
let deferQueue: DeferredWriteQueue<TItem> | null = null;
|
|
176
|
+
if (deferOpts.enabled) {
|
|
177
|
+
deferQueue = new DeferredWriteQueue({
|
|
178
|
+
backend,
|
|
179
|
+
getPersistKey: resolveDeferredPersistKey,
|
|
180
|
+
flushIntervalMs: deferOpts.flushIntervalMs,
|
|
181
|
+
});
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
const syncFn: SyncConfig<TItem, string>["sync"] = (params) => {
|
|
185
|
+
const { begin, write, commit, markReady, truncate } = params;
|
|
186
|
+
|
|
187
|
+
syncBegin = begin;
|
|
188
|
+
syncWrite = write;
|
|
189
|
+
syncCommit = commit;
|
|
190
|
+
syncTruncate = truncate;
|
|
191
|
+
|
|
192
|
+
const initialSync = async () => {
|
|
193
|
+
await enqueueSyncLayer(async () => {
|
|
194
|
+
await config.readyPromise;
|
|
195
|
+
|
|
196
|
+
try {
|
|
197
|
+
const items = await backend.initialLoad();
|
|
198
|
+
|
|
199
|
+
begin();
|
|
200
|
+
|
|
201
|
+
for (const item of items) {
|
|
202
|
+
write({
|
|
203
|
+
type: "insert",
|
|
204
|
+
value: item,
|
|
205
|
+
});
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
commit();
|
|
209
|
+
} finally {
|
|
210
|
+
markReady();
|
|
211
|
+
}
|
|
212
|
+
});
|
|
213
|
+
};
|
|
214
|
+
|
|
215
|
+
if (config.syncMode === "eager" || !config.syncMode) {
|
|
216
|
+
initialSyncDone = initialSync();
|
|
217
|
+
} else {
|
|
218
|
+
markReady();
|
|
219
|
+
initialSyncDone = Promise.resolve();
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
insertListener = async (params) => {
|
|
223
|
+
await enqueueSyncLayer(async () => {
|
|
224
|
+
const items = params.transaction.mutations.map((m) => m.modified);
|
|
225
|
+
if (deferQueue !== null) {
|
|
226
|
+
begin();
|
|
227
|
+
for (const item of items) {
|
|
228
|
+
write({
|
|
229
|
+
type: "insert",
|
|
230
|
+
value: item,
|
|
231
|
+
});
|
|
232
|
+
}
|
|
233
|
+
commit();
|
|
234
|
+
deferQueue.enqueueInsert(items);
|
|
235
|
+
return;
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
const results = await backend.handleInsert(items);
|
|
239
|
+
|
|
240
|
+
begin();
|
|
241
|
+
for (const result of results) {
|
|
242
|
+
write({
|
|
243
|
+
type: "insert",
|
|
244
|
+
value: result,
|
|
245
|
+
});
|
|
246
|
+
}
|
|
247
|
+
commit();
|
|
248
|
+
});
|
|
249
|
+
};
|
|
250
|
+
|
|
251
|
+
updateListener = async (params) => {
|
|
252
|
+
await enqueueSyncLayer(async () => {
|
|
253
|
+
if (deferQueue !== null) {
|
|
254
|
+
const mutations = params.transaction.mutations.map((m) => ({
|
|
255
|
+
key: String(m.key),
|
|
256
|
+
changes: m.changes as Partial<TItem>,
|
|
257
|
+
original: m.original as TItem,
|
|
258
|
+
}));
|
|
259
|
+
const results = mutations.map(
|
|
260
|
+
(m) => ({ ...m.original, ...m.changes }) as TItem,
|
|
261
|
+
);
|
|
262
|
+
begin();
|
|
263
|
+
for (const result of results) {
|
|
264
|
+
write({
|
|
265
|
+
type: "update",
|
|
266
|
+
value: result,
|
|
267
|
+
});
|
|
268
|
+
}
|
|
269
|
+
commit();
|
|
270
|
+
deferQueue.enqueueUpdate(mutations);
|
|
271
|
+
return;
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
const results = await backend.handleUpdate(
|
|
275
|
+
params.transaction.mutations,
|
|
276
|
+
);
|
|
277
|
+
|
|
278
|
+
begin();
|
|
279
|
+
for (const result of results) {
|
|
280
|
+
write({
|
|
281
|
+
type: "update",
|
|
282
|
+
value: result,
|
|
283
|
+
});
|
|
284
|
+
}
|
|
285
|
+
commit();
|
|
286
|
+
});
|
|
287
|
+
};
|
|
288
|
+
|
|
289
|
+
deleteListener = async (params) => {
|
|
290
|
+
await enqueueSyncLayer(async () => {
|
|
291
|
+
if (deferQueue !== null) {
|
|
292
|
+
const mutations = params.transaction.mutations.map((m) => ({
|
|
293
|
+
key: String(m.key),
|
|
294
|
+
modified: m.modified as TItem,
|
|
295
|
+
original: m.original as TItem,
|
|
296
|
+
}));
|
|
297
|
+
begin();
|
|
298
|
+
for (const item of mutations) {
|
|
299
|
+
write({
|
|
300
|
+
type: "delete",
|
|
301
|
+
value: item.modified,
|
|
302
|
+
});
|
|
303
|
+
}
|
|
304
|
+
commit();
|
|
305
|
+
deferQueue.enqueueDelete(mutations);
|
|
306
|
+
return;
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
await backend.handleDelete(params.transaction.mutations);
|
|
310
|
+
|
|
311
|
+
begin();
|
|
312
|
+
for (const item of params.transaction.mutations) {
|
|
313
|
+
write({
|
|
314
|
+
type: "delete",
|
|
315
|
+
value: item.modified,
|
|
316
|
+
});
|
|
317
|
+
}
|
|
318
|
+
commit();
|
|
319
|
+
});
|
|
320
|
+
};
|
|
321
|
+
|
|
322
|
+
const loadSubset = async (options: LoadSubsetOptions) => {
|
|
323
|
+
await enqueueSyncLayer(async () => {
|
|
324
|
+
await config.readyPromise;
|
|
325
|
+
|
|
326
|
+
if (deferQueue !== null) {
|
|
327
|
+
await deferQueue.flush();
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
const items = await backend.loadSubset(options);
|
|
331
|
+
|
|
332
|
+
begin();
|
|
333
|
+
|
|
334
|
+
for (const item of items) {
|
|
335
|
+
write({
|
|
336
|
+
type: "insert",
|
|
337
|
+
value: item,
|
|
338
|
+
});
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
commit();
|
|
342
|
+
});
|
|
343
|
+
};
|
|
344
|
+
|
|
345
|
+
let loadSubsetDedupe: DeduplicatedLoadSubset | null = null;
|
|
346
|
+
if (USE_DEDUPE) {
|
|
347
|
+
loadSubsetDedupe = new DeduplicatedLoadSubset({
|
|
348
|
+
loadSubset,
|
|
349
|
+
});
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
return {
|
|
353
|
+
cleanup: () => {
|
|
354
|
+
deferQueue?.dispose();
|
|
355
|
+
deferQueue = null;
|
|
356
|
+
insertListener = undefined;
|
|
357
|
+
updateListener = undefined;
|
|
358
|
+
deleteListener = undefined;
|
|
359
|
+
loadSubsetDedupe?.reset();
|
|
360
|
+
},
|
|
361
|
+
loadSubset: loadSubsetDedupe?.loadSubset ?? loadSubset,
|
|
362
|
+
} satisfies SyncConfigRes;
|
|
363
|
+
};
|
|
364
|
+
|
|
365
|
+
const resolveReceiveSyncPersistKey = (item: TItem): string => {
|
|
366
|
+
if (config.getSyncPersistKey !== undefined) {
|
|
367
|
+
return config.getSyncPersistKey(item);
|
|
368
|
+
}
|
|
369
|
+
if (item !== null && typeof item === "object" && "id" in item) {
|
|
370
|
+
const id = (item as { id: unknown }).id;
|
|
371
|
+
if (typeof id === "string" || typeof id === "number") {
|
|
372
|
+
return String(id);
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
throw new Error(
|
|
376
|
+
"[receiveSync] Persist key missing: set GenericBaseSyncConfig.getSyncPersistKey or use items with string/number `id`",
|
|
377
|
+
);
|
|
378
|
+
};
|
|
379
|
+
|
|
380
|
+
const shallowRecordDiff = (previous: TItem, next: TItem): Partial<TItem> => {
|
|
381
|
+
const out: Partial<TItem> = {};
|
|
382
|
+
if (
|
|
383
|
+
previous !== null &&
|
|
384
|
+
typeof previous === "object" &&
|
|
385
|
+
next !== null &&
|
|
386
|
+
typeof next === "object"
|
|
387
|
+
) {
|
|
388
|
+
const prevRec = previous as Record<string, unknown>;
|
|
389
|
+
const nextRec = next as Record<string, unknown>;
|
|
390
|
+
for (const k of Object.keys(nextRec)) {
|
|
391
|
+
if (prevRec[k] !== nextRec[k]) {
|
|
392
|
+
(out as Record<string, unknown>)[k] = nextRec[k];
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
return out;
|
|
397
|
+
};
|
|
398
|
+
|
|
399
|
+
const toReceiveSyncDurableOps = (
|
|
400
|
+
messages: SyncMessage<TItem>[],
|
|
401
|
+
): ReceiveSyncDurableOp<TItem>[] => {
|
|
402
|
+
const out: ReceiveSyncDurableOp<TItem>[] = [];
|
|
403
|
+
for (const msg of messages) {
|
|
404
|
+
switch (msg.type) {
|
|
405
|
+
case "insert":
|
|
406
|
+
out.push({ type: "insert", value: msg.value });
|
|
407
|
+
break;
|
|
408
|
+
case "update":
|
|
409
|
+
out.push({
|
|
410
|
+
type: "update",
|
|
411
|
+
key: resolveReceiveSyncPersistKey(msg.value),
|
|
412
|
+
changes: shallowRecordDiff(
|
|
413
|
+
msg.previousValue,
|
|
414
|
+
msg.value,
|
|
415
|
+
) as Partial<TItem>,
|
|
416
|
+
original: msg.previousValue,
|
|
417
|
+
});
|
|
418
|
+
break;
|
|
419
|
+
case "delete":
|
|
420
|
+
out.push({ type: "delete", key: String(msg.key) });
|
|
421
|
+
break;
|
|
422
|
+
case "truncate":
|
|
423
|
+
out.push({ type: "truncate" });
|
|
424
|
+
break;
|
|
425
|
+
default:
|
|
426
|
+
exhaustiveGuard(msg);
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
return out;
|
|
430
|
+
};
|
|
431
|
+
|
|
432
|
+
const receiveSync = async (messages: SyncMessage<TItem>[]) => {
|
|
433
|
+
if (messages.length === 0) return;
|
|
434
|
+
|
|
435
|
+
await enqueueSyncLayer(async () => {
|
|
436
|
+
if (initialSyncDone) {
|
|
437
|
+
await initialSyncDone;
|
|
438
|
+
}
|
|
439
|
+
if (!syncBegin || !syncWrite || !syncCommit || !syncTruncate) {
|
|
440
|
+
if (config.debug) {
|
|
441
|
+
console.warn(
|
|
442
|
+
"[receiveSync] Sync functions not initialized yet - messages will be dropped",
|
|
443
|
+
messages.length,
|
|
444
|
+
);
|
|
445
|
+
}
|
|
446
|
+
return;
|
|
447
|
+
}
|
|
448
|
+
if (deferQueue !== null) {
|
|
449
|
+
await deferQueue.flush();
|
|
450
|
+
}
|
|
451
|
+
syncBegin();
|
|
452
|
+
|
|
453
|
+
try {
|
|
454
|
+
const applyBatch = backend.applyReceiveSyncDurableWrites;
|
|
455
|
+
if (applyBatch !== undefined) {
|
|
456
|
+
await applyBatch(toReceiveSyncDurableOps(messages));
|
|
457
|
+
for (const msg of messages) {
|
|
458
|
+
switch (msg.type) {
|
|
459
|
+
case "insert":
|
|
460
|
+
syncWrite({ type: "insert", value: msg.value });
|
|
461
|
+
break;
|
|
462
|
+
case "update":
|
|
463
|
+
syncWrite({ type: "update", value: msg.value });
|
|
464
|
+
break;
|
|
465
|
+
case "delete":
|
|
466
|
+
syncWrite({
|
|
467
|
+
type: "delete",
|
|
468
|
+
value: { id: msg.key } as TItem,
|
|
469
|
+
});
|
|
470
|
+
break;
|
|
471
|
+
case "truncate":
|
|
472
|
+
syncTruncate();
|
|
473
|
+
break;
|
|
474
|
+
default:
|
|
475
|
+
exhaustiveGuard(msg);
|
|
476
|
+
}
|
|
477
|
+
}
|
|
478
|
+
} else {
|
|
479
|
+
for (const msg of messages) {
|
|
480
|
+
switch (msg.type) {
|
|
481
|
+
case "insert":
|
|
482
|
+
await backend.handleInsert([msg.value]);
|
|
483
|
+
syncWrite({ type: "insert", value: msg.value });
|
|
484
|
+
break;
|
|
485
|
+
case "update": {
|
|
486
|
+
const key = resolveReceiveSyncPersistKey(msg.value);
|
|
487
|
+
await backend.handleUpdate([
|
|
488
|
+
{
|
|
489
|
+
key,
|
|
490
|
+
changes: shallowRecordDiff(
|
|
491
|
+
msg.previousValue,
|
|
492
|
+
msg.value,
|
|
493
|
+
) as Partial<TItem>,
|
|
494
|
+
original: msg.previousValue,
|
|
495
|
+
},
|
|
496
|
+
]);
|
|
497
|
+
syncWrite({ type: "update", value: msg.value });
|
|
498
|
+
break;
|
|
499
|
+
}
|
|
500
|
+
case "delete":
|
|
501
|
+
await backend.handleDelete([
|
|
502
|
+
{
|
|
503
|
+
key: String(msg.key),
|
|
504
|
+
modified: { id: msg.key } as TItem,
|
|
505
|
+
original: { id: msg.key } as TItem,
|
|
506
|
+
},
|
|
507
|
+
]);
|
|
508
|
+
syncWrite({
|
|
509
|
+
type: "delete",
|
|
510
|
+
value: { id: msg.key } as TItem,
|
|
511
|
+
});
|
|
512
|
+
break;
|
|
513
|
+
case "truncate":
|
|
514
|
+
if (backend.handleTruncate) {
|
|
515
|
+
await backend.handleTruncate();
|
|
516
|
+
}
|
|
517
|
+
syncTruncate();
|
|
518
|
+
break;
|
|
519
|
+
default:
|
|
520
|
+
exhaustiveGuard(msg);
|
|
521
|
+
}
|
|
522
|
+
}
|
|
523
|
+
}
|
|
524
|
+
} catch (err) {
|
|
525
|
+
console.error(
|
|
526
|
+
"[receiveSync] error during sync writes, committing partial batch to avoid leaving transaction open",
|
|
527
|
+
err,
|
|
528
|
+
);
|
|
529
|
+
}
|
|
530
|
+
syncCommit();
|
|
531
|
+
});
|
|
532
|
+
};
|
|
533
|
+
|
|
534
|
+
const utils: CollectionUtils<TItem> = {
|
|
535
|
+
truncate: async () => {
|
|
536
|
+
const handleTruncate = backend.handleTruncate;
|
|
537
|
+
if (!handleTruncate) {
|
|
538
|
+
throw new Error("Truncate not supported by this backend");
|
|
539
|
+
}
|
|
540
|
+
if (!syncBegin || !syncTruncate || !syncCommit) {
|
|
541
|
+
throw new Error(
|
|
542
|
+
"Sync functions not initialized - sync function may not have been called yet",
|
|
543
|
+
);
|
|
544
|
+
}
|
|
545
|
+
await enqueueSyncLayer(async () => {
|
|
546
|
+
if (deferQueue !== null) {
|
|
547
|
+
await deferQueue.flush();
|
|
548
|
+
}
|
|
549
|
+
await handleTruncate();
|
|
550
|
+
const begin = syncBegin;
|
|
551
|
+
const trunc = syncTruncate;
|
|
552
|
+
const commit = syncCommit;
|
|
553
|
+
if (!begin || !trunc || !commit) {
|
|
554
|
+
throw new Error(
|
|
555
|
+
"Sync functions not initialized - sync function may not have been called yet",
|
|
556
|
+
);
|
|
557
|
+
}
|
|
558
|
+
begin();
|
|
559
|
+
trunc();
|
|
560
|
+
commit();
|
|
561
|
+
});
|
|
562
|
+
},
|
|
563
|
+
receiveSync,
|
|
564
|
+
};
|
|
565
|
+
|
|
566
|
+
return {
|
|
567
|
+
sync: syncFn,
|
|
568
|
+
onInsert: async (params) => {
|
|
569
|
+
if (!insertListener) {
|
|
570
|
+
throw new Error(
|
|
571
|
+
"insertListener not initialized - sync function may not have been called yet",
|
|
572
|
+
);
|
|
573
|
+
}
|
|
574
|
+
return insertListener(params);
|
|
575
|
+
},
|
|
576
|
+
onUpdate: async (params) => {
|
|
577
|
+
if (!updateListener) {
|
|
578
|
+
throw new Error(
|
|
579
|
+
"updateListener not initialized - sync function may not have been called yet",
|
|
580
|
+
);
|
|
581
|
+
}
|
|
582
|
+
return updateListener(params);
|
|
583
|
+
},
|
|
584
|
+
onDelete: async (params) => {
|
|
585
|
+
if (!deleteListener) {
|
|
586
|
+
throw new Error(
|
|
587
|
+
"deleteListener not initialized - sync function may not have been called yet",
|
|
588
|
+
);
|
|
589
|
+
}
|
|
590
|
+
return deleteListener(params);
|
|
591
|
+
},
|
|
592
|
+
utils,
|
|
593
|
+
};
|
|
594
|
+
}
|
|
595
|
+
|
|
596
|
+
/**
|
|
597
|
+
* Generic collection config factory.
|
|
598
|
+
* Combines schema, sync, and event handlers into a collection config.
|
|
599
|
+
* No Drizzle dependency -- uses StandardSchemaV1 directly.
|
|
600
|
+
*/
|
|
601
|
+
export function createGenericCollectionConfig<
|
|
602
|
+
TItem extends object,
|
|
603
|
+
TSchema extends StandardSchemaV1,
|
|
604
|
+
>(config: {
|
|
605
|
+
schema: TSchema;
|
|
606
|
+
getKey: (item: TItem) => string;
|
|
607
|
+
syncResult: GenericSyncFunctionResult<TItem>;
|
|
608
|
+
onInsert?: CollectionConfig<
|
|
609
|
+
TItem,
|
|
610
|
+
string,
|
|
611
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
612
|
+
any
|
|
613
|
+
>["onInsert"];
|
|
614
|
+
onUpdate?: CollectionConfig<
|
|
615
|
+
TItem,
|
|
616
|
+
string,
|
|
617
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
618
|
+
any
|
|
619
|
+
>["onUpdate"];
|
|
620
|
+
onDelete?: CollectionConfig<
|
|
621
|
+
TItem,
|
|
622
|
+
string,
|
|
623
|
+
// biome-ignore lint/suspicious/noExplicitAny: Schema type parameter needs to be flexible
|
|
624
|
+
any
|
|
625
|
+
>["onDelete"];
|
|
626
|
+
syncMode?: SyncMode;
|
|
627
|
+
}): Omit<
|
|
628
|
+
CollectionConfig<
|
|
629
|
+
TItem,
|
|
630
|
+
string,
|
|
631
|
+
TSchema,
|
|
632
|
+
CollectionUtils<InferSchemaOutput<TSchema>>
|
|
633
|
+
>,
|
|
634
|
+
"utils"
|
|
635
|
+
> & {
|
|
636
|
+
schema: TSchema;
|
|
637
|
+
utils: CollectionUtils<InferSchemaOutput<TSchema>>;
|
|
638
|
+
} {
|
|
639
|
+
return {
|
|
640
|
+
schema: config.schema,
|
|
641
|
+
getKey: config.getKey,
|
|
642
|
+
sync: {
|
|
643
|
+
sync: config.syncResult.sync,
|
|
644
|
+
},
|
|
645
|
+
onInsert: config.onInsert ?? config.syncResult.onInsert,
|
|
646
|
+
onUpdate: config.onUpdate ?? config.syncResult.onUpdate,
|
|
647
|
+
onDelete: config.onDelete ?? config.syncResult.onDelete,
|
|
648
|
+
syncMode: config.syncMode,
|
|
649
|
+
utils: config.syncResult.utils as CollectionUtils<
|
|
650
|
+
InferSchemaOutput<TSchema>
|
|
651
|
+
>,
|
|
652
|
+
};
|
|
653
|
+
}
|
package/src/index.ts
CHANGED
|
@@ -2,6 +2,7 @@ export type {
|
|
|
2
2
|
CollectionUtils,
|
|
3
3
|
ExternalSyncEvent,
|
|
4
4
|
ExternalSyncHandler,
|
|
5
|
+
ReceiveSyncDurableOp,
|
|
5
6
|
SyncMessage,
|
|
6
7
|
} from "./sync-types";
|
|
7
8
|
export {
|
|
@@ -9,3 +10,19 @@ export {
|
|
|
9
10
|
memoryCollectionOptions,
|
|
10
11
|
type MemoryCollection,
|
|
11
12
|
} from "./memoryCollection";
|
|
13
|
+
|
|
14
|
+
export { evaluateExpression, getExpressionValue } from "./ir-evaluator";
|
|
15
|
+
|
|
16
|
+
export {
|
|
17
|
+
USE_DEDUPE,
|
|
18
|
+
createGenericSyncFunction,
|
|
19
|
+
createGenericCollectionConfig,
|
|
20
|
+
type GenericBaseSyncConfig,
|
|
21
|
+
type GenericSyncBackend,
|
|
22
|
+
type GenericSyncFunctionResult,
|
|
23
|
+
} from "./generic-sync";
|
|
24
|
+
export {
|
|
25
|
+
DeferredWriteQueue,
|
|
26
|
+
type DeferredDeleteMutation,
|
|
27
|
+
type DeferredUpdateMutation,
|
|
28
|
+
} from "./deferred-write-queue";
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import type { IR } from "@tanstack/db";
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Evaluates a TanStack DB IR expression against a plain object item.
|
|
5
|
+
* @internal Exported for testing and reuse by collection backends
|
|
6
|
+
*/
|
|
7
|
+
export function evaluateExpression(
|
|
8
|
+
expression: IR.BasicExpression,
|
|
9
|
+
item: Record<string, unknown>,
|
|
10
|
+
): boolean {
|
|
11
|
+
switch (expression.type) {
|
|
12
|
+
case "ref": {
|
|
13
|
+
const propRef = expression;
|
|
14
|
+
const columnName = propRef.path[propRef.path.length - 1];
|
|
15
|
+
return item[columnName as string] !== undefined;
|
|
16
|
+
}
|
|
17
|
+
case "val": {
|
|
18
|
+
const value = expression;
|
|
19
|
+
return !!value.value;
|
|
20
|
+
}
|
|
21
|
+
case "func": {
|
|
22
|
+
const func = expression;
|
|
23
|
+
|
|
24
|
+
switch (func.name) {
|
|
25
|
+
case "eq": {
|
|
26
|
+
const left = getExpressionValue(func.args[0], item);
|
|
27
|
+
const right = getExpressionValue(func.args[1], item);
|
|
28
|
+
return left === right;
|
|
29
|
+
}
|
|
30
|
+
case "ne": {
|
|
31
|
+
const left = getExpressionValue(func.args[0], item);
|
|
32
|
+
const right = getExpressionValue(func.args[1], item);
|
|
33
|
+
return left !== right;
|
|
34
|
+
}
|
|
35
|
+
case "gt": {
|
|
36
|
+
const left = getExpressionValue(func.args[0], item);
|
|
37
|
+
const right = getExpressionValue(func.args[1], item);
|
|
38
|
+
return left > right;
|
|
39
|
+
}
|
|
40
|
+
case "gte": {
|
|
41
|
+
const left = getExpressionValue(func.args[0], item);
|
|
42
|
+
const right = getExpressionValue(func.args[1], item);
|
|
43
|
+
return left >= right;
|
|
44
|
+
}
|
|
45
|
+
case "lt": {
|
|
46
|
+
const left = getExpressionValue(func.args[0], item);
|
|
47
|
+
const right = getExpressionValue(func.args[1], item);
|
|
48
|
+
return left < right;
|
|
49
|
+
}
|
|
50
|
+
case "lte": {
|
|
51
|
+
const left = getExpressionValue(func.args[0], item);
|
|
52
|
+
const right = getExpressionValue(func.args[1], item);
|
|
53
|
+
return left <= right;
|
|
54
|
+
}
|
|
55
|
+
case "and": {
|
|
56
|
+
return func.args.every((arg) => evaluateExpression(arg, item));
|
|
57
|
+
}
|
|
58
|
+
case "or": {
|
|
59
|
+
return func.args.some((arg) => evaluateExpression(arg, item));
|
|
60
|
+
}
|
|
61
|
+
case "not": {
|
|
62
|
+
return !evaluateExpression(func.args[0], item);
|
|
63
|
+
}
|
|
64
|
+
case "isNull": {
|
|
65
|
+
const value = getExpressionValue(func.args[0], item);
|
|
66
|
+
return value === null || value === undefined;
|
|
67
|
+
}
|
|
68
|
+
case "isNotNull": {
|
|
69
|
+
const value = getExpressionValue(func.args[0], item);
|
|
70
|
+
return value !== null && value !== undefined;
|
|
71
|
+
}
|
|
72
|
+
case "like": {
|
|
73
|
+
const left = String(getExpressionValue(func.args[0], item));
|
|
74
|
+
const right = String(getExpressionValue(func.args[1], item));
|
|
75
|
+
const pattern = right.replace(/%/g, ".*").replace(/_/g, ".");
|
|
76
|
+
return new RegExp(`^${pattern}$`).test(left);
|
|
77
|
+
}
|
|
78
|
+
case "ilike": {
|
|
79
|
+
const left = String(getExpressionValue(func.args[0], item));
|
|
80
|
+
const right = String(getExpressionValue(func.args[1], item));
|
|
81
|
+
const pattern = right.replace(/%/g, ".*").replace(/_/g, ".");
|
|
82
|
+
return new RegExp(`^${pattern}$`, "i").test(left);
|
|
83
|
+
}
|
|
84
|
+
case "in": {
|
|
85
|
+
const left = getExpressionValue(func.args[0], item);
|
|
86
|
+
const right = getExpressionValue(func.args[1], item);
|
|
87
|
+
return Array.isArray(right) && right.includes(left);
|
|
88
|
+
}
|
|
89
|
+
case "isUndefined": {
|
|
90
|
+
const value = getExpressionValue(func.args[0], item);
|
|
91
|
+
return value === null || value === undefined;
|
|
92
|
+
}
|
|
93
|
+
default:
|
|
94
|
+
throw new Error(`Unsupported function: ${func.name}`);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
default: {
|
|
98
|
+
const _ex: never = expression;
|
|
99
|
+
void _ex;
|
|
100
|
+
throw new Error(
|
|
101
|
+
`Unsupported expression type: ${(expression as { type: string }).type}`,
|
|
102
|
+
);
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Gets the value from an IR expression by resolving refs and vals.
|
|
109
|
+
* @internal Exported for testing and reuse by collection backends
|
|
110
|
+
*/
|
|
111
|
+
export function getExpressionValue(
|
|
112
|
+
expression: IR.BasicExpression,
|
|
113
|
+
item: Record<string, unknown>,
|
|
114
|
+
// biome-ignore lint/suspicious/noExplicitAny: We need any here for dynamic values
|
|
115
|
+
): any {
|
|
116
|
+
switch (expression.type) {
|
|
117
|
+
case "ref": {
|
|
118
|
+
const propRef = expression;
|
|
119
|
+
const columnName = propRef.path[propRef.path.length - 1];
|
|
120
|
+
return item[columnName as string];
|
|
121
|
+
}
|
|
122
|
+
case "val": {
|
|
123
|
+
const value = expression;
|
|
124
|
+
return value.value;
|
|
125
|
+
}
|
|
126
|
+
case "func":
|
|
127
|
+
throw new Error("Cannot get value from func expression");
|
|
128
|
+
default: {
|
|
129
|
+
const _ex: never = expression;
|
|
130
|
+
void _ex;
|
|
131
|
+
throw new Error(
|
|
132
|
+
`Cannot get value from expression type: ${(expression as { type: string }).type}`,
|
|
133
|
+
);
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
}
|
package/src/memoryCollection.ts
CHANGED
|
@@ -46,10 +46,27 @@ export function memoryCollectionOptions<TSchema extends StandardSchemaV1>(
|
|
|
46
46
|
type TItem = InferSchemaOutput<TSchema>;
|
|
47
47
|
type TKey = string | number;
|
|
48
48
|
let syncParams: Parameters<SyncConfig<TItem>["sync"]>[0] | null = null;
|
|
49
|
+
/** Batches from `receiveSync` that arrived before TanStack called `sync`. */
|
|
50
|
+
const pendingReceiveSyncBatches: SyncMessage<TItem, TKey>[][] = [];
|
|
51
|
+
/**
|
|
52
|
+
* One TanStack sync transaction at a time: `receiveSync`, local mutations, and `truncate` all
|
|
53
|
+
* call `begin`/`commit` — overlapping calls cause SyncTransactionAlreadyCommittedWriteError.
|
|
54
|
+
*/
|
|
55
|
+
let syncWriteChain: Promise<void> = Promise.resolve();
|
|
56
|
+
|
|
57
|
+
const enqueueSyncWrite = async (fn: () => void): Promise<void> => {
|
|
58
|
+
const next = syncWriteChain.catch(() => {}).then(fn);
|
|
59
|
+
syncWriteChain = next;
|
|
60
|
+
await next;
|
|
61
|
+
};
|
|
49
62
|
|
|
50
63
|
const sync: SyncConfig<TItem>["sync"] = (params) => {
|
|
51
64
|
syncParams = params;
|
|
52
65
|
params.markReady();
|
|
66
|
+
for (const batch of pendingReceiveSyncBatches) {
|
|
67
|
+
writeChanges(batch);
|
|
68
|
+
}
|
|
69
|
+
pendingReceiveSyncBatches.length = 0;
|
|
53
70
|
return () => {};
|
|
54
71
|
};
|
|
55
72
|
|
|
@@ -88,7 +105,9 @@ export function memoryCollectionOptions<TSchema extends StandardSchemaV1>(
|
|
|
88
105
|
for (const mutation of params.transaction.mutations) {
|
|
89
106
|
writes.push({ type: "insert", value: mutation.modified });
|
|
90
107
|
}
|
|
91
|
-
|
|
108
|
+
await enqueueSyncWrite(() => {
|
|
109
|
+
writeChanges(writes);
|
|
110
|
+
});
|
|
92
111
|
config.onBroadcast?.(writes);
|
|
93
112
|
};
|
|
94
113
|
|
|
@@ -101,7 +120,9 @@ export function memoryCollectionOptions<TSchema extends StandardSchemaV1>(
|
|
|
101
120
|
previousValue: mutation.original,
|
|
102
121
|
});
|
|
103
122
|
}
|
|
104
|
-
|
|
123
|
+
await enqueueSyncWrite(() => {
|
|
124
|
+
writeChanges(writes);
|
|
125
|
+
});
|
|
105
126
|
config.onBroadcast?.(writes);
|
|
106
127
|
};
|
|
107
128
|
|
|
@@ -110,22 +131,36 @@ export function memoryCollectionOptions<TSchema extends StandardSchemaV1>(
|
|
|
110
131
|
for (const mutation of params.transaction.mutations) {
|
|
111
132
|
writes.push({ type: "delete", key: mutation.key as TKey });
|
|
112
133
|
}
|
|
113
|
-
|
|
134
|
+
await enqueueSyncWrite(() => {
|
|
135
|
+
writeChanges(writes);
|
|
136
|
+
});
|
|
114
137
|
config.onBroadcast?.(writes);
|
|
115
138
|
};
|
|
116
139
|
|
|
117
140
|
const truncate = async () => {
|
|
118
141
|
if (!syncParams) {
|
|
119
|
-
|
|
142
|
+
// TanStack may not have invoked `sync` yet (e.g. first paint / effect). Nothing to clear.
|
|
143
|
+
pendingReceiveSyncBatches.length = 0;
|
|
144
|
+
return;
|
|
120
145
|
}
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
146
|
+
await enqueueSyncWrite(() => {
|
|
147
|
+
const p = syncParams;
|
|
148
|
+
if (!p) return;
|
|
149
|
+
p.begin();
|
|
150
|
+
p.truncate();
|
|
151
|
+
p.commit();
|
|
152
|
+
});
|
|
124
153
|
};
|
|
125
154
|
|
|
126
155
|
const receiveSync = async (messages: SyncMessage<TItem, TKey>[]) => {
|
|
127
156
|
if (messages.length === 0) return;
|
|
128
|
-
|
|
157
|
+
if (!syncParams) {
|
|
158
|
+
pendingReceiveSyncBatches.push(messages);
|
|
159
|
+
return;
|
|
160
|
+
}
|
|
161
|
+
await enqueueSyncWrite(() => {
|
|
162
|
+
writeChanges(messages);
|
|
163
|
+
});
|
|
129
164
|
};
|
|
130
165
|
|
|
131
166
|
return {
|
package/src/sync-types.ts
CHANGED
|
@@ -15,6 +15,22 @@ export type SyncMessage<
|
|
|
15
15
|
| { type: "delete"; key: TKey }
|
|
16
16
|
| { type: "truncate" };
|
|
17
17
|
|
|
18
|
+
/**
 * Normalized durable ops for a {@link SyncMessage} batch. SQLite-style backends can implement
 * `GenericSyncBackend.applyReceiveSyncDurableWrites` to persist the whole batch in one store
 * transaction instead of one transaction per message.
 */
export type ReceiveSyncDurableOp<TItem extends object> =
  // Persist a full new row.
  | { type: "insert"; value: TItem }
  | {
      type: "update";
      // Collection key of the row being updated.
      key: string;
      // Only the changed fields of the row.
      changes: Partial<TItem>;
      // NOTE(review): presumably the pre-update snapshot of the row — confirm against backend usage.
      original: TItem;
    }
  // Remove the row with this key.
  | { type: "delete"; key: string }
  // Drop every row in the collection.
  | { type: "truncate" };
|
|
33
|
+
|
|
18
34
|
/**
|
|
19
35
|
* External sync event (batched). Used internally by the sync layer.
|
|
20
36
|
*/
|