@loro-dev/flock-sqlite 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/index.ts ADDED
@@ -0,0 +1,1040 @@
1
+ import { openStore, type UniStoreConnection } from "@loro-dev/unisqlite";
2
+ import { computeDigest, type DigestRow } from "./digest";
3
+ import {
4
+ compareBytes,
5
+ decodeKeyParts,
6
+ encodeKeyParts,
7
+ keyToString,
8
+ prefixUpperBound,
9
+ } from "./key-encoding";
10
+ import type {
11
+ EntryClock,
12
+ Event,
13
+ EventBatch,
14
+ ExportBundle,
15
+ ExportHooks,
16
+ ExportOptions,
17
+ ExportPayload,
18
+ ExportRecord,
19
+ ImportDecision,
20
+ ImportHooks,
21
+ ImportOptions,
22
+ ImportReport,
23
+ KeyPart,
24
+ MetadataMap,
25
+ PutHooks,
26
+ PutWithMetaOptions,
27
+ EventListener,
28
+ ScanBound,
29
+ ScanOptions,
30
+ ScanRow,
31
+ Value,
32
+ VersionVector,
33
+ VersionVectorEntry,
34
+ } from "./types";
35
+
36
/** Row shape returned by clock-only queries over the kv table. */
type ClockRow = {
  physical: number; // HLC physical time (ms epoch)
  logical: number;  // HLC logical counter
  peer: string;     // writer peer id
};

/** Full row shape of the kv table: encoded key, JSON columns, and HLC clock columns. */
type KvRow = {
  key: Uint8Array;         // binary-encoded key parts (see encodeKeyParts)
  data: string | null;     // JSON-encoded value; null marks a tombstone (see exportInternal pruning)
  metadata: string | null; // JSON-encoded metadata map, if any
  physical: number;
  logical: number;
  peer: string;
};

/** Export queries select exactly the KvRow columns. */
type ExportQueryRow = KvRow;

/** Internal descriptor for a single write (local put/delete or imported entry). */
type PutOperation = {
  key: KeyPart[];
  payload: ExportPayload;
  now?: number;            // caller-supplied wall-clock override for clock allocation
  skipSameValue: boolean;  // when true, skip the write if data+metadata JSON are unchanged
  source: string;          // event source tag, e.g. "local" or "import"
  clock?: EntryClock;      // pre-assigned clock (imports); otherwise a new one is allocated
  // When present, applied events are collected here instead of being emitted immediately.
  eventSink?: Array<{ key: KeyPart[]; payload: ExportPayload; source: string }>;
};
62
+
63
// Shared encoder for UTF-8 byte-length checks.
const textEncoder = new TextEncoder();
// structuredClone when the runtime provides it; cloneJson falls back to a JSON round-trip.
const structuredCloneFn: (<T>(value: T) => T) | undefined =
  (globalThis as typeof globalThis & { structuredClone?: <T>(value: T) => T }).structuredClone;
66
+
67
+ function utf8ByteLength(value: string): number {
68
+ return textEncoder.encode(value).length;
69
+ }
70
+
71
+ function isValidPeerId(peerId: unknown): peerId is string {
72
+ return typeof peerId === "string" && utf8ByteLength(peerId) < 128;
73
+ }
74
+
75
+ function createRandomPeerId(): string {
76
+ const id = new Uint8Array(32);
77
+ const cryptoAny = typeof crypto !== "undefined" ? (crypto as any) : undefined;
78
+ if (cryptoAny?.getRandomValues) {
79
+ cryptoAny.getRandomValues(id);
80
+ } else if (cryptoAny?.randomBytes) {
81
+ const buf: Uint8Array = cryptoAny.randomBytes(32);
82
+ id.set(buf);
83
+ } else {
84
+ for (let i = 0; i < 32; i += 1) {
85
+ id[i] = Math.floor(Math.random() * 256);
86
+ }
87
+ }
88
+ return Array.from(id, (byte) => byte.toString(16).padStart(2, "0")).join("");
89
+ }
90
+
91
+ function normalizePeerId(peerId?: string): string {
92
+ if (peerId === undefined) {
93
+ return createRandomPeerId();
94
+ }
95
+ if (!isValidPeerId(peerId)) {
96
+ throw new TypeError("peerId must be a UTF-8 string under 128 bytes");
97
+ }
98
+ return peerId;
99
+ }
100
+
101
+ function cloneJson<T>(value: T): T {
102
+ if (value === undefined) {
103
+ return value;
104
+ }
105
+ if (structuredCloneFn) {
106
+ return structuredCloneFn(value);
107
+ }
108
+ return JSON.parse(JSON.stringify(value)) as T;
109
+ }
110
+
111
+ function cloneMetadata(metadata: unknown): MetadataMap | undefined {
112
+ if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) {
113
+ return undefined;
114
+ }
115
+ return cloneJson(metadata as MetadataMap);
116
+ }
117
+
118
+ function assignPayload(target: ExportPayload, source?: ExportPayload | void): void {
119
+ if (!source || typeof source !== "object") {
120
+ return;
121
+ }
122
+ if ("data" in source) {
123
+ const value = source.data;
124
+ target.data = value === undefined ? undefined : cloneJson(value);
125
+ }
126
+ if ("metadata" in source) {
127
+ target.metadata = cloneMetadata(source.metadata);
128
+ }
129
+ }
130
+
131
+ function clonePayload(payload: ExportPayload | undefined): ExportPayload {
132
+ const result: ExportPayload = {};
133
+ assignPayload(result, payload);
134
+ return result;
135
+ }
136
+
137
+ function mergePayload(base: ExportPayload, update?: ExportPayload | void): ExportPayload {
138
+ const result = clonePayload(base);
139
+ assignPayload(result, update);
140
+ return result;
141
+ }
142
+
143
+ function buildRecord(clock: EntryClock, payload: ExportPayload): ExportRecord {
144
+ const record: ExportRecord = {
145
+ c: formatClock(clock),
146
+ };
147
+ if (payload.data !== undefined) {
148
+ record.d = cloneJson(payload.data);
149
+ }
150
+ const metadata = cloneMetadata(payload.metadata);
151
+ if (metadata !== undefined) {
152
+ record.m = metadata;
153
+ }
154
+ return record;
155
+ }
156
+
157
+ function normalizeImportDecision(decision: ImportDecision): { accept: boolean; reason?: string } {
158
+ if (!decision || typeof decision !== "object") {
159
+ return { accept: true };
160
+ }
161
+ if ("accept" in decision) {
162
+ if (!decision.accept) {
163
+ return { accept: false, reason: decision.reason ?? "rejected" };
164
+ }
165
+ return { accept: true };
166
+ }
167
+ return { accept: true };
168
+ }
169
+
170
+ function isExportOptions(arg: VersionVector | ExportOptions | undefined): arg is ExportOptions {
171
+ return (
172
+ typeof arg === "object" &&
173
+ arg !== null &&
174
+ (Object.prototype.hasOwnProperty.call(arg, "hooks") ||
175
+ Object.prototype.hasOwnProperty.call(arg, "from") ||
176
+ Object.prototype.hasOwnProperty.call(arg, "pruneTombstonesBefore") ||
177
+ Object.prototype.hasOwnProperty.call(arg, "peerId"))
178
+ );
179
+ }
180
+
181
+ function isImportOptions(arg: ExportBundle | ImportOptions): arg is ImportOptions {
182
+ return typeof arg === "object" && arg !== null && Object.prototype.hasOwnProperty.call(arg, "bundle");
183
+ }
184
+
185
+ function parseMetadata(json: string | null): MetadataMap | undefined {
186
+ if (!json) {
187
+ return undefined;
188
+ }
189
+ try {
190
+ const parsed = JSON.parse(json);
191
+ if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) {
192
+ return parsed as MetadataMap;
193
+ }
194
+ } catch {
195
+ // ignored
196
+ }
197
+ return undefined;
198
+ }
199
+
200
+ function parseData(json: string | null): Value | undefined {
201
+ if (json === null || json === undefined) {
202
+ return undefined;
203
+ }
204
+ return JSON.parse(json) as Value;
205
+ }
206
+
207
+ function parseClockString(raw: string): EntryClock {
208
+ const parts = raw.split(",");
209
+ if (parts.length < 3) {
210
+ return { physicalTime: 0, logicalCounter: 0, peerId: "" };
211
+ }
212
+ const physical = Number(parts[0]);
213
+ const logical = Number(parts[1]);
214
+ const peerId = parts.slice(2).join(",");
215
+ return {
216
+ physicalTime: Number.isFinite(physical) ? physical : 0,
217
+ logicalCounter: Number.isFinite(logical) ? Math.trunc(logical) : 0,
218
+ peerId: isValidPeerId(peerId) ? peerId : "",
219
+ };
220
+ }
221
+
222
+ function formatClock(clock: EntryClock): string {
223
+ return `${clock.physicalTime},${clock.logicalCounter},${clock.peerId}`;
224
+ }
225
+
226
+ function compareClock(a: EntryClock, b: EntryClock): number {
227
+ if (a.physicalTime !== b.physicalTime) {
228
+ return a.physicalTime > b.physicalTime ? 1 : -1;
229
+ }
230
+ if (a.logicalCounter !== b.logicalCounter) {
231
+ return a.logicalCounter > b.logicalCounter ? 1 : -1;
232
+ }
233
+ if (a.peerId === b.peerId) {
234
+ return 0;
235
+ }
236
+ return a.peerId > b.peerId ? 1 : -1;
237
+ }
238
+
239
+ function normalizeVersionEntry(entry?: VersionVectorEntry): VersionVectorEntry | undefined {
240
+ if (!entry) return undefined;
241
+ const { physicalTime, logicalCounter } = entry;
242
+ if (!Number.isFinite(physicalTime) || !Number.isFinite(logicalCounter)) {
243
+ return undefined;
244
+ }
245
+ return {
246
+ physicalTime: physicalTime,
247
+ logicalCounter: Math.trunc(logicalCounter),
248
+ };
249
+ }
250
+
251
+ function keyMatchesPrefix(key: Uint8Array, prefix: Uint8Array): boolean {
252
+ if (prefix.length === 0) {
253
+ return true;
254
+ }
255
+ if (key.length < prefix.length) {
256
+ return false;
257
+ }
258
+ for (let i = 0; i < prefix.length; i += 1) {
259
+ if (key[i] !== prefix[i]) {
260
+ return false;
261
+ }
262
+ }
263
+ return true;
264
+ }
265
+
266
+ async function toDigestRows(rows: KvRow[]): Promise<DigestRow[]> {
267
+ return rows.map((row) => ({
268
+ key: row.key,
269
+ data: row.data,
270
+ metadata: row.metadata,
271
+ physical: row.physical,
272
+ logical: row.logical,
273
+ peer: row.peer,
274
+ }));
275
+ }
276
+
277
/** Options for opening a FlockSQLite store. */
export type FlockSQLiteOptions = {
  path: string;                    // SQLite database path (ignored when `connection` is provided)
  peerId?: string;                 // explicit peer id; otherwise a random one is generated/persisted
  connection?: UniStoreConnection; // reuse an existing connection instead of opening `path`
  tablePrefix?: string;            // optional identifier prefix applied to all tables and indexes
};

/** Fully-qualified table and index names derived from the configured prefix. */
type TableNames = {
  kv: string;
  overridden: string;
  meta: string;
  idxKvPeerClock: string;
  idxOverriddenKey: string;
  idxOverriddenSupersededAt: string;
};
292
+
293
+ function normalizeTablePrefix(prefix?: string): string {
294
+ if (!prefix) return "";
295
+ if (typeof prefix !== "string") {
296
+ throw new TypeError("tablePrefix must be a string");
297
+ }
298
+ if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(prefix)) {
299
+ throw new TypeError("tablePrefix must start with a letter/underscore and use only letters, digits, or underscores");
300
+ }
301
+ return prefix;
302
+ }
303
+
304
+ function buildTableNames(prefix: string): TableNames {
305
+ return {
306
+ kv: `${prefix}kv`,
307
+ overridden: `${prefix}overridden`,
308
+ meta: `${prefix}meta`,
309
+ idxKvPeerClock: `${prefix}idx_kv_peer_clock_key`,
310
+ idxOverriddenKey: `${prefix}idx_overridden_key`,
311
+ idxOverriddenSupersededAt: `${prefix}idx_overridden_superseded_at`,
312
+ };
313
+ }
314
+
315
/**
 * SQLite-backed last-writer-wins key/value replica.
 *
 * Every entry carries a hybrid logical clock (HLC: physical ms time, logical
 * counter, peer id). Conflicting writes are ordered by `compareClock`; the
 * losing version is archived to the `overridden` table rather than discarded.
 * An in-memory version vector (`vv`) and the highest observed clock (`maxHlc`)
 * mirror on-disk state so exports can be filtered and new clocks allocated
 * without extra queries.
 */
export class FlockSQLite {
  private db: UniStoreConnection;
  // This replica's identity; persisted in the meta table.
  private peerIdValue: string;
  // Per-peer maximum clock observed (the version vector).
  private vv: Map<string, VersionVectorEntry>;
  // Highest HLC seen across all peers; basis for allocating new clocks.
  private maxHlc: { physicalTime: number; logicalCounter: number };
  private listeners: Set<EventListener>;
  private tables: TableNames;

  // Use the static `open` factory; construction requires already-loaded state.
  private constructor(db: UniStoreConnection, peerId: string, vv: Map<string, VersionVectorEntry>, maxHlc: { physicalTime: number; logicalCounter: number }, tables: TableNames) {
    this.db = db;
    this.peerIdValue = peerId;
    this.vv = vv;
    this.maxHlc = maxHlc;
    this.listeners = new Set();
    this.tables = tables;
  }

  /** Open (or create) a store: ensures the schema, resolves the peer id, and loads clock state. */
  static async open(options: FlockSQLiteOptions): Promise<FlockSQLite> {
    const db = options.connection ?? (await openStore({ path: options.path }));
    const prefix = normalizeTablePrefix(options.tablePrefix);
    const tables = buildTableNames(prefix);
    await FlockSQLite.ensureSchema(db, tables);
    const peerId = await FlockSQLite.resolvePeerId(db, tables, options.peerId);
    const { vv, maxHlc } = await FlockSQLite.loadVersionState(db, tables);
    return new FlockSQLite(db, peerId, vv, maxHlc, tables);
  }

  /** Open a store and immediately import the given export bundle. */
  static async fromJson(options: FlockSQLiteOptions & { bundle: ExportBundle }): Promise<FlockSQLite> {
    const flock = await FlockSQLite.open(options);
    await flock.importJson(options.bundle);
    return flock;
  }

  /** Close the underlying database connection. */
  async close(): Promise<void> {
    await this.db.close();
  }

  // Create tables/indexes if missing. Table names are validated identifiers
  // (see normalizeTablePrefix), so string interpolation here is injection-safe.
  private static async ensureSchema(db: UniStoreConnection, tables: TableNames): Promise<void> {
    await db.exec(`
      CREATE TABLE IF NOT EXISTS ${tables.kv} (
        key BLOB PRIMARY KEY,
        data TEXT NULL,
        metadata TEXT NULL,
        physical REAL NOT NULL,
        logical INTEGER NOT NULL,
        peer TEXT NOT NULL
      )`);
    await db.exec(`
      CREATE TABLE IF NOT EXISTS ${tables.overridden} (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        key BLOB,
        data TEXT NULL,
        metadata TEXT NULL,
        physical REAL NOT NULL,
        logical INTEGER NOT NULL,
        peer TEXT NOT NULL,
        superseded_at INTEGER DEFAULT (unixepoch())
      )`);
    await db.exec(`CREATE TABLE IF NOT EXISTS ${tables.meta} (peer_id TEXT)`);
    await db.exec(
      `CREATE INDEX IF NOT EXISTS ${tables.idxKvPeerClock} ON ${tables.kv}(peer, physical, logical, key)`,
    );
    await db.exec(
      `CREATE INDEX IF NOT EXISTS ${tables.idxOverriddenKey} ON ${tables.overridden}(key)`,
    );
    await db.exec(
      `CREATE INDEX IF NOT EXISTS ${tables.idxOverriddenSupersededAt} ON ${tables.overridden}(superseded_at)`,
    );
  }

  // Determine the persisted peer id. A caller-provided id overrides and
  // rewrites the stored one; otherwise the stored id (or a freshly generated,
  // persisted one) is used.
  private static async resolvePeerId(db: UniStoreConnection, tables: TableNames, provided?: string): Promise<string> {
    const normalized = normalizePeerId(provided);
    const rows = await db.query<{ peer_id: string }>(`SELECT peer_id FROM ${tables.meta} LIMIT 1`);
    if (rows.length > 0 && typeof rows[0]?.peer_id === "string") {
      const existing = rows[0].peer_id;
      if (provided && existing !== normalized) {
        await db.run(`UPDATE ${tables.meta} SET peer_id = ?`, [normalized]);
        return normalized;
      }
      // Re-validate the stored id; throws if the stored value is invalid.
      return normalizePeerId(existing);
    }
    await db.exec(`DELETE FROM ${tables.meta}`);
    await db.run(`INSERT INTO ${tables.meta}(peer_id) VALUES (?)`, [normalized]);
    return normalized;
  }

  // Rebuild the in-memory version vector and max HLC from the kv table.
  // NOTE(review): MAX(physical) and MAX(logical) are aggregated independently
  // per peer, so an entry may combine the counters of two different rows —
  // verify this over-approximation of each peer's frontier is intended.
  private static async loadVersionState(db: UniStoreConnection, tables: TableNames): Promise<{ vv: Map<string, VersionVectorEntry>; maxHlc: { physicalTime: number; logicalCounter: number } }> {
    const vv = new Map<string, VersionVectorEntry>();
    const rows = await db.query<ClockRow>(
      `SELECT peer, MAX(physical) AS physical, MAX(logical) AS logical FROM ${tables.kv} GROUP BY peer`,
    );
    for (const row of rows) {
      if (!row || typeof row.peer !== "string") continue;
      const entry = normalizeVersionEntry({
        physicalTime: Number(row.physical ?? 0),
        logicalCounter: Number(row.logical ?? 0),
      });
      if (entry) {
        vv.set(row.peer, entry);
      }
    }
    const maxRow = await db.query<ClockRow>(
      `SELECT physical, logical FROM ${tables.kv} ORDER BY physical DESC, logical DESC LIMIT 1`,
    );
    const first = maxRow[0];
    const maxHlc =
      first && Number.isFinite(first.physical) && Number.isFinite(first.logical)
        ? { physicalTime: Number(first.physical), logicalCounter: Number(first.logical) }
        : { physicalTime: 0, logicalCounter: 0 };
    return { vv, maxHlc };
  }

  // Advance the version vector and max HLC to cover `clock` (monotonic only).
  private bumpVersion(clock: EntryClock): void {
    const current = this.vv.get(clock.peerId);
    if (!current || compareClock(clock, { ...current, peerId: clock.peerId }) > 0) {
      this.vv.set(clock.peerId, {
        physicalTime: clock.physicalTime,
        logicalCounter: clock.logicalCounter,
      });
    }
    if (
      this.maxHlc.physicalTime < clock.physicalTime ||
      (this.maxHlc.physicalTime === clock.physicalTime &&
        this.maxHlc.logicalCounter < clock.logicalCounter)
    ) {
      this.maxHlc = { physicalTime: clock.physicalTime, logicalCounter: clock.logicalCounter };
    }
  }

  // Allocate the next HLC for a local write: take wall time if it moved past
  // the frontier, otherwise reuse the frontier's physical time and bump the
  // logical counter (keeps clocks monotonic under clock skew).
  private allocateClock(now?: number): EntryClock {
    const timestamp = now ?? Date.now();
    let physical = this.maxHlc.physicalTime;
    let logical = this.maxHlc.logicalCounter;
    if (timestamp > physical) {
      physical = timestamp;
      logical = 0;
    } else {
      logical = logical + 1;
    }
    return { physicalTime: physical, logicalCounter: logical, peerId: this.peerIdValue };
  }

  /**
   * Core write path shared by put/delete/import. Inside one transaction:
   * compares the incoming clock against any existing row, archives the losing
   * version to `overridden`, and upserts the winner into `kv`.
   * Returns true when the kv row was actually (re)written.
   */
  private async applyOperation(operation: PutOperation): Promise<boolean> {
    const keyBytes = encodeKeyParts(operation.key);
    const clock = operation.clock ?? this.allocateClock(operation.now);
    // mergePayload with an empty update acts as a defensive deep copy.
    const payload = mergePayload(operation.payload, {});
    const dataJson = payload.data === undefined ? null : JSON.stringify(payload.data);
    const metadataJson = payload.metadata === undefined ? null : JSON.stringify(payload.metadata);
    let applied = false;

    await this.db.asyncTransaction(async (tx) => {
      const existingRows = await tx.query<KvRow>(
        `SELECT key, data, metadata, physical, logical, peer FROM ${this.tables.kv} WHERE key = ? LIMIT 1`,
        [keyBytes],
      );
      if (existingRows.length > 0) {
        const existing = existingRows[0];
        const existingClock: EntryClock = {
          physicalTime: Number(existing.physical ?? 0),
          logicalCounter: Number(existing.logical ?? 0),
          peerId: String(existing.peer ?? ""),
        };
        const cmp = compareClock(clock, existingClock);
        const existingData = existing.data ?? null;
        const existingMeta = existing.metadata ?? null;
        // Identical payload short-circuit (local writes only; imports set
        // skipSameValue=false so clocks still advance).
        const samePayload =
          operation.skipSameValue &&
          dataJson === existingData &&
          metadataJson === existingMeta;
        if (samePayload) {
          return;
        }
        if (cmp < 0) {
          // Incoming write loses: archive it and keep the current row.
          await tx.run(
            `INSERT INTO ${this.tables.overridden}(key, data, metadata, physical, logical, peer) VALUES (?, ?, ?, ?, ?, ?)`,
            [keyBytes, dataJson, metadataJson, clock.physicalTime, clock.logicalCounter, clock.peerId],
          );
          return;
        }
        if (cmp > 0) {
          // Incoming write wins: archive the current row before replacing it.
          await tx.run(
            `INSERT INTO ${this.tables.overridden}(key, data, metadata, physical, logical, peer) VALUES (?, ?, ?, ?, ?, ?)`,
            [
              keyBytes,
              existing.data ?? null,
              existing.metadata ?? null,
              existing.physical ?? 0,
              existing.logical ?? 0,
              existing.peer ?? "",
            ],
          );
        } else {
          // Equal clocks: same logical write; nothing to do.
          return;
        }
      }
      await tx.run(
        `INSERT INTO ${this.tables.kv}(key, data, metadata, physical, logical, peer)
         VALUES (?, ?, ?, ?, ?, ?)
         ON CONFLICT(key) DO UPDATE SET
           data=excluded.data,
           metadata=excluded.metadata,
           physical=excluded.physical,
           logical=excluded.logical,
           peer=excluded.peer`,
        [keyBytes, dataJson, metadataJson, clock.physicalTime, clock.logicalCounter, clock.peerId],
      );
      applied = true;
    });

    // NOTE(review): the version vector is bumped even when the write was
    // skipped (same payload / stale clock) — confirm that is intended.
    this.bumpVersion(clock);
    if (applied) {
      const eventPayload = {
        key: operation.key.slice(),
        payload,
        source: operation.source,
      };
      if (operation.eventSink) {
        // Batch imports collect events and emit once at the end.
        operation.eventSink.push(eventPayload);
      } else {
        this.emitEvents(operation.source, [eventPayload]);
      }
    }
    return applied;
  }

  // Deliver a batch of change events to all subscribers. Every field is
  // cloned so listeners cannot mutate internal state.
  private emitEvents(
    source: string,
    events: Array<{ key: KeyPart[]; payload: ExportPayload }>,
  ): void {
    if (this.listeners.size === 0 || events.length === 0) {
      return;
    }
    const batch: EventBatch = {
      source,
      events: events.map((event): Event => ({
        key: cloneJson(event.key),
        value: event.payload.data !== undefined ? cloneJson(event.payload.data) : undefined,
        metadata: cloneMetadata(event.payload.metadata),
        payload: clonePayload(event.payload),
      })),
    };
    this.listeners.forEach((listener) => {
      listener(batch);
    });
  }

  /** Write `value` at `key` with a freshly allocated local clock. */
  async put(key: KeyPart[], value: Value, now?: number): Promise<void> {
    await this.applyOperation({
      key,
      payload: { data: cloneJson(value) },
      now,
      skipSameValue: true,
      source: "local",
    });
  }

  /**
   * Write a value with optional metadata. When a transform hook is supplied it
   * may rewrite the payload before the write; a hook that removes the data
   * field entirely is rejected with a TypeError.
   */
  async putWithMeta(key: KeyPart[], value: Value, options: PutWithMetaOptions = {}): Promise<void> {
    const basePayload: ExportPayload = { data: cloneJson(value) };
    if (options.metadata) {
      basePayload.metadata = cloneMetadata(options.metadata);
    }
    const hooks = options.hooks?.transform;
    if (hooks) {
      // The hook gets a mutable copy; its return value (or the mutated copy)
      // is overlaid onto the base payload.
      const working = clonePayload(basePayload);
      const transformed = await hooks({ key: key.slice(), now: options.now }, working);
      const finalPayload = mergePayload(basePayload, transformed ?? working);
      if (finalPayload.data === undefined) {
        throw new TypeError("putWithMeta requires a data value");
      }
      await this.applyOperation({
        key,
        payload: finalPayload,
        now: options.now,
        skipSameValue: true,
        source: "local",
      });
      return;
    }
    await this.applyOperation({
      key,
      payload: basePayload,
      now: options.now,
      skipSameValue: true,
      source: "local",
    });
  }

  /** Delete a key by writing a tombstone (empty payload) with a new clock. */
  async delete(key: KeyPart[], now?: number): Promise<void> {
    await this.applyOperation({
      key,
      payload: {},
      now,
      skipSameValue: true,
      source: "local",
    });
  }

  /** Alias for `put`. */
  async set(key: KeyPart[], value: Value, now?: number): Promise<void> {
    await this.put(key, value, now);
  }

  /** Replace this replica's peer id (validated and persisted). */
  async setPeerId(peerId: string): Promise<void> {
    const normalized = normalizePeerId(peerId);
    await this.db.exec(`DELETE FROM ${this.tables.meta}`);
    await this.db.run(`INSERT INTO ${this.tables.meta}(peer_id) VALUES (?)`, [normalized]);
    this.peerIdValue = normalized;
  }

  /** Read the value at `key`; undefined when absent or tombstoned. */
  async get(key: KeyPart[]): Promise<Value | undefined> {
    const keyBytes = encodeKeyParts(key);
    const rows = await this.db.query<KvRow>(
      `SELECT data FROM ${this.tables.kv} WHERE key = ? LIMIT 1`,
      [keyBytes],
    );
    const row = rows[0];
    if (!row) return undefined;
    return parseData(row.data);
  }

  /**
   * Multi-value-register read: values are encoded as key suffixes
   * (key + [value] -> true), so collect the final key part of each live
   * child entry exactly one level below `key`.
   */
  async getMvr(key: KeyPart[]): Promise<Value[]> {
    const rows = await this.scan({ prefix: key });
    const values: Value[] = [];
    for (const row of rows) {
      if (row.raw.d !== true) continue;
      if (row.key.length !== key.length + 1) continue;
      values.push(row.key[row.key.length - 1]);
    }
    return values;
  }

  /**
   * Multi-value-register write: tombstone all existing live children, then
   * write key + [value] -> true. Only scalar values can be key parts.
   */
  async putMvr(key: KeyPart[], value: Value, now?: number): Promise<void> {
    if (value === null || typeof value === "object") {
      throw new TypeError("putMvr only accepts scalar values");
    }
    const existing = await this.scan({ prefix: key });
    for (const row of existing) {
      if (row.raw.d === true) {
        await this.delete(row.key, now);
      }
    }
    const composite = key.slice();
    composite.push(value);
    await this.put(composite, true, now);
  }

  // Translate ScanOptions (prefix/start/end bounds) into SQL range clauses
  // over the binary key, intersecting all bounds. `empty` is set when a bound
  // cannot be encoded (scan yields nothing); `postFilter` re-checks the prefix
  // on each row as a safety net alongside the SQL range.
  private buildScanBounds(
    options: ScanOptions,
  ): { where: string[]; params: unknown[]; empty?: boolean; postFilter?: (bytes: Uint8Array) => boolean } {
    let lower: { value: Uint8Array; inclusive: boolean } | undefined;
    let upper: { value: Uint8Array; inclusive: boolean } | undefined;
    let prefixFilter: Uint8Array | undefined;

    // Keep the tightest lower bound; on ties, exclusive wins.
    const applyLower = (candidate: { value: Uint8Array; inclusive: boolean }) => {
      if (!lower) {
        lower = candidate;
        return;
      }
      const cmp = compareBytes(candidate.value, lower.value);
      if (cmp > 0) {
        lower = candidate;
      } else if (cmp === 0) {
        lower = { value: lower.value, inclusive: lower.inclusive && candidate.inclusive };
      }
    };

    // Keep the tightest upper bound; on ties, exclusive wins.
    const applyUpper = (candidate: { value: Uint8Array; inclusive: boolean }) => {
      if (!upper) {
        upper = candidate;
        return;
      }
      const cmp = compareBytes(candidate.value, upper.value);
      if (cmp < 0) {
        upper = candidate;
      } else if (cmp === 0) {
        upper = { value: upper.value, inclusive: upper.inclusive && candidate.inclusive };
      }
    };

    try {
      if (options.prefix) {
        const prefixBytes = encodeKeyParts(options.prefix);
        prefixFilter = prefixBytes;
        applyLower({ value: prefixBytes, inclusive: true });
        const upperPrefix = prefixUpperBound(prefixBytes);
        if (upperPrefix) {
          applyUpper({ value: upperPrefix, inclusive: false });
        }
      }
      if (options.start && options.start.kind !== "unbounded") {
        applyLower({
          value: encodeKeyParts(options.start.key),
          inclusive: options.start.kind === "inclusive",
        });
      }
      if (options.end && options.end.kind !== "unbounded") {
        applyUpper({
          value: encodeKeyParts(options.end.key),
          inclusive: options.end.kind === "inclusive",
        });
      }
    } catch {
      // Unencodable bound: the scan range is empty by definition.
      return { where: [], params: [], empty: true };
    }

    const where: string[] = [];
    const params: unknown[] = [];
    if (lower) {
      where.push(`key ${lower.inclusive ? ">=" : ">"} ?`);
      params.push(lower.value);
    }
    if (upper) {
      where.push(`key ${upper.inclusive ? "<=" : "<"} ?`);
      params.push(upper.value);
    }
    const postFilter = prefixFilter
      ? ((pf: Uint8Array) => (bytes: Uint8Array) => keyMatchesPrefix(bytes, pf))(prefixFilter)
      : undefined;
    return { where, params, postFilter };
  }

  /** Range scan over live entries (tombstones included), ordered by binary key. */
  async scan(options: ScanOptions = {}): Promise<ScanRow[]> {
    const bounds = this.buildScanBounds(options);
    if (bounds.empty) {
      return [];
    }
    const clauses = bounds.where.length > 0 ? `WHERE ${bounds.where.join(" AND ")}` : "";
    const rows = await this.db.query<KvRow>(
      `SELECT key, data, metadata, physical, logical, peer FROM ${this.tables.kv} ${clauses} ORDER BY key ASC`,
      bounds.params as [],
    );
    const result: ScanRow[] = [];
    for (const row of rows) {
      const keyBytes = row.key;
      if (bounds.postFilter && !bounds.postFilter(keyBytes)) {
        continue;
      }
      const key = decodeKeyParts(keyBytes);
      const clock: EntryClock = {
        physicalTime: Number(row.physical ?? 0),
        logicalCounter: Number(row.logical ?? 0),
        peerId: String(row.peer ?? ""),
      };
      const payload: ExportPayload = {};
      const data = parseData(row.data);
      if (data !== undefined) payload.data = data;
      const metadata = parseMetadata(row.metadata);
      if (metadata !== undefined) payload.metadata = metadata;
      const raw = buildRecord(clock, payload);
      result.push({ key, raw, value: payload.data });
    }
    return result;
  }

  /** Snapshot of the version vector (copied, safe for callers to mutate). */
  version(): VersionVector {
    const vv: VersionVector = {};
    for (const [peer, clock] of this.vv.entries()) {
      vv[peer] = { ...clock };
    }
    return vv;
  }

  /** This replica's peer id. */
  peerId(): string {
    return this.peerIdValue;
  }

  /** Physical component of the highest HLC observed so far. */
  getMaxPhysicalTime(): number {
    return this.maxHlc.physicalTime;
  }

  /**
   * Build an export bundle containing entries newer than `from` (per peer),
   * optionally restricted to one peer and with old tombstones pruned.
   * The SQL filter uses `physical >= from` as a coarse cut; the exact
   * (physical, logical) comparison is re-applied per row below.
   */
  private async exportInternal(from?: VersionVector, pruneTombstonesBefore?: number, peerId?: string): Promise<ExportBundle> {
    const normalizedFrom = new Map<string, VersionVectorEntry>();
    if (from) {
      for (const [peer, entry] of Object.entries(from)) {
        const normalized = normalizeVersionEntry(entry);
        if (normalized) {
          normalizedFrom.set(peer, normalized);
        }
      }
    }

    const entries: Record<string, ExportRecord> = {};

    const peers = peerId ? [peerId] : Array.from(this.vv.keys());
    const peersToExport: Array<{ peer: string; fromEntry?: VersionVectorEntry }> = [];
    for (const peer of peers) {
      const localEntry = this.vv.get(peer);
      const fromEntry = normalizedFrom.get(peer);
      if (
        localEntry &&
        fromEntry &&
        (localEntry.physicalTime < fromEntry.physicalTime ||
          (localEntry.physicalTime === fromEntry.physicalTime &&
            localEntry.logicalCounter <= fromEntry.logicalCounter))
      ) {
        // Remote is already up to date for this peer.
        continue;
      }
      peersToExport.push({ peer, fromEntry });
    }

    // An explicitly requested peer is always queried, even if the version
    // vector suggests there is nothing new.
    if (peerId && peersToExport.every((p) => p.peer !== peerId)) {
      peersToExport.push({ peer: peerId, fromEntry: normalizedFrom.get(peerId) });
    }

    if (peersToExport.length === 0) {
      return { version: 0, entries };
    }

    for (const target of peersToExport) {
      const params: unknown[] = [target.peer];
      let clause = "WHERE peer = ?";
      if (target.fromEntry) {
        clause += " AND physical >= ?";
        params.push(target.fromEntry.physicalTime);
      }
      const rows = await this.db.query<ExportQueryRow>(
        `SELECT key, data, metadata, physical, logical, peer FROM ${this.tables.kv} ${clause} ORDER BY physical, logical, key`,
        params as [],
      );
      for (const row of rows) {
        const clock: EntryClock = {
          physicalTime: Number(row.physical ?? 0),
          logicalCounter: Number(row.logical ?? 0),
          peerId: String(row.peer ?? ""),
        };
        const fromEntry = target.fromEntry;
        // Exact frontier check: drop rows at or below the remote's clock.
        if (
          fromEntry &&
          (clock.physicalTime < fromEntry.physicalTime ||
            (clock.physicalTime === fromEntry.physicalTime &&
              clock.logicalCounter <= fromEntry.logicalCounter))
        ) {
          continue;
        }
        // Optionally omit tombstones (data IS NULL) older than the cutoff.
        if (
          pruneTombstonesBefore !== undefined &&
          Number.isFinite(pruneTombstonesBefore) &&
          row.data === null &&
          clock.physicalTime < pruneTombstonesBefore
        ) {
          continue;
        }
        const payload: ExportPayload = {};
        const data = parseData(row.data);
        if (data !== undefined) payload.data = data;
        const metadata = parseMetadata(row.metadata);
        if (metadata !== undefined) payload.metadata = metadata;
        const keyParts = decodeKeyParts(row.key);
        entries[keyToString(keyParts)] = buildRecord(clock, payload);
      }
    }
    return { version: 0, entries };
  }

  // Export, then run the optional transform hook over each record
  // (hook may rewrite data/metadata; the clock is preserved).
  private async exportWithHooks(options: ExportOptions): Promise<ExportBundle> {
    const base = await this.exportInternal(options.from, options.pruneTombstonesBefore, options.peerId);
    const transform = options.hooks?.transform;
    if (!transform) {
      return base;
    }
    const result: ExportBundle = { version: base.version, entries: {} };
    for (const [key, record] of Object.entries(base.entries)) {
      const clock = parseClockString(record.c);
      // NOTE(review): assumes keyToString emits JSON-parseable keys — verify
      // against the key-encoding module.
      const context = {
        key: JSON.parse(key) as KeyPart[],
        clock,
        raw: { ...record },
      };
      const basePayload: ExportPayload = {};
      if (record.d !== undefined) basePayload.data = cloneJson(record.d);
      const meta = cloneMetadata(record.m);
      if (meta !== undefined) basePayload.metadata = meta;
      const working = clonePayload(basePayload);
      const transformed = await transform(context, working);
      const finalPayload = mergePayload(basePayload, transformed ?? working);
      result.entries[key] = buildRecord(clock, finalPayload);
    }
    return result;
  }

  /** Export the store (optionally incremental / hooked) as a JSON bundle. */
  exportJson(): Promise<ExportBundle>;
  exportJson(from: VersionVector): Promise<ExportBundle>;
  exportJson(from: VersionVector, pruneTombstonesBefore: number): Promise<ExportBundle>;
  exportJson(options: ExportOptions): Promise<ExportBundle>;
  exportJson(arg?: VersionVector | ExportOptions, pruneTombstonesBefore?: number): Promise<ExportBundle> {
    if (isExportOptions(arg)) {
      return this.exportWithHooks(arg);
    }
    return this.exportInternal(arg, pruneTombstonesBefore);
  }

  // Apply every entry of a bundle through the normal LWW write path,
  // preserving each entry's original clock.
  private async importInternal(bundle: ExportBundle): Promise<ImportReport> {
    if (bundle.version !== 0) {
      throw new TypeError("Unsupported bundle version");
    }
    let accepted = 0;
    const skipped: Array<{ key: KeyPart[]; reason: string }> = [];
    const appliedEvents: Array<{ key: KeyPart[]; payload: ExportPayload; source: string }> = [];
    for (const [keyString, record] of Object.entries(bundle.entries)) {
      let keyParts: KeyPart[];
      try {
        const parsed = JSON.parse(keyString);
        keyParts = Array.isArray(parsed) ? (parsed as KeyPart[]) : [];
      } catch {
        skipped.push({ key: [], reason: "invalid key" });
        continue;
      }
      const clock = parseClockString(record.c);
      const payload: ExportPayload = {};
      if (record.d !== undefined) payload.data = cloneJson(record.d);
      const metadata = cloneMetadata(record.m);
      if (metadata !== undefined) payload.metadata = metadata;
      // NOTE(review): `accepted` counts every well-formed entry, including
      // those applyOperation drops as stale — confirm that is the intended
      // meaning of "accepted".
      accepted += 1;
      await this.applyOperation({
        key: keyParts,
        payload,
        clock,
        skipSameValue: false,
        source: "import",
        eventSink: appliedEvents,
      });
    }
    if (appliedEvents.length > 0) {
      this.emitEvents("import", appliedEvents);
    }
    return { accepted, skipped };
  }

  /**
   * Import a bundle, optionally running a preprocess hook per entry that may
   * reject entries or rewrite their payloads before the merge.
   */
  async importJson(bundle: ExportBundle): Promise<ImportReport>;
  async importJson(options: ImportOptions): Promise<ImportReport>;
  async importJson(arg: ExportBundle | ImportOptions): Promise<ImportReport> {
    if (isImportOptions(arg)) {
      const preprocess = arg.hooks?.preprocess;
      // Shallow-copy the entries map so hook-driven edits never mutate the
      // caller's bundle.
      const working = preprocess ? { version: arg.bundle.version, entries: { ...arg.bundle.entries } } : arg.bundle;
      const skipped: Array<{ key: KeyPart[]; reason: string }> = [];
      if (preprocess) {
        for (const [key, record] of Object.entries(working.entries)) {
          const contextKey = JSON.parse(key) as KeyPart[];
          const clock = parseClockString(record.c);
          const payload: ExportPayload = {};
          if (record.d !== undefined) payload.data = cloneJson(record.d);
          const metadata = cloneMetadata(record.m);
          if (metadata !== undefined) payload.metadata = metadata;
          const decision = await preprocess(
            { key: contextKey, clock, raw: record },
            clonePayload(payload),
          );
          const normalized = normalizeImportDecision(decision);
          if (!normalized.accept) {
            skipped.push({ key: contextKey, reason: normalized.reason ?? "rejected" });
            delete working.entries[key];
          } else {
            working.entries[key] = buildRecord(clock, payload);
          }
        }
      }
      const baseReport = await this.importInternal(working);
      return { accepted: baseReport.accepted, skipped: skipped.concat(baseReport.skipped) };
    }
    return this.importInternal(arg);
  }

  /** Parse a JSON string and import it as a bundle. */
  async importJsonStr(json: string): Promise<ImportReport> {
    const parsed = JSON.parse(json) as ExportBundle;
    return this.importJson(parsed);
  }

  /** Content digest over all kv rows in key order (replica comparison). */
  async digest(): Promise<string> {
    const rows = await this.db.query<KvRow>(
      `SELECT key, data, metadata, physical, logical, peer FROM ${this.tables.kv} ORDER BY key ASC`,
    );
    const digestRows = await toDigestRows(rows);
    return computeDigest(digestRows);
  }

  /** Full (non-incremental) export of the store. */
  async kvToJson(): Promise<ExportBundle> {
    return this.exportInternal();
  }

  /** Pull everything from `other` into this replica. */
  async merge(other: FlockSQLite): Promise<void> {
    const bundle = await other.exportJson();
    await this.importJson(bundle);
  }

  /** Two replicas are consistent when their content digests match. */
  static async checkConsistency(a: FlockSQLite, b: FlockSQLite): Promise<boolean> {
    const [digestA, digestB] = await Promise.all([a.digest(), b.digest()]);
    return digestA === digestB;
  }

  checkInvariants(): void {
    // Placeholder: SQLite-backed implementation does not yet expose internal invariants.
  }

  /** Register a change listener; returns an unsubscribe function. */
  subscribe(listener: (batch: EventBatch) => void): () => void {
    this.listeners.add(listener);
    return () => {
      this.listeners.delete(listener);
    };
  }
}
1015
+
1016
+ export type {
1017
+ Event,
1018
+ EventBatch,
1019
+ ExportBundle,
1020
+ ExportHooks,
1021
+ ExportOptions,
1022
+ ExportPayload,
1023
+ ExportRecord,
1024
+ ImportHooks,
1025
+ ImportOptions,
1026
+ ImportReport,
1027
+ KeyPart,
1028
+ MetadataMap,
1029
+ PutHooks,
1030
+ PutWithMetaOptions,
1031
+ ScanBound,
1032
+ ScanOptions,
1033
+ ScanRow,
1034
+ Value,
1035
+ VersionVector,
1036
+ VersionVectorEntry,
1037
+ };
1038
+
1039
+ export { FlockSQLite as Flock };
1040
+ export type { EventListener };