lakesync 0.1.1 → 0.1.2
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that public registry.
- package/dist/adapter.d.ts +3 -3
- package/dist/adapter.js +2 -2
- package/dist/analyst.js +1 -1
- package/dist/{base-poller-Qo_SmCZs.d.ts → base-poller-Dfaj05py.d.ts} +11 -2
- package/dist/catalogue.d.ts +1 -1
- package/dist/catalogue.js +2 -2
- package/dist/{chunk-5YOFCJQ7.js → chunk-35NGFKNN.js} +5 -5
- package/dist/{chunk-ICNT7I3K.js → chunk-DKFUPHCU.js} +37 -1
- package/dist/chunk-DKFUPHCU.js.map +1 -0
- package/dist/{chunk-DIQZDK4C.js → chunk-I5XWBDII.js} +79 -4
- package/dist/chunk-I5XWBDII.js.map +1 -0
- package/dist/{chunk-G6RQSXTJ.js → chunk-KDRXVETO.js} +111 -4
- package/dist/chunk-KDRXVETO.js.map +1 -0
- package/dist/{chunk-X3RO5SYJ.js → chunk-OXF6RDTK.js} +2 -2
- package/dist/{chunk-P5DRFKIT.js → chunk-RBL3N5ND.js} +2 -2
- package/dist/{chunk-4ARO6KTJ.js → chunk-RDUDHNFX.js} +2 -2
- package/dist/{chunk-BNJOGBYK.js → chunk-X3UOVRV7.js} +2 -2
- package/dist/client.d.ts +70 -5
- package/dist/client.js +404 -366
- package/dist/client.js.map +1 -1
- package/dist/compactor.d.ts +1 -1
- package/dist/compactor.js +3 -3
- package/dist/connector-jira.d.ts +10 -3
- package/dist/connector-jira.js +4 -2
- package/dist/connector-salesforce.d.ts +10 -3
- package/dist/connector-salesforce.js +4 -2
- package/dist/{coordinator-CxckTzYW.d.ts → coordinator-CSbsqp5C.d.ts} +37 -37
- package/dist/{db-types-BR6Kt4uf.d.ts → db-types-CPAPw8Ws.d.ts} +1 -1
- package/dist/{gateway-D5SaaMvT.d.ts → gateway-Cej8JUh9.d.ts} +3 -3
- package/dist/gateway-server.d.ts +4 -4
- package/dist/gateway-server.js +8 -8
- package/dist/gateway.d.ts +6 -6
- package/dist/gateway.js +5 -5
- package/dist/index.d.ts +32 -5
- package/dist/index.js +7 -1
- package/dist/parquet.d.ts +1 -1
- package/dist/parquet.js +2 -2
- package/dist/proto.d.ts +1 -1
- package/dist/proto.js +2 -2
- package/dist/react.d.ts +2 -2
- package/dist/{resolver-C3Wphi6O.d.ts → resolver-B10tk8Er.d.ts} +1 -1
- package/dist/{src-WYBF5LOI.js → src-3CWNXNX6.js} +8 -2
- package/dist/{src-BLID3SYL.js → src-BAFKQNDC.js} +5 -3
- package/dist/{src-LSVSNMO2.js → src-PFJ5BJZY.js} +5 -3
- package/dist/{types-V_jVu2sA.d.ts → types-BUzzVRD6.d.ts} +1 -1
- package/package.json +1 -1
- package/dist/chunk-DIQZDK4C.js.map +0 -1
- package/dist/chunk-G6RQSXTJ.js.map +0 -1
- package/dist/chunk-ICNT7I3K.js.map +0 -1
- /package/dist/{chunk-5YOFCJQ7.js.map → chunk-35NGFKNN.js.map} +0 -0
- /package/dist/{chunk-X3RO5SYJ.js.map → chunk-OXF6RDTK.js.map} +0 -0
- /package/dist/{chunk-P5DRFKIT.js.map → chunk-RBL3N5ND.js.map} +0 -0
- /package/dist/{chunk-4ARO6KTJ.js.map → chunk-RDUDHNFX.js.map} +0 -0
- /package/dist/{chunk-BNJOGBYK.js.map → chunk-X3UOVRV7.js.map} +0 -0
- /package/dist/{src-BLID3SYL.js.map → src-3CWNXNX6.js.map} +0 -0
- /package/dist/{src-LSVSNMO2.js.map → src-BAFKQNDC.js.map} +0 -0
- /package/dist/{src-WYBF5LOI.js.map → src-PFJ5BJZY.js.map} +0 -0
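Note on the headline change: `package/dist/client.js` and `package/dist/index.js` now ship a `createClient()` entry point (see the hunk at old line 1784 below and the new `createClient` export at the end of the file). A minimal usage sketch, inferred from the bundled implementation in this diff; the option keys (`name`, `backend`, `schemas`, `clientId`, `gateway`, `queue`, `autoSyncMs`, `coordinatorConfig`) come from the fields the added code reads, while the import path, types, and concrete values are illustrative assumptions.

```ts
// Hypothetical usage of the new createClient() API added in 0.1.2.
// Only the option keys and the returned { coordinator, db, transport, destroy }
// shape are taken from the bundled code below; everything else is assumed.
import { createClient, MemoryQueue } from "lakesync";

const client = await createClient({
  name: "todo-app",            // local database name passed to LocalDB.open()
  backend: "memory",           // assumed backend identifier
  schemas: [],                 // table schemas registered on open
  clientId: "client-1",
  gateway: {
    url: "https://sync.example.com",
    gatewayId: "gateway-1",
    token: "dev-token",        // defaults to "" when omitted
  },
  queue: new MemoryQueue(),    // defaults to a MemoryQueue when omitted
  autoSyncMs: 10_000,          // defaults to 1e4; > 0 starts auto-sync
});

// ... use client.coordinator / client.db / client.transport ...
await client.destroy();        // stops auto-sync and closes the local DB
```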
package/dist/client.js
CHANGED
@@ -6,7 +6,7 @@ import {
 decodeSyncResponse,
 encodeSyncPull,
 encodeSyncPush
-} from "./chunk-
+} from "./chunk-X3UOVRV7.js";
 import {
 Err,
 HLC,
@@ -22,9 +22,12 @@ import {
 quoteIdentifier,
 toError,
 unwrapOrThrow
-} from "./chunk-
+} from "./chunk-DKFUPHCU.js";
 import "./chunk-7D4SUZUM.js";
 
+// ../client/src/db/local-db.ts
+import initSqlJs from "sql.js";
+
 // ../client/src/db/idb-persistence.ts
 import { openDB } from "idb";
 var IDB_DB_NAME = "lakesync-snapshots";
@@ -57,9 +60,6 @@ async function deleteSnapshot(dbName) {
 await idb.delete(STORE_NAME, dbName);
 }
 
-// ../client/src/db/local-db.ts
-import initSqlJs from "sql.js";
-
 // ../client/src/db/types.ts
 var DbError = class extends LakeSyncError {
 constructor(message, cause) {
@@ -368,165 +368,88 @@ async function migrateSchema(db, oldSchema, newSchema) {
 });
 }
 
-// ../client/src/queue/
-
-
-var DB_VERSION = 1;
-var STORE_NAME2 = "entries";
-function serialiseAction(action) {
-return { ...action, hlc: action.hlc.toString() };
-}
-function deserialiseAction(serialised) {
-return { ...serialised, hlc: BigInt(serialised.hlc) };
-}
-function serialiseEntry(entry) {
-return { ...entry, action: serialiseAction(entry.action) };
-}
-function deserialiseEntry(serialised) {
-return { ...serialised, action: deserialiseAction(serialised.action) };
-}
-async function wrapIdbOp(operation, fn) {
-try {
-return Ok(await fn());
-} catch (error) {
-const message = error instanceof Error ? error.message : String(error);
-return Err(new LakeSyncError(`Failed to ${operation}: ${message}`, "QUEUE_ERROR"));
-}
-}
-var IDBActionQueue = class {
-dbPromise;
+// ../client/src/queue/memory-queue.ts
+var MemoryQueue = class {
+entries = /* @__PURE__ */ new Map();
 counter = 0;
-/**
-
-
-
-
-
-
-
-
-
-
-}
-});
-}
-/** Add an action to the queue. */
-async push(action) {
-return wrapIdbOp("push to action queue", async () => {
-const db = await this.dbPromise;
-const entry = {
-id: `idb-action-${Date.now()}-${++this.counter}`,
-action,
-status: "pending",
-createdAt: Date.now(),
-retryCount: 0
-};
-await db.put(STORE_NAME2, serialiseEntry(entry));
-return entry;
-});
+/** Add a delta to the queue */
+async push(delta) {
+const entry = {
+id: `mem-${++this.counter}`,
+delta,
+status: "pending",
+createdAt: Date.now(),
+retryCount: 0
+};
+this.entries.set(entry.id, entry);
+return Ok(entry);
 }
-/** Peek at pending entries (ordered by createdAt)
+/** Peek at pending entries (ordered by createdAt), skipping entries with future retryAfter */
 async peek(limit) {
-
-
-
-const index = tx.objectStore(STORE_NAME2).index("createdAt");
-const results = [];
-let cursor = await index.openCursor();
-while (cursor && results.length < limit) {
-const serialised = cursor.value;
-if (serialised.status === "pending") {
-const entry = deserialiseEntry(serialised);
-if (entry.retryAfter === void 0 || entry.retryAfter <= Date.now()) {
-results.push(entry);
-}
-}
-cursor = await cursor.continue();
-}
-return results;
-});
+const now = Date.now();
+const pending = [...this.entries.values()].filter((e) => e.status === "pending" && (e.retryAfter === void 0 || e.retryAfter <= now)).sort((a, b) => a.createdAt - b.createdAt).slice(0, limit);
+return Ok(pending);
 }
-/** Mark entries as currently being sent
+/** Mark entries as currently being sent */
 async markSending(ids) {
-
-const 
-
-
-for (const id of ids) {
-const serialised = await store.get(id);
-if (serialised?.status === "pending") {
-serialised.status = "sending";
-await store.put(serialised);
-}
+for (const id of ids) {
+const entry = this.entries.get(id);
+if (entry?.status === "pending") {
+entry.status = "sending";
 }
-
-
+}
+return Ok(void 0);
 }
-/** Acknowledge successful delivery (removes entries)
+/** Acknowledge successful delivery (removes entries) */
 async ack(ids) {
-
-
-
-
-await tx.objectStore(STORE_NAME2).delete(id);
-}
-await tx.done;
-});
+for (const id of ids) {
+this.entries.delete(id);
+}
+return Ok(void 0);
 }
-/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff
+/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff */
 async nack(ids) {
-
-const 
-
-
-
-const 
-
-serialised.status = "pending";
-serialised.retryCount++;
-const backoffMs = Math.min(1e3 * 2 ** serialised.retryCount, 3e4);
-serialised.retryAfter = Date.now() + backoffMs;
-await store.put(serialised);
-}
+for (const id of ids) {
+const entry = this.entries.get(id);
+if (entry) {
+entry.status = "pending";
+entry.retryCount++;
+const backoffMs = Math.min(1e3 * 2 ** entry.retryCount, 3e4);
+entry.retryAfter = Date.now() + backoffMs;
 }
-
-
+}
+return Ok(void 0);
 }
-/** Get the number of pending + sending entries
+/** Get the number of pending + sending entries */
 async depth() {
-
-
-const all = await db.getAll(STORE_NAME2);
-return all.filter((e) => e.status === "pending" || e.status === "sending").length;
-});
+const count = [...this.entries.values()].filter((e) => e.status !== "acked").length;
+return Ok(count);
 }
-/** Remove all entries
+/** Remove all entries */
 async clear() {
-
-
-await db.clear(STORE_NAME2);
-});
+this.entries.clear();
+return Ok(void 0);
 }
 };
 
 // ../client/src/queue/idb-queue.ts
-import { openDB as 
-var 
-var 
-var 
+import { openDB as openDB2 } from "idb";
+var DB_NAME = "lakesync-queue";
+var DB_VERSION = 1;
+var STORE_NAME2 = "entries";
 function serialiseDelta(delta) {
 return { ...delta, hlc: delta.hlc.toString() };
 }
 function deserialiseDelta(serialised) {
 return { ...serialised, hlc: BigInt(serialised.hlc) };
 }
-function 
+function serialiseEntry(entry) {
 return { ...entry, delta: serialiseDelta(entry.delta) };
 }
-function 
+function deserialiseEntry(serialised) {
 return { ...serialised, delta: deserialiseDelta(serialised.delta) };
 }
-async function 
+async function wrapIdbOp(operation, fn) {
 try {
 return Ok(await fn());
 } catch (error) {
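Both the new MemoryQueue above and the IDBQueue below compute the retry delay in `nack()` as `Math.min(1e3 * 2 ** retryCount, 3e4)`: exponential backoff starting at 2 s (the count is incremented before the delay is computed) and capped at 30 s. A small sketch of the resulting schedule, derived directly from that formula:

```ts
// Backoff used by nack(): doubling delay per retry, capped at 30 seconds.
const backoffMs = (retryCount: number) => Math.min(1000 * 2 ** retryCount, 30_000);

backoffMs(1); // 2_000  — after the first nack
backoffMs(2); // 4_000
backoffMs(3); // 8_000
backoffMs(4); // 16_000
backoffMs(5); // 30_000 — cap reached (32_000 clamped to 30_000)
```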
@@ -543,10 +466,10 @@ var IDBQueue = class {
 * @param dbName - Optional database name. Defaults to `'lakesync-queue'`.
 * Useful for tests or running multiple independent queues.
 */
-constructor(dbName = 
-this.dbPromise = 
+constructor(dbName = DB_NAME) {
+this.dbPromise = openDB2(dbName, DB_VERSION, {
 upgrade(db) {
-const store = db.createObjectStore(
+const store = db.createObjectStore(STORE_NAME2, { keyPath: "id" });
 store.createIndex("status", "status");
 store.createIndex("createdAt", "createdAt");
 }
@@ -554,7 +477,7 @@ var IDBQueue = class {
 }
 /** Add a delta to the queue */
 async push(delta) {
-return 
+return wrapIdbOp("push to queue", async () => {
 const db = await this.dbPromise;
 const entry = {
 id: `idb-${Date.now()}-${++this.counter}`,
@@ -563,22 +486,22 @@
 createdAt: Date.now(),
 retryCount: 0
 };
-await db.put(
+await db.put(STORE_NAME2, serialiseEntry(entry));
 return entry;
 });
 }
 /** Peek at pending entries (ordered by createdAt) */
 async peek(limit) {
-return 
+return wrapIdbOp("peek queue", async () => {
 const db = await this.dbPromise;
-const tx = db.transaction(
-const index = tx.objectStore(
+const tx = db.transaction(STORE_NAME2, "readonly");
+const index = tx.objectStore(STORE_NAME2).index("createdAt");
 const results = [];
 let cursor = await index.openCursor();
 while (cursor && results.length < limit) {
 const serialised = cursor.value;
 if (serialised.status === "pending") {
-const entry = 
+const entry = deserialiseEntry(serialised);
 if (entry.retryAfter === void 0 || entry.retryAfter <= Date.now()) {
 results.push(entry);
 }
@@ -590,10 +513,10 @@ var IDBQueue = class {
 }
 /** Mark entries as currently being sent */
 async markSending(ids) {
-return 
+return wrapIdbOp("mark sending", async () => {
 const db = await this.dbPromise;
-const tx = db.transaction(
-const store = tx.objectStore(
+const tx = db.transaction(STORE_NAME2, "readwrite");
+const store = tx.objectStore(STORE_NAME2);
 for (const id of ids) {
 const serialised = await store.get(id);
 if (serialised?.status === "pending") {
@@ -606,21 +529,21 @@ var IDBQueue = class {
 }
 /** Acknowledge successful delivery (removes entries) */
 async ack(ids) {
-return 
+return wrapIdbOp("ack", async () => {
 const db = await this.dbPromise;
-const tx = db.transaction(
+const tx = db.transaction(STORE_NAME2, "readwrite");
 for (const id of ids) {
-await tx.objectStore(
+await tx.objectStore(STORE_NAME2).delete(id);
 }
 await tx.done;
 });
 }
 /** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff */
 async nack(ids) {
-return 
+return wrapIdbOp("nack", async () => {
 const db = await this.dbPromise;
-const tx = db.transaction(
-const store = tx.objectStore(
+const tx = db.transaction(STORE_NAME2, "readwrite");
+const store = tx.objectStore(STORE_NAME2);
 for (const id of ids) {
 const serialised = await store.get(id);
 if (serialised) {
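The IDBQueue hunks above route every IndexedDB operation through the bundle's `wrapIdbOp` helper, which converts thrown exceptions into `Err(new LakeSyncError(..., "QUEUE_ERROR"))` and successful results into `Ok(...)`. A typed sketch of that pattern; the `Result`, `Ok`, `Err`, and `LakeSyncError` shapes shown here are assumptions for illustration, not the package's actual type definitions.

```ts
// Sketch of the wrapIdbOp pattern seen in the bundled code above.
type Result<T, E> = { ok: true; value: T } | { ok: false; error: E };
const Ok = <T>(value: T): Result<T, never> => ({ ok: true, value });
const Err = <E>(error: E): Result<never, E> => ({ ok: false, error });

class LakeSyncError extends Error {
  constructor(message: string, readonly code: string) {
    super(message);
  }
}

// Run an async IndexedDB operation and fold success/failure into a Result
// instead of letting the exception propagate to callers.
async function wrapIdbOp<T>(
  operation: string,
  fn: () => Promise<T>,
): Promise<Result<T, LakeSyncError>> {
  try {
    return Ok(await fn());
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    return Err(new LakeSyncError(`Failed to ${operation}: ${message}`, "QUEUE_ERROR"));
  }
}
```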
@@ -636,155 +559,25 @@ var IDBQueue = class {
 }
 /** Get the number of pending + sending entries */
 async depth() {
-return 
+return wrapIdbOp("get depth", async () => {
 const db = await this.dbPromise;
-const all = await db.getAll(
+const all = await db.getAll(STORE_NAME2);
 return all.filter((e) => e.status !== "acked").length;
 });
 }
 /** Remove all entries */
 async clear() {
-return 
+return wrapIdbOp("clear queue", async () => {
 const db = await this.dbPromise;
-await db.clear(
+await db.clear(STORE_NAME2);
 });
 }
 };
 
-// ../client/src/
-
-
-
-/** Add an action to the queue. */
-async push(action) {
-const entry = {
-id: `mem-action-${++this.counter}`,
-action,
-status: "pending",
-createdAt: Date.now(),
-retryCount: 0
-};
-this.entries.set(entry.id, entry);
-return Ok(entry);
-}
-/** Peek at pending entries (ordered by createdAt), skipping entries with future retryAfter. */
-async peek(limit) {
-const now = Date.now();
-const pending = [...this.entries.values()].filter((e) => e.status === "pending" && (e.retryAfter === void 0 || e.retryAfter <= now)).sort((a, b) => a.createdAt - b.createdAt).slice(0, limit);
-return Ok(pending);
-}
-/** Mark entries as currently being sent. */
-async markSending(ids) {
-for (const id of ids) {
-const entry = this.entries.get(id);
-if (entry?.status === "pending") {
-entry.status = "sending";
-}
-}
-return Ok(void 0);
-}
-/** Acknowledge successful delivery (removes entries). */
-async ack(ids) {
-for (const id of ids) {
-this.entries.delete(id);
-}
-return Ok(void 0);
-}
-/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff. */
-async nack(ids) {
-for (const id of ids) {
-const entry = this.entries.get(id);
-if (entry) {
-entry.status = "pending";
-entry.retryCount++;
-const backoffMs = Math.min(1e3 * 2 ** entry.retryCount, 3e4);
-entry.retryAfter = Date.now() + backoffMs;
-}
-}
-return Ok(void 0);
-}
-/** Get the number of pending + sending entries. */
-async depth() {
-const count = [...this.entries.values()].filter(
-(e) => e.status === "pending" || e.status === "sending"
-).length;
-return Ok(count);
-}
-/** Remove all entries. */
-async clear() {
-this.entries.clear();
-return Ok(void 0);
-}
-};
-
-// ../client/src/queue/memory-queue.ts
-var MemoryQueue = class {
-entries = /* @__PURE__ */ new Map();
-counter = 0;
-/** Add a delta to the queue */
-async push(delta) {
-const entry = {
-id: `mem-${++this.counter}`,
-delta,
-status: "pending",
-createdAt: Date.now(),
-retryCount: 0
-};
-this.entries.set(entry.id, entry);
-return Ok(entry);
-}
-/** Peek at pending entries (ordered by createdAt), skipping entries with future retryAfter */
-async peek(limit) {
-const now = Date.now();
-const pending = [...this.entries.values()].filter((e) => e.status === "pending" && (e.retryAfter === void 0 || e.retryAfter <= now)).sort((a, b) => a.createdAt - b.createdAt).slice(0, limit);
-return Ok(pending);
-}
-/** Mark entries as currently being sent */
-async markSending(ids) {
-for (const id of ids) {
-const entry = this.entries.get(id);
-if (entry?.status === "pending") {
-entry.status = "sending";
-}
-}
-return Ok(void 0);
-}
-/** Acknowledge successful delivery (removes entries) */
-async ack(ids) {
-for (const id of ids) {
-this.entries.delete(id);
-}
-return Ok(void 0);
-}
-/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff */
-async nack(ids) {
-for (const id of ids) {
-const entry = this.entries.get(id);
-if (entry) {
-entry.status = "pending";
-entry.retryCount++;
-const backoffMs = Math.min(1e3 * 2 ** entry.retryCount, 3e4);
-entry.retryAfter = Date.now() + backoffMs;
-}
-}
-return Ok(void 0);
-}
-/** Get the number of pending + sending entries */
-async depth() {
-const count = [...this.entries.values()].filter((e) => e.status !== "acked").length;
-return Ok(count);
-}
-/** Remove all entries */
-async clear() {
-this.entries.clear();
-return Ok(void 0);
-}
-};
-
-// ../client/src/sync/applier.ts
-async function applyRemoteDeltas(db, deltas, resolver, pendingQueue) {
-if (deltas.length === 0) {
-return Ok(0);
+// ../client/src/sync/applier.ts
+async function applyRemoteDeltas(db, deltas, resolver, pendingQueue) {
+if (deltas.length === 0) {
+return Ok(0);
 }
 const cursorTableResult = await db.exec(`
 CREATE TABLE IF NOT EXISTS _sync_cursor (
@@ -1415,7 +1208,7 @@ var SyncCoordinator = class {
 return;
 }
 const hlc = this.hlc.now();
-const { generateActionId } = await import("./src-
+const { generateActionId } = await import("./src-3CWNXNX6.js");
 const actionId = await generateActionId({
 clientId: this._clientId,
 hlc,
@@ -1517,80 +1310,6 @@ var SyncCoordinator = class {
 }
 };
 
-// ../client/src/sync/schema-sync.ts
-var SchemaSynchroniser = class {
-constructor(db) {
-this.db = db;
-}
-/**
- * Compare local schema version with server and apply migrations if behind.
- *
- * If the local version is already equal to or ahead of the server version,
- * this is a no-op. Otherwise, the local schema is migrated to match the
- * server schema via `migrateSchema()`, which runs ALTER TABLE ... ADD COLUMN
- * for each new column.
- *
- * @param table - The table name to synchronise
- * @param serverSchema - The server's current TableSchema
- * @param serverVersion - The server's schema version number
- * @returns Ok on success, or Err with a LakeSyncError on failure
- */
-async synchronise(table, serverSchema, serverVersion) {
-const localSchemaResult = await getSchema(this.db, table);
-if (!localSchemaResult.ok) return localSchemaResult;
-const localSchema = localSchemaResult.value;
-if (!localSchema) {
-return Err(
-new SchemaError(
-`Cannot synchronise schema for table "${table}": no local schema registered`
-)
-);
-}
-const localVersionResult = await this.getLocalVersion(table);
-if (!localVersionResult.ok) return localVersionResult;
-const localVersion = localVersionResult.value;
-if (localVersion >= serverVersion) {
-return Ok(void 0);
-}
-const migrateResult = await migrateSchema(this.db, localSchema, serverSchema);
-if (!migrateResult.ok) return migrateResult;
-const updateResult = await this.setLocalVersion(table, serverVersion);
-if (!updateResult.ok) return updateResult;
-return Ok(void 0);
-}
-/**
- * Retrieve the local schema version for a given table from `_lakesync_meta`.
- *
- * @param table - The table name to look up
- * @returns The schema version number, or 0 if the table is not registered
- */
-async getLocalVersion(table) {
-const result = await this.db.query(
-"SELECT schema_version FROM _lakesync_meta WHERE table_name = ?",
-[table]
-);
-if (!result.ok) return result;
-const rows = result.value;
-if (rows.length === 0 || !rows[0]) {
-return Ok(0);
-}
-return Ok(rows[0].schema_version);
-}
-/**
- * Set the local schema version for a given table in `_lakesync_meta`.
- *
- * @param table - The table name to update
- * @param version - The version number to set
- * @returns Ok on success, or Err with a DbError on failure
- */
-async setLocalVersion(table, version) {
-return this.db.exec("UPDATE _lakesync_meta SET schema_version = ? WHERE table_name = ?", [
-version,
-table
-]);
-}
-};
-
 // ../client/src/sync/transport-http.ts
 var HttpTransport = class {
 baseUrl;
@@ -1784,6 +1503,324 @@ async function readStreamingCheckpointDeltas(response) {
 return allDeltas;
 }
 
+// ../client/src/create-client.ts
+async function createClient(config) {
+const dbResult = await LocalDB.open({
+name: config.name,
+backend: config.backend
+});
+const db = unwrapOrThrow(dbResult);
+for (const schema of config.schemas) {
+unwrapOrThrow(await registerSchema(db, schema));
+}
+const transport = new HttpTransport({
+baseUrl: config.gateway.url,
+gatewayId: config.gateway.gatewayId,
+token: config.gateway.token ?? ""
+});
+const queue = config.queue ?? new MemoryQueue();
+const autoSyncMs = config.autoSyncMs ?? 1e4;
+const coordinator = new SyncCoordinator(db, transport, {
+...config.coordinatorConfig,
+queue,
+clientId: config.clientId,
+autoSyncIntervalMs: autoSyncMs
+});
+if (autoSyncMs > 0) {
+coordinator.startAutoSync();
+}
+return {
+coordinator,
+db,
+transport,
+destroy: async () => {
+coordinator.stopAutoSync();
+await db.close();
+}
+};
+}
+
+// ../client/src/queue/idb-action-queue.ts
+import { openDB as openDB3 } from "idb";
+var DB_NAME2 = "lakesync-action-queue";
+var DB_VERSION2 = 1;
+var STORE_NAME3 = "entries";
+function serialiseAction(action) {
+return { ...action, hlc: action.hlc.toString() };
+}
+function deserialiseAction(serialised) {
+return { ...serialised, hlc: BigInt(serialised.hlc) };
+}
+function serialiseEntry2(entry) {
+return { ...entry, action: serialiseAction(entry.action) };
+}
+function deserialiseEntry2(serialised) {
+return { ...serialised, action: deserialiseAction(serialised.action) };
+}
+async function wrapIdbOp2(operation, fn) {
+try {
+return Ok(await fn());
+} catch (error) {
+const message = error instanceof Error ? error.message : String(error);
+return Err(new LakeSyncError(`Failed to ${operation}: ${message}`, "QUEUE_ERROR"));
+}
+}
+var IDBActionQueue = class {
+dbPromise;
+counter = 0;
+/**
+ * Create a new IDB-backed action queue.
+ *
+ * @param dbName - Optional database name. Defaults to `'lakesync-action-queue'`.
+ */
+constructor(dbName = DB_NAME2) {
+this.dbPromise = openDB3(dbName, DB_VERSION2, {
+upgrade(db) {
+const store = db.createObjectStore(STORE_NAME3, { keyPath: "id" });
+store.createIndex("status", "status");
+store.createIndex("createdAt", "createdAt");
+}
+});
+}
+/** Add an action to the queue. */
+async push(action) {
+return wrapIdbOp2("push to action queue", async () => {
+const db = await this.dbPromise;
+const entry = {
+id: `idb-action-${Date.now()}-${++this.counter}`,
+action,
+status: "pending",
+createdAt: Date.now(),
+retryCount: 0
+};
+await db.put(STORE_NAME3, serialiseEntry2(entry));
+return entry;
+});
+}
+/** Peek at pending entries (ordered by createdAt). */
+async peek(limit) {
+return wrapIdbOp2("peek action queue", async () => {
+const db = await this.dbPromise;
+const tx = db.transaction(STORE_NAME3, "readonly");
+const index = tx.objectStore(STORE_NAME3).index("createdAt");
+const results = [];
+let cursor = await index.openCursor();
+while (cursor && results.length < limit) {
+const serialised = cursor.value;
+if (serialised.status === "pending") {
+const entry = deserialiseEntry2(serialised);
+if (entry.retryAfter === void 0 || entry.retryAfter <= Date.now()) {
+results.push(entry);
+}
+}
+cursor = await cursor.continue();
+}
+return results;
+});
+}
+/** Mark entries as currently being sent. */
+async markSending(ids) {
+return wrapIdbOp2("mark sending", async () => {
+const db = await this.dbPromise;
+const tx = db.transaction(STORE_NAME3, "readwrite");
+const store = tx.objectStore(STORE_NAME3);
+for (const id of ids) {
+const serialised = await store.get(id);
+if (serialised?.status === "pending") {
+serialised.status = "sending";
+await store.put(serialised);
+}
+}
+await tx.done;
+});
+}
+/** Acknowledge successful delivery (removes entries). */
+async ack(ids) {
+return wrapIdbOp2("ack", async () => {
+const db = await this.dbPromise;
+const tx = db.transaction(STORE_NAME3, "readwrite");
+for (const id of ids) {
+await tx.objectStore(STORE_NAME3).delete(id);
+}
+await tx.done;
+});
+}
+/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff. */
+async nack(ids) {
+return wrapIdbOp2("nack", async () => {
+const db = await this.dbPromise;
+const tx = db.transaction(STORE_NAME3, "readwrite");
+const store = tx.objectStore(STORE_NAME3);
+for (const id of ids) {
+const serialised = await store.get(id);
+if (serialised) {
+serialised.status = "pending";
+serialised.retryCount++;
+const backoffMs = Math.min(1e3 * 2 ** serialised.retryCount, 3e4);
+serialised.retryAfter = Date.now() + backoffMs;
+await store.put(serialised);
+}
+}
+await tx.done;
+});
+}
+/** Get the number of pending + sending entries. */
+async depth() {
+return wrapIdbOp2("get depth", async () => {
+const db = await this.dbPromise;
+const all = await db.getAll(STORE_NAME3);
+return all.filter((e) => e.status === "pending" || e.status === "sending").length;
+});
+}
+/** Remove all entries. */
+async clear() {
+return wrapIdbOp2("clear action queue", async () => {
+const db = await this.dbPromise;
+await db.clear(STORE_NAME3);
+});
+}
+};
+
+// ../client/src/queue/memory-action-queue.ts
+var MemoryActionQueue = class {
+entries = /* @__PURE__ */ new Map();
+counter = 0;
+/** Add an action to the queue. */
+async push(action) {
+const entry = {
+id: `mem-action-${++this.counter}`,
+action,
+status: "pending",
+createdAt: Date.now(),
+retryCount: 0
+};
+this.entries.set(entry.id, entry);
+return Ok(entry);
+}
+/** Peek at pending entries (ordered by createdAt), skipping entries with future retryAfter. */
+async peek(limit) {
+const now = Date.now();
+const pending = [...this.entries.values()].filter((e) => e.status === "pending" && (e.retryAfter === void 0 || e.retryAfter <= now)).sort((a, b) => a.createdAt - b.createdAt).slice(0, limit);
+return Ok(pending);
+}
+/** Mark entries as currently being sent. */
+async markSending(ids) {
+for (const id of ids) {
+const entry = this.entries.get(id);
+if (entry?.status === "pending") {
+entry.status = "sending";
+}
+}
+return Ok(void 0);
+}
+/** Acknowledge successful delivery (removes entries). */
+async ack(ids) {
+for (const id of ids) {
+this.entries.delete(id);
+}
+return Ok(void 0);
+}
+/** Negative acknowledge — reset to pending with incremented retryCount and exponential backoff. */
+async nack(ids) {
+for (const id of ids) {
+const entry = this.entries.get(id);
+if (entry) {
+entry.status = "pending";
+entry.retryCount++;
+const backoffMs = Math.min(1e3 * 2 ** entry.retryCount, 3e4);
+entry.retryAfter = Date.now() + backoffMs;
+}
+}
+return Ok(void 0);
+}
+/** Get the number of pending + sending entries. */
+async depth() {
+const count = [...this.entries.values()].filter(
+(e) => e.status === "pending" || e.status === "sending"
+).length;
+return Ok(count);
+}
+/** Remove all entries. */
+async clear() {
+this.entries.clear();
+return Ok(void 0);
+}
+};
+
+// ../client/src/sync/schema-sync.ts
+var SchemaSynchroniser = class {
+constructor(db) {
+this.db = db;
+}
+/**
+ * Compare local schema version with server and apply migrations if behind.
+ *
+ * If the local version is already equal to or ahead of the server version,
+ * this is a no-op. Otherwise, the local schema is migrated to match the
+ * server schema via `migrateSchema()`, which runs ALTER TABLE ... ADD COLUMN
+ * for each new column.
+ *
+ * @param table - The table name to synchronise
+ * @param serverSchema - The server's current TableSchema
+ * @param serverVersion - The server's schema version number
+ * @returns Ok on success, or Err with a LakeSyncError on failure
+ */
+async synchronise(table, serverSchema, serverVersion) {
+const localSchemaResult = await getSchema(this.db, table);
+if (!localSchemaResult.ok) return localSchemaResult;
+const localSchema = localSchemaResult.value;
+if (!localSchema) {
+return Err(
+new SchemaError(
+`Cannot synchronise schema for table "${table}": no local schema registered`
+)
+);
+}
+const localVersionResult = await this.getLocalVersion(table);
+if (!localVersionResult.ok) return localVersionResult;
+const localVersion = localVersionResult.value;
+if (localVersion >= serverVersion) {
+return Ok(void 0);
+}
+const migrateResult = await migrateSchema(this.db, localSchema, serverSchema);
+if (!migrateResult.ok) return migrateResult;
+const updateResult = await this.setLocalVersion(table, serverVersion);
+if (!updateResult.ok) return updateResult;
+return Ok(void 0);
+}
+/**
+ * Retrieve the local schema version for a given table from `_lakesync_meta`.
+ *
+ * @param table - The table name to look up
+ * @returns The schema version number, or 0 if the table is not registered
+ */
+async getLocalVersion(table) {
+const result = await this.db.query(
+"SELECT schema_version FROM _lakesync_meta WHERE table_name = ?",
+[table]
+);
+if (!result.ok) return result;
+const rows = result.value;
+if (rows.length === 0 || !rows[0]) {
+return Ok(0);
+}
+return Ok(rows[0].schema_version);
+}
+/**
+ * Set the local schema version for a given table in `_lakesync_meta`.
+ *
+ * @param table - The table name to update
+ * @param version - The version number to set
+ * @returns Ok on success, or Err with a DbError on failure
+ */
+async setLocalVersion(table, version) {
+return this.db.exec("UPDATE _lakesync_meta SET schema_version = ? WHERE table_name = ?", [
+version,
+table
+]);
+}
+};
+
 // ../client/src/sync/transport-local.ts
 var LocalTransport = class {
 constructor(gateway) {
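The hunk above adds two action-queue implementations, `IDBActionQueue` and `MemoryActionQueue`, with the same method surface as the delta queues (`push`, `peek`, `markSending`, `ack`, `nack`, `depth`, `clear`). A sketch of the contract they appear to share; the interface and type names below are assumptions drawn from the method signatures visible in the diff, not exported names.

```ts
// Assumed shape of the action-queue contract implemented by both
// MemoryActionQueue and IDBActionQueue. Names are illustrative only.
type QueueStatus = "pending" | "sending" | "acked";

interface ActionQueueEntry<A> {
  id: string;
  action: A;
  status: QueueStatus;
  createdAt: number;
  retryCount: number;
  retryAfter?: number; // set by nack() for exponential backoff
}

type Result<T> = { ok: true; value: T } | { ok: false; error: Error };

interface ActionQueue<A> {
  push(action: A): Promise<Result<ActionQueueEntry<A>>>;
  peek(limit: number): Promise<Result<ActionQueueEntry<A>[]>>;
  markSending(ids: string[]): Promise<Result<void>>;
  ack(ids: string[]): Promise<Result<void>>;
  nack(ids: string[]): Promise<Result<void>>;
  depth(): Promise<Result<number>>;
  clear(): Promise<Result<void>>;
}
```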
@@ -2037,6 +2074,7 @@ export {
 SyncTracker,
 WebSocketTransport,
 applyRemoteDeltas,
+createClient,
 deleteSnapshot,
 getSchema,
 loadSnapshot,