@loro-dev/flock 3.1.0 → 4.1.0

package/src/index.ts CHANGED
@@ -1,6 +1,7 @@
  import {
  newFlock,
  get_ffi,
+ get_entry_ffi,
  put_json_ffi,
  put_with_meta_ffi,
  delete_ffi,
@@ -8,7 +9,9 @@ import {
  set_peer_id,
  export_json_ffi,
  import_json_ffi,
+ import_json_str_ffi,
  version_ffi,
+ inclusiveVersion_ffi,
  get_max_physical_time_ffi,
  peer_id_ffi,
  kv_to_json_ffi,
@@ -32,6 +35,16 @@ type RawEventEntry = {
  payload?: RawEventPayload;
  };
  type RawEventBatch = { source?: string; events?: RawEventEntry[] };
+ type RawEntryClock = {
+ physicalTime?: number;
+ logicalCounter?: number;
+ peerId?: string;
+ };
+ type RawEntryInfo = {
+ data?: Value;
+ metadata?: MetadataMap;
+ clock?: RawEntryClock;
+ };

  type MaybePromise<T> = T | Promise<T>;

@@ -57,7 +70,17 @@ export type VersionVectorEntry = {
  logicalCounter: number;
  };

- export type VersionVector = Record<string, VersionVectorEntry>;
+ export interface VersionVector {
+ [peer: string]: VersionVectorEntry | undefined;
+ }
+
+ export function encodeVersionVector(vector: VersionVector): Uint8Array {
+ return encodeVersionVectorBinary(vector);
+ }
+
+ export function decodeVersionVector(bytes: Uint8Array): VersionVector {
+ return decodeVersionVectorBinary(bytes);
+ }

  export type Value =
  | string
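The two newly exported helpers round-trip a VersionVector through the compact binary format. A minimal usage sketch, assuming src/index.ts is the package entry point and using a made-up peer id and clock values:

    import { encodeVersionVector, decodeVersionVector, type VersionVector } from "@loro-dev/flock";

    const vv: VersionVector = {
      "peer-a": { physicalTime: 1700000000000, logicalCounter: 3 },
    };
    const bytes = encodeVersionVector(vv);       // Uint8Array, safe to persist or send over the wire
    const restored = decodeVersionVector(bytes); // { "peer-a": { physicalTime: 1700000000000, logicalCounter: 3 } }

    // Entries are now typed as possibly undefined, so guard lookups:
    const entry = restored["peer-a"];
    if (entry) {
      console.log(entry.physicalTime, entry.logicalCounter);
    }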
@@ -77,7 +100,10 @@ export type ExportRecord = {
  m?: MetadataMap;
  };

- export type ExportBundle = { version: number, entries: Record<string, ExportRecord> };
+ export type ExportBundle = {
+ version: number;
+ entries: Record<string, ExportRecord>;
+ };

  export type EntryClock = {
  physicalTime: number;
@@ -85,6 +111,12 @@ export type EntryClock = {
  peerId: string;
  };

+ export type EntryInfo = {
+ data?: Value;
+ metadata: MetadataMap;
+ clock: EntryClock;
+ };
+
  export type ExportPayload = {
  data?: Value;
  metadata?: MetadataMap;
@@ -182,6 +214,7 @@ export type EventBatch = {
  };

  const textEncoder = new TextEncoder();
+ const textDecoder = new TextDecoder();

  function utf8ByteLength(value: string): number {
  return textEncoder.encode(value).length;
@@ -193,7 +226,10 @@ function isValidPeerId(peerId: unknown): peerId is string {

  function createRandomPeerId(): string {
  const id = new Uint8Array(32);
- if (typeof crypto !== "undefined" && typeof crypto.getRandomValues === "function") {
+ if (
+ typeof crypto !== "undefined" &&
+ typeof crypto.getRandomValues === "function"
+ ) {
  crypto.getRandomValues(id);
  } else {
  for (let i = 0; i < 32; i += 1) {
@@ -208,46 +244,277 @@ function normalizePeerId(peerId?: string): string {
  return createRandomPeerId();
  }
  if (!isValidPeerId(peerId)) {
- throw new TypeError("peerId must be a UTF-8 string under 128 bytes");
+ throw new TypeError("peerId must be a UTF-8 string under 128 bytes.");
  }
  return peerId;
  }

- function encodeVersionVector(vv?: VersionVector): RawVersionVector | undefined {
- if (!vv) {
- return undefined;
+ type EncodableVersionVectorEntry = {
+ peer: string;
+ peerBytes: Uint8Array;
+ timestamp: number;
+ counter: number;
+ };
+
+ function comparePeerBytes(a: Uint8Array, b: Uint8Array): number {
+ if (a === b) {
+ return 0;
  }
- const raw: RawVersionVector = {};
- for (const [peer, entry] of Object.entries(vv)) {
- if (!entry) {
- continue;
+ const limit = Math.min(a.length, b.length);
+ for (let i = 0; i < limit; i += 1) {
+ const diff = a[i] - b[i];
+ if (diff !== 0) {
+ return diff;
  }
- if (!isValidPeerId(peer)) {
+ }
+ return a.length - b.length;
+ }
+
+ function collectEncodableVersionVectorEntries(
+ vv?: VersionVector,
+ ): EncodableVersionVectorEntry[] {
+ if (!vv || typeof vv !== "object") {
+ return [];
+ }
+ const entries: EncodableVersionVectorEntry[] = [];
+ for (const [peer, entry] of Object.entries(vv)) {
+ if (!entry || !isValidPeerId(peer)) {
  continue;
  }
  const { physicalTime, logicalCounter } = entry;
- if (typeof physicalTime !== "number" || Number.isNaN(physicalTime)) {
+ if (
+ typeof physicalTime !== "number" ||
+ !Number.isFinite(physicalTime) ||
+ typeof logicalCounter !== "number" ||
+ !Number.isFinite(logicalCounter)
+ ) {
  continue;
  }
- if (!Number.isFinite(logicalCounter)) {
- continue;
+ const peerBytes = textEncoder.encode(peer);
+ entries.push({
+ peer,
+ peerBytes,
+ timestamp: Math.trunc(physicalTime),
+ counter: Math.max(0, Math.trunc(logicalCounter)),
+ });
+ }
+ entries.sort((a, b) => {
+ if (a.timestamp !== b.timestamp) {
+ return a.timestamp - b.timestamp;
+ }
+ const peerCmp = comparePeerBytes(a.peerBytes, b.peerBytes);
+ if (peerCmp !== 0) {
+ return peerCmp;
  }
- raw[peer] = [physicalTime, Math.trunc(logicalCounter)];
+ return a.counter - b.counter;
+ });
+ return entries;
+ }
+
+ function writeUnsignedLeb128(value: number, out: number[]): void {
+ if (!Number.isFinite(value) || value < 0) {
+ throw new TypeError("leb128 values must be finite and non-negative");
+ }
+ let remaining = Math.trunc(value);
+ if (remaining === 0) {
+ out.push(0);
+ return;
+ }
+ while (remaining > 0) {
+ const byte = remaining % 0x80;
+ remaining = Math.floor(remaining / 0x80);
+ out.push(remaining > 0 ? byte | 0x80 : byte);
+ }
+ }
+
+ function writeVarStringBytes(bytes: Uint8Array, out: number[]): void {
+ writeUnsignedLeb128(bytes.length, out);
+ for (let i = 0; i < bytes.length; i += 1) {
+ out.push(bytes[i]);
+ }
+ }
+
+ const VERSION_VECTOR_MAGIC = new Uint8Array([86, 69, 86, 69]); // "VEVE"
+
+ function encodeVersionVectorBinary(vv?: VersionVector): Uint8Array {
+ const entries = collectEncodableVersionVectorEntries(vv);
+ const buffer: number[] = Array.from(VERSION_VECTOR_MAGIC);
+ if (entries.length === 0) {
+ return Uint8Array.from(buffer);
+ }
+
+ let lastTimestamp = 0;
+ for (let i = 0; i < entries.length; i += 1) {
+ const entry = entries[i];
+ if (entry.timestamp < 0) {
+ throw new TypeError("timestamp must be non-negative");
+ }
+ if (i === 0) {
+ writeUnsignedLeb128(entry.timestamp, buffer);
+ lastTimestamp = entry.timestamp;
+ } else {
+ const delta = entry.timestamp - lastTimestamp;
+ if (delta < 0) {
+ throw new TypeError("version vector timestamps must be non-decreasing");
+ }
+ writeUnsignedLeb128(delta, buffer);
+ lastTimestamp = entry.timestamp;
+ }
+
+ writeUnsignedLeb128(entry.counter, buffer);
+ writeVarStringBytes(entry.peerBytes, buffer);
+ }
+
+ return Uint8Array.from(buffer);
+ }
+
+ function decodeUnsignedLeb128(
+ bytes: Uint8Array,
+ offset: number,
+ ): [number, number] {
+ let result = 0;
+ let multiplier = 1;
+ let consumed = 0;
+ while (offset + consumed < bytes.length) {
+ const byte = bytes[offset + consumed];
+ consumed += 1;
+ // Use arithmetic instead of bitwise operations to avoid 32-bit overflow.
+ // JavaScript bitwise operators convert to 32-bit signed integers,
+ // which breaks for values >= 2^31.
+ result += (byte & 0x7f) * multiplier;
+ if ((byte & 0x80) === 0) {
+ break;
+ }
+ multiplier *= 128;
+ }
+ return [result, consumed];
+ }
+
+ function decodeVarString(bytes: Uint8Array, offset: number): [string, number] {
+ const [length, used] = decodeUnsignedLeb128(bytes, offset);
+ const start = offset + used;
+ const end = start + length;
+ if (end > bytes.length) {
+ throw new TypeError("varString length exceeds buffer");
+ }
+ const slice = bytes.subarray(start, end);
+ return [textDecoder.decode(slice), used + length];
+ }
+
+ function hasMagic(bytes: Uint8Array): boolean {
+ return (
+ bytes.length >= 4 &&
+ bytes[0] === VERSION_VECTOR_MAGIC[0] &&
+ bytes[1] === VERSION_VECTOR_MAGIC[1] &&
+ bytes[2] === VERSION_VECTOR_MAGIC[2] &&
+ bytes[3] === VERSION_VECTOR_MAGIC[3]
+ );
+ }
+
+ function decodeLegacyVersionVector(bytes: Uint8Array): VersionVector {
+ let offset = 0;
+ const [count, usedCount] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedCount;
+ const [baseTimestamp, usedBase] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedBase;
+ const vv: VersionVector = {};
+ for (let i = 0; i < count; i += 1) {
+ const [peer, usedPeer] = decodeVarString(bytes, offset);
+ offset += usedPeer;
+ if (!isValidPeerId(peer)) {
+ throw new TypeError("invalid peer id in encoded version vector");
+ }
+ const [delta, usedDelta] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedDelta;
+ const [counter, usedCounter] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedCounter;
+ vv[peer] = {
+ physicalTime: baseTimestamp + delta,
+ logicalCounter: counter,
+ };
+ }
+ return vv;
+ }
+
+ function decodeNewVersionVector(bytes: Uint8Array): VersionVector {
+ let offset = 4;
+ const vv: VersionVector = {};
+ if (offset === bytes.length) {
+ return vv;
+ }
+
+ const [firstTimestamp, usedTs] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedTs;
+ const [firstCounter, usedCounter] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedCounter;
+ const [firstPeer, usedPeer] = decodeVarString(bytes, offset);
+ offset += usedPeer;
+ if (!isValidPeerId(firstPeer)) {
+ throw new TypeError("invalid peer id in encoded version vector");
+ }
+ vv[firstPeer] = {
+ physicalTime: firstTimestamp,
+ logicalCounter: firstCounter,
+ };
+
+ let lastTimestamp = firstTimestamp;
+ while (offset < bytes.length) {
+ const [delta, usedDelta] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedDelta;
+ const [counter, usedCtr] = decodeUnsignedLeb128(bytes, offset);
+ offset += usedCtr;
+ const [peer, usedPeerLen] = decodeVarString(bytes, offset);
+ offset += usedPeerLen;
+ if (!isValidPeerId(peer)) {
+ throw new TypeError("invalid peer id in encoded version vector");
+ }
+ const timestamp = lastTimestamp + delta;
+ if (timestamp < lastTimestamp) {
+ throw new TypeError("version vector timestamps must be non-decreasing");
+ }
+ vv[peer] = { physicalTime: timestamp, logicalCounter: counter };
+ lastTimestamp = timestamp;
+ }
+
+ return vv;
+ }
+
+ function decodeVersionVectorBinary(bytes: Uint8Array): VersionVector {
+ if (hasMagic(bytes)) {
+ return decodeNewVersionVector(bytes);
+ }
+ return decodeLegacyVersionVector(bytes);
+ }
+
+ function encodeVersionVectorForFfi(
+ vv?: VersionVector,
+ ): RawVersionVector | undefined {
+ if (!vv) {
+ return undefined;
+ }
+ const raw: RawVersionVector = {};
+ for (const entry of collectEncodableVersionVectorEntries(vv)) {
+ raw[entry.peer] = [entry.timestamp, entry.counter];
  }
  return raw;
  }

- function normalizePruneBefore(pruneTombstonesBefore?: number): number | undefined {
+ function normalizePruneBefore(
+ pruneTombstonesBefore?: number,
+ ): number | undefined {
  if (pruneTombstonesBefore === undefined) {
  return undefined;
  }
- if (typeof pruneTombstonesBefore !== "number" || !Number.isFinite(pruneTombstonesBefore)) {
+ if (
+ typeof pruneTombstonesBefore !== "number" ||
+ !Number.isFinite(pruneTombstonesBefore)
+ ) {
  return undefined;
  }
  return pruneTombstonesBefore;
  }

- function decodeVersionVector(raw: unknown): VersionVector {
+ function decodeVersionVectorFromRaw(raw: unknown): VersionVector {
  if (raw === null || typeof raw !== "object") {
  return {};
  }
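For reference, a worked example of the layout the new encoder produces, using a made-up single-peer vector: the buffer starts with the "VEVE" magic, then for each entry a LEB128 timestamp (absolute for the first entry, a delta afterwards), a LEB128 logical counter, and a length-prefixed UTF-8 peer id. Buffers without the magic fall back to the legacy decoder.

    // Hypothetical peer "a" with physicalTime 300 and logicalCounter 2.
    const bytes = encodeVersionVector({ a: { physicalTime: 300, logicalCounter: 2 } });
    // bytes = [0x56, 0x45, 0x56, 0x45, // "VEVE" magic
    //          0xac, 0x02,             // LEB128(300): first entry's absolute timestamp
    //          0x02,                   // LEB128(2): logical counter
    //          0x01, 0x61]             // varstring peer id: length 1, then "a"
    decodeVersionVector(bytes); // { a: { physicalTime: 300, logicalCounter: 2 } }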
@@ -263,7 +530,10 @@ function decodeVersionVector(raw: unknown): VersionVector {
  if (typeof physicalTime !== "number" || !Number.isFinite(physicalTime)) {
  continue;
  }
- if (typeof logicalCounter !== "number" || !Number.isFinite(logicalCounter)) {
+ if (
+ typeof logicalCounter !== "number" ||
+ !Number.isFinite(logicalCounter)
+ ) {
  continue;
  }
  result[peer] = {
@@ -284,6 +554,23 @@ function encodeBound(bound?: ScanBound): Record<string, unknown> | undefined {
  return { kind: bound.kind, key: bound.key.slice() };
  }

+ function decodeEntryInfo(raw: unknown): EntryInfo | undefined {
+ if (!raw || typeof raw !== "object") {
+ return undefined;
+ }
+ const info = raw as RawEntryInfo;
+ const clock = normalizeEntryClock(info.clock);
+ if (!clock) {
+ return undefined;
+ }
+ const metadata = normalizeMetadataMap(info.metadata);
+ const result: EntryInfo = { metadata, clock };
+ if ("data" in info) {
+ result.data = cloneJson(info.data as Value);
+ }
+ return result;
+ }
+
  function decodeEventBatch(raw: unknown): EventBatch {
  if (!raw || typeof raw !== "object") {
  return { source: "local", events: [] };
@@ -338,8 +625,9 @@ function normalizeRawEventPayload(
  return result;
  }

- const structuredCloneFn: (<T>(value: T) => T) | undefined =
- (globalThis as typeof globalThis & { structuredClone?: <T>(value: T) => T }).structuredClone;
+ const structuredCloneFn: (<T>(value: T) => T) | undefined = (
+ globalThis as typeof globalThis & { structuredClone?: <T>(value: T) => T }
+ ).structuredClone;

  function cloneJson<T>(value: T): T {
  if (value === undefined) {
@@ -367,10 +655,16 @@ function cloneMetadata(metadata: unknown): MetadataMap | undefined {
  return cloneJson(metadata as MetadataMap);
  }

+ function normalizeMetadataMap(metadata: unknown): MetadataMap {
+ const cloned = cloneMetadata(metadata);
+ return cloned ?? {};
+ }
+
  function decodeClock(record: ExportRecord): EntryClock {
  const rawClock = typeof record.c === "string" ? record.c : "";
  const firstComma = rawClock.indexOf(",");
- const secondComma = firstComma === -1 ? -1 : rawClock.indexOf(",", firstComma + 1);
+ const secondComma =
+ firstComma === -1 ? -1 : rawClock.indexOf(",", firstComma + 1);
  if (firstComma === -1 || secondComma === -1) {
  return { physicalTime: 0, logicalCounter: 0, peerId: "" };
  }
@@ -387,6 +681,29 @@ function decodeClock(record: ExportRecord): EntryClock {
  };
  }

+ function normalizeEntryClock(
+ clock: RawEntryClock | undefined,
+ ): EntryClock | undefined {
+ if (!clock || typeof clock !== "object") {
+ return undefined;
+ }
+ const { physicalTime, logicalCounter, peerId } = clock;
+ if (typeof physicalTime !== "number" || !Number.isFinite(physicalTime)) {
+ return undefined;
+ }
+ if (typeof logicalCounter !== "number" || !Number.isFinite(logicalCounter)) {
+ return undefined;
+ }
+ if (!isValidPeerId(peerId)) {
+ return undefined;
+ }
+ return {
+ physicalTime,
+ logicalCounter: Math.trunc(logicalCounter),
+ peerId,
+ };
+ }
+
  function createExportPayload(record: ExportRecord): ExportPayload {
  const payload: ExportPayload = {};
  if (record.d !== undefined) {
@@ -399,10 +716,7 @@ function createExportPayload(record: ExportRecord): ExportPayload {
  return payload;
  }

- function createPutPayload(
- value: Value,
- metadata?: MetadataMap,
- ): ExportPayload {
+ function createPutPayload(value: Value, metadata?: MetadataMap): ExportPayload {
  const payload: ExportPayload = { data: cloneJson(value) };
  const cleanMetadata = cloneMetadata(metadata);
  if (cleanMetadata !== undefined) {
@@ -511,10 +825,7 @@ function isExportOptions(value: unknown): value is ExportOptions {
  value !== null &&
  (Object.prototype.hasOwnProperty.call(value, "hooks") ||
  Object.prototype.hasOwnProperty.call(value, "from") ||
- Object.prototype.hasOwnProperty.call(
- value,
- "pruneTombstonesBefore",
- ) ||
+ Object.prototype.hasOwnProperty.call(value, "pruneTombstonesBefore") ||
  Object.prototype.hasOwnProperty.call(value, "peerId"))
  );
  }
@@ -564,7 +875,13 @@ export class Flock {
  now?: number,
  ): void {
  const metadataClone = cloneMetadata(metadata);
- put_with_meta_ffi(this.inner, key, JSON.stringify(value), metadataClone, now);
+ put_with_meta_ffi(
+ this.inner,
+ key,
+ JSON.stringify(value),
+ metadataClone,
+ now,
+ );
  }

  private async putWithMetaWithHooks(
@@ -601,15 +918,19 @@ export class Flock {

  /**
  * Put a value into the flock. If the given entry already exists, this insert will be skipped.
- * @param key
- * @param value
- * @param now
+ * @param key 
+ * @param value 
+ * @param now 
  */
  put(key: KeyPart[], value: Value, now?: number): void {
  put_json_ffi(this.inner, key, JSON.stringify(value), now);
  }

- putWithMeta(key: KeyPart[], value: Value, options?: PutWithMetaOptions): void | Promise<void> {
+ putWithMeta(
+ key: KeyPart[],
+ value: Value,
+ options?: PutWithMetaOptions,
+ ): void | Promise<void> {
  const opts = options ?? {};
  if (opts.hooks?.transform) {
  return this.putWithMetaWithHooks(key, value, opts);
@@ -623,8 +944,8 @@ export class Flock {

  /**
  * Delete a value from the flock. If the given entry does not exist, this delete will be skipped.
- * @param key
- * @param now
+ * @param key 
+ * @param now 
  */
  delete(key: KeyPart[], now?: number): void {
  delete_ffi(this.inner, key, now);
@@ -634,12 +955,41 @@ export class Flock {
  return get_ffi(this.inner, key) as Value | undefined;
  }

+ /**
+ * Returns the full entry payload (data, metadata, and clock) for a key.
+ *
+ * Unlike `get`, this distinguishes between a missing key (`undefined`) and a
+ * tombstone (returns the clock and metadata with `data` omitted). Metadata is
+ * cloned and defaults to `{}` when absent.
+ */
+ getEntry(key: KeyPart[]): EntryInfo | undefined {
+ const raw = get_entry_ffi(this.inner, key) as RawEntryInfo | undefined;
+ return decodeEntryInfo(raw);
+ }
+
  merge(other: Flock): void {
  merge(this.inner, other.inner);
  }

+ /**
+ * Returns the exclusive version vector, which only includes peers that have
+ * at least one entry in the current state. This is consistent with the state
+ * after export and re-import.
+ *
+ * Use this version when sending to other peers for incremental sync.
+ */
  version(): VersionVector {
- return decodeVersionVector(version_ffi(this.inner));
+ return decodeVersionVectorFromRaw(version_ffi(this.inner));
+ }
+
+ /**
+ * Returns the inclusive version vector, which includes all peers ever seen,
+ * even if their entries have been overridden by other peers.
+ *
+ * Use this version when checking if you have received all data from another peer.
+ */
+ inclusiveVersion(): VersionVector {
+ return decodeVersionVectorFromRaw(inclusiveVersion_ffi(this.inner));
  }

  private exportJsonInternal(
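A minimal usage sketch for the new getEntry API; string key parts and a no-argument Flock constructor are assumed here, since this excerpt does not show the constructor signature:

    const flock = new Flock();
    flock.put(["users", "alice"], { name: "Alice" });
    flock.delete(["users", "alice"]);

    flock.get(["users", "alice"]);      // undefined: the value was deleted
    flock.getEntry(["users", "alice"]); // tombstone: { metadata: {}, clock: { physicalTime, logicalCounter, peerId } }, no `data`
    flock.getEntry(["users", "bob"]);   // undefined: the key was never written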
@@ -648,16 +998,19 @@ export class Flock {
  peerId?: string,
  ): ExportBundle {
  const pruneBefore = normalizePruneBefore(pruneTombstonesBefore);
- const normalizedPeerId = peerId !== undefined && isValidPeerId(peerId) ? peerId : undefined;
+ const normalizedPeerId =
+ peerId !== undefined && isValidPeerId(peerId) ? peerId : undefined;
  return export_json_ffi(
  this.inner,
- encodeVersionVector(from),
+ encodeVersionVectorForFfi(from),
  pruneBefore,
  normalizedPeerId,
  ) as ExportBundle;
  }

- private async exportJsonWithHooks(options: ExportOptions): Promise<ExportBundle> {
+ private async exportJsonWithHooks(
+ options: ExportOptions,
+ ): Promise<ExportBundle> {
  const base = this.exportJsonInternal(
  options.from,
  options.pruneTombstonesBefore,
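Taken together with the exportJson overloads below, the two version vectors support incremental sync roughly as follows; replicaA and replicaB are hypothetical instances, and applying the bundle on the receiving side is only sketched:

    // The receiver advertises what it already holds (exclusive version).
    const known = replicaB.version();

    // The sender exports only entries the receiver has not seen yet.
    const bundle = replicaA.exportJson(known);
    // ...transfer `bundle` and apply it on replicaB via the import API...

    // To check whether replicaB has seen everything replicaA ever observed,
    // compare against the inclusive version, which also covers overridden peers.
    const everythingSeenByA = replicaA.inclusiveVersion();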
@@ -684,10 +1037,7 @@ export class Flock {

  exportJson(): ExportBundle;
  exportJson(from: VersionVector): ExportBundle;
- exportJson(
- from: VersionVector,
- pruneTombstonesBefore: number,
- ): ExportBundle;
+ exportJson(from: VersionVector, pruneTombstonesBefore: number): ExportBundle;
  exportJson(options: ExportOptions): Promise<ExportBundle>;
  exportJson(
  arg?: VersionVector | ExportOptions,
@@ -703,11 +1053,15 @@ export class Flock {
  }

  private importJsonInternal(bundle: ExportBundle): ImportReport {
- const report = import_json_ffi(this.inner, bundle) as RawImportReport | undefined;
+ const report = import_json_ffi(this.inner, bundle) as
+ | RawImportReport
+ | undefined;
  return decodeImportReport(report);
  }

- private async importJsonWithHooks(options: ImportOptions): Promise<ImportReport> {
+ private async importJsonWithHooks(
+ options: ImportOptions,
+ ): Promise<ImportReport> {
  const preprocess = options.hooks?.preprocess;
  const working = preprocess ? cloneBundle(options.bundle) : options.bundle;
  const skippedByHooks: Array<{ key: KeyPart[]; reason: string }> = [];
@@ -747,6 +1101,13 @@ export class Flock {
  return this.importJsonInternal(arg);
  }

+ importJsonStr(bundle: string): ImportReport {
+ const report = import_json_str_ffi(this.inner, bundle) as
+ | RawImportReport
+ | undefined;
+ return decodeImportReport(report);
+ }
+
  getMaxPhysicalTime(): number {
  return Number(get_max_physical_time_ffi(this.inner));
  }
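A usage sketch for the new importJsonStr method, assuming the string argument is the JSON-serialized ExportBundle produced by exportJson (the excerpt only shows that the string is handed to import_json_str_ffi):

    const text = JSON.stringify(replicaA.exportJson()); // e.g. a bundle received as JSON text
    const report = replicaB.importJsonStr(text);         // parsed and applied on the FFI side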
@@ -787,7 +1148,9 @@ export class Flock {
  const start = encodeBound(options.start);
  const end = encodeBound(options.end);
  const prefix = options.prefix ? options.prefix.slice() : undefined;
- const rows = scan_ffi(this.inner, start, end, prefix) as RawScanRow[] | undefined;
+ const rows = scan_ffi(this.inner, start, end, prefix) as
+ | RawScanRow[]
+ | undefined;
  if (!Array.isArray(rows)) {
  return [];
  }