json-patch-to-crdt 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -39,6 +39,33 @@ function observeDot(vv, dot) {
39
39
  if ((vv[dot.actor] ?? 0) < dot.ctr) vv[dot.actor] = dot.ctr;
40
40
  }
41
41
 
42
//#endregion
//#region src/depth.ts
/** Hard upper bound on nesting depth for document traversals. */
const MAX_TRAVERSAL_DEPTH = 16384;
/**
 * Error thrown when a traversal exceeds the allowed nesting depth.
 * Carries `code`/`reason` fields shaped like the apply-error result objects.
 */
var TraversalDepthError = class extends Error {
	code = 409;
	reason = "MAX_DEPTH_EXCEEDED";
	depth;
	maxDepth;
	constructor(depth, maxDepth = MAX_TRAVERSAL_DEPTH) {
		super(`maximum nesting depth ${maxDepth} exceeded at depth ${depth}`);
		this.name = "TraversalDepthError";
		this.depth = depth;
		this.maxDepth = maxDepth;
	}
};
/** Throw a `TraversalDepthError` when `depth` exceeds `maxDepth`; otherwise a no-op. */
function assertTraversalDepth(depth, maxDepth = MAX_TRAVERSAL_DEPTH) {
	if (depth > maxDepth) throw new TraversalDepthError(depth, maxDepth);
}
/** Map a `TraversalDepthError` onto the plain `{ ok: false, ... }` apply-error shape. */
function toDepthApplyError(error) {
	const { code, reason, message } = error;
	return {
		ok: false,
		code,
		reason,
		message
	};
}
68
+
42
69
  //#endregion
43
70
  //#region src/dot.ts
44
71
  function compareDot(a, b) {
@@ -84,15 +111,26 @@ function rgaLinearizeIds(seq) {
84
111
  if (cached && cached.version === ver) return cached.ids;
85
112
  const idx = rgaChildrenIndex(seq);
86
113
  const out = [];
87
- function walk(prev) {
88
- const children = idx.get(prev);
89
- if (!children) return;
90
- for (const c of children) {
91
- if (!c.tombstone) out.push(c.id);
92
- walk(c.id);
114
+ const stack = [];
115
+ const rootChildren = idx.get(HEAD);
116
+ if (rootChildren) stack.push({
117
+ children: rootChildren,
118
+ index: 0
119
+ });
120
+ while (stack.length > 0) {
121
+ const frame = stack[stack.length - 1];
122
+ if (frame.index >= frame.children.length) {
123
+ stack.pop();
124
+ continue;
93
125
  }
126
+ const child = frame.children[frame.index++];
127
+ if (!child.tombstone) out.push(child.id);
128
+ const grandchildren = idx.get(child.id);
129
+ if (grandchildren) stack.push({
130
+ children: grandchildren,
131
+ index: 0
132
+ });
94
133
  }
95
- walk(HEAD);
96
134
  linearCache.set(seq, {
97
135
  version: ver,
98
136
  ids: out
@@ -117,6 +155,61 @@ function rgaDelete(seq, id) {
117
155
  e.tombstone = true;
118
156
  bumpVersion(seq);
119
157
  }
158
/**
 * Prune tombstoned elements that are causally stable and have no live descendants
 * depending on them for sequence traversal.
 *
 * Returns the number of removed elements.
 */
function rgaCompactTombstones(seq, isStable) {
	if (seq.elems.size === 0) return 0;
	// Index elements by their `prev` anchor so children can be found without
	// rescanning the element map, and collect traversal roots: elements
	// anchored at HEAD or at a parent that no longer exists.
	const children = /* @__PURE__ */ new Map();
	const roots = [];
	for (const elem of seq.elems.values()) {
		const byPrev = children.get(elem.prev);
		if (byPrev) byPrev.push(elem.id);
		else children.set(elem.prev, [elem.id]);
		if (elem.prev === HEAD || !seq.elems.has(elem.prev)) roots.push(elem.id);
	}
	const removable = /* @__PURE__ */ new Set();
	const visited = /* @__PURE__ */ new Set();
	const stack = [];
	// Seed the explicit DFS stack; `visited` is only marked when a frame is
	// first processed, so duplicate pushes are also filtered inside the loop.
	const pushRoot = (id) => {
		if (!visited.has(id)) stack.push({
			id,
			expanded: false
		});
	};
	for (const id of roots) pushRoot(id);
	// Also seed every element id so no element is skipped even if it was not
	// reachable from a detected root.
	for (const id of seq.elems.keys()) pushRoot(id);
	// Iterative post-order DFS: each node is pushed twice — once unexpanded
	// (to schedule its children) and once expanded (processed after them).
	while (stack.length > 0) {
		const frame = stack.pop();
		if (!frame.expanded) {
			if (visited.has(frame.id)) continue;
			visited.add(frame.id);
			stack.push({
				id: frame.id,
				expanded: true
			});
			const childIds = children.get(frame.id);
			if (childIds) {
				for (const childId of childIds) if (!visited.has(childId)) stack.push({
					id: childId,
					expanded: false
				});
			}
			continue;
		}
		// Post-order phase: children are already decided, so a tombstone is
		// removable only when causally stable AND all of its children are
		// removable too — otherwise a surviving child would lose the `prev`
		// anchor it needs for traversal.
		const elem = seq.elems.get(frame.id);
		if (!elem || !elem.tombstone || !isStable(elem.insDot)) continue;
		const childIds = children.get(frame.id);
		if (!childIds || childIds.every((childId) => removable.has(childId))) removable.add(frame.id);
	}
	if (removable.size === 0) return 0;
	for (const id of removable) seq.elems.delete(id);
	// Bump the sequence version so any cached linearization is recomputed.
	bumpVersion(seq);
	return removable.size;
}
120
213
  function rgaIdAtIndex(seq, index) {
121
214
  return rgaLinearizeIds(seq)[index];
122
215
  }
@@ -128,17 +221,100 @@ function rgaPrevForInsertAtIndex(seq, index) {
128
221
 
129
222
  //#endregion
130
223
  //#region src/materialize.ts
131
/** Convert a CRDT node graph into a plain JSON value using an explicit stack. */
function materialize(node) {
	// Registers hold their JSON value directly.
	if (node.kind === "lww") return node.value;
	const root = node.kind === "obj" ? {} : [];
	// Stack of in-progress container frames. Scalar children are written into
	// the parent's `out` immediately; container children get their own frame.
	const stack = [];
	if (node.kind === "obj") stack.push({
		kind: "obj",
		depth: 0,
		entries: Array.from(node.entries.entries(), ([key, value]) => [key, value.node]),
		index: 0,
		out: root
	});
	else stack.push({
		kind: "seq",
		depth: 0,
		ids: rgaLinearizeIds(node),
		index: 0,
		seq: node,
		out: root
	});
	while (stack.length > 0) {
		// Peek (not pop): the frame stays until its cursor is exhausted.
		const frame = stack[stack.length - 1];
		if (frame.kind === "obj") {
			// All entries of this object have been emitted.
			if (frame.index >= frame.entries.length) {
				stack.pop();
				continue;
			}
			const [key, child] = frame.entries[frame.index++];
			const childDepth = frame.depth + 1;
			assertTraversalDepth(childDepth);
			if (child.kind === "lww") {
				frame.out[key] = child.value;
				continue;
			}
			if (child.kind === "obj") {
				const outObj = {};
				frame.out[key] = outObj;
				stack.push({
					kind: "obj",
					depth: childDepth,
					entries: Array.from(child.entries.entries(), ([childKey, value]) => [childKey, value.node]),
					index: 0,
					out: outObj
				});
				continue;
			}
			// Remaining case: child is a sequence node.
			const outArr = [];
			frame.out[key] = outArr;
			stack.push({
				kind: "seq",
				depth: childDepth,
				ids: rgaLinearizeIds(child),
				index: 0,
				seq: child,
				out: outArr
			});
			continue;
		}
		// Sequence frame: all linearized ids have been emitted.
		if (frame.index >= frame.ids.length) {
			stack.pop();
			continue;
		}
		const id = frame.ids[frame.index++];
		// Linearized ids come from live (non-tombstoned) elements, so the
		// lookup is expected to succeed.
		const child = frame.seq.elems.get(id).value;
		const childDepth = frame.depth + 1;
		assertTraversalDepth(childDepth);
		if (child.kind === "lww") {
			frame.out.push(child.value);
			continue;
		}
		if (child.kind === "obj") {
			const outObj = {};
			frame.out.push(outObj);
			stack.push({
				kind: "obj",
				depth: childDepth,
				entries: Array.from(child.entries.entries(), ([key, value]) => [key, value.node]),
				index: 0,
				out: outObj
			});
			continue;
		}
		const outArr = [];
		frame.out.push(outArr);
		stack.push({
			kind: "seq",
			depth: childDepth,
			ids: rgaLinearizeIds(child),
			index: 0,
			seq: child,
			out: outArr
		});
	}
	return root;
}
143
319
 
144
320
  //#endregion
@@ -183,6 +359,19 @@ function objRemove(obj, key, dot) {
183
359
  if (!curDel || compareDot(curDel, dot) <= 0) obj.tombstone.set(key, dot);
184
360
  obj.entries.delete(key);
185
361
  }
362
/**
 * Prune object tombstones that satisfy a caller-provided stability predicate.
 * Returns the number of removed tombstone records.
 */
function objCompactTombstones(obj, isStable) {
	// Collect the stable keys first, then delete them in a second pass.
	const stableKeys = [];
	for (const [key, dot] of obj.tombstone.entries()) {
		if (isStable(dot)) stableKeys.push(key);
	}
	for (const key of stableKeys) obj.tombstone.delete(key);
	return stableKeys.length;
}
186
375
 
187
376
  //#endregion
188
377
  //#region src/types.ts
@@ -194,6 +383,7 @@ const ROOT_KEY = "@@crdt/root";
194
383
 
195
384
  //#endregion
196
385
  //#region src/patch.ts
386
/** Default cap on the LCS DP table size ((n+1)*(m+1) cells); larger array diffs fall back to a whole-array replace. */
const DEFAULT_LCS_MAX_CELLS = 25e4;
197
387
  /** Structured compile error used to map patch validation failures to typed reasons. */
198
388
  var PatchCompileError = class extends Error {
199
389
  reason;
@@ -207,6 +397,17 @@ var PatchCompileError = class extends Error {
207
397
  this.opIndex = opIndex;
208
398
  }
209
399
  };
400
/** Structured lookup error thrown by `getAtJson`. */
var JsonLookupError = class extends Error {
	// Machine-readable failure code (e.g. "MISSING_KEY", "NON_CONTAINER").
	code;
	// The path segment that failed to resolve.
	segment;
	constructor(code, segment, message) {
		super(message);
		this.name = "JsonLookupError";
		this.segment = segment;
		this.code = code;
	}
};
210
411
  /**
211
412
  * Parse an RFC 6901 JSON Pointer into a path array, unescaping `~1` and `~0`.
212
413
  * @param ptr - A JSON Pointer string (e.g. `"/a/b"` or `""`).
@@ -215,13 +416,37 @@ var PatchCompileError = class extends Error {
215
416
function parseJsonPointer(ptr) {
	if (ptr === "") return [];
	if (!ptr.startsWith("/")) throw new Error(`Invalid pointer: ${ptr}`);
	// Split on "/" first, then decode each token's ~0/~1 escapes individually.
	const tokens = ptr.slice(1).split("/");
	return tokens.map((token) => unescapeJsonPointerToken(token));
}
220
421
/** Convert a path array back to an RFC 6901 JSON Pointer string. */
function stringifyJsonPointer(path) {
	if (path.length === 0) return "";
	// Escape each segment, then join with "/" under a leading slash.
	return "/" + path.map(escapeJsonPointer).join("/");
}
426
/**
 * Decode a single RFC 6901 pointer token: `~0` -> `~`, `~1` -> `/`.
 * Any other `~` usage (including a trailing `~`) is rejected.
 */
function unescapeJsonPointerToken(token) {
	const parts = [];
	let i = 0;
	while (i < token.length) {
		const ch = token[i];
		if (ch !== "~") {
			parts.push(ch);
			i += 1;
			continue;
		}
		const next = token[i + 1];
		if (next === "0" || next === "1") {
			// Single-pass decode: "~01" yields "~1", never a second unescape.
			parts.push(next === "0" ? "~" : "/");
			i += 2;
			continue;
		}
		const sequence = next === void 0 ? "~" : `~${next}`;
		throw new Error(`Invalid pointer escape sequence '${sequence}'`);
	}
	return parts.join("");
}
225
450
  /**
226
451
  * Navigate a JSON value by path and return the value at that location.
227
452
  * Throws if the path is invalid, out of bounds, or traverses a non-container.
@@ -229,14 +454,14 @@ function stringifyJsonPointer(path) {
229
454
function getAtJson(base, path) {
	let cur = base;
	for (const seg of path) {
		if (Array.isArray(cur)) {
			// Arrays only accept canonical base-10 index tokens.
			if (!ARRAY_INDEX_TOKEN_PATTERN.test(seg)) throw new JsonLookupError("EXPECTED_ARRAY_INDEX", seg, `Expected array index, got '${seg}'`);
			const idx = Number(seg);
			if (idx < 0 || idx >= cur.length) throw new JsonLookupError("INDEX_OUT_OF_BOUNDS", seg, `Index out of bounds at '${seg}'`);
			cur = cur[idx];
			continue;
		}
		if (cur && typeof cur === "object") {
			if (!(seg in cur)) throw new JsonLookupError("MISSING_KEY", seg, `Missing key '${seg}'`);
			cur = cur[seg];
			continue;
		}
		// Scalars and null cannot be descended into.
		throw new JsonLookupError("NON_CONTAINER", seg, `Cannot traverse into non-container at '${seg}'`);
	}
	return cur;
}
242
467
  /**
@@ -254,7 +479,7 @@ function compileJsonPatchToIntent(baseJson, patch, options = {}) {
254
479
  for (let opIndex = 0; opIndex < patch.length; opIndex++) {
255
480
  const op = patch[opIndex];
256
481
  const compileBase = semantics === "sequential" ? workingBase : baseJson;
257
- intents.push(...compileSingleOp(compileBase, op, opIndex));
482
+ intents.push(...compileSingleOp(compileBase, op, opIndex, semantics));
258
483
  if (semantics === "sequential") workingBase = applyPatchOpToJson(workingBase, op, opIndex);
259
484
  }
260
485
  return intents;
@@ -277,7 +502,15 @@ function diffValue(path, base, next, ops, options) {
277
502
  if (jsonEquals(base, next)) return;
278
503
  if (Array.isArray(base) || Array.isArray(next)) {
279
504
  if ((options.arrayStrategy ?? "lcs") === "lcs" && Array.isArray(base) && Array.isArray(next)) {
280
- diffArray(path, base, next, ops, options);
505
+ if (!shouldUseLcsDiff(base.length, next.length, options.lcsMaxCells)) {
506
+ ops.push({
507
+ op: "replace",
508
+ path: stringifyJsonPointer(path),
509
+ value: next
510
+ });
511
+ return;
512
+ }
513
+ diffArray(path, base, next, ops);
281
514
  return;
282
515
  }
283
516
  ops.push({
@@ -313,7 +546,7 @@ function diffValue(path, base, next, ops, options) {
313
546
  }
314
547
  for (const key of baseKeys) if (nextSet.has(key)) diffValue([...path, key], base[key], next[key], ops, options);
315
548
  }
316
- function diffArray(path, base, next, ops, _options) {
549
+ function diffArray(path, base, next, ops) {
317
550
  const n = base.length;
318
551
  const m = next.length;
319
552
  const lcs = Array.from({ length: n + 1 }, () => Array(m + 1).fill(0));
@@ -353,6 +586,12 @@ function diffArray(path, base, next, ops, _options) {
353
586
  }
354
587
  ops.push(...compactArrayOps(localOps));
355
588
  }
589
/**
 * Decide whether the LCS array diff is affordable for the given lengths.
 * An explicit Infinity cap always allows it; a non-finite or sub-1 cap
 * always rejects it; otherwise the DP table cell count must fit the cap.
 */
function shouldUseLcsDiff(baseLength, nextLength, lcsMaxCells) {
	if (lcsMaxCells === Number.POSITIVE_INFINITY) return true;
	const cap = lcsMaxCells ?? DEFAULT_LCS_MAX_CELLS;
	if (!Number.isFinite(cap) || cap < 1) return false;
	const cells = (baseLength + 1) * (nextLength + 1);
	return cells <= cap;
}
356
595
  function compactArrayOps(ops) {
357
596
  const out = [];
358
597
  for (let i = 0; i < ops.length; i++) {
@@ -397,6 +636,7 @@ function jsonEquals(a, b) {
397
636
/** True for non-null, non-array objects. */
function isPlainObject(value) {
	if (typeof value !== "object") return false;
	if (value === null) return false;
	return !Array.isArray(value);
}
/** Canonical base-10 array index token: "0" or digits with no leading zero. */
const ARRAY_INDEX_TOKEN_PATTERN = /^(0|[1-9][0-9]*)$/;
/** Own-property check routed through `Object.prototype` so it works regardless of the value's own prototype. */
function hasOwn(value, key) {
	return Object.prototype.hasOwnProperty.call(value, key);
}
@@ -407,7 +647,7 @@ function pathValueAt(base, path) {
407
647
  function assertNever$1(_value, message) {
408
648
  throw new Error(message);
409
649
  }
410
- function compileSingleOp(baseJson, op, opIndex) {
650
+ function compileSingleOp(baseJson, op, opIndex, semantics) {
411
651
  if (op.op === "test") return [{
412
652
  t: "Test",
413
653
  path: parsePointerOrThrow(op.path, op.path, opIndex),
@@ -418,15 +658,29 @@ function compileSingleOp(baseJson, op, opIndex) {
418
658
  const toPath = parsePointerOrThrow(op.path, op.path, opIndex);
419
659
  if (op.op === "move" && isStrictDescendantPath(fromPath, toPath)) throw compileError("INVALID_MOVE", `cannot move a value into one of its descendants at ${op.path}`, op.path, opIndex);
420
660
  const val = lookupValueOrThrow(baseJson, fromPath, op.from, opIndex);
661
+ if (op.op === "move" && isSamePath(fromPath, toPath)) return [];
662
+ if (op.op === "move" && semantics === "sequential") {
663
+ const removeOp = {
664
+ op: "remove",
665
+ path: op.from
666
+ };
667
+ const addOp = {
668
+ op: "add",
669
+ path: op.path,
670
+ value: val
671
+ };
672
+ const baseAfterRemove = applyPatchOpToJson(baseJson, removeOp, opIndex);
673
+ return [...compileSingleOp(baseJson, removeOp, opIndex, semantics), ...compileSingleOp(baseAfterRemove, addOp, opIndex, semantics)];
674
+ }
421
675
  const out = compileSingleOp(baseJson, {
422
676
  op: "add",
423
677
  path: op.path,
424
678
  value: val
425
- }, opIndex);
679
+ }, opIndex, semantics);
426
680
  if (op.op === "move") out.push(...compileSingleOp(baseJson, {
427
681
  op: "remove",
428
682
  path: op.from
429
- }, opIndex));
683
+ }, opIndex, semantics));
430
684
  return out;
431
685
  }
432
686
  const path = parsePointerOrThrow(op.path, op.path, opIndex);
@@ -562,7 +816,7 @@ function parseArrayIndexToken(token, op, arrLength, path, opIndex) {
562
816
  if (op !== "add") throw compileError("INVALID_POINTER", `'-' index is only valid for add at ${path}`, path, opIndex);
563
817
  return Number.POSITIVE_INFINITY;
564
818
  }
565
- if (!/^[0-9]+$/.test(token)) throw compileError("INVALID_POINTER", `expected array index at ${path}`, path, opIndex);
819
+ if (!ARRAY_INDEX_TOKEN_PATTERN.test(token)) throw compileError("INVALID_POINTER", `expected array index at ${path}`, path, opIndex);
566
820
  const index = Number(token);
567
821
  if (!Number.isSafeInteger(index)) throw compileError("OUT_OF_BOUNDS", `array index is too large at ${path}`, path, opIndex);
568
822
  if (op === "add") {
@@ -571,12 +825,36 @@ function parseArrayIndexToken(token, op, arrLength, path, opIndex) {
571
825
  return index;
572
826
  }
573
827
// Wrap a lookup failure as a PatchCompileError with a typed reason.
function compileErrorFromLookup(error, path, opIndex) {
	const { reason, message } = mapLookupErrorToPatchReason(error);
	return compileError(reason, message, path, opIndex);
}
831
/** Translate a lookup failure into a `{ reason, message }` pair for patch compile errors. */
function mapLookupErrorToPatchReason(error) {
	if (error instanceof JsonLookupError) {
		// Typed lookup codes map 1:1 onto compile-error reasons; anything
		// unknown degrades to the generic INVALID_PATCH.
		const reasonByCode = new Map([
			["EXPECTED_ARRAY_INDEX", "INVALID_POINTER"],
			["INDEX_OUT_OF_BOUNDS", "OUT_OF_BOUNDS"],
			["MISSING_KEY", "MISSING_PARENT"],
			["NON_CONTAINER", "INVALID_TARGET"]
		]);
		return {
			reason: reasonByCode.get(error.code) ?? "INVALID_PATCH",
			message: error.message
		};
	}
	return {
		reason: "INVALID_PATCH",
		message: error instanceof Error ? error.message : "invalid path"
	};
}
581
859
  function compileError(reason, message, path, opIndex) {
582
860
  return new PatchCompileError(reason, message, path, opIndex);
@@ -586,6 +864,11 @@ function isStrictDescendantPath(from, to) {
586
864
  for (let i = 0; i < from.length; i++) if (from[i] !== to[i]) return false;
587
865
  return true;
588
866
  }
867
/** Structural equality for two path arrays: same length, same segments in order. */
function isSamePath(a, b) {
	return a.length === b.length && a.every((seg, i) => seg === b[i]);
}
589
872
 
590
873
  //#endregion
591
874
  //#region src/doc.ts
@@ -688,6 +971,10 @@ function ensureSeqAtPath(head, path, dotForCreate) {
688
971
  return head.root;
689
972
  }
690
973
// Depth-checked entry point: delegates with the traversal depth counter at 0.
function deepNodeFromJson(value, dot) {
	return deepNodeFromJsonWithDepth(value, dot, 0);
}
976
+ function deepNodeFromJsonWithDepth(value, dot, depth) {
977
+ assertTraversalDepth(depth);
691
978
  if (value === null || typeof value === "string" || typeof value === "number" || typeof value === "boolean") return newReg(value, dot);
692
979
  if (Array.isArray(value)) {
693
980
  const seq = newSeq();
@@ -699,40 +986,123 @@ function deepNodeFromJson(value, dot) {
699
986
  ctr: ++ctr
700
987
  };
701
988
  const id = dotToElemId(childDot);
702
- rgaInsertAfter(seq, prev, id, childDot, deepNodeFromJson(v, childDot));
989
+ rgaInsertAfter(seq, prev, id, childDot, deepNodeFromJsonWithDepth(v, childDot, depth + 1));
703
990
  prev = id;
704
991
  }
705
992
  return seq;
706
993
  }
707
994
  const obj = newObj();
708
- for (const [k, v] of Object.entries(value)) objSet(obj, k, deepNodeFromJson(v, dot), dot);
995
+ for (const [k, v] of Object.entries(value)) objSet(obj, k, deepNodeFromJsonWithDepth(v, dot, depth + 1), dot);
709
996
  return obj;
710
997
  }
711
998
  function nodeFromJson(value, nextDot) {
712
- if (value === null || typeof value === "string" || typeof value === "number" || typeof value === "boolean") return newReg(value, nextDot());
713
- if (Array.isArray(value)) {
714
- const seq = newSeq();
715
- let prev = HEAD;
716
- for (const v of value) {
717
- const insDot = nextDot();
718
- const id = dotToElemId(insDot);
719
- rgaInsertAfter(seq, prev, id, insDot, nodeFromJson(v, nextDot));
720
- prev = id;
999
+ if (isJsonPrimitive(value)) return newReg(value, nextDot());
1000
+ const root = Array.isArray(value) ? newSeq() : newObj();
1001
+ const stack = [];
1002
+ if (Array.isArray(value)) stack.push({
1003
+ kind: "seq",
1004
+ depth: 0,
1005
+ values: value,
1006
+ index: 0,
1007
+ prev: HEAD,
1008
+ target: root
1009
+ });
1010
+ else stack.push({
1011
+ kind: "obj",
1012
+ depth: 0,
1013
+ entries: Object.entries(value),
1014
+ index: 0,
1015
+ target: root
1016
+ });
1017
+ while (stack.length > 0) {
1018
+ const frame = stack[stack.length - 1];
1019
+ if (frame.kind === "obj") {
1020
+ if (frame.index >= frame.entries.length) {
1021
+ stack.pop();
1022
+ continue;
1023
+ }
1024
+ const [key, childValue] = frame.entries[frame.index++];
1025
+ const childDepth = frame.depth + 1;
1026
+ assertTraversalDepth(childDepth);
1027
+ const entryDot = nextDot();
1028
+ if (isJsonPrimitive(childValue)) {
1029
+ objSet(frame.target, key, newReg(childValue, nextDot()), entryDot);
1030
+ continue;
1031
+ }
1032
+ if (Array.isArray(childValue)) {
1033
+ const childSeq = newSeq();
1034
+ objSet(frame.target, key, childSeq, entryDot);
1035
+ stack.push({
1036
+ kind: "seq",
1037
+ depth: childDepth,
1038
+ values: childValue,
1039
+ index: 0,
1040
+ prev: HEAD,
1041
+ target: childSeq
1042
+ });
1043
+ continue;
1044
+ }
1045
+ const childObj = newObj();
1046
+ objSet(frame.target, key, childObj, entryDot);
1047
+ stack.push({
1048
+ kind: "obj",
1049
+ depth: childDepth,
1050
+ entries: Object.entries(childValue),
1051
+ index: 0,
1052
+ target: childObj
1053
+ });
1054
+ continue;
721
1055
  }
722
- return seq;
723
- }
724
- const obj = newObj();
725
- for (const [k, v] of Object.entries(value)) {
726
- const entryDot = nextDot();
727
- objSet(obj, k, nodeFromJson(v, nextDot), entryDot);
1056
+ if (frame.index >= frame.values.length) {
1057
+ stack.pop();
1058
+ continue;
1059
+ }
1060
+ const childValue = frame.values[frame.index++];
1061
+ const childDepth = frame.depth + 1;
1062
+ assertTraversalDepth(childDepth);
1063
+ const insDot = nextDot();
1064
+ const id = dotToElemId(insDot);
1065
+ if (isJsonPrimitive(childValue)) {
1066
+ rgaInsertAfter(frame.target, frame.prev, id, insDot, newReg(childValue, nextDot()));
1067
+ frame.prev = id;
1068
+ continue;
1069
+ }
1070
+ if (Array.isArray(childValue)) {
1071
+ const childSeq = newSeq();
1072
+ rgaInsertAfter(frame.target, frame.prev, id, insDot, childSeq);
1073
+ frame.prev = id;
1074
+ stack.push({
1075
+ kind: "seq",
1076
+ depth: childDepth,
1077
+ values: childValue,
1078
+ index: 0,
1079
+ prev: HEAD,
1080
+ target: childSeq
1081
+ });
1082
+ continue;
1083
+ }
1084
+ const childObj = newObj();
1085
+ rgaInsertAfter(frame.target, frame.prev, id, insDot, childObj);
1086
+ frame.prev = id;
1087
+ stack.push({
1088
+ kind: "obj",
1089
+ depth: childDepth,
1090
+ entries: Object.entries(childValue),
1091
+ index: 0,
1092
+ target: childObj
1093
+ });
728
1094
  }
729
- return obj;
1095
+ return root;
730
1096
  }
731
1097
  /** Deep-clone a CRDT document. The clone is fully independent of the original. */
732
1098
  function cloneDoc(doc) {
733
1099
  return { root: cloneNode(doc.root) };
734
1100
  }
735
1101
  function cloneNode(node) {
1102
+ return cloneNodeAtDepth(node, 0);
1103
+ }
1104
+ function cloneNodeAtDepth(node, depth) {
1105
+ assertTraversalDepth(depth);
736
1106
  if (node.kind === "lww") return {
737
1107
  kind: "lww",
738
1108
  value: structuredClone(node.value),
@@ -744,7 +1114,7 @@ function cloneNode(node) {
744
1114
  if (node.kind === "obj") {
745
1115
  const entries = /* @__PURE__ */ new Map();
746
1116
  for (const [k, v] of node.entries.entries()) entries.set(k, {
747
- node: cloneNode(v.node),
1117
+ node: cloneNodeAtDepth(v.node, depth + 1),
748
1118
  dot: {
749
1119
  actor: v.dot.actor,
750
1120
  ctr: v.dot.ctr
@@ -766,7 +1136,7 @@ function cloneNode(node) {
766
1136
  id: e.id,
767
1137
  prev: e.prev,
768
1138
  tombstone: e.tombstone,
769
- value: cloneNode(e.value),
1139
+ value: cloneNodeAtDepth(e.value, depth + 1),
770
1140
  insDot: {
771
1141
  actor: e.insDot.actor,
772
1142
  ctr: e.insDot.ctr
@@ -777,6 +1147,9 @@ function cloneNode(node) {
777
1147
  elems
778
1148
  };
779
1149
  }
1150
/** True for JSON scalar values: null, string, number, or boolean. */
function isJsonPrimitive(value) {
	if (value === null) return true;
	const kind = typeof value;
	return kind === "string" || kind === "number" || kind === "boolean";
}
780
1153
  function applyTest(base, head, it, evalTestAgainst) {
781
1154
  const snapshot = evalTestAgainst === "head" ? materialize(head.root) : materialize(base.root);
782
1155
  let got;
@@ -847,12 +1220,15 @@ function applyObjRemove(head, it, newDot) {
847
1220
  return null;
848
1221
  }
849
1222
  function applyArrInsert(base, head, it, newDot, bumpCounterAbove) {
1223
+ const pointer = `/${it.path.join("/")}`;
850
1224
  const baseSeq = getSeqAtPath(base, it.path);
851
1225
  if (!baseSeq) {
852
1226
  if (it.index === 0 || it.index === Number.POSITIVE_INFINITY) {
853
1227
  const headSeq = ensureSeqAtPath(head, it.path, newDot());
854
1228
  const prev = it.index === 0 ? HEAD : rgaPrevForInsertAtIndex(headSeq, Number.MAX_SAFE_INTEGER);
855
- const d = nextInsertDotForPrev(headSeq, prev, newDot, bumpCounterAbove);
1229
+ const dotRes = nextInsertDotForPrev(headSeq, prev, newDot, pointer, bumpCounterAbove);
1230
+ if (!dotRes.ok) return dotRes;
1231
+ const d = dotRes.dot;
856
1232
  rgaInsertAfter(headSeq, prev, dotToElemId(d), d, nodeFromJson(it.value, newDot));
857
1233
  return null;
858
1234
  }
@@ -861,7 +1237,7 @@ function applyArrInsert(base, head, it, newDot, bumpCounterAbove) {
861
1237
  code: 409,
862
1238
  reason: "MISSING_PARENT",
863
1239
  message: `base array missing at /${it.path.join("/")}`,
864
- path: `/${it.path.join("/")}`
1240
+ path: pointer
865
1241
  };
866
1242
  }
867
1243
  const headSeq = ensureSeqAtPath(head, it.path, newDot());
@@ -875,20 +1251,38 @@ function applyArrInsert(base, head, it, newDot, bumpCounterAbove) {
875
1251
  path: `/${it.path.join("/")}/${it.index}`
876
1252
  };
877
1253
  const prev = idx === 0 ? HEAD : rgaIdAtIndex(baseSeq, idx - 1) ?? HEAD;
878
- const d = nextInsertDotForPrev(headSeq, prev, newDot, bumpCounterAbove);
1254
+ const dotRes = nextInsertDotForPrev(headSeq, prev, newDot, pointer, bumpCounterAbove);
1255
+ if (!dotRes.ok) return dotRes;
1256
+ const d = dotRes.dot;
879
1257
  rgaInsertAfter(headSeq, prev, dotToElemId(d), d, nodeFromJson(it.value, newDot));
880
1258
  return null;
881
1259
  }
882
// Pick an insertion dot for a new element anchored after `prev` that compares
// strictly greater than every existing sibling's insertion dot. Returns a
// result object instead of looping forever on a non-advancing dot source.
function nextInsertDotForPrev(seq, prev, newDot, path, bumpCounterAbove) {
	// Bounded number of fresh-dot draws before giving up.
	const MAX_INSERT_DOT_ATTEMPTS = 1024;
	// Find the largest insertion dot among existing siblings anchored at `prev`.
	let maxSiblingDot = null;
	for (const elem of seq.elems.values()) {
		if (elem.prev !== prev) continue;
		if (maxSiblingDot === null || compareDot(elem.insDot, maxSiblingDot) > 0) maxSiblingDot = elem.insDot;
	}
	// No siblings: the first dot drawn is always acceptable.
	if (maxSiblingDot === null) return {
		ok: true,
		dot: newDot()
	};
	bumpCounterAbove?.(maxSiblingDot.ctr);
	for (let attempts = 0; attempts < MAX_INSERT_DOT_ATTEMPTS; attempts++) {
		const candidate = newDot();
		if (compareDot(candidate, maxSiblingDot) > 0) return {
			ok: true,
			dot: candidate
		};
	}
	return {
		ok: false,
		code: 409,
		reason: "DOT_GENERATION_EXHAUSTED",
		message: `failed to generate insert dot within ${MAX_INSERT_DOT_ATTEMPTS} attempts`,
		path
	};
}
893
1287
  function applyArrDelete(base, head, it, newDot) {
894
1288
  const d = newDot();
@@ -1056,8 +1450,7 @@ function jsonPatchToCrdtInternal(options) {
1056
1450
  const shadowBump = (ctr) => {
1057
1451
  if (shadowCtr < ctr) shadowCtr = ctr;
1058
1452
  };
1059
- for (let opIndex = 0; opIndex < options.patch.length; opIndex++) {
1060
- const op = options.patch[opIndex];
1453
+ const applySequentialOp = (op, opIndex) => {
1061
1454
  const baseJson = materialize(shadowBase.root);
1062
1455
  let intents;
1063
1456
  try {
@@ -1071,6 +1464,42 @@ function jsonPatchToCrdtInternal(options) {
1071
1464
  const shadowStep = applyIntentsToCrdt(shadowBase, shadowBase, intents, shadowDot, "base", shadowBump);
1072
1465
  if (!shadowStep.ok) return withOpIndex(shadowStep, opIndex);
1073
1466
  } else shadowBase = cloneDoc(options.head);
1467
+ return { ok: true };
1468
+ };
1469
+ for (let opIndex = 0; opIndex < options.patch.length; opIndex++) {
1470
+ const op = options.patch[opIndex];
1471
+ if (op.op === "move") {
1472
+ const baseJson = materialize(shadowBase.root);
1473
+ let fromValue;
1474
+ try {
1475
+ fromValue = structuredClone(getAtJson(baseJson, parseJsonPointer(op.from)));
1476
+ } catch {
1477
+ try {
1478
+ compileJsonPatchToIntent(baseJson, [{
1479
+ op: "remove",
1480
+ path: op.from
1481
+ }], { semantics: "sequential" });
1482
+ } catch (error) {
1483
+ return withOpIndex(toApplyError$1(error), opIndex);
1484
+ }
1485
+ return withOpIndex(toApplyError$1(/* @__PURE__ */ new Error(`failed to resolve move source at ${op.from}`)), opIndex);
1486
+ }
1487
+ if (op.from === op.path) continue;
1488
+ const removeStep = applySequentialOp({
1489
+ op: "remove",
1490
+ path: op.from
1491
+ }, opIndex);
1492
+ if (!removeStep.ok) return removeStep;
1493
+ const addStep = applySequentialOp({
1494
+ op: "add",
1495
+ path: op.path,
1496
+ value: fromValue
1497
+ }, opIndex);
1498
+ if (!addStep.ok) return addStep;
1499
+ continue;
1500
+ }
1501
+ const step = applySequentialOp(op, opIndex);
1502
+ if (!step.ok) return step;
1074
1503
  }
1075
1504
  return { ok: true };
1076
1505
  }
@@ -1085,6 +1514,7 @@ function isJsonPatchToCrdtOptions(value) {
1085
1514
  return typeof value === "object" && value !== null && "base" in value && "head" in value && "patch" in value && "newDot" in value;
1086
1515
  }
1087
1516
  function toApplyError$1(error) {
1517
+ if (error instanceof TraversalDepthError) return toDepthApplyError(error);
1088
1518
  if (error instanceof PatchCompileError) return {
1089
1519
  ok: false,
1090
1520
  code: 409,
@@ -1142,8 +1572,10 @@ function createState(initial, options) {
1142
1572
  /**
1143
1573
  * Fork a replica from a shared origin state while assigning a new local actor ID.
1144
1574
  * The forked state has an independent document clone and clock.
1575
+ * By default this rejects actor reuse to prevent duplicate-dot collisions across peers.
1145
1576
  */
1146
- function forkState(origin, actor) {
1577
+ function forkState(origin, actor, options = {}) {
1578
+ if (actor === origin.clock.actor && !options.allowActorReuse) throw new Error(`forkState actor must be unique; refusing to reuse origin actor '${actor}'`);
1147
1579
  return {
1148
1580
  doc: cloneDoc(origin.doc),
1149
1581
  clock: createClock(actor, origin.clock.ctr)
@@ -1188,11 +1620,18 @@ function tryApplyPatch(state, patch, options = {}) {
1188
1620
  doc: cloneDoc(state.doc),
1189
1621
  clock: cloneClock(state.clock)
1190
1622
  };
1191
- const result = applyPatchInternal(nextState, patch, options);
1192
- if (!result.ok) return {
1193
- ok: false,
1194
- error: result
1195
- };
1623
+ try {
1624
+ const result = applyPatchInternal(nextState, patch, options);
1625
+ if (!result.ok) return {
1626
+ ok: false,
1627
+ error: result
1628
+ };
1629
+ } catch (error) {
1630
+ return {
1631
+ ok: false,
1632
+ error: toApplyError(error)
1633
+ };
1634
+ }
1196
1635
  return {
1197
1636
  ok: true,
1198
1637
  state: nextState
@@ -1208,11 +1647,18 @@ function tryApplyPatchInPlace(state, patch, options = {}) {
1208
1647
  state.clock = next.state.clock;
1209
1648
  return { ok: true };
1210
1649
  }
1211
- const result = applyPatchInternal(state, patch, applyOptions);
1212
- if (!result.ok) return {
1213
- ok: false,
1214
- error: result
1215
- };
1650
+ try {
1651
+ const result = applyPatchInternal(state, patch, applyOptions);
1652
+ if (!result.ok) return {
1653
+ ok: false,
1654
+ error: result
1655
+ };
1656
+ } catch (error) {
1657
+ return {
1658
+ ok: false,
1659
+ error: toApplyError(error)
1660
+ };
1661
+ }
1216
1662
  return { ok: true };
1217
1663
  }
1218
1664
  /**
@@ -1261,8 +1707,8 @@ function applyPatchInternal(state, patch, options) {
1261
1707
  doc: cloneDoc(options.base.doc),
1262
1708
  clock: createClock("__base__", 0)
1263
1709
  } : null;
1264
- for (const op of patch) {
1265
- const step = applyPatchOpSequential(state, op, options, explicitBaseState ? explicitBaseState.doc : cloneDoc(state.doc));
1710
+ for (const [opIndex, op] of patch.entries()) {
1711
+ const step = applyPatchOpSequential(state, op, options, explicitBaseState ? explicitBaseState.doc : state.doc, opIndex);
1266
1712
  if (!step.ok) return step;
1267
1713
  if (explicitBaseState && op.op !== "test") {
1268
1714
  const baseStep = applyPatchInternal(explicitBaseState, [op], {
@@ -1279,33 +1725,54 @@ function applyPatchInternal(state, patch, options) {
1279
1725
  if (!compiled.ok) return compiled;
1280
1726
  return applyIntentsToCrdt(baseDoc, state.doc, compiled.intents, () => state.clock.next(), options.testAgainst ?? "head", (ctr) => bumpClockCounter(state, ctr));
1281
1727
  }
1282
- function applyPatchOpSequential(state, op, options, baseDoc) {
1728
+ function applyPatchOpSequential(state, op, options, baseDoc, opIndex) {
1283
1729
  const baseJson = materialize(baseDoc.root);
1284
1730
  if (op.op === "move") {
1285
- const fromValue = getAtJson(baseJson, parseJsonPointer(op.from));
1286
- const removeRes = applySinglePatchOp(state, baseDoc, {
1731
+ const fromResolved = resolveValueAtPointer(baseJson, op.from, opIndex);
1732
+ if (!fromResolved.ok) return fromResolved;
1733
+ const fromValue = fromResolved.value;
1734
+ const removeRes = applySinglePatchOp(state, baseDoc, baseJson, {
1287
1735
  op: "remove",
1288
1736
  path: op.from
1289
1737
  }, options);
1290
1738
  if (!removeRes.ok) return removeRes;
1291
- return applySinglePatchOp(state, cloneDoc(state.doc), {
1739
+ const addBase = state.doc;
1740
+ return applySinglePatchOp(state, addBase, materialize(addBase.root), {
1292
1741
  op: "add",
1293
1742
  path: op.path,
1294
1743
  value: fromValue
1295
1744
  }, options);
1296
1745
  }
1297
1746
  if (op.op === "copy") {
1298
- const fromValue = getAtJson(baseJson, parseJsonPointer(op.from));
1299
- return applySinglePatchOp(state, baseDoc, {
1747
+ const fromResolved = resolveValueAtPointer(baseJson, op.from, opIndex);
1748
+ if (!fromResolved.ok) return fromResolved;
1749
+ const fromValue = fromResolved.value;
1750
+ return applySinglePatchOp(state, baseDoc, baseJson, {
1300
1751
  op: "add",
1301
1752
  path: op.path,
1302
1753
  value: fromValue
1303
1754
  }, options);
1304
1755
  }
1305
- return applySinglePatchOp(state, baseDoc, op, options);
1756
+ return applySinglePatchOp(state, baseDoc, baseJson, op, options);
1306
1757
  }
1307
- function applySinglePatchOp(state, baseDoc, op, options) {
1308
- const compiled = compileIntents(materialize(baseDoc.root), [op], "sequential");
1758
+ function resolveValueAtPointer(baseJson, pointer, opIndex) {
1759
+ let path;
1760
+ try {
1761
+ path = parseJsonPointer(pointer);
1762
+ } catch (error) {
1763
+ return toPointerParseApplyError(error, pointer, opIndex);
1764
+ }
1765
+ try {
1766
+ return {
1767
+ ok: true,
1768
+ value: getAtJson(baseJson, path)
1769
+ };
1770
+ } catch (error) {
1771
+ return toPointerLookupApplyError(error, pointer, opIndex);
1772
+ }
1773
+ }
1774
+ function applySinglePatchOp(state, baseDoc, baseJson, op, options) {
1775
+ const compiled = compileIntents(baseJson, [op], "sequential");
1309
1776
  if (!compiled.ok) return compiled;
1310
1777
  return applyIntentsToCrdt(baseDoc, state.doc, compiled.intents, () => state.clock.next(), options.testAgainst ?? "head", (ctr) => bumpClockCounter(state, ctr));
1311
1778
  }
@@ -1323,30 +1790,41 @@ function compileIntents(baseJson, patch, semantics = "sequential") {
1323
1790
  }
1324
1791
  }
1325
1792
  function maxCtrInNodeForActor$1(node, actor) {
1326
- switch (node.kind) {
1327
- case "lww": return node.dot.actor === actor ? node.dot.ctr : 0;
1328
- case "obj": {
1329
- let best = 0;
1330
- for (const entry of node.entries.values()) {
1793
+ let best = 0;
1794
+ const stack = [{
1795
+ node,
1796
+ depth: 0
1797
+ }];
1798
+ while (stack.length > 0) {
1799
+ const frame = stack.pop();
1800
+ assertTraversalDepth(frame.depth);
1801
+ if (frame.node.kind === "lww") {
1802
+ if (frame.node.dot.actor === actor && frame.node.dot.ctr > best) best = frame.node.dot.ctr;
1803
+ continue;
1804
+ }
1805
+ if (frame.node.kind === "obj") {
1806
+ for (const entry of frame.node.entries.values()) {
1331
1807
  if (entry.dot.actor === actor && entry.dot.ctr > best) best = entry.dot.ctr;
1332
- const childBest = maxCtrInNodeForActor$1(entry.node, actor);
1333
- if (childBest > best) best = childBest;
1808
+ stack.push({
1809
+ node: entry.node,
1810
+ depth: frame.depth + 1
1811
+ });
1334
1812
  }
1335
- for (const tomb of node.tombstone.values()) if (tomb.actor === actor && tomb.ctr > best) best = tomb.ctr;
1336
- return best;
1813
+ for (const tomb of frame.node.tombstone.values()) if (tomb.actor === actor && tomb.ctr > best) best = tomb.ctr;
1814
+ continue;
1337
1815
  }
1338
- case "seq": {
1339
- let best = 0;
1340
- for (const elem of node.elems.values()) {
1341
- if (elem.insDot.actor === actor && elem.insDot.ctr > best) best = elem.insDot.ctr;
1342
- const childBest = maxCtrInNodeForActor$1(elem.value, actor);
1343
- if (childBest > best) best = childBest;
1344
- }
1345
- return best;
1816
+ for (const elem of frame.node.elems.values()) {
1817
+ if (elem.insDot.actor === actor && elem.insDot.ctr > best) best = elem.insDot.ctr;
1818
+ stack.push({
1819
+ node: elem.value,
1820
+ depth: frame.depth + 1
1821
+ });
1346
1822
  }
1347
1823
  }
1824
+ return best;
1348
1825
  }
1349
1826
  function toApplyError(error) {
1827
+ if (error instanceof TraversalDepthError) return toDepthApplyError(error);
1350
1828
  if (error instanceof PatchCompileError) return {
1351
1829
  ok: false,
1352
1830
  code: 409,
@@ -1362,16 +1840,51 @@ function toApplyError(error) {
1362
1840
  message: error instanceof Error ? error.message : "failed to compile patch"
1363
1841
  };
1364
1842
  }
1843
+ function toPointerParseApplyError(error, pointer, opIndex) {
1844
+ return {
1845
+ ok: false,
1846
+ code: 409,
1847
+ reason: "INVALID_POINTER",
1848
+ message: error instanceof Error ? error.message : "invalid pointer",
1849
+ path: pointer,
1850
+ opIndex
1851
+ };
1852
+ }
1853
+ function toPointerLookupApplyError(error, pointer, opIndex) {
1854
+ const mapped = mapLookupErrorToPatchReason(error);
1855
+ return {
1856
+ ok: false,
1857
+ code: 409,
1858
+ reason: mapped.reason,
1859
+ message: mapped.message,
1860
+ path: pointer,
1861
+ opIndex
1862
+ };
1863
+ }
1365
1864
 
1366
1865
  //#endregion
1367
1866
  //#region src/serialize.ts
1867
+ const HEAD_ELEM_ID = "HEAD";
1868
+ var DeserializeError = class extends Error {
1869
+ code = 409;
1870
+ reason;
1871
+ path;
1872
+ constructor(reason, path, message) {
1873
+ super(message);
1874
+ this.name = "DeserializeError";
1875
+ this.reason = reason;
1876
+ this.path = path;
1877
+ }
1878
+ };
1368
1879
  /** Serialize a CRDT document to a JSON-safe representation (Maps become plain objects). */
1369
1880
  function serializeDoc(doc) {
1370
1881
  return { root: serializeNode(doc.root) };
1371
1882
  }
1372
1883
  /** Reconstruct a CRDT document from its serialized form. */
1373
1884
  function deserializeDoc(data) {
1374
- return { root: deserializeNode(data.root) };
1885
+ if (!isRecord(data)) fail("INVALID_SERIALIZED_SHAPE", "/", "serialized doc must be an object");
1886
+ if (!("root" in data)) fail("INVALID_SERIALIZED_SHAPE", "/root", "serialized doc is missing root");
1887
+ return { root: deserializeNode(data.root, "/root", 0) };
1375
1888
  }
1376
1889
  /** Serialize a full CRDT state (document + clock) to a JSON-safe representation. */
1377
1890
  function serializeState(state) {
@@ -1385,7 +1898,11 @@ function serializeState(state) {
1385
1898
  }
1386
1899
  /** Reconstruct a full CRDT state from its serialized form, restoring the clock. */
1387
1900
  function deserializeState(data) {
1388
- const clock = createClock(data.clock.actor, data.clock.ctr);
1901
+ if (!isRecord(data)) fail("INVALID_SERIALIZED_SHAPE", "/", "serialized state must be an object");
1902
+ if (!("doc" in data)) fail("INVALID_SERIALIZED_SHAPE", "/doc", "serialized state is missing doc");
1903
+ if (!("clock" in data)) fail("INVALID_SERIALIZED_SHAPE", "/clock", "serialized state is missing clock");
1904
+ const clockRaw = asRecord(data.clock, "/clock");
1905
+ const clock = createClock(readActor(clockRaw.actor, "/clock/actor"), readCounter(clockRaw.ctr, "/clock/ctr"));
1389
1906
  return {
1390
1907
  doc: deserializeDoc(data.doc),
1391
1908
  clock
@@ -1436,54 +1953,132 @@ function serializeNode(node) {
1436
1953
  elems
1437
1954
  };
1438
1955
  }
1439
- function deserializeNode(node) {
1440
- if (node.kind === "lww") return {
1441
- kind: "lww",
1442
- value: structuredClone(node.value),
1443
- dot: {
1444
- actor: node.dot.actor,
1445
- ctr: node.dot.ctr
1446
- }
1447
- };
1448
- if (node.kind === "obj") {
1956
+ function deserializeNode(node, path, depth) {
1957
+ assertTraversalDepth(depth);
1958
+ const raw = asRecord(node, path);
1959
+ const kind = readString(raw.kind, `${path}/kind`);
1960
+ if (kind === "lww") {
1961
+ if (!("value" in raw)) fail("INVALID_SERIALIZED_SHAPE", `${path}/value`, "lww node is missing value");
1962
+ if (!("dot" in raw)) fail("INVALID_SERIALIZED_SHAPE", `${path}/dot`, "lww node is missing dot");
1963
+ return {
1964
+ kind: "lww",
1965
+ value: structuredClone(readJsonValue(raw.value, `${path}/value`, depth + 1)),
1966
+ dot: readDot(raw.dot, `${path}/dot`)
1967
+ };
1968
+ }
1969
+ if (kind === "obj") {
1970
+ const entriesRaw = asRecord(raw.entries, `${path}/entries`);
1971
+ const tombstoneRaw = asRecord(raw.tombstone, `${path}/tombstone`);
1449
1972
  const entries = /* @__PURE__ */ new Map();
1450
- for (const [k, v] of Object.entries(node.entries)) entries.set(k, {
1451
- node: deserializeNode(v.node),
1452
- dot: {
1453
- actor: v.dot.actor,
1454
- ctr: v.dot.ctr
1455
- }
1456
- });
1973
+ for (const [k, v] of Object.entries(entriesRaw)) {
1974
+ const entryPath = `${path}/entries/${k}`;
1975
+ const entryRaw = asRecord(v, entryPath);
1976
+ entries.set(k, {
1977
+ node: deserializeNode(entryRaw.node, `${entryPath}/node`, depth + 1),
1978
+ dot: readDot(entryRaw.dot, `${entryPath}/dot`)
1979
+ });
1980
+ }
1457
1981
  const tombstone = /* @__PURE__ */ new Map();
1458
- for (const [k, d] of Object.entries(node.tombstone)) tombstone.set(k, {
1459
- actor: d.actor,
1460
- ctr: d.ctr
1461
- });
1982
+ for (const [k, d] of Object.entries(tombstoneRaw)) tombstone.set(k, readDot(d, `${path}/tombstone/${k}`));
1462
1983
  return {
1463
1984
  kind: "obj",
1464
1985
  entries,
1465
1986
  tombstone
1466
1987
  };
1467
1988
  }
1989
+ if (kind !== "seq") fail("INVALID_SERIALIZED_SHAPE", `${path}/kind`, `unsupported node kind '${kind}'`);
1990
+ const elemsRaw = asRecord(raw.elems, `${path}/elems`);
1468
1991
  const elems = /* @__PURE__ */ new Map();
1469
- for (const [id, e] of Object.entries(node.elems)) elems.set(id, {
1470
- id: e.id,
1471
- prev: e.prev,
1472
- tombstone: e.tombstone,
1473
- value: deserializeNode(e.value),
1474
- insDot: {
1475
- actor: e.insDot.actor,
1476
- ctr: e.insDot.ctr
1477
- }
1478
- });
1992
+ for (const [id, rawElem] of Object.entries(elemsRaw)) {
1993
+ const elemPath = `${path}/elems/${id}`;
1994
+ const elem = asRecord(rawElem, elemPath);
1995
+ const elemId = readString(elem.id, `${elemPath}/id`);
1996
+ if (elemId !== id) fail("INVALID_SERIALIZED_INVARIANT", `${elemPath}/id`, `sequence element id '${elemId}' does not match key '${id}'`);
1997
+ const prev = readString(elem.prev, `${elemPath}/prev`);
1998
+ const tombstone = readBoolean(elem.tombstone, `${elemPath}/tombstone`);
1999
+ const value = deserializeNode(elem.value, `${elemPath}/value`, depth + 1);
2000
+ const insDot = readDot(elem.insDot, `${elemPath}/insDot`);
2001
+ if (dotToElemId(insDot) !== id) fail("INVALID_SERIALIZED_INVARIANT", `${elemPath}/insDot`, "sequence element id must match its insertion dot");
2002
+ elems.set(id, {
2003
+ id,
2004
+ prev,
2005
+ tombstone,
2006
+ value,
2007
+ insDot
2008
+ });
2009
+ }
2010
+ for (const elem of elems.values()) {
2011
+ if (elem.prev === elem.id) fail("INVALID_SERIALIZED_INVARIANT", `${path}/elems/${elem.id}/prev`, "sequence element cannot reference itself as predecessor");
2012
+ if (elem.prev !== HEAD_ELEM_ID && !elems.has(elem.prev)) fail("INVALID_SERIALIZED_INVARIANT", `${path}/elems/${elem.id}/prev`, `sequence predecessor '${elem.prev}' does not exist`);
2013
+ }
1479
2014
  return {
1480
2015
  kind: "seq",
1481
2016
  elems
1482
2017
  };
1483
2018
  }
2019
+ function asRecord(value, path) {
2020
+ if (!isRecord(value)) fail("INVALID_SERIALIZED_SHAPE", path, "expected object");
2021
+ return value;
2022
+ }
2023
+ function readDot(value, path) {
2024
+ const raw = asRecord(value, path);
2025
+ return {
2026
+ actor: readActor(raw.actor, `${path}/actor`),
2027
+ ctr: readCounter(raw.ctr, `${path}/ctr`)
2028
+ };
2029
+ }
2030
+ function readActor(value, path) {
2031
+ const actor = readString(value, path);
2032
+ if (actor.length === 0) fail("INVALID_SERIALIZED_SHAPE", path, "actor must not be empty");
2033
+ return actor;
2034
+ }
2035
+ function readCounter(value, path) {
2036
+ if (typeof value !== "number" || !Number.isSafeInteger(value) || value < 0) fail("INVALID_SERIALIZED_SHAPE", path, "counter must be a non-negative safe integer");
2037
+ return value;
2038
+ }
2039
+ function readString(value, path) {
2040
+ if (typeof value !== "string") fail("INVALID_SERIALIZED_SHAPE", path, "expected string");
2041
+ return value;
2042
+ }
2043
+ function readBoolean(value, path) {
2044
+ if (typeof value !== "boolean") fail("INVALID_SERIALIZED_SHAPE", path, "expected boolean");
2045
+ return value;
2046
+ }
2047
+ function readJsonValue(value, path, depth) {
2048
+ assertJsonValue(value, path, depth);
2049
+ return value;
2050
+ }
2051
+ function assertJsonValue(value, path, depth) {
2052
+ assertTraversalDepth(depth);
2053
+ if (value === null || typeof value === "string" || typeof value === "boolean") return;
2054
+ if (typeof value === "number") {
2055
+ if (!Number.isFinite(value)) fail("INVALID_SERIALIZED_SHAPE", path, "json number must be finite");
2056
+ return;
2057
+ }
2058
+ if (Array.isArray(value)) {
2059
+ for (const [index, item] of value.entries()) assertJsonValue(item, `${path}/${index}`, depth + 1);
2060
+ return;
2061
+ }
2062
+ if (!isRecord(value)) fail("INVALID_SERIALIZED_SHAPE", path, "expected JSON value");
2063
+ for (const [key, child] of Object.entries(value)) assertJsonValue(child, `${path}/${key}`, depth + 1);
2064
+ }
2065
+ function fail(reason, path, message) {
2066
+ throw new DeserializeError(reason, path, message);
2067
+ }
2068
+ function isRecord(value) {
2069
+ return typeof value === "object" && value !== null && !Array.isArray(value);
2070
+ }
1484
2071
 
1485
2072
  //#endregion
1486
2073
  //#region src/merge.ts
2074
+ var SharedElementMetadataMismatchError = class extends Error {
2075
+ path;
2076
+ constructor(path, id, field) {
2077
+ super(`shared RGA element '${id}' has conflicting ${field} metadata`);
2078
+ this.name = "SharedElementMetadataMismatchError";
2079
+ this.path = path;
2080
+ }
2081
+ };
1487
2082
  /** Error thrown by throwing merge helpers (`mergeDoc` / `mergeState`). */
1488
2083
  var MergeError = class extends Error {
1489
2084
  code;
@@ -1493,7 +2088,7 @@ var MergeError = class extends Error {
1493
2088
  super(error.message);
1494
2089
  this.name = "MergeError";
1495
2090
  this.code = error.code;
1496
- this.reason = "LINEAGE_MISMATCH";
2091
+ this.reason = error.reason;
1497
2092
  this.path = error.path;
1498
2093
  }
1499
2094
  };
@@ -1517,21 +2112,39 @@ function mergeDoc(a, b, options = {}) {
1517
2112
  }
1518
2113
  /** Non-throwing `mergeDoc` variant with structured conflict details. */
1519
2114
  function tryMergeDoc(a, b, options = {}) {
1520
- const mismatchPath = options.requireSharedOrigin ?? true ? findSeqLineageMismatch(a.root, b.root, []) : null;
1521
- if (mismatchPath) return {
1522
- ok: false,
1523
- error: {
2115
+ try {
2116
+ const mismatchPath = options.requireSharedOrigin ?? true ? findSeqLineageMismatch(a.root, b.root, []) : null;
2117
+ if (mismatchPath) return {
1524
2118
  ok: false,
1525
- code: 409,
1526
- reason: "LINEAGE_MISMATCH",
1527
- message: `merge requires shared array origin at ${mismatchPath}`,
1528
- path: mismatchPath
1529
- }
1530
- };
1531
- return {
1532
- ok: true,
1533
- doc: { root: mergeNode(a.root, b.root) }
1534
- };
2119
+ error: {
2120
+ ok: false,
2121
+ code: 409,
2122
+ reason: "LINEAGE_MISMATCH",
2123
+ message: `merge requires shared array origin at ${mismatchPath}`,
2124
+ path: mismatchPath
2125
+ }
2126
+ };
2127
+ return {
2128
+ ok: true,
2129
+ doc: { root: mergeNode(a.root, b.root) }
2130
+ };
2131
+ } catch (error) {
2132
+ if (error instanceof SharedElementMetadataMismatchError) return {
2133
+ ok: false,
2134
+ error: {
2135
+ ok: false,
2136
+ code: 409,
2137
+ reason: "LINEAGE_MISMATCH",
2138
+ message: error.message,
2139
+ path: error.path
2140
+ }
2141
+ };
2142
+ if (error instanceof TraversalDepthError) return {
2143
+ ok: false,
2144
+ error: toDepthApplyError(error)
2145
+ };
2146
+ throw error;
2147
+ }
1535
2148
  }
1536
2149
  /**
1537
2150
  * Merge two CRDT states.
@@ -1564,25 +2177,42 @@ function tryMergeState(a, b, options = {}) {
1564
2177
  };
1565
2178
  }
1566
2179
  function findSeqLineageMismatch(a, b, path) {
1567
- if (a.kind === "seq" && b.kind === "seq") {
1568
- const hasElemsA = a.elems.size > 0;
1569
- const hasElemsB = b.elems.size > 0;
1570
- if (hasElemsA && hasElemsB) {
1571
- let shared = false;
1572
- for (const id of a.elems.keys()) if (b.elems.has(id)) {
1573
- shared = true;
1574
- break;
2180
+ const stack = [{
2181
+ a,
2182
+ b,
2183
+ path,
2184
+ depth: path.length
2185
+ }];
2186
+ while (stack.length > 0) {
2187
+ const frame = stack.pop();
2188
+ assertTraversalDepth(frame.depth);
2189
+ if (frame.a.kind === "seq" && frame.b.kind === "seq") {
2190
+ const hasElemsA = frame.a.elems.size > 0;
2191
+ const hasElemsB = frame.b.elems.size > 0;
2192
+ if (hasElemsA && hasElemsB) {
2193
+ let shared = false;
2194
+ for (const id of frame.a.elems.keys()) if (frame.b.elems.has(id)) {
2195
+ shared = true;
2196
+ break;
2197
+ }
2198
+ if (!shared) return `/${frame.path.join("/")}`;
1575
2199
  }
1576
- if (!shared) return `/${path.join("/")}`;
1577
2200
  }
1578
- }
1579
- if (a.kind === "obj" && b.kind === "obj") {
1580
- const sharedKeys = new Set([...a.entries.keys()].filter((key) => b.entries.has(key)));
1581
- for (const key of sharedKeys) {
1582
- const nextA = a.entries.get(key).node;
1583
- const nextB = b.entries.get(key).node;
1584
- const mismatch = findSeqLineageMismatch(nextA, nextB, [...path, key]);
1585
- if (mismatch) return mismatch;
2201
+ if (frame.a.kind === "obj" && frame.b.kind === "obj") {
2202
+ const left = frame.a;
2203
+ const right = frame.b;
2204
+ const sharedKeys = [...left.entries.keys()].filter((key) => right.entries.has(key));
2205
+ for (let i = sharedKeys.length - 1; i >= 0; i--) {
2206
+ const key = sharedKeys[i];
2207
+ const nextA = left.entries.get(key).node;
2208
+ const nextB = right.entries.get(key).node;
2209
+ stack.push({
2210
+ a: nextA,
2211
+ b: nextB,
2212
+ path: [...frame.path, key],
2213
+ depth: frame.depth + 1
2214
+ });
2215
+ }
1586
2216
  }
1587
2217
  }
1588
2218
  return null;
@@ -1594,28 +2224,38 @@ function maxObservedCtrForActor(doc, actor, a, b) {
1594
2224
  return best;
1595
2225
  }
1596
2226
  function maxCtrInNodeForActor(node, actor) {
1597
- switch (node.kind) {
1598
- case "lww": return node.dot.actor === actor ? node.dot.ctr : 0;
1599
- case "obj": {
1600
- let best = 0;
1601
- for (const entry of node.entries.values()) {
2227
+ let best = 0;
2228
+ const stack = [{
2229
+ node,
2230
+ depth: 0
2231
+ }];
2232
+ while (stack.length > 0) {
2233
+ const frame = stack.pop();
2234
+ assertTraversalDepth(frame.depth);
2235
+ if (frame.node.kind === "lww") {
2236
+ if (frame.node.dot.actor === actor && frame.node.dot.ctr > best) best = frame.node.dot.ctr;
2237
+ continue;
2238
+ }
2239
+ if (frame.node.kind === "obj") {
2240
+ for (const entry of frame.node.entries.values()) {
1602
2241
  if (entry.dot.actor === actor && entry.dot.ctr > best) best = entry.dot.ctr;
1603
- const childBest = maxCtrInNodeForActor(entry.node, actor);
1604
- if (childBest > best) best = childBest;
2242
+ stack.push({
2243
+ node: entry.node,
2244
+ depth: frame.depth + 1
2245
+ });
1605
2246
  }
1606
- for (const tomb of node.tombstone.values()) if (tomb.actor === actor && tomb.ctr > best) best = tomb.ctr;
1607
- return best;
2247
+ for (const tomb of frame.node.tombstone.values()) if (tomb.actor === actor && tomb.ctr > best) best = tomb.ctr;
2248
+ continue;
1608
2249
  }
1609
- case "seq": {
1610
- let best = 0;
1611
- for (const elem of node.elems.values()) {
1612
- if (elem.insDot.actor === actor && elem.insDot.ctr > best) best = elem.insDot.ctr;
1613
- const childBest = maxCtrInNodeForActor(elem.value, actor);
1614
- if (childBest > best) best = childBest;
1615
- }
1616
- return best;
2250
+ for (const elem of frame.node.elems.values()) {
2251
+ if (elem.insDot.actor === actor && elem.insDot.ctr > best) best = elem.insDot.ctr;
2252
+ stack.push({
2253
+ node: elem.value,
2254
+ depth: frame.depth + 1
2255
+ });
1617
2256
  }
1618
2257
  }
2258
+ return best;
1619
2259
  }
1620
2260
  function repDot(node) {
1621
2261
  switch (node.kind) {
@@ -1640,11 +2280,15 @@ function repDot(node) {
1640
2280
  }
1641
2281
  }
1642
2282
  function mergeNode(a, b) {
2283
+ return mergeNodeAtDepth(a, b, 0, []);
2284
+ }
2285
+ function mergeNodeAtDepth(a, b, depth, path) {
2286
+ assertTraversalDepth(depth);
1643
2287
  if (a.kind === "lww" && b.kind === "lww") return mergeLww(a, b);
1644
- if (a.kind === "obj" && b.kind === "obj") return mergeObj(a, b);
1645
- if (a.kind === "seq" && b.kind === "seq") return mergeSeq(a, b);
1646
- if (compareDot(repDot(a), repDot(b)) >= 0) return cloneNodeShallow(a);
1647
- return cloneNodeShallow(b);
2288
+ if (a.kind === "obj" && b.kind === "obj") return mergeObj(a, b, depth + 1, path);
2289
+ if (a.kind === "seq" && b.kind === "seq") return mergeSeq(a, b, depth + 1, path);
2290
+ if (compareDot(repDot(a), repDot(b)) >= 0) return cloneNodeShallow(a, depth + 1);
2291
+ return cloneNodeShallow(b, depth + 1);
1648
2292
  }
1649
2293
  function mergeLww(a, b) {
1650
2294
  if (compareDot(a.dot, b.dot) >= 0) return {
@@ -1658,7 +2302,8 @@ function mergeLww(a, b) {
1658
2302
  dot: { ...b.dot }
1659
2303
  };
1660
2304
  }
1661
- function mergeObj(a, b) {
2305
+ function mergeObj(a, b, depth, path) {
2306
+ assertTraversalDepth(depth);
1662
2307
  const entries = /* @__PURE__ */ new Map();
1663
2308
  const tombstone = /* @__PURE__ */ new Map();
1664
2309
  const allTombKeys = new Set([...a.tombstone.keys(), ...b.tombstone.keys()]);
@@ -1675,15 +2320,15 @@ function mergeObj(a, b) {
1675
2320
  const eb = b.entries.get(key);
1676
2321
  let merged;
1677
2322
  if (ea && eb) merged = {
1678
- node: mergeNode(ea.node, eb.node),
2323
+ node: mergeNodeAtDepth(ea.node, eb.node, depth + 1, [...path, key]),
1679
2324
  dot: compareDot(ea.dot, eb.dot) >= 0 ? { ...ea.dot } : { ...eb.dot }
1680
2325
  };
1681
2326
  else if (ea) merged = {
1682
- node: cloneNodeShallow(ea.node),
2327
+ node: cloneNodeShallow(ea.node, depth + 1),
1683
2328
  dot: { ...ea.dot }
1684
2329
  };
1685
2330
  else merged = {
1686
- node: cloneNodeShallow(eb.node),
2331
+ node: cloneNodeShallow(eb.node, depth + 1),
1687
2332
  dot: { ...eb.dot }
1688
2333
  };
1689
2334
  const td = tombstone.get(key);
@@ -1696,14 +2341,17 @@ function mergeObj(a, b) {
1696
2341
  tombstone
1697
2342
  };
1698
2343
  }
1699
- function mergeSeq(a, b) {
2344
+ function mergeSeq(a, b, depth, path) {
2345
+ assertTraversalDepth(depth);
1700
2346
  const elems = /* @__PURE__ */ new Map();
1701
2347
  const allIds = new Set([...a.elems.keys(), ...b.elems.keys()]);
1702
2348
  for (const id of allIds) {
1703
2349
  const ea = a.elems.get(id);
1704
2350
  const eb = b.elems.get(id);
1705
2351
  if (ea && eb) {
1706
- const mergedValue = mergeNode(ea.value, eb.value);
2352
+ if (ea.prev !== eb.prev) throw new SharedElementMetadataMismatchError(toPointer(path), id, "prev");
2353
+ if (!sameDot(ea.insDot, eb.insDot)) throw new SharedElementMetadataMismatchError(toPointer(path), id, "insDot");
2354
+ const mergedValue = mergeNodeAtDepth(ea.value, eb.value, depth + 1, [...path, id]);
1707
2355
  elems.set(id, {
1708
2356
  id,
1709
2357
  prev: ea.prev,
@@ -1711,24 +2359,33 @@ function mergeSeq(a, b) {
1711
2359
  value: mergedValue,
1712
2360
  insDot: { ...ea.insDot }
1713
2361
  });
1714
- } else if (ea) elems.set(id, cloneElem(ea));
1715
- else elems.set(id, cloneElem(eb));
2362
+ } else if (ea) elems.set(id, cloneElem(ea, depth + 1));
2363
+ else elems.set(id, cloneElem(eb, depth + 1));
1716
2364
  }
1717
2365
  return {
1718
2366
  kind: "seq",
1719
2367
  elems
1720
2368
  };
1721
2369
  }
1722
- function cloneElem(e) {
2370
+ function sameDot(a, b) {
2371
+ return a.actor === b.actor && a.ctr === b.ctr;
2372
+ }
2373
+ function toPointer(path) {
2374
+ if (path.length === 0) return "/";
2375
+ return `/${path.join("/")}`;
2376
+ }
2377
+ function cloneElem(e, depth) {
2378
+ assertTraversalDepth(depth);
1723
2379
  return {
1724
2380
  id: e.id,
1725
2381
  prev: e.prev,
1726
2382
  tombstone: e.tombstone,
1727
- value: cloneNodeShallow(e.value),
2383
+ value: cloneNodeShallow(e.value, depth + 1),
1728
2384
  insDot: { ...e.insDot }
1729
2385
  };
1730
2386
  }
1731
- function cloneNodeShallow(node) {
2387
+ function cloneNodeShallow(node, depth) {
2388
+ assertTraversalDepth(depth);
1732
2389
  switch (node.kind) {
1733
2390
  case "lww": return {
1734
2391
  kind: "lww",
@@ -1738,7 +2395,7 @@ function cloneNodeShallow(node) {
1738
2395
  case "obj": {
1739
2396
  const entries = /* @__PURE__ */ new Map();
1740
2397
  for (const [k, v] of node.entries) entries.set(k, {
1741
- node: cloneNodeShallow(v.node),
2398
+ node: cloneNodeShallow(v.node, depth + 1),
1742
2399
  dot: { ...v.dot }
1743
2400
  });
1744
2401
  const tombstone = /* @__PURE__ */ new Map();
@@ -1751,7 +2408,7 @@ function cloneNodeShallow(node) {
1751
2408
  }
1752
2409
  case "seq": {
1753
2410
  const elems = /* @__PURE__ */ new Map();
1754
- for (const [id, e] of node.elems) elems.set(id, cloneElem(e));
2411
+ for (const [id, e] of node.elems) elems.set(id, cloneElem(e, depth + 1));
1755
2412
  return {
1756
2413
  kind: "seq",
1757
2414
  elems
@@ -1761,4 +2418,83 @@ function cloneNodeShallow(node) {
1761
2418
  }
1762
2419
 
1763
2420
  //#endregion
1764
- export { vvMerge as $, compileJsonPatchToIntent as A, newSeq as B, crdtToJsonPatch as C, jsonPatchToCrdtSafe as D, jsonPatchToCrdt as E, stringifyJsonPointer as F, rgaDelete as G, objSet as H, ROOT_KEY as I, rgaLinearizeIds as J, rgaIdAtIndex as K, lwwSet as L, getAtJson as M, jsonEquals as N, tryJsonPatchToCrdt as O, parseJsonPointer as P, vvHasDot as Q, newObj as R, crdtToFullReplace as S, docFromJsonWithDot as T, materialize as U, objRemove as V, HEAD as W, compareDot as X, rgaPrevForInsertAtIndex as Y, dotToElemId as Z, tryApplyPatch as _, tryMergeState as a, applyIntentsToCrdt as b, serializeDoc as c, applyPatch as d, cloneClock as et, applyPatchAsActor as f, toJson as g, forkState as h, tryMergeDoc as i, diffJsonPatch as j, PatchCompileError as k, serializeState as l, createState as m, mergeDoc as n, nextDotForActor as nt, deserializeDoc as o, applyPatchInPlace as p, rgaInsertAfter as q, mergeState as r, observeDot as rt, deserializeState as s, MergeError as t, createClock as tt, PatchError as u, tryApplyPatchInPlace as v, docFromJson as w, cloneDoc as x, validateJsonPatch as y, newReg as z };
2421
+ //#region src/compact.ts
2422
/**
 * A dot is causally stable when the checkpoint version vector has already
 * observed its counter for that actor (unseen actors count as 0).
 */
function isDotStable(stable, dot) {
  const observed = stable[dot.actor] ?? 0;
  return observed >= dot.ctr;
}
2425
/**
 * Compact causally-stable tombstones in a document.
 *
 * Safety note:
 * - Only compact at checkpoints that are causally stable across all peers you
 *   may still merge with.
 * - Do not merge this compacted document with replicas that might be behind
 *   the provided checkpoint.
 */
function compactDocTombstones(doc, options) {
  const target = options.mutate ? doc : cloneDoc(doc);
  const { stable } = options;
  const stableDot = (dot) => isDotStable(stable, dot);
  let objectTombstonesRemoved = 0;
  let sequenceTombstonesRemoved = 0;
  // Iterative DFS with per-frame depth tracking; assertTraversalDepth bounds
  // how deep the document may nest before compaction aborts.
  const pending = [{ node: target.root, depth: 0 }];
  while (pending.length > 0) {
    const { node, depth } = pending.pop();
    assertTraversalDepth(depth);
    if (node.kind === "obj") {
      objectTombstonesRemoved += objCompactTombstones(node, stableDot);
      for (const entry of node.entries.values()) {
        pending.push({ node: entry.node, depth: depth + 1 });
      }
    } else if (node.kind === "seq") {
      sequenceTombstonesRemoved += rgaCompactTombstones(node, stableDot);
      for (const elem of node.elems.values()) {
        pending.push({ node: elem.value, depth: depth + 1 });
      }
    }
  }
  return {
    doc: target,
    stats: { objectTombstonesRemoved, sequenceTombstonesRemoved },
  };
}
2469
/**
 * Compact causally-stable tombstones in a state document.
 *
 * Safety note:
 * - Only compact at checkpoints that are causally stable across all peers you
 *   may still merge with.
 * - Do not merge this compacted state with replicas that might be behind the
 *   provided checkpoint.
 */
function compactStateTombstones(state, options) {
  if (options.mutate) {
    // In-place: compact the caller's doc directly and hand the state back.
    const { stats } = compactDocTombstones(state.doc, {
      stable: options.stable,
      mutate: true,
    });
    return { state, stats };
  }
  // Copy-on-compact: clone doc and clock, then compact the clone in place.
  const copied = {
    doc: cloneDoc(state.doc),
    clock: cloneClock(state.clock),
  };
  const { stats } = compactDocTombstones(copied.doc, {
    stable: options.stable,
    mutate: true,
  });
  return { state: copied, stats };
}
2498
+
2499
+ //#endregion
2500
+ export { rgaLinearizeIds as $, jsonPatchToCrdtSafe as A, lwwSet as B, applyIntentsToCrdt as C, docFromJson as D, crdtToJsonPatch as E, getAtJson as F, objRemove as G, newReg as H, jsonEquals as I, HEAD as J, objSet as K, parseJsonPointer as L, PatchCompileError as M, compileJsonPatchToIntent as N, docFromJsonWithDot as O, diffJsonPatch as P, rgaInsertAfter as Q, stringifyJsonPointer as R, validateJsonPatch as S, crdtToFullReplace as T, newSeq as U, newObj as V, objCompactTombstones as W, rgaDelete as X, rgaCompactTombstones as Y, rgaIdAtIndex as Z, createState as _, mergeState as a, MAX_TRAVERSAL_DEPTH as at, tryApplyPatch as b, DeserializeError as c, createClock as ct, serializeDoc as d, rgaPrevForInsertAtIndex as et, serializeState as f, applyPatchInPlace as g, applyPatchAsActor as h, mergeDoc as i, vvMerge as it, tryJsonPatchToCrdt as j, jsonPatchToCrdt as k, deserializeDoc as l, nextDotForActor as lt, applyPatch as m, compactStateTombstones as n, dotToElemId as nt, tryMergeDoc as o, TraversalDepthError as ot, PatchError as p, materialize as q, MergeError as r, vvHasDot as rt, tryMergeState as s, cloneClock as st, compactDocTombstones as t, compareDot as tt, deserializeState as u, observeDot as ut, forkState as v, cloneDoc as w, tryApplyPatchInPlace as x, toJson as y, ROOT_KEY as z };