@dabble/patches 0.8.1 → 0.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/algorithms/ot/shared/rebaseChanges.js +6 -5
  2. package/dist/client/LWWDoc.js +11 -24
  3. package/dist/client/Patches.js +2 -2
  4. package/dist/client/PatchesDoc.d.ts +1 -1
  5. package/dist/client/PatchesDoc.js +2 -3
  6. package/dist/client/index.d.ts +1 -1
  7. package/dist/{index-C7ZhU2kS.d.ts → index-BO6EQFpw.d.ts} +2 -14
  8. package/dist/index.d.ts +13 -13
  9. package/dist/index.js +2 -10
  10. package/dist/json-patch/index.d.ts +11 -11
  11. package/dist/json-patch/index.js +0 -1
  12. package/dist/json-patch/ops/index.d.ts +1 -1
  13. package/dist/json-patch/ops/index.js +15 -14
  14. package/dist/micro/client.js +24 -6
  15. package/dist/micro/doc.js +15 -16
  16. package/dist/micro/index.d.ts +2 -2
  17. package/dist/micro/index.js +14 -7
  18. package/dist/micro/ops.d.ts +6 -4
  19. package/dist/micro/ops.js +35 -25
  20. package/dist/micro/server.d.ts +7 -4
  21. package/dist/micro/server.js +99 -31
  22. package/dist/micro/types.d.ts +33 -13
  23. package/dist/micro/types.js +8 -10
  24. package/dist/net/PatchesClient.d.ts +2 -2
  25. package/dist/net/PatchesSync.d.ts +6 -0
  26. package/dist/net/PatchesSync.js +20 -8
  27. package/dist/net/http/FetchTransport.d.ts +1 -3
  28. package/dist/net/http/FetchTransport.js +5 -11
  29. package/dist/net/index.d.ts +2 -2
  30. package/dist/net/protocol/JSONRPCClient.js +7 -0
  31. package/dist/net/protocol/JSONRPCServer.d.ts +4 -6
  32. package/dist/net/protocol/JSONRPCServer.js +3 -5
  33. package/dist/net/protocol/types.d.ts +1 -10
  34. package/dist/net/rest/PatchesREST.d.ts +2 -2
  35. package/dist/net/rest/SSEServer.d.ts +6 -5
  36. package/dist/net/rest/SSEServer.js +6 -3
  37. package/dist/server/LWWMemoryStoreBackend.d.ts +0 -1
  38. package/dist/server/LWWMemoryStoreBackend.js +0 -3
  39. package/dist/server/LWWServer.js +2 -1
  40. package/dist/server/OTBranchManager.d.ts +2 -7
  41. package/dist/server/OTBranchManager.js +0 -2
  42. package/dist/server/OTServer.js +2 -1
  43. package/dist/server/PatchesHistoryManager.d.ts +1 -10
  44. package/dist/server/PatchesHistoryManager.js +2 -18
  45. package/dist/server/index.d.ts +2 -2
  46. package/dist/server/index.js +3 -3
  47. package/dist/server/types.d.ts +0 -5
  48. package/dist/utils/concurrency.d.ts +6 -1
  49. package/dist/utils/concurrency.js +4 -0
  50. package/package.json +1 -1
@@ -20,12 +20,13 @@ function rebaseChanges(serverChanges, localChanges) {
20
20
  );
21
21
  const baseRev = lastChange.rev;
22
22
  let rev = lastChange.rev;
23
- const result = filteredLocalChanges.map((change) => {
24
- rev++;
23
+ const result = [];
24
+ for (const change of filteredLocalChanges) {
25
25
  const ops = transformPatch.transform(change.ops).ops;
26
- if (!ops.length) return null;
27
- return { ...change, baseRev, rev, ops };
28
- }).filter(Boolean);
26
+ if (!ops.length) continue;
27
+ rev++;
28
+ result.push({ ...change, baseRev, rev, ops });
29
+ }
29
30
  return result;
30
31
  }
31
32
  export {
@@ -17,13 +17,8 @@ class LWWDoc extends BaseDoc {
17
17
  this._committedRev = snapshot?.rev ?? 0;
18
18
  this._hasPending = (snapshot?.changes?.length ?? 0) > 0;
19
19
  if (snapshot?.changes && snapshot.changes.length > 0) {
20
- let currentState = this.state;
21
- for (const change of snapshot.changes) {
22
- for (const op of change.ops) {
23
- currentState = applyPatch(currentState, [op], { partial: true });
24
- }
25
- }
26
- this.state = currentState;
20
+ const allOps = snapshot.changes.flatMap((c) => c.ops);
21
+ this.state = applyPatch(this.state, allOps, { partial: true });
27
22
  }
28
23
  this._baseState = this.state;
29
24
  this._checkLoaded();
@@ -46,11 +41,11 @@ class LWWDoc extends BaseDoc {
46
41
  this._optimisticOps = [];
47
42
  let currentState = snapshot.state;
48
43
  if (snapshot.changes && snapshot.changes.length > 0) {
49
- for (const change of snapshot.changes) {
50
- for (const op of change.ops) {
51
- currentState = applyPatch(currentState, [op], { partial: true });
52
- }
53
- }
44
+ currentState = applyPatch(
45
+ currentState,
46
+ snapshot.changes.flatMap((c) => c.ops),
47
+ { partial: true }
48
+ );
54
49
  }
55
50
  this._baseState = currentState;
56
51
  this._checkLoaded();
@@ -97,20 +92,12 @@ class LWWDoc extends BaseDoc {
97
92
  this._hasPending = hasPending ?? hasPendingChanges;
98
93
  this._checkLoaded();
99
94
  if (hasServerChanges) {
100
- let newBaseState = this._baseState;
101
- for (const change of changes) {
102
- for (const op of change.ops) {
103
- newBaseState = applyPatch(newBaseState, [op], { partial: true });
104
- }
105
- }
106
- this._baseState = newBaseState;
95
+ const allOps = changes.flatMap((c) => c.ops);
96
+ this._baseState = applyPatch(this._baseState, allOps, { partial: true });
107
97
  this._recomputeState();
108
98
  } else {
109
- for (const change of changes) {
110
- for (const op of change.ops) {
111
- this._baseState = applyPatch(this._baseState, [op], { partial: true });
112
- }
113
- }
99
+ const allOps = changes.flatMap((c) => c.ops);
100
+ this._baseState = applyPatch(this._baseState, allOps, { partial: true });
114
101
  if (this._optimisticOps.length > 0) {
115
102
  this._optimisticOps.shift();
116
103
  } else {
@@ -102,8 +102,6 @@ class Patches {
102
102
  if (!docIds.length) return;
103
103
  docIds.forEach(this.trackedDocs.delete, this.trackedDocs);
104
104
  this.onUntrackDocs.emit(docIds);
105
- const closedPromises = docIds.filter((id) => this.docs.has(id)).map((id) => this.closeDoc(id));
106
- await Promise.all(closedPromises);
107
105
  const byAlgorithm = /* @__PURE__ */ new Map();
108
106
  for (const docId of docIds) {
109
107
  const managed = this.docs.get(docId);
@@ -112,6 +110,8 @@ class Patches {
112
110
  list.push(docId);
113
111
  byAlgorithm.set(algorithm, list);
114
112
  }
113
+ const closedPromises = docIds.filter((id) => this.docs.has(id)).map((id) => this.closeDoc(id));
114
+ await Promise.all(closedPromises);
115
115
  await Promise.all([...byAlgorithm.entries()].map(([algorithm, ids]) => algorithm.untrackDocs(ids)));
116
116
  }
117
117
  // ensure a second call to openDoc with the same docId returns the same promise while opening
@@ -1,6 +1,6 @@
1
1
  import 'easy-signal';
2
2
  import '../json-patch/types.js';
3
3
  import '../types.js';
4
- export { O as OTDoc, P as PatchesDoc, O as PatchesDocClass, a as PatchesDocOptions } from '../BaseDoc-BT18xPxU.js';
4
+ export { O as OTDoc, P as PatchesDoc, a as PatchesDocOptions } from '../BaseDoc-BT18xPxU.js';
5
5
  import '../json-patch/JSONPatch.js';
6
6
  import '@dabble/delta';
@@ -1,6 +1,5 @@
1
1
  import "../chunk-IZ2YBCUP.js";
2
- import { OTDoc, OTDoc as OTDoc2 } from "./OTDoc.js";
2
+ import { OTDoc } from "./OTDoc.js";
3
3
  export {
4
- OTDoc,
5
- OTDoc2 as PatchesDocClass
4
+ OTDoc
6
5
  };
@@ -1,4 +1,4 @@
1
- export { B as BaseDoc, O as OTDoc, P as PatchesDoc, O as PatchesDocClass, a as PatchesDocOptions } from '../BaseDoc-BT18xPxU.js';
1
+ export { B as BaseDoc, O as OTDoc, P as PatchesDoc, a as PatchesDocOptions } from '../BaseDoc-BT18xPxU.js';
2
2
  export { IndexedDBFactoryOptions, MultiAlgorithmFactoryOptions, MultiAlgorithmIndexedDBFactoryOptions, PatchesFactoryOptions, createLWWIndexedDBPatches, createLWWPatches, createMultiAlgorithmIndexedDBPatches, createMultiAlgorithmPatches, createOTIndexedDBPatches, createOTPatches } from './factories.js';
3
3
  export { IDBStoreWrapper, IDBTransactionWrapper, IndexedDBStore } from './IndexedDBStore.js';
4
4
  export { OTIndexedDBStore } from './OTIndexedDBStore.js';
@@ -1,4 +1,4 @@
1
- import { JSONPatchOpHandlerMap, JSONPatchOpHandler } from './json-patch/types.js';
1
+ import { JSONPatchOpHandlerMap } from './json-patch/types.js';
2
2
  import { add } from './json-patch/ops/add.js';
3
3
  import { bit } from './json-patch/ops/bitmask.js';
4
4
  import { copy } from './json-patch/ops/copy.js';
@@ -9,19 +9,7 @@ import { remove } from './json-patch/ops/remove.js';
9
9
  import { replace } from './json-patch/ops/replace.js';
10
10
  import { test } from './json-patch/ops/test.js';
11
11
 
12
- declare function getTypes(custom?: JSONPatchOpHandlerMap): {
13
- test: JSONPatchOpHandler;
14
- add: JSONPatchOpHandler;
15
- remove: JSONPatchOpHandler;
16
- replace: JSONPatchOpHandler;
17
- copy: JSONPatchOpHandler;
18
- move: JSONPatchOpHandler;
19
- '@inc': JSONPatchOpHandler;
20
- '@bit': JSONPatchOpHandler;
21
- '@txt': JSONPatchOpHandler;
22
- '@max': JSONPatchOpHandler;
23
- '@min': JSONPatchOpHandler;
24
- };
12
+ declare function getTypes(custom?: JSONPatchOpHandlerMap): JSONPatchOpHandlerMap;
25
13
 
26
14
  declare const index_add: typeof add;
27
15
  declare const index_bit: typeof bit;
package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  export { Delta } from '@dabble/delta';
2
- export { B as BaseDoc, O as OTDoc, P as PatchesDoc, O as PatchesDocClass, a as PatchesDocOptions } from './BaseDoc-BT18xPxU.js';
2
+ export { B as BaseDoc, O as OTDoc, P as PatchesDoc, a as PatchesDocOptions } from './BaseDoc-BT18xPxU.js';
3
3
  export { IndexedDBFactoryOptions, MultiAlgorithmFactoryOptions, MultiAlgorithmIndexedDBFactoryOptions, PatchesFactoryOptions, createLWWIndexedDBPatches, createLWWPatches, createMultiAlgorithmIndexedDBPatches, createMultiAlgorithmPatches, createOTIndexedDBPatches, createOTPatches } from './client/factories.js';
4
4
  export { IDBStoreWrapper, IDBTransactionWrapper, IndexedDBStore } from './client/IndexedDBStore.js';
5
5
  export { OTIndexedDBStore } from './client/OTIndexedDBStore.js';
@@ -18,25 +18,25 @@ export { LWWClientStore } from './client/LWWClientStore.js';
18
18
  export { ClientAlgorithm } from './client/ClientAlgorithm.js';
19
19
  export { createChange } from './data/change.js';
20
20
  export { createVersionId, createVersionMetadata } from './data/version.js';
21
- export { ReadonlyStore, Signal, SignalSubscriber, Store, Subscriber, Unsubscriber, afterChange, batch, clearAllContext, computed, readonly, signal, store, watch, whenMatches, whenReady } from 'easy-signal';
21
+ export { ReadonlyStore, Signal, SignalSubscriber, Store, Subscriber, Unsubscriber, batch, computed, readonly, signal, store, watch } from 'easy-signal';
22
22
  export { fractionalIndex, healDuplicateOrders, sortByOrder } from './fractionalIndex.js';
23
23
  export { applyPatch } from './json-patch/applyPatch.js';
24
24
  export { composePatch } from './json-patch/composePatch.js';
25
25
  export { invertPatch } from './json-patch/invertPatch.js';
26
- export { applyBitmask, bit, bitmask, combineBitmasks } from './json-patch/ops/bitmask.js';
27
- export { i as defaultOps, g as getTypes } from './index-C7ZhU2kS.js';
26
+ export { applyBitmask, bitmask, combineBitmasks } from './json-patch/ops/bitmask.js';
27
+ export { i as defaultOps } from './index-BO6EQFpw.js';
28
28
  export { createPathProxy, pathProxy } from './json-patch/pathProxy.js';
29
29
  export { transformPatch } from './json-patch/transformPatch.js';
30
30
  export { JSONPatch, PathLike, WriteOptions } from './json-patch/JSONPatch.js';
31
- export { ApplyJSONPatchOptions, JSONPatchOpHandlerMap as JSONPatchCustomTypes, JSONPatchOp } from './json-patch/types.js';
31
+ export { ApplyJSONPatchOptions, JSONPatchOp, JSONPatchOpHandlerMap } from './json-patch/types.js';
32
32
  export { Branch, BranchStatus, Change, ChangeInput, ChangeMutator, CommitChangesOptions, DeleteDocOptions, DocSyncState, DocSyncStatus, DocumentTombstone, EditableBranchMetadata, EditableVersionMetadata, ListChangesOptions, ListVersionsOptions, PatchesSnapshot, PatchesState, PathProxy, VersionMetadata } from './types.js';
33
- export { add } from './json-patch/ops/add.js';
34
- export { copy } from './json-patch/ops/copy.js';
35
- export { increment } from './json-patch/ops/increment.js';
36
- export { max, min } from './json-patch/ops/minmax.js';
37
- export { move } from './json-patch/ops/move.js';
38
- export { remove } from './json-patch/ops/remove.js';
39
- export { replace } from './json-patch/ops/replace.js';
40
- export { test } from './json-patch/ops/test.js';
41
33
  import './utils/deferred.js';
42
34
  import './net/protocol/types.js';
35
+ import './json-patch/ops/add.js';
36
+ import './json-patch/ops/copy.js';
37
+ import './json-patch/ops/increment.js';
38
+ import './json-patch/ops/minmax.js';
39
+ import './json-patch/ops/move.js';
40
+ import './json-patch/ops/remove.js';
41
+ import './json-patch/ops/replace.js';
42
+ import './json-patch/ops/test.js';
package/dist/index.js CHANGED
@@ -9,24 +9,16 @@ import {
9
9
  readonly,
10
10
  computed,
11
11
  batch,
12
- watch,
13
- whenReady,
14
- whenMatches,
15
- afterChange,
16
- clearAllContext
12
+ watch
17
13
  } from "easy-signal";
18
14
  export * from "./fractionalIndex.js";
19
15
  export * from "./json-patch/index.js";
20
16
  export {
21
17
  Delta,
22
- afterChange,
23
18
  batch,
24
- clearAllContext,
25
19
  computed,
26
20
  readonly,
27
21
  signal,
28
22
  store,
29
- watch,
30
- whenMatches,
31
- whenReady
23
+ watch
32
24
  };
@@ -1,19 +1,19 @@
1
1
  export { applyPatch } from './applyPatch.js';
2
2
  export { composePatch } from './composePatch.js';
3
3
  export { invertPatch } from './invertPatch.js';
4
- export { applyBitmask, bit, bitmask, combineBitmasks } from './ops/bitmask.js';
5
- export { i as defaultOps, g as getTypes } from '../index-C7ZhU2kS.js';
4
+ export { applyBitmask, bitmask, combineBitmasks } from './ops/bitmask.js';
5
+ export { i as defaultOps } from '../index-BO6EQFpw.js';
6
6
  export { createPathProxy, pathProxy } from './pathProxy.js';
7
7
  export { transformPatch } from './transformPatch.js';
8
8
  export { JSONPatch, PathLike, WriteOptions } from './JSONPatch.js';
9
- export { ApplyJSONPatchOptions, JSONPatchOpHandlerMap as JSONPatchCustomTypes, JSONPatchOp } from './types.js';
10
- export { add } from './ops/add.js';
11
- export { copy } from './ops/copy.js';
12
- export { increment } from './ops/increment.js';
13
- export { max, min } from './ops/minmax.js';
14
- export { move } from './ops/move.js';
15
- export { remove } from './ops/remove.js';
16
- export { replace } from './ops/replace.js';
17
- export { test } from './ops/test.js';
9
+ export { ApplyJSONPatchOptions, JSONPatchOp, JSONPatchOpHandlerMap } from './types.js';
10
+ import './ops/add.js';
11
+ import './ops/copy.js';
12
+ import './ops/increment.js';
13
+ import './ops/minmax.js';
14
+ import './ops/move.js';
15
+ import './ops/remove.js';
16
+ import './ops/replace.js';
17
+ import './ops/test.js';
18
18
  import '../types.js';
19
19
  import '@dabble/delta';
@@ -7,7 +7,6 @@ import * as defaultOps from "./ops/index.js";
7
7
  export * from "./pathProxy.js";
8
8
  import { transformPatch } from "./transformPatch.js";
9
9
  export * from "./JSONPatch.js";
10
- export * from "./ops/index.js";
11
10
  export {
12
11
  applyBitmask,
13
12
  applyPatch,
@@ -8,4 +8,4 @@ export { move } from './move.js';
8
8
  export { remove } from './remove.js';
9
9
  export { replace } from './replace.js';
10
10
  export { test } from './test.js';
11
- export { g as getTypes } from '../../index-C7ZhU2kS.js';
11
+ export { g as getTypes } from '../../index-BO6EQFpw.js';
@@ -9,21 +9,22 @@ import { remove } from "./remove.js";
9
9
  import { replace } from "./replace.js";
10
10
  import { test } from "./test.js";
11
11
  import { text } from "./text.js";
12
+ const defaultTypes = {
13
+ test,
14
+ add,
15
+ remove,
16
+ replace,
17
+ copy,
18
+ move,
19
+ "@inc": increment,
20
+ "@bit": bit,
21
+ "@txt": text,
22
+ "@max": max,
23
+ "@min": min
24
+ };
12
25
  function getTypes(custom) {
13
- return {
14
- test,
15
- add,
16
- remove,
17
- replace,
18
- copy,
19
- move,
20
- "@inc": increment,
21
- "@bit": bit,
22
- "@txt": text,
23
- "@max": max,
24
- "@min": min,
25
- ...custom
26
- };
26
+ if (!custom || Object.keys(custom).length === 0) return defaultTypes;
27
+ return { ...defaultTypes, ...custom };
27
28
  }
28
29
  export {
29
30
  add,
@@ -1,6 +1,7 @@
1
1
  import "../chunk-IZ2YBCUP.js";
2
2
  import { signal } from "easy-signal";
3
3
  import { MicroDoc } from "./doc.js";
4
+ import { transformPendingTxt } from "./ops.js";
4
5
  class MicroClient {
5
6
  _url;
6
7
  _dbName;
@@ -29,10 +30,27 @@ class MicroClient {
29
30
  }
30
31
  }
31
32
  try {
32
- const remote = await this._fetch(`/docs/${docId}`);
33
- if (remote.rev > state.rev) {
34
- state = remote;
35
- pending = {};
33
+ if (state.rev > 0 && Object.keys(pending).length) {
34
+ try {
35
+ const sync = await this._fetch(`/docs/${docId}/sync?since=${state.rev}`);
36
+ if (sync.rev > state.rev) {
37
+ Object.assign(state.fields, sync.fields);
38
+ pending = transformPendingTxt(pending, sync.textLog);
39
+ state.rev = sync.rev;
40
+ }
41
+ } catch {
42
+ const remote = await this._fetch(`/docs/${docId}`);
43
+ if (remote.rev > state.rev) {
44
+ state = remote;
45
+ pending = {};
46
+ }
47
+ }
48
+ } else {
49
+ const remote = await this._fetch(`/docs/${docId}`);
50
+ if (remote.rev > state.rev) {
51
+ state = remote;
52
+ pending = {};
53
+ }
36
54
  }
37
55
  } catch {
38
56
  }
@@ -64,10 +82,10 @@ class MicroClient {
64
82
  }
65
83
  /** Disconnect WebSocket and clean up. */
66
84
  destroy() {
67
- for (const [id, entry] of this._docs) {
85
+ for (const entry of this._docs.values()) {
68
86
  if (entry.timer) clearTimeout(entry.timer);
69
- this._docs.delete(id);
70
87
  }
88
+ this._docs.clear();
71
89
  if (this._wsTimer) clearTimeout(this._wsTimer);
72
90
  this._ws?.close();
73
91
  this._ws = null;
package/dist/micro/doc.js CHANGED
@@ -2,16 +2,16 @@ import "../chunk-IZ2YBCUP.js";
2
2
  import { Delta } from "@dabble/delta";
3
3
  import { batch, store } from "easy-signal";
4
4
  import { buildState, consolidateOps, effectiveFields, generateId, mergeField } from "./ops.js";
5
- import { TXT, parseSuffix } from "./types.js";
5
+ import {} from "./types.js";
6
6
  function createUpdater(emit, path = "") {
7
7
  return new Proxy({}, {
8
8
  get(_, prop) {
9
9
  const p = path ? `${path}.${prop}` : prop;
10
10
  switch (prop) {
11
11
  case "set":
12
- return (val) => emit(path, "", val);
12
+ return (val) => emit(path, "=", val);
13
13
  case "del":
14
- return () => emit(path, "", null);
14
+ return () => emit(path, "!", null);
15
15
  case "inc":
16
16
  return (val = 1) => emit(path, "+", val);
17
17
  case "bit":
@@ -59,8 +59,8 @@ class MicroDoc {
59
59
  update(fn) {
60
60
  const ops = {};
61
61
  const ts = Date.now();
62
- const emit = (path, suffix, val) => {
63
- ops[suffix ? path + suffix : path] = { val, ts };
62
+ const emit = (path, op, val) => {
63
+ ops[path] = { op, val, ts };
64
64
  };
65
65
  fn(createUpdater(emit));
66
66
  if (!Object.keys(ops).length) return;
@@ -80,12 +80,11 @@ class MicroDoc {
80
80
  _confirmSend(rev) {
81
81
  if (!this._sending) return;
82
82
  for (const [key, field] of Object.entries(this._sending)) {
83
- const { suffix } = parseSuffix(key);
84
- if (suffix === TXT) {
83
+ if (field.op === "#") {
85
84
  const base = this._confirmed[key]?.val ? new Delta(this._confirmed[key].val) : new Delta();
86
- this._confirmed[key] = { val: base.compose(new Delta(field.val)).ops, ts: field.ts };
85
+ this._confirmed[key] = { op: "#", val: base.compose(new Delta(field.val)).ops, ts: field.ts };
87
86
  } else {
88
- this._confirmed[key] = mergeField(this._confirmed[key], field, suffix);
87
+ this._confirmed[key] = mergeField(this._confirmed[key], field);
89
88
  }
90
89
  }
91
90
  this._sending = null;
@@ -104,23 +103,23 @@ class MicroDoc {
104
103
  applyRemote(fields, rev) {
105
104
  batch(() => {
106
105
  for (const [key, field] of Object.entries(fields)) {
107
- const { suffix } = parseSuffix(key);
108
- if (suffix === TXT) {
106
+ if (field.op === "#") {
109
107
  const remote = new Delta(field.val);
110
108
  if (this._sending?.[key]) {
111
109
  const s = new Delta(this._sending[key].val);
112
- this._sending[key] = { val: s.transform(remote, false).ops, ts: this._sending[key].ts };
113
- const rPrime = remote.transform(s, true);
110
+ const sPrime = remote.transform(s, true);
111
+ const rPrime = s.transform(remote, false);
112
+ this._sending[key] = { op: "#", val: sPrime.ops, ts: this._sending[key].ts };
114
113
  if (this._pending[key]) {
115
114
  const p = new Delta(this._pending[key].val);
116
- this._pending[key] = { val: p.transform(rPrime, false).ops, ts: this._pending[key].ts };
115
+ this._pending[key] = { op: "#", val: rPrime.transform(p, true).ops, ts: this._pending[key].ts };
117
116
  }
118
117
  } else if (this._pending[key]) {
119
118
  const p = new Delta(this._pending[key].val);
120
- this._pending[key] = { val: p.transform(remote, false).ops, ts: this._pending[key].ts };
119
+ this._pending[key] = { op: "#", val: remote.transform(p, true).ops, ts: this._pending[key].ts };
121
120
  }
122
121
  const base = this._confirmed[key]?.val ? new Delta(this._confirmed[key].val) : new Delta();
123
- this._confirmed[key] = { val: base.compose(remote).ops, ts: field.ts };
122
+ this._confirmed[key] = { op: "#", val: base.compose(remote).ops, ts: field.ts };
124
123
  } else {
125
124
  this._confirmed[key] = field;
126
125
  }
@@ -1,7 +1,7 @@
1
1
  export { ClientOptions, MicroClient } from './client.js';
2
2
  export { MicroDoc, Updatable } from './doc.js';
3
- export { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField } from './ops.js';
3
+ export { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField, transformPendingTxt } from './ops.js';
4
4
  export { MemoryDbBackend, MicroServer } from './server.js';
5
- export { BIT, Change, ChangeLogEntry, CommitResult, DbBackend, DocState, Field, FieldMap, INC, MAX, ObjectStore, REF_THRESHOLD, TXT, TextLogEntry, parseSuffix } from './types.js';
5
+ export { Change, ChangeLogEntry, CommitResult, CommitWrite, DbBackend, DocState, Field, FieldMap, ObjectStore, Op, REF_THRESHOLD, RevConflictError, SyncResult, TextLogEntry } from './types.js';
6
6
  import 'easy-signal';
7
7
  import '@dabble/delta';
@@ -1,19 +1,26 @@
1
1
  import "../chunk-IZ2YBCUP.js";
2
2
  import { MicroClient } from "./client.js";
3
3
  import { MicroDoc } from "./doc.js";
4
- import { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField } from "./ops.js";
4
+ import {
5
+ applyBitmask,
6
+ bitmask,
7
+ buildState,
8
+ combineBitmasks,
9
+ consolidateOps,
10
+ effectiveFields,
11
+ generateId,
12
+ mergeField,
13
+ transformPendingTxt
14
+ } from "./ops.js";
5
15
  import { MemoryDbBackend, MicroServer } from "./server.js";
6
- import { BIT, INC, MAX, parseSuffix, REF_THRESHOLD, TXT } from "./types.js";
16
+ import { RevConflictError, REF_THRESHOLD } from "./types.js";
7
17
  export {
8
- BIT,
9
- INC,
10
- MAX,
11
18
  MemoryDbBackend,
12
19
  MicroClient,
13
20
  MicroDoc,
14
21
  MicroServer,
15
22
  REF_THRESHOLD,
16
- TXT,
23
+ RevConflictError,
17
24
  applyBitmask,
18
25
  bitmask,
19
26
  buildState,
@@ -22,5 +29,5 @@ export {
22
29
  effectiveFields,
23
30
  generateId,
24
31
  mergeField,
25
- parseSuffix
32
+ transformPendingTxt
26
33
  };
@@ -8,13 +8,15 @@ declare function applyBitmask(num: number, mask: number): number;
8
8
  declare function combineBitmasks(a: number, b: number): number;
9
9
  /** Generate a random ID. */
10
10
  declare function generateId(): string;
11
- /** Merge a single incoming field with an existing value, based on suffix type. */
12
- declare function mergeField(existing: Field | undefined, incoming: Field, suffix: string): Field;
11
+ /** Merge a single incoming field with an existing value, based on op type. */
12
+ declare function mergeField(existing: Field | undefined, incoming: Field): Field;
13
13
  /** Consolidate new ops into existing pending ops (client-side batching). */
14
14
  declare function consolidateOps(pending: FieldMap, newOps: FieldMap): FieldMap;
15
- /** Convert flat dot-notation FieldMap to a nested object. Strips suffixes from keys. */
15
+ /** Transform pending TXT field deltas against server text log entries (for reconnection). */
16
+ declare function transformPendingTxt(pending: FieldMap, textLog: Record<string, any[]>): FieldMap;
17
+ /** Convert flat dot-notation FieldMap to a nested object. */
16
18
  declare function buildState<T = Record<string, any>>(fields: FieldMap): T;
17
19
  /** Compute effective fields by layering confirmed + sending + pending. */
18
20
  declare function effectiveFields(confirmed: FieldMap, sending: FieldMap | null, pending: FieldMap): FieldMap;
19
21
 
20
- export { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField };
22
+ export { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField, transformPendingTxt };
package/dist/micro/ops.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import "../chunk-IZ2YBCUP.js";
2
2
  import { Delta } from "@dabble/delta";
3
- import { BIT, INC, MAX, parseSuffix, TXT } from "./types.js";
3
+ import {} from "./types.js";
4
4
  function bitmask(index, value) {
5
5
  if (index < 0 || index > 14) throw new Error("Index must be between 0 and 14");
6
6
  return value ? 1 << index : 1 << index + 15;
@@ -16,16 +16,16 @@ function combineBitmasks(a, b) {
16
16
  function generateId() {
17
17
  return Math.random().toString(36).slice(2) + Math.random().toString(36).slice(2);
18
18
  }
19
- function mergeField(existing, incoming, suffix) {
19
+ function mergeField(existing, incoming) {
20
20
  const ev = existing?.val ?? 0;
21
- switch (suffix) {
22
- case INC:
23
- return { val: ev + incoming.val, ts: incoming.ts };
24
- case BIT:
25
- return { val: applyBitmask(ev, incoming.val), ts: incoming.ts };
26
- case MAX:
21
+ switch (incoming.op) {
22
+ case "+":
23
+ return { op: "+", val: ev + incoming.val, ts: incoming.ts };
24
+ case "~":
25
+ return { op: "~", val: applyBitmask(ev, incoming.val), ts: incoming.ts };
26
+ case "^":
27
27
  return incoming.val >= ev ? incoming : existing;
28
- case TXT:
28
+ case "#":
29
29
  return incoming;
30
30
  // text composed separately
31
31
  default:
@@ -40,19 +40,18 @@ function consolidateOps(pending, newOps) {
40
40
  result[key] = field;
41
41
  continue;
42
42
  }
43
- const { suffix } = parseSuffix(key);
44
- switch (suffix) {
45
- case INC:
46
- result[key] = { val: ex.val + field.val, ts: field.ts };
43
+ switch (field.op) {
44
+ case "+":
45
+ result[key] = { op: "+", val: ex.val + field.val, ts: field.ts };
47
46
  break;
48
- case BIT:
49
- result[key] = { val: combineBitmasks(ex.val, field.val), ts: field.ts };
47
+ case "~":
48
+ result[key] = { op: "~", val: combineBitmasks(ex.val, field.val), ts: field.ts };
50
49
  break;
51
- case MAX:
50
+ case "^":
52
51
  result[key] = field.val >= ex.val ? field : ex;
53
52
  break;
54
- case TXT:
55
- result[key] = { val: new Delta(ex.val).compose(new Delta(field.val)).ops, ts: field.ts };
53
+ case "#":
54
+ result[key] = { op: "#", val: new Delta(ex.val).compose(new Delta(field.val)).ops, ts: field.ts };
56
55
  break;
57
56
  default:
58
57
  result[key] = field;
@@ -60,12 +59,23 @@ function consolidateOps(pending, newOps) {
60
59
  }
61
60
  return result;
62
61
  }
62
+ function transformPendingTxt(pending, textLog) {
63
+ const result = { ...pending };
64
+ for (const [key, deltas] of Object.entries(textLog)) {
65
+ if (!result[key]) continue;
66
+ let p = new Delta(result[key].val);
67
+ for (const delta of deltas) {
68
+ p = new Delta(delta).transform(p, true);
69
+ }
70
+ result[key] = { op: "#", val: p.ops, ts: result[key].ts };
71
+ }
72
+ return result;
73
+ }
63
74
  function buildState(fields) {
64
75
  const obj = {};
65
76
  for (const [key, field] of Object.entries(fields)) {
66
77
  if (field.val == null) continue;
67
- const { path } = parseSuffix(key);
68
- const parts = path.split(".");
78
+ const parts = key.split(".");
69
79
  let cur = obj;
70
80
  for (let i = 0; i < parts.length - 1; i++) cur = cur[parts[i]] ??= {};
71
81
  cur[parts[parts.length - 1]] = field.val;
@@ -77,12 +87,11 @@ function effectiveFields(confirmed, sending, pending) {
77
87
  const layers = sending ? [sending, pending] : [pending];
78
88
  for (const layer of layers) {
79
89
  for (const [key, field] of Object.entries(layer)) {
80
- const { suffix } = parseSuffix(key);
81
- if (suffix === TXT) {
90
+ if (field.op === "#") {
82
91
  const base = result[key]?.val ? new Delta(result[key].val) : new Delta();
83
- result[key] = { val: base.compose(new Delta(field.val)).ops, ts: field.ts };
92
+ result[key] = { op: "#", val: base.compose(new Delta(field.val)).ops, ts: field.ts };
84
93
  } else {
85
- result[key] = mergeField(result[key], field, suffix);
94
+ result[key] = mergeField(result[key], field);
86
95
  }
87
96
  }
88
97
  }
@@ -96,5 +105,6 @@ export {
96
105
  consolidateOps,
97
106
  effectiveFields,
98
107
  generateId,
99
- mergeField
108
+ mergeField,
109
+ transformPendingTxt
100
110
  };