@dabble/patches 0.8.0 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/json-patch/utils/getType.d.ts +1 -1
- package/dist/micro/client.d.ts +44 -0
- package/dist/micro/client.js +206 -0
- package/dist/micro/doc.d.ts +51 -0
- package/dist/micro/doc.js +138 -0
- package/dist/micro/index.d.ts +7 -0
- package/dist/micro/index.js +26 -0
- package/dist/micro/ops.d.ts +20 -0
- package/dist/micro/ops.js +100 -0
- package/dist/micro/server.d.ts +44 -0
- package/dist/micro/server.js +195 -0
- package/dist/micro/types.d.ts +68 -0
- package/dist/micro/types.js +16 -0
- package/package.json +5 -1
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { State, JSONPatchOp, JSONPatchOpHandler } from '../types.js';
|
|
2
2
|
|
|
3
3
|
declare function getType(state: State, patch: JSONPatchOp): JSONPatchOpHandler;
|
|
4
|
-
declare function getTypeLike(state: State, patch: JSONPatchOp): "
|
|
4
|
+
declare function getTypeLike(state: State, patch: JSONPatchOp): "add" | "remove" | "replace" | "move" | "copy" | "test";
|
|
5
5
|
|
|
6
6
|
export { getType, getTypeLike };
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { Signal } from 'easy-signal';
|
|
2
|
+
import { MicroDoc } from './doc.js';
|
|
3
|
+
import '@dabble/delta';
|
|
4
|
+
import './types.js';
|
|
5
|
+
|
|
6
|
+
interface ClientOptions {
|
|
7
|
+
/** Base URL for REST API, e.g. "https://api.example.com" */
|
|
8
|
+
url: string;
|
|
9
|
+
/** If provided, persists state to IndexedDB with this database name. */
|
|
10
|
+
dbName?: string;
|
|
11
|
+
/** Debounce delay in ms before flushing pending ops. Default: 300 */
|
|
12
|
+
debounce?: number;
|
|
13
|
+
}
|
|
14
|
+
declare class MicroClient {
|
|
15
|
+
private _url;
|
|
16
|
+
private _dbName?;
|
|
17
|
+
private _debounce;
|
|
18
|
+
private _docs;
|
|
19
|
+
private _ws;
|
|
20
|
+
private _wsBackoff;
|
|
21
|
+
private _wsTimer;
|
|
22
|
+
private _db;
|
|
23
|
+
readonly onConnection: Signal<(connected: boolean) => void>;
|
|
24
|
+
constructor(opts: ClientOptions);
|
|
25
|
+
/** Open a document. Fetches from server (or IDB cache), subscribes via WS. */
|
|
26
|
+
open<T = Record<string, any>>(docId: string): Promise<MicroDoc<T>>;
|
|
27
|
+
/** Close a document subscription. */
|
|
28
|
+
close(docId: string): void;
|
|
29
|
+
/** Force flush pending ops for a document immediately. */
|
|
30
|
+
flush(docId: string): Promise<void>;
|
|
31
|
+
/** Disconnect WebSocket and clean up. */
|
|
32
|
+
destroy(): void;
|
|
33
|
+
private _scheduleFlush;
|
|
34
|
+
private _doFlush;
|
|
35
|
+
private _ensureWS;
|
|
36
|
+
private _reconnectWS;
|
|
37
|
+
private _wsSend;
|
|
38
|
+
private _fetch;
|
|
39
|
+
private _idbOpen;
|
|
40
|
+
private _idbLoad;
|
|
41
|
+
private _idbSave;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
export { type ClientOptions, MicroClient };
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
import "../chunk-IZ2YBCUP.js";
|
|
2
|
+
import { signal } from "easy-signal";
|
|
3
|
+
import { MicroDoc } from "./doc.js";
|
|
4
|
+
class MicroClient {
|
|
5
|
+
_url;
|
|
6
|
+
_dbName;
|
|
7
|
+
_debounce;
|
|
8
|
+
_docs = /* @__PURE__ */ new Map();
|
|
9
|
+
_ws = null;
|
|
10
|
+
_wsBackoff = 0;
|
|
11
|
+
_wsTimer = null;
|
|
12
|
+
_db = null;
|
|
13
|
+
onConnection = signal();
|
|
14
|
+
constructor(opts) {
|
|
15
|
+
this._url = opts.url.replace(/\/$/, "");
|
|
16
|
+
this._dbName = opts.dbName;
|
|
17
|
+
this._debounce = opts.debounce ?? 300;
|
|
18
|
+
}
|
|
19
|
+
/** Open a document. Fetches from server (or IDB cache), subscribes via WS. */
|
|
20
|
+
async open(docId) {
|
|
21
|
+
if (this._docs.has(docId)) return this._docs.get(docId).doc;
|
|
22
|
+
let state = { rev: 0, fields: {} };
|
|
23
|
+
let pending = {};
|
|
24
|
+
if (this._dbName) {
|
|
25
|
+
const cached = await this._idbLoad(docId);
|
|
26
|
+
if (cached) {
|
|
27
|
+
state = { rev: cached.rev, fields: cached.fields };
|
|
28
|
+
pending = cached.pending;
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
try {
|
|
32
|
+
const remote = await this._fetch(`/docs/${docId}`);
|
|
33
|
+
if (remote.rev > state.rev) {
|
|
34
|
+
state = remote;
|
|
35
|
+
pending = {};
|
|
36
|
+
}
|
|
37
|
+
} catch {
|
|
38
|
+
}
|
|
39
|
+
const doc = new MicroDoc(state.fields, pending, state.rev);
|
|
40
|
+
const entry = { doc, timer: null };
|
|
41
|
+
this._docs.set(docId, entry);
|
|
42
|
+
doc._onUpdate = () => this._scheduleFlush(docId);
|
|
43
|
+
this._ensureWS();
|
|
44
|
+
this._wsSend({ type: "sub", docId });
|
|
45
|
+
return doc;
|
|
46
|
+
}
|
|
47
|
+
/** Close a document subscription. */
|
|
48
|
+
close(docId) {
|
|
49
|
+
const entry = this._docs.get(docId);
|
|
50
|
+
if (!entry) return;
|
|
51
|
+
if (entry.timer) clearTimeout(entry.timer);
|
|
52
|
+
this._docs.delete(docId);
|
|
53
|
+
this._wsSend({ type: "unsub", docId });
|
|
54
|
+
}
|
|
55
|
+
/** Force flush pending ops for a document immediately. */
|
|
56
|
+
async flush(docId) {
|
|
57
|
+
const entry = this._docs.get(docId);
|
|
58
|
+
if (!entry) return;
|
|
59
|
+
if (entry.timer) {
|
|
60
|
+
clearTimeout(entry.timer);
|
|
61
|
+
entry.timer = null;
|
|
62
|
+
}
|
|
63
|
+
await this._doFlush(docId, entry);
|
|
64
|
+
}
|
|
65
|
+
/** Disconnect WebSocket and clean up. */
|
|
66
|
+
destroy() {
|
|
67
|
+
for (const [id, entry] of this._docs) {
|
|
68
|
+
if (entry.timer) clearTimeout(entry.timer);
|
|
69
|
+
this._docs.delete(id);
|
|
70
|
+
}
|
|
71
|
+
if (this._wsTimer) clearTimeout(this._wsTimer);
|
|
72
|
+
this._ws?.close();
|
|
73
|
+
this._ws = null;
|
|
74
|
+
this._db?.close();
|
|
75
|
+
this._db = null;
|
|
76
|
+
}
|
|
77
|
+
// --- Sync ---
|
|
78
|
+
_scheduleFlush(docId) {
|
|
79
|
+
const entry = this._docs.get(docId);
|
|
80
|
+
if (!entry || entry.timer) return;
|
|
81
|
+
entry.timer = setTimeout(() => {
|
|
82
|
+
entry.timer = null;
|
|
83
|
+
this._doFlush(docId, entry);
|
|
84
|
+
}, this._debounce);
|
|
85
|
+
}
|
|
86
|
+
async _doFlush(docId, entry) {
|
|
87
|
+
const change = entry.doc._flush();
|
|
88
|
+
if (!change) return;
|
|
89
|
+
if (this._dbName) this._idbSave(docId, entry.doc);
|
|
90
|
+
try {
|
|
91
|
+
const result = await this._fetch(`/docs/${docId}/changes`, {
|
|
92
|
+
method: "POST",
|
|
93
|
+
headers: { "Content-Type": "application/json" },
|
|
94
|
+
body: JSON.stringify(change)
|
|
95
|
+
});
|
|
96
|
+
entry.doc._confirmSend(result.rev);
|
|
97
|
+
if (this._dbName) this._idbSave(docId, entry.doc);
|
|
98
|
+
if (Object.keys(entry.doc.pending).length) this._scheduleFlush(docId);
|
|
99
|
+
} catch {
|
|
100
|
+
entry.doc._failSend();
|
|
101
|
+
entry.timer = setTimeout(() => {
|
|
102
|
+
entry.timer = null;
|
|
103
|
+
this._doFlush(docId, entry);
|
|
104
|
+
}, 2e3);
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
// --- WebSocket ---
|
|
108
|
+
_ensureWS() {
|
|
109
|
+
if (this._ws && this._ws.readyState <= WebSocket.OPEN) return;
|
|
110
|
+
const wsUrl = this._url.replace(/^http/, "ws") + "/ws";
|
|
111
|
+
const ws = new WebSocket(wsUrl);
|
|
112
|
+
this._ws = ws;
|
|
113
|
+
ws.onopen = () => {
|
|
114
|
+
this._wsBackoff = 0;
|
|
115
|
+
this.onConnection.emit(true);
|
|
116
|
+
for (const docId of this._docs.keys()) {
|
|
117
|
+
this._wsSend({ type: "sub", docId });
|
|
118
|
+
}
|
|
119
|
+
};
|
|
120
|
+
ws.onmessage = (e) => {
|
|
121
|
+
try {
|
|
122
|
+
const msg = JSON.parse(e.data);
|
|
123
|
+
if (msg.type === "change" && msg.docId) {
|
|
124
|
+
const entry = this._docs.get(msg.docId);
|
|
125
|
+
if (entry) entry.doc.applyRemote(msg.fields, msg.rev);
|
|
126
|
+
}
|
|
127
|
+
} catch {
|
|
128
|
+
}
|
|
129
|
+
};
|
|
130
|
+
ws.onclose = () => {
|
|
131
|
+
this.onConnection.emit(false);
|
|
132
|
+
this._reconnectWS();
|
|
133
|
+
};
|
|
134
|
+
ws.onerror = () => ws.close();
|
|
135
|
+
}
|
|
136
|
+
_reconnectWS() {
|
|
137
|
+
if (this._wsTimer) return;
|
|
138
|
+
const delay = Math.min(1e3 * 2 ** this._wsBackoff, 3e4);
|
|
139
|
+
this._wsBackoff++;
|
|
140
|
+
this._wsTimer = setTimeout(() => {
|
|
141
|
+
this._wsTimer = null;
|
|
142
|
+
if (this._docs.size > 0) this._ensureWS();
|
|
143
|
+
}, delay);
|
|
144
|
+
}
|
|
145
|
+
_wsSend(msg) {
|
|
146
|
+
if (this._ws?.readyState === WebSocket.OPEN) {
|
|
147
|
+
this._ws.send(JSON.stringify(msg));
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
// --- REST ---
|
|
151
|
+
async _fetch(path, init) {
|
|
152
|
+
const res = await fetch(this._url + path, init);
|
|
153
|
+
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
|
154
|
+
return res.json();
|
|
155
|
+
}
|
|
156
|
+
// --- IndexedDB ---
|
|
157
|
+
async _idbOpen() {
|
|
158
|
+
if (this._db) return this._db;
|
|
159
|
+
return new Promise((resolve, reject) => {
|
|
160
|
+
const req = indexedDB.open(this._dbName, 1);
|
|
161
|
+
req.onupgradeneeded = () => {
|
|
162
|
+
const db = req.result;
|
|
163
|
+
if (!db.objectStoreNames.contains("docs")) db.createObjectStore("docs");
|
|
164
|
+
if (!db.objectStoreNames.contains("pending")) db.createObjectStore("pending");
|
|
165
|
+
};
|
|
166
|
+
req.onsuccess = () => {
|
|
167
|
+
this._db = req.result;
|
|
168
|
+
resolve(req.result);
|
|
169
|
+
};
|
|
170
|
+
req.onerror = () => reject(req.error);
|
|
171
|
+
});
|
|
172
|
+
}
|
|
173
|
+
async _idbLoad(docId) {
|
|
174
|
+
try {
|
|
175
|
+
const db = await this._idbOpen();
|
|
176
|
+
const tx = db.transaction(["docs", "pending"], "readonly");
|
|
177
|
+
const [docData, pendingData] = await Promise.all([
|
|
178
|
+
idbGet(tx.objectStore("docs"), docId),
|
|
179
|
+
idbGet(tx.objectStore("pending"), docId)
|
|
180
|
+
]);
|
|
181
|
+
if (!docData) return null;
|
|
182
|
+
return { fields: docData.fields, rev: docData.rev, pending: pendingData?.ops ?? {} };
|
|
183
|
+
} catch {
|
|
184
|
+
return null;
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
async _idbSave(docId, doc) {
|
|
188
|
+
try {
|
|
189
|
+
const db = await this._idbOpen();
|
|
190
|
+
const tx = db.transaction(["docs", "pending"], "readwrite");
|
|
191
|
+
tx.objectStore("docs").put({ fields: doc.confirmed, rev: doc.rev }, docId);
|
|
192
|
+
tx.objectStore("pending").put({ ops: doc.pending }, docId);
|
|
193
|
+
} catch {
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
function idbGet(store, key) {
|
|
198
|
+
return new Promise((resolve, reject) => {
|
|
199
|
+
const req = store.get(key);
|
|
200
|
+
req.onsuccess = () => resolve(req.result);
|
|
201
|
+
req.onerror = () => reject(req.error);
|
|
202
|
+
});
|
|
203
|
+
}
|
|
204
|
+
export {
|
|
205
|
+
MicroClient
|
|
206
|
+
};
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { Delta } from '@dabble/delta';
|
|
2
|
+
import { Subscriber, Unsubscriber } from 'easy-signal';
|
|
3
|
+
import { FieldMap, Change } from './types.js';
|
|
4
|
+
|
|
5
|
+
interface BaseUpdates<T> {
|
|
6
|
+
set(val: T): void;
|
|
7
|
+
del(): void;
|
|
8
|
+
}
|
|
9
|
+
interface NumberUpdates extends BaseUpdates<number> {
|
|
10
|
+
inc(val?: number): void;
|
|
11
|
+
bit(val: number): void;
|
|
12
|
+
max(val: number): void;
|
|
13
|
+
}
|
|
14
|
+
interface StringUpdates extends BaseUpdates<string> {
|
|
15
|
+
max(val: string): void;
|
|
16
|
+
}
|
|
17
|
+
interface DeltaUpdates extends BaseUpdates<Delta> {
|
|
18
|
+
txt(delta: Delta): void;
|
|
19
|
+
}
|
|
20
|
+
type Updatable<T> = T extends Delta ? DeltaUpdates : T extends number ? NumberUpdates : T extends string ? StringUpdates : T extends object ? {
|
|
21
|
+
[K in keyof T]-?: Updatable<NonNullable<T[K]>>;
|
|
22
|
+
} & BaseUpdates<T> : BaseUpdates<T>;
|
|
23
|
+
declare class MicroDoc<T = Record<string, any>> {
|
|
24
|
+
rev: number;
|
|
25
|
+
private _store;
|
|
26
|
+
private _confirmed;
|
|
27
|
+
private _sending;
|
|
28
|
+
private _sendingId;
|
|
29
|
+
private _pending;
|
|
30
|
+
/** Called by client when ops are queued. */
|
|
31
|
+
_onUpdate?: () => void;
|
|
32
|
+
constructor(confirmed?: FieldMap, pending?: FieldMap, rev?: number);
|
|
33
|
+
get state(): T;
|
|
34
|
+
get pending(): FieldMap;
|
|
35
|
+
get confirmed(): FieldMap;
|
|
36
|
+
get isSending(): boolean;
|
|
37
|
+
subscribe(cb: Subscriber<T>, noInit?: false): Unsubscriber;
|
|
38
|
+
/** Apply changes via proxy-based updater. */
|
|
39
|
+
update(fn: (doc: Updatable<T>) => void): void;
|
|
40
|
+
/** Move pending to sending, return the Change to POST. Returns null if nothing to send. */
|
|
41
|
+
_flush(): Change | null;
|
|
42
|
+
/** Confirm a successful send. Merge sending into confirmed. */
|
|
43
|
+
_confirmSend(rev: number): void;
|
|
44
|
+
/** Roll sending back into pending on failure. */
|
|
45
|
+
_failSend(): void;
|
|
46
|
+
/** Apply remote fields from another client (via WS push). */
|
|
47
|
+
applyRemote(fields: FieldMap, rev: number): void;
|
|
48
|
+
private _rebuild;
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
export { MicroDoc, type Updatable };
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import "../chunk-IZ2YBCUP.js";
|
|
2
|
+
import { Delta } from "@dabble/delta";
|
|
3
|
+
import { batch, store } from "easy-signal";
|
|
4
|
+
import { buildState, consolidateOps, effectiveFields, generateId, mergeField } from "./ops.js";
|
|
5
|
+
import { TXT, parseSuffix } from "./types.js";
|
|
6
|
+
function createUpdater(emit, path = "") {
|
|
7
|
+
return new Proxy({}, {
|
|
8
|
+
get(_, prop) {
|
|
9
|
+
const p = path ? `${path}.${prop}` : prop;
|
|
10
|
+
switch (prop) {
|
|
11
|
+
case "set":
|
|
12
|
+
return (val) => emit(path, "", val);
|
|
13
|
+
case "del":
|
|
14
|
+
return () => emit(path, "", null);
|
|
15
|
+
case "inc":
|
|
16
|
+
return (val = 1) => emit(path, "+", val);
|
|
17
|
+
case "bit":
|
|
18
|
+
return (val) => emit(path, "~", val);
|
|
19
|
+
case "max":
|
|
20
|
+
return (val) => emit(path, "^", val);
|
|
21
|
+
case "txt":
|
|
22
|
+
return (delta) => emit(path, "#", delta.ops);
|
|
23
|
+
default:
|
|
24
|
+
return createUpdater(emit, p);
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
});
|
|
28
|
+
}
|
|
29
|
+
class MicroDoc {
|
|
30
|
+
constructor(confirmed = {}, pending = {}, rev = 0) {
|
|
31
|
+
this.rev = rev;
|
|
32
|
+
this._confirmed = { ...confirmed };
|
|
33
|
+
this._pending = { ...pending };
|
|
34
|
+
this._store = store(this._rebuild());
|
|
35
|
+
}
|
|
36
|
+
_store;
|
|
37
|
+
_confirmed;
|
|
38
|
+
_sending = null;
|
|
39
|
+
_sendingId = null;
|
|
40
|
+
_pending = {};
|
|
41
|
+
/** Called by client when ops are queued. */
|
|
42
|
+
_onUpdate;
|
|
43
|
+
get state() {
|
|
44
|
+
return this._store.state;
|
|
45
|
+
}
|
|
46
|
+
get pending() {
|
|
47
|
+
return this._pending;
|
|
48
|
+
}
|
|
49
|
+
get confirmed() {
|
|
50
|
+
return this._confirmed;
|
|
51
|
+
}
|
|
52
|
+
get isSending() {
|
|
53
|
+
return this._sending !== null;
|
|
54
|
+
}
|
|
55
|
+
subscribe(cb, noInit) {
|
|
56
|
+
return this._store.subscribe(cb, noInit);
|
|
57
|
+
}
|
|
58
|
+
/** Apply changes via proxy-based updater. */
|
|
59
|
+
update(fn) {
|
|
60
|
+
const ops = {};
|
|
61
|
+
const ts = Date.now();
|
|
62
|
+
const emit = (path, suffix, val) => {
|
|
63
|
+
ops[suffix ? path + suffix : path] = { val, ts };
|
|
64
|
+
};
|
|
65
|
+
fn(createUpdater(emit));
|
|
66
|
+
if (!Object.keys(ops).length) return;
|
|
67
|
+
this._pending = consolidateOps(this._pending, ops);
|
|
68
|
+
this._store.state = this._rebuild();
|
|
69
|
+
this._onUpdate?.();
|
|
70
|
+
}
|
|
71
|
+
/** Move pending to sending, return the Change to POST. Returns null if nothing to send. */
|
|
72
|
+
_flush() {
|
|
73
|
+
if (this._sending || !Object.keys(this._pending).length) return null;
|
|
74
|
+
this._sending = this._pending;
|
|
75
|
+
this._sendingId = generateId();
|
|
76
|
+
this._pending = {};
|
|
77
|
+
return { id: this._sendingId, rev: this.rev, fields: this._sending };
|
|
78
|
+
}
|
|
79
|
+
/** Confirm a successful send. Merge sending into confirmed. */
|
|
80
|
+
_confirmSend(rev) {
|
|
81
|
+
if (!this._sending) return;
|
|
82
|
+
for (const [key, field] of Object.entries(this._sending)) {
|
|
83
|
+
const { suffix } = parseSuffix(key);
|
|
84
|
+
if (suffix === TXT) {
|
|
85
|
+
const base = this._confirmed[key]?.val ? new Delta(this._confirmed[key].val) : new Delta();
|
|
86
|
+
this._confirmed[key] = { val: base.compose(new Delta(field.val)).ops, ts: field.ts };
|
|
87
|
+
} else {
|
|
88
|
+
this._confirmed[key] = mergeField(this._confirmed[key], field, suffix);
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
this._sending = null;
|
|
92
|
+
this._sendingId = null;
|
|
93
|
+
this.rev = rev;
|
|
94
|
+
this._store.state = this._rebuild();
|
|
95
|
+
}
|
|
96
|
+
/** Roll sending back into pending on failure. */
|
|
97
|
+
_failSend() {
|
|
98
|
+
if (!this._sending) return;
|
|
99
|
+
this._pending = consolidateOps(this._sending, this._pending);
|
|
100
|
+
this._sending = null;
|
|
101
|
+
this._sendingId = null;
|
|
102
|
+
}
|
|
103
|
+
/** Apply remote fields from another client (via WS push). */
|
|
104
|
+
applyRemote(fields, rev) {
|
|
105
|
+
batch(() => {
|
|
106
|
+
for (const [key, field] of Object.entries(fields)) {
|
|
107
|
+
const { suffix } = parseSuffix(key);
|
|
108
|
+
if (suffix === TXT) {
|
|
109
|
+
const remote = new Delta(field.val);
|
|
110
|
+
if (this._sending?.[key]) {
|
|
111
|
+
const s = new Delta(this._sending[key].val);
|
|
112
|
+
this._sending[key] = { val: s.transform(remote, false).ops, ts: this._sending[key].ts };
|
|
113
|
+
const rPrime = remote.transform(s, true);
|
|
114
|
+
if (this._pending[key]) {
|
|
115
|
+
const p = new Delta(this._pending[key].val);
|
|
116
|
+
this._pending[key] = { val: p.transform(rPrime, false).ops, ts: this._pending[key].ts };
|
|
117
|
+
}
|
|
118
|
+
} else if (this._pending[key]) {
|
|
119
|
+
const p = new Delta(this._pending[key].val);
|
|
120
|
+
this._pending[key] = { val: p.transform(remote, false).ops, ts: this._pending[key].ts };
|
|
121
|
+
}
|
|
122
|
+
const base = this._confirmed[key]?.val ? new Delta(this._confirmed[key].val) : new Delta();
|
|
123
|
+
this._confirmed[key] = { val: base.compose(remote).ops, ts: field.ts };
|
|
124
|
+
} else {
|
|
125
|
+
this._confirmed[key] = field;
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
this.rev = rev;
|
|
129
|
+
this._store.state = this._rebuild();
|
|
130
|
+
});
|
|
131
|
+
}
|
|
132
|
+
_rebuild() {
|
|
133
|
+
return buildState(effectiveFields(this._confirmed, this._sending, this._pending));
|
|
134
|
+
}
|
|
135
|
+
}
|
|
136
|
+
export {
|
|
137
|
+
MicroDoc
|
|
138
|
+
};
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
export { ClientOptions, MicroClient } from './client.js';
|
|
2
|
+
export { MicroDoc, Updatable } from './doc.js';
|
|
3
|
+
export { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField } from './ops.js';
|
|
4
|
+
export { MemoryDbBackend, MicroServer } from './server.js';
|
|
5
|
+
export { BIT, Change, ChangeLogEntry, CommitResult, DbBackend, DocState, Field, FieldMap, INC, MAX, ObjectStore, REF_THRESHOLD, TXT, TextLogEntry, parseSuffix } from './types.js';
|
|
6
|
+
import 'easy-signal';
|
|
7
|
+
import '@dabble/delta';
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import "../chunk-IZ2YBCUP.js";
|
|
2
|
+
import { MicroClient } from "./client.js";
|
|
3
|
+
import { MicroDoc } from "./doc.js";
|
|
4
|
+
import { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField } from "./ops.js";
|
|
5
|
+
import { MemoryDbBackend, MicroServer } from "./server.js";
|
|
6
|
+
import { BIT, INC, MAX, parseSuffix, REF_THRESHOLD, TXT } from "./types.js";
|
|
7
|
+
export {
|
|
8
|
+
BIT,
|
|
9
|
+
INC,
|
|
10
|
+
MAX,
|
|
11
|
+
MemoryDbBackend,
|
|
12
|
+
MicroClient,
|
|
13
|
+
MicroDoc,
|
|
14
|
+
MicroServer,
|
|
15
|
+
REF_THRESHOLD,
|
|
16
|
+
TXT,
|
|
17
|
+
applyBitmask,
|
|
18
|
+
bitmask,
|
|
19
|
+
buildState,
|
|
20
|
+
combineBitmasks,
|
|
21
|
+
consolidateOps,
|
|
22
|
+
effectiveFields,
|
|
23
|
+
generateId,
|
|
24
|
+
mergeField,
|
|
25
|
+
parseSuffix
|
|
26
|
+
};
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import { FieldMap, Field } from './types.js';
|
|
2
|
+
|
|
3
|
+
/** Create a bitmask value. Bottom 15 bits = on, top 15 bits = off. */
|
|
4
|
+
declare function bitmask(index: number, value: boolean): number;
|
|
5
|
+
/** Apply a bitmask to a number. */
|
|
6
|
+
declare function applyBitmask(num: number, mask: number): number;
|
|
7
|
+
/** Combine two bitmasks into one. */
|
|
8
|
+
declare function combineBitmasks(a: number, b: number): number;
|
|
9
|
+
/** Generate a random ID. */
|
|
10
|
+
declare function generateId(): string;
|
|
11
|
+
/** Merge a single incoming field with an existing value, based on suffix type. */
|
|
12
|
+
declare function mergeField(existing: Field | undefined, incoming: Field, suffix: string): Field;
|
|
13
|
+
/** Consolidate new ops into existing pending ops (client-side batching). */
|
|
14
|
+
declare function consolidateOps(pending: FieldMap, newOps: FieldMap): FieldMap;
|
|
15
|
+
/** Convert flat dot-notation FieldMap to a nested object. Strips suffixes from keys. */
|
|
16
|
+
declare function buildState<T = Record<string, any>>(fields: FieldMap): T;
|
|
17
|
+
/** Compute effective fields by layering confirmed + sending + pending. */
|
|
18
|
+
declare function effectiveFields(confirmed: FieldMap, sending: FieldMap | null, pending: FieldMap): FieldMap;
|
|
19
|
+
|
|
20
|
+
export { applyBitmask, bitmask, buildState, combineBitmasks, consolidateOps, effectiveFields, generateId, mergeField };
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import "../chunk-IZ2YBCUP.js";
|
|
2
|
+
import { Delta } from "@dabble/delta";
|
|
3
|
+
import { BIT, INC, MAX, parseSuffix, TXT } from "./types.js";
|
|
4
|
+
function bitmask(index, value) {
|
|
5
|
+
if (index < 0 || index > 14) throw new Error("Index must be between 0 and 14");
|
|
6
|
+
return value ? 1 << index : 1 << index + 15;
|
|
7
|
+
}
|
|
8
|
+
function applyBitmask(num, mask) {
|
|
9
|
+
return num & ~(mask >> 15 & 32767) | mask & 32767;
|
|
10
|
+
}
|
|
11
|
+
function combineBitmasks(a, b) {
|
|
12
|
+
const aOff = a >> 15 & 32767, aOn = a & 32767;
|
|
13
|
+
const bOff = b >> 15 & 32767, bOn = b & 32767;
|
|
14
|
+
return (aOff & ~bOn | bOff) << 15 | (aOn & ~bOff | bOn);
|
|
15
|
+
}
|
|
16
|
+
function generateId() {
|
|
17
|
+
return Math.random().toString(36).slice(2) + Math.random().toString(36).slice(2);
|
|
18
|
+
}
|
|
19
|
+
function mergeField(existing, incoming, suffix) {
|
|
20
|
+
const ev = existing?.val ?? 0;
|
|
21
|
+
switch (suffix) {
|
|
22
|
+
case INC:
|
|
23
|
+
return { val: ev + incoming.val, ts: incoming.ts };
|
|
24
|
+
case BIT:
|
|
25
|
+
return { val: applyBitmask(ev, incoming.val), ts: incoming.ts };
|
|
26
|
+
case MAX:
|
|
27
|
+
return incoming.val >= ev ? incoming : existing;
|
|
28
|
+
case TXT:
|
|
29
|
+
return incoming;
|
|
30
|
+
// text composed separately
|
|
31
|
+
default:
|
|
32
|
+
return incoming.ts >= (existing?.ts ?? 0) ? incoming : existing;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
function consolidateOps(pending, newOps) {
|
|
36
|
+
const result = { ...pending };
|
|
37
|
+
for (const [key, field] of Object.entries(newOps)) {
|
|
38
|
+
const ex = result[key];
|
|
39
|
+
if (!ex) {
|
|
40
|
+
result[key] = field;
|
|
41
|
+
continue;
|
|
42
|
+
}
|
|
43
|
+
const { suffix } = parseSuffix(key);
|
|
44
|
+
switch (suffix) {
|
|
45
|
+
case INC:
|
|
46
|
+
result[key] = { val: ex.val + field.val, ts: field.ts };
|
|
47
|
+
break;
|
|
48
|
+
case BIT:
|
|
49
|
+
result[key] = { val: combineBitmasks(ex.val, field.val), ts: field.ts };
|
|
50
|
+
break;
|
|
51
|
+
case MAX:
|
|
52
|
+
result[key] = field.val >= ex.val ? field : ex;
|
|
53
|
+
break;
|
|
54
|
+
case TXT:
|
|
55
|
+
result[key] = { val: new Delta(ex.val).compose(new Delta(field.val)).ops, ts: field.ts };
|
|
56
|
+
break;
|
|
57
|
+
default:
|
|
58
|
+
result[key] = field;
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
return result;
|
|
62
|
+
}
|
|
63
|
+
function buildState(fields) {
|
|
64
|
+
const obj = {};
|
|
65
|
+
for (const [key, field] of Object.entries(fields)) {
|
|
66
|
+
if (field.val == null) continue;
|
|
67
|
+
const { path } = parseSuffix(key);
|
|
68
|
+
const parts = path.split(".");
|
|
69
|
+
let cur = obj;
|
|
70
|
+
for (let i = 0; i < parts.length - 1; i++) cur = cur[parts[i]] ??= {};
|
|
71
|
+
cur[parts[parts.length - 1]] = field.val;
|
|
72
|
+
}
|
|
73
|
+
return obj;
|
|
74
|
+
}
|
|
75
|
+
function effectiveFields(confirmed, sending, pending) {
|
|
76
|
+
const result = { ...confirmed };
|
|
77
|
+
const layers = sending ? [sending, pending] : [pending];
|
|
78
|
+
for (const layer of layers) {
|
|
79
|
+
for (const [key, field] of Object.entries(layer)) {
|
|
80
|
+
const { suffix } = parseSuffix(key);
|
|
81
|
+
if (suffix === TXT) {
|
|
82
|
+
const base = result[key]?.val ? new Delta(result[key].val) : new Delta();
|
|
83
|
+
result[key] = { val: base.compose(new Delta(field.val)).ops, ts: field.ts };
|
|
84
|
+
} else {
|
|
85
|
+
result[key] = mergeField(result[key], field, suffix);
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
return result;
|
|
90
|
+
}
|
|
91
|
+
export {
|
|
92
|
+
applyBitmask,
|
|
93
|
+
bitmask,
|
|
94
|
+
buildState,
|
|
95
|
+
combineBitmasks,
|
|
96
|
+
consolidateOps,
|
|
97
|
+
effectiveFields,
|
|
98
|
+
generateId,
|
|
99
|
+
mergeField
|
|
100
|
+
};
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { DbBackend, FieldMap, Field, TextLogEntry, ChangeLogEntry, ObjectStore, DocState, CommitResult, Change } from './types.js';
|
|
2
|
+
|
|
3
|
+
type Subscriber = (fields: FieldMap, rev: number) => void;
|
|
4
|
+
declare class MicroServer {
|
|
5
|
+
private _db;
|
|
6
|
+
private _objects?;
|
|
7
|
+
private _subs;
|
|
8
|
+
constructor(_db: DbBackend, _objects?: ObjectStore | undefined);
|
|
9
|
+
/** Get full document state. */
|
|
10
|
+
getDoc(docId: string): Promise<DocState>;
|
|
11
|
+
/** Get fields changed since a given revision (for reconnection). */
|
|
12
|
+
getChangesSince(docId: string, sinceRev: number): Promise<CommitResult>;
|
|
13
|
+
/** Process an incoming change from a client. */
|
|
14
|
+
commitChanges(docId: string, change: Change): Promise<CommitResult>;
|
|
15
|
+
/** Compact text log entries up to a revision. */
|
|
16
|
+
compactTextLog(docId: string, key: string, throughRev: number): Promise<void>;
|
|
17
|
+
/** Prune old change log entries. */
|
|
18
|
+
pruneChanges(docId: string, beforeTs: number): Promise<void>;
|
|
19
|
+
/** Subscribe to changes for a document. */
|
|
20
|
+
subscribe(docId: string, cb: Subscriber): () => void;
|
|
21
|
+
/** Get subscriber count for a document. */
|
|
22
|
+
subscriberCount(docId: string): number;
|
|
23
|
+
private _broadcast;
|
|
24
|
+
private _handleLargeValue;
|
|
25
|
+
}
|
|
26
|
+
declare class MemoryDbBackend implements DbBackend {
|
|
27
|
+
private _fields;
|
|
28
|
+
private _textLog;
|
|
29
|
+
private _changeLog;
|
|
30
|
+
private _revs;
|
|
31
|
+
getFields(docId: string): Promise<FieldMap>;
|
|
32
|
+
getField(docId: string, key: string): Promise<Field | null>;
|
|
33
|
+
setFields(docId: string, fields: FieldMap): Promise<void>;
|
|
34
|
+
getTextLog(docId: string, key: string, sinceRev?: number): Promise<TextLogEntry[]>;
|
|
35
|
+
appendTextLog(docId: string, entry: TextLogEntry): Promise<void>;
|
|
36
|
+
compactTextLog(docId: string, key: string, throughRev: number, composedDelta: any): Promise<void>;
|
|
37
|
+
hasChange(docId: string, changeId: string): Promise<boolean>;
|
|
38
|
+
addChange(docId: string, entry: ChangeLogEntry): Promise<void>;
|
|
39
|
+
pruneChanges(docId: string, beforeTs: number): Promise<void>;
|
|
40
|
+
getRev(docId: string): Promise<number>;
|
|
41
|
+
setRev(docId: string, rev: number): Promise<void>;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
export { MemoryDbBackend, MicroServer };
|
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
import "../chunk-IZ2YBCUP.js";
|
|
2
|
+
import { Delta } from "@dabble/delta";
|
|
3
|
+
import { applyBitmask } from "./ops.js";
|
|
4
|
+
import { BIT, INC, MAX, parseSuffix, REF_THRESHOLD, TXT } from "./types.js";
|
|
5
|
+
class MicroServer {
|
|
6
|
+
constructor(_db, _objects) {
|
|
7
|
+
this._db = _db;
|
|
8
|
+
this._objects = _objects;
|
|
9
|
+
}
|
|
10
|
+
_subs = /* @__PURE__ */ new Map();
|
|
11
|
+
/** Get full document state. */
|
|
12
|
+
async getDoc(docId) {
|
|
13
|
+
const [fields, rev] = await Promise.all([this._db.getFields(docId), this._db.getRev(docId)]);
|
|
14
|
+
return { fields, rev };
|
|
15
|
+
}
|
|
16
|
+
/** Get fields changed since a given revision (for reconnection). */
|
|
17
|
+
async getChangesSince(docId, sinceRev) {
|
|
18
|
+
const { fields, rev } = await this.getDoc(docId);
|
|
19
|
+
if (sinceRev === 0) return { fields, rev };
|
|
20
|
+
return { fields, rev };
|
|
21
|
+
}
|
|
22
|
+
/** Process an incoming change from a client. */
|
|
23
|
+
async commitChanges(docId, change) {
|
|
24
|
+
if (await this._db.hasChange(docId, change.id)) {
|
|
25
|
+
const rev2 = await this._db.getRev(docId);
|
|
26
|
+
return { rev: rev2, fields: {} };
|
|
27
|
+
}
|
|
28
|
+
const resultFields = {};
|
|
29
|
+
let rev = await this._db.getRev(docId);
|
|
30
|
+
let hasCombinableOps = false;
|
|
31
|
+
for (const [key, incoming] of Object.entries(change.fields)) {
|
|
32
|
+
const { suffix } = parseSuffix(key);
|
|
33
|
+
const existing = await this._db.getField(docId, key);
|
|
34
|
+
let resolved;
|
|
35
|
+
switch (suffix) {
|
|
36
|
+
case INC: {
|
|
37
|
+
const ev = existing?.val ?? 0;
|
|
38
|
+
resolved = { val: ev + incoming.val, ts: incoming.ts };
|
|
39
|
+
hasCombinableOps = true;
|
|
40
|
+
break;
|
|
41
|
+
}
|
|
42
|
+
case BIT: {
|
|
43
|
+
const ev = existing?.val ?? 0;
|
|
44
|
+
resolved = { val: applyBitmask(ev, incoming.val), ts: incoming.ts };
|
|
45
|
+
hasCombinableOps = true;
|
|
46
|
+
break;
|
|
47
|
+
}
|
|
48
|
+
case MAX: {
|
|
49
|
+
const ev = existing?.val ?? 0;
|
|
50
|
+
resolved = incoming.val >= ev ? incoming : existing;
|
|
51
|
+
if (resolved === existing) continue;
|
|
52
|
+
break;
|
|
53
|
+
}
|
|
54
|
+
case TXT: {
|
|
55
|
+
hasCombinableOps = true;
|
|
56
|
+
const log = await this._db.getTextLog(docId, key, change.rev);
|
|
57
|
+
let delta = new Delta(incoming.val);
|
|
58
|
+
for (const entry of log) {
|
|
59
|
+
const serverDelta = new Delta(entry.delta);
|
|
60
|
+
delta = serverDelta.transform(delta, true);
|
|
61
|
+
}
|
|
62
|
+
const base = existing?.val ? new Delta(existing.val) : new Delta();
|
|
63
|
+
resolved = { val: base.compose(delta).ops, ts: incoming.ts };
|
|
64
|
+
await this._db.appendTextLog(docId, { key, delta: delta.ops, rev: rev + 1 });
|
|
65
|
+
resultFields[key] = { val: delta.ops, ts: incoming.ts };
|
|
66
|
+
await this._handleLargeValue(docId, key, resolved);
|
|
67
|
+
const toSave2 = {};
|
|
68
|
+
toSave2[key] = resolved;
|
|
69
|
+
await this._db.setFields(docId, toSave2);
|
|
70
|
+
continue;
|
|
71
|
+
}
|
|
72
|
+
default: {
|
|
73
|
+
if (existing && incoming.ts < existing.ts) continue;
|
|
74
|
+
resolved = incoming;
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
await this._handleLargeValue(docId, key, resolved);
|
|
78
|
+
resultFields[key] = resolved;
|
|
79
|
+
const toSave = {};
|
|
80
|
+
toSave[key] = resolved;
|
|
81
|
+
await this._db.setFields(docId, toSave);
|
|
82
|
+
}
|
|
83
|
+
if (hasCombinableOps) {
|
|
84
|
+
await this._db.addChange(docId, { changeId: change.id, ts: Date.now() });
|
|
85
|
+
}
|
|
86
|
+
rev++;
|
|
87
|
+
await this._db.setRev(docId, rev);
|
|
88
|
+
if (Object.keys(resultFields).length) {
|
|
89
|
+
this._broadcast(docId, resultFields, rev);
|
|
90
|
+
}
|
|
91
|
+
return { rev, fields: resultFields };
|
|
92
|
+
}
|
|
93
|
+
/** Compact text log entries up to a revision. */
|
|
94
|
+
async compactTextLog(docId, key, throughRev) {
|
|
95
|
+
const entries = await this._db.getTextLog(docId, key, 0);
|
|
96
|
+
if (entries.length < 2) return;
|
|
97
|
+
const toCompose = entries.filter((e) => e.rev <= throughRev);
|
|
98
|
+
if (toCompose.length < 2) return;
|
|
99
|
+
let composed = new Delta(toCompose[0].delta);
|
|
100
|
+
for (let i = 1; i < toCompose.length; i++) {
|
|
101
|
+
composed = composed.compose(new Delta(toCompose[i].delta));
|
|
102
|
+
}
|
|
103
|
+
await this._db.compactTextLog(docId, key, throughRev, composed.ops);
|
|
104
|
+
}
|
|
105
|
+
/** Prune old change log entries. */
|
|
106
|
+
async pruneChanges(docId, beforeTs) {
|
|
107
|
+
await this._db.pruneChanges(docId, beforeTs);
|
|
108
|
+
}
|
|
109
|
+
/** Subscribe to changes for a document. */
|
|
110
|
+
subscribe(docId, cb) {
|
|
111
|
+
let subs = this._subs.get(docId);
|
|
112
|
+
if (!subs) {
|
|
113
|
+
subs = /* @__PURE__ */ new Set();
|
|
114
|
+
this._subs.set(docId, subs);
|
|
115
|
+
}
|
|
116
|
+
subs.add(cb);
|
|
117
|
+
return () => {
|
|
118
|
+
subs.delete(cb);
|
|
119
|
+
if (!subs.size) this._subs.delete(docId);
|
|
120
|
+
};
|
|
121
|
+
}
|
|
122
|
+
/** Get subscriber count for a document. */
|
|
123
|
+
subscriberCount(docId) {
|
|
124
|
+
return this._subs.get(docId)?.size ?? 0;
|
|
125
|
+
}
|
|
126
|
+
_broadcast(docId, fields, rev, exclude) {
|
|
127
|
+
const subs = this._subs.get(docId);
|
|
128
|
+
if (!subs) return;
|
|
129
|
+
for (const cb of subs) {
|
|
130
|
+
if (cb !== exclude) cb(fields, rev);
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
async _handleLargeValue(docId, key, field) {
|
|
134
|
+
if (!this._objects) return;
|
|
135
|
+
const json = JSON.stringify(field.val);
|
|
136
|
+
if (json.length > REF_THRESHOLD) {
|
|
137
|
+
const ref = await this._objects.put(`${docId}/${key}`, field.val);
|
|
138
|
+
field.val = { __ref: ref, __rev: field.ts };
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
/**
 * In-memory DbBackend implementation. Suitable for tests and single-process
 * servers; all state is held in Maps and lost when the process exits.
 */
class MemoryDbBackend {
  _fields = new Map();
  _textLog = new Map();
  _changeLog = new Map();
  _revs = new Map();
  /** Shallow copy of all stored fields for a document ({} when none). */
  async getFields(docId) {
    return Object.assign({}, this._fields.get(docId));
  }
  /** A single field by key, or null when the doc or key is missing. */
  async getField(docId, key) {
    const doc = this._fields.get(docId);
    if (!doc) return null;
    return doc[key] ?? null;
  }
  /** Merge `fields` over any existing fields for the document. */
  async setFields(docId, fields) {
    const merged = Object.assign({}, this._fields.get(docId), fields);
    this._fields.set(docId, merged);
  }
  /** Text-log entries for a key with rev strictly greater than sinceRev. */
  async getTextLog(docId, key, sinceRev = 0) {
    const log = this._textLog.get(`${docId}:${key}`);
    if (!log) return [];
    return log.filter((entry) => entry.rev > sinceRev);
  }
  /** Append one entry to the per-key text log, creating the log if needed. */
  async appendTextLog(docId, entry) {
    const logKey = `${docId}:${entry.key}`;
    let log = this._textLog.get(logKey);
    if (!log) {
      log = [];
      this._textLog.set(logKey, log);
    }
    log.push(entry);
  }
  /** Replace entries with rev <= throughRev by one composed entry at throughRev. */
  async compactTextLog(docId, key, throughRev, composedDelta) {
    const logKey = `${docId}:${key}`;
    const existing = this._textLog.get(logKey) ?? [];
    const kept = existing.filter((entry) => entry.rev > throughRev);
    this._textLog.set(logKey, [{ key, delta: composedDelta, rev: throughRev }, ...kept]);
  }
  /** Whether a change id has already been recorded (idempotency check). */
  async hasChange(docId, changeId) {
    const log = this._changeLog.get(docId);
    return !!log && log.some((entry) => entry.changeId === changeId);
  }
  /** Record a committed change id with its timestamp. */
  async addChange(docId, entry) {
    let log = this._changeLog.get(docId);
    if (!log) {
      log = [];
      this._changeLog.set(docId, log);
    }
    log.push(entry);
  }
  /** Keep only change entries with ts >= beforeTs. */
  async pruneChanges(docId, beforeTs) {
    const kept = (this._changeLog.get(docId) ?? []).filter((entry) => entry.ts >= beforeTs);
    this._changeLog.set(docId, kept);
  }
  /** Current revision for a document (0 when never set). */
  async getRev(docId) {
    return this._revs.get(docId) ?? 0;
  }
  /** Persist the document revision. */
  async setRev(docId, rev) {
    this._revs.set(docId, rev);
  }
}
export {
|
|
193
|
+
MemoryDbBackend,
|
|
194
|
+
MicroServer
|
|
195
|
+
};
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
/**
 * A single field value with its last-writer-wins (LWW) timestamp.
 * When two writes target the same key, the one with the larger `ts` wins.
 */
interface Field {
    /** The stored value; `any` because fields hold arbitrary JSON. */
    val: any;
    /** Writer timestamp used for LWW conflict resolution. */
    ts: number;
}
/** Map of dot-notation field keys (with optional suffix) to values. */
type FieldMap = Record<string, Field>;
/** A change sent from client to server. */
interface Change {
    /** Client-generated unique change id; the server uses it for idempotent commits. */
    id: string;
    /** Revision this change was based on; used as the base when transforming text deltas. */
    rev: number;
    /** The fields this change writes. */
    fields: FieldMap;
}
/** Result from server after committing a change. */
interface CommitResult {
    /** The new server revision after the commit. */
    rev: number;
    /** The resolved field values the server actually stored. */
    fields: FieldMap;
}
/** Full document state: the current revision plus every stored field. */
interface DocState {
    rev: number;
    fields: FieldMap;
}
/**
 * Suffix constants for special field types. A key's final character selects
 * its merge behavior; see `parseSuffix`.
 */
/** "+" suffix — increment field (name suggests additive merge; TODO confirm semantics). */
declare const INC = "+";
/** "~" suffix — bit field (name suggests bitwise merge; TODO confirm semantics). */
declare const BIT = "~";
/** "#" suffix — text field, merged server-side via delta operational transform. */
declare const TXT = "#";
/** "^" suffix — max field (server appears to keep the larger value; confirm). */
declare const MAX = "^";
/**
 * Split a field key into its path and trailing type suffix.
 * @param key A field key, possibly ending in one of INC/BIT/TXT/MAX.
 * @returns The `path` without the suffix, and the `suffix` itself ("" when none).
 */
declare function parseSuffix(key: string): {
    path: string;
    suffix: string;
};
/**
 * Server-side text log entry for OT: one committed delta for a text field at
 * a given revision, kept so late-arriving client deltas can be transformed.
 */
interface TextLogEntry {
    /** The field key — presumably including the "#" suffix; confirm against callers. */
    key: string;
    /** Delta ops for this revision (shape defined by @dabble/delta). */
    delta: any;
    /** Server revision at which this delta was applied. */
    rev: number;
}
/** Server-side change log entry used to make commits idempotent. */
interface ChangeLogEntry {
    /** The client-generated change id. */
    changeId: string;
    /** Commit time in ms (Date.now()); used by pruneChanges to expire entries. */
    ts: number;
}
/**
 * Pluggable server-side database backend. All methods are async so
 * implementations can be in-memory, IndexedDB, SQL, KV stores, etc.
 */
interface DbBackend {
    /** All stored fields for a document (empty map when none). */
    getFields(docId: string): Promise<FieldMap>;
    /** One field by key, or null when absent. */
    getField(docId: string, key: string): Promise<Field | null>;
    /** Upsert the given fields, merging over any existing ones. */
    setFields(docId: string, fields: FieldMap): Promise<void>;
    /** Text-OT log entries for a key with rev > sinceRev (default 0). */
    getTextLog(docId: string, key: string, sinceRev?: number): Promise<TextLogEntry[]>;
    /** Append one entry to a key's text-OT log. */
    appendTextLog(docId: string, entry: TextLogEntry): Promise<void>;
    /** Replace entries with rev <= throughRev by one composed delta at throughRev. */
    compactTextLog(docId: string, key: string, throughRev: number, composedDelta: any): Promise<void>;
    /** True when a change id was already committed (idempotency check). */
    hasChange(docId: string, changeId: string): Promise<boolean>;
    /** Record a committed change id for later idempotency checks. */
    addChange(docId: string, entry: ChangeLogEntry): Promise<void>;
    /** Drop change-log entries recorded before `beforeTs`. */
    pruneChanges(docId: string, beforeTs: number): Promise<void>;
    /** Current document revision (0 when never written). */
    getRev(docId: string): Promise<number>;
    /** Persist the document revision. */
    setRev(docId: string, rev: number): Promise<void>;
}
/**
 * Pluggable object storage for large values (S3/R2). Values whose serialized
 * form exceeds REF_THRESHOLD are offloaded here and referenced by key.
 */
interface ObjectStore {
    /** Store a value under `key`; resolves to a reference string. */
    put(key: string, value: any): Promise<string>;
    /** Fetch a previously stored value by its reference. */
    get(ref: string): Promise<any>;
    /** Delete a stored value by its reference. */
    del(ref: string): Promise<void>;
}
/**
 * Large value threshold (65536 = 64KB). Note: the server compares the JSON
 * string length (UTF-16 code units), not encoded byte size.
 */
declare const REF_THRESHOLD = 65536;
|
|
68
|
+
export { BIT, type Change, type ChangeLogEntry, type CommitResult, type DbBackend, type DocState, type Field, type FieldMap, INC, MAX, type ObjectStore, REF_THRESHOLD, TXT, type TextLogEntry, parseSuffix };
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import "../chunk-IZ2YBCUP.js";
|
|
2
|
+
// Suffix characters that mark special field merge behaviors.
const INC = "+";
const BIT = "~";
const TXT = "#";
const MAX = "^";
const SUFFIXES = new Set([INC, BIT, TXT, MAX]);
/**
 * Split a field key into its base path and trailing type suffix.
 * Returns suffix "" when the key has no recognized suffix character.
 */
function parseSuffix(key) {
  const tail = key.charAt(key.length - 1);
  if (SUFFIXES.has(tail)) {
    return { path: key.slice(0, -1), suffix: tail };
  }
  return { path: key, suffix: "" };
}
// Serialized values longer than this (64KB) are offloaded to object storage.
const REF_THRESHOLD = 65536;
export {
|
|
10
|
+
BIT,
|
|
11
|
+
INC,
|
|
12
|
+
MAX,
|
|
13
|
+
REF_THRESHOLD,
|
|
14
|
+
TXT,
|
|
15
|
+
parseSuffix
|
|
16
|
+
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@dabble/patches",
|
|
3
|
-
"version": "0.8.0",
|
|
3
|
+
"version": "0.8.1",
|
|
4
4
|
"description": "Immutable JSON Patch implementation based on RFC 6902 supporting operational transformation and last-writer-wins",
|
|
5
5
|
"author": "Jacob Wright <jacwright@gmail.com>",
|
|
6
6
|
"bugs": {
|
|
@@ -41,6 +41,10 @@
|
|
|
41
41
|
"./solid": {
|
|
42
42
|
"import": "./dist/solid/index.js",
|
|
43
43
|
"types": "./dist/solid/index.d.ts"
|
|
44
|
+
},
|
|
45
|
+
"./micro": {
|
|
46
|
+
"import": "./dist/micro/index.js",
|
|
47
|
+
"types": "./dist/micro/index.d.ts"
|
|
44
48
|
}
|
|
45
49
|
},
|
|
46
50
|
"files": [
|