@loro-dev/flock-wasm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/README.md +26 -0
- package/base64/event-batcher.d.ts +37 -0
- package/base64/event-batcher.js +204 -0
- package/base64/flock_wasm.js +1158 -0
- package/base64/index.d.ts +268 -0
- package/base64/index.js +1012 -0
- package/base64/wasm.d.ts +37 -0
- package/base64/wasm.js +15 -0
- package/bundler/event-batcher.d.ts +37 -0
- package/bundler/event-batcher.js +204 -0
- package/bundler/flock_wasm.d.ts +313 -0
- package/bundler/flock_wasm.js +5 -0
- package/bundler/flock_wasm_bg.js +1119 -0
- package/bundler/flock_wasm_bg.wasm +0 -0
- package/bundler/flock_wasm_bg.wasm.d.ts +40 -0
- package/bundler/index.d.ts +268 -0
- package/bundler/index.js +1012 -0
- package/bundler/wasm.d.ts +37 -0
- package/bundler/wasm.js +4 -0
- package/nodejs/event-batcher.d.ts +37 -0
- package/nodejs/event-batcher.js +208 -0
- package/nodejs/flock_wasm.d.ts +313 -0
- package/nodejs/flock_wasm.js +1126 -0
- package/nodejs/flock_wasm_bg.wasm +0 -0
- package/nodejs/flock_wasm_bg.wasm.d.ts +40 -0
- package/nodejs/index.d.ts +268 -0
- package/nodejs/index.js +1018 -0
- package/nodejs/package.json +1 -0
- package/nodejs/wasm.d.ts +37 -0
- package/nodejs/wasm.js +2 -0
- package/package.json +50 -0
- package/web/event-batcher.d.ts +37 -0
- package/web/event-batcher.js +204 -0
- package/web/flock_wasm.d.ts +377 -0
- package/web/flock_wasm.js +1158 -0
- package/web/flock_wasm_bg.wasm +0 -0
- package/web/flock_wasm_bg.wasm.d.ts +40 -0
- package/web/index.d.ts +268 -0
- package/web/index.js +1012 -0
- package/web/wasm.d.ts +37 -0
- package/web/wasm.js +4 -0
package/nodejs/index.js
ADDED
|
@@ -0,0 +1,1018 @@
|
|
|
1
|
+
// CommonJS entry point for the Node.js build. Re-exports the high-level
// `Flock` wrapper plus the standalone version-vector codec helpers below.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Flock = void 0;
exports.encodeVersionVector = encodeVersionVector;
exports.decodeVersionVector = decodeVersionVector;
// wasm_1.RawFlock is the low-level wasm-bindgen binding; event_batcher_1
// coalesces native change events into batches before delivery.
const wasm_1 = require("./wasm");
const event_batcher_1 = require("./event-batcher");
|
|
8
|
+
/**
 * Encodes a version vector into the compact binary wire format.
 * @param vector map of peerId -> { physicalTime, logicalCounter }
 * @returns bytes beginning with the "VEVE" magic prefix
 */
function encodeVersionVector(vector) {
    return encodeVersionVectorBinary(vector);
}
|
|
11
|
+
/**
 * Decodes bytes produced by `encodeVersionVector` back into a version vector.
 * Accepts both the magic-prefixed format and the legacy prefix-free format.
 */
function decodeVersionVector(bytes) {
    return decodeVersionVectorBinary(bytes);
}
|
|
14
|
+
// Shared codecs reused by all string <-> UTF-8 conversions in this module.
const textEncoder = new TextEncoder();
const textDecoder = new TextDecoder();
|
|
16
|
+
/**
 * Measures how many bytes a string occupies when encoded as UTF-8.
 * @param {string} value - the string to measure
 * @returns {number} the UTF-8 byte count
 */
function utf8ByteLength(value) {
    const encoded = new TextEncoder().encode(value);
    return encoded.length;
}
|
|
19
|
+
/**
 * A peer id is valid when it is a string whose UTF-8 encoding is under
 * 128 bytes. (The empty string passes this check.)
 * @param {unknown} peerId - candidate peer id
 * @returns {boolean}
 */
function isValidPeerId(peerId) {
    if (typeof peerId !== "string") {
        return false;
    }
    return new TextEncoder().encode(peerId).length < 128;
}
|
|
22
|
+
/**
 * Generates a random peer id: 32 random bytes rendered as 64 lowercase hex
 * characters. Uses Web Crypto when available; otherwise falls back to
 * Math.random (non-cryptographic).
 * @returns {string} 64-character lowercase hex string
 */
function createRandomPeerId() {
    const bytes = new Uint8Array(32);
    const hasWebCrypto = typeof crypto !== "undefined" &&
        typeof crypto.getRandomValues === "function";
    if (hasWebCrypto) {
        crypto.getRandomValues(bytes);
    } else {
        // Last-resort fallback for runtimes without Web Crypto.
        for (let i = 0; i < bytes.length; i += 1) {
            bytes[i] = Math.floor(Math.random() * 256);
        }
    }
    let hex = "";
    for (const byte of bytes) {
        hex += byte.toString(16).padStart(2, "0");
    }
    return hex;
}
|
|
35
|
+
/**
 * Resolves the peer id to use for a new instance: mints a random one when
 * omitted, validates an explicit one.
 * @param {string | undefined} peerId
 * @returns {string} a valid peer id
 * @throws {TypeError} when an explicit peerId fails validation
 */
function normalizePeerId(peerId) {
    // No id supplied: mint a fresh random one.
    if (peerId === undefined) {
        return createRandomPeerId();
    }
    if (isValidPeerId(peerId)) {
        return peerId;
    }
    throw new TypeError("peerId must be a UTF-8 string under 128 bytes.");
}
|
|
44
|
+
/**
 * Lexicographically compares two byte arrays, like memcmp with a length
 * tiebreaker: a shared-prefix shorter array sorts first.
 * @param {Uint8Array} a
 * @param {Uint8Array} b
 * @returns {number} negative / zero / positive ordering value
 */
function comparePeerBytes(a, b) {
    // Identical references are trivially equal — skip the scan.
    if (a === b) {
        return 0;
    }
    const shared = Math.min(a.length, b.length);
    let i = 0;
    while (i < shared) {
        const delta = a[i] - b[i];
        if (delta !== 0) {
            return delta;
        }
        i += 1;
    }
    // Equal shared prefix: order by length.
    return a.length - b.length;
}
|
|
57
|
+
// Filters a (possibly untrusted) version-vector object down to entries with a
// valid peer id and finite numeric clock fields, then sorts them by
// (timestamp, peer bytes, counter) so the delta encoding in
// encodeVersionVectorBinary sees non-decreasing timestamps.
function collectEncodableVersionVectorEntries(vv) {
    if (!vv || typeof vv !== "object") {
        return [];
    }
    const entries = [];
    for (const [peer, entry] of Object.entries(vv)) {
        // Drop falsy entries and invalid/oversized peer ids.
        if (!entry || !isValidPeerId(peer)) {
            continue;
        }
        const { physicalTime, logicalCounter } = entry;
        // Both clock fields must be finite numbers; otherwise skip the entry.
        if (typeof physicalTime !== "number" ||
            !Number.isFinite(physicalTime) ||
            typeof logicalCounter !== "number" ||
            !Number.isFinite(logicalCounter)) {
            continue;
        }
        const peerBytes = textEncoder.encode(peer);
        entries.push({
            peer,
            peerBytes,
            timestamp: Math.trunc(physicalTime),
            // Counters are clamped to >= 0 so they leb128-encode cleanly.
            counter: Math.max(0, Math.trunc(logicalCounter)),
        });
    }
    entries.sort((a, b) => {
        if (a.timestamp !== b.timestamp) {
            return a.timestamp - b.timestamp;
        }
        const peerCmp = comparePeerBytes(a.peerBytes, b.peerBytes);
        if (peerCmp !== 0) {
            return peerCmp;
        }
        return a.counter - b.counter;
    });
    return entries;
}
|
|
93
|
+
/**
 * Appends the unsigned LEB128 encoding of `value` to `out`.
 * Arithmetic (not bitwise) operations are used so values >= 2^31 encode
 * correctly despite JavaScript's 32-bit bitwise semantics.
 * @param {number} value - finite, non-negative number (fraction truncated)
 * @param {number[]} out - destination byte array, mutated in place
 * @throws {TypeError} for negative or non-finite values
 */
function writeUnsignedLeb128(value, out) {
    if (!Number.isFinite(value) || value < 0) {
        throw new TypeError("leb128 values must be finite and non-negative");
    }
    let rest = Math.trunc(value);
    // do/while emits the mandatory single 0x00 byte for value === 0.
    do {
        const low = rest % 0x80;
        rest = Math.floor(rest / 0x80);
        // Set the continuation bit on every byte except the last.
        out.push(rest === 0 ? low : low | 0x80);
    } while (rest > 0);
}
|
|
108
|
+
/**
 * Appends a length-prefixed byte string to `out`: a LEB128 length followed
 * by the raw bytes.
 * @param {Uint8Array} bytes - payload bytes
 * @param {number[]} out - destination array, mutated in place
 */
function writeVarStringBytes(bytes, out) {
    writeUnsignedLeb128(bytes.length, out);
    for (const byte of bytes) {
        out.push(byte);
    }
}
|
|
114
|
+
// 4-byte magic prefix identifying the current binary version-vector format.
const VERSION_VECTOR_MAGIC = new Uint8Array([86, 69, 86, 69]); // "VEVE"
|
|
115
|
+
// Serializes a version vector as: the magic prefix, then for each entry (in
// the sorted order produced by collectEncodableVersionVectorEntries) a leb128
// timestamp (absolute for the first entry, delta from the previous entry for
// the rest), a leb128 counter, and a length-prefixed peer-id string. An empty
// vector is just the magic bytes.
function encodeVersionVectorBinary(vv) {
    const entries = collectEncodableVersionVectorEntries(vv);
    const buffer = Array.from(VERSION_VECTOR_MAGIC);
    if (entries.length === 0) {
        return Uint8Array.from(buffer);
    }
    let lastTimestamp = 0;
    for (let i = 0; i < entries.length; i += 1) {
        const entry = entries[i];
        // Negative timestamps cannot be leb128/delta encoded.
        if (entry.timestamp < 0) {
            throw new TypeError("timestamp must be non-negative");
        }
        if (i === 0) {
            // First entry carries the absolute timestamp.
            writeUnsignedLeb128(entry.timestamp, buffer);
            lastTimestamp = entry.timestamp;
        }
        else {
            // Subsequent entries carry the delta; the sort above guarantees
            // it is non-negative, so this throw is a defensive invariant.
            const delta = entry.timestamp - lastTimestamp;
            if (delta < 0) {
                throw new TypeError("version vector timestamps must be non-decreasing");
            }
            writeUnsignedLeb128(delta, buffer);
            lastTimestamp = entry.timestamp;
        }
        writeUnsignedLeb128(entry.counter, buffer);
        writeVarStringBytes(entry.peerBytes, buffer);
    }
    return Uint8Array.from(buffer);
}
|
|
144
|
+
/**
 * Decodes one unsigned LEB128 varint starting at `offset`.
 * Uses arithmetic instead of bitwise operations to avoid 32-bit overflow:
 * JavaScript bitwise operators convert to 32-bit signed integers, which
 * breaks for values >= 2^31.
 * @param {Uint8Array} bytes - source buffer
 * @param {number} offset - position of the varint's first byte
 * @returns {[number, number]} [decoded value, bytes consumed]; reading at or
 *   past the end of the buffer yields [0, 0]
 * @throws {TypeError} when a varint starts but the buffer ends while its
 *   continuation bit is still set (previously this silently returned a
 *   truncated partial value)
 */
function decodeUnsignedLeb128(bytes, offset) {
    let result = 0;
    let multiplier = 1;
    let consumed = 0;
    let terminated = false;
    while (offset + consumed < bytes.length) {
        const byte = bytes[offset + consumed];
        consumed += 1;
        result += (byte & 0x7f) * multiplier;
        if ((byte & 0x80) === 0) {
            terminated = true;
            break;
        }
        multiplier *= 128;
    }
    // Reject a varint cut off mid-sequence; keep the legacy [0, 0] result for
    // a zero-byte read so callers that probe past the end still work.
    if (consumed > 0 && !terminated) {
        throw new TypeError("truncated leb128 varint");
    }
    return [result, consumed];
}
|
|
162
|
+
/**
 * Decodes a length-prefixed UTF-8 string starting at `offset`.
 * @param {Uint8Array} bytes - source buffer
 * @param {number} offset - position of the leb128 length prefix
 * @returns {[string, number]} [decoded text, total bytes consumed]
 * @throws {TypeError} when the declared length overruns the buffer
 */
function decodeVarString(bytes, offset) {
    const [length, used] = decodeUnsignedLeb128(bytes, offset);
    const start = offset + used;
    const end = start + length;
    if (end > bytes.length) {
        throw new TypeError("varString length exceeds buffer");
    }
    const text = textDecoder.decode(bytes.subarray(start, end));
    return [text, used + length];
}
|
|
172
|
+
/**
 * Tests whether a buffer starts with the 4-byte "VEVE" magic prefix
 * (bytes 86, 69, 86, 69) of the current version-vector format.
 * @param {Uint8Array} bytes
 * @returns {boolean}
 */
function hasMagic(bytes) {
    if (bytes.length < 4) {
        return false;
    }
    return bytes[0] === 86 &&
        bytes[1] === 69 &&
        bytes[2] === 86 &&
        bytes[3] === 69;
}
|
|
179
|
+
// Decodes the legacy (pre-magic) layout: a leb128 entry count, a leb128 base
// timestamp, then per entry a length-prefixed peer string, a leb128 timestamp
// delta from the shared base, and a leb128 counter.
function decodeLegacyVersionVector(bytes) {
    let offset = 0;
    const [count, usedCount] = decodeUnsignedLeb128(bytes, offset);
    offset += usedCount;
    const [baseTimestamp, usedBase] = decodeUnsignedLeb128(bytes, offset);
    offset += usedBase;
    const vv = {};
    for (let i = 0; i < count; i += 1) {
        const [peer, usedPeer] = decodeVarString(bytes, offset);
        offset += usedPeer;
        if (!isValidPeerId(peer)) {
            throw new TypeError("invalid peer id in encoded version vector");
        }
        const [delta, usedDelta] = decodeUnsignedLeb128(bytes, offset);
        offset += usedDelta;
        const [counter, usedCounter] = decodeUnsignedLeb128(bytes, offset);
        offset += usedCounter;
        // Legacy deltas are relative to the shared base timestamp, not to the
        // previous entry (unlike the magic-prefixed format below).
        vv[peer] = {
            physicalTime: baseTimestamp + delta,
            logicalCounter: counter,
        };
    }
    return vv;
}
|
|
203
|
+
// Decodes the magic-prefixed layout produced by encodeVersionVectorBinary:
// the first entry carries an absolute timestamp; each subsequent entry
// carries a non-negative delta from the previous entry's timestamp.
function decodeNewVersionVector(bytes) {
    let offset = 4; // skip the 4-byte magic prefix
    const vv = {};
    if (offset === bytes.length) {
        // Magic with no entries encodes the empty vector.
        return vv;
    }
    const [firstTimestamp, usedTs] = decodeUnsignedLeb128(bytes, offset);
    offset += usedTs;
    const [firstCounter, usedCounter] = decodeUnsignedLeb128(bytes, offset);
    offset += usedCounter;
    const [firstPeer, usedPeer] = decodeVarString(bytes, offset);
    offset += usedPeer;
    if (!isValidPeerId(firstPeer)) {
        throw new TypeError("invalid peer id in encoded version vector");
    }
    vv[firstPeer] = {
        physicalTime: firstTimestamp,
        logicalCounter: firstCounter,
    };
    let lastTimestamp = firstTimestamp;
    while (offset < bytes.length) {
        const [delta, usedDelta] = decodeUnsignedLeb128(bytes, offset);
        offset += usedDelta;
        const [counter, usedCtr] = decodeUnsignedLeb128(bytes, offset);
        offset += usedCtr;
        const [peer, usedPeerLen] = decodeVarString(bytes, offset);
        offset += usedPeerLen;
        if (!isValidPeerId(peer)) {
            throw new TypeError("invalid peer id in encoded version vector");
        }
        const timestamp = lastTimestamp + delta;
        // Deltas are unsigned, so this only trips if addition overflowed the
        // running timestamp backwards — treat that as corrupt input.
        if (timestamp < lastTimestamp) {
            throw new TypeError("version vector timestamps must be non-decreasing");
        }
        vv[peer] = { physicalTime: timestamp, logicalCounter: counter };
        lastTimestamp = timestamp;
    }
    return vv;
}
|
|
242
|
+
/**
 * Dispatches decoding on the wire format: magic-prefixed buffers use the
 * current decoder, everything else is treated as the legacy layout.
 * @param {Uint8Array} bytes
 * @returns decoded version vector object
 */
function decodeVersionVectorBinary(bytes) {
    return hasMagic(bytes)
        ? decodeNewVersionVector(bytes)
        : decodeLegacyVersionVector(bytes);
}
|
|
248
|
+
/**
 * Converts a version vector into the raw shape the wasm ffi expects:
 * a map of peerId -> [timestamp, counter]. Invalid entries are dropped by
 * the shared collection helper; a falsy input maps to undefined.
 */
function encodeVersionVectorForFfi(vv) {
    if (!vv) {
        return undefined;
    }
    const raw = {};
    const entries = collectEncodableVersionVectorEntries(vv);
    entries.forEach((entry) => {
        raw[entry.peer] = [entry.timestamp, entry.counter];
    });
    return raw;
}
|
|
258
|
+
/**
 * Sanitizes the tombstone-prune cutoff: anything other than a finite number
 * (including undefined, NaN, Infinity, or a non-number) becomes undefined.
 * @param {unknown} pruneTombstonesBefore
 * @returns {number | undefined}
 */
function normalizePruneBefore(pruneTombstonesBefore) {
    const usable = typeof pruneTombstonesBefore === "number" &&
        Number.isFinite(pruneTombstonesBefore);
    return usable ? pruneTombstonesBefore : undefined;
}
|
|
268
|
+
/**
 * Converts the ffi's raw version map (peerId -> [physicalTime, counter])
 * into the public shape (peerId -> { physicalTime, logicalCounter }).
 * Entries with invalid peers, short tuples, or non-finite numbers are
 * silently dropped; counters are truncated to integers.
 */
function decodeVersionVectorFromRaw(raw) {
    if (raw === null || typeof raw !== "object") {
        return {};
    }
    const result = {};
    for (const [peer, value] of Object.entries(raw)) {
        if (!Array.isArray(value) || value.length < 2 || !isValidPeerId(peer)) {
            continue;
        }
        const [physicalTime, logicalCounter] = value;
        const bothFinite = typeof physicalTime === "number" &&
            Number.isFinite(physicalTime) &&
            typeof logicalCounter === "number" &&
            Number.isFinite(logicalCounter);
        if (!bothFinite) {
            continue;
        }
        result[peer] = {
            physicalTime,
            logicalCounter: Math.trunc(logicalCounter),
        };
    }
    return result;
}
|
|
295
|
+
/**
 * Encodes a scan bound for the ffi. Falsy bounds mean "no bound"; an
 * unbounded bound is passed through; any other bound gets its key array
 * copied so the caller's array cannot be mutated through the result.
 */
function encodeBound(bound) {
    if (!bound) {
        return undefined;
    }
    if (bound.kind === "unbounded") {
        return { kind: "unbounded" };
    }
    const key = bound.key.slice();
    return { kind: bound.kind, key };
}
|
|
304
|
+
// Converts a raw ffi entry object into the public entry-info shape. Returns
// undefined when the input is missing or its clock fails validation. The
// metadata always normalizes to an object, and `data` is only attached when
// present on the raw entry — so a tombstone keeps its clock/metadata while
// omitting the `data` key entirely.
function decodeEntryInfo(raw) {
    if (!raw || typeof raw !== "object") {
        return undefined;
    }
    const info = raw;
    const clock = normalizeEntryClock(info.clock);
    if (!clock) {
        // An entry without a valid clock is treated as absent.
        return undefined;
    }
    const metadata = normalizeMetadataMap(info.metadata);
    const result = { metadata, clock };
    if ("data" in info) {
        result.data = cloneJson(info.data);
    }
    return result;
}
|
|
320
|
+
/**
 * Normalizes a raw ffi event batch. Unusable input decodes to an empty
 * "local" batch; a missing/invalid source defaults to "local"; falsy event
 * entries are dropped before decoding.
 */
function decodeEventBatch(raw) {
    if (!raw || typeof raw !== "object") {
        return { source: "local", events: [] };
    }
    const batch = raw;
    const source = typeof batch.source === "string" ? batch.source : "local";
    const events = [];
    if (Array.isArray(batch.events)) {
        for (const entry of batch.events) {
            if (entry) {
                events.push(buildEvent(entry));
            }
        }
    }
    return { source, events };
}
|
|
332
|
+
/**
 * Builds a public event from a raw entry: the key path (or [] when missing),
 * the decoded payload, and convenience `value`/`metadata` mirrors of the
 * payload's fields.
 */
function buildEvent(entry) {
    const payload = buildEventPayload(entry);
    const event = {
        key: Array.isArray(entry.key) ? entry.key : [],
        value: payload.data,
        metadata: cloneMetadata(payload.metadata),
        payload,
    };
    return event;
}
|
|
342
|
+
/**
 * Derives an event payload from a raw entry: top-level `value`/`metadata`
 * seed the base, then any nested `payload` fields override them via
 * mergePayload (keys absent from the nested payload keep their base values).
 */
function buildEventPayload(entry) {
    const base = {};
    if ("value" in entry) {
        base.data = cloneJson(entry.value);
    }
    const baseMetadata = cloneMetadata(entry.metadata);
    if (baseMetadata !== undefined) {
        base.metadata = baseMetadata;
    }
    return mergePayload(base, normalizeRawEventPayload(entry.payload));
}
|
|
354
|
+
/**
 * Clones the usable parts of a raw nested payload. Non-object input yields
 * undefined; otherwise only a present `data` key and valid metadata are
 * carried over.
 */
function normalizeRawEventPayload(payload) {
    const usable = Boolean(payload) && typeof payload === "object";
    if (!usable) {
        return undefined;
    }
    const normalized = {};
    if ("data" in payload) {
        normalized.data = cloneJson(payload.data);
    }
    const metadata = cloneMetadata(payload.metadata);
    if (metadata !== undefined) {
        normalized.metadata = metadata;
    }
    return normalized;
}
|
|
368
|
+
// Captured once at module load; cloneJson falls back to a JSON round-trip
// when the runtime lacks structuredClone.
const structuredCloneFn = globalThis.structuredClone;
|
|
369
|
+
// Deep-clones a JSON-ish value. Prefers structuredClone when available;
// otherwise falls back to JSON.parse(JSON.stringify(...)), which drops
// functions and undefined-valued keys and throws on cycles — acceptable here
// since values are expected to be plain JSON.
function cloneJson(value) {
    if (value === undefined) {
        // JSON.stringify(undefined) would yield undefined and break parse.
        return value;
    }
    if (structuredCloneFn) {
        return structuredCloneFn(value);
    }
    return JSON.parse(JSON.stringify(value));
}
|
|
378
|
+
/**
 * Parses a JSON-encoded key path string. Any input that is not valid JSON,
 * or whose parsed value is not an array, decodes to the empty path.
 * @param {string} key - JSON text, expected to encode an array
 * @returns {Array} the decoded key path, or []
 */
function parseKeyString(key) {
    let parsed;
    try {
        parsed = JSON.parse(key);
    } catch {
        return [];
    }
    if (Array.isArray(parsed)) {
        return parsed;
    }
    return [];
}
|
|
387
|
+
/**
 * Clones a metadata map. Only plain non-array objects qualify; anything
 * else (null, arrays, scalars) yields undefined.
 */
function cloneMetadata(metadata) {
    const isPlainObject = Boolean(metadata) &&
        typeof metadata === "object" &&
        !Array.isArray(metadata);
    return isPlainObject ? cloneJson(metadata) : undefined;
}
|
|
393
|
+
/**
 * Like cloneMetadata, but missing/invalid metadata normalizes to an empty
 * object instead of undefined.
 */
function normalizeMetadataMap(metadata) {
    const cloned = cloneMetadata(metadata);
    return cloned === undefined ? {} : cloned;
}
|
|
397
|
+
// Parses a record's compact clock string "physicalTime,logicalCounter,peerId"
// into its parts. Only the first two commas delimit fields, so a peer id that
// itself contains commas survives intact. Malformed pieces degrade to
// zero/empty defaults rather than throwing.
function decodeClock(record) {
    const rawClock = typeof record.c === "string" ? record.c : "";
    const firstComma = rawClock.indexOf(",");
    const secondComma = firstComma === -1 ? -1 : rawClock.indexOf(",", firstComma + 1);
    if (firstComma === -1 || secondComma === -1) {
        // Fewer than two commas: not a parseable clock string.
        return { physicalTime: 0, logicalCounter: 0, peerId: "" };
    }
    const physicalTime = Number(rawClock.slice(0, firstComma));
    const logicalCounter = Number(rawClock.slice(firstComma + 1, secondComma));
    const peerIdRaw = rawClock.slice(secondComma + 1);
    const peerId = isValidPeerId(peerIdRaw) ? peerIdRaw : "";
    return {
        physicalTime: Number.isFinite(physicalTime) ? physicalTime : 0,
        logicalCounter: Number.isFinite(logicalCounter)
            ? Math.trunc(logicalCounter)
            : 0,
        peerId,
    };
}
|
|
416
|
+
/**
 * Validates and normalizes an entry clock object. Returns undefined unless
 * both time fields are finite numbers and the peer id is valid; the logical
 * counter is truncated to an integer.
 */
function normalizeEntryClock(clock) {
    if (!clock || typeof clock !== "object") {
        return undefined;
    }
    const { physicalTime, logicalCounter, peerId } = clock;
    const timesValid = typeof physicalTime === "number" &&
        Number.isFinite(physicalTime) &&
        typeof logicalCounter === "number" &&
        Number.isFinite(logicalCounter);
    if (!timesValid || !isValidPeerId(peerId)) {
        return undefined;
    }
    return {
        physicalTime,
        logicalCounter: Math.trunc(logicalCounter),
        peerId,
    };
}
|
|
436
|
+
/**
 * Extracts a payload from a wire record. `d` (data) is only present on live
 * entries, so tombstones produce a payload without a `data` key; metadata is
 * included only when it is a valid map.
 */
function createExportPayload(record) {
    const { d: data, m: rawMetadata } = record;
    const payload = {};
    if (data !== undefined) {
        payload.data = cloneJson(data);
    }
    const metadata = cloneMetadata(rawMetadata);
    if (metadata !== undefined) {
        payload.metadata = metadata;
    }
    return payload;
}
|
|
447
|
+
/**
 * Builds a payload for a put: a clone of the value plus cloned metadata when
 * the metadata is a valid map.
 */
function createPutPayload(value, metadata) {
    const payload = { data: cloneJson(value) };
    const normalized = cloneMetadata(metadata);
    if (normalized !== undefined) {
        payload.metadata = normalized;
    }
    return payload;
}
|
|
455
|
+
/**
 * Copies payload fields from `source` onto `target` in place. Only keys
 * present on the source are written; an explicitly-undefined data value is
 * preserved as undefined rather than cloned. Non-object sources are ignored.
 */
function assignPayload(target, source) {
    const usable = Boolean(source) && typeof source === "object";
    if (!usable) {
        return;
    }
    if ("data" in source) {
        const value = source.data;
        target.data = value === undefined ? undefined : cloneJson(value);
    }
    if ("metadata" in source) {
        target.metadata = cloneMetadata(source.metadata);
    }
}
|
|
467
|
+
/**
 * Deep-copies a payload by assigning its fields onto a fresh object.
 */
function clonePayload(payload) {
    const copy = {};
    assignPayload(copy, payload);
    return copy;
}
|
|
472
|
+
/**
 * Merges two payloads: starts from a copy of `base`, then lets the keys
 * present on `update` win. Keys absent from `update` keep their base values.
 */
function mergePayload(base, update) {
    const merged = clonePayload(base);
    assignPayload(merged, update);
    return merged;
}
|
|
477
|
+
/**
 * Assembles a wire record from a clock and a payload: `c` is the clock,
 * `d` (data) is attached only when defined, and `m` (metadata) only when it
 * is a valid map.
 */
function buildRecord(clock, payload) {
    const { data, metadata } = payload;
    const record = { c: clock };
    if (data !== undefined) {
        record.d = cloneJson(data);
    }
    const cleanMetadata = cloneMetadata(metadata);
    if (cleanMetadata !== undefined) {
        record.m = cleanMetadata;
    }
    return record;
}
|
|
488
|
+
// Deep copy of a record: the clock field carries over as-is while the
// data/metadata payload is cloned via the export/build round-trip.
function cloneRecord(record) {
    return buildRecord(record.c, createExportPayload(record));
}
|
|
491
|
+
// Assembles the hook context for one bundle entry: the decoded key path, the
// parsed clock, and a defensive copy of the raw record so hooks cannot
// mutate the bundle through the context.
function buildContext(key, record) {
    return {
        key: parseKeyString(key),
        clock: decodeClock(record),
        raw: cloneRecord(record),
    };
}
|
|
498
|
+
/**
 * Normalizes a preprocess hook's return value into {accept, reason?}.
 * Anything that is not an explicit rejection — a non-object, an object
 * without an `accept` key, or a truthy `accept` — counts as acceptance.
 * A rejection without a reason gets the default reason "rejected".
 */
function normalizeImportDecision(decision) {
    const isObject = Boolean(decision) && typeof decision === "object";
    if (!isObject || !("accept" in decision) || decision.accept) {
        return { accept: true };
    }
    return { accept: false, reason: decision.reason ?? "rejected" };
}
|
|
510
|
+
/**
 * Normalizes the raw ffi import report into {accepted, skipped[]}.
 * Unusable input yields an empty report; each skipped entry is coerced to a
 * {key: Array, reason: string} shape with safe defaults.
 * @param {unknown} raw - value returned by the wasm importJson call
 * @returns {{accepted: number, skipped: Array<{key: Array, reason: string}>}}
 */
function decodeImportReport(raw) {
    if (!raw || typeof raw !== "object") {
        return { accepted: 0, skipped: [] };
    }
    const report = raw;
    // Guard the count the same way the rest of this module guards numeric
    // fields: a NaN/Infinity `accepted` previously leaked through the bare
    // typeof check; it now degrades to zero.
    const accepted = typeof report.accepted === "number" && Number.isFinite(report.accepted)
        ? report.accepted
        : 0;
    const skippedRaw = Array.isArray(report.skipped) ? report.skipped : [];
    const skipped = skippedRaw.map((entry) => {
        const key = entry && Array.isArray(entry.key) ? entry.key : [];
        const reason = entry && typeof entry.reason === "string" ? entry.reason : "unknown";
        return { key, reason };
    });
    return { accepted, skipped };
}
|
|
524
|
+
/**
 * Deep-copies an export bundle: the version carries over while each entry
 * record is cloned so hook-driven edits cannot touch the caller's bundle.
 */
function cloneBundle(bundle) {
    const entries = {};
    for (const [key, record] of Object.entries(bundle.entries)) {
        entries[key] = cloneRecord(record);
    }
    return { version: bundle.version, entries };
}
|
|
531
|
+
/**
 * Heuristic overload discriminator for exportJson: an object argument is
 * treated as an options bag when it owns any of the known option keys.
 */
function isExportOptions(value) {
    if (typeof value !== "object" || value === null) {
        return false;
    }
    const optionKeys = ["hooks", "from", "pruneTombstonesBefore", "peerId"];
    return optionKeys.some((name) => Object.prototype.hasOwnProperty.call(value, name));
}
|
|
539
|
+
/**
 * Overload discriminator for importJson: an object owning a `bundle` key is
 * treated as an options bag rather than a bundle itself.
 */
function isImportOptions(value) {
    if (typeof value !== "object" || value === null) {
        return false;
    }
    return Object.prototype.hasOwnProperty.call(value, "bundle");
}
|
|
544
|
+
// Clock/timer facade handed to EventBatcher — presumably the indirection
// exists so tests can substitute a fake runtime (see ./event-batcher).
const defaultEventBatcherRuntime = {
    now: () => Date.now(),
    setTimeout: (fn, ms) => setTimeout(fn, ms),
    clearTimeout: (handle) => clearTimeout(handle),
};
|
|
549
|
+
class Flock {
|
|
550
|
+
// Underlying RawFlock wasm binding.
inner;
// Registered listener callbacks (managed by subscription code outside this
// excerpt).
listeners = new Set();
// Native subscription handle; undefined until attached (managed outside this
// excerpt).
nativeSubscriberId;
// Coalesces native change events into batches before delivery.
eventBatcher;
|
|
554
|
+
/**
 * Creates a Flock backed by a fresh RawFlock instance.
 * @param peerId optional peer id; validated, or randomly generated when
 *   omitted (see normalizePeerId)
 */
constructor(peerId) {
    this.inner = new wasm_1.RawFlock(normalizePeerId(peerId));
    this.eventBatcher = new event_batcher_1.EventBatcher({
        runtime: defaultEventBatcherRuntime,
        emit: (source, events) => {
            // Flushed batches are forwarded to JS listeners via deliverBatch.
            this.deliverBatch({ source, events });
        },
    });
}
|
|
563
|
+
// Wraps an existing RawFlock without running the constructor (which would
// allocate a brand-new native instance). Object.create plus manual field
// initialization mirrors the constructor's setup exactly.
static fromInner(inner) {
    const flock = Object.create(Flock.prototype);
    flock.inner = inner;
    flock.listeners = new Set();
    flock.nativeSubscriberId = undefined;
    flock.eventBatcher = new event_batcher_1.EventBatcher({
        runtime: defaultEventBatcherRuntime,
        emit: (source, events) => {
            flock.deliverBatch({ source, events });
        },
    });
    return flock;
}
|
|
576
|
+
// Builds a Flock from an exported JSON bundle.
static fromJson(bundle, peerId) {
    const inner = wasm_1.RawFlock.fromJson(bundle, normalizePeerId(peerId));
    return Flock.fromInner(inner);
}
|
|
580
|
+
// Builds a Flock from the binary format produced by exportFile(). Note the
// ffi argument order here is (peerId, bytes), unlike fromJson.
static fromFile(bytes, peerId) {
    const inner = wasm_1.RawFlock.fromFile(normalizePeerId(peerId), bytes);
    return Flock.fromInner(inner);
}
|
|
584
|
+
// Debug helper: exercises the version/scan ffi paths purely for their side
// effects (presumably each throws if internal state is inconsistent); the
// results themselves are discarded.
checkInvariants() {
    void this.version();
    void this.inclusiveVersion();
    void this.scan();
}
|
|
589
|
+
// Replaces this instance's peer id after validating it.
setPeerId(peerId) {
    this.inner.setPeerId(normalizePeerId(peerId));
}
|
|
592
|
+
// Core put-with-metadata path: defensively clones the value and metadata
// before crossing the ffi boundary so later caller mutations cannot leak in.
putWithMetaInternal(key, value, metadata, now) {
    const metadataClone = cloneMetadata(metadata);
    this.inner.putWithMeta(key, cloneJson(value), metadataClone, now);
}
|
|
596
|
+
// Put path used when a transform hook is configured. The hook receives a
// working copy of the payload and may mutate it or return a replacement;
// that result is merged over the base payload so omitted keys keep their
// original values. The final payload must still contain data.
async putWithMetaWithHooks(key, value, options) {
    const basePayload = createPutPayload(value, options.metadata);
    const transform = options.hooks?.transform;
    if (!transform) {
        // No hook configured: fall through to the plain synchronous path.
        this.putWithMetaInternal(key, value, options.metadata, options.now);
        return;
    }
    const workingPayload = clonePayload(basePayload);
    const transformed = await transform({ key: key.slice(), now: options.now }, workingPayload);
    const finalPayload = mergePayload(basePayload, transformed ?? workingPayload);
    const finalValue = finalPayload.data;
    if (finalValue === undefined) {
        // A transform may adjust the data but cannot remove it entirely.
        throw new TypeError("putWithMeta requires a data value");
    }
    this.putWithMetaInternal(key, finalValue, finalPayload.metadata, options.now);
}
|
|
612
|
+
/**
 * Put a value into the flock. If the given entry already exists, this insert will be skipped.
 * The value is deep-cloned before crossing the ffi boundary, so later caller
 * mutations cannot affect the stored value.
 * @param key
 * @param value
 * @param now
 */
put(key, value, now) {
    this.inner.put(key, cloneJson(value), now);
}
|
|
621
|
+
/**
 * Put with optional metadata and hooks. Delegates to the async hook path
 * only when a transform hook is present (returning its promise); otherwise
 * completes synchronously with no return value.
 */
putWithMeta(key, value, options) {
    const opts = options ?? {};
    if (opts.hooks?.transform) {
        return this.putWithMetaWithHooks(key, value, opts);
    }
    this.putWithMetaInternal(key, value, opts.metadata, opts.now);
}
|
|
628
|
+
// Alias for put().
set(key, value, now) {
    this.put(key, value, now);
}
|
|
631
|
+
/**
 * Delete a value from the flock. If the given entry does not exist, this delete will be skipped.
 * (Deleted entries appear as tombstones via getEntry — see its doc comment.)
 * @param key
 * @param now
 */
delete(key, now) {
    this.inner.delete(key, now);
}
|
|
639
|
+
// Reads the value for a key via the ffi. Per getEntry's contract below, this
// does not distinguish a missing key from a tombstone.
get(key) {
    return this.inner.get(key);
}
|
|
642
|
+
/**
 * Returns the full entry payload (data, metadata, and clock) for a key.
 *
 * Unlike `get`, this distinguishes between a missing key (`undefined`) and a
 * tombstone (returns the clock and metadata with `data` omitted). Metadata is
 * cloned and defaults to `{}` when absent.
 */
getEntry(key) {
    const raw = this.inner.getEntry(key);
    // decodeEntryInfo also returns undefined when the ffi entry's clock
    // fails validation.
    return decodeEntryInfo(raw);
}
|
|
653
|
+
// Merges another Flock's state into this one. beforeImport() notifies the
// event batcher that a bulk import is starting (see ./event-batcher) before
// the native merge runs.
merge(other) {
    this.eventBatcher.beforeImport();
    void this.inner.merge(other.inner);
}
|
|
657
|
+
/**
 * Returns the exclusive version vector, which only includes peers that have
 * at least one entry in the current state. This is consistent with the state
 * after export and re-import.
 *
 * Use this version when sending to other peers for incremental sync.
 * (The raw ffi map is sanitized via decodeVersionVectorFromRaw.)
 */
version() {
    return decodeVersionVectorFromRaw(this.inner.version());
}
|
|
667
|
+
/**
 * Returns the inclusive version vector, which includes all peers ever seen,
 * even if their entries have been overridden by other peers.
 *
 * Use this version when checking if you have received all data from another peer.
 * (The raw ffi map is sanitized via decodeVersionVectorFromRaw.)
 */
inclusiveVersion() {
    return decodeVersionVectorFromRaw(this.inner.inclusiveVersion());
}
|
|
676
|
+
// Ffi export helper: normalizes the prune cutoff and the peer filter (an
// invalid peerId silently becomes "no filter"), converts the `from` version
// vector into the raw tuple map the wasm side expects, and delegates.
exportJsonInternal(from, pruneTombstonesBefore, peerId) {
    const pruneBefore = normalizePruneBefore(pruneTombstonesBefore);
    const normalizedPeerId = peerId !== undefined && isValidPeerId(peerId) ? peerId : undefined;
    return this.inner.exportJson(encodeVersionVectorForFfi(from), pruneBefore, normalizedPeerId);
}
|
|
681
|
+
// Export path with a transform hook: exports the base bundle, then runs the
// hook over each entry. The hook receives a context (key/clock/raw record)
// plus a working copy of the payload; its result (or the mutated copy) is
// merged over the base payload so omitted keys keep their exported values.
// Entries are re-encoded with their original clocks.
async exportJsonWithHooks(options) {
    const base = this.exportJsonInternal(options.from, options.pruneTombstonesBefore, options.peerId);
    const transform = options.hooks?.transform;
    if (!transform) {
        // No hook: return the untouched export.
        return base;
    }
    const result = { version: base.version, entries: {} };
    for (const [key, record] of Object.entries(base.entries)) {
        const context = buildContext(key, record);
        const basePayload = createExportPayload(record);
        const workingPayload = clonePayload(basePayload);
        const transformed = await transform(context, workingPayload);
        const finalPayload = mergePayload(basePayload, transformed ?? workingPayload);
        result.entries[key] = buildRecord(record.c, finalPayload);
    }
    return result;
}
|
|
698
|
+
/**
 * Export the current state as a JSON bundle.
 * Overloaded: exportJson() and exportJson(fromVersion, pruneBefore) are
 * synchronous; an options object (detected via isExportOptions) routes to
 * the async exportJsonWithHooks path and returns a promise.
 */
exportJson(arg, pruneTombstonesBefore) {
    if (arg === undefined) {
        return this.exportJsonInternal(undefined, pruneTombstonesBefore);
    }
    if (isExportOptions(arg)) {
        return this.exportJsonWithHooks(arg);
    }
    // Otherwise `arg` is a `from` version vector.
    return this.exportJsonInternal(arg, pruneTombstonesBefore);
}
|
|
707
|
+
// Core import: signals the batcher that an import is beginning, hands the
// bundle to the wasm side, and normalizes the ffi report.
importJsonInternal(bundle) {
    this.eventBatcher.beforeImport();
    const report = this.inner.importJson(bundle);
    return decodeImportReport(report);
}
|
|
712
|
+
// Import path with an optional preprocess hook. When a hook is present the
// bundle is cloned first so decisions cannot mutate the caller's object.
// For each entry the hook may reject it (the entry is removed from the
// working bundle and reported as skipped) or accept it. Note the hook only
// sees a defensive copy of the payload — payload mutations it makes are
// discarded; accepted entries are re-encoded from the original record's
// payload. Hook-skipped entries are prepended to the core report's skips.
async importJsonWithHooks(options) {
    const preprocess = options.hooks?.preprocess;
    const working = preprocess ? cloneBundle(options.bundle) : options.bundle;
    const skippedByHooks = [];
    if (preprocess) {
        for (const key of Object.keys(working.entries)) {
            const record = working.entries[key];
            if (!record) {
                continue;
            }
            const context = buildContext(key, record);
            const basePayload = createExportPayload(record);
            const decision = await preprocess(context, clonePayload(basePayload));
            const normalized = normalizeImportDecision(decision);
            if (!normalized.accept) {
                skippedByHooks.push({ key: context.key, reason: normalized.reason });
                delete working.entries[key];
                continue;
            }
            // Re-encode the accepted entry from its (unmodified) payload.
            working.entries[key] = buildRecord(record.c, basePayload);
        }
    }
    const coreReport = this.importJsonInternal(working);
    return {
        accepted: coreReport.accepted,
        skipped: skippedByHooks.concat(coreReport.skipped),
    };
}
|
|
740
|
+
/**
 * Import a JSON bundle. With a { bundle, hooks } options object this is
 * async and runs preprocess hooks; with a plain bundle it is synchronous.
 * @returns an import report { accepted, skipped } (or a promise thereof)
 */
importJson(arg) {
    if (isImportOptions(arg)) {
        return this.importJsonWithHooks(arg);
    }
    return this.importJsonInternal(arg);
}
|
|
746
|
+
// Convenience wrapper: parse a JSON string then import it. JSON.parse
// errors propagate to the caller.
importJsonStr(bundle) {
    const parsed = JSON.parse(bundle);
    return this.importJson(parsed);
}
|
|
750
|
+
// Largest physical timestamp seen. The ffi result (presumably a BigInt from
// wasm-bindgen — Number() normalizes either way) is returned as a plain
// number.
getMaxPhysicalTime() {
    return Number(this.inner.getMaxPhysicalTime());
}
|
|
753
|
+
// Returns this instance's peer id, defensively validating the ffi result
// both for type and for the peer-id length constraint.
peerId() {
    const id = this.inner.peerId();
    if (typeof id !== "string") {
        throw new TypeError("peerId ffi returned unexpected value");
    }
    if (!isValidPeerId(id)) {
        throw new TypeError("peerId ffi returned an invalid string");
    }
    return id;
}
|
|
763
|
+
kvToJson() {
|
|
764
|
+
return this.exportJson();
|
|
765
|
+
}
|
|
766
|
+
putMvr(key, value, now) {
|
|
767
|
+
if (value === null || Array.isArray(value) || typeof value === "object") {
|
|
768
|
+
throw new TypeError("putMvr only accepts scalar JSON values");
|
|
769
|
+
}
|
|
770
|
+
const rows = this.scan({ prefix: key.slice() });
|
|
771
|
+
for (const row of rows) {
|
|
772
|
+
if (row.value !== true) {
|
|
773
|
+
continue;
|
|
774
|
+
}
|
|
775
|
+
this.delete(row.key, now);
|
|
776
|
+
}
|
|
777
|
+
const keyWithValue = key.slice();
|
|
778
|
+
keyWithValue.push(value);
|
|
779
|
+
this.put(keyWithValue, true, now);
|
|
780
|
+
}
|
|
781
|
+
getMvr(key) {
|
|
782
|
+
const rows = this.scan({ prefix: key.slice() });
|
|
783
|
+
const out = [];
|
|
784
|
+
for (const row of rows) {
|
|
785
|
+
if (row.key.length !== key.length + 1) {
|
|
786
|
+
continue;
|
|
787
|
+
}
|
|
788
|
+
if (row.value !== true) {
|
|
789
|
+
continue;
|
|
790
|
+
}
|
|
791
|
+
out.push(row.key[key.length]);
|
|
792
|
+
}
|
|
793
|
+
return out;
|
|
794
|
+
}
|
|
795
|
+
exportFile() {
|
|
796
|
+
return this.inner.exportFile();
|
|
797
|
+
}
|
|
798
|
+
scan(options = {}) {
|
|
799
|
+
const start = encodeBound(options.start);
|
|
800
|
+
const end = encodeBound(options.end);
|
|
801
|
+
const prefix = options.prefix ? options.prefix.slice() : undefined;
|
|
802
|
+
const rows = this.inner.scan(start, end, prefix);
|
|
803
|
+
if (!Array.isArray(rows)) {
|
|
804
|
+
return [];
|
|
805
|
+
}
|
|
806
|
+
return rows
|
|
807
|
+
.filter((row) => Boolean(row))
|
|
808
|
+
.map((row) => ({
|
|
809
|
+
key: Array.isArray(row.key) ? row.key : [],
|
|
810
|
+
raw: row.raw,
|
|
811
|
+
value: row.value,
|
|
812
|
+
}));
|
|
813
|
+
}
|
|
814
|
+
ensureNativeSubscription() {
|
|
815
|
+
if (this.nativeSubscriberId !== undefined) {
|
|
816
|
+
return;
|
|
817
|
+
}
|
|
818
|
+
this.nativeSubscriberId = this.inner.subscribe((payload) => {
|
|
819
|
+
(0, wasm_1.callPendingEvents)();
|
|
820
|
+
const batch = decodeEventBatch(payload);
|
|
821
|
+
this.handleBatch(batch);
|
|
822
|
+
});
|
|
823
|
+
}
|
|
824
|
+
handleBatch(batch) {
|
|
825
|
+
const bufferable = batch.source === "local";
|
|
826
|
+
this.eventBatcher.handleCommitEvents(batch.source, batch.events, bufferable);
|
|
827
|
+
}
|
|
828
|
+
deliverBatch(batch) {
|
|
829
|
+
if (this.listeners.size === 0) {
|
|
830
|
+
return;
|
|
831
|
+
}
|
|
832
|
+
const listeners = Array.from(this.listeners);
|
|
833
|
+
for (const listener of listeners) {
|
|
834
|
+
try {
|
|
835
|
+
listener(batch);
|
|
836
|
+
}
|
|
837
|
+
catch (error) {
|
|
838
|
+
void error;
|
|
839
|
+
}
|
|
840
|
+
}
|
|
841
|
+
}
|
|
842
|
+
subscribe(listener) {
|
|
843
|
+
this.listeners.add(listener);
|
|
844
|
+
this.ensureNativeSubscription();
|
|
845
|
+
return () => {
|
|
846
|
+
this.listeners.delete(listener);
|
|
847
|
+
// Optionally clean up native subscription when no listeners remain
|
|
848
|
+
if (this.listeners.size === 0 && this.nativeSubscriberId !== undefined) {
|
|
849
|
+
this.inner.unsubscribe(this.nativeSubscriberId);
|
|
850
|
+
this.nativeSubscriberId = undefined;
|
|
851
|
+
}
|
|
852
|
+
};
|
|
853
|
+
}
|
|
854
|
+
/**
|
|
855
|
+
* Enable auto-debounce mode. Events will be accumulated and emitted after
|
|
856
|
+
* the specified timeout of inactivity. Each new operation resets the timer.
|
|
857
|
+
*
|
|
858
|
+
* Use `commit()` to force immediate emission of pending events.
|
|
859
|
+
* Use `disableAutoDebounceCommit()` to disable and emit pending events.
|
|
860
|
+
*
|
|
861
|
+
* @param timeout - Debounce timeout in milliseconds
|
|
862
|
+
* @param options - Optional configuration object with maxDebounceTime (default: 10000ms)
|
|
863
|
+
* @throws Error if called while a transaction is active
|
|
864
|
+
* @throws Error if autoDebounceCommit is already active
|
|
865
|
+
*
|
|
866
|
+
* @example
|
|
867
|
+
* ```ts
|
|
868
|
+
* flock.autoDebounceCommit(100);
|
|
869
|
+
* flock.put(["a"], 1);
|
|
870
|
+
* flock.put(["b"], 2);
|
|
871
|
+
* // No events emitted yet...
|
|
872
|
+
* // After 100ms of inactivity, subscribers receive single EventBatch
|
|
873
|
+
* // If operations keep coming, commit happens after maxDebounceTime (10s default)
|
|
874
|
+
* ```
|
|
875
|
+
*/
|
|
876
|
+
autoDebounceCommit(timeout, options) {
|
|
877
|
+
if (this.isInTxn()) {
|
|
878
|
+
throw new Error("Cannot enable autoDebounceCommit while transaction is active");
|
|
879
|
+
}
|
|
880
|
+
this.eventBatcher.autoDebounceCommit(timeout, options);
|
|
881
|
+
}
|
|
882
|
+
/**
|
|
883
|
+
* Disable auto-debounce mode and emit any pending events immediately.
|
|
884
|
+
* No-op if autoDebounceCommit is not active.
|
|
885
|
+
*/
|
|
886
|
+
disableAutoDebounceCommit() {
|
|
887
|
+
this.eventBatcher.disableAutoDebounceCommit();
|
|
888
|
+
}
|
|
889
|
+
/**
|
|
890
|
+
* Force immediate emission of any pending debounced events.
|
|
891
|
+
* Does not disable auto-debounce mode - new operations will continue to be debounced.
|
|
892
|
+
* No-op if autoDebounceCommit is not active or no events are pending.
|
|
893
|
+
*/
|
|
894
|
+
commit() {
|
|
895
|
+
this.eventBatcher.commit();
|
|
896
|
+
}
|
|
897
|
+
/**
|
|
898
|
+
* Check if auto-debounce mode is currently active.
|
|
899
|
+
*/
|
|
900
|
+
isAutoDebounceActive() {
|
|
901
|
+
return this.eventBatcher.isAutoDebounceActive();
|
|
902
|
+
}
|
|
903
|
+
/**
|
|
904
|
+
* Execute operations within a transaction. All put/delete operations inside
|
|
905
|
+
* the callback will be batched and emitted as a single EventBatch when the
|
|
906
|
+
* transaction commits successfully.
|
|
907
|
+
*
|
|
908
|
+
* If the callback throws an error, the transaction is rolled back and no
|
|
909
|
+
* events are emitted. Note: Data changes are NOT rolled back - only event
|
|
910
|
+
* emission is affected.
|
|
911
|
+
*
|
|
912
|
+
* The callback must be synchronous. For async operations, use FlockSQLite.
|
|
913
|
+
*
|
|
914
|
+
* @param callback - Synchronous function containing put/delete operations
|
|
915
|
+
* @returns The return value of the callback
|
|
916
|
+
* @throws Error if nested transaction attempted
|
|
917
|
+
* @throws Error if import is called during the transaction (auto-commits first)
|
|
918
|
+
* @throws Error if called while autoDebounceCommit is active
|
|
919
|
+
*
|
|
920
|
+
* @example
|
|
921
|
+
* ```ts
|
|
922
|
+
* flock.txn(() => {
|
|
923
|
+
* flock.put(["a"], 1);
|
|
924
|
+
* flock.put(["b"], 2);
|
|
925
|
+
* flock.put(["c"], 3);
|
|
926
|
+
* });
|
|
927
|
+
* // Subscribers receive a single EventBatch with 3 events
|
|
928
|
+
* ```
|
|
929
|
+
*/
|
|
930
|
+
txn(callback) {
|
|
931
|
+
if (this.eventBatcher.isAutoDebounceActive()) {
|
|
932
|
+
throw new Error("Cannot start transaction while autoDebounceCommit is active");
|
|
933
|
+
}
|
|
934
|
+
this.inner.txnBegin();
|
|
935
|
+
try {
|
|
936
|
+
const result = callback();
|
|
937
|
+
this.inner.txnCommit();
|
|
938
|
+
return result;
|
|
939
|
+
}
|
|
940
|
+
catch (e) {
|
|
941
|
+
// Only rollback if transaction is still active.
|
|
942
|
+
// import_json auto-commits the transaction before throwing,
|
|
943
|
+
// so we must check before attempting rollback.
|
|
944
|
+
if (this.inner.isInTxn()) {
|
|
945
|
+
this.inner.txnRollback();
|
|
946
|
+
}
|
|
947
|
+
throw e;
|
|
948
|
+
}
|
|
949
|
+
}
|
|
950
|
+
/**
|
|
951
|
+
* Check if a transaction is currently active.
|
|
952
|
+
*/
|
|
953
|
+
isInTxn() {
|
|
954
|
+
return this.inner.isInTxn();
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
// Public entry point: the high-level Flock wrapper around the wasm ffi.
exports.Flock = Flock;
|
|
958
|
+
// Marker stamped on wrapped prototype methods so decorateMethod is idempotent.
const CALL_PENDING_EVENTS_WRAPPED = Symbol("flock.callPendingEventsWrapped");
|
|
959
|
+
/**
 * Wrap the data-property method `method` on `prototype` so that pending wasm
 * events are drained (via wasm_1.callPendingEvents) after every invocation:
 * immediately for synchronous results, after settlement for thenables.
 *
 * No-op when the property is missing, is not a plain function value
 * (accessors are skipped), or is already wrapped.
 */
function decorateMethod(prototype, method) {
    const descriptor = Object.getOwnPropertyDescriptor(prototype, method);
    // Skip accessors and non-function properties.
    if (!descriptor || typeof descriptor.value !== "function") {
        return;
    }
    const original = descriptor.value;
    // Idempotence guard: never double-wrap an already-decorated method.
    if (original[CALL_PENDING_EVENTS_WRAPPED]) {
        return;
    }
    const wrapped = function (...args) {
        let result;
        try {
            result = original.apply(this, args);
            return result;
        }
        finally {
            // If `result` is still undefined here the original threw
            // synchronously, so the else branch drains events right away.
            if (result && typeof result.then === "function") {
                // Deliberately detached promise chain: the caller still gets
                // the original `result` with its own rejection path.
                void result
                    .finally(() => {
                    (0, wasm_1.callPendingEvents)();
                })
                    .catch(() => {
                    // Ignore: this promise is only for triggering pending event draining.
                    // The original promise is returned to the caller and should surface errors there.
                });
            }
            else {
                (0, wasm_1.callPendingEvents)();
            }
        }
    };
    wrapped[CALL_PENDING_EVENTS_WRAPPED] = true;
    // Preserve the original arity for introspection-based callers.
    Object.defineProperty(wrapped, "length", { value: original.length });
    Object.defineProperty(prototype, method, {
        ...descriptor,
        value: wrapped,
    });
}
|
|
997
|
+
/**
 * Walk `prototype` and its ancestors (stopping before Object.prototype and
 * Function.prototype) and wrap every own method via decorateMethod. Only the
 * first occurrence of each property key along the chain is decorated, so a
 * child override shields the parent implementation from wrapping.
 */
function decorateAllPrototypeMethods(prototype) {
    const seen = new Set();
    for (let proto = prototype; proto && proto !== Object.prototype && proto !== Function.prototype; proto = Object.getPrototypeOf(proto)) {
        // Reflect.ownKeys yields own string keys followed by own symbols —
        // the same set (and order) as getOwnPropertyNames + getOwnPropertySymbols.
        for (const prop of Reflect.ownKeys(proto)) {
            if (prop === "constructor" || seen.has(prop)) {
                continue;
            }
            seen.add(prop);
            decorateMethod(proto, prop);
        }
    }
}
|
|
1018
|
+
// Wrap every Flock method so wasm pending events are drained after each call.
decorateAllPrototypeMethods(Flock.prototype);
|