@topgunbuild/server 0.9.0 → 0.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -0
- package/dist/BetterSqlite3Adapter-LUMODVC3.mjs +10 -0
- package/dist/BetterSqlite3Adapter-LUMODVC3.mjs.map +1 -0
- package/dist/chunk-5CZA6O2S.mjs +782 -0
- package/dist/chunk-5CZA6O2S.mjs.map +1 -0
- package/dist/chunk-73CP5EN6.mjs +227 -0
- package/dist/chunk-73CP5EN6.mjs.map +1 -0
- package/dist/chunk-FJ6ZGZIA.mjs +43 -0
- package/dist/chunk-FJ6ZGZIA.mjs.map +1 -0
- package/dist/chunk-IQNKZPW3.mjs +31660 -0
- package/dist/chunk-IQNKZPW3.mjs.map +1 -0
- package/dist/chunk-ZTICMRY6.mjs +7 -0
- package/dist/chunk-ZTICMRY6.mjs.map +1 -0
- package/dist/index.d.mts +6384 -2294
- package/dist/index.d.ts +6384 -2294
- package/dist/index.js +31550 -12109
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +519 -13600
- package/dist/index.mjs.map +1 -1
- package/dist/lib-ZCWT55TO.mjs +6 -0
- package/dist/lib-ZCWT55TO.mjs.map +1 -0
- package/dist/start-server.d.mts +2 -0
- package/dist/start-server.d.ts +2 -0
- package/dist/start-server.js +31751 -0
- package/dist/start-server.js.map +1 -0
- package/dist/start-server.mjs +112 -0
- package/dist/start-server.mjs.map +1 -0
- package/dist/workers/worker-scripts/base.worker.js +477 -0
- package/dist/workers/worker-scripts/base.worker.js.map +1 -0
- package/dist/workers/worker-scripts/crdt.worker.js +452 -0
- package/dist/workers/worker-scripts/crdt.worker.js.map +1 -0
- package/dist/workers/worker-scripts/merkle.worker.js +452 -0
- package/dist/workers/worker-scripts/merkle.worker.js.map +1 -0
- package/dist/workers/worker-scripts/serialization.worker.js +452 -0
- package/dist/workers/worker-scripts/serialization.worker.js.map +1 -0
- package/dist/workers/worker-scripts/test.worker.js +452 -0
- package/dist/workers/worker-scripts/test.worker.js.map +1 -0
- package/package.json +9 -4
|
@@ -0,0 +1,452 @@
|
|
|
1
|
+
"use strict";
// ---- esbuild CommonJS interop helpers (generated bundler prelude) ----
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Wraps a module initializer so it runs exactly once. `fn` is an object with a
// single function property (the module body); on the first call it is invoked
// and then cleared (`fn = 0`), so subsequent calls just return the cached `res`.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Copies all own properties of `from` onto `to` as live getters (skipping any
// already present on `to` and the `except` key), preserving enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Produces a CommonJS export object from an ES-module namespace, marking it
// with `__esModule` for downstream interop checks.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
18
|
+
|
|
19
|
+
// src/workers/worker-scripts/merkle.worker.ts
// Namespace object for this bundled module (no named exports; kept for interop).
var merkle_worker_exports = {};
|
|
21
|
+
/**
 * 32-bit FNV-1a hash of a string, computed over its UTF-16 code units.
 * Mirrors the hash used by the core MerkleTree so worker-side hashes match.
 *
 * @param {string} str - Input string.
 * @returns {number} Unsigned 32-bit hash.
 */
function hashString(str) {
  const FNV_OFFSET_BASIS = 0x811c9dc5; // 2166136261
  const FNV_PRIME = 0x01000193; // 16777619
  let acc = FNV_OFFSET_BASIS;
  let idx = 0;
  while (idx < str.length) {
    // xor in the next code unit, then multiply modulo 2^32.
    acc = Math.imul(acc ^ str.charCodeAt(idx), FNV_PRIME);
    idx += 1;
  }
  // Force the result into unsigned 32-bit range.
  return acc >>> 0;
}
|
|
29
|
+
/**
 * Hash of a single LWWMap entry: the key joined with its full HLC timestamp
 * (millis, counter, nodeId), colon-separated — same layout the core
 * MerkleTree uses, so hashes are comparable across threads.
 *
 * @param {string} key
 * @param {number} millis
 * @param {number} counter
 * @param {string} nodeId
 * @returns {number} Unsigned 32-bit hash.
 */
function computeItemHash(key, millis, counter, nodeId) {
  const material = [key, millis, counter, nodeId].join(":");
  return hashString(material);
}
|
|
32
|
+
/**
 * Deterministic hash of an ORMap entry: records are ordered by tag before
 * hashing so the result is independent of input order.
 *
 * @param {string} key - Entry key.
 * @param {Array<{tag: string, timestamp: {millis: number, counter: number, nodeId: string}}>} records
 * @returns {number} Unsigned 32-bit hash.
 */
function computeORMapEntryHash(key, records) {
  // Copy before sorting — the caller's array must not be mutated.
  const ordered = records.slice().sort((x, y) => x.tag.localeCompare(y.tag));
  const parts = [key];
  for (const { tag, timestamp } of ordered) {
    parts.push(tag, timestamp.millis, timestamp.counter, timestamp.nodeId);
  }
  // join(":") reproduces the "key:tag:millis:counter:nodeId..." layout.
  return hashString(parts.join(":"));
}
|
|
40
|
+
/**
 * Builds a Merkle trie from pre-hashed entries. Each key's position is the
 * first `depth` hex digits of hashString(key); leaves hold the item hashes.
 *
 * @param {Array<{key: string, hash: number}>} entries
 * @param {number} depth - Number of trie levels above the leaf buckets.
 * @returns {{root: object, buckets: Map<string, {hash: number, keys: string[]}>}}
 */
function buildMerkleTree(entries, depth) {
  const root = { hash: 0, children: {} };
  for (const entry of entries) {
    // 8 hex digits = full 32-bit path; only the first `depth` are consumed.
    const path = hashString(entry.key).toString(16).padStart(8, "0");
    updateNode(root, entry.key, entry.hash, path, 0, depth);
  }
  // Gather the leaf-level buckets as path -> { hash, keys }.
  const buckets = new Map();
  collectBuckets(root, "", depth, buckets);
  return { root, buckets };
}
|
|
50
|
+
/**
 * Inserts one item hash into the trie and recomputes hashes on the way back
 * up. Node hashes are 32-bit wrapping sums (leaf: of item hashes; interior:
 * of child hashes).
 *
 * @param {object} node - Current trie node (mutated).
 * @param {string} key - Entry key.
 * @param {number} itemHash - Precomputed item hash.
 * @param {string} pathHash - Hex path string; digit at `level` picks the child.
 * @param {number} level - Current depth in the trie.
 * @param {number} depth - Leaf depth.
 * @returns {number} The node's updated unsigned 32-bit hash.
 */
function updateNode(node, key, itemHash, pathHash, level, depth) {
  if (level >= depth) {
    // Leaf bucket: record the item, then recompute the bucket hash as the
    // wrapping sum of every item hash stored here.
    if (node.entries === undefined) node.entries = new Map();
    node.entries.set(key, itemHash);
    let leafSum = 0;
    node.entries.forEach((storedHash) => {
      leafSum = (leafSum + storedHash) | 0;
    });
    node.hash = leafSum >>> 0;
    return node.hash;
  }

  // Interior node: descend along this level's hex digit, creating the child
  // lazily if needed.
  const digit = pathHash[level];
  if (node.children === undefined) node.children = {};
  if (node.children[digit] === undefined) {
    node.children[digit] = { hash: 0 };
  }
  updateNode(node.children[digit], key, itemHash, pathHash, level + 1, depth);

  // Recompute this node's hash as the wrapping sum of its children's hashes.
  let innerSum = 0;
  for (const childDigit of Object.keys(node.children)) {
    innerSum = (innerSum + node.children[childDigit].hash) | 0;
  }
  node.hash = innerSum >>> 0;
  return node.hash;
}
|
|
74
|
+
/**
 * Walks the trie and records every non-empty leaf bucket into `buckets`,
 * keyed by its hex path prefix.
 *
 * @param {object} node - Current trie node.
 * @param {string} path - Hex digits accumulated so far.
 * @param {number} depth - Leaf depth.
 * @param {Map<string, {hash: number, keys: string[]}>} buckets - Output map (mutated).
 * @returns {void}
 */
function collectBuckets(node, path, depth, buckets) {
  if (path.length >= depth) {
    // Leaf level: emit a bucket only when it actually holds entries.
    if (node.entries !== undefined && node.entries.size > 0) {
      const bucketKeys = [...node.entries.keys()];
      buckets.set(path, { hash: node.hash, keys: bucketKeys });
    }
    return;
  }
  // Interior node: recurse into each child, extending the path by its digit.
  if (node.children === undefined) return;
  for (const digit of Object.keys(node.children)) {
    collectBuckets(node.children[digit], path + digit, depth, buckets);
  }
}
|
|
90
|
+
// Lazy one-shot initializer for merkle.worker (runs via __esm on first call).
// Registers the CPU-intensive Merkle-tree handlers: hashing batches of
// LWWMap/ORMap entries, diffing local vs remote trees, and rebuilding trees.
var init_merkle_worker = __esm({
  "src/workers/worker-scripts/merkle.worker.ts"() {
    "use strict";
    init_base_worker();
    // merkle-hash: hash a batch of LWWMap entries and build the tree.
    // Returns per-key hashes plus the tree root hash and its leaf buckets.
    registerHandler("merkle-hash", (payload) => {
      const { entries, depth = 3 } = payload;
      const hashEntries = [];
      const hashes = [];
      for (const entry of entries) {
        // Item hash covers the key and the full HLC timestamp components.
        const itemHash = computeItemHash(
          entry.key,
          entry.timestamp.millis,
          entry.timestamp.counter,
          entry.timestamp.nodeId
        );
        hashEntries.push({ key: entry.key, hash: itemHash });
        hashes.push([entry.key, itemHash]);
      }
      const { root, buckets } = buildMerkleTree(hashEntries, depth);
      return {
        hashes,
        rootHash: root.hash,
        // Map -> array of [path, bucket] pairs for postMessage transfer.
        buckets: Array.from(buckets.entries())
      };
    });
    // merkle-hash-ormap: same shape as merkle-hash, but each entry carries a
    // set of tagged records (ORMap) hashed deterministically by tag order.
    registerHandler("merkle-hash-ormap", (payload) => {
      const { entries, depth = 3 } = payload;
      const hashEntries = [];
      const hashes = [];
      for (const entry of entries) {
        const entryHash = computeORMapEntryHash(entry.key, entry.records);
        hashEntries.push({ key: entry.key, hash: entryHash });
        hashes.push([entry.key, entryHash]);
      }
      const { root, buckets } = buildMerkleTree(hashEntries, depth);
      return {
        hashes,
        rootHash: root.hash,
        buckets: Array.from(buckets.entries())
      };
    });
    // merkle-diff: compare local vs remote leaf buckets; report the keys each
    // side is missing and the bucket paths whose hashes differ.
    registerHandler("merkle-diff", (payload) => {
      const { localBuckets, remoteBuckets } = payload;
      const localMap = new Map(localBuckets);
      const remoteMap = new Map(remoteBuckets);
      const missingLocal = [];
      const missingRemote = [];
      const differingPaths = [];
      for (const [path, remoteBucket] of remoteMap) {
        const localBucket = localMap.get(path);
        if (!localBucket) {
          // Whole bucket absent locally: every remote key is missing here.
          missingLocal.push(...remoteBucket.keys);
        } else if (localBucket.hash !== remoteBucket.hash) {
          // Same path, different hash: compare key sets in both directions.
          differingPaths.push(path);
          const localKeys = new Set(localBucket.keys);
          const remoteKeys = new Set(remoteBucket.keys);
          for (const key of remoteKeys) {
            if (!localKeys.has(key)) {
              missingLocal.push(key);
            }
          }
          for (const key of localKeys) {
            if (!remoteKeys.has(key)) {
              missingRemote.push(key);
            }
          }
        }
      }
      // Buckets that only exist locally: remote is missing all their keys.
      for (const [path, localBucket] of localMap) {
        if (!remoteMap.has(path)) {
          missingRemote.push(...localBucket.keys);
        }
      }
      return {
        missingLocal,
        missingRemote,
        differingPaths
      };
    });
    // merkle-rebuild: rebuild an LWWMap tree from scratch out of raw records.
    registerHandler("merkle-rebuild", (payload) => {
      const { records, depth = 3 } = payload;
      const hashEntries = [];
      for (const record of records) {
        const itemHash = computeItemHash(
          record.key,
          record.timestamp.millis,
          record.timestamp.counter,
          record.timestamp.nodeId
        );
        hashEntries.push({ key: record.key, hash: itemHash });
      }
      const { root, buckets } = buildMerkleTree(hashEntries, depth);
      return {
        rootHash: root.hash,
        buckets: Array.from(buckets.entries())
      };
    });
    // merkle-rebuild-ormap: ORMap variant; each record hashes its tag set.
    registerHandler("merkle-rebuild-ormap", (payload) => {
      const { records, depth = 3 } = payload;
      const hashEntries = [];
      for (const record of records) {
        const entryHash = computeORMapEntryHash(record.key, record.tags);
        hashEntries.push({ key: record.key, hash: entryHash });
      }
      const { root, buckets } = buildMerkleTree(hashEntries, depth);
      return {
        rootHash: root.hash,
        buckets: Array.from(buckets.entries())
      };
    });
  }
});
|
|
202
|
+
|
|
203
|
+
// src/workers/worker-scripts/serialization.worker.ts
// Namespace object for this bundled module (no named exports; kept for interop).
var serialization_worker_exports = {};
|
|
205
|
+
/**
 * Encodes a byte array as base64 for postMessage transfer: build a Latin-1
 * string (one char per byte), then base64-encode it with btoa.
 *
 * @param {Uint8Array} bytes
 * @returns {string} Base64-encoded string.
 */
function uint8ArrayToBase64(bytes) {
  const chars = [];
  for (const byte of bytes) {
    // Each byte is 0..255, so fromCharCode stays inside btoa's Latin-1 range.
    chars.push(String.fromCharCode(byte));
  }
  return btoa(chars.join(""));
}
|
|
212
|
+
/**
 * Decodes a base64 string (produced by uint8ArrayToBase64) back into bytes:
 * atob yields a Latin-1 string whose char codes are the original bytes.
 *
 * @param {string} base64
 * @returns {Uint8Array} Decoded bytes.
 */
function base64ToUint8Array(base64) {
  const decoded = atob(base64);
  const out = new Uint8Array(decoded.length);
  let pos = 0;
  // atob output is all <= 0xFF, so per-character iteration is one byte each.
  for (const ch of decoded) {
    out[pos] = ch.charCodeAt(0);
    pos += 1;
  }
  return out;
}
|
|
220
|
+
// Holds the lazily-required @topgunbuild/core module (serialize/deserialize);
// assigned inside init_serialization_worker().
var import_core;
// Lazy one-shot initializer for serialization.worker (runs via __esm on first
// call). Registers MessagePack (de)serialization handlers; binary payloads
// cross the postMessage boundary as base64 strings.
var init_serialization_worker = __esm({
  "src/workers/worker-scripts/serialization.worker.ts"() {
    "use strict";
    init_base_worker();
    import_core = require("@topgunbuild/core");
    // serialize: items -> base64-encoded binary blobs.
    registerHandler("serialize", (payload) => {
      const { items } = payload;
      const serialized = [];
      for (const item of items) {
        const bytes = (0, import_core.serialize)(item);
        serialized.push(uint8ArrayToBase64(bytes));
      }
      return { serialized };
    });
    // deserialize: base64 blobs -> original objects.
    registerHandler("deserialize", (payload) => {
      const { items } = payload;
      const deserialized = [];
      for (const item of items) {
        const bytes = base64ToUint8Array(item);
        deserialized.push((0, import_core.deserialize)(bytes));
      }
      return { deserialized };
    });
  }
});
|
|
246
|
+
|
|
247
|
+
// src/workers/worker-scripts/test.worker.ts
// Namespace object for this bundled module (no named exports; kept for interop).
var test_worker_exports = {};
// Lazy one-shot initializer for test.worker (runs via __esm on first call).
// Registers handlers used to exercise WorkerPool functionality in tests.
var init_test_worker = __esm({
  "src/workers/worker-scripts/test.worker.ts"() {
    "use strict";
    init_base_worker();
    // echo: return the payload unchanged.
    registerHandler("echo", (payload) => {
      return payload;
    });
    // delayed-echo: resolve with `data` after `delay` ms (simulated latency).
    registerHandler("delayed-echo", async (payload) => {
      const { data, delay } = payload;
      await new Promise((resolve) => setTimeout(resolve, delay));
      return data;
    });
    // throw-error: always throws, to test error propagation to the pool.
    registerHandler("throw-error", (payload) => {
      const { message } = payload;
      throw new Error(message);
    });
    // cpu-work: busy-loop summing square roots (CPU-bound work simulation).
    registerHandler("cpu-work", (payload) => {
      const { iterations } = payload;
      let result = 0;
      for (let i = 0; i < iterations; i++) {
        result += Math.sqrt(i);
      }
      return result;
    });
    // return-undefined / return-null: verify falsy results survive the
    // worker round trip.
    registerHandler("return-undefined", () => {
      return void 0;
    });
    registerHandler("return-null", () => {
      return null;
    });
  }
});
|
|
281
|
+
|
|
282
|
+
// src/workers/worker-scripts/base.worker.ts
/**
 * Registers a handler for a task `type`. A later registration for the same
 * type overwrites the earlier one (Map.set semantics).
 *
 * @param {string} type - Task type identifier.
 * @param {(payload: unknown) => unknown | Promise<unknown>} handler
 */
function registerHandler(type, handler) {
  handlers.set(type, handler);
}
// `import_worker_threads` caches the worker_threads module; `handlers` maps
// task type -> handler. Both are assigned inside init_base_worker().
var import_worker_threads, handlers;
|
|
287
|
+
// Lazy one-shot initializer for base.worker (runs via __esm on first call).
// Creates the handler registry, wires the parentPort message loop, then loads
// the specialized workers so each registers its handlers.
var init_base_worker = __esm({
  "src/workers/worker-scripts/base.worker.ts"() {
    "use strict";
    import_worker_threads = require("worker_threads");
    handlers = /* @__PURE__ */ new Map();
    // parentPort is null when loaded on the main thread; the message loop is
    // only attached inside an actual worker thread.
    if (import_worker_threads.parentPort) {
      import_worker_threads.parentPort.on("message", async (task) => {
        const { id, type, payload } = task;
        // Response echoes the task id so the pool can match it to its promise.
        const response = {
          id,
          success: false
        };
        try {
          const handler = handlers.get(type);
          if (!handler) {
            throw new Error(`Unknown task type: ${type}`);
          }
          // Handlers may be sync or async; await normalizes both.
          const result = await handler(payload);
          response.success = true;
          response.result = result;
        } catch (error) {
          // Only the message string crosses the thread boundary, not the
          // Error object itself.
          response.success = false;
          response.error = error instanceof Error ? error.message : String(error);
        }
        import_worker_threads.parentPort.postMessage(response);
      });
    }
    // Side-effect loads: each init_* registers its handlers. The mutual
    // init_* calls are safe because __esm runs each body at most once.
    init_crdt_worker();
    init_merkle_worker();
    init_serialization_worker();
    init_test_worker();
  }
});
|
|
320
|
+
|
|
321
|
+
// src/workers/worker-scripts/crdt.worker.ts
// Namespace object for this bundled module (no named exports; kept for interop).
var crdt_worker_exports = {};
|
|
323
|
+
/**
 * Total order over HLC timestamps: wall-clock millis first, then the logical
 * counter, then the node id as a final tiebreaker.
 *
 * @param {{millis: number, counter: number, nodeId: string}} a
 * @param {{millis: number, counter: number, nodeId: string}} b
 * @returns {number} Negative if a < b, positive if a > b, 0 if equal.
 */
function compareTimestamps(a, b) {
  const byMillis = a.millis - b.millis;
  if (byMillis !== 0) {
    return byMillis;
  }
  const byCounter = a.counter - b.counter;
  if (byCounter !== 0) {
    return byCounter;
  }
  return a.nodeId.localeCompare(b.nodeId);
}
|
|
332
|
+
// Lazy one-shot initializer for crdt.worker (runs via __esm on first call).
// Registers CPU-intensive CRDT merge handlers: Last-Write-Wins map merge and
// observed-remove map (ORMap) merge.
var init_crdt_worker = __esm({
  "src/workers/worker-scripts/crdt.worker.ts"() {
    init_base_worker();
    // lww-merge: decide per record whether the incoming write beats the
    // existing one (HLC comparison); collect conflicts and a skip count.
    registerHandler("lww-merge", (payload) => {
      const { records, existingState } = payload;
      // Index existing state by key for O(1) lookups during the merge.
      const existingMap = /* @__PURE__ */ new Map();
      for (const existing of existingState) {
        existingMap.set(existing.key, {
          value: existing.value,
          timestamp: existing.timestamp,
          ttlMs: existing.ttlMs
        });
      }
      const toApply = [];
      const conflicts = [];
      let skipped = 0;
      for (const record of records) {
        const existing = existingMap.get(record.key);
        if (!existing) {
          // First write for this key: always applied. The map is updated so
          // later records in the same batch merge against this one.
          toApply.push({
            key: record.key,
            value: record.value,
            timestamp: record.timestamp,
            ttlMs: record.ttlMs
          });
          existingMap.set(record.key, {
            value: record.value,
            timestamp: record.timestamp,
            ttlMs: record.ttlMs
          });
          continue;
        }
        const cmp = compareTimestamps(record.timestamp, existing.timestamp);
        // Concurrent-write conflict: identical wall clock but a different
        // counter or node id.
        const isConflict = record.timestamp.millis === existing.timestamp.millis && (record.timestamp.counter !== existing.timestamp.counter || record.timestamp.nodeId !== existing.timestamp.nodeId);
        if (cmp > 0) {
          // Incoming record is newer: apply it (noting the conflict if any).
          toApply.push({
            key: record.key,
            value: record.value,
            timestamp: record.timestamp,
            ttlMs: record.ttlMs
          });
          existingMap.set(record.key, {
            value: record.value,
            timestamp: record.timestamp,
            ttlMs: record.ttlMs
          });
          if (isConflict) {
            conflicts.push(record.key);
          }
        } else if (cmp === 0) {
          // Exactly equal timestamps: a conflict; the existing record wins.
          conflicts.push(record.key);
          skipped++;
        } else {
          // Incoming record is older: skip it (still reported as a conflict
          // when it was a concurrent write).
          if (isConflict) {
            conflicts.push(record.key);
          }
          skipped++;
        }
      }
      return {
        toApply,
        skipped,
        conflicts
      };
    });
    // ormap-merge: apply new tombstones first (removals win), then add items
    // whose tags are neither tombstoned nor already present.
    registerHandler("ormap-merge", (payload) => {
      const {
        items,
        tombstones,
        existingTags,
        existingTombstones
      } = payload;
      // Sets give O(1) membership checks over existing tags/tombstones.
      const tagSet = new Set(existingTags);
      const tombstoneSet = new Set(existingTombstones);
      const itemsToApply = [];
      const tombstonesToApply = [];
      const tagsToRemove = [];
      let itemsSkipped = 0;
      let tombstonesSkipped = 0;
      for (const tombstone of tombstones) {
        // Already-known tombstones are idempotently skipped.
        if (tombstoneSet.has(tombstone.tag)) {
          tombstonesSkipped++;
          continue;
        }
        tombstonesToApply.push(tombstone.tag);
        tombstoneSet.add(tombstone.tag);
        // A tombstone for a live tag also removes that tag.
        if (tagSet.has(tombstone.tag)) {
          tagsToRemove.push(tombstone.tag);
          tagSet.delete(tombstone.tag);
        }
      }
      for (const item of items) {
        // Tombstoned tags are never re-added (observed-remove semantics).
        if (tombstoneSet.has(item.tag)) {
          itemsSkipped++;
          continue;
        }
        // Duplicate tags are skipped (already present).
        if (tagSet.has(item.tag)) {
          itemsSkipped++;
          continue;
        }
        itemsToApply.push({
          key: item.key,
          value: item.value,
          timestamp: item.timestamp,
          tag: item.tag,
          ttlMs: item.ttlMs
        });
        tagSet.add(item.tag);
      }
      return {
        itemsToApply,
        tombstonesToApply,
        tagsToRemove,
        itemsSkipped,
        tombstonesSkipped
      };
    });
  }
});
|
|
451
|
+
// Entry point: this bundle is the crdt worker script; initializing it also
// transitively initializes base + all sibling workers via the __esm calls.
init_crdt_worker();
//# sourceMappingURL=crdt.worker.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/workers/worker-scripts/merkle.worker.ts","../../../src/workers/worker-scripts/serialization.worker.ts","../../../src/workers/worker-scripts/test.worker.ts","../../../src/workers/worker-scripts/base.worker.ts","../../../src/workers/worker-scripts/crdt.worker.ts"],"sourcesContent":["/**\n * Merkle Worker Script\n * MerkleWorker Implementation\n *\n * Handles CPU-intensive Merkle tree operations:\n * - merkle-hash: Compute hashes for batch of LWWMap entries\n * - merkle-hash-ormap: Compute hashes for batch of ORMap entries\n * - merkle-diff: Find differences between local and remote trees\n * - merkle-rebuild: Rebuild tree from records\n */\n\nimport { registerHandler } from './base.worker';\nimport type {\n MerkleHashPayload,\n MerkleHashResult,\n ORMapMerkleHashPayload,\n ORMapMerkleHashResult,\n MerkleDiffPayload,\n MerkleDiffResult,\n MerkleRebuildPayload,\n MerkleRebuildResult,\n ORMapMerkleRebuildPayload,\n BucketInfo,\n} from '../merkle-types';\n\n// ============ Hash Functions (same as core) ============\n\n/**\n * FNV-1a Hash implementation for strings.\n * Identical to packages/core/src/utils/hash.ts\n */\nfunction hashString(str: string): number {\n let hash = 0x811c9dc5;\n for (let i = 0; i < str.length; i++) {\n hash ^= str.charCodeAt(i);\n hash = Math.imul(hash, 0x01000193);\n }\n return hash >>> 0;\n}\n\n/**\n * Compute item hash for LWWMap entry.\n * Same as in MerkleTree.update()\n */\nfunction computeItemHash(\n key: string,\n millis: number,\n counter: number,\n nodeId: string\n): number {\n return hashString(`${key}:${millis}:${counter}:${nodeId}`);\n}\n\n/**\n * Compute entry hash for ORMap entry.\n * Same as hashORMapEntry in ORMapMerkle.ts\n */\nfunction computeORMapEntryHash(\n key: string,\n records: Array<{ tag: string; timestamp: { millis: number; counter: number; nodeId: string } }>\n): number {\n // Sort records by tag for deterministic hashing\n const sortedRecords = [...records].sort((a, b) => 
a.tag.localeCompare(b.tag));\n\n let combinedStr = key;\n for (const record of sortedRecords) {\n combinedStr += `:${record.tag}:${record.timestamp.millis}:${record.timestamp.counter}:${record.timestamp.nodeId}`;\n }\n\n return hashString(combinedStr);\n}\n\n// ============ Merkle Tree Node Structure ============\n\ninterface MerkleNode {\n hash: number;\n children?: { [key: string]: MerkleNode };\n entries?: Map<string, number>;\n}\n\n/**\n * Build a Merkle tree from entries\n */\nfunction buildMerkleTree(\n entries: Array<{ key: string; hash: number }>,\n depth: number\n): { root: MerkleNode; buckets: Map<string, { hash: number; keys: string[] }> } {\n const root: MerkleNode = { hash: 0, children: {} };\n const buckets = new Map<string, { hash: number; keys: string[] }>();\n\n for (const { key, hash: itemHash } of entries) {\n const pathHash = hashString(key).toString(16).padStart(8, '0');\n updateNode(root, key, itemHash, pathHash, 0, depth);\n }\n\n // Collect buckets at leaf level\n collectBuckets(root, '', depth, buckets);\n\n return { root, buckets };\n}\n\nfunction updateNode(\n node: MerkleNode,\n key: string,\n itemHash: number,\n pathHash: string,\n level: number,\n depth: number\n): number {\n // Leaf Node Logic\n if (level >= depth) {\n if (!node.entries) node.entries = new Map();\n node.entries.set(key, itemHash);\n\n // Recalculate leaf hash (Sum of item hashes)\n let h = 0;\n for (const val of node.entries.values()) {\n h = (h + val) | 0;\n }\n node.hash = h >>> 0;\n return node.hash;\n }\n\n // Intermediate Node Logic\n const bucketChar = pathHash[level];\n if (!node.children) node.children = {};\n\n if (!node.children[bucketChar]) {\n node.children[bucketChar] = { hash: 0 };\n }\n\n updateNode(node.children[bucketChar], key, itemHash, pathHash, level + 1, depth);\n\n // Recalculate this node's hash from children\n let h = 0;\n for (const child of Object.values(node.children)) {\n h = (h + child.hash) | 0;\n }\n node.hash = h >>> 0;\n return 
node.hash;\n}\n\nfunction collectBuckets(\n node: MerkleNode,\n path: string,\n depth: number,\n buckets: Map<string, { hash: number; keys: string[] }>\n): void {\n if (path.length >= depth) {\n // Leaf level\n if (node.entries && node.entries.size > 0) {\n buckets.set(path, {\n hash: node.hash,\n keys: Array.from(node.entries.keys()),\n });\n }\n return;\n }\n\n if (node.children) {\n for (const [char, child] of Object.entries(node.children)) {\n collectBuckets(child, path + char, depth, buckets);\n }\n }\n}\n\n// ============ Handler: merkle-hash (LWWMap) ============\n\nregisterHandler('merkle-hash', (payload: unknown): MerkleHashResult => {\n const { entries, depth = 3 } = payload as MerkleHashPayload;\n\n // Compute hashes for each entry\n const hashEntries: Array<{ key: string; hash: number }> = [];\n const hashes: Array<[string, number]> = [];\n\n for (const entry of entries) {\n const itemHash = computeItemHash(\n entry.key,\n entry.timestamp.millis,\n entry.timestamp.counter,\n entry.timestamp.nodeId\n );\n hashEntries.push({ key: entry.key, hash: itemHash });\n hashes.push([entry.key, itemHash]);\n }\n\n // Build tree\n const { root, buckets } = buildMerkleTree(hashEntries, depth);\n\n return {\n hashes,\n rootHash: root.hash,\n buckets: Array.from(buckets.entries()),\n };\n});\n\n// ============ Handler: merkle-hash-ormap ============\n\nregisterHandler('merkle-hash-ormap', (payload: unknown): ORMapMerkleHashResult => {\n const { entries, depth = 3 } = payload as ORMapMerkleHashPayload;\n\n // Compute hashes for each entry\n const hashEntries: Array<{ key: string; hash: number }> = [];\n const hashes: Array<[string, number]> = [];\n\n for (const entry of entries) {\n const entryHash = computeORMapEntryHash(entry.key, entry.records);\n hashEntries.push({ key: entry.key, hash: entryHash });\n hashes.push([entry.key, entryHash]);\n }\n\n // Build tree\n const { root, buckets } = buildMerkleTree(hashEntries, depth);\n\n return {\n hashes,\n rootHash: 
root.hash,\n buckets: Array.from(buckets.entries()),\n };\n});\n\n// ============ Handler: merkle-diff ============\n\nregisterHandler('merkle-diff', (payload: unknown): MerkleDiffResult => {\n const { localBuckets, remoteBuckets } = payload as MerkleDiffPayload;\n\n const localMap = new Map<string, BucketInfo>(localBuckets);\n const remoteMap = new Map<string, BucketInfo>(remoteBuckets);\n\n const missingLocal: string[] = [];\n const missingRemote: string[] = [];\n const differingPaths: string[] = [];\n\n // Find keys missing locally (exist on remote but not local)\n for (const [path, remoteBucket] of remoteMap) {\n const localBucket = localMap.get(path);\n\n if (!localBucket) {\n // Entire bucket missing locally\n missingLocal.push(...remoteBucket.keys);\n } else if (localBucket.hash !== remoteBucket.hash) {\n // Buckets differ - need deeper comparison\n differingPaths.push(path);\n\n // Find specific keys that differ\n const localKeys = new Set(localBucket.keys);\n const remoteKeys = new Set(remoteBucket.keys);\n\n for (const key of remoteKeys) {\n if (!localKeys.has(key)) {\n missingLocal.push(key);\n }\n }\n\n for (const key of localKeys) {\n if (!remoteKeys.has(key)) {\n missingRemote.push(key);\n }\n }\n }\n }\n\n // Find keys missing on remote (exist locally but not on remote)\n for (const [path, localBucket] of localMap) {\n if (!remoteMap.has(path)) {\n missingRemote.push(...localBucket.keys);\n }\n }\n\n return {\n missingLocal,\n missingRemote,\n differingPaths,\n };\n});\n\n// ============ Handler: merkle-rebuild (LWWMap) ============\n\nregisterHandler('merkle-rebuild', (payload: unknown): MerkleRebuildResult => {\n const { records, depth = 3 } = payload as MerkleRebuildPayload;\n\n // Compute hashes for each record\n const hashEntries: Array<{ key: string; hash: number }> = [];\n\n for (const record of records) {\n const itemHash = computeItemHash(\n record.key,\n record.timestamp.millis,\n record.timestamp.counter,\n record.timestamp.nodeId\n );\n 
hashEntries.push({ key: record.key, hash: itemHash });\n }\n\n // Build tree\n const { root, buckets } = buildMerkleTree(hashEntries, depth);\n\n return {\n rootHash: root.hash,\n buckets: Array.from(buckets.entries()),\n };\n});\n\n// ============ Handler: merkle-rebuild-ormap ============\n\nregisterHandler('merkle-rebuild-ormap', (payload: unknown): MerkleRebuildResult => {\n const { records, depth = 3 } = payload as ORMapMerkleRebuildPayload;\n\n // Compute hashes for each record\n const hashEntries: Array<{ key: string; hash: number }> = [];\n\n for (const record of records) {\n const entryHash = computeORMapEntryHash(record.key, record.tags);\n hashEntries.push({ key: record.key, hash: entryHash });\n }\n\n // Build tree\n const { root, buckets } = buildMerkleTree(hashEntries, depth);\n\n return {\n rootHash: root.hash,\n buckets: Array.from(buckets.entries()),\n };\n});\n","/**\n * Serialization Worker Script\n * SerializationWorker Implementation\n *\n * Handles CPU-intensive serialization/deserialization operations:\n * - serialize: Serialize objects to MessagePack binary format\n * - deserialize: Deserialize MessagePack binary data to objects\n *\n * Uses base64 encoding for transferring binary data through postMessage.\n */\n\nimport { registerHandler } from './base.worker';\nimport { serialize, deserialize } from '@topgunbuild/core';\nimport type {\n SerializeBatchPayload,\n SerializeBatchResult,\n DeserializeBatchPayload,\n DeserializeBatchResult,\n} from '../serialization-types';\n\n// ============ Helper Functions ============\n\n/**\n * Convert Uint8Array to base64 string for postMessage transfer\n */\nfunction uint8ArrayToBase64(bytes: Uint8Array): string {\n let binary = '';\n for (let i = 0; i < bytes.length; i++) {\n binary += String.fromCharCode(bytes[i]);\n }\n return btoa(binary);\n}\n\n/**\n * Convert base64 string back to Uint8Array\n */\nfunction base64ToUint8Array(base64: string): Uint8Array {\n const binary = atob(base64);\n const bytes 
= new Uint8Array(binary.length);\n for (let i = 0; i < binary.length; i++) {\n bytes[i] = binary.charCodeAt(i);\n }\n return bytes;\n}\n\n// ============ Handler: serialize ============\n\nregisterHandler('serialize', (payload: unknown): SerializeBatchResult => {\n const { items } = payload as SerializeBatchPayload;\n\n const serialized: string[] = [];\n\n for (const item of items) {\n const bytes = serialize(item);\n serialized.push(uint8ArrayToBase64(bytes));\n }\n\n return { serialized };\n});\n\n// ============ Handler: deserialize ============\n\nregisterHandler('deserialize', (payload: unknown): DeserializeBatchResult => {\n const { items } = payload as DeserializeBatchPayload;\n\n const deserialized: unknown[] = [];\n\n for (const item of items) {\n const bytes = base64ToUint8Array(item);\n deserialized.push(deserialize(bytes));\n }\n\n return { deserialized };\n});\n","/**\n * Test Worker Script\n * Used for testing WorkerPool functionality\n */\n\nimport { registerHandler } from './base.worker';\n\n// Simple echo handler\nregisterHandler('echo', (payload: unknown) => {\n return payload;\n});\n\n// Delayed echo (simulates CPU work)\nregisterHandler('delayed-echo', async (payload: unknown) => {\n const { data, delay } = payload as { data: unknown; delay: number };\n await new Promise((resolve) => setTimeout(resolve, delay));\n return data;\n});\n\n// Handler that throws an error\nregisterHandler('throw-error', (payload: unknown) => {\n const { message } = payload as { message: string };\n throw new Error(message);\n});\n\n// CPU-intensive work simulation\nregisterHandler('cpu-work', (payload: unknown) => {\n const { iterations } = payload as { iterations: number };\n let result = 0;\n for (let i = 0; i < iterations; i++) {\n result += Math.sqrt(i);\n }\n return result;\n});\n\n// Handler that returns undefined\nregisterHandler('return-undefined', () => {\n return undefined;\n});\n\n// Handler that returns null\nregisterHandler('return-null', () => {\n return 
null;\n});\n","/**\n * Base Worker Script\n * Worker Threads Implementation\n *\n * Main worker entry point that handles all task types.\n * Imports specialized workers to register their handlers.\n */\n\nimport { parentPort } from 'worker_threads';\n\ninterface TaskMessage {\n id: string;\n type: string;\n payload: unknown;\n}\n\ninterface TaskResponse {\n id: string;\n success: boolean;\n result?: unknown;\n error?: string;\n}\n\ntype TaskHandler = (payload: unknown) => unknown | Promise<unknown>;\n\n// Handler registry\nconst handlers = new Map<string, TaskHandler>();\n\n/**\n * Register a handler for a specific task type\n */\nexport function registerHandler(type: string, handler: TaskHandler): void {\n handlers.set(type, handler);\n}\n\n/**\n * Unregister a handler\n */\nexport function unregisterHandler(type: string): void {\n handlers.delete(type);\n}\n\n/**\n * Check if a handler is registered\n */\nexport function hasHandler(type: string): boolean {\n return handlers.has(type);\n}\n\n// Message loop\nif (parentPort) {\n parentPort.on('message', async (task: TaskMessage) => {\n const { id, type, payload } = task;\n\n const response: TaskResponse = {\n id,\n success: false,\n };\n\n try {\n const handler = handlers.get(type);\n\n if (!handler) {\n throw new Error(`Unknown task type: ${type}`);\n }\n\n const result = await handler(payload);\n response.success = true;\n response.result = result;\n } catch (error) {\n response.success = false;\n response.error =\n error instanceof Error ? 
error.message : String(error);\n }\n\n parentPort!.postMessage(response);\n });\n\n // Signal ready (optional, for debugging)\n // parentPort.postMessage({ type: 'ready' });\n}\n\n// Export for testing\nexport { handlers };\n\n// Load specialized workers to register their handlers\n// Using require() to ensure side effects are executed (not tree-shaken)\n// Each specialized worker calls registerHandler() when loaded\n/* eslint-disable @typescript-eslint/no-require-imports */\nrequire('./crdt.worker');\nrequire('./merkle.worker');\nrequire('./serialization.worker');\nrequire('./test.worker');\n","/**\n * CRDT Merge Worker Script\n * CRDTMergeWorker Implementation\n *\n * Handles CPU-intensive CRDT merge operations:\n * - lww-merge: Merge LWWMap records (Last-Write-Wins)\n * - ormap-merge: Merge ORMap items and tombstones\n */\n\nimport { registerHandler } from './base.worker';\nimport type {\n LWWMergePayload,\n LWWMergeResult,\n ORMapMergePayload,\n ORMapMergeResult,\n} from '../crdt-types';\n\n// ============ Timestamp Comparison ============\n\ninterface Timestamp {\n millis: number;\n counter: number;\n nodeId: string;\n}\n\n/**\n * Compare two timestamps (same logic as HLC.compare)\n * Returns:\n * < 0 if a < b\n * > 0 if a > b\n * = 0 if a === b\n */\nfunction compareTimestamps(a: Timestamp, b: Timestamp): number {\n if (a.millis !== b.millis) {\n return a.millis - b.millis;\n }\n if (a.counter !== b.counter) {\n return a.counter - b.counter;\n }\n return a.nodeId.localeCompare(b.nodeId);\n}\n\n// ============ Handler: lww-merge ============\n\nregisterHandler('lww-merge', (payload: unknown): LWWMergeResult => {\n const { records, existingState } = payload as LWWMergePayload;\n\n // Build existing state map for O(1) lookup\n const existingMap = new Map<string, {\n value: unknown;\n timestamp: Timestamp;\n ttlMs?: number;\n }>();\n\n for (const existing of existingState) {\n existingMap.set(existing.key, {\n value: existing.value,\n timestamp: 
existing.timestamp,\n ttlMs: existing.ttlMs,\n });\n }\n\n const toApply: LWWMergeResult['toApply'] = [];\n const conflicts: string[] = [];\n let skipped = 0;\n\n for (const record of records) {\n const existing = existingMap.get(record.key);\n\n if (!existing) {\n // No existing record - apply new one\n toApply.push({\n key: record.key,\n value: record.value,\n timestamp: record.timestamp,\n ttlMs: record.ttlMs,\n });\n // Update existingMap for subsequent records in batch\n existingMap.set(record.key, {\n value: record.value,\n timestamp: record.timestamp,\n ttlMs: record.ttlMs,\n });\n continue;\n }\n\n const cmp = compareTimestamps(record.timestamp, existing.timestamp);\n\n // Detect conflict: same millis but different counter/nodeId (concurrent writes)\n const isConflict = record.timestamp.millis === existing.timestamp.millis &&\n (record.timestamp.counter !== existing.timestamp.counter ||\n record.timestamp.nodeId !== existing.timestamp.nodeId);\n\n if (cmp > 0) {\n // New record is newer - apply it\n toApply.push({\n key: record.key,\n value: record.value,\n timestamp: record.timestamp,\n ttlMs: record.ttlMs,\n });\n // Update for subsequent records\n existingMap.set(record.key, {\n value: record.value,\n timestamp: record.timestamp,\n ttlMs: record.ttlMs,\n });\n if (isConflict) {\n conflicts.push(record.key);\n }\n } else if (cmp === 0) {\n // Same timestamp - this is a conflict\n conflicts.push(record.key);\n skipped++;\n } else {\n // New record is older - skip\n if (isConflict) {\n conflicts.push(record.key);\n }\n skipped++;\n }\n }\n\n return {\n toApply,\n skipped,\n conflicts,\n };\n});\n\n// ============ Handler: ormap-merge ============\n\nregisterHandler('ormap-merge', (payload: unknown): ORMapMergeResult => {\n const {\n items,\n tombstones,\n existingTags,\n existingTombstones,\n } = payload as ORMapMergePayload;\n\n // Build sets for O(1) lookup\n const tagSet = new Set(existingTags);\n const tombstoneSet = new Set(existingTombstones);\n\n 
const itemsToApply: ORMapMergeResult['itemsToApply'] = [];\n const tombstonesToApply: string[] = [];\n const tagsToRemove: string[] = [];\n let itemsSkipped = 0;\n let tombstonesSkipped = 0;\n\n // Process tombstones first (they take precedence)\n for (const tombstone of tombstones) {\n if (tombstoneSet.has(tombstone.tag)) {\n // Already have this tombstone\n tombstonesSkipped++;\n continue;\n }\n\n // New tombstone - should be applied\n tombstonesToApply.push(tombstone.tag);\n tombstoneSet.add(tombstone.tag);\n\n // If this tag exists in items, mark for removal\n if (tagSet.has(tombstone.tag)) {\n tagsToRemove.push(tombstone.tag);\n tagSet.delete(tombstone.tag);\n }\n }\n\n // Process items\n for (const item of items) {\n // Check if tag is tombstoned\n if (tombstoneSet.has(item.tag)) {\n itemsSkipped++;\n continue;\n }\n\n // Check if tag already exists\n if (tagSet.has(item.tag)) {\n // Tag already exists - OR-Map semantics: same tag = same item\n // We could update value if timestamp is newer, but in pure ORMap\n // the tag is unique and immutable\n itemsSkipped++;\n continue;\n }\n\n // New item - apply it\n itemsToApply.push({\n key: item.key,\n value: item.value,\n timestamp: item.timestamp,\n tag: item.tag,\n ttlMs: item.ttlMs,\n });\n tagSet.add(item.tag);\n }\n\n return {\n itemsToApply,\n tombstonesToApply,\n tagsToRemove,\n itemsSkipped,\n tombstonesSkipped,\n 
};\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAAA;AA+BA,SAAS,WAAW,KAAqB;AACvC,MAAI,OAAO;AACX,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK;AACnC,YAAQ,IAAI,WAAW,CAAC;AACxB,WAAO,KAAK,KAAK,MAAM,QAAU;AAAA,EACnC;AACA,SAAO,SAAS;AAClB;AAMA,SAAS,gBACP,KACA,QACA,SACA,QACQ;AACR,SAAO,WAAW,GAAG,GAAG,IAAI,MAAM,IAAI,OAAO,IAAI,MAAM,EAAE;AAC3D;AAMA,SAAS,sBACP,KACA,SACQ;AAER,QAAM,gBAAgB,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,IAAI,cAAc,EAAE,GAAG,CAAC;AAE5E,MAAI,cAAc;AAClB,aAAW,UAAU,eAAe;AAClC,mBAAe,IAAI,OAAO,GAAG,IAAI,OAAO,UAAU,MAAM,IAAI,OAAO,UAAU,OAAO,IAAI,OAAO,UAAU,MAAM;AAAA,EACjH;AAEA,SAAO,WAAW,WAAW;AAC/B;AAaA,SAAS,gBACP,SACA,OAC8E;AAC9E,QAAM,OAAmB,EAAE,MAAM,GAAG,UAAU,CAAC,EAAE;AACjD,QAAM,UAAU,oBAAI,IAA8C;AAElE,aAAW,EAAE,KAAK,MAAM,SAAS,KAAK,SAAS;AAC7C,UAAM,WAAW,WAAW,GAAG,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC7D,eAAW,MAAM,KAAK,UAAU,UAAU,GAAG,KAAK;AAAA,EACpD;AAGA,iBAAe,MAAM,IAAI,OAAO,OAAO;AAEvC,SAAO,EAAE,MAAM,QAAQ;AACzB;AAEA,SAAS,WACP,MACA,KACA,UACA,UACA,OACA,OACQ;AAER,MAAI,SAAS,OAAO;AAClB,QAAI,CAAC,KAAK,QAAS,MAAK,UAAU,oBAAI,IAAI;AAC1C,SAAK,QAAQ,IAAI,KAAK,QAAQ;AAG9B,QAAIA,KAAI;AACR,eAAW,OAAO,KAAK,QAAQ,OAAO,GAAG;AACvC,MAAAA,KAAKA,KAAI,MAAO;AAAA,IAClB;AACA,SAAK,OAAOA,OAAM;AAClB,WAAO,KAAK;AAAA,EACd;AAGA,QAAM,aAAa,SAAS,KAAK;AACjC,MAAI,CAAC,KAAK,SAAU,MAAK,WAAW,CAAC;AAErC,MAAI,CAAC,KAAK,SAAS,UAAU,GAAG;AAC9B,SAAK,SAAS,UAAU,IAAI,EAAE,MAAM,EAAE;AAAA,EACxC;AAEA,aAAW,KAAK,SAAS,UAAU,GAAG,KAAK,UAAU,UAAU,QAAQ,GAAG,KAAK;AAG/E,MAAI,IAAI;AACR,aAAW,SAAS,OAAO,OAAO,KAAK,QAAQ,GAAG;AAChD,QAAK,IAAI,MAAM,OAAQ;AAAA,EACzB;AACA,OAAK,OAAO,MAAM;AAClB,SAAO,KAAK;AACd;AAEA,SAAS,eACP,MACA,MACA,OACA,SACM;AACN,MAAI,KAAK,UAAU,OAAO;AAExB,QAAI,KAAK,WAAW,KAAK,QAAQ,OAAO,GAAG;AACzC,cAAQ,IAAI,MAAM;AAAA,QAChB,MAAM,KAAK;AAAA,QACX,MAAM,MAAM,KAAK,KAAK,QAAQ,KAAK,CAAC;AAAA,MACtC,CAAC;AAAA,IACH;AACA;AAAA,EACF;AAEA,MAAI,KAAK,UAAU;AACjB,eAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,KAAK,QAAQ,GAAG;AACzD,qBAAe,OAAO,OAAO,MAAM,OAAO,OAAO;AAAA,IACnD;AAAA,EACF;AACF;AApKA;AAAA;AAAA;AAWA;AA6JA,oBAAgB,eAAe,CAAC,YAAuC;AACrE,YAAM,EAAE,SAAS,QAAQ,EAAE,IAAI;AAG/B,Y
AAM,cAAoD,CAAC;AAC3D,YAAM,SAAkC,CAAC;AAEzC,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAW;AAAA,UACf,MAAM;AAAA,UACN,MAAM,UAAU;AAAA,UAChB,MAAM,UAAU;AAAA,UAChB,MAAM,UAAU;AAAA,QAClB;AACA,oBAAY,KAAK,EAAE,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACnD,eAAO,KAAK,CAAC,MAAM,KAAK,QAAQ,CAAC;AAAA,MACnC;AAGA,YAAM,EAAE,MAAM,QAAQ,IAAI,gBAAgB,aAAa,KAAK;AAE5D,aAAO;AAAA,QACL;AAAA,QACA,UAAU,KAAK;AAAA,QACf,SAAS,MAAM,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACvC;AAAA,IACF,CAAC;AAID,oBAAgB,qBAAqB,CAAC,YAA4C;AAChF,YAAM,EAAE,SAAS,QAAQ,EAAE,IAAI;AAG/B,YAAM,cAAoD,CAAC;AAC3D,YAAM,SAAkC,CAAC;AAEzC,iBAAW,SAAS,SAAS;AAC3B,cAAM,YAAY,sBAAsB,MAAM,KAAK,MAAM,OAAO;AAChE,oBAAY,KAAK,EAAE,KAAK,MAAM,KAAK,MAAM,UAAU,CAAC;AACpD,eAAO,KAAK,CAAC,MAAM,KAAK,SAAS,CAAC;AAAA,MACpC;AAGA,YAAM,EAAE,MAAM,QAAQ,IAAI,gBAAgB,aAAa,KAAK;AAE5D,aAAO;AAAA,QACL;AAAA,QACA,UAAU,KAAK;AAAA,QACf,SAAS,MAAM,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACvC;AAAA,IACF,CAAC;AAID,oBAAgB,eAAe,CAAC,YAAuC;AACrE,YAAM,EAAE,cAAc,cAAc,IAAI;AAExC,YAAM,WAAW,IAAI,IAAwB,YAAY;AACzD,YAAM,YAAY,IAAI,IAAwB,aAAa;AAE3D,YAAM,eAAyB,CAAC;AAChC,YAAM,gBAA0B,CAAC;AACjC,YAAM,iBAA2B,CAAC;AAGlC,iBAAW,CAAC,MAAM,YAAY,KAAK,WAAW;AAC5C,cAAM,cAAc,SAAS,IAAI,IAAI;AAErC,YAAI,CAAC,aAAa;AAEhB,uBAAa,KAAK,GAAG,aAAa,IAAI;AAAA,QACxC,WAAW,YAAY,SAAS,aAAa,MAAM;AAEjD,yBAAe,KAAK,IAAI;AAGxB,gBAAM,YAAY,IAAI,IAAI,YAAY,IAAI;AAC1C,gBAAM,aAAa,IAAI,IAAI,aAAa,IAAI;AAE5C,qBAAW,OAAO,YAAY;AAC5B,gBAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACvB,2BAAa,KAAK,GAAG;AAAA,YACvB;AAAA,UACF;AAEA,qBAAW,OAAO,WAAW;AAC3B,gBAAI,CAAC,WAAW,IAAI,GAAG,GAAG;AACxB,4BAAc,KAAK,GAAG;AAAA,YACxB;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,CAAC,MAAM,WAAW,KAAK,UAAU;AAC1C,YAAI,CAAC,UAAU,IAAI,IAAI,GAAG;AACxB,wBAAc,KAAK,GAAG,YAAY,IAAI;AAAA,QACxC;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAID,oBAAgB,kBAAkB,CAAC,YAA0C;AAC3E,YAAM,EAAE,SAAS,QAAQ,EAAE,IAAI;AAG/B,YAAM,cAAoD,CAAC;AAE3D,iBAAW,UAAU,SAAS;AAC5B,cAAM,WAAW;AAAA,UACf,OAAO;AAAA,UACP,OAAO,UAAU;AAAA,UACjB,OAAO,UAAU;AAAA,UACjB,OAAO,UAAU;AAAA,QACnB;AACA,oBAAY,KAAK,EAAE,KAAK,OAAO,KAAK,MAAM,SAAS,CAAC;AAAA,MACtD;AAGA,YAAM,EAAE
,MAAM,QAAQ,IAAI,gBAAgB,aAAa,KAAK;AAE5D,aAAO;AAAA,QACL,UAAU,KAAK;AAAA,QACf,SAAS,MAAM,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACvC;AAAA,IACF,CAAC;AAID,oBAAgB,wBAAwB,CAAC,YAA0C;AACjF,YAAM,EAAE,SAAS,QAAQ,EAAE,IAAI;AAG/B,YAAM,cAAoD,CAAC;AAE3D,iBAAW,UAAU,SAAS;AAC5B,cAAM,YAAY,sBAAsB,OAAO,KAAK,OAAO,IAAI;AAC/D,oBAAY,KAAK,EAAE,KAAK,OAAO,KAAK,MAAM,UAAU,CAAC;AAAA,MACvD;AAGA,YAAM,EAAE,MAAM,QAAQ,IAAI,gBAAgB,aAAa,KAAK;AAE5D,aAAO;AAAA,QACL,UAAU,KAAK;AAAA,QACf,SAAS,MAAM,KAAK,QAAQ,QAAQ,CAAC;AAAA,MACvC;AAAA,IACF,CAAC;AAAA;AAAA;;;ACnUD;AAyBA,SAAS,mBAAmB,OAA2B;AACrD,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAU,OAAO,aAAa,MAAM,CAAC,CAAC;AAAA,EACxC;AACA,SAAO,KAAK,MAAM;AACpB;AAKA,SAAS,mBAAmB,QAA4B;AACtD,QAAM,SAAS,KAAK,MAAM;AAC1B,QAAM,QAAQ,IAAI,WAAW,OAAO,MAAM;AAC1C,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,CAAC,IAAI,OAAO,WAAW,CAAC;AAAA,EAChC;AACA,SAAO;AACT;AA3CA,IAYA;AAZA;AAAA;AAAA;AAWA;AACA,kBAAuC;AAmCvC,oBAAgB,aAAa,CAAC,YAA2C;AACvE,YAAM,EAAE,MAAM,IAAI;AAElB,YAAM,aAAuB,CAAC;AAE9B,iBAAW,QAAQ,OAAO;AACxB,cAAM,YAAQ,uBAAU,IAAI;AAC5B,mBAAW,KAAK,mBAAmB,KAAK,CAAC;AAAA,MAC3C;AAEA,aAAO,EAAE,WAAW;AAAA,IACtB,CAAC;AAID,oBAAgB,eAAe,CAAC,YAA6C;AAC3E,YAAM,EAAE,MAAM,IAAI;AAElB,YAAM,eAA0B,CAAC;AAEjC,iBAAW,QAAQ,OAAO;AACxB,cAAM,QAAQ,mBAAmB,IAAI;AACrC,qBAAa,SAAK,yBAAY,KAAK,CAAC;AAAA,MACtC;AAEA,aAAO,EAAE,aAAa;AAAA,IACxB,CAAC;AAAA;AAAA;;;ACzED;AAAA;AAAA;AAAA;AAKA;AAGA,oBAAgB,QAAQ,CAAC,YAAqB;AAC5C,aAAO;AAAA,IACT,CAAC;AAGD,oBAAgB,gBAAgB,OAAO,YAAqB;AAC1D,YAAM,EAAE,MAAM,MAAM,IAAI;AACxB,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,KAAK,CAAC;AACzD,aAAO;AAAA,IACT,CAAC;AAGD,oBAAgB,eAAe,CAAC,YAAqB;AACnD,YAAM,EAAE,QAAQ,IAAI;AACpB,YAAM,IAAI,MAAM,OAAO;AAAA,IACzB,CAAC;AAGD,oBAAgB,YAAY,CAAC,YAAqB;AAChD,YAAM,EAAE,WAAW,IAAI;AACvB,UAAI,SAAS;AACb,eAAS,IAAI,GAAG,IAAI,YAAY,KAAK;AACnC,kBAAU,KAAK,KAAK,CAAC;AAAA,MACvB;AACA,aAAO;AAAA,IACT,CAAC;AAGD,oBAAgB,oBAAoB,MAAM;AACxC,aAAO;AAAA,IACT,CAAC;AAGD,oBAAgB,eAAe,MAAM;AACnC,aAAO;AAAA,IACT,CAAC;AAAA;AAAA;;;ACZM,SAAS,gBAAgB,MAAc,SAA4B;AACxE,WAAS,IAAI,MAAM,OAAO;AAC5B;AAjCA,IAQA,uBAkBM;AA1BN;AAAA;A
AAA;AAQA,4BAA2B;AAkB3B,IAAM,WAAW,oBAAI,IAAyB;AAwB9C,QAAI,kCAAY;AACd,uCAAW,GAAG,WAAW,OAAO,SAAsB;AACpD,cAAM,EAAE,IAAI,MAAM,QAAQ,IAAI;AAE9B,cAAM,WAAyB;AAAA,UAC7B;AAAA,UACA,SAAS;AAAA,QACX;AAEA,YAAI;AACF,gBAAM,UAAU,SAAS,IAAI,IAAI;AAEjC,cAAI,CAAC,SAAS;AACZ,kBAAM,IAAI,MAAM,sBAAsB,IAAI,EAAE;AAAA,UAC9C;AAEA,gBAAM,SAAS,MAAM,QAAQ,OAAO;AACpC,mBAAS,UAAU;AACnB,mBAAS,SAAS;AAAA,QACpB,SAAS,OAAO;AACd,mBAAS,UAAU;AACnB,mBAAS,QACP,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QACzD;AAEA,yCAAY,YAAY,QAAQ;AAAA,MAClC,CAAC;AAAA,IAIH;AASA;AACA;AACA;AACA;AAAA;AAAA;;;AC5FA;AAgCA,SAAS,kBAAkB,GAAc,GAAsB;AAC7D,MAAI,EAAE,WAAW,EAAE,QAAQ;AACzB,WAAO,EAAE,SAAS,EAAE;AAAA,EACtB;AACA,MAAI,EAAE,YAAY,EAAE,SAAS;AAC3B,WAAO,EAAE,UAAU,EAAE;AAAA,EACvB;AACA,SAAO,EAAE,OAAO,cAAc,EAAE,MAAM;AACxC;AAxCA;AAAA;AASA;AAmCA,oBAAgB,aAAa,CAAC,YAAqC;AACjE,YAAM,EAAE,SAAS,cAAc,IAAI;AAGnC,YAAM,cAAc,oBAAI,IAIrB;AAEH,iBAAW,YAAY,eAAe;AACpC,oBAAY,IAAI,SAAS,KAAK;AAAA,UAC5B,OAAO,SAAS;AAAA,UAChB,WAAW,SAAS;AAAA,UACpB,OAAO,SAAS;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,YAAM,UAAqC,CAAC;AAC5C,YAAM,YAAsB,CAAC;AAC7B,UAAI,UAAU;AAEd,iBAAW,UAAU,SAAS;AAC5B,cAAM,WAAW,YAAY,IAAI,OAAO,GAAG;AAE3C,YAAI,CAAC,UAAU;AAEb,kBAAQ,KAAK;AAAA,YACX,KAAK,OAAO;AAAA,YACZ,OAAO,OAAO;AAAA,YACd,WAAW,OAAO;AAAA,YAClB,OAAO,OAAO;AAAA,UAChB,CAAC;AAED,sBAAY,IAAI,OAAO,KAAK;AAAA,YAC1B,OAAO,OAAO;AAAA,YACd,WAAW,OAAO;AAAA,YAClB,OAAO,OAAO;AAAA,UAChB,CAAC;AACD;AAAA,QACF;AAEA,cAAM,MAAM,kBAAkB,OAAO,WAAW,SAAS,SAAS;AAGlE,cAAM,aAAa,OAAO,UAAU,WAAW,SAAS,UAAU,WAC/D,OAAO,UAAU,YAAY,SAAS,UAAU,WAChD,OAAO,UAAU,WAAW,SAAS,UAAU;AAElD,YAAI,MAAM,GAAG;AAEX,kBAAQ,KAAK;AAAA,YACX,KAAK,OAAO;AAAA,YACZ,OAAO,OAAO;AAAA,YACd,WAAW,OAAO;AAAA,YAClB,OAAO,OAAO;AAAA,UAChB,CAAC;AAED,sBAAY,IAAI,OAAO,KAAK;AAAA,YAC1B,OAAO,OAAO;AAAA,YACd,WAAW,OAAO;AAAA,YAClB,OAAO,OAAO;AAAA,UAChB,CAAC;AACD,cAAI,YAAY;AACd,sBAAU,KAAK,OAAO,GAAG;AAAA,UAC3B;AAAA,QACF,WAAW,QAAQ,GAAG;AAEpB,oBAAU,KAAK,OAAO,GAAG;AACzB;AAAA,QACF,OAAO;AAEL,cAAI,YAAY;AACd,sBAAU,KAAK,OAAO,GAAG;AAAA,UAC3B;AACA;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAC
F,CAAC;AAID,oBAAgB,eAAe,CAAC,YAAuC;AACrE,YAAM;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,IAAI;AAGJ,YAAM,SAAS,IAAI,IAAI,YAAY;AACnC,YAAM,eAAe,IAAI,IAAI,kBAAkB;AAE/C,YAAM,eAAiD,CAAC;AACxD,YAAM,oBAA8B,CAAC;AACrC,YAAM,eAAyB,CAAC;AAChC,UAAI,eAAe;AACnB,UAAI,oBAAoB;AAGxB,iBAAW,aAAa,YAAY;AAClC,YAAI,aAAa,IAAI,UAAU,GAAG,GAAG;AAEnC;AACA;AAAA,QACF;AAGA,0BAAkB,KAAK,UAAU,GAAG;AACpC,qBAAa,IAAI,UAAU,GAAG;AAG9B,YAAI,OAAO,IAAI,UAAU,GAAG,GAAG;AAC7B,uBAAa,KAAK,UAAU,GAAG;AAC/B,iBAAO,OAAO,UAAU,GAAG;AAAA,QAC7B;AAAA,MACF;AAGA,iBAAW,QAAQ,OAAO;AAExB,YAAI,aAAa,IAAI,KAAK,GAAG,GAAG;AAC9B;AACA;AAAA,QACF;AAGA,YAAI,OAAO,IAAI,KAAK,GAAG,GAAG;AAIxB;AACA;AAAA,QACF;AAGA,qBAAa,KAAK;AAAA,UAChB,KAAK,KAAK;AAAA,UACV,OAAO,KAAK;AAAA,UACZ,WAAW,KAAK;AAAA,UAChB,KAAK,KAAK;AAAA,UACV,OAAO,KAAK;AAAA,QACd,CAAC;AACD,eAAO,IAAI,KAAK,GAAG;AAAA,MACrB;AAEA,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAAA;AAAA;","names":["h"]}
|