@liorandb/core 1.1.0 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-2FSI7HX7.js +1689 -0
- package/dist/index.d.ts +21 -11
- package/dist/index.js +5 -1828
- package/dist/queue-YILKSUEI.js +179 -0
- package/package.json +1 -1
- package/src/LioranManager.ts +99 -44
- package/src/core/collection.ts +39 -8
- package/src/core/database.ts +58 -33
- package/src/core/wal.ts +42 -13
- package/src/ipc/index.ts +41 -19
- package/src/ipc/pool.ts +136 -0
- package/src/ipc/queue.ts +85 -31
- package/src/ipc/worker.ts +72 -0
- package/src/ipc/client.ts +0 -85
- package/src/ipc/server.ts +0 -147
- package/src/ipc/socketPath.ts +0 -10
package/dist/index.js
CHANGED
|
@@ -1,1831 +1,8 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
// src/core/database.ts
|
|
7
|
-
import path6 from "path";
|
|
8
|
-
import fs6 from "fs";
|
|
9
|
-
|
|
10
|
-
// src/core/collection.ts
|
|
11
|
-
import { ClassicLevel as ClassicLevel3 } from "classic-level";
|
|
12
|
-
|
|
13
|
-
// src/core/query.ts
|
|
14
|
-
function getByPath(obj, path10) {
|
|
15
|
-
return path10.split(".").reduce((o, p) => o ? o[p] : void 0, obj);
|
|
16
|
-
}
|
|
17
|
-
function matchDocument(doc, query) {
|
|
18
|
-
for (const key of Object.keys(query)) {
|
|
19
|
-
const cond = query[key];
|
|
20
|
-
const val = getByPath(doc, key);
|
|
21
|
-
if (cond && typeof cond === "object" && !Array.isArray(cond)) {
|
|
22
|
-
for (const op of Object.keys(cond)) {
|
|
23
|
-
const v = cond[op];
|
|
24
|
-
if (op === "$gt" && !(val > v)) return false;
|
|
25
|
-
if (op === "$gte" && !(val >= v)) return false;
|
|
26
|
-
if (op === "$lt" && !(val < v)) return false;
|
|
27
|
-
if (op === "$lte" && !(val <= v)) return false;
|
|
28
|
-
if (op === "$ne" && val === v) return false;
|
|
29
|
-
if (op === "$eq" && val !== v) return false;
|
|
30
|
-
if (op === "$in" && (!Array.isArray(v) || !v.includes(val))) return false;
|
|
31
|
-
}
|
|
32
|
-
} else {
|
|
33
|
-
if (val !== cond) return false;
|
|
34
|
-
}
|
|
35
|
-
}
|
|
36
|
-
return true;
|
|
37
|
-
}
|
|
38
|
-
function applyUpdate(oldDoc, update) {
|
|
39
|
-
const doc = structuredClone(oldDoc);
|
|
40
|
-
if (update.$set) {
|
|
41
|
-
for (const k in update.$set) {
|
|
42
|
-
const parts = k.split(".");
|
|
43
|
-
let cur = doc;
|
|
44
|
-
for (let i = 0; i < parts.length - 1; i++) {
|
|
45
|
-
cur[parts[i]] ??= {};
|
|
46
|
-
cur = cur[parts[i]];
|
|
47
|
-
}
|
|
48
|
-
cur[parts.at(-1)] = update.$set[k];
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
if (update.$inc) {
|
|
52
|
-
for (const k in update.$inc) {
|
|
53
|
-
const val = getByPath(doc, k) ?? 0;
|
|
54
|
-
const parts = k.split(".");
|
|
55
|
-
let cur = doc;
|
|
56
|
-
for (let i = 0; i < parts.length - 1; i++) {
|
|
57
|
-
cur[parts[i]] ??= {};
|
|
58
|
-
cur = cur[parts[i]];
|
|
59
|
-
}
|
|
60
|
-
cur[parts.at(-1)] = val + update.$inc[k];
|
|
61
|
-
}
|
|
62
|
-
}
|
|
63
|
-
const hasOp = Object.keys(update).some((k) => k.startsWith("$"));
|
|
64
|
-
if (!hasOp) {
|
|
65
|
-
return { ...doc, ...update };
|
|
66
|
-
}
|
|
67
|
-
return doc;
|
|
68
|
-
}
|
|
69
|
-
function selectIndex(query, indexes) {
|
|
70
|
-
for (const key of Object.keys(query)) {
|
|
71
|
-
if (!indexes.has(key)) continue;
|
|
72
|
-
const cond = query[key];
|
|
73
|
-
if (cond && typeof cond === "object" && !Array.isArray(cond)) {
|
|
74
|
-
return { field: key, cond };
|
|
75
|
-
}
|
|
76
|
-
return { field: key, cond: { $eq: cond } };
|
|
77
|
-
}
|
|
78
|
-
return null;
|
|
79
|
-
}
|
|
80
|
-
async function runIndexedQuery(query, indexProvider, allDocIds) {
|
|
81
|
-
const indexes = indexProvider.indexes;
|
|
82
|
-
if (!indexes?.size) {
|
|
83
|
-
return new Set(await allDocIds());
|
|
84
|
-
}
|
|
85
|
-
const sel = selectIndex(query, indexes);
|
|
86
|
-
if (!sel) {
|
|
87
|
-
return new Set(await allDocIds());
|
|
88
|
-
}
|
|
89
|
-
const { field, cond } = sel;
|
|
90
|
-
if ("$eq" in cond) {
|
|
91
|
-
return await indexProvider.findByIndex(field, cond.$eq) ?? new Set(await allDocIds());
|
|
92
|
-
}
|
|
93
|
-
if ("$in" in cond) {
|
|
94
|
-
const out = /* @__PURE__ */ new Set();
|
|
95
|
-
for (const v of cond.$in) {
|
|
96
|
-
const r = await indexProvider.findByIndex(field, v);
|
|
97
|
-
if (r) for (const id of r) out.add(id);
|
|
98
|
-
}
|
|
99
|
-
return out;
|
|
100
|
-
}
|
|
101
|
-
if (indexProvider.rangeByIndex && ("$gt" in cond || "$gte" in cond || "$lt" in cond || "$lte" in cond)) {
|
|
102
|
-
return await indexProvider.rangeByIndex(field, cond) ?? new Set(await allDocIds());
|
|
103
|
-
}
|
|
104
|
-
return new Set(await allDocIds());
|
|
105
|
-
}
|
|
106
|
-
|
|
107
|
-
// src/core/collection.ts
|
|
108
|
-
import { v4 as uuid } from "uuid";
|
|
109
|
-
|
|
110
|
-
// src/utils/encryption.ts
|
|
111
|
-
import crypto2 from "crypto";
|
|
112
|
-
|
|
113
|
-
// src/utils/secureKey.ts
|
|
114
|
-
import crypto from "crypto";
|
|
115
|
-
import os from "os";
|
|
116
|
-
function getMasterKey() {
|
|
117
|
-
const fingerprint = [
|
|
118
|
-
os.hostname(),
|
|
119
|
-
os.platform(),
|
|
120
|
-
os.arch(),
|
|
121
|
-
os.cpus()?.[0]?.model ?? "unknown",
|
|
122
|
-
os.cpus()?.length ?? 0,
|
|
123
|
-
os.totalmem()
|
|
124
|
-
].join("|");
|
|
125
|
-
return crypto.createHash("sha256").update(fingerprint).digest();
|
|
126
|
-
}
|
|
127
|
-
|
|
128
|
-
// src/utils/encryption.ts
|
|
129
|
-
var algorithm = "aes-256-gcm";
|
|
130
|
-
var ACTIVE_KEY = getMasterKey();
|
|
131
|
-
function setEncryptionKey(key) {
|
|
132
|
-
if (!key) return;
|
|
133
|
-
if (typeof key === "string") {
|
|
134
|
-
ACTIVE_KEY = crypto2.createHash("sha256").update(key).digest();
|
|
135
|
-
return;
|
|
136
|
-
}
|
|
137
|
-
if (Buffer.isBuffer(key)) {
|
|
138
|
-
if (key.length !== 32) {
|
|
139
|
-
throw new Error("Encryption key must be 32 bytes");
|
|
140
|
-
}
|
|
141
|
-
ACTIVE_KEY = key;
|
|
142
|
-
return;
|
|
143
|
-
}
|
|
144
|
-
throw new Error("Invalid encryption key format");
|
|
145
|
-
}
|
|
146
|
-
function encryptData(obj) {
|
|
147
|
-
const iv = crypto2.randomBytes(16);
|
|
148
|
-
const json = JSON.stringify(obj);
|
|
149
|
-
if (json.length > 5e6) {
|
|
150
|
-
throw new Error("Document too large (>5MB)");
|
|
151
|
-
}
|
|
152
|
-
const data = Buffer.from(json, "utf8");
|
|
153
|
-
const cipher = crypto2.createCipheriv(algorithm, ACTIVE_KEY, iv);
|
|
154
|
-
const encrypted = Buffer.concat([cipher.update(data), cipher.final()]);
|
|
155
|
-
const tag = cipher.getAuthTag();
|
|
156
|
-
return Buffer.concat([iv, tag, encrypted]).toString("base64");
|
|
157
|
-
}
|
|
158
|
-
function decryptData(enc) {
|
|
159
|
-
const buf = Buffer.from(enc, "base64");
|
|
160
|
-
const iv = buf.subarray(0, 16);
|
|
161
|
-
const tag = buf.subarray(16, 32);
|
|
162
|
-
const encrypted = buf.subarray(32);
|
|
163
|
-
const decipher = crypto2.createDecipheriv(algorithm, ACTIVE_KEY, iv);
|
|
164
|
-
decipher.setAuthTag(tag);
|
|
165
|
-
const decrypted = Buffer.concat([
|
|
166
|
-
decipher.update(encrypted),
|
|
167
|
-
decipher.final()
|
|
168
|
-
]);
|
|
169
|
-
return JSON.parse(decrypted.toString("utf8"));
|
|
170
|
-
}
|
|
171
|
-
|
|
172
|
-
// src/utils/schema.ts
|
|
173
|
-
function validateSchema(schema, data) {
|
|
174
|
-
const result = schema.safeParse(data);
|
|
175
|
-
if (!result.success) {
|
|
176
|
-
throw new Error(
|
|
177
|
-
"Schema validation failed:\n" + JSON.stringify(result.error.format(), null, 2)
|
|
178
|
-
);
|
|
179
|
-
}
|
|
180
|
-
return result.data;
|
|
181
|
-
}
|
|
182
|
-
|
|
183
|
-
// src/core/compaction.ts
|
|
184
|
-
import fs2 from "fs";
|
|
185
|
-
import path2 from "path";
|
|
186
|
-
import { ClassicLevel as ClassicLevel2 } from "classic-level";
|
|
187
|
-
|
|
188
|
-
// src/core/index.ts
|
|
189
|
-
import path from "path";
|
|
190
|
-
import fs from "fs";
|
|
191
|
-
import { ClassicLevel } from "classic-level";
|
|
192
|
-
var Index = class {
|
|
193
|
-
field;
|
|
194
|
-
unique;
|
|
195
|
-
dir;
|
|
196
|
-
db;
|
|
197
|
-
constructor(baseDir, field, options = {}) {
|
|
198
|
-
this.field = field;
|
|
199
|
-
this.unique = !!options.unique;
|
|
200
|
-
this.dir = path.join(baseDir, "__indexes", field + ".idx");
|
|
201
|
-
fs.mkdirSync(this.dir, { recursive: true });
|
|
202
|
-
this.db = new ClassicLevel(this.dir, { valueEncoding: "utf8" });
|
|
203
|
-
}
|
|
204
|
-
/* ------------------------- INTERNAL ------------------------- */
|
|
205
|
-
normalizeKey(value) {
|
|
206
|
-
if (value === null || value === void 0) return "__null__";
|
|
207
|
-
if (typeof value === "object") {
|
|
208
|
-
return JSON.stringify(value);
|
|
209
|
-
}
|
|
210
|
-
return String(value);
|
|
211
|
-
}
|
|
212
|
-
async getRaw(key) {
|
|
213
|
-
try {
|
|
214
|
-
const v = await this.db.get(key);
|
|
215
|
-
if (v === void 0) return null;
|
|
216
|
-
return JSON.parse(v);
|
|
217
|
-
} catch {
|
|
218
|
-
return null;
|
|
219
|
-
}
|
|
220
|
-
}
|
|
221
|
-
async setRaw(key, value) {
|
|
222
|
-
await this.db.put(key, JSON.stringify(value));
|
|
223
|
-
}
|
|
224
|
-
async delRaw(key) {
|
|
225
|
-
try {
|
|
226
|
-
await this.db.del(key);
|
|
227
|
-
} catch {
|
|
228
|
-
}
|
|
229
|
-
}
|
|
230
|
-
/* --------------------------- API --------------------------- */
|
|
231
|
-
async insert(doc) {
|
|
232
|
-
const val = doc[this.field];
|
|
233
|
-
if (val === void 0) return;
|
|
234
|
-
const key = this.normalizeKey(val);
|
|
235
|
-
if (this.unique) {
|
|
236
|
-
const existing = await this.getRaw(key);
|
|
237
|
-
if (existing) {
|
|
238
|
-
throw new Error(
|
|
239
|
-
`Unique index violation on "${this.field}" = ${val}`
|
|
240
|
-
);
|
|
241
|
-
}
|
|
242
|
-
await this.setRaw(key, doc._id);
|
|
243
|
-
return;
|
|
244
|
-
}
|
|
245
|
-
const arr = await this.getRaw(key);
|
|
246
|
-
if (!arr) {
|
|
247
|
-
await this.setRaw(key, [doc._id]);
|
|
248
|
-
} else {
|
|
249
|
-
if (!arr.includes(doc._id)) {
|
|
250
|
-
arr.push(doc._id);
|
|
251
|
-
await this.setRaw(key, arr);
|
|
252
|
-
}
|
|
253
|
-
}
|
|
254
|
-
}
|
|
255
|
-
async delete(doc) {
|
|
256
|
-
const val = doc[this.field];
|
|
257
|
-
if (val === void 0) return;
|
|
258
|
-
const key = this.normalizeKey(val);
|
|
259
|
-
if (this.unique) {
|
|
260
|
-
await this.delRaw(key);
|
|
261
|
-
return;
|
|
262
|
-
}
|
|
263
|
-
const arr = await this.getRaw(key);
|
|
264
|
-
if (!arr) return;
|
|
265
|
-
const next = arr.filter((id) => id !== doc._id);
|
|
266
|
-
if (next.length === 0) {
|
|
267
|
-
await this.delRaw(key);
|
|
268
|
-
} else {
|
|
269
|
-
await this.setRaw(key, next);
|
|
270
|
-
}
|
|
271
|
-
}
|
|
272
|
-
async update(oldDoc, newDoc) {
|
|
273
|
-
const oldVal = oldDoc?.[this.field];
|
|
274
|
-
const newVal = newDoc?.[this.field];
|
|
275
|
-
if (oldVal === newVal) return;
|
|
276
|
-
if (oldDoc) await this.delete(oldDoc);
|
|
277
|
-
if (newDoc) await this.insert(newDoc);
|
|
278
|
-
}
|
|
279
|
-
async find(value) {
|
|
280
|
-
const key = this.normalizeKey(value);
|
|
281
|
-
const raw = await this.getRaw(key);
|
|
282
|
-
if (!raw) return [];
|
|
283
|
-
if (this.unique) return [raw];
|
|
284
|
-
return raw;
|
|
285
|
-
}
|
|
286
|
-
async close() {
|
|
287
|
-
try {
|
|
288
|
-
await this.db.close();
|
|
289
|
-
} catch {
|
|
290
|
-
}
|
|
291
|
-
}
|
|
292
|
-
};
|
|
293
|
-
|
|
294
|
-
// src/core/compaction.ts
|
|
295
|
-
var TMP_SUFFIX = "__compact_tmp";
|
|
296
|
-
var OLD_SUFFIX = "__compact_old";
|
|
297
|
-
var INDEX_DIR = "__indexes";
|
|
298
|
-
async function compactCollectionEngine(col) {
|
|
299
|
-
const baseDir = col.dir;
|
|
300
|
-
const tmpDir = baseDir + TMP_SUFFIX;
|
|
301
|
-
const oldDir = baseDir + OLD_SUFFIX;
|
|
302
|
-
await crashRecovery(baseDir);
|
|
303
|
-
safeRemove(tmpDir);
|
|
304
|
-
safeRemove(oldDir);
|
|
305
|
-
await snapshotRebuild(col, tmpDir);
|
|
306
|
-
await atomicSwap(baseDir, tmpDir, oldDir);
|
|
307
|
-
safeRemove(oldDir);
|
|
308
|
-
await reopenCollectionDB(col);
|
|
309
|
-
await rebuildIndexes(col);
|
|
310
|
-
}
|
|
311
|
-
async function snapshotRebuild(col, tmpDir) {
|
|
312
|
-
fs2.mkdirSync(tmpDir, { recursive: true });
|
|
313
|
-
const tmpDB = new ClassicLevel2(tmpDir, {
|
|
314
|
-
valueEncoding: "utf8"
|
|
315
|
-
});
|
|
316
|
-
for await (const [key, val] of col.db.iterator()) {
|
|
317
|
-
if (val !== void 0) {
|
|
318
|
-
await tmpDB.put(key, val);
|
|
319
|
-
}
|
|
320
|
-
}
|
|
321
|
-
await tmpDB.close();
|
|
322
|
-
await col.db.close();
|
|
323
|
-
}
|
|
324
|
-
async function atomicSwap(base, tmp, old) {
|
|
325
|
-
fs2.renameSync(base, old);
|
|
326
|
-
try {
|
|
327
|
-
fs2.renameSync(tmp, base);
|
|
328
|
-
} catch (err) {
|
|
329
|
-
if (fs2.existsSync(old)) {
|
|
330
|
-
fs2.renameSync(old, base);
|
|
331
|
-
}
|
|
332
|
-
throw err;
|
|
333
|
-
}
|
|
334
|
-
}
|
|
335
|
-
async function crashRecovery(baseDir) {
|
|
336
|
-
const tmp = baseDir + TMP_SUFFIX;
|
|
337
|
-
const old = baseDir + OLD_SUFFIX;
|
|
338
|
-
const baseExists = fs2.existsSync(baseDir);
|
|
339
|
-
const tmpExists = fs2.existsSync(tmp);
|
|
340
|
-
const oldExists = fs2.existsSync(old);
|
|
341
|
-
if (tmpExists && oldExists) {
|
|
342
|
-
safeRemove(baseDir);
|
|
343
|
-
fs2.renameSync(tmp, baseDir);
|
|
344
|
-
safeRemove(old);
|
|
345
|
-
return;
|
|
346
|
-
}
|
|
347
|
-
if (!baseExists && oldExists) {
|
|
348
|
-
fs2.renameSync(old, baseDir);
|
|
349
|
-
return;
|
|
350
|
-
}
|
|
351
|
-
if (tmpExists && !oldExists) {
|
|
352
|
-
safeRemove(tmp);
|
|
353
|
-
}
|
|
354
|
-
}
|
|
355
|
-
async function reopenCollectionDB(col) {
|
|
356
|
-
col.db = new ClassicLevel2(col.dir, {
|
|
357
|
-
valueEncoding: "utf8"
|
|
358
|
-
});
|
|
359
|
-
}
|
|
360
|
-
async function rebuildIndexes(col) {
|
|
361
|
-
const indexRoot = path2.join(col.dir, INDEX_DIR);
|
|
362
|
-
const oldIndexes = new Map(col["indexes"]);
|
|
363
|
-
for (const idx of oldIndexes.values()) {
|
|
364
|
-
try {
|
|
365
|
-
await idx.close();
|
|
366
|
-
} catch {
|
|
367
|
-
}
|
|
368
|
-
}
|
|
369
|
-
safeRemove(indexRoot);
|
|
370
|
-
fs2.mkdirSync(indexRoot, { recursive: true });
|
|
371
|
-
const rebuiltIndexes = /* @__PURE__ */ new Map();
|
|
372
|
-
for (const idx of oldIndexes.values()) {
|
|
373
|
-
const rebuilt = new Index(col.dir, idx.field, {
|
|
374
|
-
unique: idx.unique
|
|
375
|
-
});
|
|
376
|
-
for await (const [, enc] of col.db.iterator()) {
|
|
377
|
-
if (!enc) continue;
|
|
378
|
-
try {
|
|
379
|
-
const doc = decryptData(enc);
|
|
380
|
-
await rebuilt.insert(doc);
|
|
381
|
-
} catch {
|
|
382
|
-
}
|
|
383
|
-
}
|
|
384
|
-
rebuiltIndexes.set(idx.field, rebuilt);
|
|
385
|
-
}
|
|
386
|
-
col["indexes"] = rebuiltIndexes;
|
|
387
|
-
}
|
|
388
|
-
function safeRemove(p) {
|
|
389
|
-
if (fs2.existsSync(p)) {
|
|
390
|
-
fs2.rmSync(p, { recursive: true, force: true });
|
|
391
|
-
}
|
|
392
|
-
}
|
|
393
|
-
|
|
394
|
-
// src/core/collection.ts
|
|
395
|
-
var Collection = class {
|
|
396
|
-
dir;
|
|
397
|
-
db;
|
|
398
|
-
queue = Promise.resolve();
|
|
399
|
-
schema;
|
|
400
|
-
schemaVersion = 1;
|
|
401
|
-
migrations = [];
|
|
402
|
-
indexes = /* @__PURE__ */ new Map();
|
|
403
|
-
constructor(dir, schema, schemaVersion = 1) {
|
|
404
|
-
this.dir = dir;
|
|
405
|
-
this.db = new ClassicLevel3(dir, { valueEncoding: "utf8" });
|
|
406
|
-
this.schema = schema;
|
|
407
|
-
this.schemaVersion = schemaVersion;
|
|
408
|
-
}
|
|
409
|
-
/* ===================== SCHEMA ===================== */
|
|
410
|
-
setSchema(schema, version) {
|
|
411
|
-
this.schema = schema;
|
|
412
|
-
this.schemaVersion = version;
|
|
413
|
-
}
|
|
414
|
-
addMigration(migration) {
|
|
415
|
-
this.migrations.push(migration);
|
|
416
|
-
this.migrations.sort((a, b) => a.from - b.from);
|
|
417
|
-
}
|
|
418
|
-
validate(doc) {
|
|
419
|
-
return this.schema ? validateSchema(this.schema, doc) : doc;
|
|
420
|
-
}
|
|
421
|
-
migrateIfNeeded(doc) {
|
|
422
|
-
let currentVersion = doc.__v ?? 1;
|
|
423
|
-
if (currentVersion === this.schemaVersion) {
|
|
424
|
-
return doc;
|
|
425
|
-
}
|
|
426
|
-
let working = doc;
|
|
427
|
-
for (const migration of this.migrations) {
|
|
428
|
-
if (migration.from === currentVersion) {
|
|
429
|
-
working = migration.migrate(working);
|
|
430
|
-
currentVersion = migration.to;
|
|
431
|
-
}
|
|
432
|
-
}
|
|
433
|
-
working.__v = this.schemaVersion;
|
|
434
|
-
return this.validate(working);
|
|
435
|
-
}
|
|
436
|
-
/* ===================== QUEUE ===================== */
|
|
437
|
-
_enqueue(task) {
|
|
438
|
-
this.queue = this.queue.then(task).catch(console.error);
|
|
439
|
-
return this.queue;
|
|
440
|
-
}
|
|
441
|
-
async close() {
|
|
442
|
-
for (const idx of this.indexes.values()) {
|
|
443
|
-
try {
|
|
444
|
-
await idx.close();
|
|
445
|
-
} catch {
|
|
446
|
-
}
|
|
447
|
-
}
|
|
448
|
-
try {
|
|
449
|
-
await this.db.close();
|
|
450
|
-
} catch {
|
|
451
|
-
}
|
|
452
|
-
}
|
|
453
|
-
/* ===================== INDEX MANAGEMENT ===================== */
|
|
454
|
-
registerIndex(index) {
|
|
455
|
-
this.indexes.set(index.field, index);
|
|
456
|
-
}
|
|
457
|
-
getIndex(field) {
|
|
458
|
-
return this.indexes.get(field);
|
|
459
|
-
}
|
|
460
|
-
async _updateIndexes(oldDoc, newDoc) {
|
|
461
|
-
for (const index of this.indexes.values()) {
|
|
462
|
-
await index.update(oldDoc, newDoc);
|
|
463
|
-
}
|
|
464
|
-
}
|
|
465
|
-
/* ===================== COMPACTION ===================== */
|
|
466
|
-
async compact() {
|
|
467
|
-
return this._enqueue(async () => {
|
|
468
|
-
try {
|
|
469
|
-
await this.db.close();
|
|
470
|
-
} catch {
|
|
471
|
-
}
|
|
472
|
-
await compactCollectionEngine(this);
|
|
473
|
-
this.db = new ClassicLevel3(this.dir, { valueEncoding: "utf8" });
|
|
474
|
-
await rebuildIndexes(this);
|
|
475
|
-
});
|
|
476
|
-
}
|
|
477
|
-
/* ===================== INTERNAL EXEC ===================== */
|
|
478
|
-
async _exec(op, args) {
|
|
479
|
-
switch (op) {
|
|
480
|
-
case "insertOne":
|
|
481
|
-
return this._insertOne(args[0]);
|
|
482
|
-
case "insertMany":
|
|
483
|
-
return this._insertMany(args[0]);
|
|
484
|
-
case "find":
|
|
485
|
-
return this._find(args[0]);
|
|
486
|
-
case "findOne":
|
|
487
|
-
return this._findOne(args[0]);
|
|
488
|
-
case "updateOne":
|
|
489
|
-
return this._updateOne(args[0], args[1], args[2]);
|
|
490
|
-
case "updateMany":
|
|
491
|
-
return this._updateMany(args[0], args[1]);
|
|
492
|
-
case "deleteOne":
|
|
493
|
-
return this._deleteOne(args[0]);
|
|
494
|
-
case "deleteMany":
|
|
495
|
-
return this._deleteMany(args[0]);
|
|
496
|
-
case "countDocuments":
|
|
497
|
-
return this._countDocuments(args[0]);
|
|
498
|
-
default:
|
|
499
|
-
throw new Error(`Unknown operation: ${op}`);
|
|
500
|
-
}
|
|
501
|
-
}
|
|
502
|
-
/* ===================== STORAGE ===================== */
|
|
503
|
-
async _insertOne(doc) {
|
|
504
|
-
const _id = doc._id ?? uuid();
|
|
505
|
-
const final = this.validate({
|
|
506
|
-
_id,
|
|
507
|
-
...doc,
|
|
508
|
-
__v: this.schemaVersion
|
|
509
|
-
});
|
|
510
|
-
await this.db.put(String(_id), encryptData(final));
|
|
511
|
-
await this._updateIndexes(null, final);
|
|
512
|
-
return final;
|
|
513
|
-
}
|
|
514
|
-
async _insertMany(docs) {
|
|
515
|
-
const batch = [];
|
|
516
|
-
const out = [];
|
|
517
|
-
for (const d of docs) {
|
|
518
|
-
const _id = d._id ?? uuid();
|
|
519
|
-
const final = this.validate({
|
|
520
|
-
_id,
|
|
521
|
-
...d,
|
|
522
|
-
__v: this.schemaVersion
|
|
523
|
-
});
|
|
524
|
-
batch.push({
|
|
525
|
-
type: "put",
|
|
526
|
-
key: String(_id),
|
|
527
|
-
value: encryptData(final)
|
|
528
|
-
});
|
|
529
|
-
out.push(final);
|
|
530
|
-
}
|
|
531
|
-
await this.db.batch(batch);
|
|
532
|
-
for (const doc of out) {
|
|
533
|
-
await this._updateIndexes(null, doc);
|
|
534
|
-
}
|
|
535
|
-
return out;
|
|
536
|
-
}
|
|
537
|
-
/* ===================== QUERY ===================== */
|
|
538
|
-
async _getCandidateIds(query) {
|
|
539
|
-
const indexedFields = new Set(this.indexes.keys());
|
|
540
|
-
return runIndexedQuery(
|
|
541
|
-
query,
|
|
542
|
-
{
|
|
543
|
-
indexes: indexedFields,
|
|
544
|
-
findByIndex: async (field, value) => {
|
|
545
|
-
const idx = this.indexes.get(field);
|
|
546
|
-
if (!idx) return null;
|
|
547
|
-
return new Set(await idx.find(value));
|
|
548
|
-
}
|
|
549
|
-
},
|
|
550
|
-
async () => {
|
|
551
|
-
const ids = [];
|
|
552
|
-
for await (const [key] of this.db.iterator()) {
|
|
553
|
-
ids.push(key);
|
|
554
|
-
}
|
|
555
|
-
return ids;
|
|
556
|
-
}
|
|
557
|
-
);
|
|
558
|
-
}
|
|
559
|
-
async _readAndMigrate(id) {
|
|
560
|
-
const enc = await this.db.get(id);
|
|
561
|
-
if (!enc) return null;
|
|
562
|
-
const raw = decryptData(enc);
|
|
563
|
-
const migrated = this.migrateIfNeeded(raw);
|
|
564
|
-
if (raw.__v !== this.schemaVersion) {
|
|
565
|
-
await this.db.put(id, encryptData(migrated));
|
|
566
|
-
await this._updateIndexes(raw, migrated);
|
|
567
|
-
}
|
|
568
|
-
return migrated;
|
|
569
|
-
}
|
|
570
|
-
async _find(query) {
|
|
571
|
-
const ids = await this._getCandidateIds(query);
|
|
572
|
-
const out = [];
|
|
573
|
-
for (const id of ids) {
|
|
574
|
-
try {
|
|
575
|
-
const doc = await this._readAndMigrate(id);
|
|
576
|
-
if (doc && matchDocument(doc, query)) {
|
|
577
|
-
out.push(doc);
|
|
578
|
-
}
|
|
579
|
-
} catch {
|
|
580
|
-
}
|
|
581
|
-
}
|
|
582
|
-
return out;
|
|
583
|
-
}
|
|
584
|
-
async _findOne(query) {
|
|
585
|
-
if (query?._id) {
|
|
586
|
-
try {
|
|
587
|
-
return await this._readAndMigrate(String(query._id));
|
|
588
|
-
} catch {
|
|
589
|
-
return null;
|
|
590
|
-
}
|
|
591
|
-
}
|
|
592
|
-
const ids = await this._getCandidateIds(query);
|
|
593
|
-
for (const id of ids) {
|
|
594
|
-
try {
|
|
595
|
-
const doc = await this._readAndMigrate(id);
|
|
596
|
-
if (doc && matchDocument(doc, query)) {
|
|
597
|
-
return doc;
|
|
598
|
-
}
|
|
599
|
-
} catch {
|
|
600
|
-
}
|
|
601
|
-
}
|
|
602
|
-
return null;
|
|
603
|
-
}
|
|
604
|
-
async _countDocuments(filter) {
|
|
605
|
-
const ids = await this._getCandidateIds(filter);
|
|
606
|
-
let count = 0;
|
|
607
|
-
for (const id of ids) {
|
|
608
|
-
try {
|
|
609
|
-
const doc = await this._readAndMigrate(id);
|
|
610
|
-
if (doc && matchDocument(doc, filter)) {
|
|
611
|
-
count++;
|
|
612
|
-
}
|
|
613
|
-
} catch {
|
|
614
|
-
}
|
|
615
|
-
}
|
|
616
|
-
return count;
|
|
617
|
-
}
|
|
618
|
-
/* ===================== UPDATE ===================== */
|
|
619
|
-
async _updateOne(filter, update, options) {
|
|
620
|
-
const ids = await this._getCandidateIds(filter);
|
|
621
|
-
for (const id of ids) {
|
|
622
|
-
const existing = await this._readAndMigrate(id);
|
|
623
|
-
if (!existing) continue;
|
|
624
|
-
if (matchDocument(existing, filter)) {
|
|
625
|
-
const updated = this.validate({
|
|
626
|
-
...applyUpdate(existing, update),
|
|
627
|
-
_id: existing._id,
|
|
628
|
-
__v: this.schemaVersion
|
|
629
|
-
});
|
|
630
|
-
await this.db.put(id, encryptData(updated));
|
|
631
|
-
await this._updateIndexes(existing, updated);
|
|
632
|
-
return updated;
|
|
633
|
-
}
|
|
634
|
-
}
|
|
635
|
-
if (options?.upsert) {
|
|
636
|
-
return this._insertOne(applyUpdate({}, update));
|
|
637
|
-
}
|
|
638
|
-
return null;
|
|
639
|
-
}
|
|
640
|
-
async _updateMany(filter, update) {
|
|
641
|
-
const ids = await this._getCandidateIds(filter);
|
|
642
|
-
const out = [];
|
|
643
|
-
for (const id of ids) {
|
|
644
|
-
const existing = await this._readAndMigrate(id);
|
|
645
|
-
if (!existing) continue;
|
|
646
|
-
if (matchDocument(existing, filter)) {
|
|
647
|
-
const updated = this.validate({
|
|
648
|
-
...applyUpdate(existing, update),
|
|
649
|
-
_id: existing._id,
|
|
650
|
-
__v: this.schemaVersion
|
|
651
|
-
});
|
|
652
|
-
await this.db.put(id, encryptData(updated));
|
|
653
|
-
await this._updateIndexes(existing, updated);
|
|
654
|
-
out.push(updated);
|
|
655
|
-
}
|
|
656
|
-
}
|
|
657
|
-
return out;
|
|
658
|
-
}
|
|
659
|
-
/* ===================== DELETE ===================== */
|
|
660
|
-
async _deleteOne(filter) {
|
|
661
|
-
const ids = await this._getCandidateIds(filter);
|
|
662
|
-
for (const id of ids) {
|
|
663
|
-
const existing = await this._readAndMigrate(id);
|
|
664
|
-
if (!existing) continue;
|
|
665
|
-
if (matchDocument(existing, filter)) {
|
|
666
|
-
await this.db.del(id);
|
|
667
|
-
await this._updateIndexes(existing, null);
|
|
668
|
-
return true;
|
|
669
|
-
}
|
|
670
|
-
}
|
|
671
|
-
return false;
|
|
672
|
-
}
|
|
673
|
-
async _deleteMany(filter) {
|
|
674
|
-
const ids = await this._getCandidateIds(filter);
|
|
675
|
-
let count = 0;
|
|
676
|
-
for (const id of ids) {
|
|
677
|
-
const existing = await this._readAndMigrate(id);
|
|
678
|
-
if (!existing) continue;
|
|
679
|
-
if (matchDocument(existing, filter)) {
|
|
680
|
-
await this.db.del(id);
|
|
681
|
-
await this._updateIndexes(existing, null);
|
|
682
|
-
count++;
|
|
683
|
-
}
|
|
684
|
-
}
|
|
685
|
-
return count;
|
|
686
|
-
}
|
|
687
|
-
/* ===================== PUBLIC API ===================== */
|
|
688
|
-
insertOne(doc) {
|
|
689
|
-
return this._enqueue(() => this._exec("insertOne", [doc]));
|
|
690
|
-
}
|
|
691
|
-
insertMany(docs) {
|
|
692
|
-
return this._enqueue(() => this._exec("insertMany", [docs]));
|
|
693
|
-
}
|
|
694
|
-
find(query = {}) {
|
|
695
|
-
return this._enqueue(() => this._exec("find", [query]));
|
|
696
|
-
}
|
|
697
|
-
findOne(query = {}) {
|
|
698
|
-
return this._enqueue(() => this._exec("findOne", [query]));
|
|
699
|
-
}
|
|
700
|
-
updateOne(filter, update, options) {
|
|
701
|
-
return this._enqueue(
|
|
702
|
-
() => this._exec("updateOne", [filter, update, options])
|
|
703
|
-
);
|
|
704
|
-
}
|
|
705
|
-
updateMany(filter, update) {
|
|
706
|
-
return this._enqueue(
|
|
707
|
-
() => this._exec("updateMany", [filter, update])
|
|
708
|
-
);
|
|
709
|
-
}
|
|
710
|
-
deleteOne(filter) {
|
|
711
|
-
return this._enqueue(
|
|
712
|
-
() => this._exec("deleteOne", [filter])
|
|
713
|
-
);
|
|
714
|
-
}
|
|
715
|
-
deleteMany(filter) {
|
|
716
|
-
return this._enqueue(
|
|
717
|
-
() => this._exec("deleteMany", [filter])
|
|
718
|
-
);
|
|
719
|
-
}
|
|
720
|
-
countDocuments(filter = {}) {
|
|
721
|
-
return this._enqueue(
|
|
722
|
-
() => this._exec("countDocuments", [filter])
|
|
723
|
-
);
|
|
724
|
-
}
|
|
725
|
-
};
|
|
726
|
-
|
|
727
|
-
// src/core/migration.ts
|
|
728
|
-
import fs3 from "fs";
|
|
729
|
-
import path3 from "path";
|
|
730
|
-
import crypto3 from "crypto";
|
|
731
|
-
var LOCK_FILE = "__migration.lock";
|
|
732
|
-
var HISTORY_FILE = "__migration_history.json";
|
|
733
|
-
var MigrationEngine = class {
|
|
734
|
-
constructor(db) {
|
|
735
|
-
this.db = db;
|
|
736
|
-
}
|
|
737
|
-
migrations = /* @__PURE__ */ new Map();
|
|
738
|
-
/* ------------------------------------------------------------ */
|
|
739
|
-
/* Public API */
|
|
740
|
-
/* ------------------------------------------------------------ */
|
|
741
|
-
register(from, to, fn) {
|
|
742
|
-
const key = `${from}\u2192${to}`;
|
|
743
|
-
if (this.migrations.has(key)) {
|
|
744
|
-
throw new Error(`Duplicate migration: ${key}`);
|
|
745
|
-
}
|
|
746
|
-
this.migrations.set(key, fn);
|
|
747
|
-
}
|
|
748
|
-
async migrate(from, to, fn) {
|
|
749
|
-
this.register(from, to, fn);
|
|
750
|
-
await this.execute();
|
|
751
|
-
}
|
|
752
|
-
async upgradeToLatest() {
|
|
753
|
-
await this.execute();
|
|
754
|
-
}
|
|
755
|
-
/* ------------------------------------------------------------ */
|
|
756
|
-
/* Core Execution Logic */
|
|
757
|
-
/* ------------------------------------------------------------ */
|
|
758
|
-
async execute() {
|
|
759
|
-
let current = this.db.getSchemaVersion();
|
|
760
|
-
while (true) {
|
|
761
|
-
const next = this.findNext(current);
|
|
762
|
-
if (!next) break;
|
|
763
|
-
const fn = this.migrations.get(`${current}\u2192${next}`);
|
|
764
|
-
await this.runMigration(current, next, fn);
|
|
765
|
-
current = next;
|
|
766
|
-
}
|
|
767
|
-
}
|
|
768
|
-
findNext(current) {
|
|
769
|
-
for (const key of this.migrations.keys()) {
|
|
770
|
-
const [from, to] = key.split("\u2192");
|
|
771
|
-
if (from === current) return to;
|
|
772
|
-
}
|
|
773
|
-
return null;
|
|
774
|
-
}
|
|
775
|
-
/* ------------------------------------------------------------ */
|
|
776
|
-
/* Atomic Migration Execution */
|
|
777
|
-
/* ------------------------------------------------------------ */
|
|
778
|
-
async runMigration(from, to, fn) {
|
|
779
|
-
const current = this.db.getSchemaVersion();
|
|
780
|
-
if (current !== from) {
|
|
781
|
-
throw new Error(
|
|
782
|
-
`Schema mismatch: DB=${current}, expected=${from}`
|
|
783
|
-
);
|
|
784
|
-
}
|
|
785
|
-
const lockPath = path3.join(this.db.basePath, LOCK_FILE);
|
|
786
|
-
if (fs3.existsSync(lockPath)) {
|
|
787
|
-
throw new Error(
|
|
788
|
-
"Previous migration interrupted. Resolve manually before continuing."
|
|
789
|
-
);
|
|
790
|
-
}
|
|
791
|
-
this.acquireLock(lockPath);
|
|
792
|
-
try {
|
|
793
|
-
await this.db.transaction(async () => {
|
|
794
|
-
await fn(this.db);
|
|
795
|
-
this.writeHistory(from, to, fn);
|
|
796
|
-
this.db.setSchemaVersion(to);
|
|
797
|
-
});
|
|
798
|
-
} finally {
|
|
799
|
-
this.releaseLock(lockPath);
|
|
800
|
-
}
|
|
801
|
-
}
|
|
802
|
-
/* ------------------------------------------------------------ */
|
|
803
|
-
/* Locking */
|
|
804
|
-
/* ------------------------------------------------------------ */
|
|
805
|
-
acquireLock(file) {
|
|
806
|
-
const token = crypto3.randomBytes(16).toString("hex");
|
|
807
|
-
fs3.writeFileSync(
|
|
808
|
-
file,
|
|
809
|
-
JSON.stringify({
|
|
810
|
-
pid: process.pid,
|
|
811
|
-
token,
|
|
812
|
-
time: Date.now()
|
|
813
|
-
})
|
|
814
|
-
);
|
|
815
|
-
}
|
|
816
|
-
releaseLock(file) {
|
|
817
|
-
if (fs3.existsSync(file)) fs3.unlinkSync(file);
|
|
818
|
-
}
|
|
819
|
-
/* ------------------------------------------------------------ */
|
|
820
|
-
/* Migration History */
|
|
821
|
-
/* ------------------------------------------------------------ */
|
|
822
|
-
historyPath() {
|
|
823
|
-
return path3.join(this.db.basePath, HISTORY_FILE);
|
|
824
|
-
}
|
|
825
|
-
readHistory() {
|
|
826
|
-
if (!fs3.existsSync(this.historyPath())) return [];
|
|
827
|
-
return JSON.parse(fs3.readFileSync(this.historyPath(), "utf8"));
|
|
828
|
-
}
|
|
829
|
-
writeHistory(from, to, fn) {
|
|
830
|
-
const history = this.readHistory();
|
|
831
|
-
history.push({
|
|
832
|
-
from,
|
|
833
|
-
to,
|
|
834
|
-
checksum: this.hash(fn.toString()),
|
|
835
|
-
appliedAt: Date.now()
|
|
836
|
-
});
|
|
837
|
-
fs3.writeFileSync(this.historyPath(), JSON.stringify(history, null, 2));
|
|
838
|
-
}
|
|
839
|
-
hash(data) {
|
|
840
|
-
return crypto3.createHash("sha256").update(data).digest("hex");
|
|
841
|
-
}
|
|
842
|
-
/* ------------------------------------------------------------ */
|
|
843
|
-
/* Diagnostics */
|
|
844
|
-
/* ------------------------------------------------------------ */
|
|
845
|
-
getHistory() {
|
|
846
|
-
return this.readHistory();
|
|
847
|
-
}
|
|
848
|
-
};
|
|
849
|
-
|
|
850
|
-
// src/core/wal.ts
|
|
851
|
-
import fs4 from "fs";
|
|
852
|
-
import path4 from "path";
|
|
853
|
-
var MAX_WAL_SIZE = 16 * 1024 * 1024;
|
|
854
|
-
var WAL_DIR = "__wal";
|
|
855
|
-
var CRC32_TABLE = (() => {
|
|
856
|
-
const table = new Uint32Array(256);
|
|
857
|
-
for (let i = 0; i < 256; i++) {
|
|
858
|
-
let c = i;
|
|
859
|
-
for (let k = 0; k < 8; k++) {
|
|
860
|
-
c = c & 1 ? 3988292384 ^ c >>> 1 : c >>> 1;
|
|
861
|
-
}
|
|
862
|
-
table[i] = c >>> 0;
|
|
863
|
-
}
|
|
864
|
-
return table;
|
|
865
|
-
})();
|
|
866
|
-
function crc32(input) {
|
|
867
|
-
let crc = 4294967295;
|
|
868
|
-
for (let i = 0; i < input.length; i++) {
|
|
869
|
-
crc = CRC32_TABLE[(crc ^ input.charCodeAt(i)) & 255] ^ crc >>> 8;
|
|
870
|
-
}
|
|
871
|
-
return (crc ^ 4294967295) >>> 0;
|
|
872
|
-
}
|
|
873
|
-
var WALManager = class {
|
|
874
|
-
walDir;
|
|
875
|
-
currentGen = 1;
|
|
876
|
-
lsn = 0;
|
|
877
|
-
fd = null;
|
|
878
|
-
constructor(baseDir) {
|
|
879
|
-
this.walDir = path4.join(baseDir, WAL_DIR);
|
|
880
|
-
fs4.mkdirSync(this.walDir, { recursive: true });
|
|
881
|
-
this.currentGen = this.detectLastGeneration();
|
|
882
|
-
this.recoverLSNFromExistingLogs();
|
|
883
|
-
}
|
|
884
|
-
/* -------------------------
|
|
885
|
-
INTERNAL HELPERS
|
|
886
|
-
------------------------- */
|
|
887
|
-
walPath(gen = this.currentGen) {
|
|
888
|
-
return path4.join(
|
|
889
|
-
this.walDir,
|
|
890
|
-
`wal-${String(gen).padStart(6, "0")}.log`
|
|
891
|
-
);
|
|
892
|
-
}
|
|
893
|
-
detectLastGeneration() {
|
|
894
|
-
if (!fs4.existsSync(this.walDir)) return 1;
|
|
895
|
-
const files = fs4.readdirSync(this.walDir);
|
|
896
|
-
let max = 0;
|
|
897
|
-
for (const f of files) {
|
|
898
|
-
const m = f.match(/^wal-(\d+)\.log$/);
|
|
899
|
-
if (m) {
|
|
900
|
-
const gen = Number(m[1]);
|
|
901
|
-
if (!Number.isNaN(gen)) {
|
|
902
|
-
max = Math.max(max, gen);
|
|
903
|
-
}
|
|
904
|
-
}
|
|
905
|
-
}
|
|
906
|
-
return max || 1;
|
|
907
|
-
}
|
|
908
|
-
recoverLSNFromExistingLogs() {
|
|
909
|
-
const files = this.getSortedWalFiles();
|
|
910
|
-
for (const file of files) {
|
|
911
|
-
const filePath = path4.join(this.walDir, file);
|
|
912
|
-
const lines = fs4.readFileSync(filePath, "utf8").split("\n");
|
|
913
|
-
for (const line of lines) {
|
|
914
|
-
if (!line.trim()) continue;
|
|
915
|
-
try {
|
|
916
|
-
const parsed = JSON.parse(line);
|
|
917
|
-
const { crc, ...record } = parsed;
|
|
918
|
-
if (crc32(JSON.stringify(record)) !== crc) break;
|
|
919
|
-
this.lsn = Math.max(this.lsn, record.lsn);
|
|
920
|
-
} catch {
|
|
921
|
-
break;
|
|
922
|
-
}
|
|
923
|
-
}
|
|
924
|
-
}
|
|
925
|
-
}
|
|
926
|
-
getSortedWalFiles() {
|
|
927
|
-
return fs4.readdirSync(this.walDir).filter((f) => /^wal-\d+\.log$/.test(f)).sort((a, b) => {
|
|
928
|
-
const ga = Number(a.match(/^wal-(\d+)\.log$/)[1]);
|
|
929
|
-
const gb = Number(b.match(/^wal-(\d+)\.log$/)[1]);
|
|
930
|
-
return ga - gb;
|
|
931
|
-
});
|
|
932
|
-
}
|
|
933
|
-
async open() {
|
|
934
|
-
if (!this.fd) {
|
|
935
|
-
this.fd = await fs4.promises.open(this.walPath(), "a");
|
|
936
|
-
}
|
|
937
|
-
}
|
|
938
|
-
async rotate() {
|
|
939
|
-
if (this.fd) {
|
|
940
|
-
await this.fd.sync();
|
|
941
|
-
await this.fd.close();
|
|
942
|
-
this.fd = null;
|
|
943
|
-
}
|
|
944
|
-
this.currentGen++;
|
|
945
|
-
}
|
|
946
|
-
/* -------------------------
|
|
947
|
-
APPEND (Crash-safe)
|
|
948
|
-
------------------------- */
|
|
949
|
-
async append(record) {
|
|
950
|
-
await this.open();
|
|
951
|
-
const full = {
|
|
952
|
-
...record,
|
|
953
|
-
lsn: ++this.lsn
|
|
954
|
-
};
|
|
955
|
-
const body = JSON.stringify(full);
|
|
956
|
-
const stored = {
|
|
957
|
-
...full,
|
|
958
|
-
crc: crc32(body)
|
|
959
|
-
};
|
|
960
|
-
const line = JSON.stringify(stored) + "\n";
|
|
961
|
-
await this.fd.write(line);
|
|
962
|
-
await this.fd.sync();
|
|
963
|
-
const stat = await this.fd.stat();
|
|
964
|
-
if (stat.size >= MAX_WAL_SIZE) {
|
|
965
|
-
await this.rotate();
|
|
966
|
-
}
|
|
967
|
-
return full.lsn;
|
|
968
|
-
}
|
|
969
|
-
/* -------------------------
|
|
970
|
-
REPLAY (Auto-heal tail)
|
|
971
|
-
------------------------- */
|
|
972
|
-
async replay(fromLSN, apply) {
|
|
973
|
-
if (!fs4.existsSync(this.walDir)) return;
|
|
974
|
-
const files = this.getSortedWalFiles();
|
|
975
|
-
for (const file of files) {
|
|
976
|
-
const filePath = path4.join(this.walDir, file);
|
|
977
|
-
const fd = fs4.openSync(filePath, "r+");
|
|
978
|
-
const content = fs4.readFileSync(filePath, "utf8");
|
|
979
|
-
const lines = content.split("\n");
|
|
980
|
-
let validOffset = 0;
|
|
981
|
-
for (let i = 0; i < lines.length; i++) {
|
|
982
|
-
const line = lines[i];
|
|
983
|
-
if (!line.trim()) {
|
|
984
|
-
validOffset += line.length + 1;
|
|
985
|
-
continue;
|
|
986
|
-
}
|
|
987
|
-
let parsed;
|
|
988
|
-
try {
|
|
989
|
-
parsed = JSON.parse(line);
|
|
990
|
-
} catch {
|
|
991
|
-
break;
|
|
992
|
-
}
|
|
993
|
-
const { crc, ...record } = parsed;
|
|
994
|
-
const expected = crc32(JSON.stringify(record));
|
|
995
|
-
if (expected !== crc) {
|
|
996
|
-
break;
|
|
997
|
-
}
|
|
998
|
-
validOffset += line.length + 1;
|
|
999
|
-
if (record.lsn <= fromLSN) continue;
|
|
1000
|
-
this.lsn = Math.max(this.lsn, record.lsn);
|
|
1001
|
-
await apply(record);
|
|
1002
|
-
}
|
|
1003
|
-
const stat = fs4.fstatSync(fd);
|
|
1004
|
-
if (validOffset < stat.size) {
|
|
1005
|
-
fs4.ftruncateSync(fd, validOffset);
|
|
1006
|
-
}
|
|
1007
|
-
fs4.closeSync(fd);
|
|
1008
|
-
}
|
|
1009
|
-
}
|
|
1010
|
-
/* -------------------------
|
|
1011
|
-
CLEANUP
|
|
1012
|
-
------------------------- */
|
|
1013
|
-
async cleanup(beforeGen) {
|
|
1014
|
-
if (!fs4.existsSync(this.walDir)) return;
|
|
1015
|
-
const files = fs4.readdirSync(this.walDir);
|
|
1016
|
-
for (const f of files) {
|
|
1017
|
-
const m = f.match(/^wal-(\d+)\.log$/);
|
|
1018
|
-
if (!m) continue;
|
|
1019
|
-
const gen = Number(m[1]);
|
|
1020
|
-
if (gen < beforeGen) {
|
|
1021
|
-
fs4.unlinkSync(path4.join(this.walDir, f));
|
|
1022
|
-
}
|
|
1023
|
-
}
|
|
1024
|
-
}
|
|
1025
|
-
/* -------------------------
|
|
1026
|
-
GETTERS
|
|
1027
|
-
------------------------- */
|
|
1028
|
-
getCurrentLSN() {
|
|
1029
|
-
return this.lsn;
|
|
1030
|
-
}
|
|
1031
|
-
getCurrentGen() {
|
|
1032
|
-
return this.currentGen;
|
|
1033
|
-
}
|
|
1034
|
-
};
|
|
1035
|
-
|
|
1036
|
-
// src/core/checkpoint.ts
|
|
1037
|
-
import fs5 from "fs";
|
|
1038
|
-
import path5 from "path";
|
|
1039
|
-
var CHECKPOINT_A = "__checkpoint_A.json";
|
|
1040
|
-
var CHECKPOINT_B = "__checkpoint_B.json";
|
|
1041
|
-
var FORMAT_VERSION = 1;
|
|
1042
|
-
var CRC32_TABLE2 = (() => {
|
|
1043
|
-
const table = new Uint32Array(256);
|
|
1044
|
-
for (let i = 0; i < 256; i++) {
|
|
1045
|
-
let c = i;
|
|
1046
|
-
for (let k = 0; k < 8; k++) {
|
|
1047
|
-
c = c & 1 ? 3988292384 ^ c >>> 1 : c >>> 1;
|
|
1048
|
-
}
|
|
1049
|
-
table[i] = c >>> 0;
|
|
1050
|
-
}
|
|
1051
|
-
return table;
|
|
1052
|
-
})();
|
|
1053
|
-
function crc322(input) {
|
|
1054
|
-
let crc = 4294967295;
|
|
1055
|
-
for (let i = 0; i < input.length; i++) {
|
|
1056
|
-
crc = CRC32_TABLE2[(crc ^ input.charCodeAt(i)) & 255] ^ crc >>> 8;
|
|
1057
|
-
}
|
|
1058
|
-
return (crc ^ 4294967295) >>> 0;
|
|
1059
|
-
}
|
|
1060
|
-
var CheckpointManager = class {
|
|
1061
|
-
baseDir;
|
|
1062
|
-
data;
|
|
1063
|
-
constructor(baseDir) {
|
|
1064
|
-
this.baseDir = baseDir;
|
|
1065
|
-
this.data = {
|
|
1066
|
-
lsn: 0,
|
|
1067
|
-
walGen: 1,
|
|
1068
|
-
time: 0,
|
|
1069
|
-
version: FORMAT_VERSION
|
|
1070
|
-
};
|
|
1071
|
-
this.load();
|
|
1072
|
-
}
|
|
1073
|
-
/* -------------------------
|
|
1074
|
-
LOAD (CRC + FALLBACK)
|
|
1075
|
-
------------------------- */
|
|
1076
|
-
load() {
|
|
1077
|
-
const a = this.readCheckpoint(CHECKPOINT_A);
|
|
1078
|
-
const b = this.readCheckpoint(CHECKPOINT_B);
|
|
1079
|
-
if (a && b) {
|
|
1080
|
-
this.data = a.data.lsn >= b.data.lsn ? a.data : b.data;
|
|
1081
|
-
return;
|
|
1082
|
-
}
|
|
1083
|
-
if (a) {
|
|
1084
|
-
this.data = a.data;
|
|
1085
|
-
return;
|
|
1086
|
-
}
|
|
1087
|
-
if (b) {
|
|
1088
|
-
this.data = b.data;
|
|
1089
|
-
return;
|
|
1090
|
-
}
|
|
1091
|
-
console.warn("No valid checkpoint found, starting from zero");
|
|
1092
|
-
}
|
|
1093
|
-
readCheckpoint(file) {
|
|
1094
|
-
const filePath = path5.join(this.baseDir, file);
|
|
1095
|
-
if (!fs5.existsSync(filePath)) return null;
|
|
1096
|
-
try {
|
|
1097
|
-
const raw = fs5.readFileSync(filePath, "utf8");
|
|
1098
|
-
const parsed = JSON.parse(raw);
|
|
1099
|
-
if (!parsed?.data || typeof parsed.crc !== "number") {
|
|
1100
|
-
return null;
|
|
1101
|
-
}
|
|
1102
|
-
const expected = crc322(JSON.stringify(parsed.data));
|
|
1103
|
-
if (expected !== parsed.crc) {
|
|
1104
|
-
console.error(`Checkpoint CRC mismatch: ${file}`);
|
|
1105
|
-
return null;
|
|
1106
|
-
}
|
|
1107
|
-
return parsed;
|
|
1108
|
-
} catch {
|
|
1109
|
-
return null;
|
|
1110
|
-
}
|
|
1111
|
-
}
|
|
1112
|
-
/* -------------------------
|
|
1113
|
-
SAVE (DUAL WRITE)
|
|
1114
|
-
------------------------- */
|
|
1115
|
-
save(lsn, walGen) {
|
|
1116
|
-
const data = {
|
|
1117
|
-
lsn,
|
|
1118
|
-
walGen,
|
|
1119
|
-
time: Date.now(),
|
|
1120
|
-
version: FORMAT_VERSION
|
|
1121
|
-
};
|
|
1122
|
-
const stored = {
|
|
1123
|
-
data,
|
|
1124
|
-
crc: crc322(JSON.stringify(data))
|
|
1125
|
-
};
|
|
1126
|
-
const target = lsn % 2 === 0 ? CHECKPOINT_A : CHECKPOINT_B;
|
|
1127
|
-
try {
|
|
1128
|
-
fs5.writeFileSync(
|
|
1129
|
-
path5.join(this.baseDir, target),
|
|
1130
|
-
JSON.stringify(stored, null, 2),
|
|
1131
|
-
"utf8"
|
|
1132
|
-
);
|
|
1133
|
-
this.data = data;
|
|
1134
|
-
} catch (err) {
|
|
1135
|
-
console.error("Failed to write checkpoint:", err);
|
|
1136
|
-
}
|
|
1137
|
-
}
|
|
1138
|
-
/* -------------------------
|
|
1139
|
-
GET CURRENT
|
|
1140
|
-
------------------------- */
|
|
1141
|
-
get() {
|
|
1142
|
-
return this.data;
|
|
1143
|
-
}
|
|
1144
|
-
};
|
|
1145
|
-
|
|
1146
|
-
// src/core/database.ts
|
|
1147
|
-
var META_FILE = "__db_meta.json";
|
|
1148
|
-
var META_VERSION = 2;
|
|
1149
|
-
var DEFAULT_SCHEMA_VERSION = "v1";
|
|
1150
|
-
var DBTransactionContext = class {
|
|
1151
|
-
constructor(db, txId) {
|
|
1152
|
-
this.db = db;
|
|
1153
|
-
this.txId = txId;
|
|
1154
|
-
}
|
|
1155
|
-
ops = [];
|
|
1156
|
-
collection(name) {
|
|
1157
|
-
return new Proxy({}, {
|
|
1158
|
-
get: (_, prop) => {
|
|
1159
|
-
return (...args) => {
|
|
1160
|
-
this.ops.push({
|
|
1161
|
-
tx: this.txId,
|
|
1162
|
-
col: name,
|
|
1163
|
-
op: prop,
|
|
1164
|
-
args
|
|
1165
|
-
});
|
|
1166
|
-
};
|
|
1167
|
-
}
|
|
1168
|
-
});
|
|
1169
|
-
}
|
|
1170
|
-
async commit() {
|
|
1171
|
-
for (const op of this.ops) {
|
|
1172
|
-
await this.db.wal.append({
|
|
1173
|
-
tx: this.txId,
|
|
1174
|
-
type: "op",
|
|
1175
|
-
payload: op
|
|
1176
|
-
});
|
|
1177
|
-
}
|
|
1178
|
-
const commitLSN = await this.db.wal.append({
|
|
1179
|
-
tx: this.txId,
|
|
1180
|
-
type: "commit"
|
|
1181
|
-
});
|
|
1182
|
-
await this.db.applyTransaction(this.ops);
|
|
1183
|
-
const appliedLSN = await this.db.wal.append({
|
|
1184
|
-
tx: this.txId,
|
|
1185
|
-
type: "applied"
|
|
1186
|
-
});
|
|
1187
|
-
this.db.advanceCheckpoint(appliedLSN);
|
|
1188
|
-
await this.db.postCommitMaintenance();
|
|
1189
|
-
}
|
|
1190
|
-
};
|
|
1191
|
-
var LioranDB = class _LioranDB {
|
|
1192
|
-
basePath;
|
|
1193
|
-
dbName;
|
|
1194
|
-
manager;
|
|
1195
|
-
collections;
|
|
1196
|
-
metaPath;
|
|
1197
|
-
meta;
|
|
1198
|
-
migrator;
|
|
1199
|
-
static TX_SEQ = 0;
|
|
1200
|
-
wal;
|
|
1201
|
-
checkpoint;
|
|
1202
|
-
constructor(basePath, dbName, manager) {
|
|
1203
|
-
this.basePath = basePath;
|
|
1204
|
-
this.dbName = dbName;
|
|
1205
|
-
this.manager = manager;
|
|
1206
|
-
this.collections = /* @__PURE__ */ new Map();
|
|
1207
|
-
this.metaPath = path6.join(basePath, META_FILE);
|
|
1208
|
-
fs6.mkdirSync(basePath, { recursive: true });
|
|
1209
|
-
this.loadMeta();
|
|
1210
|
-
this.wal = new WALManager(basePath);
|
|
1211
|
-
this.checkpoint = new CheckpointManager(basePath);
|
|
1212
|
-
this.migrator = new MigrationEngine(this);
|
|
1213
|
-
this.initialize().catch(console.error);
|
|
1214
|
-
}
|
|
1215
|
-
/* ------------------------- INIT & RECOVERY ------------------------- */
|
|
1216
|
-
async initialize() {
|
|
1217
|
-
await this.recoverFromWAL();
|
|
1218
|
-
}
|
|
1219
|
-
async recoverFromWAL() {
|
|
1220
|
-
const checkpointData = this.checkpoint.get();
|
|
1221
|
-
const fromLSN = checkpointData.lsn;
|
|
1222
|
-
const committed = /* @__PURE__ */ new Set();
|
|
1223
|
-
const applied = /* @__PURE__ */ new Set();
|
|
1224
|
-
const ops = /* @__PURE__ */ new Map();
|
|
1225
|
-
await this.wal.replay(fromLSN, async (record) => {
|
|
1226
|
-
if (record.type === "commit") {
|
|
1227
|
-
committed.add(record.tx);
|
|
1228
|
-
} else if (record.type === "applied") {
|
|
1229
|
-
applied.add(record.tx);
|
|
1230
|
-
} else if (record.type === "op") {
|
|
1231
|
-
if (!ops.has(record.tx)) ops.set(record.tx, []);
|
|
1232
|
-
ops.get(record.tx).push(record.payload);
|
|
1233
|
-
}
|
|
1234
|
-
});
|
|
1235
|
-
let highestAppliedLSN = fromLSN;
|
|
1236
|
-
for (const tx of committed) {
|
|
1237
|
-
if (applied.has(tx)) continue;
|
|
1238
|
-
const txOps = ops.get(tx);
|
|
1239
|
-
if (txOps) {
|
|
1240
|
-
await this.applyTransaction(txOps);
|
|
1241
|
-
highestAppliedLSN = this.wal.getCurrentLSN();
|
|
1242
|
-
}
|
|
1243
|
-
}
|
|
1244
|
-
this.advanceCheckpoint(highestAppliedLSN);
|
|
1245
|
-
}
|
|
1246
|
-
/* ------------------------- CHECKPOINT ADVANCE ------------------------- */
|
|
1247
|
-
advanceCheckpoint(lsn) {
|
|
1248
|
-
const current = this.checkpoint.get();
|
|
1249
|
-
if (lsn > current.lsn) {
|
|
1250
|
-
this.checkpoint.save(lsn, this.wal.getCurrentGen());
|
|
1251
|
-
this.wal.cleanup(this.wal.getCurrentGen() - 1).catch(() => {
|
|
1252
|
-
});
|
|
1253
|
-
}
|
|
1254
|
-
}
|
|
1255
|
-
/* ------------------------- META ------------------------- */
|
|
1256
|
-
loadMeta() {
|
|
1257
|
-
if (!fs6.existsSync(this.metaPath)) {
|
|
1258
|
-
this.meta = {
|
|
1259
|
-
version: META_VERSION,
|
|
1260
|
-
indexes: {},
|
|
1261
|
-
schemaVersion: DEFAULT_SCHEMA_VERSION
|
|
1262
|
-
};
|
|
1263
|
-
this.saveMeta();
|
|
1264
|
-
return;
|
|
1265
|
-
}
|
|
1266
|
-
this.meta = JSON.parse(fs6.readFileSync(this.metaPath, "utf8"));
|
|
1267
|
-
if (!this.meta.schemaVersion) {
|
|
1268
|
-
this.meta.schemaVersion = DEFAULT_SCHEMA_VERSION;
|
|
1269
|
-
this.saveMeta();
|
|
1270
|
-
}
|
|
1271
|
-
}
|
|
1272
|
-
saveMeta() {
|
|
1273
|
-
fs6.writeFileSync(this.metaPath, JSON.stringify(this.meta, null, 2));
|
|
1274
|
-
}
|
|
1275
|
-
getSchemaVersion() {
|
|
1276
|
-
return this.meta.schemaVersion;
|
|
1277
|
-
}
|
|
1278
|
-
setSchemaVersion(v) {
|
|
1279
|
-
this.meta.schemaVersion = v;
|
|
1280
|
-
this.saveMeta();
|
|
1281
|
-
}
|
|
1282
|
-
/* ------------------------- DB MIGRATIONS ------------------------- */
|
|
1283
|
-
migrate(from, to, fn) {
|
|
1284
|
-
this.migrator.register(from, to, async (db) => {
|
|
1285
|
-
await fn(db);
|
|
1286
|
-
db.setSchemaVersion(to);
|
|
1287
|
-
});
|
|
1288
|
-
}
|
|
1289
|
-
async applyMigrations(targetVersion) {
|
|
1290
|
-
await this.migrator.upgradeToLatest();
|
|
1291
|
-
}
|
|
1292
|
-
/* ------------------------- TX APPLY ------------------------- */
|
|
1293
|
-
async applyTransaction(ops) {
|
|
1294
|
-
for (const { col, op, args } of ops) {
|
|
1295
|
-
const collection = this.collection(col);
|
|
1296
|
-
await collection._exec(op, args);
|
|
1297
|
-
}
|
|
1298
|
-
}
|
|
1299
|
-
/* ------------------------- COLLECTION ------------------------- */
|
|
1300
|
-
collection(name, schema, schemaVersion) {
|
|
1301
|
-
if (this.collections.has(name)) {
|
|
1302
|
-
const col2 = this.collections.get(name);
|
|
1303
|
-
if (schema && schemaVersion !== void 0) {
|
|
1304
|
-
col2.setSchema(schema, schemaVersion);
|
|
1305
|
-
}
|
|
1306
|
-
return col2;
|
|
1307
|
-
}
|
|
1308
|
-
const colPath = path6.join(this.basePath, name);
|
|
1309
|
-
fs6.mkdirSync(colPath, { recursive: true });
|
|
1310
|
-
const col = new Collection(
|
|
1311
|
-
colPath,
|
|
1312
|
-
schema,
|
|
1313
|
-
schemaVersion ?? 1
|
|
1314
|
-
);
|
|
1315
|
-
const metas = this.meta.indexes[name] ?? [];
|
|
1316
|
-
for (const m of metas) {
|
|
1317
|
-
col.registerIndex(new Index(colPath, m.field, m.options));
|
|
1318
|
-
}
|
|
1319
|
-
this.collections.set(name, col);
|
|
1320
|
-
return col;
|
|
1321
|
-
}
|
|
1322
|
-
/* ------------------------- INDEX API ------------------------- */
|
|
1323
|
-
async createIndex(collection, field, options = {}) {
|
|
1324
|
-
const col = this.collection(collection);
|
|
1325
|
-
const existing = this.meta.indexes[collection]?.find((i) => i.field === field);
|
|
1326
|
-
if (existing) return;
|
|
1327
|
-
const index = new Index(col.dir, field, options);
|
|
1328
|
-
for await (const [key, enc] of col.db.iterator()) {
|
|
1329
|
-
if (!enc) continue;
|
|
1330
|
-
try {
|
|
1331
|
-
const doc = decryptData(enc);
|
|
1332
|
-
await index.insert(doc);
|
|
1333
|
-
} catch (err) {
|
|
1334
|
-
const msg = err instanceof Error ? err.message : String(err);
|
|
1335
|
-
console.warn(`Index build skipped doc ${key}: ${msg}`);
|
|
1336
|
-
}
|
|
1337
|
-
}
|
|
1338
|
-
col.registerIndex(index);
|
|
1339
|
-
if (!this.meta.indexes[collection]) {
|
|
1340
|
-
this.meta.indexes[collection] = [];
|
|
1341
|
-
}
|
|
1342
|
-
this.meta.indexes[collection].push({ field, options });
|
|
1343
|
-
this.saveMeta();
|
|
1344
|
-
}
|
|
1345
|
-
/* ------------------------- COMPACTION ------------------------- */
|
|
1346
|
-
async compactCollection(name) {
|
|
1347
|
-
const col = this.collection(name);
|
|
1348
|
-
await col.compact();
|
|
1349
|
-
}
|
|
1350
|
-
async compactAll() {
|
|
1351
|
-
for (const name of this.collections.keys()) {
|
|
1352
|
-
await this.compactCollection(name);
|
|
1353
|
-
}
|
|
1354
|
-
}
|
|
1355
|
-
/* ------------------------- TX API ------------------------- */
|
|
1356
|
-
async transaction(fn) {
|
|
1357
|
-
const txId = ++_LioranDB.TX_SEQ;
|
|
1358
|
-
const tx = new DBTransactionContext(this, txId);
|
|
1359
|
-
const result = await fn(tx);
|
|
1360
|
-
await tx.commit();
|
|
1361
|
-
return result;
|
|
1362
|
-
}
|
|
1363
|
-
/* ------------------------- POST COMMIT ------------------------- */
|
|
1364
|
-
async postCommitMaintenance() {
|
|
1365
|
-
}
|
|
1366
|
-
/* ------------------------- SHUTDOWN ------------------------- */
|
|
1367
|
-
async close() {
|
|
1368
|
-
for (const col of this.collections.values()) {
|
|
1369
|
-
try {
|
|
1370
|
-
await col.close();
|
|
1371
|
-
} catch {
|
|
1372
|
-
}
|
|
1373
|
-
}
|
|
1374
|
-
this.collections.clear();
|
|
1375
|
-
}
|
|
1376
|
-
};
|
|
1377
|
-
|
|
1378
|
-
// src/utils/rootpath.ts
|
|
1379
|
-
import os2 from "os";
|
|
1380
|
-
import path7 from "path";
|
|
1381
|
-
import fs7 from "fs";
|
|
1382
|
-
function getDefaultRootPath() {
|
|
1383
|
-
let dbPath = process.env.LIORANDB_PATH;
|
|
1384
|
-
if (!dbPath) {
|
|
1385
|
-
const homeDir = os2.homedir();
|
|
1386
|
-
dbPath = path7.join(homeDir, "LioranDB", "db");
|
|
1387
|
-
if (!fs7.existsSync(dbPath)) {
|
|
1388
|
-
fs7.mkdirSync(dbPath, { recursive: true });
|
|
1389
|
-
}
|
|
1390
|
-
process.env.LIORANDB_PATH = dbPath;
|
|
1391
|
-
}
|
|
1392
|
-
return dbPath;
|
|
1393
|
-
}
|
|
1394
|
-
function getBaseDBFolder() {
|
|
1395
|
-
return getDefaultRootPath();
|
|
1396
|
-
}
|
|
1397
|
-
|
|
1398
|
-
// src/ipc/client.ts
|
|
1399
|
-
import net from "net";
|
|
1400
|
-
|
|
1401
|
-
// src/ipc/socketPath.ts
|
|
1402
|
-
import os3 from "os";
|
|
1403
|
-
import path8 from "path";
|
|
1404
|
-
function getIPCSocketPath(rootPath) {
|
|
1405
|
-
if (os3.platform() === "win32") {
|
|
1406
|
-
return `\\\\.\\pipe\\liorandb_${rootPath.replace(/[:\\\/]/g, "_")}`;
|
|
1407
|
-
}
|
|
1408
|
-
return path8.join(rootPath, ".lioran.sock");
|
|
1409
|
-
}
|
|
1410
|
-
|
|
1411
|
-
// src/ipc/client.ts
|
|
1412
|
-
function delay(ms) {
|
|
1413
|
-
return new Promise((r) => setTimeout(r, ms));
|
|
1414
|
-
}
|
|
1415
|
-
async function connectWithRetry(path10) {
|
|
1416
|
-
let attempt = 0;
|
|
1417
|
-
while (true) {
|
|
1418
|
-
try {
|
|
1419
|
-
return await new Promise((resolve, reject) => {
|
|
1420
|
-
const socket = net.connect(path10, () => resolve(socket));
|
|
1421
|
-
socket.once("error", reject);
|
|
1422
|
-
});
|
|
1423
|
-
} catch (err) {
|
|
1424
|
-
if (err.code === "ENOENT" || err.code === "ECONNREFUSED") {
|
|
1425
|
-
if (attempt++ > 80) {
|
|
1426
|
-
throw new Error("IPC server not reachable");
|
|
1427
|
-
}
|
|
1428
|
-
await delay(50);
|
|
1429
|
-
continue;
|
|
1430
|
-
}
|
|
1431
|
-
throw err;
|
|
1432
|
-
}
|
|
1433
|
-
}
|
|
1434
|
-
}
|
|
1435
|
-
var IPCClient = class {
|
|
1436
|
-
socket;
|
|
1437
|
-
buffer = "";
|
|
1438
|
-
seq = 0;
|
|
1439
|
-
pending = /* @__PURE__ */ new Map();
|
|
1440
|
-
ready;
|
|
1441
|
-
constructor(rootPath) {
|
|
1442
|
-
const socketPath = getIPCSocketPath(rootPath);
|
|
1443
|
-
this.ready = this.init(socketPath);
|
|
1444
|
-
}
|
|
1445
|
-
async init(socketPath) {
|
|
1446
|
-
this.socket = await connectWithRetry(socketPath);
|
|
1447
|
-
this.socket.on("data", (data) => {
|
|
1448
|
-
this.buffer += data.toString();
|
|
1449
|
-
while (this.buffer.includes("\n")) {
|
|
1450
|
-
const idx = this.buffer.indexOf("\n");
|
|
1451
|
-
const raw = this.buffer.slice(0, idx);
|
|
1452
|
-
this.buffer = this.buffer.slice(idx + 1);
|
|
1453
|
-
const msg = JSON.parse(raw);
|
|
1454
|
-
const cb = this.pending.get(msg.id);
|
|
1455
|
-
if (cb) {
|
|
1456
|
-
this.pending.delete(msg.id);
|
|
1457
|
-
cb(msg);
|
|
1458
|
-
}
|
|
1459
|
-
}
|
|
1460
|
-
});
|
|
1461
|
-
this.socket.on("error", (err) => {
|
|
1462
|
-
console.error("IPC socket error:", err);
|
|
1463
|
-
});
|
|
1464
|
-
}
|
|
1465
|
-
async exec(action, args) {
|
|
1466
|
-
await this.ready;
|
|
1467
|
-
return new Promise((resolve, reject) => {
|
|
1468
|
-
const id = ++this.seq;
|
|
1469
|
-
this.pending.set(id, (msg) => {
|
|
1470
|
-
msg.ok ? resolve(msg.result) : reject(new Error(msg.error));
|
|
1471
|
-
});
|
|
1472
|
-
this.socket.write(JSON.stringify({ id, action, args }) + "\n");
|
|
1473
|
-
});
|
|
1474
|
-
}
|
|
1475
|
-
close() {
|
|
1476
|
-
try {
|
|
1477
|
-
this.socket.end();
|
|
1478
|
-
} catch {
|
|
1479
|
-
}
|
|
1480
|
-
}
|
|
1481
|
-
};
|
|
1482
|
-
|
|
1483
|
-
// src/ipc/queue.ts
|
|
1484
|
-
var DBQueue = class {
|
|
1485
|
-
client;
|
|
1486
|
-
constructor(rootPath = getDefaultRootPath()) {
|
|
1487
|
-
this.client = new IPCClient(rootPath);
|
|
1488
|
-
}
|
|
1489
|
-
exec(action, args) {
|
|
1490
|
-
return this.client.exec(action, args);
|
|
1491
|
-
}
|
|
1492
|
-
/* ----------------------------- COMPACTION API ----------------------------- */
|
|
1493
|
-
compactCollection(db, col) {
|
|
1494
|
-
return this.exec("compact:collection", { db, col });
|
|
1495
|
-
}
|
|
1496
|
-
compactDB(db) {
|
|
1497
|
-
return this.exec("compact:db", { db });
|
|
1498
|
-
}
|
|
1499
|
-
compactAll() {
|
|
1500
|
-
return this.exec("compact:all", {});
|
|
1501
|
-
}
|
|
1502
|
-
/* ----------------------------- SNAPSHOT API ----------------------------- */
|
|
1503
|
-
snapshot(path10) {
|
|
1504
|
-
return this.exec("snapshot", { path: path10 });
|
|
1505
|
-
}
|
|
1506
|
-
restore(path10) {
|
|
1507
|
-
return this.exec("restore", { path: path10 });
|
|
1508
|
-
}
|
|
1509
|
-
/* ------------------------------ SHUTDOWN ------------------------------ */
|
|
1510
|
-
async shutdown() {
|
|
1511
|
-
try {
|
|
1512
|
-
await this.exec("shutdown", {});
|
|
1513
|
-
} catch {
|
|
1514
|
-
}
|
|
1515
|
-
this.client.close();
|
|
1516
|
-
}
|
|
1517
|
-
};
|
|
1518
|
-
var dbQueue = new DBQueue();
|
|
1519
|
-
|
|
1520
|
-
// src/ipc/server.ts
|
|
1521
|
-
import net2 from "net";
|
|
1522
|
-
import fs8 from "fs";
|
|
1523
|
-
var IPCServer = class {
|
|
1524
|
-
server;
|
|
1525
|
-
manager;
|
|
1526
|
-
socketPath;
|
|
1527
|
-
constructor(manager, rootPath) {
|
|
1528
|
-
this.manager = manager;
|
|
1529
|
-
this.socketPath = getIPCSocketPath(rootPath);
|
|
1530
|
-
}
|
|
1531
|
-
start() {
|
|
1532
|
-
if (!this.socketPath.startsWith("\\\\.\\")) {
|
|
1533
|
-
if (fs8.existsSync(this.socketPath)) fs8.unlinkSync(this.socketPath);
|
|
1534
|
-
}
|
|
1535
|
-
this.server = net2.createServer((socket) => {
|
|
1536
|
-
let buffer = "";
|
|
1537
|
-
socket.on("data", async (data) => {
|
|
1538
|
-
buffer += data.toString();
|
|
1539
|
-
while (buffer.includes("\n")) {
|
|
1540
|
-
const idx = buffer.indexOf("\n");
|
|
1541
|
-
const raw = buffer.slice(0, idx);
|
|
1542
|
-
buffer = buffer.slice(idx + 1);
|
|
1543
|
-
try {
|
|
1544
|
-
const msg = JSON.parse(raw);
|
|
1545
|
-
await this.handleMessage(socket, msg);
|
|
1546
|
-
} catch {
|
|
1547
|
-
socket.write(JSON.stringify({
|
|
1548
|
-
id: null,
|
|
1549
|
-
ok: false,
|
|
1550
|
-
error: "Invalid JSON"
|
|
1551
|
-
}) + "\n");
|
|
1552
|
-
}
|
|
1553
|
-
}
|
|
1554
|
-
});
|
|
1555
|
-
});
|
|
1556
|
-
this.server.listen(this.socketPath, () => {
|
|
1557
|
-
console.log("[IPC] Server listening:", this.socketPath);
|
|
1558
|
-
});
|
|
1559
|
-
}
|
|
1560
|
-
async handleMessage(socket, msg) {
|
|
1561
|
-
const { id, action, args } = msg;
|
|
1562
|
-
try {
|
|
1563
|
-
let result;
|
|
1564
|
-
switch (action) {
|
|
1565
|
-
/* ---------------- DB ---------------- */
|
|
1566
|
-
case "db": {
|
|
1567
|
-
await this.manager.db(args.db);
|
|
1568
|
-
result = true;
|
|
1569
|
-
break;
|
|
1570
|
-
}
|
|
1571
|
-
/* ---------------- OPS ---------------- */
|
|
1572
|
-
case "op": {
|
|
1573
|
-
const { db, col, method, params } = args;
|
|
1574
|
-
const collection = (await this.manager.db(db)).collection(col);
|
|
1575
|
-
result = await collection[method](...params);
|
|
1576
|
-
break;
|
|
1577
|
-
}
|
|
1578
|
-
/* ---------------- COMPACTION ---------------- */
|
|
1579
|
-
case "compact:collection": {
|
|
1580
|
-
const { db, col } = args;
|
|
1581
|
-
const collection = (await this.manager.db(db)).collection(col);
|
|
1582
|
-
await collection.compact();
|
|
1583
|
-
result = true;
|
|
1584
|
-
break;
|
|
1585
|
-
}
|
|
1586
|
-
case "compact:db": {
|
|
1587
|
-
const { db } = args;
|
|
1588
|
-
const database = await this.manager.db(db);
|
|
1589
|
-
await database.compactAll();
|
|
1590
|
-
result = true;
|
|
1591
|
-
break;
|
|
1592
|
-
}
|
|
1593
|
-
case "compact:all": {
|
|
1594
|
-
for (const db of this.manager.openDBs.values()) {
|
|
1595
|
-
await db.compactAll();
|
|
1596
|
-
}
|
|
1597
|
-
result = true;
|
|
1598
|
-
break;
|
|
1599
|
-
}
|
|
1600
|
-
/* ---------------- SNAPSHOT ---------------- */
|
|
1601
|
-
case "snapshot": {
|
|
1602
|
-
const { path: snapshotPath } = args;
|
|
1603
|
-
await this.manager.snapshot(snapshotPath);
|
|
1604
|
-
result = true;
|
|
1605
|
-
break;
|
|
1606
|
-
}
|
|
1607
|
-
case "restore": {
|
|
1608
|
-
const { path: snapshotPath } = args;
|
|
1609
|
-
await this.manager.restore(snapshotPath);
|
|
1610
|
-
result = true;
|
|
1611
|
-
break;
|
|
1612
|
-
}
|
|
1613
|
-
/* ---------------- CONTROL ---------------- */
|
|
1614
|
-
case "shutdown": {
|
|
1615
|
-
await this.manager.closeAll();
|
|
1616
|
-
result = true;
|
|
1617
|
-
break;
|
|
1618
|
-
}
|
|
1619
|
-
default:
|
|
1620
|
-
throw new Error(`Unknown IPC action: ${action}`);
|
|
1621
|
-
}
|
|
1622
|
-
socket.write(JSON.stringify({ id, ok: true, result }) + "\n");
|
|
1623
|
-
} catch (err) {
|
|
1624
|
-
socket.write(JSON.stringify({
|
|
1625
|
-
id,
|
|
1626
|
-
ok: false,
|
|
1627
|
-
error: err?.message || "IPC error"
|
|
1628
|
-
}) + "\n");
|
|
1629
|
-
}
|
|
1630
|
-
}
|
|
1631
|
-
async close() {
|
|
1632
|
-
if (this.server) this.server.close();
|
|
1633
|
-
if (!this.socketPath.startsWith("\\\\.\\")) {
|
|
1634
|
-
try {
|
|
1635
|
-
fs8.unlinkSync(this.socketPath);
|
|
1636
|
-
} catch {
|
|
1637
|
-
}
|
|
1638
|
-
}
|
|
1639
|
-
}
|
|
1640
|
-
};
|
|
1641
|
-
|
|
1642
|
-
// src/LioranManager.ts
|
|
1643
|
-
var LioranManager = class {
|
|
1644
|
-
rootPath;
|
|
1645
|
-
openDBs;
|
|
1646
|
-
closed = false;
|
|
1647
|
-
mode;
|
|
1648
|
-
lockFd;
|
|
1649
|
-
ipcServer;
|
|
1650
|
-
constructor(options = {}) {
|
|
1651
|
-
const { rootPath, encryptionKey } = options;
|
|
1652
|
-
this.rootPath = rootPath || getDefaultRootPath();
|
|
1653
|
-
if (!fs9.existsSync(this.rootPath)) {
|
|
1654
|
-
fs9.mkdirSync(this.rootPath, { recursive: true });
|
|
1655
|
-
}
|
|
1656
|
-
if (encryptionKey) {
|
|
1657
|
-
setEncryptionKey(encryptionKey);
|
|
1658
|
-
}
|
|
1659
|
-
this.openDBs = /* @__PURE__ */ new Map();
|
|
1660
|
-
this.mode = this.tryAcquireLock() ? "primary" /* PRIMARY */ : "client" /* CLIENT */;
|
|
1661
|
-
if (this.mode === "primary" /* PRIMARY */) {
|
|
1662
|
-
this.ipcServer = new IPCServer(this, this.rootPath);
|
|
1663
|
-
this.ipcServer.start();
|
|
1664
|
-
this._registerShutdownHooks();
|
|
1665
|
-
}
|
|
1666
|
-
}
|
|
1667
|
-
/* ---------------- LOCK MANAGEMENT ---------------- */
|
|
1668
|
-
isProcessAlive(pid) {
|
|
1669
|
-
try {
|
|
1670
|
-
process2.kill(pid, 0);
|
|
1671
|
-
return true;
|
|
1672
|
-
} catch {
|
|
1673
|
-
return false;
|
|
1674
|
-
}
|
|
1675
|
-
}
|
|
1676
|
-
tryAcquireLock() {
|
|
1677
|
-
const lockPath = path9.join(this.rootPath, ".lioran.lock");
|
|
1678
|
-
try {
|
|
1679
|
-
this.lockFd = fs9.openSync(lockPath, "wx");
|
|
1680
|
-
fs9.writeSync(this.lockFd, String(process2.pid));
|
|
1681
|
-
return true;
|
|
1682
|
-
} catch {
|
|
1683
|
-
try {
|
|
1684
|
-
const pid = Number(fs9.readFileSync(lockPath, "utf8"));
|
|
1685
|
-
if (!this.isProcessAlive(pid)) {
|
|
1686
|
-
fs9.unlinkSync(lockPath);
|
|
1687
|
-
this.lockFd = fs9.openSync(lockPath, "wx");
|
|
1688
|
-
fs9.writeSync(this.lockFd, String(process2.pid));
|
|
1689
|
-
return true;
|
|
1690
|
-
}
|
|
1691
|
-
} catch {
|
|
1692
|
-
}
|
|
1693
|
-
return false;
|
|
1694
|
-
}
|
|
1695
|
-
}
|
|
1696
|
-
/* ---------------- DB OPEN ---------------- */
|
|
1697
|
-
async db(name) {
|
|
1698
|
-
if (this.mode === "client" /* CLIENT */) {
|
|
1699
|
-
await dbQueue.exec("db", { db: name });
|
|
1700
|
-
return new IPCDatabase(name);
|
|
1701
|
-
}
|
|
1702
|
-
return this.openDatabase(name);
|
|
1703
|
-
}
|
|
1704
|
-
async openDatabase(name) {
|
|
1705
|
-
this._assertOpen();
|
|
1706
|
-
if (this.openDBs.has(name)) {
|
|
1707
|
-
return this.openDBs.get(name);
|
|
1708
|
-
}
|
|
1709
|
-
const dbPath = path9.join(this.rootPath, name);
|
|
1710
|
-
await fs9.promises.mkdir(dbPath, { recursive: true });
|
|
1711
|
-
const db = new LioranDB(dbPath, name, this);
|
|
1712
|
-
this.openDBs.set(name, db);
|
|
1713
|
-
return db;
|
|
1714
|
-
}
|
|
1715
|
-
/* ---------------- SNAPSHOT ORCHESTRATION ---------------- */
|
|
1716
|
-
/**
|
|
1717
|
-
* Create TAR snapshot of full DB directory
|
|
1718
|
-
*/
|
|
1719
|
-
async snapshot(snapshotPath) {
|
|
1720
|
-
if (this.mode === "client" /* CLIENT */) {
|
|
1721
|
-
return dbQueue.exec("snapshot", { path: snapshotPath });
|
|
1722
|
-
}
|
|
1723
|
-
for (const db of this.openDBs.values()) {
|
|
1724
|
-
for (const col of db.collections.values()) {
|
|
1725
|
-
try {
|
|
1726
|
-
await col.db.close();
|
|
1727
|
-
} catch {
|
|
1728
|
-
}
|
|
1729
|
-
}
|
|
1730
|
-
}
|
|
1731
|
-
fs9.mkdirSync(path9.dirname(snapshotPath), { recursive: true });
|
|
1732
|
-
const tar = await import("tar");
|
|
1733
|
-
await tar.c({
|
|
1734
|
-
gzip: true,
|
|
1735
|
-
file: snapshotPath,
|
|
1736
|
-
cwd: this.rootPath,
|
|
1737
|
-
portable: true
|
|
1738
|
-
}, ["./"]);
|
|
1739
|
-
return true;
|
|
1740
|
-
}
|
|
1741
|
-
/**
|
|
1742
|
-
* Restore TAR snapshot safely
|
|
1743
|
-
*/
|
|
1744
|
-
async restore(snapshotPath) {
|
|
1745
|
-
if (this.mode === "client" /* CLIENT */) {
|
|
1746
|
-
return dbQueue.exec("restore", { path: snapshotPath });
|
|
1747
|
-
}
|
|
1748
|
-
await this.closeAll();
|
|
1749
|
-
fs9.rmSync(this.rootPath, { recursive: true, force: true });
|
|
1750
|
-
fs9.mkdirSync(this.rootPath, { recursive: true });
|
|
1751
|
-
const tar = await import("tar");
|
|
1752
|
-
await tar.x({
|
|
1753
|
-
file: snapshotPath,
|
|
1754
|
-
cwd: this.rootPath
|
|
1755
|
-
});
|
|
1756
|
-
console.log("Restore completed. Restart required.");
|
|
1757
|
-
process2.exit(0);
|
|
1758
|
-
}
|
|
1759
|
-
/* ---------------- SHUTDOWN ---------------- */
|
|
1760
|
-
async closeAll() {
|
|
1761
|
-
if (this.closed) return;
|
|
1762
|
-
this.closed = true;
|
|
1763
|
-
if (this.mode === "client" /* CLIENT */) {
|
|
1764
|
-
await dbQueue.shutdown();
|
|
1765
|
-
return;
|
|
1766
|
-
}
|
|
1767
|
-
for (const db of this.openDBs.values()) {
|
|
1768
|
-
try {
|
|
1769
|
-
await db.close();
|
|
1770
|
-
} catch {
|
|
1771
|
-
}
|
|
1772
|
-
}
|
|
1773
|
-
this.openDBs.clear();
|
|
1774
|
-
try {
|
|
1775
|
-
if (this.lockFd) fs9.closeSync(this.lockFd);
|
|
1776
|
-
fs9.unlinkSync(path9.join(this.rootPath, ".lioran.lock"));
|
|
1777
|
-
} catch {
|
|
1778
|
-
}
|
|
1779
|
-
await this.ipcServer?.close();
|
|
1780
|
-
}
|
|
1781
|
-
async close() {
|
|
1782
|
-
return this.closeAll();
|
|
1783
|
-
}
|
|
1784
|
-
_registerShutdownHooks() {
|
|
1785
|
-
const shutdown = async () => {
|
|
1786
|
-
await this.closeAll();
|
|
1787
|
-
};
|
|
1788
|
-
process2.on("SIGINT", shutdown);
|
|
1789
|
-
process2.on("SIGTERM", shutdown);
|
|
1790
|
-
process2.on("exit", shutdown);
|
|
1791
|
-
}
|
|
1792
|
-
_assertOpen() {
|
|
1793
|
-
if (this.closed) {
|
|
1794
|
-
throw new Error("LioranManager is closed");
|
|
1795
|
-
}
|
|
1796
|
-
}
|
|
1797
|
-
};
|
|
1798
|
-
var IPCDatabase = class {
  /**
   * Thin client-side proxy for a database owned by the server process.
   * @param {string} name - Database name used to route IPC operations.
   */
  constructor(name) {
    this.name = name;
  }
  /**
   * Obtain a proxy for one of this database's collections.
   * @param {string} name - Collection name.
   * @returns {IPCCollection} proxy forwarding operations over IPC.
   */
  collection(name) {
    const proxy = new IPCCollection(this.name, name);
    return proxy;
  }
};
|
|
1806
|
-
var IPCCollection = class {
  /**
   * Client-side proxy that forwards collection operations to the server
   * process through the shared IPC queue.
   * @param {string} db - Owning database name.
   * @param {string} col - Collection name.
   */
  constructor(db, col) {
    this.db = db;
    this.col = col;
  }
  /**
   * Forward a single collection operation over IPC.
   * @param {string} method - Collection method name to invoke remotely.
   * @param {Array} params - Positional arguments for that method.
   * @returns {Promise<*>} resolves with the remote operation's result.
   */
  call(method, params) {
    const payload = {
      db: this.db,
      col: this.col,
      method,
      params
    };
    return dbQueue.exec("op", payload);
  }
  // One forwarding stub per supported collection operation; each simply
  // packages its arguments and delegates to call().
  insertOne = (doc) => this.call("insertOne", [doc]);
  insertMany = (docs) => this.call("insertMany", [docs]);
  find = (query) => this.call("find", [query]);
  findOne = (query) => this.call("findOne", [query]);
  updateOne = (filter, update, options) => this.call("updateOne", [filter, update, options]);
  updateMany = (filter, update) => this.call("updateMany", [filter, update]);
  deleteOne = (filter) => this.call("deleteOne", [filter]);
  deleteMany = (filter) => this.call("deleteMany", [filter]);
  countDocuments = (filter) => this.call("countDocuments", [filter]);
};
|
|
1
|
+
import {
|
|
2
|
+
LioranDB,
|
|
3
|
+
LioranManager,
|
|
4
|
+
getBaseDBFolder
|
|
5
|
+
} from "./chunk-2FSI7HX7.js";
|
|
1829
6
|
export {
|
|
1830
7
|
LioranDB,
|
|
1831
8
|
LioranManager,
|