@onurege3467/zerohelper 10.3.0 → 11.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/README.md +9 -779
  2. package/dist/index.js +19 -44
  3. package/package.json +21 -70
  4. package/dist/bin/commands/cache.d.ts +0 -2
  5. package/dist/bin/commands/cache.js +0 -92
  6. package/dist/bin/commands/db-backup.d.ts +0 -3
  7. package/dist/bin/commands/db-backup.js +0 -118
  8. package/dist/bin/commands/db.d.ts +0 -2
  9. package/dist/bin/commands/db.js +0 -334
  10. package/dist/bin/commands/import-export.d.ts +0 -3
  11. package/dist/bin/commands/import-export.js +0 -123
  12. package/dist/bin/commands/init.d.ts +0 -2
  13. package/dist/bin/commands/init.js +0 -85
  14. package/dist/bin/commands/migrate.d.ts +0 -3
  15. package/dist/bin/commands/migrate.js +0 -167
  16. package/dist/bin/commands/repl.d.ts +0 -2
  17. package/dist/bin/commands/repl.js +0 -96
  18. package/dist/bin/commands/seed.d.ts +0 -2
  19. package/dist/bin/commands/seed.js +0 -76
  20. package/dist/bin/commands/zpack.d.ts +0 -2
  21. package/dist/bin/commands/zpack.js +0 -36
  22. package/dist/bin/index.d.ts +0 -2
  23. package/dist/bin/index.js +0 -28
  24. package/dist/bin/types.d.ts +0 -22
  25. package/dist/bin/types.js +0 -2
  26. package/dist/bin/utils/config.d.ts +0 -3
  27. package/dist/bin/utils/config.js +0 -78
  28. package/dist/bin/utils/prompts.d.ts +0 -3
  29. package/dist/bin/utils/prompts.js +0 -115
  30. package/dist/bin/zero.d.ts +0 -2
  31. package/dist/bin/zero.js +0 -849
  32. package/dist/database/IDatabase.d.ts +0 -71
  33. package/dist/database/IDatabase.js +0 -48
  34. package/dist/database/cacheWrapper.d.ts +0 -34
  35. package/dist/database/cacheWrapper.js +0 -214
  36. package/dist/database/index.d.ts +0 -12
  37. package/dist/database/index.js +0 -100
  38. package/dist/database/json.d.ts +0 -32
  39. package/dist/database/json.js +0 -208
  40. package/dist/database/migration.d.ts +0 -21
  41. package/dist/database/migration.js +0 -97
  42. package/dist/database/mongodb.d.ts +0 -26
  43. package/dist/database/mongodb.js +0 -145
  44. package/dist/database/mysql.d.ts +0 -29
  45. package/dist/database/mysql.js +0 -282
  46. package/dist/database/pg.d.ts +0 -28
  47. package/dist/database/pg.js +0 -200
  48. package/dist/database/redis.d.ts +0 -31
  49. package/dist/database/redis.js +0 -176
  50. package/dist/database/seeder.d.ts +0 -20
  51. package/dist/database/seeder.js +0 -37
  52. package/dist/database/sqlite.d.ts +0 -26
  53. package/dist/database/sqlite.js +0 -211
  54. package/dist/database/telemetry.d.ts +0 -35
  55. package/dist/database/telemetry.js +0 -41
  56. package/dist/database/toon.d.ts +0 -33
  57. package/dist/database/toon.js +0 -244
  58. package/dist/database/types.d.ts +0 -71
  59. package/dist/database/types.js +0 -2
  60. package/dist/database/zpack.d.ts +0 -75
  61. package/dist/database/zpack.js +0 -616
  62. package/dist/functions/ai.d.ts +0 -126
  63. package/dist/functions/ai.js +0 -387
  64. package/dist/functions/index.d.ts +0 -223
  65. package/dist/functions/index.js +0 -686
  66. package/dist/functions/security.d.ts +0 -15
  67. package/dist/functions/security.js +0 -46
  68. package/dist/functions/toon.d.ts +0 -10
  69. package/dist/functions/toon.js +0 -214
  70. package/dist/functions/worker.d.ts +0 -5
  71. package/dist/functions/worker.js +0 -35
  72. package/dist/index.d.ts +0 -8
  73. package/dist/migrations/1767521950635_test_migration.d.ts +0 -3
  74. package/dist/migrations/1767521950635_test_migration.js +0 -11
  75. package/dist/migrations/1767522158826_create_users_table.d.ts +0 -2
  76. package/dist/migrations/1767522158826_create_users_table.js +0 -11
  77. package/dist/package.json +0 -79
  78. package/dist/tests/test.d.ts +0 -1
  79. package/dist/tests/test.js +0 -26
  80. package/dist/zero.config.d.ts +0 -10
  81. package/dist/zero.config.js +0 -13
@@ -1,616 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- exports.ZPackAdapter = exports.ZPackDatabase = void 0;
7
- const IDatabase_1 = require("./IDatabase");
8
- const fs_1 = __importDefault(require("fs"));
9
- const fsp = fs_1.default.promises;
10
- const zlib_1 = __importDefault(require("zlib"));
11
- /**
12
- * ZPackDatabase: Low-level Binary Storage
13
- */
14
- class ZPackDatabase {
15
- constructor(filePath, options = {}) {
16
- this.fd = null;
17
- this.fileSize = 0n;
18
- this.index = new Map();
19
- this.deleted = new Set();
20
- this.version = 1;
21
- this._nextId = 1;
22
- this._writeQueue = Promise.resolve();
23
- this._closed = false;
24
- this._contentEnd = 0n;
25
- if (!filePath || typeof filePath !== "string")
26
- throw new Error("ZPackDatabase: 'filePath' zorunludur.");
27
- this.filePath = filePath;
28
- this._autoFlush = options.autoFlush === true;
29
- this._compression = options.compression === true;
30
- }
31
- async open() {
32
- if (this.fd)
33
- return;
34
- try {
35
- this.fd = await fsp.open(this.filePath, fs_1.default.constants.O_RDWR);
36
- }
37
- catch (err) {
38
- if (err && err.code === "ENOENT") {
39
- this.fd = await fsp.open(this.filePath, fs_1.default.constants.O_RDWR | fs_1.default.constants.O_CREAT);
40
- }
41
- else
42
- throw err;
43
- }
44
- const stat = await this.fd.stat();
45
- this.fileSize = BigInt(stat.size);
46
- this._contentEnd = this.fileSize;
47
- if (!(await this._tryLoadIndexFromFooter()))
48
- await this._scanAndRebuildIndex();
49
- if (this.index.size > 0) {
50
- let maxId = 0;
51
- for (const docId of this.index.keys())
52
- if (docId > maxId)
53
- maxId = docId;
54
- this._nextId = maxId + 1;
55
- }
56
- }
57
- async close() {
58
- if (this._closed || !this.fd)
59
- return;
60
- await this._writeFooter();
61
- await this.fd.close();
62
- this.fd = null;
63
- this._closed = true;
64
- }
65
- async vacuum() {
66
- this._ensureOpen();
67
- return this._enqueue(async () => {
68
- const tempPath = this.filePath + ".tmp";
69
- const tempDb = new ZPackDatabase(tempPath, { autoFlush: false, compression: this._compression });
70
- await tempDb.open();
71
- for (const docId of this.index.keys()) {
72
- const doc = await this.get(docId);
73
- if (doc)
74
- await tempDb.insert(doc, docId);
75
- }
76
- await tempDb.close();
77
- await this.fd.close();
78
- await fsp.rename(tempPath, this.filePath);
79
- this.fd = null;
80
- this.index.clear();
81
- this.deleted.clear();
82
- await this.open();
83
- });
84
- }
85
- async insert(document, docId) {
86
- this._ensureOpen();
87
- const payload = this._encodeDocument(document, docId);
88
- return this._enqueue(async () => {
89
- const writeOffset = this.fileSize;
90
- await this.fd.write(payload, 0, payload.length, Number(writeOffset));
91
- this.fileSize = writeOffset + BigInt(payload.length);
92
- this._contentEnd = this.fileSize;
93
- const parsed = this._peekDocMeta(payload);
94
- if (parsed.fieldCount === 0) {
95
- this.index.delete(parsed.docId);
96
- this.deleted.add(parsed.docId);
97
- }
98
- else {
99
- this.index.set(parsed.docId, writeOffset);
100
- this.deleted.delete(parsed.docId);
101
- if (parsed.docId >= this._nextId)
102
- this._nextId = parsed.docId + 1;
103
- }
104
- if (this._autoFlush)
105
- await this._internalWriteFooter();
106
- return parsed.docId;
107
- });
108
- }
109
- async insertBatch(documents) {
110
- this._ensureOpen();
111
- if (!Array.isArray(documents) || documents.length === 0)
112
- return [];
113
- const payloads = [];
114
- const metas = [];
115
- for (const doc of documents) {
116
- const buf = this._encodeDocument(doc);
117
- payloads.push(buf);
118
- metas.push(this._peekDocMeta(buf));
119
- }
120
- const totalLen = payloads.reduce((s, b) => s + b.length, 0);
121
- const buffer = Buffer.alloc(totalLen);
122
- let pos = 0;
123
- for (const b of payloads) {
124
- b.copy(buffer, pos);
125
- pos += b.length;
126
- }
127
- return this._enqueue(async () => {
128
- const writeOffset = this.fileSize;
129
- await this.fd.write(buffer, 0, buffer.length, Number(writeOffset));
130
- let cur = writeOffset;
131
- const ids = [];
132
- for (let i = 0; i < payloads.length; i++) {
133
- const meta = metas[i];
134
- if (meta.fieldCount === 0) {
135
- this.index.delete(meta.docId);
136
- this.deleted.add(meta.docId);
137
- }
138
- else {
139
- this.index.set(meta.docId, cur);
140
- this.deleted.delete(meta.docId);
141
- if (meta.docId >= this._nextId)
142
- this._nextId = meta.docId + 1;
143
- }
144
- ids.push(meta.docId);
145
- cur += BigInt(payloads[i].length);
146
- }
147
- this.fileSize = writeOffset + BigInt(buffer.length);
148
- this._contentEnd = this.fileSize;
149
- if (this._autoFlush)
150
- await this._internalWriteFooter();
151
- return ids;
152
- });
153
- }
154
- async delete(docId) {
155
- const tomb = this._encodeTombstone(docId);
156
- await this._enqueue(async () => {
157
- const writeOffset = this.fileSize;
158
- await this.fd.write(tomb, 0, tomb.length, Number(writeOffset));
159
- this.fileSize = writeOffset + BigInt(tomb.length);
160
- this._contentEnd = this.fileSize;
161
- this.index.delete(docId);
162
- this.deleted.add(docId);
163
- if (this._autoFlush)
164
- await this._internalWriteFooter();
165
- });
166
- }
167
- async get(docId) {
168
- this._ensureOpen();
169
- const offset = this.index.get(docId);
170
- if (offset === undefined)
171
- return null;
172
- const header = Buffer.alloc(6);
173
- const { bytesRead: hread } = await this.fd.read(header, 0, header.length, Number(offset));
174
- if (hread !== header.length)
175
- return null;
176
- const docLength = header.readUInt16LE(0);
177
- const totalSize = 2 + docLength;
178
- const buf = Buffer.alloc(totalSize);
179
- const { bytesRead } = await this.fd.read(buf, 0, totalSize, Number(offset));
180
- if (bytesRead !== totalSize)
181
- return null;
182
- return this._decodeDocument(buf).document;
183
- }
184
- keys() { return Array.from(this.index.keys()); }
185
- _ensureOpen() {
186
- if (!this.fd)
187
- throw new Error("ZPackDatabase: önce 'open()' çağrılmalı.");
188
- if (this._closed)
189
- throw new Error("ZPackDatabase: dosya kapalı.");
190
- }
191
- _enqueue(taskFn) {
192
- this._writeQueue = this._writeQueue.then(taskFn, taskFn);
193
- return this._writeQueue;
194
- }
195
- async _internalWriteFooter() {
196
- const entries = Array.from(this.index.entries());
197
- const footerSize = 9 + entries.length * 12;
198
- const footer = Buffer.alloc(footerSize + 4);
199
- footer.write("ZPCK", 0, "utf8");
200
- footer.writeUInt8(this.version, 4);
201
- footer.writeUInt32LE(entries.length, 5);
202
- let p = 9;
203
- for (const [id, off] of entries) {
204
- footer.writeUInt32LE(id, p);
205
- p += 4;
206
- footer.writeUInt32LE(Number(off & 0xffffffffn), p);
207
- p += 4;
208
- footer.writeUInt32LE(Number((off >> 32n) & 0xffffffffn), p);
209
- p += 4;
210
- }
211
- footer.writeUInt32LE(footerSize, p);
212
- const writeOffset = this.fileSize;
213
- await this.fd.write(footer, 0, footer.length, Number(writeOffset));
214
- this._contentEnd = writeOffset;
215
- this.fileSize = writeOffset + BigInt(footer.length);
216
- }
217
- async _writeFooter() {
218
- await this._enqueue(() => this._internalWriteFooter());
219
- }
220
- _encodeDocument(document, docId) {
221
- let id = docId ?? this._nextId++;
222
- if (this._compression) {
223
- const dataStr = JSON.stringify(document);
224
- const compressed = zlib_1.default.deflateSync(dataStr);
225
- const buf = Buffer.alloc(6 + compressed.length);
226
- buf.writeUInt16LE(4 + compressed.length, 0);
227
- buf.writeUInt32LE(id, 2);
228
- compressed.copy(buf, 6);
229
- return buf;
230
- }
231
- const fieldBuffers = [];
232
- for (const [key, value] of Object.entries(document)) {
233
- const keyBuf = Buffer.from(String(key), "utf8");
234
- const valBuf = Buffer.from(String(value), "utf8");
235
- const fb = Buffer.alloc(2 + keyBuf.length + valBuf.length);
236
- fb.writeUInt8(keyBuf.length, 0);
237
- keyBuf.copy(fb, 1);
238
- fb.writeUInt8(valBuf.length, 1 + keyBuf.length);
239
- valBuf.copy(fb, 2 + keyBuf.length);
240
- fieldBuffers.push(fb);
241
- }
242
- const payloadSize = 4 + fieldBuffers.reduce((s, b) => s + b.length, 0);
243
- const buf = Buffer.alloc(2 + payloadSize);
244
- buf.writeUInt16LE(payloadSize, 0);
245
- buf.writeUInt32LE(id, 2);
246
- let offset = 6;
247
- for (const b of fieldBuffers) {
248
- b.copy(buf, offset);
249
- offset += b.length;
250
- }
251
- return buf;
252
- }
253
- _decodeDocument(buf) {
254
- const payloadSize = buf.readUInt16LE(0);
255
- const docId = buf.readUInt32LE(2);
256
- if (this._compression && payloadSize > 4) {
257
- try {
258
- const decompressed = zlib_1.default.inflateSync(buf.subarray(6));
259
- return { docId, fieldCount: 1, document: JSON.parse(decompressed.toString()) };
260
- }
261
- catch (e) { }
262
- }
263
- let p = 6;
264
- const end = 2 + payloadSize;
265
- const obj = {};
266
- let fields = 0;
267
- while (p < end) {
268
- if (p + 1 > end)
269
- break;
270
- const klen = buf.readUInt8(p);
271
- p += 1;
272
- if (p + klen > end)
273
- break;
274
- const key = buf.toString("utf8", p, p + klen);
275
- p += klen;
276
- if (p + 1 > end)
277
- break;
278
- const vlen = buf.readUInt8(p);
279
- p += 1;
280
- if (p + vlen > end)
281
- break;
282
- const val = buf.toString("utf8", p, p + vlen);
283
- p += vlen;
284
- obj[key] = val;
285
- fields += 1;
286
- }
287
- return { docId, fieldCount: fields, document: obj };
288
- }
289
- _encodeTombstone(docId) {
290
- const buf = Buffer.alloc(6);
291
- buf.writeUInt16LE(4, 0);
292
- buf.writeUInt32LE(docId, 2);
293
- return buf;
294
- }
295
- _peekDocMeta(encodedBuf) {
296
- const payloadSize = encodedBuf.readUInt16LE(0);
297
- const docId = encodedBuf.readUInt32LE(2);
298
- return { docId, fieldCount: payloadSize > 4 ? 1 : 0 };
299
- }
300
- async _tryLoadIndexFromFooter() {
301
- if (this.fileSize < 13n)
302
- return false;
303
- const sizeBuf = Buffer.alloc(4);
304
- await this.fd.read(sizeBuf, 0, 4, Number(this.fileSize - 4n));
305
- const footerSize = sizeBuf.readUInt32LE(0);
306
- if (footerSize < 9 || BigInt(footerSize) + 4n > this.fileSize)
307
- return false;
308
- const footerStart = this.fileSize - 4n - BigInt(footerSize);
309
- const footer = Buffer.alloc(footerSize);
310
- await this.fd.read(footer, 0, footerSize, Number(footerStart));
311
- if (footer.toString("utf8", 0, 4) !== "ZPCK" || footer.readUInt8(4) !== this.version)
312
- return false;
313
- const count = footer.readUInt32LE(5);
314
- let p = 9;
315
- this.index.clear();
316
- for (let i = 0; i < count; i++) {
317
- const id = footer.readUInt32LE(p);
318
- p += 4;
319
- const lo = footer.readUInt32LE(p);
320
- p += 4;
321
- const hi = footer.readUInt32LE(p);
322
- p += 4;
323
- this.index.set(id, (BigInt(hi) << 32n) + BigInt(lo));
324
- }
325
- this._contentEnd = footerStart;
326
- return true;
327
- }
328
- async _scanAndRebuildIndex() {
329
- this.index.clear();
330
- this.deleted.clear();
331
- let offset = 0n;
332
- const headerBuf = Buffer.alloc(2);
333
- while (offset + 2n <= this.fileSize) {
334
- const { bytesRead } = await this.fd.read(headerBuf, 0, 2, Number(offset));
335
- if (bytesRead < 2)
336
- break;
337
- const payloadSize = headerBuf.readUInt16LE(0);
338
- const idBuf = Buffer.alloc(4);
339
- await this.fd.read(idBuf, 0, 4, Number(offset + 2n));
340
- const docId = idBuf.readUInt32LE(0);
341
- if (payloadSize === 4)
342
- this.index.delete(docId);
343
- else
344
- this.index.set(docId, offset);
345
- offset += BigInt(2 + payloadSize);
346
- }
347
- }
348
- }
349
- exports.ZPackDatabase = ZPackDatabase;
350
- /**
351
- * ZPackAdapter: IDatabase Implementation
352
- */
353
- class ZPackAdapter extends IDatabase_1.IDatabase {
354
- constructor(config) {
355
- super();
356
- this.tableMaxId = new Map();
357
- this.keyIndex = new Map();
358
- this.rowCache = new Map();
359
- this.secondary = new Map();
360
- this.indexedFields = new Map();
361
- this._isClosing = false;
362
- this._executing = Promise.resolve();
363
- this.db = new ZPackDatabase(config.path, { autoFlush: !!config.autoFlush, compression: !!config.cache });
364
- if (config.indexFields) {
365
- for (const [table, fields] of Object.entries(config.indexFields)) {
366
- this.indexedFields.set(table, new Set(fields));
367
- }
368
- }
369
- this.initPromise = this._init();
370
- }
371
- async _init() {
372
- await this.db.open();
373
- for (const physicalDocId of this.db.keys()) {
374
- const doc = await this.db.get(physicalDocId);
375
- if (!doc || !doc.t || isNaN(Number(doc._id)))
376
- continue;
377
- const table = String(doc.t), idNum = Number(doc._id);
378
- await this.ensureTable(table);
379
- this.keyIndex.get(table).set(idNum, BigInt(physicalDocId));
380
- if (idNum > (this.tableMaxId.get(table) || 0))
381
- this.tableMaxId.set(table, idNum);
382
- this._updateSecondaryIndex(table, idNum, doc);
383
- }
384
- }
385
- async _execute(fn) {
386
- if (this._isClosing)
387
- throw new Error("ZPack: Adaptör kapanıyor.");
388
- const next = this._executing.then(async () => {
389
- if (this._isClosing)
390
- return;
391
- await this.initPromise;
392
- return fn();
393
- });
394
- this._executing = next.catch(() => { });
395
- return next;
396
- }
397
- // --- INTERNAL RAW METHODS (No Queue) to prevent deadlocks ---
398
- async _rawSelect(table, where = null) {
399
- await this.ensureTable(table);
400
- if (where && Object.keys(where).length === 1) {
401
- const [field, value] = Object.entries(where)[0];
402
- const index = this.secondary.get(table)?.get(field);
403
- if (index) {
404
- const matches = index.get(String(value));
405
- if (matches) {
406
- const results = [];
407
- for (const logicalId of matches) {
408
- const physicalId = this.keyIndex.get(table).get(logicalId);
409
- if (physicalId !== undefined) {
410
- const doc = await this.db.get(Number(physicalId));
411
- if (doc)
412
- results.push(doc);
413
- }
414
- }
415
- return results;
416
- }
417
- return [];
418
- }
419
- }
420
- const results = [];
421
- for (const [logicalId, physicalId] of this.keyIndex.get(table).entries()) {
422
- let row = this.rowCache.get(table).get(logicalId);
423
- if (!row) {
424
- const doc = await this.db.get(Number(physicalId));
425
- if (!doc)
426
- continue;
427
- row = doc;
428
- this.rowCache.get(table).set(logicalId, row);
429
- }
430
- if (this._matches(row, where))
431
- results.push({ ...row });
432
- }
433
- return results;
434
- }
435
- async ensureTable(table) {
436
- if (!this.tableMaxId.has(table)) {
437
- this.tableMaxId.set(table, 0);
438
- this.keyIndex.set(table, new Map());
439
- this.rowCache.set(table, new Map());
440
- this.secondary.set(table, new Map());
441
- }
442
- }
443
- _updateSecondaryIndex(table, logicalId, data, oldData = null) {
444
- const fields = this.indexedFields.get(table);
445
- if (!fields)
446
- return;
447
- const tableIndex = this.secondary.get(table);
448
- for (const field of fields) {
449
- if (!tableIndex.has(field))
450
- tableIndex.set(field, new Map());
451
- const fieldMap = tableIndex.get(field);
452
- if (oldData && oldData[field] !== undefined)
453
- fieldMap.get(String(oldData[field]))?.delete(logicalId);
454
- if (data[field] !== undefined) {
455
- const newVal = String(data[field]);
456
- if (!fieldMap.has(newVal))
457
- fieldMap.set(newVal, new Set());
458
- fieldMap.get(newVal).add(logicalId);
459
- }
460
- }
461
- }
462
- _coerce(table, data, id) {
463
- const out = { t: table, _id: String(id) };
464
- for (const [k, v] of Object.entries(data || {})) {
465
- if (k !== 't' && k !== '_id')
466
- out[k] = typeof v === 'string' ? v : JSON.stringify(v);
467
- }
468
- return out;
469
- }
470
- _matches(row, where) {
471
- if (!where || Object.keys(where).length === 0)
472
- return true;
473
- return Object.entries(where).every(([k, v]) => String(row[k]) === String(v));
474
- }
475
- // --- PUBLIC METHODS (With Queue) ---
476
- async select(table, where = null) {
477
- return this._execute(() => this._rawSelect(table, where));
478
- }
479
- async selectOne(table, where = null) {
480
- const res = await this.select(table, where);
481
- return res[0] || null;
482
- }
483
- async insert(table, data) {
484
- return this._execute(async () => {
485
- await this.ensureTable(table);
486
- await this.runHooks('beforeInsert', table, data);
487
- const nextId = (this.tableMaxId.get(table) || 0) + 1;
488
- const record = this._coerce(table, data, nextId);
489
- const physicalId = await this.db.insert(record);
490
- this.tableMaxId.set(table, nextId);
491
- this.keyIndex.get(table).set(nextId, BigInt(physicalId));
492
- const fullRow = { _id: nextId, ...data };
493
- this.rowCache.get(table).set(nextId, fullRow);
494
- this._updateSecondaryIndex(table, nextId, fullRow);
495
- await this.runHooks('afterInsert', table, fullRow);
496
- return nextId;
497
- });
498
- }
499
- async update(table, data, where) {
500
- return this._execute(async () => {
501
- const rows = await this._rawSelect(table, where);
502
- for (const row of rows) {
503
- const logicalId = Number(row._id);
504
- await this.runHooks('beforeUpdate', table, { old: row, new: data });
505
- const merged = { ...row, ...data };
506
- const record = this._coerce(table, merged, logicalId);
507
- const physicalId = await this.db.insert(record);
508
- this.keyIndex.get(table).set(logicalId, BigInt(physicalId));
509
- this.rowCache.get(table).set(logicalId, merged);
510
- this._updateSecondaryIndex(table, logicalId, merged, row);
511
- await this.runHooks('afterUpdate', table, merged);
512
- }
513
- return rows.length;
514
- });
515
- }
516
- async delete(table, where) {
517
- return this._execute(async () => {
518
- const rows = await this._rawSelect(table, where);
519
- for (const row of rows) {
520
- const logicalId = Number(row._id);
521
- await this.runHooks('beforeDelete', table, row);
522
- const physicalId = this.keyIndex.get(table).get(logicalId);
523
- if (physicalId !== undefined) {
524
- await this.db.delete(Number(physicalId));
525
- this.keyIndex.get(table).delete(logicalId);
526
- this.rowCache.get(table).delete(logicalId);
527
- this._updateSecondaryIndex(table, logicalId, {}, row);
528
- }
529
- await this.runHooks('afterDelete', table, row);
530
- }
531
- return rows.length;
532
- });
533
- }
534
- async set(table, data, where) {
535
- return this._execute(async () => {
536
- const existing = await this._rawSelect(table, where);
537
- if (existing.length > 0) {
538
- // Update logic here directly using _raw logic
539
- const row = existing[0];
540
- const logicalId = Number(row._id);
541
- const merged = { ...row, ...data };
542
- const record = this._coerce(table, merged, logicalId);
543
- const physicalId = await this.db.insert(record);
544
- this.keyIndex.get(table).set(logicalId, BigInt(physicalId));
545
- this.rowCache.get(table).set(logicalId, merged);
546
- this._updateSecondaryIndex(table, logicalId, merged, row);
547
- return logicalId;
548
- }
549
- else {
550
- // Insert logic here directly
551
- const nextId = (this.tableMaxId.get(table) || 0) + 1;
552
- const record = this._coerce(table, { ...where, ...data }, nextId);
553
- const physicalId = await this.db.insert(record);
554
- this.tableMaxId.set(table, nextId);
555
- this.keyIndex.get(table).set(nextId, BigInt(physicalId));
556
- const fullRow = { _id: nextId, ...where, ...data };
557
- this.rowCache.get(table).set(nextId, fullRow);
558
- this._updateSecondaryIndex(table, nextId, fullRow);
559
- return nextId;
560
- }
561
- });
562
- }
563
- async bulkInsert(table, dataArray) {
564
- return this._execute(async () => {
565
- for (const d of dataArray) {
566
- const nextId = (this.tableMaxId.get(table) || 0) + 1;
567
- const record = this._coerce(table, d, nextId);
568
- const physicalId = await this.db.insert(record);
569
- this.tableMaxId.set(table, nextId);
570
- this.keyIndex.get(table).set(nextId, BigInt(physicalId));
571
- const fullRow = { _id: nextId, ...d };
572
- this.rowCache.get(table).set(nextId, fullRow);
573
- this._updateSecondaryIndex(table, nextId, fullRow);
574
- }
575
- return dataArray.length;
576
- });
577
- }
578
- async increment(table, incs, where = {}) {
579
- return this._execute(async () => {
580
- const rows = await this._rawSelect(table, where);
581
- for (const row of rows) {
582
- const logicalId = Number(row._id);
583
- const merged = { ...row };
584
- for (const [f, v] of Object.entries(incs))
585
- merged[f] = (Number(merged[f]) || 0) + v;
586
- const record = this._coerce(table, merged, logicalId);
587
- const physicalId = await this.db.insert(record);
588
- this.keyIndex.get(table).set(logicalId, BigInt(physicalId));
589
- this.rowCache.get(table).set(logicalId, merged);
590
- this._updateSecondaryIndex(table, logicalId, merged, row);
591
- }
592
- return rows.length;
593
- });
594
- }
595
- async decrement(table, decs, where = {}) {
596
- const incs = {};
597
- for (const k in decs)
598
- incs[k] = -decs[k];
599
- return this.increment(table, incs, where);
600
- }
601
- async vacuum() {
602
- return this._execute(async () => {
603
- await this.db.vacuum();
604
- });
605
- }
606
- async close() {
607
- this._isClosing = true;
608
- try {
609
- await this._executing;
610
- await this.db.close();
611
- }
612
- catch (e) { }
613
- }
614
- }
615
- exports.ZPackAdapter = ZPackAdapter;
616
- exports.default = ZPackAdapter;