json-database-st 1.0.7 → 1.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/JSONDatabase.js +259 -160
  2. package/package.json +1 -1
package/JSONDatabase.js CHANGED
@@ -1,11 +1,13 @@
1
1
  // File: JSONDatabase.js
2
2
  // Final, Complete, and Secure Version (Patched)
3
3
 
4
- const fs = require('fs').promises;
5
- const path = require('path');
6
- const crypto = require('crypto');
7
- const _ = require('lodash');
8
- const EventEmitter = require('events');
4
+ const fs = require("fs").promises;
5
+ const path = require("path");
6
+ const crypto = require("crypto");
7
+ const _ = require("lodash");
8
+ const EventEmitter = require("events");
9
+ // --- FIX: Add dependency for cross-process file locking. Run `npm install proper-lockfile`.
10
+ const lockfile = require("proper-lockfile");
9
11
 
10
12
  // --- Custom Error Classes for Better Error Handling ---
11
13
 
@@ -32,7 +34,6 @@ class IndexViolationError extends DBError {}
32
34
  /** Error for security-related issues like path traversal or bad keys. */
33
35
  class SecurityError extends DBError {}
34
36
 
35
-
36
37
  // --- Type Definitions for Clarity ---
37
38
 
38
39
  /**
@@ -74,13 +75,11 @@ class SecurityError extends DBError {}
74
75
  * @property {boolean} [unique=false] - If true, enforces that the indexed field must be unique across the collection.
75
76
  */
76
77
 
77
-
78
78
  // --- Cryptography Constants ---
79
- const ALGORITHM = 'aes-256-gcm';
79
+ const ALGORITHM = "aes-256-gcm";
80
80
  const IV_LENGTH = 16;
81
81
  const AUTH_TAG_LENGTH = 16;
82
82
 
83
-
84
83
  /**
85
84
  * A robust, secure, promise-based JSON file database with atomic operations, indexing, schema validation, and events.
86
85
  * Includes encryption-at-rest and path traversal protection.
@@ -109,13 +108,23 @@ class JSONDatabase extends EventEmitter {
109
108
  const resolvedPath = path.resolve(filename);
110
109
  const workingDir = process.cwd();
111
110
  if (!resolvedPath.startsWith(workingDir)) {
112
- throw new SecurityError(`Path traversal detected. Database path must be within the project directory: ${workingDir}`);
111
+ throw new SecurityError(
112
+ `Path traversal detected. Database path must be within the project directory: ${workingDir}`
113
+ );
113
114
  }
114
- this.filename = /\.json$/.test(resolvedPath) ? resolvedPath : `${resolvedPath}.json`;
115
+ this.filename = /\.json$/.test(resolvedPath)
116
+ ? resolvedPath
117
+ : `${resolvedPath}.json`;
115
118
 
116
119
  // --- Security Check: Encryption Key ---
117
- if (options.encryptionKey && (!options.encryptionKey || Buffer.from(options.encryptionKey, 'hex').length !== 32)) {
118
- throw new SecurityError('Encryption key must be a 32-byte (64-character hex) string.');
120
+ if (
121
+ options.encryptionKey &&
122
+ (!options.encryptionKey ||
123
+ Buffer.from(options.encryptionKey, "hex").length !== 32)
124
+ ) {
125
+ throw new SecurityError(
126
+ "Encryption key must be a 32-byte (64-character hex) string."
127
+ );
119
128
  }
120
129
 
121
130
  this.config = {
@@ -123,7 +132,9 @@ class JSONDatabase extends EventEmitter {
123
132
  writeOnChange: options.writeOnChange !== false,
124
133
  schema: options.schema || null,
125
134
  indices: options.indices || [],
126
- encryptionKey: options.encryptionKey ? Buffer.from(options.encryptionKey, 'hex') : null,
135
+ encryptionKey: options.encryptionKey
136
+ ? Buffer.from(options.encryptionKey, "hex")
137
+ : null,
127
138
  };
128
139
 
129
140
  this.cache = null;
@@ -138,29 +149,44 @@ class JSONDatabase extends EventEmitter {
138
149
  // --- Encryption & Decryption ---
139
150
  _encrypt(data) {
140
151
  const iv = crypto.randomBytes(IV_LENGTH);
141
- const cipher = crypto.createCipheriv(ALGORITHM, this.config.encryptionKey, iv);
152
+ const cipher = crypto.createCipheriv(
153
+ ALGORITHM,
154
+ this.config.encryptionKey,
155
+ iv
156
+ );
142
157
  const jsonString = JSON.stringify(data);
143
- const encrypted = Buffer.concat([cipher.update(jsonString, 'utf8'), cipher.final()]);
158
+ const encrypted = Buffer.concat([
159
+ cipher.update(jsonString, "utf8"),
160
+ cipher.final(),
161
+ ]);
144
162
  const authTag = cipher.getAuthTag();
145
163
  return JSON.stringify({
146
- iv: iv.toString('hex'),
147
- tag: authTag.toString('hex'),
148
- content: encrypted.toString('hex'),
164
+ iv: iv.toString("hex"),
165
+ tag: authTag.toString("hex"),
166
+ content: encrypted.toString("hex"),
149
167
  });
150
168
  }
151
169
 
152
170
  _decrypt(encryptedPayload) {
153
171
  try {
154
172
  const payload = JSON.parse(encryptedPayload);
155
- const iv = Buffer.from(payload.iv, 'hex');
156
- const authTag = Buffer.from(payload.tag, 'hex');
157
- const encryptedContent = Buffer.from(payload.content, 'hex');
158
- const decipher = crypto.createDecipheriv(ALGORITHM, this.config.encryptionKey, iv);
173
+ const iv = Buffer.from(payload.iv, "hex");
174
+ const authTag = Buffer.from(payload.tag, "hex");
175
+ const encryptedContent = Buffer.from(payload.content, "hex");
176
+ const decipher = crypto.createDecipheriv(
177
+ ALGORITHM,
178
+ this.config.encryptionKey,
179
+ iv
180
+ );
159
181
  decipher.setAuthTag(authTag);
160
- const decrypted = decipher.update(encryptedContent, 'hex', 'utf8') + decipher.final('utf8');
182
+ const decrypted =
183
+ decipher.update(encryptedContent, "hex", "utf8") +
184
+ decipher.final("utf8");
161
185
  return JSON.parse(decrypted);
162
186
  } catch (e) {
163
- throw new SecurityError('Decryption failed. The file may be corrupted, tampered with, or the encryption key is incorrect.');
187
+ throw new SecurityError(
188
+ "Decryption failed. The file may be corrupted, tampered with, or the encryption key is incorrect."
189
+ );
164
190
  }
165
191
  }
166
192
 
@@ -168,13 +194,38 @@ class JSONDatabase extends EventEmitter {
168
194
 
169
195
  /** @private Kicks off the initialization process. */
170
196
  async _initialize() {
197
+ // --- FIX: Crash Recovery for Durable Writes ---
198
+ // Check if a temporary file exists from a previously failed write.
199
+ // If so, it represents the most recent state. We recover by renaming it.
200
+ const tempFile = this.filename + ".tmp";
201
+ try {
202
+ await fs.access(tempFile);
203
+ console.warn(
204
+ `[JSONDatabase] Found temporary file ${tempFile}. Recovering from a previous failed write.`
205
+ );
206
+ await fs.rename(tempFile, this.filename);
207
+ console.log(
208
+ `[JSONDatabase] Recovery successful. ${this.filename} has been restored.`
209
+ );
210
+ } catch (e) {
211
+ // This is the normal case where no temp file exists. Do nothing.
212
+ }
213
+
171
214
  try {
172
215
  await this._refreshCache();
173
216
  this._rebuildAllIndices();
174
217
  } catch (err) {
175
- const initError = new DBInitializationError(`Failed to initialize database: ${err.message}`);
176
- this.emit('error', initError);
177
- console.error(`[JSONDatabase] FATAL: Initialization failed for ${this.filename}. The database is in an unusable state.`, err);
218
+ const initError = new DBInitializationError(
219
+ `Failed to initialize database: ${err.message}`
220
+ );
221
+ this.emit("error", initError);
222
+ console.error(
223
+ `[JSONDatabase] FATAL: Initialization failed for ${this.filename}. The database is in an unusable state.`,
224
+ err
225
+ );
226
+ // --- ENHANCEMENT: Make the instance unusable if init fails ---
227
+ // By re-throwing here, the _initPromise will be rejected, and all subsequent
228
+ // operations waiting on _ensureInitialized() will fail immediately.
178
229
  throw initError;
179
230
  }
180
231
  }
@@ -182,22 +233,25 @@ class JSONDatabase extends EventEmitter {
182
233
  /** @private Reads file, decrypts if necessary, and populates cache. */
183
234
  async _refreshCache() {
184
235
  try {
185
- const fileContent = await fs.readFile(this.filename, 'utf8');
236
+ const fileContent = await fs.readFile(this.filename, "utf8");
186
237
  if (this.config.encryptionKey) {
187
- this.cache = fileContent.trim() === '' ? {} : this._decrypt(fileContent);
238
+ this.cache =
239
+ fileContent.trim() === "" ? {} : this._decrypt(fileContent);
188
240
  } else {
189
- this.cache = fileContent.trim() === '' ? {} : JSON.parse(fileContent);
241
+ this.cache = fileContent.trim() === "" ? {} : JSON.parse(fileContent);
190
242
  }
191
243
  this.stats.reads++;
192
244
  } catch (err) {
193
- if (err.code === 'ENOENT') {
194
- console.warn(`[JSONDatabase] File ${this.filename} not found. Creating.`);
245
+ if (err.code === "ENOENT") {
246
+ console.warn(
247
+ `[JSONDatabase] File ${this.filename} not found. Creating.`
248
+ );
195
249
  this.cache = {};
196
- const initialContent = this.config.encryptionKey ? this._encrypt({}) : '{}';
197
- await fs.writeFile(this.filename, initialContent, 'utf8');
198
- this.stats.writes++;
250
+ // Do not write file here; _atomicWrite will create it safely.
199
251
  } else if (err instanceof SyntaxError && !this.config.encryptionKey) {
200
- throw new DBInitializationError(`Failed to parse JSON from ${this.filename}. File is corrupted.`);
252
+ throw new DBInitializationError(
253
+ `Failed to parse JSON from ${this.filename}. File is corrupted.`
254
+ );
201
255
  } else {
202
256
  throw err; // Re-throw security, crypto, and other errors
203
257
  }
@@ -206,137 +260,171 @@ class JSONDatabase extends EventEmitter {
206
260
 
207
261
  /** @private Ensures all operations wait for initialization to complete. */
208
262
  async _ensureInitialized() {
209
- return this._initPromise;
263
+ // This promise will be rejected if _initialize() fails, stopping all operations.
264
+ return this._initPromise;
210
265
  }
211
266
 
212
267
  /** @private Performs an atomic write operation. */
213
268
  async _atomicWrite(operationFn) {
214
269
  await this._ensureInitialized();
215
270
 
271
+ // This promise chain ensures all writes *from this process* happen one after another.
216
272
  this.writeLock = this.writeLock.then(async () => {
217
- const oldData = this.cache;
218
- const dataToModify = _.cloneDeep(oldData);
219
-
273
+ let releaseLock;
220
274
  try {
221
- // --- FIX: Await the operation function in case it's async ---
275
+ // --- FIX: Acquire a cross-process lock to prevent race conditions.
276
+ // This will wait if another process (or this one) currently holds the lock.
277
+ releaseLock = await lockfile.lock(this.filename, {
278
+ stale: 7000, // Lock is considered stale after 7s
279
+ retries: {
280
+ retries: 5,
281
+ factor: 1.2,
282
+ minTimeout: 200,
283
+ },
284
+ });
285
+
286
+ // --- FIX: Refresh cache *after* acquiring the lock.
287
+ // This is critical to get the latest data if another process changed it.
288
+ await this._refreshCache();
289
+
290
+ const oldData = this.cache;
291
+ const dataToModify = _.cloneDeep(oldData);
292
+
222
293
  const newData = await operationFn(dataToModify);
223
294
 
224
295
  if (newData === undefined) {
225
- throw new TransactionError("Atomic operation function returned undefined. Aborting to prevent data loss.");
296
+ throw new TransactionError(
297
+ "Atomic operation function returned undefined. Aborting to prevent data loss. Did you forget to `return data`?"
298
+ );
226
299
  }
227
300
 
228
301
  if (this.config.schema) {
229
302
  const validationResult = this.config.schema.safeParse(newData);
230
303
  if (!validationResult.success) {
231
- throw new ValidationError('Schema validation failed.', validationResult.error.issues);
304
+ throw new ValidationError(
305
+ "Schema validation failed.",
306
+ validationResult.error.issues
307
+ );
232
308
  }
233
309
  }
234
310
 
311
+ this._updateIndices(oldData, newData);
312
+
235
313
  if (this.config.writeOnChange && _.isEqual(newData, oldData)) {
236
- return oldData;
314
+ return oldData; // Return the unchanged data
237
315
  }
238
-
239
- this._updateIndices(oldData, newData);
240
316
 
241
317
  const contentToWrite = this.config.encryptionKey
242
318
  ? this._encrypt(newData)
243
319
  : JSON.stringify(newData, null, this.config.prettyPrint ? 2 : 0);
244
-
245
- await fs.writeFile(this.filename, contentToWrite, 'utf8');
320
+
321
+ // --- FIX: Implement durable write. Write to temp file first.
322
+ const tempFile = this.filename + ".tmp";
323
+ await fs.writeFile(tempFile, contentToWrite, "utf8");
324
+ // --- FIX: Atomically rename temp file to the final filename.
325
+ await fs.rename(tempFile, this.filename);
246
326
 
247
327
  this.cache = newData;
248
328
  this.stats.writes++;
249
-
250
- this.emit('write', { filename: this.filename, timestamp: Date.now() });
251
- this.emit('change', { oldValue: oldData, newValue: newData });
252
329
 
253
- return newData;
330
+ this.emit("write", { filename: this.filename, timestamp: Date.now() });
331
+ this.emit("change", { oldValue: oldData, newValue: newData });
254
332
 
333
+ return newData;
255
334
  } catch (error) {
256
- this.emit('error', error);
257
- console.error("[JSONDatabase] Atomic write failed. No changes were saved.", error);
335
+ this.emit("error", error);
336
+ console.error(
337
+ "[JSONDatabase] Atomic write failed. No changes were saved.",
338
+ error
339
+ );
258
340
  throw error;
341
+ } finally {
342
+ // --- FIX: Always release the lock, even if an error occurred.
343
+ if (releaseLock) {
344
+ await releaseLock();
345
+ }
259
346
  }
260
347
  });
261
348
 
262
349
  return this.writeLock;
263
350
  }
264
-
351
+
265
352
  // --- Indexing ---
266
353
 
267
354
  /** @private Clears and rebuilds all defined indices from the current cache. */
268
355
  _rebuildAllIndices() {
269
- this._indices.clear();
270
- for (const indexDef of this.config.indices) {
271
- this._indices.set(indexDef.name, new Map());
272
- }
273
- if (this.config.indices.length > 0 && !_.isEmpty(this.cache)) {
274
- this._updateIndices({}, this.cache); // Treat it as a full "add" operation
275
- }
276
- console.log(`[JSONDatabase] Rebuilt ${this.config.indices.length} indices for ${this.filename}.`);
356
+ this._indices.clear();
357
+ for (const indexDef of this.config.indices) {
358
+ this._indices.set(indexDef.name, new Map());
359
+ }
360
+ if (this.config.indices.length > 0 && !_.isEmpty(this.cache)) {
361
+ // Rebuild by treating the current state as "new" and the previous state as empty.
362
+ this._updateIndices({}, this.cache);
363
+ }
364
+ console.log(
365
+ `[JSONDatabase] Rebuilt ${this.config.indices.length} indices for ${this.filename}.`
366
+ );
277
367
  }
278
368
 
279
- /** @private Compares old and new data to update indices efficiently. */
369
+ /**
370
+ * @private Compares old and new data to update indices efficiently.
371
+ * FIX: Replaced inefficient and buggy index update logic with a robust key-based comparison.
372
+ * This new implementation correctly handles additions, deletions, and in-place updates,
373
+ * and is significantly more performant.
374
+ */
280
375
  _updateIndices(oldData, newData) {
281
- for (const indexDef of this.config.indices) {
282
- const collectionPath = indexDef.path;
283
- const field = indexDef.field;
284
- const indexMap = this._indices.get(indexDef.name);
285
-
286
- const oldCollection = _.get(oldData, collectionPath, {});
287
- const newCollection = _.get(newData, collectionPath, {});
288
-
289
- const oldKeys = Object.keys(oldCollection);
290
- const newKeys = Object.keys(newCollection);
291
-
292
- const addedKeys = _.difference(newKeys, oldKeys);
293
- const removedKeys = _.difference(oldKeys, newKeys);
294
- const potentiallyModifiedKeys = _.intersection(oldKeys, newKeys);
295
-
296
- for (const key of removedKeys) {
297
- const oldItem = oldCollection[key];
298
- if (oldItem && oldItem[field] !== undefined) {
299
- indexMap.delete(oldItem[field]);
300
- }
301
- }
376
+ for (const indexDef of this.config.indices) {
377
+ const indexMap = this._indices.get(indexDef.name);
378
+ if (!indexMap) continue;
302
379
 
303
- for (const key of addedKeys) {
304
- const newItem = newCollection[key];
305
- const indexValue = newItem?.[field];
306
- if (indexValue !== undefined) {
307
- if (indexDef.unique && indexMap.has(indexValue)) {
308
- throw new IndexViolationError(`Unique index '${indexDef.name}' violated for value '${indexValue}'.`);
309
- }
310
- indexMap.set(indexValue, key);
311
- }
312
- }
380
+ const oldCollection = _.get(oldData, indexDef.path, {});
381
+ const newCollection = _.get(newData, indexDef.path, {});
382
+
383
+ if (!_.isObject(oldCollection) || !_.isObject(newCollection)) {
384
+ continue; // Indexing requires a collection (object or array).
385
+ }
386
+
387
+ const allKeys = _.union(_.keys(oldCollection), _.keys(newCollection));
388
+
389
+ for (const key of allKeys) {
390
+ const oldItem = oldCollection[key];
391
+ const newItem = newCollection[key];
392
+
393
+ if (_.isEqual(oldItem, newItem)) {
394
+ continue; // Item is unchanged, no index update needed.
395
+ }
313
396
 
314
- for (const key of potentiallyModifiedKeys) {
315
- const oldItem = oldCollection[key];
316
- const newItem = newCollection[key];
317
- const oldIndexValue = oldItem?.[field];
318
- const newIndexValue = newItem?.[field];
319
-
320
- if (!_.isEqual(oldItem, newItem) && oldIndexValue !== newIndexValue) {
321
- if (oldIndexValue !== undefined) indexMap.delete(oldIndexValue);
322
- if (newIndexValue !== undefined) {
323
- if (indexDef.unique && indexMap.has(newIndexValue)) {
324
- throw new IndexViolationError(`Unique index '${indexDef.name}' violated for value '${newIndexValue}'.`);
325
- }
326
- indexMap.set(newIndexValue, key);
327
- }
328
- }
397
+ const oldVal = oldItem?.[indexDef.field];
398
+ const newVal = newItem?.[indexDef.field];
399
+
400
+ if (_.isEqual(oldVal, newVal)) {
401
+ continue; // Indexed field's value is unchanged.
402
+ }
403
+
404
+ // 1. Remove the old value if it was indexed and pointed to this item.
405
+ if (oldVal !== undefined && indexMap.get(oldVal) === key) {
406
+ indexMap.delete(oldVal);
407
+ }
408
+
409
+ // 2. Add the new value if it's defined.
410
+ if (newVal !== undefined) {
411
+ // Check for unique constraint violation before adding.
412
+ if (indexDef.unique && indexMap.has(newVal)) {
413
+ throw new IndexViolationError(
414
+ `Unique index '${indexDef.name}' violated for value '${newVal}'.`
415
+ );
329
416
  }
417
+ indexMap.set(newVal, key);
418
+ }
330
419
  }
420
+ }
331
421
  }
332
422
 
333
-
334
423
  // --- Public API ---
335
424
 
336
425
  async get(path, defaultValue) {
337
426
  await this._ensureInitialized();
338
427
  this.stats.cacheHits++;
339
- // --- FIX: Handle undefined/null path to get the entire object ---
340
428
  if (path === undefined || path === null) {
341
429
  return this.cache;
342
430
  }
@@ -350,7 +438,7 @@ class JSONDatabase extends EventEmitter {
350
438
  }
351
439
 
352
440
  async set(path, value) {
353
- await this._atomicWrite(data => {
441
+ return this._atomicWrite((data) => {
354
442
  _.set(data, path, value);
355
443
  return data;
356
444
  });
@@ -358,7 +446,7 @@ class JSONDatabase extends EventEmitter {
358
446
 
359
447
  async delete(path) {
360
448
  let deleted = false;
361
- await this._atomicWrite(data => {
449
+ await this._atomicWrite((data) => {
362
450
  deleted = _.unset(data, path);
363
451
  return data;
364
452
  });
@@ -367,11 +455,11 @@ class JSONDatabase extends EventEmitter {
367
455
 
368
456
  async push(path, ...items) {
369
457
  if (items.length === 0) return;
370
- await this._atomicWrite(data => {
458
+ return this._atomicWrite((data) => {
371
459
  const arr = _.get(data, path);
372
460
  const targetArray = Array.isArray(arr) ? arr : [];
373
- items.forEach(item => {
374
- if (!targetArray.some(existing => _.isEqual(existing, item))) {
461
+ items.forEach((item) => {
462
+ if (!targetArray.some((existing) => _.isEqual(existing, item))) {
375
463
  targetArray.push(item);
376
464
  }
377
465
  });
@@ -382,7 +470,7 @@ class JSONDatabase extends EventEmitter {
382
470
 
383
471
  async pull(path, ...itemsToRemove) {
384
472
  if (itemsToRemove.length === 0) return;
385
- await this._atomicWrite(data => {
473
+ return this._atomicWrite((data) => {
386
474
  const arr = _.get(data, path);
387
475
  if (Array.isArray(arr)) {
388
476
  _.pullAllWith(arr, itemsToRemove, _.isEqual);
@@ -398,43 +486,49 @@ class JSONDatabase extends EventEmitter {
398
486
  async batch(ops, options = { stopOnError: false }) {
399
487
  if (!Array.isArray(ops) || ops.length === 0) return;
400
488
 
401
- await this._atomicWrite(data => {
489
+ return this._atomicWrite((data) => {
402
490
  for (const [index, op] of ops.entries()) {
403
491
  try {
404
- if (!op || !op.type || op.path === undefined) throw new Error("Invalid operation format: missing type or path.");
405
-
492
+ if (!op || !op.type || op.path === undefined)
493
+ throw new Error("Invalid operation format: missing type or path.");
494
+
406
495
  switch (op.type) {
407
- case 'set':
408
- if (!op.hasOwnProperty('value')) throw new Error("Set operation missing 'value'.");
496
+ case "set":
497
+ if (!op.hasOwnProperty("value"))
498
+ throw new Error("Set operation missing 'value'.");
409
499
  _.set(data, op.path, op.value);
410
500
  break;
411
- case 'delete':
501
+ case "delete":
412
502
  _.unset(data, op.path);
413
503
  break;
414
- case 'push':
415
- if (!Array.isArray(op.values)) throw new Error("Push operation 'values' must be an array.");
504
+ case "push":
505
+ if (!Array.isArray(op.values))
506
+ throw new Error("Push operation 'values' must be an array.");
416
507
  const arr = _.get(data, op.path);
417
508
  const targetArray = Array.isArray(arr) ? arr : [];
418
- op.values.forEach(item => {
419
- if (!targetArray.some(existing => _.isEqual(existing, item))) targetArray.push(item);
509
+ op.values.forEach((item) => {
510
+ if (!targetArray.some((existing) => _.isEqual(existing, item)))
511
+ targetArray.push(item);
420
512
  });
421
513
  _.set(data, op.path, targetArray);
422
514
  break;
423
- case 'pull':
424
- if (!Array.isArray(op.values)) throw new Error("Pull operation 'values' must be an array.");
515
+ case "pull":
516
+ if (!Array.isArray(op.values))
517
+ throw new Error("Pull operation 'values' must be an array.");
425
518
  const pullArr = _.get(data, op.path);
426
- if (Array.isArray(pullArr)) _.pullAllWith(pullArr, op.values, _.isEqual);
519
+ if (Array.isArray(pullArr))
520
+ _.pullAllWith(pullArr, op.values, _.isEqual);
427
521
  break;
428
522
  default:
429
523
  throw new Error(`Unsupported operation type: '${op.type}'.`);
430
524
  }
431
525
  } catch (err) {
432
- const errorMessage = `[JSONDatabase] Batch failed at operation index ${index} (type: ${op?.type}): ${err.message}`;
433
- if (options.stopOnError) {
434
- throw new Error(errorMessage);
435
- } else {
436
- console.error(errorMessage);
437
- }
526
+ const errorMessage = `[JSONDatabase] Batch failed at operation index ${index} (type: ${op?.type}): ${err.message}`;
527
+ if (options.stopOnError) {
528
+ throw new Error(errorMessage);
529
+ } else {
530
+ console.error(errorMessage);
531
+ }
438
532
  }
439
533
  }
440
534
  return data;
@@ -442,33 +536,36 @@ class JSONDatabase extends EventEmitter {
442
536
  }
443
537
 
444
538
  async find(collectionPath, predicate) {
445
- await this._ensureInitialized();
446
- const collection = _.get(this.cache, collectionPath);
447
- if (typeof collection !== 'object' || collection === null) return undefined;
448
-
449
- this.stats.cacheHits++;
450
- return _.find(collection, predicate);
539
+ await this._ensureInitialized();
540
+ const collection = _.get(this.cache, collectionPath);
541
+ if (typeof collection !== "object" || collection === null) return undefined;
542
+
543
+ this.stats.cacheHits++;
544
+ return _.find(collection, predicate);
451
545
  }
452
-
546
+
453
547
  async findByIndex(indexName, value) {
454
- await this._ensureInitialized();
455
- if (!this._indices.has(indexName)) {
456
- throw new Error(`Index with name '${indexName}' does not exist.`);
457
- }
548
+ await this._ensureInitialized();
549
+ if (!this._indices.has(indexName)) {
550
+ throw new Error(`Index with name '${indexName}' does not exist.`);
551
+ }
458
552
 
459
- this.stats.cacheHits++;
460
- const indexMap = this._indices.get(indexName);
461
- const objectKey = indexMap.get(value);
553
+ this.stats.cacheHits++;
554
+ const indexMap = this._indices.get(indexName);
555
+ const objectKey = indexMap.get(value);
462
556
 
463
- if (objectKey === undefined) return undefined;
557
+ if (objectKey === undefined) return undefined;
464
558
 
465
- const indexDef = this.config.indices.find(i => i.name === indexName);
466
- return _.get(this.cache, [..._.toPath(indexDef.path), objectKey]);
559
+ const indexDef = this.config.indices.find((i) => i.name === indexName);
560
+ const fullPath = [..._.toPath(indexDef.path), objectKey];
561
+ return _.get(this.cache, fullPath);
467
562
  }
468
563
 
469
564
  async clear() {
470
- console.warn(`[JSONDatabase] Clearing all data from ${this.filename}.`);
471
- await this._atomicWrite(() => ({}));
565
+ console.warn(
566
+ `[JSONDatabase] Clearing all data from ${this.filename}. This action is irreversible.`
567
+ );
568
+ return this._atomicWrite(() => ({}));
472
569
  }
473
570
 
474
571
  getStats() {
@@ -477,15 +574,17 @@ class JSONDatabase extends EventEmitter {
477
574
 
478
575
  async close() {
479
576
  await this.writeLock;
480
-
577
+
481
578
  this.cache = null;
482
579
  this._indices.clear();
483
580
  this.removeAllListeners();
484
581
  this._initPromise = null;
485
582
 
486
583
  const finalStats = JSON.stringify(this.getStats());
487
- console.log(`[JSONDatabase] Closed connection to ${this.filename}. Final Stats: ${finalStats}`);
584
+ console.log(
585
+ `[JSONDatabase] Closed connection to ${this.filename}. Final Stats: ${finalStats}`
586
+ );
488
587
  }
489
588
  }
490
589
 
491
- module.exports = JSONDatabase;
590
+ module.exports = JSONDatabase;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "json-database-st",
3
- "version": "1.0.7",
3
+ "version": "1.0.9",
4
4
  "description": "A simple, promise-based JSON file database for Node.js with atomic operations and lodash integration.",
5
5
  "main": "JSONDatabase.js",
6
6
  "scripts": {