json-database-st 1.0.4 → 1.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/JSONDatabase.js +485 -0
- package/README.md +76 -57
- package/package.json +1 -1
package/JSONDatabase.js
ADDED
|
@@ -0,0 +1,485 @@
|
|
|
1
|
+
// File: JSONDatabase.js
|
|
2
|
+
// Final, Complete, and Secure Version
|
|
3
|
+
|
|
4
|
+
const fs = require('fs').promises;
|
|
5
|
+
const path = require('path');
|
|
6
|
+
const crypto = require('crypto');
|
|
7
|
+
const _ = require('lodash');
|
|
8
|
+
const EventEmitter = require('events');
|
|
9
|
+
|
|
10
|
+
// --- Custom Error Class Hierarchy ---
// All database failures derive from DBError so callers can catch the whole
// family with a single `instanceof DBError` check.

/** Base error for all database-specific issues. */
class DBError extends Error {
  constructor(msg) {
    super(msg);
    // Surface the concrete subclass name in stack traces and logs.
    this.name = this.constructor.name;
  }
}

/** Error during database file initialization or parsing. */
class DBInitializationError extends DBError {}

/** Error within a user-provided transaction function. */
class TransactionError extends DBError {}

/** Error when data fails schema validation. */
class ValidationError extends DBError {
  /**
   * @param {string} msg - Human-readable summary.
   * @param {any[]} issues - Validator-specific issue list (e.g. from Zod/Joi).
   */
  constructor(msg, issues) {
    super(msg);
    this.issues = issues;
  }
}

/** Error related to index integrity (e.g., unique constraint violation). */
class IndexViolationError extends DBError {}

/** Error for security-related issues like path traversal or bad keys. */
class SecurityError extends DBError {}
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
// --- Type Definitions for Clarity ---
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* @typedef {object} BatchOperationSet
|
|
40
|
+
* @property {'set'} type
|
|
41
|
+
* @property {string | string[]} path
|
|
42
|
+
* @property {any} value
|
|
43
|
+
*/
|
|
44
|
+
|
|
45
|
+
/**
|
|
46
|
+
* @typedef {object} BatchOperationDelete
|
|
47
|
+
* @property {'delete'} type
|
|
48
|
+
* @property {string | string[]} path
|
|
49
|
+
*/
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* @typedef {object} BatchOperationPush
|
|
53
|
+
* @property {'push'} type
|
|
54
|
+
* @property {string | string[]} path
|
|
55
|
+
* @property {any[]} values - Items to push uniquely using deep comparison.
|
|
56
|
+
*/
|
|
57
|
+
|
|
58
|
+
/**
|
|
59
|
+
* @typedef {object} BatchOperationPull
|
|
60
|
+
* @property {'pull'} type
|
|
61
|
+
* @property {string | string[]} path
|
|
62
|
+
* @property {any[]} values - Items to remove using deep comparison.
|
|
63
|
+
*/
|
|
64
|
+
|
|
65
|
+
/**
|
|
66
|
+
* @typedef {BatchOperationSet | BatchOperationDelete | BatchOperationPush | BatchOperationPull} BatchOperation
|
|
67
|
+
*/
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* @typedef {object} IndexDefinition
|
|
71
|
+
* @property {string} name - The unique name for the index.
|
|
72
|
+
* @property {string | string[]} path - The lodash path to the collection object (e.g., 'users').
|
|
73
|
+
* @property {string} field - The property field within each collection item to index (e.g., 'email').
|
|
74
|
+
* @property {boolean} [unique=false] - If true, enforces that the indexed field must be unique across the collection.
|
|
75
|
+
*/
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
// --- Cryptography Constants ---
// AES-256-GCM is an AEAD cipher: it provides both confidentiality and
// integrity (tampering is detected via the authentication tag).
const ALGORITHM = 'aes-256-gcm';
// Length in bytes of the random initialization vector generated per write.
const IV_LENGTH = 16;
// GCM authentication tag size in bytes.
// NOTE(review): this constant appears unused by the class below (the tag
// length is implicit in createCipheriv/getAuthTag) — confirm before removal.
const AUTH_TAG_LENGTH = 16;
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
/**
 * A robust, secure, promise-based JSON file database with atomic operations,
 * indexing, schema validation, and events.
 * Includes encryption-at-rest and path traversal protection.
 *
 * @class JSONDatabase
 * @extends {EventEmitter}
 */
class JSONDatabase extends EventEmitter {
  /**
   * Creates a database instance.
   *
   * @param {string} filename - Database file path.
   * @param {object} [options] - Configuration options.
   * @param {string} [options.encryptionKey=null] - A 32-byte (64-character hex) secret key for encryption. If provided, enables encryption-at-rest. **MANAGE THIS KEY SECURELY.**
   * @param {boolean} [options.prettyPrint=false] - Pretty-print JSON output (only if not encrypted).
   * @param {boolean} [options.writeOnChange=true] - Only write to disk if data has changed.
   * @param {object} [options.schema=null] - A validation schema (e.g., from Zod) with a `safeParse` method.
   * @param {IndexDefinition[]} [options.indices=[]] - An array of index definitions for fast lookups.
   * @throws {SecurityError} If the filename is invalid or attempts path traversal.
   * @throws {SecurityError} If an encryption key is provided but is not a valid 64-character hex string.
   */
  constructor(filename, options = {}) {
    super();

    // --- Security Check: Path Traversal ---
    // BUGFIX: the previous prefix test (resolvedPath.startsWith(workingDir))
    // accepted sibling directories, e.g. "/app-evil/db.json" when cwd is
    // "/app". path.relative() is immune to that partial-prefix match.
    const resolvedPath = path.resolve(filename);
    const workingDir = process.cwd();
    const relativePath = path.relative(workingDir, resolvedPath);
    if (relativePath.startsWith('..') || path.isAbsolute(relativePath)) {
      throw new SecurityError(`Path traversal detected. Database path must be within the project directory: ${workingDir}`);
    }
    this.filename = /\.json$/.test(resolvedPath) ? resolvedPath : `${resolvedPath}.json`;

    // --- Security Check: Encryption Key ---
    // BUGFIX: the old check contained a dead clause (`!options.encryptionKey`
    // inside `options.encryptionKey && ...`), and Buffer.from(str, 'hex')
    // silently stops decoding at the first non-hex character. Validate the
    // key format explicitly instead.
    if (options.encryptionKey &&
        (typeof options.encryptionKey !== 'string' || !/^[0-9a-fA-F]{64}$/.test(options.encryptionKey))) {
      throw new SecurityError('Encryption key must be a 32-byte (64-character hex) string.');
    }

    this.config = {
      prettyPrint: options.prettyPrint === true,
      writeOnChange: options.writeOnChange !== false,
      schema: options.schema || null,
      indices: options.indices || [],
      encryptionKey: options.encryptionKey ? Buffer.from(options.encryptionKey, 'hex') : null,
    };

    this.cache = null;                                  // in-memory copy of the entire database
    this.writeLock = Promise.resolve();                 // serializes all write operations
    this.stats = { reads: 0, writes: 0, cacheHits: 0 };
    this._indices = new Map();                          // indexName -> Map(indexedValue -> objectKey)

    // Asynchronously initialize. Operations will queue behind this promise.
    this._initPromise = this._initialize();
  }

  // --- Encryption & Decryption ---

  /** @private Serializes and encrypts `data` (AES-256-GCM) into a JSON envelope of hex fields. */
  _encrypt(data) {
    const iv = crypto.randomBytes(IV_LENGTH);
    const cipher = crypto.createCipheriv(ALGORITHM, this.config.encryptionKey, iv);
    const jsonString = JSON.stringify(data);
    const encrypted = Buffer.concat([cipher.update(jsonString, 'utf8'), cipher.final()]);
    const authTag = cipher.getAuthTag(); // GCM tag authenticates the ciphertext
    return JSON.stringify({
      iv: iv.toString('hex'),
      tag: authTag.toString('hex'),
      content: encrypted.toString('hex'),
    });
  }

  /**
   * @private Decrypts an envelope produced by _encrypt().
   * @throws {SecurityError} On any parse/key/tag failure (deliberately generic
   *         to avoid leaking which step failed).
   */
  _decrypt(encryptedPayload) {
    try {
      const payload = JSON.parse(encryptedPayload);
      const iv = Buffer.from(payload.iv, 'hex');
      const authTag = Buffer.from(payload.tag, 'hex');
      const encryptedContent = Buffer.from(payload.content, 'hex');
      const decipher = crypto.createDecipheriv(ALGORITHM, this.config.encryptionKey, iv);
      decipher.setAuthTag(authTag);
      const decrypted = decipher.update(encryptedContent, 'hex', 'utf8') + decipher.final('utf8');
      return JSON.parse(decrypted);
    } catch (e) {
      throw new SecurityError('Decryption failed. The file may be corrupted, tampered with, or the encryption key is incorrect.');
    }
  }

  // --- Private Core Methods ---

  /** @private Kicks off the initialization process (load file, build indices). */
  async _initialize() {
    try {
      await this._refreshCache();
      this._rebuildAllIndices();
    } catch (err) {
      const initError = new DBInitializationError(`Failed to initialize database: ${err.message}`);
      this.emit('error', initError);
      console.error(`[JSONDatabase] FATAL: Initialization failed for ${this.filename}. The database is in an unusable state.`, err);
      throw initError;
    }
  }

  /** @private Reads file, decrypts if necessary, and populates cache. Creates the file on first use. */
  async _refreshCache() {
    try {
      const fileContent = await fs.readFile(this.filename, 'utf8');
      if (this.config.encryptionKey) {
        this.cache = fileContent.trim() === '' ? {} : this._decrypt(fileContent);
      } else {
        this.cache = fileContent.trim() === '' ? {} : JSON.parse(fileContent);
      }
      this.stats.reads++;
    } catch (err) {
      if (err.code === 'ENOENT') {
        console.warn(`[JSONDatabase] File ${this.filename} not found. Creating.`);
        this.cache = {};
        const initialContent = this.config.encryptionKey ? this._encrypt({}) : '{}';
        // Robustness: create the parent directory if it does not exist yet,
        // otherwise the initial write below would also fail with ENOENT.
        await fs.mkdir(path.dirname(this.filename), { recursive: true });
        await fs.writeFile(this.filename, initialContent, 'utf8');
        this.stats.writes++;
      } else if (err instanceof SyntaxError && !this.config.encryptionKey) {
        throw new DBInitializationError(`Failed to parse JSON from ${this.filename}. File is corrupted.`);
      } else {
        throw err; // Re-throw security, crypto, and other errors
      }
    }
  }

  /** @private Ensures all operations wait for initialization to complete. */
  async _ensureInitialized() {
    return this._initPromise;
  }

  /**
   * @private Performs an atomic write operation.
   * All writers are chained on `writeLock` so they execute strictly one at a
   * time; the operation runs against a deep clone so a failure never corrupts
   * the in-memory cache.
   */
  async _atomicWrite(operationFn) {
    await this._ensureInitialized();

    const run = this.writeLock.then(async () => {
      const oldData = this.cache;
      const dataToModify = _.cloneDeep(oldData);

      try {
        const newData = operationFn(dataToModify);
        if (newData === undefined) {
          throw new TransactionError("Atomic operation function returned undefined. Aborting to prevent data loss.");
        }

        if (this.config.schema) {
          const validationResult = this.config.schema.safeParse(newData);
          if (!validationResult.success) {
            throw new ValidationError('Schema validation failed.', validationResult.error.issues);
          }
        }

        // Skip the disk write entirely when nothing changed.
        if (this.config.writeOnChange && _.isEqual(newData, oldData)) {
          return oldData;
        }

        // May throw IndexViolationError (unique constraint) before anything is persisted.
        this._updateIndices(oldData, newData);

        const contentToWrite = this.config.encryptionKey
          ? this._encrypt(newData)
          : JSON.stringify(newData, null, this.config.prettyPrint ? 2 : 0);

        await fs.writeFile(this.filename, contentToWrite, 'utf8');

        this.cache = newData;
        this.stats.writes++;

        this.emit('write', { filename: this.filename, timestamp: Date.now() });
        this.emit('change', { oldValue: oldData, newValue: newData });

        return newData;

      } catch (error) {
        // BUGFIX: _updateIndices may have partially mutated the in-memory
        // indices before this failure (unique violation, or a failed disk
        // write after the indices were updated), leaving them out of sync
        // with the untouched cache. Rebuild them from the cache so index
        // state always matches the persisted data.
        this._rebuildAllIndices();
        this.emit('error', error);
        console.error("[JSONDatabase] Atomic write failed. No changes were saved.", error);
        throw error;
      }
    });

    // BUGFIX: previously `this.writeLock = this.writeLock.then(...)` stored
    // the possibly-rejected promise, so a single failed write poisoned the
    // queue and rejected every subsequent operation. Chain future writers on
    // a settled promise while still propagating the rejection to this caller.
    this.writeLock = run.catch(() => {});
    return run;
  }

  // --- Indexing ---

  /** @private Clears and rebuilds all defined indices from the current cache. */
  _rebuildAllIndices() {
    this._indices.clear();
    for (const indexDef of this.config.indices) {
      this._indices.set(indexDef.name, new Map());
    }
    if (this.config.indices.length > 0 && !_.isEmpty(this.cache)) {
      this._updateIndices({}, this.cache); // Treat it as a full "add" operation
    }
    console.log(`[JSONDatabase] Rebuilt ${this.config.indices.length} indices for ${this.filename}.`);
  }

  /**
   * @private Compares old and new data to update indices efficiently.
   * @throws {IndexViolationError} When a unique index would contain duplicates.
   */
  _updateIndices(oldData, newData) {
    for (const indexDef of this.config.indices) {
      const collectionPath = indexDef.path;
      const field = indexDef.field;
      const indexMap = this._indices.get(indexDef.name);

      const oldCollection = _.get(oldData, collectionPath, {});
      const newCollection = _.get(newData, collectionPath, {});

      const oldKeys = Object.keys(oldCollection);
      const newKeys = Object.keys(newCollection);

      const addedKeys = _.difference(newKeys, oldKeys);
      const removedKeys = _.difference(oldKeys, newKeys);
      const potentiallyModifiedKeys = _.intersection(oldKeys, newKeys);

      // Drop index entries for items removed from the collection.
      for (const key of removedKeys) {
        const oldItem = oldCollection[key];
        if (oldItem && oldItem[field] !== undefined) {
          indexMap.delete(oldItem[field]);
        }
      }

      // Register newly added items, enforcing uniqueness if requested.
      for (const key of addedKeys) {
        const newItem = newCollection[key];
        const indexValue = newItem?.[field];
        if (indexValue !== undefined) {
          if (indexDef.unique && indexMap.has(indexValue)) {
            throw new IndexViolationError(`Unique index '${indexDef.name}' violated for value '${indexValue}'.`);
          }
          indexMap.set(indexValue, key);
        }
      }

      // Re-point entries whose indexed field value changed.
      for (const key of potentiallyModifiedKeys) {
        const oldItem = oldCollection[key];
        const newItem = newCollection[key];
        const oldIndexValue = oldItem?.[field];
        const newIndexValue = newItem?.[field];

        if (!_.isEqual(oldItem, newItem) && oldIndexValue !== newIndexValue) {
          if (oldIndexValue !== undefined) indexMap.delete(oldIndexValue);
          if (newIndexValue !== undefined) {
            if (indexDef.unique && indexMap.has(newIndexValue)) {
              throw new IndexViolationError(`Unique index '${indexDef.name}' violated for value '${newIndexValue}'.`);
            }
            indexMap.set(newIndexValue, key);
          }
        }
      }
    }
  }

  // --- Public API ---

  /** Reads a value at `path` from the in-memory cache. */
  async get(path, defaultValue) {
    await this._ensureInitialized();
    this.stats.cacheHits++;
    return _.get(this.cache, path, defaultValue);
  }

  /** Returns true if a value exists at `path`. */
  async has(path) {
    await this._ensureInitialized();
    this.stats.cacheHits++;
    return _.has(this.cache, path);
  }

  /** Sets `value` at `path` and persists atomically. */
  async set(path, value) {
    await this._atomicWrite(data => {
      _.set(data, path, value);
      return data;
    });
  }

  /** Deletes the value at `path`. @returns {Promise<boolean>} Whether a value was removed. */
  async delete(path) {
    let deleted = false;
    await this._atomicWrite(data => {
      deleted = _.unset(data, path);
      return data;
    });
    return deleted;
  }

  /** Appends `items` to the array at `path`, skipping deep-equal duplicates. */
  async push(path, ...items) {
    if (items.length === 0) return;
    await this._atomicWrite(data => {
      const arr = _.get(data, path);
      const targetArray = Array.isArray(arr) ? arr : [];
      items.forEach(item => {
        if (!targetArray.some(existing => _.isEqual(existing, item))) {
          targetArray.push(item);
        }
      });
      _.set(data, path, targetArray);
      return data;
    });
  }

  /** Removes all deep-equal occurrences of `itemsToRemove` from the array at `path`. */
  async pull(path, ...itemsToRemove) {
    if (itemsToRemove.length === 0) return;
    await this._atomicWrite(data => {
      const arr = _.get(data, path);
      if (Array.isArray(arr)) {
        _.pullAllWith(arr, itemsToRemove, _.isEqual);
      }
      return data;
    });
  }

  /** Runs `transactionFn(data)` as a single atomic unit; it must return the modified data. */
  async transaction(transactionFn) {
    return this._atomicWrite(transactionFn);
  }

  /**
   * Applies multiple operations in one atomic disk write.
   * @param {BatchOperation[]} ops
   * @param {{stopOnError?: boolean}} [options] - When false (default), a bad
   *        operation is logged and skipped; when true, the whole batch aborts.
   */
  async batch(ops, options = { stopOnError: false }) {
    if (!Array.isArray(ops) || ops.length === 0) return;

    await this._atomicWrite(data => {
      for (const [index, op] of ops.entries()) {
        try {
          if (!op || !op.type || op.path === undefined) throw new Error("Invalid operation format: missing type or path.");

          switch (op.type) {
            case 'set':
              if (!op.hasOwnProperty('value')) throw new Error("Set operation missing 'value'.");
              _.set(data, op.path, op.value);
              break;
            case 'delete':
              _.unset(data, op.path);
              break;
            case 'push': {
              if (!Array.isArray(op.values)) throw new Error("Push operation 'values' must be an array.");
              const arr = _.get(data, op.path);
              const targetArray = Array.isArray(arr) ? arr : [];
              op.values.forEach(item => {
                if (!targetArray.some(existing => _.isEqual(existing, item))) targetArray.push(item);
              });
              _.set(data, op.path, targetArray);
              break;
            }
            case 'pull': {
              if (!Array.isArray(op.values)) throw new Error("Pull operation 'values' must be an array.");
              const pullArr = _.get(data, op.path);
              if (Array.isArray(pullArr)) _.pullAllWith(pullArr, op.values, _.isEqual);
              break;
            }
            default:
              throw new Error(`Unsupported operation type: '${op.type}'.`);
          }
        } catch (err) {
          const errorMessage = `[JSONDatabase] Batch failed at operation index ${index} (type: ${op?.type}): ${err.message}`;
          if (options.stopOnError) {
            throw new Error(errorMessage);
          } else {
            console.error(errorMessage);
          }
        }
      }
      return data;
    });
  }

  /** Linear-scan search over the collection at `collectionPath` using a lodash predicate. */
  async find(collectionPath, predicate) {
    await this._ensureInitialized();
    const collection = _.get(this.cache, collectionPath);
    if (typeof collection !== 'object' || collection === null) return undefined;

    this.stats.cacheHits++;
    return _.find(collection, predicate);
  }

  /** O(1) lookup of an item via a previously defined index. */
  async findByIndex(indexName, value) {
    await this._ensureInitialized();
    if (!this._indices.has(indexName)) {
      throw new Error(`Index with name '${indexName}' does not exist.`);
    }

    this.stats.cacheHits++;
    const indexMap = this._indices.get(indexName);
    const objectKey = indexMap.get(value);

    if (objectKey === undefined) return undefined;

    const indexDef = this.config.indices.find(i => i.name === indexName);
    return _.get(this.cache, [..._.toPath(indexDef.path), objectKey]);
  }

  /** Replaces the entire database content with an empty object. */
  async clear() {
    console.warn(`[JSONDatabase] Clearing all data from ${this.filename}.`);
    await this._atomicWrite(() => ({}));
  }

  /** Returns a snapshot of read/write/cache-hit counters. */
  getStats() {
    return { ...this.stats };
  }

  /** Waits for pending writes, then releases the cache, indices, and listeners. */
  async close() {
    await this.writeLock;

    this.cache = null;
    this._indices.clear();
    this.removeAllListeners();
    this._initPromise = null;

    const finalStats = JSON.stringify(this.getStats());
    console.log(`[JSONDatabase] Closed connection to ${this.filename}. Final Stats: ${finalStats}`);
  }
}
|
|
484
|
+
|
|
485
|
+
module.exports = JSONDatabase;
|
package/README.md
CHANGED
|
@@ -1,73 +1,95 @@
|
|
|
1
|
-
# JSON Database
|
|
1
|
+
# JSON Database ST
|
|
2
2
|
|
|
3
|
-
[](https://badge.fury.io/js/json-database-st)
|
|
4
|
+
[](https://www.npmjs.com/package/json-database-st)
|
|
4
5
|
[](https://opensource.org/licenses/MIT)
|
|
5
6
|
|
|
6
|
-
A
|
|
7
|
+
A secure, performant, and feature-rich JSON file database for Node.js. Designed for projects that need simple, persistent data storage without the overhead of a traditional database server, but with modern features like **encryption, indexing, and schema validation**.
|
|
7
8
|
|
|
8
|
-
Ideal for small projects,
|
|
9
|
+
Ideal for small to medium-sized projects, configuration management, user session data, or any application where data safety and integrity are critical.
|
|
9
10
|
|
|
10
11
|
## Features
|
|
11
12
|
|
|
12
|
-
*
|
|
13
|
-
* **
|
|
14
|
-
* **
|
|
15
|
-
* **
|
|
16
|
-
|
|
17
|
-
*
|
|
18
|
-
*
|
|
19
|
-
*
|
|
20
|
-
|
|
21
|
-
*
|
|
13
|
+
* **🔒 Security First:**
|
|
14
|
+
* **Encryption at Rest:** Built-in AES-256-GCM encryption protects your data on disk.
|
|
15
|
+
* **Path Traversal Protection:** Prevents malicious file path inputs.
|
|
16
|
+
* **Secure by Default:** Fails safely if data is tampered with or the key is wrong.
|
|
17
|
+
|
|
18
|
+
* **⚡ High-Performance Indexing:**
|
|
19
|
+
* Create indexes on your data fields (e.g., `users.email`).
|
|
20
|
+
* Enjoy near-instantaneous `O(1)` lookups with `findByIndex()`, avoiding slow full-database scans.
|
|
21
|
+
|
|
22
|
+
* **🤝 Atomic & Reliable:**
|
|
23
|
+
* **Atomic Writes:** All write operations (`set`, `transaction`, `batch`, etc.) are queued and executed atomically, preventing data corruption.
|
|
24
|
+
* **Transactions:** Execute complex multi-step operations as a single, indivisible unit.
|
|
25
|
+
* **Batching:** Perform multiple simple operations in a single, efficient disk write.
|
|
26
|
+
|
|
27
|
+
* **✅ Data Integrity:**
|
|
28
|
+
* **Schema Validation:** Integrate with libraries like Zod or Joi to enforce data structures on every write, preventing bad data from ever being saved.
|
|
29
|
+
* **Deep Uniqueness:** The `push()` method automatically prevents duplicate entries in arrays using deep object comparison.
|
|
30
|
+
|
|
31
|
+
* **📢 Modern & DX-Focused API:**
|
|
32
|
+
* **Promise-based:** Fully asynchronous `async/await` friendly API.
|
|
33
|
+
* **Event-Driven:** Emits `write`, `change`, and `error` events for reactive programming, auditing, or real-time updates.
|
|
34
|
+
* **Intuitive & Powerful:** A clean API (`get`, `set`, `find`) powered by `lodash` for flexible path notation.
|
|
22
35
|
|
|
23
36
|
## Installation
|
|
24
37
|
|
|
25
38
|
```bash
|
|
26
|
-
#
|
|
39
|
+
# This package requires lodash as a peer dependency
|
|
27
40
|
npm install json-database-st lodash
|
|
28
41
|
```
|
|
29
42
|
|
|
43
|
+
## Quick Start: Secure & Indexed Database
|
|
30
44
|
|
|
31
|
-
|
|
45
|
+
This example demonstrates setting up a secure, encrypted database with a high-speed index on user emails.
|
|
32
46
|
|
|
33
47
|
```javascript
|
|
34
|
-
const JSONDatabase = require('json-database-st');
|
|
48
|
+
const JSONDatabase = require('json-database-st');
|
|
35
49
|
const path = require('path');
|
|
50
|
+
const crypto = require('crypto');
|
|
51
|
+
|
|
52
|
+
// 1. Generate a secure key (run once and store it in environment variables)
|
|
53
|
+
// const encryptionKey = crypto.randomBytes(32).toString('hex');
|
|
54
|
+
// console.log('Your secure encryption key:', encryptionKey);
|
|
55
|
+
const ENCRYPTION_KEY = 'd0a7e8c1b2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5e6f7a8b9'; // Example key
|
|
36
56
|
|
|
37
|
-
// Initialize
|
|
38
|
-
const db = new JSONDatabase(path.join(__dirname, '
|
|
57
|
+
// 2. Initialize the database with encryption and an index
|
|
58
|
+
const db = new JSONDatabase(path.join(__dirname, 'secure-data.json'), {
|
|
59
|
+
encryptionKey: ENCRYPTION_KEY,
|
|
60
|
+
indices: [
|
|
61
|
+
{ name: 'user-email', path: 'users', field: 'email', unique: true }
|
|
62
|
+
]
|
|
63
|
+
});
|
|
39
64
|
|
|
40
65
|
async function run() {
|
|
41
66
|
try {
|
|
42
|
-
// Set data
|
|
43
|
-
await db.set('
|
|
44
|
-
await db.set('
|
|
45
|
-
|
|
46
|
-
//
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
await db.
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
console.log('
|
|
61
|
-
|
|
62
|
-
// Get Stats
|
|
63
|
-
console.log('DB Stats:', db.getStats());
|
|
67
|
+
// 3. Set data. The index will be updated automatically.
|
|
68
|
+
await db.set('users.alice', { email: 'alice@example.com', name: 'Alice' });
|
|
69
|
+
await db.set('users.bob', { email: 'bob@example.com', name: 'Bob' });
|
|
70
|
+
|
|
71
|
+
// This would throw an IndexViolationError because the email is not unique
|
|
72
|
+
// await db.set('users.impostor', { email: 'alice@example.com', name: 'Impostor' });
|
|
73
|
+
|
|
74
|
+
// 4. Use the high-speed index for an instant lookup
|
|
75
|
+
console.log('--- Finding user with index ---');
|
|
76
|
+
const alice = await db.findByIndex('user-email', 'alice@example.com');
|
|
77
|
+
console.log('Found user:', alice); // -> { email: 'alice@example.com', name: 'Alice' }
|
|
78
|
+
|
|
79
|
+
// 5. Perform a transaction
|
|
80
|
+
await db.transaction(data => {
|
|
81
|
+
data.users.bob.lastLogin = Date.now();
|
|
82
|
+
return data; // Must return the modified data
|
|
83
|
+
});
|
|
84
|
+
|
|
85
|
+
console.log('\n--- Bob after transaction ---');
|
|
86
|
+
console.log(await db.get('users.bob'));
|
|
64
87
|
|
|
65
88
|
} catch (err) {
|
|
66
89
|
console.error('Database operation failed:', err);
|
|
67
90
|
} finally {
|
|
68
|
-
// IMPORTANT: Always close the DB
|
|
91
|
+
// 6. IMPORTANT: Always close the DB for a graceful shutdown
|
|
69
92
|
await db.close();
|
|
70
|
-
console.log('Database closed.');
|
|
71
93
|
}
|
|
72
94
|
}
|
|
73
95
|
|
|
@@ -76,9 +98,9 @@ run();
|
|
|
76
98
|
|
|
77
99
|
## Documentation
|
|
78
100
|
|
|
79
|
-
**Full API details and advanced usage examples are available
|
|
101
|
+
**Full API details and advanced usage examples are available on the hosted documentation site:**
|
|
80
102
|
|
|
81
|
-
**[View Documentation](https://sethunthunder111.github.io/json-database-st/)**
|
|
103
|
+
**[View Full Documentation Website](https://sethunthunder111.github.io/json-database-st/)**
|
|
82
104
|
|
|
83
105
|
## API Summary
|
|
84
106
|
|
|
@@ -90,27 +112,24 @@ run();
|
|
|
90
112
|
* `async push(path, ...items)`
|
|
91
113
|
* `async pull(path, ...itemsToRemove)`
|
|
92
114
|
* `async transaction(asyncFn)`
|
|
93
|
-
* `async batch(operations)`
|
|
94
|
-
* `async
|
|
115
|
+
* `async batch(operations, [options])`
|
|
116
|
+
* `async find(collectionPath, predicate)`
|
|
117
|
+
* `async findByIndex(indexName, value)`
|
|
95
118
|
* `async clear()`
|
|
96
119
|
* `getStats()`
|
|
97
120
|
* `async close()`
|
|
98
|
-
*
|
|
99
|
-
|
|
100
|
-
## Concurrency and Atomicity
|
|
101
|
-
|
|
102
|
-
Writes are queued and executed one after another for a given instance, ensuring file integrity. Reads use an in-memory cache for speed. See Core Concepts in the full documentation for details.
|
|
121
|
+
* Events: `.on('write', handler)`, `.on('change', handler)`, `.on('error', handler)`
|
|
103
122
|
|
|
104
123
|
## Limitations
|
|
105
124
|
|
|
106
|
-
*
|
|
107
|
-
*
|
|
108
|
-
*
|
|
125
|
+
* **In-Memory Operation:** The entire database file is loaded into memory on initialization. This makes it extremely fast for reads but limits the practical file size to what can comfortably fit in your available RAM.
|
|
126
|
+
* **Single-Process Focus:** While writes are atomic, this library is designed for use by a single Node.js process. Using it with multiple processes writing to the same file (e.g., in a cluster) is not recommended and can lead to race conditions.
|
|
127
|
+
* **Not a Replacement for SQL/NoSQL Servers:** For very large datasets, high write concurrency, complex queries, or multi-process/multi-server needs, a dedicated database system like PostgreSQL, MongoDB, or SQLite is the appropriate choice.
|
|
109
128
|
|
|
110
129
|
## Contributing
|
|
111
130
|
|
|
112
|
-
Contributions
|
|
131
|
+
Contributions, issues, and feature requests are welcome! Please feel free to open an issue to discuss any significant changes.
|
|
113
132
|
|
|
114
133
|
## License
|
|
115
134
|
|
|
116
|
-
[MIT](LICENSE)
|
|
135
|
+
[MIT](LICENSE)
|
package/package.json
CHANGED