beanbagdb 0.0.5 → 0.5.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/release.yml +9 -6
- package/dist/beanbagdb.cjs.js +153242 -0
- package/dist/beanbagdb.cjs.js.map +1 -0
- package/dist/beanbagdb.esm.js +153237 -0
- package/dist/beanbagdb.esm.js.map +1 -0
- package/package.json +14 -4
- package/rollup.config.js +47 -0
- package/src/beanbagdb.js +580 -0
- package/src/couchdb.js +11 -10
- package/src/index.js +4 -577
- package/src/pouchdb.js +13 -11
- package/src/system_schema.js +4 -8
- package/src/utils.js +40 -0
- package/test/init.test.js +19 -19
- package/test/test1.js +26 -9
- package/test/helper.js +0 -12
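The layout change is the headline of this release: the core class moves out of package/src/index.js into a new package/src/beanbagdb.js, the CouchDB/PouchDB adapters import it directly, and Rollup (rollup.config.js) now emits prebuilt dist/ bundles in both CJS and ESM form. A minimal sketch of consuming the new named exports, assuming package.json points main/module at those bundles (the diff lists a package.json change but not its contents):

    // Hypothetical consumer of 0.5.2; the export names come from the new src/index.js shown below,
    // but resolving the bare "beanbagdb" specifier to dist/beanbagdb.esm.js is an assumption.
    import { BeanBagDB, BeanBagDB_CouchDB, BeanBagDB_PouchDB } from "beanbagdb";

    // Constructor signature (db_url, db_name, encryption_key) is taken from the adapter diffs below.
    const couch = new BeanBagDB_CouchDB("http://admin:secret@localhost:5984", "my_db", "a long encryption key");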
package/src/couchdb.js
CHANGED

@@ -1,9 +1,10 @@
-
-
+import { scryptSync, randomBytes, createCipheriv, createDecipheriv } from 'crypto';
+import BeanBagDB from "./beanbagdb.js";
+import nano from 'nano';
 
-class BeanBagDB_CouchDB extends
+class BeanBagDB_CouchDB extends BeanBagDB {
   constructor(db_url,db_name,encryption_key){
-    const cdb =
+    const cdb = nano(db_url);
     const doc_obj = {
       name: db_name,
       encryption_key: encryption_key,
@@ -36,17 +37,17 @@ class BeanBagDB_CouchDB extends SDB {
       },
       utils:{
         encrypt: (text,encryptionKey)=>{
-          const key =
-          const iv =
-          const cipher =
+          const key = scryptSync(encryptionKey, 'salt', 32); // Derive a 256-bit key
+          const iv = randomBytes(16); // Initialization vector
+          const cipher = createCipheriv('aes-256-cbc', key, iv);
           let encrypted = cipher.update(text, 'utf8', 'hex');
           encrypted += cipher.final('hex');
           return iv.toString('hex') + ':' + encrypted; // Prepend the IV for later use
         },
         decrypt : (encryptedText, encryptionKey)=>{
-          const key =
+          const key = scryptSync(encryptionKey, 'salt', 32); // Derive a 256-bit key
           const [iv, encrypted] = encryptedText.split(':').map(part => Buffer.from(part, 'hex'));
-          const decipher =
+          const decipher = createDecipheriv('aes-256-cbc', key, iv);
           let decrypted = decipher.update(encrypted, 'hex', 'utf8');
           decrypted += decipher.final('utf8');
           return decrypted;
@@ -60,4 +61,4 @@ class BeanBagDB_CouchDB extends SDB {
   }
 }
 
-
+export default BeanBagDB_CouchDB
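Both adapters fill in the previously stubbed crypto helpers with the same scheme: a 256-bit key derived from the user's encryption key via scryptSync with a fixed 'salt', AES-256-CBC with a random 16-byte IV, and the IV hex-prepended to the ciphertext. A standalone sketch of that round trip, lifted from the diff (the helper names here are illustrative, not part of the package API):

    import { scryptSync, randomBytes, createCipheriv, createDecipheriv } from 'crypto';

    const encrypt = (text, encryptionKey) => {
      const key = scryptSync(encryptionKey, 'salt', 32);       // same fixed-salt derivation as in the diff
      const iv = randomBytes(16);                               // fresh IV per message
      const cipher = createCipheriv('aes-256-cbc', key, iv);
      let encrypted = cipher.update(text, 'utf8', 'hex');
      encrypted += cipher.final('hex');
      return iv.toString('hex') + ':' + encrypted;              // stored as "ivhex:cipherhex"
    };

    const decrypt = (encryptedText, encryptionKey) => {
      const key = scryptSync(encryptionKey, 'salt', 32);
      const [iv, encrypted] = encryptedText.split(':').map(part => Buffer.from(part, 'hex'));
      const decipher = createDecipheriv('aes-256-cbc', key, iv);
      let decrypted = decipher.update(encrypted, 'hex', 'utf8');
      decrypted += decipher.final('utf8');
      return decrypted;
    };

    console.log(decrypt(encrypt('secret note', 'passphrase'), 'passphrase')); // -> "secret note"

Because the salt is a constant string, the same passphrase always derives the same AES key; only the per-message IV varies.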
package/src/index.js
CHANGED

@@ -1,578 +1,5 @@
-
-
-
-/**
- * This the core class. it is not very useful in itself but can be used to generate a sub class for a specific database for eg CouchDB.
- * It takes a db_instance argument, which , this class relies on perform CRUD operations on the data.
- * Why have a "dumb" class ? : So that the core functionalities remains in a single place and the multiple Databases can be supported.
- */
-class BeanBagDB {
-  /**
-   * @param {object} db_instance - Database object
-   * db_instance object contains 3 main keys :
-   * - `name` : the name of the local database
-   * - `encryption_key`: this is required for encrypting documents
-   * - `api` : this is an object that must contain database specific functions. This includes : `insert(doc)`: takes a doc and runs the db insertion function, `update(updated_doc)` : gets the updated document and updates it in the DB, `search(query)`: takes a query to fetch data from the DB (assuming array of JSON is returned ), `get(id)`: takes a document id and returns its content, `createIndex(filter)`: to create an index in the database based on a filter
-   * - `utils` : this includes `encrypt`, `decrypt`
-   */
-  constructor(db_instance) {
-    // data validation checks
-    this._check_required_fields(["name", "encryption_key", "api", "utils"],db_instance)
-    this._check_required_fields(["insert", "update", "delete", "search","get","createIndex"],db_instance.api)
-    this._check_required_fields(["encrypt", "decrypt","ping"],db_instance.utils)
+import BeanBagDB from "./beanbagdb";
+import BeanBagDB_CouchDB from "./couchdb";
+import BeanBagDB_PouchDB from "./pouchdb";
 
-
-    // db name should not be blank,
-
-    this.name = db_instance.name;
-    this.encryption_key = db_instance.encryption_key;
-
-    this.db_api = db_instance.api;
-    this.utils = db_instance.utils;
-
-    this._version = packageJson.version; // package version
-    this.ready_check = { initialized: false, latest: false };
-    console.log("Run ready() now");
-
-    this.plugins = {}
-  }
-
-  /**
-   * This is to check if the database is ready to be used. It it important to run this after the class is initialized.
-   */
-  async ready() {
-    console.log("Checking...");
-    // @TODO : ping the database
-    this.ready_check = await this._check_ready_to_use();
-    if (this.ready_check.initialized) {
-      console.log("Ready to use!");
-    }
-  }
-
-  check_if_ready(){
-    return this.ready_check.ready
-  }
-
-  /**
-   * Initializes the database making it ready to be used. Typically, required to run after every time package is updated to a new version.
-   * See the documentation on the architecture of the DB to understand what default schemas are required for a smooth functioning of the database
-   */
-  async initialize_db() {
-    try {
-      if (this.ready_check.initialized == false) {
-        // add the meta-schemas doc
-        let schema_schema_doc = this._get_blank_doc("schema");
-        schema_schema_doc.data = sys_sch.schema_schema;
-        await this.db_api.insert(schema_schema_doc);
-        // add system schemas
-        let keys = Object.keys(sys_sch.system_schemas);
-        for (let index = 0; index < keys.length; index++) {
-          const element = sys_sch.system_schemas[keys[index]];
-          let schema_record = this._get_blank_schema_doc(
-            "schema",
-            sys_sch.schema_schema["schema"],
-            element
-          );
-          await this.db_api.insert(schema_record);
-        }
-        // create an index
-        await this.db_api.createIndex({
-          index: { fields: ["schema", "data", "meta"] },
-        });
-        console.log("Database Indexed.");
-        // create the log doc
-        const log_schema = sys_sch.system_schemas["logs"]["schema"];
-        let log_doc = this._get_blank_schema_doc("system_logs", log_schema, {
-          logs: [
-            {
-              message: `Database is initialized with version ${this._version}.`,
-              on: this._get_now_unix_timestamp(),
-              human_date: new Date().toLocaleString(),
-            },
-          ],
-        });
-        await this.db_api.insert(log_doc);
-        // create the setting doc
-        const setting_schema = sys_sch.system_schemas["settings"]["schema"];
-        let setting_doc = this._get_blank_schema_doc(
-          "system_settings",
-          setting_schema,
-          {
-            name: "beanbagdb_version",
-            value: this._version,
-            user_editable: false,
-          }
-        );
-        await this.db_api.insert(setting_doc);
-        // finally update the flags
-        this.ready_check.initialized = true;
-        this.ready_check.latest = true;
-        console.log("Database initialized");
-      } else {
-        console.log("Database already initialized");
-        if (!this.ready_check.latest) {
-          // update to latest schema
-          this._update_system_schema();
-        } else {
-          console.log("Database already up to date");
-        }
-      }
-    } catch (error) {
-      console.log(error);
-      throw error;
-    }
-  }
-
-  /**
-   * Adds indexes for all the schemas in the data base. This is important to make search faster. This must be done every time a new schema is introduced in the database
-   */
-  async update_indexes() {
-    // @TODO check this. i don't the index created this way are actually useful in search.
-    let all_schemas_docs = await this.db_api.search({
-      selector: { schema: "schema" },
-    });
-    let indexes = [];
-    all_schemas_docs.docs.map((item) => {
-      Object.keys(item.data.schema.properties).map((key) => {
-        indexes.push("data." + key);
-      });
-    });
-    await this.db_api.createIndex({ index: { fields: indexes } });
-  }
-
-  /**
-   * Validates a data object against a provided JSON schema
-   * It relies on the Ajv package to make the validation.
-   * @param {Object} schema_obj - The JSON schema object to validate against
-   * @param {Object} data_obj - The data object to validate
-   * @throws {Error} If the data object does not conform to the schema
-   */
-  validate_data(schema_obj, data_obj) {
-    const ajv = new Ajv(); // options can be passed, e.g. {allErrors: true}
-    const validate = ajv.compile(schema_obj);
-    const valid = validate(data_obj);
-    if (!valid) {
-      console.log(validate.errors);
-      throw new Error(validate.errors);
-    }
-  }
-
-  /**
-   * Returns a document with the provided ID
-   * @param {String} doc_id - the doc Id (not the primary key)
-   * @param {Boolean} include_schema - whether to include the schema doc as well
-   * @returns {Object} {doc} or {doc,schema}
-   */
-  async get(doc_id,include_schema=false) {
-    let doc = await this.db_api.get(doc_id);
-    let schema = await this.get_schema_doc(doc.schema);
-    doc = this._decrypt_doc(schema, doc);
-    if(include_schema){
-      return {doc,schema}
-    }
-    return {doc};
-  }
-
-  /**
-   * Returns schema document for the given schema name s
-   * @param {String} schema_name - Schema name
-   */
-  async get_schema_doc(schema_name) {
-    let schemaSearch = await this.db_api.search({
-      selector: { schema: "schema", "data.name": schema_name },
-    });
-    if (schemaSearch.docs.length == 0) {
-      throw new Error("Schema not found");
-    }
-    return schemaSearch.docs[0]["data"];
-  }
-
-  /**
-   * Fetches a document based on a given schema and primary key.
-   * In case schema has a single record, leave the primary_key blank `[]`
-   * Can also be used to get special system docs such as settings
-   * @param {String} schema_name
-   * @param {Object} primary_key
-   * @returns object
-   */
-  async get_doc(schema_name, primary_key = {}) {
-    let s_doc = await this.get_schema_doc(schema_name);
-    let doc_obj;
-    if (
-      s_doc["settings"]["primary_keys"] &&
-      s_doc["settings"]["primary_keys"].length > 0
-    ) {
-      let A = s_doc["settings"]["primary_keys"];
-      let search_criteria = { schema: schema_name };
-      A.forEach((itm) => {
-        if (!primary_key[itm]) {
-          throw new Error(
-            "Incomplete Primary key set. Required field(s) : " + A.join(",")
-          );
-        }
-        search_criteria["data." + itm] = primary_key[itm];
-      });
-      let s = await this.search({ selector: search_criteria });
-      doc_obj = s.docs[0];
-    } else {
-      let s = await this.search({ selector: { schema: schema_name } });
-      if (s.docs.length > 1) {
-        throw new Error(
-          "Invalid schema. At least one primary key must be defined or set the singleRecord option to true. "
-        );
-      }
-      doc_obj = s.docs[0];
-    }
-    doc_obj = this._decrypt_doc(s_doc, doc_obj);
-    return doc_obj;
-  }
-
-  /**
-   * Searches for documents in the database for the specified query. The query are Mango queries.
-   * One field is mandatory : Schema
-   * E.g
-   * @param {Object} criteria
-   */
-  async search(criteria) {
-    if (!criteria["selector"]) {
-      throw new Error("Invalid search query.");
-    }
-    if (!criteria["selector"]["schema"]) {
-      throw new Error("The search criteria must contain the schema");
-    }
-    const results = await this.db_api.search(criteria);
-    return results;
-  }
-
-  /**
-   * Inserts a doc for the given schema
-   * @param {String} schema e.g "contact"
-   * @param {Object} data e.g {"name":"","mobile":""...}
-   * @param {Object} settings (optional)
-   */
-  async insert(schema, data, settings = {}) {
-    try {
-      let doc_obj = await this._insert_pre_checks(schema, data, settings);
-      let new_rec = await this.db_api.insert(doc_obj);
-      return { id: new_rec["id"] };
-    } catch (error) {
-      console.log(error);
-      throw error;
-    }
-  }
-
-
-
-
-  //** Update data */
-  /**
-   * Update data and meta of a doc.
-   *
-   * - Q: Which data fields can be edited ?
-   * - A: Depends on the setting.editable_fields. If this is blank, then all fields are editable.
-   * - Q: Are primary key fields editable ?
-   * - A: Yes. before making the update, a check is done to ensue the primary key policy is not violated
-   *
-   * @param {String} doc_id
-   * @param {String} rev_id
-   * @param {*} schema_name
-   * @param {doc_obj} updates {data:{},meta:{}}, need not be the full document, just the new values of all/some fields
-   * @param {Boolean} save_conflict = true -
-   * @returns
-   */
-  async update(doc_id, rev_id, updates, update_source="api",save_conflict = true) {
-    // making a big assumption here : primary key fields cannot be edited
-    // so updating the doc will not generate primary key conflicts
-    let req_data = await this.get(doc_id,true);
-    let schema = req_data.schema // await this.get_schema_doc(schema_name);
-    let full_doc = req_data.doc // await this.get(doc_id)["doc"];
-
-    // @TODO fix this : what to do if the rev id does not match
-    // if (full_doc["_rev"] != rev_id) {
-    //   // throw error , save conflicting doc separately by default
-    //   if (save_conflict) {
-    //     // save conflicting doc todo
-    //   }
-    // }
-
-    // blank check
-
-    // update new value depending on settings.editable_fields (if does not exists, all fields are editable)
-    let edit_fields = Object.keys(schema.schema.properties)
-    if(schema.settings["editable_fields"]&&schema.settings["editable_fields"].length>0){
-      edit_fields = schema.settings["editable_fields"]
-    }
-
-    // now generate the new doc with updates
-    let allowed_updates = this._filterObject(updates.data,edit_fields);
-    let updated_data = { ...full_doc.data, ...allowed_updates };
-
-    // validate data
-    this.validate_data(schema.schema, updated_data);
-
-    // primary key check if multiple records can be created
-    if(schema.settings["single_record"]==false){
-      if(schema.settings["primary_keys"]&&schema.settings["primary_keys"].length>0){
-        let pri_fields = schema.settings["primary_keys"]
-        let search_criteria = {schema:schema.name}
-        pri_fields.map(itm=>{search_criteria["data."+itm] = updated_data[itm]})
-        let search = await this.search({selection:search_criteria})
-        if(search.docs.length>0){
-          if(search.docs.length==1){
-            let thedoc = search.docs[0]
-            if(thedoc["_id"]!=doc_id){
-              throw new Error("Update not allowed. Document with the same primary key already exists")
-            }
-          }else{
-            throw new Error("There is something wrong with the schema")
-          }
-        }
-      }
-    }
-
-    // encrypt the data
-
-    full_doc["data"] = updated_data
-    full_doc = this._encrypt_doc(schema,full_doc);
-
-    if(updates.meta){
-      let m_sch = sys_sch.editable_metadata_schema
-      let editable_fields = Object.keys(m_sch["properties"])
-      let allowed_meta = this._filterObject(updates.meta,editable_fields)
-      this.validate_data(m_sch,allowed_meta)
-      full_doc["meta"] = {...full_doc["meta"],...allowed_meta}
-    }
-
-    full_doc.meta["updated_on"] = this._get_now_unix_timestamp()
-    full_doc.meta["updated_by"] = update_source
-    let up = await this.db_api.update(full_doc);
-    return up;
-  }
-
-  async delete(doc_id) {
-    await this.db_api.delete(doc_id)
-  }
-
-
-  async load_plugin(plugin_name,plugin_module){
-    this.plugins[plugin_name] = {}
-    for (let func_name in plugin_module){
-      if(typeof plugin_module[func_name]=='function'){
-        this.plugins[plugin_name][func_name] = plugin_module[func_name].bind(null,this)
-      }
-    }
-    // Check if the plugin has an on_load method and call it
-    if (typeof this.plugins[plugin_name].on_load === 'function') {
-      await this.plugins[plugin_name].on_load();
-    }
-  }
-
-  //////// Helper method ////////
-
-  _check_required_fields(requiredFields,obj){
-    for (const field of requiredFields) {
-      if (!obj[field]) {throw new Error(`${field} is required`);}
-    }
-  }
-
-  /**
-   *
-   * @param {*} obj
-   * @param {*} fields
-   * @returns
-   */
-  _filterObject(obj, fields) {
-    return fields.reduce((filteredObj, field) => {
-      if (Object.prototype.hasOwnProperty.call(obj, field)) {
-        filteredObj[field] = obj[field];
-      }
-      return filteredObj;
-    }, {});
-  }
-
-  /**
-   * Checks if the selected database is initialized for working with BeanBagDB. Also throws a warning if package version does not match with database version.
-   * Every time a database is initialized, a setting document `beanbagdb_version` is added. If this does not exists, the database is not initialized. If it exists but does not match the current version, a warning is shown.
-   * @returns {object} {initialized:boolean,latest:boolean}
-   */
-  async _check_ready_to_use() {
-    // @TODO check if ready to use in major API methods
-    let check = { initialized: false, latest: false };
-    // @TODO this is not really fool proof. check all the required docs, they have the system_generated flag
-    // what if some user mistakenly modifies or deletes some of the required docs ?
-    let version_search = await this.db_api.search({
-      selector: { schema: "system_settings", "data.name": "beanbagdb_version" },
-    });
-    if (version_search.docs.length > 0) {
-      let doc = version_search.docs[0];
-      check.initialized = true;
-      check.latest = doc["data"]["value"] == this._version;
-    }
-    if (check.initialized == false) {
-      console.warn(
-        "This database is not ready to be used. It is not initialized. Run `initialize_db()` first"
-      );
-    }
-    if ((check.latest == false) & (check.initialized == true)) {
-      console.warn(
-        "This database is not updated with the latest version. Run `initialize_db()` again to update to the latest version"
-      );
-    }
-    return check;
-  }
-
-  /**
-   * To update the system schema or reset to a stable version to ensure functioning of the BeanBagDB
-   */
-  async _update_system_schema() {
-    console.log("Todo");
-  }
-
-  /**
-   * Returns the current Unix timestamp in seconds.
-   * divide by 1000 (Date.now gives ms) to convert to seconds. 1 s = 1000 ms
-   * @returns {number}
-   */
-  _get_now_unix_timestamp() {
-    return Math.floor(Date.now() / 1000);
-  }
-
-  /**
-   * Generates a blank database json object. All objects in the database follow the same structure
-   * @param {string} schema_name
-   * @returns {object}
-   */
-  _get_blank_doc(schema_name) {
-    if (!schema_name) {
-      throw new Error("Schema name not provided for the blank doc");
-    }
-    let doc = {
-      data: {},
-      meta: {
-        createdOn: this._get_now_unix_timestamp(),
-        tags: [],
-        app :{}
-      },
-      schema: schema_name,
-    };
-    return doc;
-  }
-
-  /**
-   * Generates a blank schema doc ready to be inserted to the database. Note that no validation is done. This is for internal use
-   * @param {string} schema_name
-   * @param {Object} schema_object
-   * @param {Object} data
-   * @returns {Object}
-   */
-  _get_blank_schema_doc(schema_name, schema_object, data) {
-    this.validate_data(schema_object, data);
-    let obj = this._get_blank_doc(schema_name);
-    obj["data"] = data;
-    return obj;
-  }
-
-  /**
-   * Decrypts a given document using it's schema. The list of encrypted fields : schema_obj.settings.encrypted_fields
-   * @param {Object} schema_obj
-   * @param {Object} doc_obj
-   * @returns {Object}
-   */
-  _decrypt_doc(schema_obj, doc_obj) {
-    if (
-      schema_obj.settings["encrypted_fields"] &&
-      schema_obj.settings["encrypted_fields"].length > 0
-    ) {
-      schema_obj.settings["encrypted_fields"].forEach((itm) => {
-        doc_obj.data[itm] = this.utils.decrypt(
-          doc_obj.data[itm],
-          this.encryption_key
-        );
-      });
-    }
-    return { ...doc_obj };
-  }
-
-  /**
-   * Encrypts a given doc using it's schema obj.
-   * @param {Object} schema_obj
-   * @param {Object} doc_obj
-   * @returns {Object}
-   */
-  _encrypt_doc(schema_obj, doc_obj) {
-
-    if (
-      schema_obj.settings["encrypted_fields"] &&
-      schema_obj.settings["encrypted_fields"].length > 0
-    ) {
-      // console.log(schema_obj,doc_obj)
-      schema_obj.settings["encrypted_fields"].forEach((itm) => {
-        doc_obj.data[itm] = this.utils.encrypt(
-          doc_obj.data[itm],
-          this.encryption_key
-        );
-      });
-    }
-    return { ...doc_obj };
-  }
-
-  /**
-   * Checks if the new document is valid and ready to be inserted in the DB.
-   * List of checks:
-   * - fetch the schema object and validate the data object against the schema
-   * - check if the doc with same primary keys already exists
-   * - replace encrypted fields with encrypted values
-   * - return the doc
-   * @param {Object} schema
-   * @param {Object} data
-   */
-  async _insert_pre_checks(schema, data, settings = {}) {
-    // schema search
-    let sch_search = await this.search({
-      selector: { schema: "schema", "data.name": schema },
-    });
-    if (sch_search.docs.length == 0) {
-      throw new Error("Invalid Schema");
-    }
-    let schemaDoc = sch_search.docs[0]["data"];
-    // validate data
-    this.validate_data(schemaDoc.schema, data);
-
-    // special checks for special docs
-    // @TODO : for schema dos: settings fields must be in schema field
-    // @TODO : check if single record setting is set to true
-
-    // duplicate check
-    if (
-      schemaDoc.settings["primary_keys"] &&
-      schemaDoc.settings["primary_keys"].length > 0
-    ) {
-      let primary_obj = { schema: schema };
-      schemaDoc.settings["primary_keys"].map((ky) => {
-        primary_obj["data." + ky] = data[ky];
-      });
-      console.log(primary_obj);
-      let prim_search = await this.search({ selector: primary_obj });
-      console.log(prim_search);
-      if (prim_search.docs.length > 0) {
-        throw new Error("Doc already exists");
-      }
-    }
-    // encrypt if required
-    let new_data = { ...data };
-    if (
-      schemaDoc.settings["encrypted_fields"] &&
-      schemaDoc.settings["encrypted_fields"].length > 0
-    ) {
-      schemaDoc.settings["encrypted_fields"].forEach((itm) => {
-        new_data[itm] = this.utils.encrypt(data[itm], this.encryption_key);
-      });
-    }
-    // generate the doc object for data
-    let doc_obj = this._get_blank_doc(schema);
-    doc_obj["data"] = new_data;
-    return doc_obj;
-  }
-}
-
-module.exports = BeanBagDB;
+export {BeanBagDB,BeanBagDB_CouchDB,BeanBagDB_PouchDB}
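The removed 0.0.5 class above also documents the plugin contract: load_plugin() binds every exported function of a plugin module with the BeanBagDB instance as its first argument and then calls an optional on_load hook. A sketch of a plugin written against that contract, assuming the method survives unchanged in the new src/beanbagdb.js (which this diff does not display):

    // Every exported function receives the db instance first; remaining arguments come from the caller.
    // "note" and "data.title" refer to a hypothetical user-defined schema, used only for illustration.
    const notesPlugin = {
      async on_load(db) {
        console.log("notes plugin loaded for", db.name);
      },
      async find_by_title(db, title) {
        return db.search({ selector: { schema: "note", "data.title": title } });
      },
    };

    // await db.load_plugin("notes", notesPlugin);
    // const result = await db.plugins.notes.find_by_title("groceries");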
package/src/pouchdb.js
CHANGED

@@ -1,9 +1,11 @@
-
-
-
-const SDB = require("./index.js")
+import PouchDB from 'pouchdb';
+import pouchdbFind from 'pouchdb-find';
+PouchDB.plugin(pouchdbFind)
 
-
+import { scryptSync, randomBytes, createCipheriv, createDecipheriv } from 'crypto';
+import BeanBagDB from "./beanbagdb.js";
+
+class BeanBagDB_PouchDB extends BeanBagDB {
   constructor(db_url,db_name,encryption_key){
     const pdb = new PouchDB(db_name);
     const doc_obj = {
@@ -39,17 +41,17 @@ class BeanBagDB_PouchDB extends SDB {
       },
      utils:{
         encrypt: (text,encryptionKey)=>{
-          const key =
-          const iv =
-          const cipher =
+          const key = scryptSync(encryptionKey, 'salt', 32); // Derive a 256-bit key
+          const iv = randomBytes(16); // Initialization vector
+          const cipher = createCipheriv('aes-256-cbc', key, iv);
           let encrypted = cipher.update(text, 'utf8', 'hex');
           encrypted += cipher.final('hex');
           return iv.toString('hex') + ':' + encrypted; // Prepend the IV for later use
         },
         decrypt : (encryptedText, encryptionKey)=>{
-          const key =
+          const key = scryptSync(encryptionKey, 'salt', 32); // Derive a 256-bit key
           const [iv, encrypted] = encryptedText.split(':').map(part => Buffer.from(part, 'hex'));
-          const decipher =
+          const decipher = createDecipheriv('aes-256-cbc', key, iv);
           let decrypted = decipher.update(encrypted, 'hex', 'utf8');
           decrypted += decipher.final('utf8');
           return decrypted;
@@ -63,4 +65,4 @@ class BeanBagDB_PouchDB extends SDB {
   }
 }
 
-
+export default BeanBagDB_PouchDB
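With the PouchDB adapter now a default export, end-to-end use might look like the sketch below. The ready(), initialize_db() and insert() calls come from the removed 0.0.5 index.js shown earlier; their signatures in 0.5.2's src/beanbagdb.js are assumed unchanged, and the import path and "contact" schema are illustrative only:

    import BeanBagDB_PouchDB from "beanbagdb/src/pouchdb.js"; // path is an assumption; see src/index.js for named exports

    // db_url is unused by this adapter: the constructor opens new PouchDB(db_name) locally.
    const db = new BeanBagDB_PouchDB("", "my_local_db", "a sufficiently long encryption key");

    await db.ready();          // checks for the beanbagdb_version settings doc
    await db.initialize_db();  // on first run, seeds the schema docs, system_logs and system_settings

    // Assumes a "contact" schema document has already been registered;
    // insert() validates the data against that schema before writing.
    const { id } = await db.insert("contact", { name: "Ada", mobile: "555-0100" });
    console.log("created", id);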