beanbagdb 0.5.46 → 0.5.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/deploy_docs.yml +38 -0
- package/.github/workflows/release.yml +3 -3
- package/.github/workflows/test_on_master.yml +36 -0
- package/index.md +1 -0
- package/jsdoc.json +21 -0
- package/package.json +5 -1
- package/src/index.js +332 -146
- package/src/system_schema.js +35 -39
- package/test/couch_connect.js +29 -0
- package/test/couchdb.js +75 -0
- package/test/init.test.js +5 -74
- package/test/operations.test.js +327 -1
- package/test/pouchdb.js +74 -0
- package/test/test1.js +30 -1
package/src/index.js
CHANGED
@@ -1,11 +1,14 @@
 import * as sys_sch from "./system_schema.js";
-// import { version } from "../package.json" assert {type :"json"};
 /**
  * This the core class. it is not very useful in itself but can be used to generate a sub class for a specific database for eg CouchDB.
  * It takes a db_instance argument, which , this class relies on perform CRUD operations on the data.
  * Why have a "dumb" class ? : So that the core functionalities remains in a single place and the multiple Databases can be supported.
+ * Naming convention :
+ * - user facing methods : verbs with underscores, no camel case
+ * - internal methods (uses the this object) only to be used within the class : name starts with underscore (_)
+ * - util methods : these can also be used by the user, this object not accessed, : name starts with util_
  */
-class BeanBagDB {
+export class BeanBagDB {
   /**
    * @param {object} db_instance - Database object
    * db_instance object contains 3 main keys :
@@ -16,117 +19,196 @@ class BeanBagDB {
   */
  constructor(db_instance) {
    // data validation checks
-    this.
-    this.
-    this.
+    this.util_check_required_fields(["name", "encryption_key", "api", "utils","db_name"],db_instance)
+    this.util_check_required_fields(["insert", "update", "delete", "search","get","createIndex"],db_instance.api)
+    this.util_check_required_fields(["encrypt", "decrypt","ping","validate_schema"],db_instance.utils)

-    if(db_instance.encryption_key.length
+    if(db_instance.encryption_key.length<20){throw new Error("encryption_key must have at least 20 letters")}
    // db name should not be blank,

-    this.name = db_instance.name;
    this.encryption_key = db_instance.encryption_key;
-
+    this.db_name = db_instance.db_name // couchdb,pouchdb etc...
    this.db_api = db_instance.api;
    this.utils = db_instance.utils;

-    this.
-
-
+    this.meta = {
+      database_name : db_instance.name,
+      backend_database : this.db_name,
+      beanbagdb_version_db : null
+    }
+
+    this._version = this._get_current_version()
+    // latest indicated if the DB was initialized with the latest version or not.
+    this.active = false

+    console.log("Run ready() now");
+
    this.plugins = {}
+
+    this.error_codes = {
+      not_active : "Database is not ready. Run ready() first",
+      schema_not_found:"Schema not found"
+    }
+  }
+
+  async metadata(){
+    // returns system data
+    return {
+      ... this.meta,
+      beanbagdb_version_code : this._version,
+      ready_to_use : this.active
+    }
+    // todo : doc count, schema count, records for each schema, size of the database,
  }

  /**
   * This is to check if the database is ready to be used. It it important to run this after the class is initialized.
   */
  async ready() {
-
-
-
-
-
-
+    // TODO Ping db
+    let check = { initialized: false, latest: false ,db_version:null};
+    let version_search = await this.db_api.search({
+      selector: { schema: "system_settings", "data.name": "beanbagdb_version" },
+    });
+    if (version_search.docs.length > 0) {
+      let doc = version_search.docs[0];
+      this.active = doc["data"]["value"] == this._version;
+      this.meta.beanbagdb_version_db = doc["data"]["value"]
+    }
+    if(this.active){
+      console.log("Ready")
+    }else{
+      await this.initialize_db()
    }
  }

-
-    return this.ready_check.ready
-  }
+

  /**
   * Initializes the database making it ready to be used. Typically, required to run after every time package is updated to a new version.
   * See the documentation on the architecture of the DB to understand what default schemas are required for a smooth functioning of the database
   */
  async initialize_db() {
+    // this works on its own but is usually called by ready automatically if required
+
+    // check for schema_scehma : if yes, check if latest and upgrade if required, if no create a new schema doc
+    let logs = ["init started"]
    try {
-
-
+      let schema = await this.get_schema_doc("schema")
+      if (schema["data"]["version"] != sys_sch.schema_schema.version){
+        logs.push("old schema_schema v "+schema["data"]["version"])
+        let full_doc = await this.db_api.get(schema["_id"])
+        full_doc["data"] = {...sys_sch.schema_schema}
+        full_doc["meta"]["updated_on"] = this._get_now_unix_timestamp()
+        await this.db_api.update(full_doc)
+        logs.push("new schema_schema v "+sys_sch.schema_schema.version)
+      }
+
+    } catch (error) {
+      console.log(error)
+      if (error.message==this.error_codes.schema_not_found) {
+        console.log("...adding new ")
+        // inserting new schema_schema doc
        let schema_schema_doc = this._get_blank_doc("schema");
        schema_schema_doc.data = sys_sch.schema_schema;
        await this.db_api.insert(schema_schema_doc);
-
-
-
-
-
-
-
-
-
-
+        logs.push("init schema_schema v "+sys_sch.schema_schema.version)
+      }
+    }
+
+    let keys = Object.keys(sys_sch.system_schemas);
+    for (let index = 0; index < keys.length; index++) {
+      const schema_name = sys_sch.system_schemas[keys[index]]["name"]
+      const schema_data = sys_sch.system_schemas[keys[index]];
+      try {
+        // console.log(schema_name)
+        let schema1 = await this.get_schema_doc(schema_name)
+        if (schema1["data"]["version"] != schema_data.version){
+          logs.push("old "+schema_name+" v "+schema1["data"]["version"])
+          let full_doc = await this.db_api.get(schema1["_id"])
+          full_doc["data"] = {...schema_data}
+          full_doc["meta"]["updated_on"] = this._get_now_unix_timestamp()
+          await this.db_api.update(full_doc)
+          logs.push("new "+schema_name+" v "+schema_data.version)
+        }
+      } catch (error) {
+        console.log(error)
+        if (error.message==this.error_codes.schema_not_found) {
+          // inserting new schema doc
+          let new_schema_doc = this._get_blank_schema_doc("schema",sys_sch.schema_schema["schema"],schema_data);
+          await this.db_api.insert(new_schema_doc);
+          logs.push("init "+schema_name+" v "+schema_data.version)
        }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      }
+    }
+    // store the logs in the log_doc , generate it for the first time
+    // console.log(logs)
+    if(logs.length>1){
+      // version needs to be updated in the object as well as settings and must be logged
+      logs.push("Init done")
+
+      await this.insert_or_update_setting("system_logs",{value:{text:logs.join(","),added:this._get_now_unix_timestamp()},"on_update_array":"append"})
+      await this.insert_or_update_setting("beanbagdb_version",{value:this._version})
+      // await this.insert_or_update_setting("system_logs",{value:{text:"This is just a test.",added:this._get_now_unix_timestamp()}})
+
+      this.meta.beanbagdb_version_db = this._version
+      this.active = true
+    }else{
+      // no new updates were done
+      console.log("already updated. nothing is required to be done. continue")
+    }
+  }
+  async insert_or_update_setting(name,new_data,schema={}){
+    // TODO implement schema check
+    if(!new_data){throw new Error("No data provided")}
+    if(!new_data.value){throw new Error("No value provided")}
+
+    let doc_search = await this.db_api.search({"selector":{"schema":"system_settings","data.name":name}})
+    if(doc_search.docs.length>0){
+      // doc already exists, check schema and update it : if it exists then it's value already exists and can be
+      let doc = {...doc_search.docs[0]}
+      if(Array.isArray(doc.data.value)){
+        let append_type = doc.data.on_update_array
+        if(append_type=="append"){
+          doc["data"]["value"].push(new_data.value)
+        }else if(append_type=="update"){
+          doc["data"]["value"] = new_data.value
+        }else{
+          throw new Error("Invalid on update array value")
        }
-
-
-    // finally update the flags
-    this.ready_check.initialized = true;
-    this.ready_check.latest = true;
-    console.log("Database initialized");
-    } else {
-    console.log("Database already initialized");
-    if (!this.ready_check.latest) {
-    // update to latest schema
-    this._update_system_schema();
-    } else {
-    console.log("Database already up to date");
+      }else{
+        doc["data"]["value"] = new_data.value
      }
+      // finally update it
+      doc["meta"]["updated_on"] = this._get_now_unix_timestamp()
+      await this.db_api.update(doc)
+      return doc
+
+    }else{
+      // doc does not exists, generate a new one
+      let new_val= {value:new_data.value}
+
+      if (new_data.on_update_array){
+        // this indicates the provided value is initial value inside the array
+        new_val.value = [new_data.value]
+        new_val.on_update_array = new_data.on_update_array
      }
-
-
-
+      let new_doc = this._get_blank_doc("system_settings")
+      new_doc["data"] = {
+        "name": name,
+        ...new_val
+      }
+      let d = await this.db_api.insert(new_doc)
+      return d
    }
  }

+
  /**
   * Adds indexes for all the schemas in the data base. This is important to make search faster. This must be done every time a new schema is introduced in the database
   */
  async update_indexes() {
+    this._check_ready_to_use()
    // @TODO check this. i don't the index created this way are actually useful in search.
    let all_schemas_docs = await this.db_api.search({
      selector: { schema: "schema" },
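
For orientation, here is a minimal sketch of how the reworked constructor and `ready()` flow might be driven, assuming the package entry point re-exports `BeanBagDB` from `src/index.js`; the backend `api`/`utils` stubs and the database name below are illustrative placeholders, not part of the package.

```js
// Sketch only: the adapter stubs below are hypothetical placeholders.
// The constructor now checks for name, encryption_key (20+ characters), api, utils and db_name,
// and ready() should be awaited before other methods (they call _check_ready_to_use()).
import { BeanBagDB } from "beanbagdb";

const db_instance = {
  name: "notes_db",                                   // illustrative database name
  db_name: "pouchdb",                                 // backend label, e.g. couchdb / pouchdb
  encryption_key: "an-example-key-with-20-plus-chars",
  api: {
    insert: async (doc) => ({ id: "doc-1" }),         // stub adapter methods
    update: async (doc) => doc,
    delete: async (id) => {},
    search: async (query) => ({ docs: [] }),
    get: async (id) => ({ _id: id }),
    createIndex: async (filter) => {},
  },
  utils: {
    encrypt: (text, key) => text,                     // placeholder, no real crypto
    decrypt: (text, key) => text,
    ping: () => true,
    validate_schema: (schema, data) => ({ valid: true }),
  },
};

const db = new BeanBagDB(db_instance);
await db.ready();                   // initializes/updates system schemas on first use
console.log(await db.metadata());   // { database_name, backend_database, beanbagdb_version_db, ... }
```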
@@ -153,13 +235,75 @@ class BeanBagDB {
    //const validate = ajv.compile(schema_obj);
    //const valid = validate(data_obj);
    if (!valid) {
-
-      throw new Error(validate.errors);
+      throw new ValidationError(validate.errors);
    }
  }

  validate_schema_object(schema_doc){
+    let errors = [{"message":"Schema validation errors "}]
+    if(!schema_doc["schema"]["type"]){
+      errors.push({message:"Schema must have the field schema.'type' which can only be 'object' "})
+    }else{
+      if(schema_doc["schema"]["type"]!="object"){
+        errors.push({message:"The schema.'type' value is invalid.Only 'object' allowed"})
+      }
+    }
+    if(!schema_doc["schema"]["properties"]){
+      errors.push({message:"The schema.'properties' object does not exists"})
+    }else{
+      if(typeof(schema_doc["schema"]["properties"])!="object"){
+        errors.push({message:"Invalid schema.properties. It must be an object and must have atleast one field inside."})
+      }
+      if(Object.keys(schema_doc["schema"]["properties"]).length==0){
+        errors.push({message:"You must define at least one property"})
+      }
+    }

+    if(!schema_doc["schema"]["additionalProperties"]){
+      errors.push({message:"The schema.'additionalProperties' field is required"})
+    }else{
+      if(typeof(schema_doc["schema"]["additionalProperties"])!="boolean"){
+        errors.push({message:"Invalid schema.additionalProperties. It must be a boolean value"})
+      }
+    }
+
+    const allKeys = Object.keys(schema_doc["schema"]["properties"])
+    if(schema_doc["settings"]["primary_keys"].length>0){
+      // check if all keys belong to the schema and are not of type object
+      let all_pk_exist = schema_doc["settings"]["primary_keys"].every(item=>allKeys.includes(item)&&schema_doc["schema"]["properties"][item]["type"]!="object"&&schema_doc["schema"]["properties"][item]["type"]!="array")
+
+      if(!all_pk_exist){
+        errors.push({message:"Primary keys invalid. All keys must be defined in the schema and must be non object"})
+      }
+    }
+
+
+    if(schema_doc["settings"]["non_editable_fields"].length>0){
+      // check if all keys belong to the schema
+      let all_ne_exist = schema_doc["settings"]["non_editable_fields"].every(item=>allKeys.includes(item))
+      if(!all_ne_exist){
+        errors.push({message:"Non editable fields invalid. All fields must be defined in the schema "})
+      }
+    }
+
+    if(schema_doc["settings"]["encrypted_fields"].length>0){
+      // check if all keys belong to the schema and are only string
+      let all_enc_exist = schema_doc["settings"]["encrypted_fields"].every(item=>allKeys.includes(item)&&schema_doc["schema"]["properties"][item]["type"]=="string")
+      if(!all_enc_exist){
+        errors.push({message:"Invalid encrypted fields. All fields must be defined in the schema and must be string "})
+      }
+
+      // check : primary keys cannot be encrypted
+      let all_enc_no_pk = schema_doc["settings"]["encrypted_fields"].every(item=>!schema_doc["settings"]["primary_keys"].includes(item))
+      if(!all_enc_no_pk){
+        errors.push({message:"Invalid encrypted fields.Primary key fields cannot be encrypted "})
+      }
+    }
+
+    /// cannot encrypt primary field keys
+    if(errors.length>1){
+      throw new ValidationError(errors)
+    }
  }

  /**
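
As a reading aid, a hedged example of a schema document shape that the checks in `validate_schema_object` above would accept; the field names are illustrative, and the surrounding doc shape (name/version/settings) is inferred from how schema docs are searched and upgraded elsewhere in this diff.

```js
// Illustrative only: satisfies the validate_schema_object checks above.
const contact_schema = {
  name: "contact",                     // schema docs are looked up by data.name elsewhere in the class
  version: 1,
  schema: {
    type: "object",                    // must be exactly "object"
    additionalProperties: false,       // must exist and be a boolean
    properties: {                      // must be a non-empty object
      full_name: { type: "string" },
      email: { type: "string" },
      notes: { type: "string" },
    },
  },
  settings: {
    primary_keys: ["email"],           // must exist in properties and not be object/array typed
    non_editable_fields: [],           // must exist in properties
    encrypted_fields: ["notes"],       // strings only, and never a primary key
    single_record: false,
  },
};
// Any violation is collected and thrown as a ValidationError listing all messages.
```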
@@ -169,9 +313,10 @@ class BeanBagDB {
   * @returns {Object} {doc} or {doc,schema}
   */
  async get(doc_id,include_schema=false) {
+    this._check_ready_to_use()
    let doc = await this.db_api.get(doc_id);
    let schema = await this.get_schema_doc(doc.schema);
-    doc = this._decrypt_doc(schema, doc);
+    doc = this._decrypt_doc(schema["data"], doc);
    if(include_schema){
      return {doc,schema}
    }
@@ -186,10 +331,11 @@ class BeanBagDB {
    let schemaSearch = await this.db_api.search({
      selector: { schema: "schema", "data.name": schema_name },
    });
+    // console.log(schemaSearch)
    if (schemaSearch.docs.length == 0) {
-      throw new Error(
+      throw new Error(this.error_codes.schema_not_found);
    }
-    return schemaSearch.docs[0]
+    return schemaSearch.docs[0];
  }

  /**
@@ -201,7 +347,9 @@ class BeanBagDB {
   * @returns object
   */
  async get_doc(schema_name, primary_key = {}) {
-
+    this._check_ready_to_use()
+    let schema_doc = await this.get_schema_doc(schema_name);
+    let s_doc = schema_doc["data"];
    let doc_obj;
    if (
      s_doc["settings"]["primary_keys"] &&
@@ -239,6 +387,7 @@ class BeanBagDB {
   * @param {Object} criteria
   */
  async search(criteria) {
+    this._check_ready_to_use()
    if (!criteria["selector"]) {
      throw new Error("Invalid search query.");
    }
@@ -255,13 +404,15 @@ class BeanBagDB {
   * @param {Object} data e.g {"name":"","mobile":""...}
   * @param {Object} settings (optional)
   */
-  async insert(schema, data, settings = {}) {
+  async insert(schema, data, meta= {},settings = {}) {
+    //console.log("here in insert")
+    this._check_ready_to_use()
    try {
      let doc_obj = await this._insert_pre_checks(schema, data, settings);
      let new_rec = await this.db_api.insert(doc_obj);
      return { id: new_rec["id"] };
    } catch (error) {
-      console.log(error);
+      // console.log(error);
      throw error;
    }
  }
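
A short hedged sketch of the updated `insert` signature, which now takes a `meta` object before `settings`; the schema name and data values here are illustrative.

```js
// Sketch: insert(schema, data, meta, settings) — meta is the new third argument.
const result = await db.insert(
  "contact",                                          // illustrative schema name
  { full_name: "Ada", email: "ada@example.com" },     // data validated against the schema
  {},                                                 // meta
  {}                                                  // settings
);
console.log(result.id);                               // insert resolves to { id: <new doc id> }
```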
@@ -283,13 +434,14 @@ class BeanBagDB {
   * @param {*} schema_name
   * @param {doc_obj} updates {data:{},meta:{}}, need not be the full document, just the new values of all/some fields
   * @param {Boolean} save_conflict = true -
-   *
+   *
   */
  async update(doc_id, rev_id, updates, update_source="api",save_conflict = true) {
+    this._check_ready_to_use()
    // making a big assumption here : primary key fields cannot be edited
    // so updating the doc will not generate primary key conflicts
    let req_data = await this.get(doc_id,true);
-    let schema = req_data.schema
+    let schema = req_data.schema
    let full_doc = req_data.doc // await this.get(doc_id)["doc"];

    // @TODO fix this : what to do if the rev id does not match
@@ -300,43 +452,37 @@ class BeanBagDB {
    // }
    // }

-    // blank check
-
    // update new value depending on settings.editable_fields (if does not exists, all fields are editable)
-    let
-
-
-    }
+    let all_fields = Object.keys(schema.schema.properties)
+    let unedit_fields = schema.settings["non_editable_fields"]
+    let edit_fields = all_fields.filter(item=>!unedit_fields.includes(item))

    // now generate the new doc with updates
    let allowed_updates = this._filterObject(updates.data,edit_fields);
    let updated_data = { ...full_doc.data, ...allowed_updates };

-    // validate data
    this.validate_data(schema.schema, updated_data);

    // primary key check if multiple records can be created
-    if(schema.settings["single_record"]==false){
-
-
-
-
-
-    if(search.docs.length
-
-
-
-    throw new Error("Update not allowed. Document with the same primary key already exists")
-    }
-    }else{
-    throw new Error("There is something wrong with the schema")
+    if(schema.settings["single_record"]==false && schema.settings["primary_keys"].length>0){
+      let pri_fields = schema.settings["primary_keys"]
+      let search_criteria = {schema:schema.name}
+      pri_fields.map(itm=>{search_criteria["data."+itm] = updated_data[itm]})
+      let search = await this.search({selection:search_criteria})
+      if(search.docs.length>0){
+        if(search.docs.length==1){
+          let thedoc = search.docs[0]
+          if(thedoc["_id"]!=doc_id){
+            throw new DocUpdateError([{message:"Update not allowed. Document with the same primary key already exists"}])
          }
        }
-
+        else{
+          throw new Error("There is something wrong with the schema primary keys")
+        }
+      }
    }

    // encrypt the data
-
    full_doc["data"] = updated_data
    full_doc = this._encrypt_doc(schema,full_doc);

@@ -355,11 +501,13 @@ class BeanBagDB {
  }

  async delete(doc_id) {
+    this._check_ready_to_use()
    await this.db_api.delete(doc_id)
  }


  async load_plugin(plugin_name,plugin_module){
+    this._check_ready_to_use()
    this.plugins[plugin_name] = {}
    for (let func_name in plugin_module){
      if(typeof plugin_module[func_name]=='function'){
@@ -372,24 +520,39 @@ class BeanBagDB {
    }
  }

-  ////////
+  //////// Internal methods ////////
+
+  _get_current_version(){
+    // current version is the sum of versions of all system defined schemas
+    let sum = sys_sch.schema_schema.version
+    let keys = Object.keys(sys_sch.system_schemas).map(item=>{
+      sum = sum+ sys_sch.system_schemas[item].version
+    })
+    if(sum == NaN){
+      throw Error("Error in system schema version numbers")
+    }
+    return sum
+  }
+
+  _check_ready_to_use(){
+    if(!this.active){
+      throw new Error(this.error_codes.not_active)
+    }
+  }
+

  _generate_random_link(){
    const dictionary = ['rain', 'mars', 'banana', 'earth', 'kiwi', 'mercury', 'fuji', 'hurricane', 'matterhorn', 'snow', 'saturn', 'jupiter', 'peach', 'wind', 'pluto', 'apple', 'k2', 'storm', 'venus', 'denali', 'cloud', 'sunshine', 'mango', 'drizzle', 'pineapple', 'aconcagua', 'gasherbrum', 'apricot', 'neptune', 'fog', 'orange', 'blueberry', 'kilimanjaro', 'uranus', 'grape', 'storm', 'montblanc', 'lemon', 'chooyu', 'raspberry', 'cherry', 'thunder', 'vinson', 'breeze', 'elbrus', 'everest', 'parbat', 'makalu', 'nanga', 'kangchenjunga', 'lightning', 'cyclone', 'comet', 'asteroid', 'pomegranate', 'nectarine', 'clementine', 'strawberry', 'tornado', 'avalanche', 'andes', 'rockies', 'himalayas', 'pyrenees', 'carpathians', 'cascade', 'etna', 'vesuvius', 'volcano', 'tundra', 'whirlwind', 'iceberg', 'eclipse', 'zephyr', 'tropic', 'monsoon', 'aurora'];
    return Array.from({ length: 4 }, () => dictionary[Math.floor(Math.random() * dictionary.length)]).join('-');
  }

-
-    for (const field of requiredFields) {
-      if (!obj[field]) {throw new Error(`${field} is required`);}
-    }
-  }
+

  /**
   *
   * @param {*} obj
   * @param {*} fields
-   *
+   *
   */
  _filterObject(obj, fields) {
    return fields.reduce((filteredObj, field) => {
@@ -400,36 +563,7 @@ class BeanBagDB {
    }, {});
  }

-
-   * Checks if the selected database is initialized for working with BeanBagDB. Also throws a warning if package version does not match with database version.
-   * Every time a database is initialized, a setting document `beanbagdb_version` is added. If this does not exists, the database is not initialized. If it exists but does not match the current version, a warning is shown.
-   * @returns {object} {initialized:boolean,latest:boolean}
-   */
-  async _check_ready_to_use() {
-    // @TODO check if ready to use in major API methods
-    let check = { initialized: false, latest: false };
-    // @TODO this is not really fool proof. check all the required docs, they have the system_generated flag
-    // what if some user mistakenly modifies or deletes some of the required docs ?
-    let version_search = await this.db_api.search({
-      selector: { schema: "system_settings", "data.name": "beanbagdb_version" },
-    });
-    if (version_search.docs.length > 0) {
-      let doc = version_search.docs[0];
-      check.initialized = true;
-      check.latest = doc["data"]["value"] == this._version;
-    }
-    if (check.initialized == false) {
-      console.warn(
-        "This database is not ready to be used. It is not initialized. Run `initialize_db()` first"
-      );
-    }
-    if ((check.latest == false) & (check.initialized == true)) {
-      console.warn(
-        "This database is not updated with the latest version. Run `initialize_db()` again to update to the latest version"
-      );
-    }
-    return check;
-  }
+

  /**
   * To update the system schema or reset to a stable version to ensure functioning of the BeanBagDB
@@ -459,7 +593,7 @@ class BeanBagDB {
    let doc = {
      data: {},
      meta: {
-
+        created_on: this._get_now_unix_timestamp(),
        tags: [],
        app :{},
        link : this._generate_random_link() // there is a link by default. overwrite this if user provided one but only before checking if it is unique
@@ -563,6 +697,10 @@ class BeanBagDB {

    // special checks for special docs
    // @TODO : for schema dos: settings fields must be in schema field
+    if(schema=="schema"){
+      //more checks are required
+      this.validate_schema_object(data)
+    }
    // @TODO : check if single record setting is set to true

    // duplicate check
@@ -596,6 +734,54 @@ class BeanBagDB {
    doc_obj["data"] = new_data;
    return doc_obj;
  }
+
+  ////// Utility methods
+  util_check_required_fields(requiredFields,obj){
+    for (const field of requiredFields) {
+      if (!obj[field]) {throw new Error(`${field} is required`);}
+    }
+  }
}

-
+
+export class ValidationError extends Error {
+  constructor(errors = []) {
+    // Create a message based on the list of errors
+    //console.log(errors)
+    let error_messages = errors.map(item=>` ${(item.instancePath||" ").replace("/","")} ${item.message} `)
+    let message = `Validation failed with ${errors.length} error(s): ${error_messages.join(",")}`;
+    super(message);
+    this.name = 'ValidationError';
+    this.errors = errors; // Store the list of errors
+  }
+}
+
+export class DocUpdateError extends Error {
+  constructor(errors=[]){
+    let error_messages = errors.map(item=>`${item.message}`)
+    let message = `Error in document update. ${error_messages.join(",")}`
+    super(message)
+    this.name = "DocUpdateError";
+    this.errors = errors
+  }
+}
+
+export class DocInsertError extends Error {
+  constructor(errors=[]){
+    let error_messages = errors.map(item=>`${item.message}`)
+    let message = `Error in document insert. ${error_messages.join(",")}`
+    super(message)
+    this.name = "DocInsertError";
+    this.errors = errors
+  }
+}
+
+export class DocNotFoundError extends Error {
+  constructor(errors=[]){
+    let error_messages = errors.map(item=>`${item.message}`)
+    let message = `Error in fetching document. Criteria : ${error_messages.join(",")}`
+    super(message)
+    this.name = "DocNotFoundError";
+    this.errors = errors
+  }
+}
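
Since typed error classes are now exported alongside `BeanBagDB`, a hedged sketch of how a caller might branch on them; the import path assumes the package entry re-exports `src/index.js`, and `db`, `doc_id`, `rev_id` are assumed to come from earlier setup code.

```js
// Sketch: distinguishing the exported error types when an operation fails.
import { ValidationError, DocUpdateError, DocNotFoundError } from "beanbagdb";

try {
  await db.update(doc_id, rev_id, { data: { email: "new@example.com" } });
} catch (err) {
  if (err instanceof ValidationError) {
    console.error("schema validation failed:", err.errors);   // individual messages kept on .errors
  } else if (err instanceof DocUpdateError) {
    console.error("update rejected:", err.message);           // e.g. duplicate primary key
  } else if (err instanceof DocNotFoundError) {
    console.error("document not found:", err.message);
  } else {
    throw err;
  }
}
```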