@xuda.io/xuda-dbs-plugin-xuda 1.0.316 → 1.0.318

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@xuda.io/xuda-dbs-plugin-xuda",
-  "version": "1.0.316",
+  "version": "1.0.318",
   "description": "Xuda Database Socket for Xuda's proprietary structure powered by CouchDB",
   "scripts": {
     "pub": "npm version patch --force && npm publish --access public"
@@ -17,6 +17,6 @@
   "dependencies": {
     "@xuda.io/xu_cast": "^1.0.3",
     "lodash": "^4.17.21",
-    "nano": "^10.0.0"
+    "nano": "^10.0.4"
   }
 }
@@ -1,5 +1,8 @@
-const _ = require('lodash');
-const crypto = require('node:crypto');
+import { createRequire } from 'module';
+import _ from 'lodash';
+import crypto from 'node:crypto';
+
+const require = createRequire(import.meta.url);
 
 const check_unique = async function (e, docP, table_obj, db, app_id_reference) {
   var len = docP.udfIndex.length;
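
The header hunk above switches the module from CommonJS requires to ES-module imports while keeping a local require() via createRequire, which the file still needs for the call-style nano loader at the bottom. A minimal standalone sketch of that pattern, for context (the nano load shown here is illustrative and the URL is a placeholder):

```js
// Minimal sketch of the CommonJS -> ESM migration pattern used in this hunk.
// ES modules have no global require(), so one is rebuilt from createRequire();
// everything else moves to import/export syntax.
import { createRequire } from 'module';
import _ from 'lodash';

const require = createRequire(import.meta.url);

// Call-style CommonJS loaders keep working unchanged, e.g. the CouchDB client
// used later in this file (placeholder URL):
const nano = require('nano')('http://localhost:5984');
console.log(_.isFunction(nano.db.list)); // true
```
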
@@ -43,7 +46,6 @@ const check_unique = async function (e, docP, table_obj, db, app_id_reference) {
       var keyValue = [];
 
       for await (const [keySegment, valSegment] of Object.entries(keysArr)) {
-        // run on key segment
         let _fieldType = find_item_by_key(table_obj.tableFields, 'field_id', valSegment).props.fieldType;
         let _val = await get_cast_val('check_unique', valSegment, _fieldType, val.keyValue[Number(keySegment)]);
         keyValue.push(_val);
@@ -55,8 +57,6 @@ const check_unique = async function (e, docP, table_obj, db, app_id_reference) {
     const ret = await query_db(e, db, app_id_reference, table_obj);
     const json = ret.data;
 
-    // var data;
-
     if (json.code > 0) {
       var data = json.data;
       if (data?.rows?.length) {
@@ -70,7 +70,6 @@ const check_unique = async function (e, docP, table_obj, db, app_id_reference) {
        }
      }
    }
-    // throw json.data;
    return { code: 1, data: 'ok' };
  } catch (msg) {
    console.error(msg);
@@ -89,22 +88,15 @@ const get_index_json = async function (docInP, table_obj) {
   var key_val = [];
 
   if (docInP.udfData && table_obj) {
-    // check for udf data array
     if (table_obj.tableIndexes) {
-      // console.log(udfDicIndexT.rows);
       for await (var valIndex of table_obj.tableIndexes) {
-        // run on index rows
-        keysArr = valIndex.data.keys; // create array from keys string segment
+        keysArr = valIndex.data.keys;
         key_val = [];
         for await (var valSegment of keysArr) {
-          // run on key segment
-          // find keys values
          if (docInP.udfData.data[valSegment]) {
            key_val.push(docInP.udfData.data[valSegment]);
            continue;
          }
-          // put the value
-          // debugger;
          const field_obj = find_item_by_key(table_obj.tableFields, 'field_id', valSegment);
          if (!field_obj) {
            throw 'field not found in key: ' + valSegment;
@@ -121,7 +113,6 @@ const get_index_json = async function (docInP, table_obj) {
        keyValue: key_val,
      });
    }
-    // );
    }
  }
  return { code: 1, data: index_json };
@@ -249,7 +240,6 @@ const get_opt = function (e, table_obj) {
     }
     opt.fields = fields;
   }
-  // fix names
 
   for (const [key, val] of Object.entries(opt.fields)) {
     opt.fields[key] = 'udfData.data.' + val;
@@ -257,9 +247,7 @@ const get_opt = function (e, table_obj) {
 
   const _sort_model = typeof e.sortModel === 'string' ? JSON.parse(e.sortModel) : e.sortModel;
 
-  // if (!e?.sortModel || !JSON.parse(e.sortModel).length) {
   if (!e?.sortModel || !_sort_model.length) {
-    // added 2021 09 10
     if (opt.sort) {
       for (const [key, val] of Object.entries(opt.sort)) {
         opt.sort[key] = {
@@ -276,68 +264,51 @@
      return e?.data?.field_id;
    });
 
-    // Helper function to recursively process the query object
    function recursiveReplace(obj) {
      if (typeof obj === 'object' && obj !== null) {
-        // Create a new object to store the modified query
        let newObj = Array.isArray(obj) ? [] : {};
 
-        // Traverse through the object
        for (let key in obj) {
          if (obj.hasOwnProperty(key)) {
-            // If the key is in the keys_to_replace array, replace it
            let newKey = keys_to_replace.includes(key) ? `udfData.data.${key}` : key;
-
-            // Recursively process nested objects
            newObj[newKey] = recursiveReplace(obj[key]);
          }
        }
 
        return newObj;
      } else {
-        // If it's not an object or array, return the value as is
        return obj;
      }
    }
 
-    // Start the recursive replacement
    return recursiveReplace(query);
  }
 
  function replaceRegexOptions(query) {
-    // Helper function to recursively process the query object
    function recursiveReplace(obj) {
      if (typeof obj === 'object' && obj !== null) {
-        // Create a new object to store the modified query
        let newObj = Array.isArray(obj) ? [] : {};
 
-        // Traverse through the object
        for (let key in obj) {
          if (obj.hasOwnProperty(key)) {
            if (key === '$regex') {
              newObj[key] = obj.$options ? `(?${obj.$options})${obj[key]}` : obj[key];
              return newObj;
            }
-
-            // Recursively process nested objects
-
            newObj[key] = recursiveReplace(obj[key]);
          }
        }
 
        return newObj;
      } else {
-        // If it's not an object or array, return the value as is
        return obj;
      }
    }
 
-    // Start the recursive replacement
    return recursiveReplace(query);
  }
 
  if (e.dataSourceFilterModelType === 'query' && e.filterModelMongo) {
-    // selector_new['$and'] = [replaceRegexOptions(replaceKeysInQuery(JSON.parse(e.filterModelMongo)))];
    selector_new['$and'] = [replaceRegexOptions(replaceKeysInQuery(typeof e.filterModelMongo === 'string' ? JSON.parse(e.filterModelMongo) : e.filterModelMongo))];
  }
 
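
Taken together, replaceKeysInQuery and replaceRegexOptions rewrite an incoming Mongo-style filter into a shape CouchDB's Mango _find accepts: table field ids are prefixed with the stored udfData.data path, and $options is folded into the pattern as an inline flag, since Mango's $regex takes no separate options field. A hypothetical before/after, with a made-up field id and value:

```js
// Hypothetical input filter; the field id "status" and the pattern are placeholders.
const incoming = { status: { $regex: '^act', $options: 'i' } };

// After replaceKeysInQuery: field ids become udfData.data.* paths.
// After replaceRegexOptions: $options is dropped and folded into the pattern.
const transformed = { 'udfData.data.status': { $regex: '(?i)^act' } };

// query_db then wraps the result as selector_new['$and'] = [transformed].
console.log(JSON.stringify(incoming), '->', JSON.stringify(transformed));
```
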
@@ -438,7 +409,6 @@ const query_db = async function (e, db, app_id_reference, table_obj) {
        });
      }
    } else {
-      // no index
      rows.push({ key: '', value: body.docs.length });
    }
    return { code: 1, data: { rows: rows, total_rows: rows.length, opt } };
@@ -523,9 +493,9 @@ const query_db = async function (e, db, app_id_reference, table_obj) {
          }
 
          if (typeof totals_obj[field_id][value] === 'undefined') {
-            totals_obj[field_id][value] = 1; //value;
+            totals_obj[field_id][value] = 1;
          } else {
-            totals_obj[field_id][value]++; //+= value;
+            totals_obj[field_id][value]++;
          }
 
          break;
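
The branch above accumulates a per-field value histogram. A rough standalone sketch of the shape it builds (field ids and values here are made up, not taken from the package):

```js
// totals_obj maps field_id -> { value -> occurrence count }, as in the hunk above.
const totals_obj = {};
const rows = [{ status: 'open' }, { status: 'open' }, { status: 'done' }]; // placeholder rows

for (const row of rows) {
  for (const [field_id, value] of Object.entries(row)) {
    totals_obj[field_id] = totals_obj[field_id] ?? {};
    if (typeof totals_obj[field_id][value] === 'undefined') {
      totals_obj[field_id][value] = 1;
    } else {
      totals_obj[field_id][value]++;
    }
  }
}
console.log(totals_obj); // { status: { open: 2, done: 1 } }
```
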
@@ -545,11 +515,8 @@ const query_db = async function (e, db, app_id_reference, table_obj) {
    return raw_data();
  };
 
-  // xuda
-
  try {
    try {
-      // console.log("opt", opt);
      const doc = await db.find(opt);
      var mango_index_obj;
      if (doc?.warning?.includes('No matching index found')) {
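
When db.find comes back with the "No matching index found" warning, the plugin apparently goes on to build a Mango index (the creation logic itself sits outside this hunk, so the sketch below is an assumption about the follow-up call, with placeholder field path, index name and connection details):

```js
// Assumed follow-up to the warning check above, using nano's createIndex.
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
const db = require('nano')('http://localhost:5984').db.use('example_app_db'); // placeholders

const response = await db.createIndex({
  index: { fields: ['udfData.data.status'] }, // placeholder sort/filter field
  name: 'udfData.data.status-json-index',     // placeholder index name
  type: 'json',
});
console.log(response.result); // 'created' or 'exists'
```
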
@@ -603,7 +570,7 @@ const query_db = async function (e, db, app_id_reference, table_obj) {
        return { code: -1, data: err.message };
      }
    } else {
-      return { code: -1, data: err.message };
+      return { code: -1, data: err.message, opt };
    }
  }
  } catch (err) {
@@ -691,34 +658,9 @@ const query_db = async function (e, db, app_id_reference, table_obj) {
      rows.push({
        id: val.id,
        value: val.doc.udfData,
-        // value: { udfData: val.doc.udfData, _id: val.id },
      });
    }
 
-    // const reduce_rows = () => {
-    //   const reduce_obj = body.docs.reduce(
-    //     (params, data) => {
-    //       const fx = (val, idx) => {
-    //         const group_by_field = data[params.sort[idx]];
-
-    //         if (params.sort[idx + 1]) {
-    //           val[group_by_field] = val[group_by_field] ?? {};
-    //           val[group_by_field][data[params.sort[idx + 1]]] = 0;
-    //           idx++;
-    //           fx(val[group_by_field], idx);
-    //           return;
-    //         }
-
-    //         val[group_by_field] = val[group_by_field] ?? 0;
-    //         val[group_by_field] += 1;
-    //       };
-    //       fx(params.group, 0);
-
-    //       return params;
-    //     },
-    //     { sort: opt.sort, group: {} }
-    //   );}
-
    return { code: 1, data: { rows: rows, total_rows: rows.length, opt } };
  } catch (err) {
    return { code: -1, data: err.message };
@@ -751,14 +693,13 @@ const query_db = async function (e, db, app_id_reference, table_obj) {
  }
 
  if (e.count && !e.filter_from) {
-    // count tables
    return await count_tables();
  }
 
  return await runtime_get_mango_data();
};
 
-exports.create = async (params, setup_doc, resolve, reject) => {
+export const create = async (params, setup_doc, resolve, reject) => {
  const e = params.e;
  const db = params.db;
  const app_id_reference = params.app_id_reference;
@@ -852,7 +793,8 @@ exports.create = async (params, setup_doc, resolve, reject) => {
  }
  return await single();
};
-exports.read = async (params, setup_doc, resolve, reject) => {
+
+export const read = async (params, setup_doc, resolve, reject) => {
  const e = params.e;
  const db = params.db;
  const app_id_reference = params.app_id_reference;
@@ -865,23 +807,22 @@ exports.read = async (params, setup_doc, resolve, reject) => {
 
  return resolve(ret.data);
};
-exports.update = async (params, setup_doc, resolve, reject) => {
+
+export const update = async (params, setup_doc, resolve, reject) => {
  const e = params.e;
  const db = params.db;
  const app_id_reference = params.app_id_reference;
  const table_obj = params.table_obj;
  try {
    if (e.row_id === 'newRecord') {
-      return this.create(params, setup_doc, resolve, reject);
+      return create(params, setup_doc, resolve, reject);
    }
    var doc = await db.get(e.row_id, {});
-    // let data = doc.udfData.data;
    var error = undefined;
    if (!e.field_id && !e.table_data) {
      error = 'Invalid field_id or table_data object to save';
      return reject(error);
    }
-    // single value save
    if (e.field_id) {
      let _tableFieldsObj = find_item_by_key(table_obj.tableFields, 'field_id', e.field_id);
      if (!_tableFieldsObj || _.isEmpty(_tableFieldsObj)) {
@@ -891,9 +832,7 @@ exports.update = async (params, setup_doc, resolve, reject) => {
      doc.udfData.data[e.field_id] = await get_cast_val('dbs_update', e.field_id, _tableFieldsObj.props.fieldType, e.field_value);
    }
 
-    // object value save
    if (e.table_data) {
-      // data = {};
      for await (const [key, val] of Object.entries(e.table_data)) {
        let _tableFieldsObj = find_item_by_key(table_obj.tableFields, 'field_id', key);
        if (!_tableFieldsObj || _.isEmpty(_tableFieldsObj)) {
@@ -928,7 +867,8 @@ exports.update = async (params, setup_doc, resolve, reject) => {
    return reject(error);
  }
};
-exports.delete = async (params, setup_doc, resolve, reject) => {
+
+export const deleteFunc = async (params, setup_doc, resolve, reject) => {
  const e = params.e;
  const db = params.db;
  const app_id_reference = params.app_id_reference;
@@ -961,7 +901,9 @@ exports.delete = async (params, setup_doc, resolve, reject) => {
  }
};
 
-exports.restore = async (params, setup_doc, resolve, reject) => {
+export { deleteFunc as delete }; // Renaming export to avoid reserved keyword conflict
+
+export const restore = async (params, setup_doc, resolve, reject) => {
  const e = params.e;
  const db = params.db;
  const app_id_reference = params.app_id_reference;
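
Because `delete` cannot be used as a declaration name, the handler is declared as deleteFunc and re-exported under its original name; check_unique gets the same aliasing treatment further down. Illustrative only (the package entry point and the host that loads these exports are not shown in this diff), a consumer would address the aliased exports roughly like this:

```js
// Illustrative consumer-side imports; the import specifier is an assumption.
// `delete` is a reserved word, so a named import has to alias it locally,
// or the export can be reached through a namespace object.
import * as dbs from '@xuda.io/xuda-dbs-plugin-xuda';
import { create, read, update, delete as deleteDoc, restore, check_unique } from '@xuda.io/xuda-dbs-plugin-xuda';

console.log([create, read, update, deleteDoc, restore, check_unique, dbs['delete']].map((fn) => typeof fn)); // all 'function'
```
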
@@ -986,7 +928,8 @@ exports.restore = async (params, setup_doc, resolve, reject) => {
    return reject(error);
  }
};
-exports.check_unique = async (params, setup_doc, resolve, reject) => {
+
+export const check_unique_export = async (params, setup_doc, resolve, reject) => {
  const e = params.e;
  const db = params.db;
  const app_id_reference = params.app_id_reference;
@@ -1003,7 +946,10 @@ exports.check_unique = async (params, setup_doc, resolve, reject) => {
    return reject(error);
  }
};
-exports.get_connection = async (params, setup_doc, resolve, reject) => {
+
+export { check_unique_export as check_unique };
+
+export const get_connection = async (params, setup_doc, resolve, reject) => {
  try {
    if (!setup_doc.db_connection_string) throw 'db_connection_string missing';
    const nano = require('nano')(setup_doc.db_connection_string);
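
get_connection still builds the CouchDB client with the call-style nano loader, which is why the createRequire shim at the top of the file is kept after the ESM migration. A rough sketch of that connection path against the bumped nano ^10.0.4 API; the connection string, database name, field path and selector below are placeholders, not values from the package:

```js
// Sketch only: nano connection + Mango find, mirroring get_connection/query_db.
import { createRequire } from 'module';
const require = createRequire(import.meta.url);

// setup_doc.db_connection_string would normally supply this URL (placeholder here).
const nano = require('nano')('http://admin:password@localhost:5984');
const db = nano.db.use('example_app_db'); // placeholder database name

// query_db ultimately calls db.find(opt) with a Mango selector built by get_opt:
const result = await db.find({
  selector: { 'udfData.data.status': 'active' }, // placeholder field/value
  limit: 25,
});
console.log(result.docs.length);
```
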