@opengis/fastify-table 1.0.90 → 1.0.92

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Changelog.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # fastify-table
2
2
 
3
+ ## 1.0.92 - 03.09.2024
4
+
5
+ - fix getTemplate
6
+
7
+ ## 1.0.91 - 30.08.2024
8
+
9
+ - add CRUD and Auth logs
10
+
3
11
  ## 1.0.90 - 27.08.2024
4
12
 
5
13
  - logger file API to core
@@ -13,7 +13,10 @@ export default async function deleteCrud(req) {
13
13
 
14
14
  if (!table) return { status: 404, message: 'table is required' };
15
15
 
16
- const data = await dataDelete({ table, id });
16
+ const { user = {} } = req;
17
+ const data = await dataDelete({
18
+ table, id, uid: user?.uid,
19
+ });
17
20
 
18
21
  return { rowCount: data.rowCount, msg: !data.rowCount ? data : null };
19
22
  }
@@ -36,14 +36,18 @@ export default async function insert(req) {
36
36
 
37
37
  const { uid } = funcs.config?.auth?.disable || ispublic ? { uid: '1' } : user || {};
38
38
  Object.assign(req.body, { uid, editor_id: uid });
39
- const res = await dataInsert({ table: add || table, data: req.body });
39
+ const res = await dataInsert({
40
+ table: add || table, data: req.body, uid,
41
+ });
40
42
 
41
43
  const extraKeys = Object.keys(formData)?.filter((key) => formData?.[key]?.type === 'DataTable' && formData?.[key]?.table && formData?.[key]?.parent_id && req.body[key].length);
42
44
  if (extraKeys?.length) {
43
45
  res.extra = {};
44
46
  await Promise.all(extraKeys?.map(async (key) => {
45
47
  const extraRows = await Promise.all(req.body[key].map(async (row) => {
46
- const extraRes = await dataInsert({ table: formData[key].table, data: { ...row, [formData[key].parent_id]: req.body[formData[key].parent_id] } });
48
+ const extraRes = await dataInsert({
49
+ table: formData[key].table, data: { ...row, [formData[key].parent_id]: req.body[formData[key].parent_id] }, uid,
50
+ });
47
51
  return extraRes?.rows?.[0];
48
52
  }));
49
53
  Object.assign(res.extra, { [key]: extraRows.filter((el) => el) });
@@ -38,17 +38,19 @@ export default async function update(req) {
38
38
  return { message: 'Дані містять заборонені символи. Приберіть їх та спробуйте ще раз', status: 409 };
39
39
  }
40
40
 
41
- const res = await dataUpdate({ table: tokenData?.table || table, id: tokenData?.id || id, data: req.body });
41
+ const res = await dataUpdate({
42
+ table: tokenData?.table || table, id: tokenData?.id || id, data: req.body, uid,
43
+ });
42
44
 
43
45
  const extraKeys = Object.keys(formData)?.filter((key) => formData?.[key]?.type === 'DataTable' && formData?.[key]?.table && formData?.[key]?.parent_id && req.body[key].length);
44
46
  if (extraKeys?.length) {
45
47
  res.extra = {};
46
48
  await Promise.all(extraKeys?.map(async (key) => {
47
49
  // delete old extra data
48
- await pgClients.client.query(`delete from ${formData[key].table} where ${formData[key].parent_id}=$1`, [req.body[formData[key].parent_id]]);
50
+ await pgClients.client.query(`delete from ${formData[key].table} where ${formData[key].parent_id}=$1`, [req.body[formData[key].parent_id]]); // rewrite?
49
51
  // insert new extra data
50
52
  const extraRows = await Promise.all(req.body[key].map(async (row) => {
51
- const extraRes = await dataInsert({ table: formData[key].table, data: { ...row, [formData[key].parent_id]: req.body[formData[key].parent_id] } });
53
+ const extraRes = await dataInsert({ table: formData[key].table, data: { ...row, [formData[key].parent_id]: req.body[formData[key].parent_id] }, uid });
52
54
  return extraRes?.rows?.[0];
53
55
  }));
54
56
  Object.assign(res.extra, { [key]: extraRows.filter((el) => el) });
@@ -1,9 +1,10 @@
1
1
  import getPG from '../../pg/funcs/getPG.js';
2
2
 
3
3
  import getMeta from '../../pg/funcs/getMeta.js';
4
+ import logChanges from './utils/logChanges.js';
4
5
 
5
6
  export default async function dataDelete({
6
- table, id, pg: pg1,
7
+ table, id, pg: pg1, uid,
7
8
  }) {
8
9
  const pg = pg1 || getPG({ name: 'client' });
9
10
  const { pk } = await getMeta(table);
@@ -11,5 +12,8 @@ export default async function dataDelete({
11
12
  const delQuery = `delete from ${table} WHERE ${pk} = $1 returning *`;
12
13
  // console.log(updateDataset);
13
14
  const res = await pg.one(delQuery, [id]) || {};
15
+ await logChanges({
16
+ pg, table, id, uid, type: 'DELETE',
17
+ });
14
18
  return res;
15
19
  }
@@ -1,7 +1,10 @@
1
1
  import getPG from '../../pg/funcs/getPG.js';
2
2
  import getMeta from '../../pg/funcs/getMeta.js';
3
+ import logChanges from './utils/logChanges.js';
3
4
 
4
- export default async function dataInsert({ table, data, pg: pg1 }) {
5
+ export default async function dataInsert({
6
+ table, data, pg: pg1, uid,
7
+ }) {
5
8
  const pg = pg1 || getPG({ name: 'client' });
6
9
  if (!data) return null;
7
10
  const { columns } = await getMeta(table);
@@ -20,5 +23,8 @@ export default async function dataInsert({ table, data, pg: pg1 }) {
20
23
  returning *`;
21
24
 
22
25
  const res = await pg.query(insertQuery, [...filterData.map((el) => (typeof el[1] === 'object' && (!Array.isArray(el[1]) || typeof el[1]?.[0] === 'object') ? JSON.stringify(el[1]) : el[1]))]) || {};
26
+ await logChanges({
27
+ pg, table, data, id: res.rows?.[0]?.[pg.pk[table]], uid, type: 'INSERT',
28
+ });
23
29
  return res;
24
30
  }
@@ -1,9 +1,10 @@
1
1
  import getPG from '../../pg/funcs/getPG.js';
2
2
 
3
3
  import getMeta from '../../pg/funcs/getMeta.js';
4
+ import logChanges from './utils/logChanges.js';
4
5
 
5
6
  export default async function dataUpdate({
6
- table, id, data, pg: pg1,
7
+ table, id, data, pg: pg1, uid,
7
8
  }) {
8
9
  if (!data || !table || !id) return null;
9
10
 
@@ -14,11 +15,16 @@ export default async function dataUpdate({
14
15
  const filterData = Object.keys(data)
15
16
  .filter((el) => (/* typeof data[el] === 'boolean' ? true : data[el] && */ names?.includes(el)));
16
17
 
17
- const filterValue = filterData.map((el) => [el, data[el]]).map((el) => (typeof el[1] === 'object' && (!Array.isArray(el[1]) || typeof el[1]?.[0] === 'object') ? JSON.stringify(el[1]) : el[1]));
18
+ const filterValue = filterData.map((el) => [el, data[el]]).map((el) => (typeof el[1] === 'object' && el[1] && (!Array.isArray(el[1]) || typeof el[1]?.[0] === 'object') ? JSON.stringify(el[1]) : el[1]));
18
19
 
19
20
  const updateQuery = `UPDATE ${table} SET ${filterData?.map((key, i) => (key === 'geom' ? `"${key}"=st_setsrid(st_geomfromgeojson($${i + 2}::json),4326)` : `"${key}"=$${i + 2}`)).join(',')}
20
21
  WHERE ${pk} = $1 returning *`;
21
22
  // console.log(updateQuery, filterValue);
22
23
  const res = await pg.query(updateQuery, [id, ...filterValue]).then(el => el?.rows?.[0]) || {};
24
+
25
+ await logChanges({
26
+ pg, table, data, id, uid, type: 'UPDATE',
27
+ });
28
+
23
29
  return res;
24
30
  }
@@ -0,0 +1,71 @@
1
+ function formatData(fieldType = 'text', value = null) {
2
+ if (!value) return null;
3
+ if (fieldType === 'geometry') {
4
+ return typeof value === 'object' ? `st_astext(st_geomfromgeojson('${JSON.stringify(value)}'::json))` : `st_astext('${value}'::geometry)`;
5
+ }
6
+ if (['integer', 'numeric', 'double precision'].includes(fieldType)) {
7
+ return value || null;
8
+ }
9
+ if (fieldType.includes('timestamp') || fieldType === 'date') {
10
+ if (typeof value === 'object') {
11
+ return value ? `'${value.toISOString()}'::${fieldType}` : null;
12
+ }
13
+ return value ? `'${value}'::${fieldType}` : null;
14
+ }
15
+ /* if (fieldType.includes('json') && Array.isArray(value)) {
16
+ return value;
17
+ } */
18
+ if (Array.isArray(value)) {
19
+ return `'{ ${value.join(',')} }'::${fieldType}`;
20
+ }
21
+ return typeof value === 'object' ? JSON.stringify(value) : `'${value || null}'`;
22
+ }
23
+
24
+ export default async function logChanges({
25
+ pg, table, id, data, uid = 1, type,
26
+ }) {
27
+ if (!id) {
28
+ console.error('param id is required');
29
+ return null;
30
+ }
31
+ if (!table || !pg.pk?.[table]) {
32
+ console.error('table not found');
33
+ return null;
34
+ }
35
+ if (!pg.pk?.['log.table_changes'] || !pg.pk?.['log.table_changes_data']) {
36
+ console.error('log table not found');
37
+ return null;
38
+ }
39
+ if (!type) {
40
+ console.error('invalid type');
41
+ return null;
42
+ }
43
+
44
+ try {
45
+ const { change_id: changeId } = await pg.query(`insert into log.table_changes(change_date,change_type,change_user_id,entity_type,entity_id)
46
+ values(CURRENT_DATE, $1, $2, $3, $4) returning change_id`, [type, uid, table, id]).then((res) => res.rows?.[0] || {});
47
+
48
+ const q = `select ${Object.keys(data || {}).join(',') || '*'} from ${table} where ${pg.pk?.[table]}=$1`;
49
+ // console.log(q, type, id);
50
+ const { fields = [] } = await pg.query(`select * from ${table} limit 0`);
51
+ const old = type !== 'INSERT' ? await pg.query(q, [id]).then((res) => res.rows?.[0] || {}) : {};
52
+
53
+ const fieldTypes = fields?.reduce((acc, curr) => Object.assign(acc, { [curr.name]: pg.pgType[curr.dataTypeID] }), {}) || {};
54
+ const q1 = Object.keys(data || {}).map((el) => `insert into log.table_changes_data(change_id,entity_key,value_old,value_new)
55
+ values('${changeId}', '${el}', ${formatData(fieldTypes[el], old[el])}, ${formatData(fieldTypes[el], data[el])}) returning *`).join(';\n');
56
+ // console.log(q1);
57
+ const res = await pg.query(q1);
58
+
59
+ const newData = type === 'DELETE' ? {} : (Array.isArray(res) ? res : [res]).reduce((acc, curr) => Object.assign(acc, { [curr.rows?.[0].entity_key]: curr.rows?.[0].value_new }), {});
60
+ // console.log('logChanges OK', type);
61
+ return {
62
+ change_id: changeId, entity_type: table, entity_id: id, uid, change_type: type, old, new: newData,
63
+ };
64
+ }
65
+ catch (err) {
66
+ console.error('logChanges error', type, table, id, data, err.toString());
67
+ return {
68
+ error: err.toString(), entity_type: table, entity_id: id, uid, change_type: type,
69
+ };
70
+ }
71
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@opengis/fastify-table",
3
- "version": "1.0.90",
3
+ "version": "1.0.92",
4
4
  "type": "module",
5
5
  "description": "core-plugins",
6
6
  "main": "index.js",
@@ -1,44 +1,81 @@
1
1
  create schema if not exists log;
2
2
 
3
+ -- DROP TABLE IF EXISTS log.table_changes cascade;
3
4
  CREATE TABLE IF NOT EXISTS log.table_changes();
4
- ALTER TABLE log.table_changes DROP CONSTRAINT IF EXISTS log_table_changes_pkey;
5
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS table_change_id text NOT NULL DEFAULT next_id();
6
-
7
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS entity_type text;
8
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS entity_id text; -- object_id
5
+ ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS change_id text NOT NULL DEFAULT next_id();
9
6
  ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS change_type text;
10
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS change_key text;
11
7
  ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS change_date date;
12
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS json_old json;
13
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS json_new json;
14
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS date_old timestamp without time zone;
15
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS date_new timestamp without time zone;
16
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS number_old numeric;
17
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS number_new numeric;
18
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS bool_old boolean;
19
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS bool_new boolean;
20
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS text_old text;
21
- ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS text_new text;
22
-
8
+ ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS change_user_id text;
9
+ ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS entity_type text; -- table_name
10
+ ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS entity_id text; -- object_id
23
11
  ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS uid text;
24
12
  ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS cdate timestamp without time zone DEFAULT (now())::timestamp without time zone;
25
13
  ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS editor_id text;
26
14
  ALTER TABLE log.table_changes ADD COLUMN IF NOT EXISTS editor_date timestamp without time zone;
27
- ALTER TABLE log.table_changes ADD CONSTRAINT log_table_changes_pkey PRIMARY KEY (table_change_id);
28
15
 
29
- CREATE TABLE IF NOT EXISTS log.user_auth();
30
- ALTER TABLE log.user_auth DROP CONSTRAINT IF EXISTS log_user_auth_pkey;
31
- ALTER TABLE log.user_auth DROP CONSTRAINT IF EXISTS log_user_auth_user_id_fkey;
16
+ -- DROP TABLE IF EXISTS log.table_changes_data;
17
+ CREATE TABLE IF NOT EXISTS log.table_changes_data();
18
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS change_data_id text NOT NULL DEFAULT next_id();
19
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS change_id text not null;
20
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS entity_key text; -- column_name
21
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS value_old text;
22
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS value_new text;
23
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS uid text;
24
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS cdate timestamp without time zone DEFAULT (now())::timestamp without time zone;
25
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS editor_id text;
26
+ ALTER TABLE log.table_changes_data ADD COLUMN IF NOT EXISTS editor_date timestamp without time zone;
32
27
 
28
+ -- DROP TABLE IF EXISTS log.user_auth;
29
+ CREATE TABLE IF NOT EXISTS log.user_auth();
33
30
  ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS user_auth_id text NOT NULL DEFAULT next_id();
34
-
35
31
  ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS user_id text;
36
- ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS user_auth_date timestamp without time zone;
37
- ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS user_auth_type text;
38
-
32
+ ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS auth_date timestamp without time zone;
33
+ ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS auth_type text;
39
34
  ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS uid text;
40
35
  ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS cdate timestamp without time zone DEFAULT (now())::timestamp without time zone;
41
36
  ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS editor_id text;
42
37
  ALTER TABLE log.user_auth ADD COLUMN IF NOT EXISTS editor_date timestamp without time zone;
38
+
39
+ COMMENT ON TABLE log.table_changes IS 'Логи подій змін в БД';
40
+ COMMENT ON COLUMN log.table_changes.change_type IS 'Тип події (insert / update / delete)';
41
+ COMMENT ON COLUMN log.table_changes.change_date IS 'Дата внесення змін до БД';
42
+ COMMENT ON COLUMN log.table_changes.entity_type IS 'Таблиця, до якої вносяться зміни';
43
+ COMMENT ON COLUMN log.table_changes.entity_id IS 'ID строки, до якої вносяться зміни';
44
+ COMMENT ON COLUMN log.table_changes.change_user_id IS 'Ініціатор внесення змін';
45
+
46
+ COMMENT ON TABLE log.table_changes_data IS 'Логи змін в таблицях БД';
47
+ COMMENT ON COLUMN log.table_changes_data.change_id IS 'ID події зміни в БД';
48
+ COMMENT ON COLUMN log.table_changes_data.entity_key IS 'Колонка таблиці, до якої вносяться зміни';
49
+ COMMENT ON COLUMN log.table_changes_data.value_old IS 'Старе значення';
50
+ COMMENT ON COLUMN log.table_changes_data.value_new IS 'Нове значення';
51
+
52
+ COMMENT ON TABLE log.user_auth IS 'Логи авторизації';
53
+ COMMENT ON COLUMN log.user_auth.user_id IS 'ID користувача';
54
+ COMMENT ON COLUMN log.user_auth.auth_date IS 'Дата авторизації';
55
+ COMMENT ON COLUMN log.user_auth.auth_type IS 'Тип авторизації';
56
+
57
+ ALTER TABLE log.table_changes DROP CONSTRAINT IF EXISTS log_table_changes_pkey cascade;
58
+ ALTER TABLE log.table_changes_data DROP CONSTRAINT IF EXISTS log_table_changes_data_pkey;
59
+ ALTER TABLE log.table_changes_data DROP CONSTRAINT IF EXISTS log_table_changes_data_change_id_fkey;
60
+ ALTER TABLE log.user_auth DROP CONSTRAINT IF EXISTS log_user_auth_pkey;
61
+ ALTER TABLE log.user_auth DROP CONSTRAINT IF EXISTS log_user_auth_user_id_fkey;
62
+
63
+ ALTER TABLE log.table_changes ADD CONSTRAINT log_table_changes_pkey PRIMARY KEY (change_id);
64
+ ALTER TABLE log.table_changes_data ADD CONSTRAINT log_table_changes_data_pkey PRIMARY KEY (change_data_id);
65
+ ALTER TABLE log.table_changes_data ADD CONSTRAINT log_table_changes_data_change_id_fkey FOREIGN KEY (change_id)
66
+ REFERENCES log.table_changes (change_id);
43
67
  ALTER TABLE log.user_auth ADD CONSTRAINT log_user_auth_pkey PRIMARY KEY (user_auth_id);
44
- -- ALTER TABLE log.user_auth ADD CONSTRAINT log_user_auth_user_id_fkey FOREIGN KEY (user_id) REFERENCES admin.users (uid) MATCH SIMPLE;
68
+ -- ALTER TABLE log.user_auth ADD CONSTRAINT log_user_auth_user_id_fkey FOREIGN KEY (user_id) REFERENCES admin.users (uid) MATCH SIMPLE;
69
+
70
+ /* drop old columns */
71
+ alter table log.table_changes drop column if exists date_new;
72
+ alter table log.table_changes drop column if exists date_old;
73
+ alter table log.table_changes drop column if exists number_new;
74
+ alter table log.table_changes drop column if exists number_old;
75
+ alter table log.table_changes drop column if exists json_new;
76
+ alter table log.table_changes drop column if exists json_old;
77
+ alter table log.table_changes drop column if exists text_new;
78
+ alter table log.table_changes drop column if exists text_old;
79
+ alter table log.table_changes drop column if exists bool_new;
80
+ alter table log.table_changes drop column if exists bool_old;
81
+ alter table log.table_changes drop column if exists table_change_id;
@@ -2,7 +2,7 @@ import getSelectMeta from './utils/getSelectMeta.js';
2
2
  import getPG from '../../pg/funcs/getPG.js';
3
3
  import config from '../../config.js';
4
4
  import getTemplate from './utils/getTemplate.js';
5
- import getTableSql from '../funcs/getFilterSQL/util/getTableSql.js';
5
+ // import getTableSql from '../funcs/getFilterSQL/util/getTableSql.js';
6
6
 
7
7
  const limit = 50;
8
8
  const headers = { 'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': 'GET', 'Cache-Control': 'no-cache' };
@@ -22,7 +22,7 @@ export default async function suggest(req) {
22
22
  if (table && !pg1.pk[body?.table || table]) {
23
23
  return { headers, status: 400, message: 'param name is invalid: 1' };
24
24
  }
25
- const columnExists = true;
25
+ const columnExists = body?.columns?.find((col) => col?.name === column);
26
26
  if (table && (!column || !columnExists)) {
27
27
  return { headers, status: 400, message: 'param name is invalid: 2' };
28
28
  }
@@ -32,7 +32,7 @@ export default async function suggest(req) {
32
32
  original: `with c(id,text) as (select row_number() over(), ${column} from ${body?.table || table} group by ${column} ) select * from c`,
33
33
  searchQuery: '("text" ilike $1 )',
34
34
  }
35
- : await getSelectMeta({ name: selectName });
35
+ : await getSelectMeta({ name: selectName, nocache: query?.nocache });
36
36
  const pg = meta?.db ? getPG(meta.db) : pg1;
37
37
  if (!meta) return { headers, status: 404, message: 'Not found query select ' };
38
38
  if (query.meta) return meta;
@@ -26,7 +26,10 @@ export default async function tableAPI(req) {
26
26
  // skip non-existing columns
27
27
  const columnList = dbColumns.map((el) => el.name || el).join(',');
28
28
 
29
- const cols = Object.keys(schema || {}).filter((col) => columnList.includes(col) && !extraKeys.includes(col))?.join(',');
29
+ const { fields = [] } = !loadTable?.table ? await pg.query(`select * from ${table || opt?.table || params.table} limit 0`) : {};
30
+ const cols = loadTable?.table
31
+ ? Object.keys(schema || {}).filter((col) => columnList.includes(col) && !extraKeys.includes(col))?.join(',')
32
+ : fields.map((el) => (el?.name?.includes('geom') ? `st_asgeojson(${el.name})::json as "${el.name}"` : `"${el?.name}"`)).join(',');
30
33
  const where = [`"${pk}" = $1`, loadTable.query].filter((el) => el);
31
34
  const geom = columnList.includes('geom') ? 'st_asgeojson(geom)::json as geom,' : '';
32
35
  const q = `select "${pk}" as id, ${geom} ${cols || '*'} from ${table || opt?.table || params.table} t where ${where.join(' and ') || 'true'} limit 1`;
@@ -13,8 +13,8 @@ function getTable(table) {
13
13
 
14
14
  const selectMeta = {};
15
15
 
16
- export default async function getSelectMeta({ name, pg: pg1 }) {
17
- if (selectMeta[name]) return selectMeta[name];
16
+ export default async function getSelectMeta({ name, pg: pg1, nocache }) {
17
+ if (selectMeta[name] && !nocache) return selectMeta[name];
18
18
 
19
19
  const cls = await getSelect(name);
20
20
 
@@ -23,6 +23,6 @@ export default async function getTemplateDir(type, name) {
23
23
 
24
24
  const sql = loadTemplate[type].includes(`${name}.sql`) ? await readFile(path.join(typeDir, `${name}.sql`), 'utf-8') : null;
25
25
  const data = loadTemplate[type].includes(`${name}.json`) ? JSON.parse(await readFile(path.join(typeDir, `${name}.json`), 'utf-8')) : await readFile(path.join(typeDir, `${name}.${ext}`), 'utf-8');
26
- if (sql) return { ...data || {}, sql };
26
+ if (sql) return { sql };
27
27
  return data;
28
28
  }
@@ -63,9 +63,11 @@ test('widget api', async (t) => {
63
63
  });
64
64
 
65
65
  // before GET log request
66
- await t.test('FAKE POST /widget/history/:objectid', async() => {
67
- const res = await pg.query(`insert into log.table_changes(uid, entity_id, entity_type)
68
- select $1, '1', 'admin.users'`, [config.testUser?.uid || '1']);
66
+ await t.test('FAKE POST /widget/history/:objectid', async () => {
67
+ const { changeId } = await pg.query(`insert into log.table_changes(uid, entity_id, entity_type)
68
+ select $1, '1', 'admin.users' returning change_id as "changeId"`, [config.testUser?.uid || '1']).then((res) => res.rows?.[0] || {});
69
+ const res1 = await pg.query(`insert into log.table_changes_data(change_id, entity_key, value_new)
70
+ select $1, 'uid', $2`, [changeId, config.testUser?.uid || '1']);
69
71
  // console.log(res.rowCount);
70
72
  });
71
73
 
@@ -104,11 +106,12 @@ test('widget api', async (t) => {
104
106
  assert.ok(resp?.data?.id === fileId, 'file widget delete fail');
105
107
  });
106
108
 
107
- await t.test('clean after test', async() => {
108
- const res1 = await pg.query(`delete from crm.communications where entity_id=$1`, ['1']);
109
- const res2 = await pg.query(`delete from crm.checklists where entity_id=$1`, ['1']);
110
- const res3 = await pg.query(`delete from crm.files where entity_id=$1`, ['1']);
111
- const res4 = await pg.query(`delete from log.table_changes where entity_id=$1`, ['1']);
109
+ await t.test('clean after test', async () => {
110
+ const res1 = await pg.query('delete from crm.communications where entity_id=$1', ['1']);
111
+ const res2 = await pg.query('delete from crm.checklists where entity_id=$1', ['1']);
112
+ const res3 = await pg.query('delete from crm.files where entity_id=$1', ['1']);
113
+ const res4 = await pg.query('delete from log.table_changes_data where change_id in (select change_id from log.table_changes where entity_id=$1)', ['1']);
114
+ const res5 = await pg.query('delete from log.table_changes where entity_id=$1', ['1']);
112
115
  pg.end();
113
116
  });
114
117
  });
@@ -7,6 +7,7 @@ import rclient from '../../redis/client.js';
7
7
  import dataInsert from '../../crud/funcs/dataInsert.js';
8
8
  import dataUpdate from '../../crud/funcs/dataUpdate.js';
9
9
  import dataDelete from '../../crud/funcs/dataDelete.js';
10
+ import logChanges from '../../crud/funcs/utils/logChanges.js';
10
11
  import isFileExists from '../../crud/funcs/isFileExists.js';
11
12
 
12
13
  import getOpt from '../../crud/funcs/getOpt.js';
@@ -24,21 +25,66 @@ test('funcs crud', async (t) => {
24
25
  assert.equal(data.table, 'gis.dataset');
25
26
  });
26
27
 
27
- const id = (Math.random() * 10000).toFixed();
28
- /* await t.test('dataInsert', async () => {
29
- const data = await dataInsert({ table: 'gis.dataset', data: { dataset_id: id, dataset_name: '222' } });
30
- assert.equal(data.dataset_id, id);
31
- });
28
+ if (config?.local) {
29
+ const id = (Math.random() * 10000).toFixed();
30
+ await t.test('dataInsert', async () => {
31
+ const data = await dataInsert({ table: 'gis.dataset', data: { dataset_id: id, dataset_name: '222' }, uid: '2' });
32
+ assert.equal(data.rows?.[0]?.dataset_id, id);
33
+ });
34
+ await t.test('dataUpdate', async () => {
35
+ const data = await dataUpdate({
36
+ table: 'gis.dataset', id, data: { dataset_name: '22211' }, uid: '2',
37
+ });
38
+ assert.equal(data.dataset_name, '22211');
39
+ });
40
+ await t.test('dataDelete', async () => {
41
+ const data = await dataDelete({ table: 'gis.dataset', id, uid: '2' });
42
+ assert.ok(data);
43
+ });
44
+ await t.test('clean up after test', async () => {
45
+ if (pgClients.client.pk?.['log.table_changes_data']) {
46
+ const { rowCount: testDatasets } = await pgClients.client.query('delete from gis.dataset where uid=$1', ['2']);
47
+ const { rowCount } = await pgClients.client.query('delete from log.table_changes_data where change_id in (select change_id from log.table_changes where entity_id=$1)', [id]);
48
+ const { rowCount: rowCount1 } = await pgClients.client.query('delete from log.table_changes where entity_id=$1', [id]);
49
+ console.log('CRUD test clean up', id, testDatasets, rowCount, rowCount1);
50
+ assert.ok(rowCount1);
51
+ }
52
+ });
53
+ }
32
54
 
33
- await t.test('dataUpdate', async () => {
34
- const data = await dataUpdate({ table: 'gis.dataset', id, data: { dataset_name: '22211' } });
35
- assert.equal(data.dataset_name, '22211');
36
- });
55
+ if (pgClients.client.pk?.['log.table_changes_data']) {
56
+ const id = (Math.random() * 10000).toFixed();
57
+ const name = '222';
58
+ await t.test('logChanges INSERT', async () => {
59
+ await pgClients.client.query(`insert into gis.dataset(dataset_id,dataset_name, uid) values($1,$2,$3) on conflict(dataset_id) do
60
+ update set dataset_name = excluded.dataset_name, uid = excluded.uid`, [id, name, '2']);
37
61
 
38
- await t.test('dataDelete', async () => {
39
- const data = await dataDelete({ table: 'gis.dataset', id });
40
- assert.ok(data);
41
- }); */
62
+ const data = await logChanges({
63
+ pg: pgClients.client, id, table: 'gis.dataset', data: { dataset_id: id, dataset_name: '222' }, uid: '2', type: 'INSERT',
64
+ });
65
+ assert.ok(data?.change_type === 'INSERT' && data?.entity_id === id && data?.new?.dataset_name === '222', typeof data === 'object' ? JSON.stringify(data) : data);
66
+ });
67
+ await t.test('logChanges UPDATE', async () => {
68
+ /* await pgClients.client.query('update gis.dataset(dataset_id,dataset_name) set dataset_name=$', [id, name]); */
69
+ const data = await logChanges({
70
+ pg: pgClients.client, id, table: 'gis.dataset', data: { dataset_id: id, dataset_name: '2222' }, uid: '2', type: 'UPDATE',
71
+ });
72
+ assert.ok(data?.change_type === 'UPDATE' && data?.entity_id === id && data?.new?.dataset_name === '2222' && data?.old?.dataset_name === '222', typeof data === 'object' ? JSON.stringify(data) : data);
73
+ });
74
+ await t.test('logChanges DELETE', async () => {
75
+ const data = await logChanges({
76
+ pg: pgClients.client, id, table: 'gis.dataset', uid: '2', type: 'DELETE',
77
+ });
78
+ assert.ok(data?.change_type === 'DELETE' && data?.old?.dataset_id === id, typeof data === 'object' ? JSON.stringify(data) : data);
79
+ });
80
+ await t.test('clean up after test', async () => {
81
+ const { rowCount: testDatasets } = await pgClients.client.query('delete from gis.dataset where uid=$1', ['2']);
82
+ const { rowCount } = await pgClients.client.query('delete from log.table_changes_data where change_id in (select change_id from log.table_changes where entity_id=$1)', [id]);
83
+ const { rowCount: rowCount1 } = await pgClients.client.query('delete from log.table_changes where entity_id=$1', [id]);
84
+ console.log('logChanges test clean up', id, testDatasets, rowCount, rowCount1);
85
+ assert.ok(rowCount1);
86
+ });
87
+ }
42
88
 
43
89
  await t.test('isFileExists', async () => {
44
90
  const data = await isFileExists({ filepath: '../../crud/funcs/isFileExists.js' });
@@ -46,7 +92,7 @@ test('funcs crud', async (t) => {
46
92
  });
47
93
 
48
94
  let tokens;
49
- const session = { passport: { user: { uid: '1' } } };
95
+ // const session = { passport: { user: { uid: '1' } } };
50
96
  const tokenData = JSON.stringify({ add: 'gis.dataset', form: 'test.dataset.form' });
51
97
 
52
98
  await t.test('setToken', async () => {
@@ -1,51 +1,57 @@
1
- import dataInsert from "../../crud/funcs/dataInsert.js";
1
+ import dataInsert from '../../crud/funcs/dataInsert.js';
2
2
 
3
3
  const table = 'crm.properties';
4
4
 
5
5
  function checkKeyType({ body, key }) {
6
- if (typeof body[key] === 'number' && (!/\D/.test(body[key].toString()) && body[key].toString().length < 10)) {
7
- return { [key]: 'int' };
8
- } else if (typeof body[key] === 'object') {
9
- return { [key]: 'json' };
10
- } else if (Date.parse(body[key], 'yyyy/MM/ddTHH:mm:ss.000Z')) {
11
- return { [key]: 'date' };
12
- }
13
- return { [key]: 'text' };
6
+ if (typeof body[key] === 'number' && (!/\D/.test(body[key].toString()) && body[key].toString().length < 10)) {
7
+ return { [key]: 'int' };
8
+ } if (typeof body[key] === 'object') {
9
+ return { [key]: 'json' };
10
+ }
11
+ if (Date.parse(body[key], 'yyyy/MM/ddTHH:mm:ss.000Z')) {
12
+ return { [key]: 'date' };
13
+ }
14
+ return { [key]: 'text' };
14
15
  }
15
16
 
16
17
  export default async function addExtraProperties({
17
- pg, params = {}, body = {},
18
+ pg, funcs, params = {}, body = {}, user = {},
18
19
  }) {
19
- const { id } = params;
20
- if (!id) {
21
- return { message: 'not enougn params: 1', status: 400 };
22
- }
23
- const extraProperties = Object.keys(body);
24
- if (!extraProperties.length) {
25
- return { message: 'not enougn params: 2', status: 400 };
26
- }
20
+ const { id } = params;
21
+ if (!id) {
22
+ return { message: 'not enougn params: 1', status: 400 };
23
+ }
24
+ const extraProperties = Object.keys(body);
25
+ if (!extraProperties.length) {
26
+ return { message: 'not enougn params: 2', status: 400 };
27
+ }
27
28
 
28
- if (!pg.pk?.[table]) {
29
- return { message: 'table not found: crm.properties', status: 400 };
30
- }
29
+ if (!pg.pk?.[table]) {
30
+ return { message: 'table not found: crm.properties', status: 400 };
31
+ }
31
32
 
32
- try {
33
- await pg.query(`delete from crm.properties where object_id=$1`, [id]);
34
- const keyTypeMatch = extraProperties.filter((key) => body[key]).reduce((acc, curr) => Object.assign(acc, checkKeyType({ body, key: curr })), {});
35
- const res = await Promise.all(Object.keys(keyTypeMatch).map(async (key) => {
36
- const propertyType = keyTypeMatch[key];
37
- const { rows = [] } = await dataInsert({
38
- pg, table, data: {
39
- property_type: propertyType,
40
- property_key: key,
41
- object_id: id,
42
- [`property_${propertyType}`]: body[key],
43
- }
44
- });
45
- return { id: rows[0]?.property_id, type: propertyType, value: body[key] };
46
- }));
47
- return { message: { rows: res }, status: 200 };
48
- } catch (err) {
49
- return { error: err.toString(), status: 500 };
50
- }
51
- }
33
+ try {
34
+ const uid = funcs.config?.auth?.disable ? '1' : user.uid;
35
+ await pg.query('delete from crm.properties where object_id=$1', [id]); // rewrite?
36
+ const keyTypeMatch = extraProperties.filter((key) => body[key]).reduce((acc, curr) => Object.assign(acc, checkKeyType({ body, key: curr })), {});
37
+ const res = await Promise.all(Object.keys(keyTypeMatch).map(async (key) => {
38
+ const propertyType = keyTypeMatch[key];
39
+ const { rows = [] } = await dataInsert({
40
+ pg,
41
+ table,
42
+ data: {
43
+ property_type: propertyType,
44
+ property_key: key,
45
+ object_id: id,
46
+ [`property_${propertyType}`]: body[key],
47
+ },
48
+ uid,
49
+ });
50
+ return { id: rows[0]?.property_id, type: propertyType, value: body[key] };
51
+ }));
52
+ return { message: { rows: res }, status: 200 };
53
+ }
54
+ catch (err) {
55
+ return { error: err.toString(), status: 500 };
56
+ }
57
+ }
@@ -29,9 +29,11 @@ export default async function widgetGet({
29
29
  from crm.communications c left join admin.users u on u.uid=c.uid where entity_id=$1 order by cdate desc`
30
30
  : 'select communication_id, entity_id, body, subject, cdate, uid from crm.communications where entity_id=$1 order by cdate desc',
31
31
 
32
- history: `SELECT table_change_id, entity_id, entity_type, change_key, change_date, json_old, json_new, date_old,
33
- date_new, number_old, number_new, bool_old, bool_new, text_old,
34
- text_new, uid, cdate FROM log.table_changes where entity_id=$1 order by cdate desc, change_key limit 100`,
32
+ history: `SELECT b.change_data_id, change_id, entity_id, entity_type, change_type, change_date, uid, cdate, b.entity_key, b.value_new, b.value_old FROM log.table_changes a
33
+ left join lateral(
34
+ select change_data_id, entity_key, value_new, value_old from log.table_changes_data where change_id=a.change_id
35
+ )b on 1=1
36
+ where entity_id=$1 and b.change_data_id is not null order by cdate desc limit 100`,
35
37
 
36
38
  checklist: pg.pk['admin.users']
37
39
  ? `SELECT checklist_id, entity_id, subject, is_done, done_date, c.uid, c.cdate, coalesce(user_name,' ')||' '||coalesce(sur_name,'') as username,
@@ -45,7 +45,9 @@ export default async function widgetSet(req) {
45
45
  return { message: 'invalid file extension', status: 400 };
46
46
  }
47
47
 
48
- const { rows = [] } = await dataInsert({ table: 'crm.files', data });
48
+ const { rows = [] } = await dataInsert({
49
+ table: 'crm.files', data, uid: user?.uid,
50
+ });
49
51
  return {
50
52
  rowCount: 1, data: 'ok', command: 'UPLOAD', id: rows[0]?.file_id, entity_id: rows[0]?.entity_id,
51
53
  };
@@ -56,8 +58,12 @@ export default async function widgetSet(req) {
56
58
  const data = { ...body, uid: user?.uid, entity_id: objectid };
57
59
 
58
60
  const result = id
59
- ? await dataUpdate({ table, data, id })
60
- : await dataInsert({ table, data });
61
+ ? await dataUpdate({
62
+ table, data, id, uid: user?.uid,
63
+ })
64
+ : await dataInsert({
65
+ table, data, uid: user?.uid,
66
+ });
61
67
 
62
68
  return {
63
69
  rowCount: result.rowCount, data: 'ok', command: result.command, id: result.rows?.[0]?.[pkList[type]] || result?.[pkList[type]],
package/widget/index.js CHANGED
@@ -10,7 +10,7 @@ const tableSchema = {
10
10
  },
11
11
  querystring: {
12
12
  debug: { type: 'string', pattern: '^(\\d+)$' },
13
- }
13
+ },
14
14
  };
15
15
 
16
16
  async function route(fastify, opt) {