@opengis/bi 1.0.14 → 1.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. package/README.md +50 -50
  2. package/config.js +12 -12
  3. package/dist/bi.js +1 -1
  4. package/dist/bi.umd.cjs +63 -63
  5. package/dist/{import-file-DUp3rsNI.js → import-file-CRC0sYYT.js} +8055 -7987
  6. package/dist/{map-component-mixin-CGM0P5ub.js → map-component-mixin-BCtWEvzv.js} +3795 -2116
  7. package/dist/style.css +1 -1
  8. package/dist/{vs-calendar-cOoinEwc.js → vs-calendar-5ot79n0N.js} +20 -9
  9. package/dist/{vs-funnel-bar-kLkPoIhJ.js → vs-funnel-bar-CLo6gXI_.js} +2 -2
  10. package/dist/{vs-heatmap-3XAVGTSo.js → vs-heatmap-DHGA8dRk.js} +3 -4
  11. package/dist/{vs-map-cluster-BWJPx7wE.js → vs-map-cluster-CNgX6JVF.js} +2 -2
  12. package/dist/{vs-map-B1tr6V5_.js → vs-map-pIn5wS4G.js} +2 -2
  13. package/dist/{vs-number-CrU7LmkV.js → vs-number-DYfok8VU.js} +19 -12
  14. package/dist/{vs-text-DRPx3aID.js → vs-text-Dckykz09.js} +19 -14
  15. package/package.json +107 -97
  16. package/plugin.js +14 -13
  17. package/server/migrations/bi.dataset.sql +26 -0
  18. package/server/migrations/bi.sql +93 -93
  19. package/server/plugins/docs.js +48 -48
  20. package/server/plugins/hook.js +89 -89
  21. package/server/plugins/vite.js +69 -69
  22. package/server/routes/dashboard/controllers/dashboard.delete.js +38 -37
  23. package/server/routes/dashboard/controllers/dashboard.js +118 -114
  24. package/server/routes/dashboard/controllers/dashboard.list.js +30 -36
  25. package/server/routes/dashboard/controllers/utils/yaml.js +11 -11
  26. package/server/routes/dashboard/index.mjs +25 -25
  27. package/server/routes/data/controllers/data.js +167 -156
  28. package/server/routes/data/controllers/util/chartSQL.js +42 -39
  29. package/server/routes/data/controllers/util/normalizeData.js +59 -56
  30. package/server/routes/data/index.mjs +29 -24
  31. package/server/routes/dataset/controllers/bi.dataset.demo.add.js +97 -0
  32. package/server/routes/dataset/controllers/bi.dataset.import.js +67 -0
  33. package/server/routes/dataset/controllers/util/create.table.js +22 -0
  34. package/server/routes/dataset/controllers/util/prepare.data.js +49 -0
  35. package/server/routes/dataset/index.mjs +19 -0
  36. package/server/routes/db/controllers/dbTablePreview.js +63 -63
  37. package/server/routes/db/controllers/dbTables.js +36 -36
  38. package/server/routes/db/index.mjs +17 -17
  39. package/server/routes/edit/controllers/dashboard.add.js +26 -24
  40. package/server/routes/edit/controllers/dashboard.edit.js +46 -44
  41. package/server/routes/edit/controllers/widget.add.js +75 -73
  42. package/server/routes/edit/controllers/widget.del.js +69 -70
  43. package/server/routes/edit/controllers/widget.edit.js +52 -103
  44. package/server/routes/edit/index.mjs +31 -31
  45. package/server/routes/map/controllers/cluster.js +109 -104
  46. package/server/routes/map/controllers/clusterVtile.js +166 -213
  47. package/server/routes/map/controllers/geojson.js +127 -127
  48. package/server/routes/map/controllers/map.js +60 -57
  49. package/server/routes/map/controllers/utils/downloadClusterData.js +43 -0
  50. package/server/routes/map/controllers/vtile.js +183 -182
  51. package/server/routes/map/index.mjs +25 -25
  52. package/server/utils/getWidget.js +85 -83
  53. package/utils.js +12 -12
@@ -0,0 +1,97 @@
1
+ import path from 'node:path';
2
+ import { readFile, writeFile, mkdir } from 'node:fs/promises';
3
+
4
+ import { getPG } from '@opengis/fastify-table/utils.js';
5
+
6
// Base CDN location that hosts the demo SQL dumps.
const host = 'https://cdn.softpro.ua/demo/bi';

// Project root; downloaded dumps are cached under <root>/log/temp/.
const root = process.cwd();
// Progress reporter — currently just stdout; could become an event stream later.
const send = (msg) => console.log(msg)
11
/**
 * Downloads one demo SQL dump from the CDN, caches it on disk and (re)creates
 * the corresponding dataset table by executing the dump.
 *
 * @param {string} fileName - dump file name, e.g. 'demo.orders.sql'; the target
 *   table name is path.parse(fileName).name (so 'demo.orders.sql' -> demo.orders).
 * @returns {Promise<Object>} { [fileName]: { result, status } }
 */
async function downloadFile(fileName) {
  const pg = getPG();
  send(`${fileName} - checking if already downloaded...`);

  const demoPath = path.join(root, '/log/temp/', fileName);

  // Fetch the dump; bail out before touching the body on a non-200 response
  // (the original read response.text() first and then discarded it on 404).
  const response = await fetch(`${host}/${fileName}`);
  if (response?.status !== 200) {
    return { [fileName]: { result: 'file not found', status: 404 } };
  }
  const body = await response.text();

  // Cache a copy on disk for debugging / later inspection.
  await mkdir(path.dirname(demoPath), { recursive: true });
  await writeFile(demoPath, body, 'utf8');

  // Always recreate the table from scratch. NOTE(review): the original code
  // checked to_regclass() *after* this drop, so its "table already exists"
  // branch was unreachable dead code and has been removed — the caller's
  // count check is what prevents needless re-imports.
  await pg.query(`drop table if exists ${path.parse(fileName).name}`);

  // Execute the dump directly from memory (no need to re-read the file).
  await pg.query(body);
  return { [fileName]: { result: 'success', status: 200 } };
}
44
+
45
/**
 * Import demo dashboard data
 *
 * @method GET
 * @summary Import demo dashboard data
 * @priority 4
 * @type api
 * @tag bi
 * @errors 400,500
 * @returns {Number} status Error / status code
 * @returns {String} error Error description
 * @returns {Object} message Import summary ({ ids, message, resultAll })
 */

// CDN file name -> human-readable dataset title (Ukrainian).
// The dataset table name is derived from the file name via path.parse(...).name,
// e.g. 'demo.orders.sql' -> table 'demo.orders'.
const demoList = {
  'demo.orders.sql': 'Статистика продажів (demo)',
  'demo.cleaned_sales_data.sql': 'Статистика продажів (demo 2)',
  'demo.video_game_sales.sql': 'Статистика продажів відеоігор (demo)',
};
64
+
65
/**
 * GET /bi-dataset-demo — downloads the demo SQL dumps (unless all demo tables
 * already exist) and upserts one bi.dataset registry row per dump.
 *
 * @param {Object} req - fastify request ({ pg, query })
 * @returns {Promise<Object>} { message: { ids, message, resultAll? }, status }
 */
export default async function biDatasetDemoAdd(req) {
  const { pg, query = {} } = req;

  // One upsert per demo file; executed later as a single multi-statement query.
  const sqlList = [];

  const tables = Object.keys(demoList).map((el) => {
    const table = path.parse(el).name;
    sqlList.push(`insert into bi.dataset(dataset_id,name,table_name)
    select '${el}', '${demoList[el]}', '${table}' on conflict(dataset_id) do update set
    name=excluded.name, table_name=excluded.table_name
    returning dataset_id as id`);
    return table;
  });

  // How many of the demo tables already exist? count(*) arrives as a string.
  const { count } = await pg.query(`select count(*) from pg_catalog.pg_tables where schemaname||'.'||tablename = any($1)`, [tables]).then((res1) => res1.rows?.[0] || {});

  // All tables present: just refresh the registry rows and return.
  // (The original compared against the literal '3', which would silently break
  // if demoList ever changed size.)
  if (Number(count) === Object.keys(demoList).length && !query.nocache) {
    const result = await pg.query(sqlList.join(';\n'));
    const ids = (Array.isArray(result) ? result : [result]).filter((el) => el).map((el) => el?.rows?.[0]?.id);
    return { message: { ids, message: 'all datasets already downloaded' }, status: 200 };
  }

  // Download + import all dumps in parallel. Best-effort: a failure is logged
  // and resultAll becomes undefined, but the registry rows are still upserted.
  const resultAll = await Promise.all(Object.keys(demoList).map((fileName) => downloadFile(fileName))).catch((err) => console.log(err));

  const result = await pg.query(sqlList.join(';\n'));
  const ids = (Array.isArray(result) ? result : [result]).filter((el) => el).map((el) => el?.rows?.[0]?.id);
  return { message: { ids, message: {}, resultAll }, status: 200 };
}
@@ -0,0 +1,67 @@
1
+ import { randomBytes } from 'node:crypto';
2
+
3
+ import createTableFunc from './util/create.table.js';
4
+ import prepareData from './util/prepare.data.js';
5
+
6
/**
 * Import data into a BI dataset
 *
 * @method POST
 * @summary Import data into a BI dataset
 * @priority 4
 * @alias biDatasetPost
 * @type api
 * @tag bi
 * @param {Object} params.id Dataset ID
 * @errors 400,500
 * @returns {Number} status Error / status code
 * @returns {String} error Error description
 * @returns {Object} message Import summary ({ id, action, result, total })
 */

// Escape single quotes so values can be safely embedded in SQL string literals.
const sqlEscape = (val) => String(val).replace(/'/g, "''");

export default async function biDatasetPost(req) {
  const {
    pg, params = {}, body = {}, session = {},
  } = req;
  const { uid } = session?.passport?.user || {};

  if (!uid) {
    return { message: 'access restricted', status: 403 };
  }

  // Accept either a plain row array (csv/xls) or a GeoJSON FeatureCollection.
  if (!Array.isArray(body?.data) && body?.data?.features?.[0]?.type !== 'Feature') {
    return { message: 'body data param is invalid', status: 400 };
  }

  const dataset = await pg.query('select dataset_id as id, name, table_name as table from bi.dataset where dataset_id=$1', [params?.id]).then((res) => res.rows?.[0] || {});
  if (!dataset.id) {
    return { message: `dataset not found: ${params?.id}`, status: 404 };
  }

  // Random physical table name. The 't' prefix guarantees a valid identifier:
  // the original only stripped leading digits, which could (rarely) produce an
  // empty or digit-led name from the hex string.
  const tableName = 't'.concat(randomBytes(64).toString('hex').substring(0, 24).replace(/^\d+/, ''));
  const pkey = tableName.concat('_id');
  const table = `bi_data.${tableName}`;
  const { columns, insertData } = await prepareData({ req, table, data: body?.data });
  const createTable = createTableFunc({ table, pkey, columns });
  // Interpolated values are escaped — uid/dataset id come from outside this module.
  const updateDataset = `update bi.dataset set table_name='${table}', editor_id='${sqlEscape(uid)}' where dataset_id='${sqlEscape(dataset.id)}'`;
  const sqlList = [createTable, insertData, updateDataset];
  if (dataset.name) {
    sqlList.push(`comment on table ${table} is '${sqlEscape(dataset.name)}'`);
  }
  const q = sqlList.filter((el) => el).join(';');
  await pg.query('create extension if not exists postgis;create schema if not exists bi_data;');

  // Multi-statement queries return an array of results in node-postgres;
  // guard the single-statement case the same way biDatasetDemoAdd does.
  const result = await pg.query(q);
  const resultList = Array.isArray(result) ? result : [result];

  return {
    message: {
      id: params?.id, action: 'import', result: 'success', total: resultList.find((el) => el?.command === 'INSERT')?.rowCount,
    },
    status: 200,
  };
}
@@ -0,0 +1,22 @@
1
/**
 * Builds the DDL for a BI dataset table: one CREATE TABLE statement followed by
 * COMMENT ON COLUMN statements (one per column carrying a `ua` label).
 *
 * @param {Object} opts
 * @param {string} opts.table - schema-qualified table name (e.g. 'bi_data.abc')
 * @param {string} opts.pkey - primary-key column name
 * @param {Array<{name:string, format?:string, ua?:string}>} [opts.columns] - user columns
 * @returns {string} semicolon-separated SQL statements
 */
export default function createTableFunc({
  table, pkey, columns,
}) {
  // Logical column format -> PostgreSQL type; unknown formats fall back to text.
  // NOTE(review): 'geom' maps to the literal type name 'geom', which is not a
  // built-in PostgreSQL type — presumably such columns never occur since a
  // dedicated "geom public.geometry" column is always created; confirm upstream.
  const columnType1 = {
    text: 'text', select: 'text', date: 'date', 'yes/no': 'boolean', badge: 'text', number: 'numeric', tags: 'text[]', geom: 'geom',
  };

  // Escape single quotes so user-supplied labels cannot break the COMMENT SQL.
  const esc = (val) => String(val).replace(/'/g, "''");

  const createQuery = `create table if not exists ${table} (
    ${pkey} text not null default public.uuid_generate_v4(),
    geom public.geometry, ${columns ? `
    ${columns?.map((el) => `${el.name} ${columnType1[el.format] || 'text'}`).join(', ')},` : ''}
    cdate timestamp without time zone not null default now(),
    editor_date timestamp without time zone,
    uid text,
    editor_id text,
    files json,
    constraint ${table.replace(/\./g, '_')}_constraint_pkey PRIMARY KEY (${pkey}) )`;

  const commentQuery = columns?.filter((el) => el.ua)?.map((el) => `comment on column ${table}.${el.name} is '${esc(el.ua)}'`).join(';');

  // Drop empty/undefined parts — the original joined `undefined` in, leaving a
  // dangling trailing ';' when there were no labelled columns.
  return [createQuery, commentQuery].filter((el) => el).join(';');
}
@@ -0,0 +1,49 @@
1
+ import path from 'node:path';
2
+
3
+ import { getFolder, isFileExists } from '@opengis/fastify-table/utils.js';
4
+
5
+ import { file2json } from '@opengis/fastify-file/utils.js';
6
+
7
// Derives the list of column names from an uploaded payload.
// Supports a GeoJSON FeatureCollection (keys of the first feature's properties),
// a row array from csv/xls (keys of the first row), and a fallback for an
// object keyed by sheet name (keys of the first sheet's first row).
// Empty inputs yield [] instead of throwing.
function getFileColumns({ data }) {
  if (data?.features) {
    // geojson / shapefile
    return Object.keys(data.features[0]?.properties || {});
  }
  if (Array.isArray(data)) {
    // csv / xls rows
    return Object.keys(data[0] || {}).filter((el) => el);
  }
  // object keyed by sheet name. NOTE(review): the original used Object.values
  // here, returning the first row's cell VALUES where column names are needed —
  // almost certainly a bug; fixed to Object.keys.
  const firstRow = data?.[Object.keys(data || {})?.[0]]?.[0];
  return Object.keys(firstRow || {});
}
16
+
17
/**
 * Turns an uploaded payload (GeoJSON / csv / xls) into:
 *  - `columns`: generated descriptors ({ name: 'col_N', ua: originalLabel, format: 'text' })
 *  - `insertData`: one INSERT ... SELECT statement fed by json_array_elements
 *
 * Accepts either in-memory `data` or a `file_path` relative to the request
 * folder. NOTE(review): on a missing file this returns { message, status }
 * instead of { columns, insertData } — callers must check for that shape.
 *
 * @returns {Promise<Object>} { columns, insertData } or { message, status }
 */
export default async function prepareData({
  req, table, file_path: filePath, data: data1,
}) {
  const rootDir = getFolder(req);
  const fullPath = path.join(rootDir, filePath || '');
  const fileExists = await isFileExists(fullPath);
  if (!fileExists && !data1) {
    return { message: `file not found: ${filePath}`, status: 404 };
  }

  const data11 = data1 || await file2json(fullPath);
  // xls parsers return an object keyed by sheet name — unwrap the first sheet
  const data = (['.xls', '.xlsx'].includes(path.extname(fullPath)) && !Array.isArray(data11)) ? data11[Object.keys(data11)[0]] : data11;

  // Escape single quotes so file-derived labels cannot break the SQL literals.
  const esc = (val) => String(val).replace(/'/g, "''");

  /* generate column list based on input data; reserved/system names are dropped */
  const columns = getFileColumns({ data })
    ?.filter((el) => !['editor_date', 'cdate', 'uid', 'editor_id', 'files'].includes(el.toLowerCase()))
    ?.map((el, index) => ({ name: `col_${index}`, ua: el, format: 'text' }));

  const columnList1 = columns?.map((el) => el.name)?.join(',');
  // GeoJSON path: pull each value out of features->properties by its original label
  const columns1 = ','.concat(columns?.map((el) => `features->'properties'->>'${esc(el?.ua)}'`)?.join(','));
  // Row-array path: pull each value out of the row object `d`
  const columns2 = columns?.map((el) => `d->>'${esc(el.ua)}'`).join(',');
  const q1 = data?.features ? `(SELECT json_array_elements('${JSON.stringify(data.features).replace(/'/g, "''")}'::json) AS features)q` : null;
  const q2 = !data?.features ? `( SELECT json_array_elements('${JSON.stringify(Array.isArray(data)
    ? data
    : data[Object.keys(data)?.[0]])
    .replace(/'/g, "''")
    .replace(/\\"/g, '`')}'::json) AS d )q`
    : null;
  const insertData = data?.features
    ? `INSERT into ${table} (geom ${','.concat(columnList1)}) SELECT public.ST_GeomFromGeoJSON((features->>'geometry')::json) AS geom ${columns1} FROM ${q1}`
    : `INSERT into ${table} (${columnList1}) SELECT ${columns2} FROM ${q2}`;
  return { columns, insertData };
}
@@ -0,0 +1,19 @@
1
+ import biDatasetPost from './controllers/bi.dataset.import.js';
2
+ import biDatasetDemoAdd from './controllers/bi.dataset.demo.add.js';
3
+
4
// Shared (currently empty) schema — no request validation yet.
const biSchema = {};

/**
 * Registers the BI dataset endpoints:
 *   POST /bi-dataset/:id  — import data into an existing dataset
 *   GET  /bi-dataset-demo — download & register the demo datasets
 */
export default async function route(fastify, opts) {
  const definitions = [
    { method: 'POST', url: '/bi-dataset/:id', handler: biDatasetPost },
    { method: 'GET', url: '/bi-dataset-demo', handler: biDatasetDemoAdd },
  ];
  definitions.forEach((def) => fastify.route({ ...def, schema: biSchema }));
}
@@ -1,63 +1,63 @@
1
- const q = `select nspname||'.'||relname as table, json_agg(json_build_object('name',attname, 'type', a.atttypid::regtype, 'description', coalesce(col_description(attrelid, attnum),attname))) as columns
2
- from pg_attribute a
3
- left join pg_catalog.pg_attrdef d ON (a.attrelid, a.attnum) = (d.adrelid, d.adnum)
4
- JOIN pg_class AS i
5
- ON i.oid = a.attrelid
6
- JOIN pg_namespace AS NS ON i.relnamespace = NS.OID
7
- where a.attnum > 0 and nspname||'.'||relname = $1
8
- and not a.attisdropped
9
- group by nspname||'.'||relname limit 1`;
10
-
11
- export default async function dbTablePreview({ pg, params = {}, query = {} }) {
12
- if (!params?.name) {
13
- return { message: 'not enough params: name', status: 400 };
14
- }
15
-
16
- if (query.sql) return q;
17
- try {
18
- const { table, columns } = await pg
19
- .query(q, [params.name])
20
- .then((res) => res.rows?.[0] || {});
21
- if (!table) {
22
- return { message: 'table not found', status: 404 };
23
- }
24
-
25
- const { count = 0 } = await pg
26
- .query(
27
- `select reltuples as count from pg_class where oid = to_regclass($1)`,
28
- [params.name]
29
- )
30
- .then((res) => res.rows?.[0] || {});
31
- const geom = columns.find((el) => el.type === 'geometry')?.name;
32
- const { bounds, extentStr } = geom
33
- ? await pg
34
- .query(
35
- `select count(*),
36
- st_asgeojson(st_extent(${geom}))::json as bounds,
37
- replace(regexp_replace(st_extent(${geom})::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
38
- from ${params.name}`
39
- )
40
- .then((res) => res.rows?.[0] || {})
41
- : {};
42
- const extent = extentStr ? extentStr.split(',') : undefined;
43
-
44
- const systemColumns = [
45
- 'uid',
46
- 'files',
47
- 'editor_date',
48
- 'cdate',
49
- 'editor_id',
50
- geom,
51
- ];
52
- const columnList = columns
53
- .map((el) => el?.name)
54
- .filter((el) => !systemColumns.includes(el));
55
- const { rows = [] } = await pg.query(
56
- `select ${columnList.join(',')} ${geom ? `, st_asgeojson(geom)::json as geom` : ''} from ${table} limit 10`
57
- );
58
-
59
- return { count, geom: !!geom, bounds, extent, columns, rows };
60
- } catch (err) {
61
- return { error: err.toString(), status: 500 };
62
- }
63
- }
1
// Resolves a schema-qualified table name ($1 = 'schema.table') to its column
// metadata: name, SQL type (::regtype) and description (the column comment,
// falling back to the column name). Dropped columns are excluded; at most one
// row is returned: { table, columns: [...] }.
const q = `select nspname||'.'||relname as table, json_agg(json_build_object('name',attname, 'type', a.atttypid::regtype, 'description', coalesce(col_description(attrelid, attnum),attname))) as columns
from pg_attribute a
left join pg_catalog.pg_attrdef d ON (a.attrelid, a.attnum) = (d.adrelid, d.adnum)
JOIN pg_class AS i
ON i.oid = a.attrelid
JOIN pg_namespace AS NS ON i.relnamespace = NS.OID
where a.attnum > 0 and nspname||'.'||relname = $1
and not a.attisdropped
group by nspname||'.'||relname limit 1`;
10
+
11
/**
 * GET /db-tables/:name — previews a table: estimated row count, column
 * metadata, geometry bounds/extent (when a geometry column exists) and the
 * first 10 rows. `?sql` returns the raw metadata query instead of running it.
 */
export default async function dbTablePreview({ pg, params = {}, query = {} }) {
  if (!params?.name) {
    return { message: 'not enough params: name', status: 400 };
  }

  // debug aid
  if (query.sql) return q;
  try {
    const { table, columns } = await pg
      .query(q, [params.name])
      .then((res) => res.rows?.[0] || {});
    if (!table) {
      return { message: 'table not found', status: 404 };
    }

    // reltuples is a planner ESTIMATE — cheap but approximate, not count(*)
    const { count = 0 } = await pg
      .query(
        `select reltuples as count from pg_class where oid = to_regclass($1)`,
        [params.name]
      )
      .then((res) => res.rows?.[0] || {});

    // first geometry-typed column, if any (params.name is safe to interpolate
    // below: it matched an existing catalog entry in the query above)
    const geom = columns.find((el) => el.type === 'geometry')?.name;
    const { bounds, extentStr } = geom
      ? await pg
        .query(
          `select count(*),
          st_asgeojson(st_extent(${geom}))::json as bounds,
          replace(regexp_replace(st_extent(${geom})::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
          from ${params.name}`
        )
        .then((res) => res.rows?.[0] || {})
      : {};
    const extent = extentStr ? extentStr.split(',') : undefined;

    // hide audit/system columns; the geometry column is returned separately
    const systemColumns = [
      'uid',
      'files',
      'editor_date',
      'cdate',
      'editor_id',
      geom,
    ];
    const columnList = columns
      .map((el) => el?.name)
      .filter((el) => !systemColumns.includes(el));

    // Build the select list defensively: a table consisting solely of system
    // columns previously produced "select  from ..." / "select , ..." —
    // invalid SQL that fell into the catch as a 500.
    const selectList = [
      columnList.join(','),
      geom ? 'st_asgeojson(geom)::json as geom' : '',
    ].filter((el) => el).join(', ');
    const { rows = [] } = selectList
      ? await pg.query(`select ${selectList} from ${table} limit 10`)
      : { rows: [] };

    return { count, geom: !!geom, bounds, extent, columns, rows };
  } catch (err) {
    return { error: err.toString(), status: 500 };
  }
}
@@ -1,36 +1,36 @@
1
- export default async function dbTables({ pg, query = {} }) {
2
- const q = `select
3
- t.table_schema ||'.'|| t.table_name as table,
4
- obj_description(to_regclass(t.table_schema ||'."'|| t.table_name||'"')) as description,
5
- t.table_schema as schema,
6
- (select reltuples from pg_class where oid = to_regclass(t.table_schema ||'."'|| t.table_name||'"') ) as total,
7
- coalesce(isgeom,false) as isgeom
8
-
9
- from information_schema.tables t
10
- left join lateral(
11
- select true as isgeom from information_schema.columns c
12
- where c.table_name = t.table_name
13
- and c.table_schema = t.table_schema and 'geometry'=c.udt_name limit 1
14
- )c on 1=1
15
-
16
- where t.table_type = 'BASE TABLE'
17
- and t.table_schema not in ('public','log','admin','feature_ir','gis', 'setting')
18
- and t.table_name not like '%.%'
19
- and regexp_replace(t.table_name, '^[[:digit:]]', '', 'g') = t.table_name
20
- and 1=(SELECT count(*) FROM pg_catalog.pg_constraint con
21
- INNER JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid
22
- INNER JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
23
- WHERE nsp.nspname = t.table_schema AND rel.relname = t.table_name and contype='p'
24
- )
25
- and ${query.schema ? `t.table_schema=$1` : '1=1'} order by total desc`;
26
- if (query.sql) return q;
27
- try {
28
- const { rows = [] } = await pg.query(
29
- q,
30
- [query.schema].filter((el) => el)
31
- );
32
- return { rows };
33
- } catch (err) {
34
- return { error: err.toString(), status: 500 };
35
- }
36
- }
1
/**
 * GET /db-tables — lists user tables (excluding system/app schemas) with their
 * comment, schema, estimated row count and a geometry flag. An optional
 * `?schema=` narrows the listing; `?sql` returns the raw query text.
 */
export default async function dbTables({ pg, query = {} }) {
  // filter clause and bindings stay in sync: both keyed on query.schema truthiness
  const schemaFilter = query.schema ? `t.table_schema=$1` : '1=1';
  const bindings = query.schema ? [query.schema] : [];

  const q = `select
  t.table_schema ||'.'|| t.table_name as table,
  obj_description(to_regclass(t.table_schema ||'."'|| t.table_name||'"')) as description,
  t.table_schema as schema,
  (select reltuples from pg_class where oid = to_regclass(t.table_schema ||'."'|| t.table_name||'"') ) as total,
  coalesce(isgeom,false) as isgeom

  from information_schema.tables t
  left join lateral(
  select true as isgeom from information_schema.columns c
  where c.table_name = t.table_name
  and c.table_schema = t.table_schema and 'geometry'=c.udt_name limit 1
  )c on 1=1

  where t.table_type = 'BASE TABLE'
  and t.table_schema not in ('public','log','admin','feature_ir','gis', 'setting')
  and t.table_name not like '%.%'
  and regexp_replace(t.table_name, '^[[:digit:]]', '', 'g') = t.table_name
  and 1=(SELECT count(*) FROM pg_catalog.pg_constraint con
  INNER JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid
  INNER JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
  WHERE nsp.nspname = t.table_schema AND rel.relname = t.table_name and contype='p'
  )
  and ${schemaFilter} order by total desc`;

  if (query.sql) return q;

  try {
    const { rows = [] } = await pg.query(q, bindings);
    return { rows };
  } catch (err) {
    return { error: err.toString(), status: 500 };
  }
}
@@ -1,17 +1,17 @@
1
- import dbTables from './controllers/dbTables.js';
2
- import dbTablePreview from './controllers/dbTablePreview.js';
3
-
4
- export default async function route(fastify, opts) {
5
- fastify.route({
6
- method: 'GET',
7
- url: '/db-tables',
8
- schema: {},
9
- handler: dbTables,
10
- });
11
- fastify.route({
12
- method: 'GET',
13
- url: '/db-tables/:name',
14
- schema: {},
15
- handler: dbTablePreview,
16
- });
17
- }
1
+ import dbTables from './controllers/dbTables.js';
2
+ import dbTablePreview from './controllers/dbTablePreview.js';
3
+
4
/**
 * Registers the DB inspection endpoints:
 *   GET /db-tables       — list user tables
 *   GET /db-tables/:name — preview a single table
 */
export default async function route(fastify, opts) {
  const definitions = [
    { method: 'GET', url: '/db-tables', handler: dbTables },
    { method: 'GET', url: '/db-tables/:name', handler: dbTablePreview },
  ];
  definitions.forEach((def) => fastify.route({ ...def, schema: {} }));
}
@@ -1,24 +1,26 @@
1
- export default async function widgetAdd({ pg, funcs, params = {}, body }) {
2
- try {
3
- const time = Date.now();
4
- const tableName = body.table_name;
5
- const checkTable = await pg.query(
6
- `select * from bi.dashboard where $1 in (table_name)`,
7
- [tableName]
8
- );
9
- if (!checkTable.rows.length) return { message: 'bad params', status: 401 };
10
- const res = await funcs.dataInsert({
11
- table: 'bi.dashboard',
12
- data: body,
13
- });
14
-
15
- return {
16
- time: Date.now() - time,
17
- message: `Added new dashboard, ID: '${res.rows[0].title}'`,
18
- status: 200,
19
- rows: res.rows,
20
- };
21
- } catch (err) {
22
- return { error: err.toString(), status: 500 };
23
- }
24
- }
1
+ import { dataInsert } from "@opengis/fastify-table/utils.js";
2
+
3
/**
 * Adds a new dashboard row to bi.dashboard after verifying that body.table_name
 * references an existing dashboard entry.
 * NOTE(review): despite the file/function name ("widget.add"), this inserts
 * into bi.dashboard and reports "Added new dashboard" — confirm intent.
 *
 * @returns {Promise<Object>} { time, message, status, rows } or { error, status }
 */
export default async function widgetAdd({ pg, funcs, params = {}, body }) {
  try {
    const time = Date.now();
    const tableName = body.table_name;
    const checkTable = await pg.query(
      `select * from bi.dashboard where $1 in (table_name)`,
      [tableName]
    );
    // NOTE(review): 401 (unauthorized) is unusual for a validation failure —
    // 400 would be conventional; kept for backward compatibility.
    if (!checkTable.rows.length) return { message: 'bad params', status: 401 };
    const res = await dataInsert({
      table: 'bi.dashboard',
      data: body,
    });

    // Guard: the original dereferenced res.rows[0].title unconditionally and
    // crashed into the catch when the insert returned no rows.
    const added = res?.rows?.[0];
    if (!added) {
      return { message: 'insert failed', status: 500 };
    }

    return {
      time: Date.now() - time,
      message: `Added new dashboard, ID: '${added.title}'`,
      status: 200,
      rows: res.rows,
    };
  } catch (err) {
    return { error: err.toString(), status: 500 };
  }
}
@@ -1,44 +1,46 @@
1
- export default async function dashboardEdit(
2
- { pg, funcs, params = {}, body = {} },
3
- reply
4
- ) {
5
- try {
6
- if (!params.name) {
7
- return {
8
- message: 'not enough params: dashboard name required',
9
- status: 400,
10
- };
11
- }
12
- const tableName = body.table_name;
13
- const checkTable = await pg.query(
14
- `select * from bi.dashboard where $1 in (table_name)`,
15
- [tableName]
16
- );
17
- if (!checkTable.rows.length) return { message: 'bad params', status: 401 };
18
- const { name: dashboardName } = params;
19
- const row = await pg
20
- .query(
21
- `select dashboard_id from bi.dashboard where $1 in (dashboard_id, name)`,
22
- [dashboardName]
23
- )
24
- .then((res1) => res1.rows?.[0] || {});
25
- const { dashboard_id: dashboardId } = row;
26
-
27
- const res = await funcs.dataUpdate({
28
- table: 'bi.dashboard',
29
- id: dashboardId,
30
- data: body,
31
- });
32
- if (!Object.keys(res)?.length) {
33
- return { message: 'not found data', status: 404 };
34
- }
35
-
36
- return {
37
- message: `updated ${dashboardName}`,
38
- status: 200,
39
- rows: res,
40
- };
41
- } catch (err) {
42
- return reply.status(500).send(err.toString());
43
- }
44
- }
1
+ import { dataUpdate } from "@opengis/fastify-table/utils.js";
2
+
3
+ export default async function dashboardEdit(
4
+ { pg, params = {}, body = {} },
5
+ reply
6
+ ) {
7
+ try {
8
+ if (!params.name) {
9
+ return {
10
+ message: 'not enough params: dashboard name required',
11
+ status: 400,
12
+ };
13
+ }
14
+ const tableName = body.table_name;
15
+ const checkTable = await pg.query(
16
+ `select * from bi.dashboard where $1 in (table_name)`,
17
+ [tableName]
18
+ );
19
+ if (!checkTable.rows.length) return { message: 'bad params', status: 401 };
20
+ const { name: dashboardName } = params;
21
+ const row = await pg
22
+ .query(
23
+ `select dashboard_id from bi.dashboard where $1 in (dashboard_id, name)`,
24
+ [dashboardName]
25
+ )
26
+ .then((res1) => res1.rows?.[0] || {});
27
+ const { dashboard_id: dashboardId } = row;
28
+
29
+ const res = await dataUpdate({
30
+ table: 'bi.dashboard',
31
+ id: dashboardId,
32
+ data: body,
33
+ });
34
+ if (!Object.keys(res)?.length) {
35
+ return { message: 'not found data', status: 404 };
36
+ }
37
+
38
+ return {
39
+ message: `updated ${dashboardName}`,
40
+ status: 200,
41
+ rows: res,
42
+ };
43
+ } catch (err) {
44
+ return reply.status(500).send(err.toString());
45
+ }
46
+ }