@opengis/bi 1.0.21 → 1.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. package/dist/bi.js +1 -1
  2. package/dist/bi.umd.cjs +109 -106
  3. package/dist/{import-file-C8BY90-b.js → import-file-DgBd_UN1.js} +20484 -18794
  4. package/dist/{map-component-mixin-CFtShPun.js → map-component-mixin-NewmNy_M.js} +502 -483
  5. package/dist/style.css +1 -1
  6. package/dist/{vs-calendar-B9vXdsaG.js → vs-calendar-CPXj4hBh.js} +1 -1
  7. package/dist/vs-donut-CUmi2ir5.js +148 -0
  8. package/dist/{vs-funnel-bar-Cj0O8tIf.js → vs-funnel-bar-B3DpbtUl.js} +1 -1
  9. package/dist/{vs-heatmap-C9oFph_f.js → vs-heatmap-COwT3bHE.js} +1 -1
  10. package/dist/{vs-map-WOn0RAU7.js → vs-map-DwyQHLpN.js} +2 -2
  11. package/dist/{vs-map-cluster-RJa6sNfI.js → vs-map-cluster-CnZ9g6k-.js} +2 -2
  12. package/dist/{vs-number-BG0szZL-.js → vs-number-LwROg9Oe.js} +5 -5
  13. package/dist/vs-table-Dt_MSaCC.js +68 -0
  14. package/dist/{vs-text-Kwl3-0yy.js → vs-text-DgAf3Ids.js} +2 -2
  15. package/package.json +6 -5
  16. package/plugin.js +1 -1
  17. package/server/helpers/mdToHTML.js +17 -0
  18. package/server/migrations/bi.dataset.sql +13 -0
  19. package/server/migrations/bi.sql +2 -0
  20. package/server/routes/dashboard/controllers/dashboard.js +10 -11
  21. package/server/routes/dashboard/controllers/dashboard.list.js +6 -6
  22. package/server/routes/data/controllers/data.js +14 -6
  23. package/server/routes/data/controllers/util/chartSQL.js +6 -3
  24. package/server/routes/dataset/controllers/bi.dataset.list.js +3 -1
  25. package/server/routes/dataset/controllers/comment.js +55 -0
  26. package/server/routes/dataset/controllers/createDatasetPost.js +132 -0
  27. package/server/routes/dataset/controllers/data.js +145 -0
  28. package/server/routes/{db → dataset}/controllers/dbTablePreview.js +17 -24
  29. package/server/routes/{db → dataset}/controllers/dbTables.js +7 -11
  30. package/server/routes/dataset/controllers/delete.js +39 -0
  31. package/server/routes/dataset/controllers/{bi.dataset.edit.js → editDataset.js} +32 -26
  32. package/server/routes/dataset/controllers/export.js +213 -0
  33. package/server/routes/dataset/controllers/form.js +99 -0
  34. package/server/routes/dataset/controllers/format.js +44 -0
  35. package/server/routes/dataset/controllers/insert.js +46 -0
  36. package/server/routes/dataset/controllers/table.js +69 -0
  37. package/server/routes/dataset/controllers/update.js +42 -0
  38. package/server/routes/dataset/index.mjs +88 -43
  39. package/server/routes/dataset/utils/convertJSONToCSV.js +17 -0
  40. package/server/routes/dataset/utils/convertJSONToXls.js +49 -0
  41. package/server/routes/dataset/utils/createTableQuery.js +59 -0
  42. package/server/routes/dataset/utils/datasetForms.js +1 -0
  43. package/server/routes/dataset/utils/descriptionList.js +46 -0
  44. package/server/routes/dataset/utils/downloadRemoteFile.js +58 -0
  45. package/server/routes/dataset/utils/executeQuery.js +46 -0
  46. package/server/routes/dataset/utils/getLayersData.js +107 -0
  47. package/server/routes/dataset/utils/getTableData.js +47 -0
  48. package/server/routes/dataset/utils/insertDataQuery.js +12 -0
  49. package/server/routes/dataset/utils/metaFormat.js +24 -0
  50. package/server/routes/edit/controllers/dashboard.add.js +3 -3
  51. package/server/routes/edit/controllers/widget.add.js +8 -3
  52. package/server/routes/edit/controllers/widget.edit.js +23 -5
  53. package/server/routes/map/controllers/cluster.js +41 -41
  54. package/server/routes/map/controllers/clusterVtile.js +5 -5
  55. package/server/routes/map/controllers/geojson.js +6 -6
  56. package/server/routes/map/controllers/heatmap.js +118 -0
  57. package/server/routes/map/controllers/map.js +3 -3
  58. package/server/routes/map/controllers/utils/downloadClusterData.js +6 -4
  59. package/server/routes/map/controllers/vtile.js +3 -3
  60. package/server/routes/map/index.mjs +2 -0
  61. package/server/utils/getWidget.js +10 -6
  62. package/server/routes/dataset/controllers/bi.dataset.add.js +0 -86
  63. package/server/routes/dataset/controllers/bi.dataset.data.add.js +0 -49
  64. package/server/routes/dataset/controllers/bi.dataset.data.del.js +0 -54
  65. package/server/routes/dataset/controllers/bi.dataset.data.edit.js +0 -55
  66. package/server/routes/dataset/controllers/bi.dataset.data.list.js +0 -71
  67. package/server/routes/dataset/controllers/bi.dataset.del.js +0 -48
  68. package/server/routes/dataset/controllers/bi.dataset.demo.add.js +0 -97
  69. package/server/routes/dataset/controllers/util/create.table.js +0 -21
  70. package/server/routes/dataset/controllers/util/prepare.data.js +0 -49
  71. package/server/routes/db/index.mjs +0 -17
import { pgClients } from '@opengis/fastify-table/utils.js';

/**
 * Applies column comments to a dataset's backing table and mirrors them into
 * the dataset's `setting.title` map in `bi.dataset`.
 *
 * @param {Object} opts
 * @param {Object} [opts.pg] - postgres client (defaults to the shared client)
 * @param {Object} [opts.body] - expects `body.data = [{ name, comment }]`
 * @param {Object} [opts.params] - expects `params.id` = dataset id
 * @returns {Object} updated row `{ id, setting }`, or `{ message, status }` on error
 */
export default async function datasetEditComment({
  pg = pgClients.client,
  body = {},
  params = {},
}) {
  if (!params?.id) {
    return { message: 'not enough params: id', status: 400 };
  }

  if (!Array.isArray(body.data) || !body.data?.length) {
    return { message: 'invalid param: body.data not an array / empty', status: 400 };
  }

  const dataset = await pg.query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [params.id])
    .then(el => el.rows?.[0] || {});

  if (!dataset?.table) {
    return { message: dataset?.id ? 'dataset table not set' : 'dataset not found', status: 404 };
  }

  // `limit 0` returns only field metadata — cheap way to read the real column list
  const { fields = [] } = pg.pk?.[dataset.table] ? await pg.query(`select * from ${dataset.table} limit 0`) : {};

  if (!fields.length) {
    return { message: `table not found: ${dataset.table}`, status: 404 };
  }

  const columnList = fields.map(el => el.name);
  // only accept entries whose name matches an existing column — this also guards
  // the identifier interpolation below against injection
  const validData = body.data?.filter?.(el => el?.name && el?.comment && columnList.includes(el.name));

  // fix: an identifier inside double quotes must escape `"` as `""` (the original
  // doubled single quotes, which is the escaping rule for string literals, not
  // identifiers); the comment text keeps standard single-quote doubling
  const q1 = validData.map(el => `COMMENT ON COLUMN ${dataset.table}."${el.name.replace(/"/g, '""')}" IS '${el.comment.replace(/'/g, "''")}'`).join(';');
  if (q1) await pg.query(q1);

  // fix: filter by the target table in SQL via to_regclass($1) instead of scanning
  // every table and comparing attrelid::regclass text to the raw table name in JS —
  // that text comparison breaks when the rendered regclass is quoted or
  // schema-qualified differently than the stored table_name.
  // Also dropped the duplicate 'cdate' from the exclusion list.
  const q2 = `select
    attname,
    pg_catalog.col_description(attrelid,attnum) as title
  from pg_catalog.pg_attribute a
  where attnum > 0
    and attrelid = to_regclass($1)
    and attname not in ('editor_id','editor_date','cdate','geom','id','uid')
    and atttypid::regtype not in ('json','geometry')
    and pg_catalog.col_description(attrelid,attnum) is not null`;

  const { rows = [] } = await pg.query(q2, [dataset.table]);

  // { columnName: commentText } map stored under setting.title
  const title = rows.reduce((p, el) => ({ ...p, [el.attname]: el.title }), {});

  const res = await pg.query(`update bi.dataset set setting=coalesce(setting::jsonb, '{}'::jsonb)||$1::jsonb
    where dataset_id=$2 returning dataset_id as id, setting`, [{ title }, dataset.id]);

  return res.rows?.[0];
}
@@ -0,0 +1,132 @@
1
+ import path from 'node:path';
2
+ import { existsSync } from 'node:fs';
3
+
4
+ import { config, pgClients, getFolder } from '@opengis/fastify-table/utils.js';
5
+ import { file2json } from '@opengis/fastify-file/utils.js';
6
+
7
+ import createTableQuery from '../utils/createTableQuery.js';
8
+ import executeQuery from '../utils/executeQuery.js';
9
+ import downloadRemoteFile from '../utils/downloadRemoteFile.js';
10
+
11
+ const insertDataset = `insert into bi.dataset
12
+ (name, table_name, dataset_file_path, column_list, pk, data_source, uid)
13
+ values($1,$2,$3,$4,$5,$6,$7) returning dataset_id`;
14
+
15
+ export default async function createDatasetPost({
16
+ pg = pgClients.client, body = {}, user = {},
17
+ }) {
18
+ if (!user?.uid) {
19
+ return { message: 'access restricted', status: 403 };
20
+ }
21
+
22
+ if (!body?.name) {
23
+ return { message: 'not enough query params: name', status: 400 };
24
+ }
25
+
26
+ if (!body?.table_name && !body?.file && !body?.column_list?.length && !body?.dataset_url) {
27
+ return { message: 'not enough query params: table / file / column_list/ url', status: 400 };
28
+ }
29
+
30
+ const {
31
+ name,
32
+ table_name: existingTable,
33
+ column_list: columns = [],
34
+ dataset_url: datasetUrl,
35
+ } = body;
36
+
37
+ const rootDir = getFolder(config, 'local');
38
+
39
+ if (datasetUrl) {
40
+ const { filePath, error } = await downloadRemoteFile({
41
+ rootDir, url: datasetUrl, table: name || datasetUrl,
42
+ });
43
+ if (error || !filePath) {
44
+ return { message: error || 'file request URL error', status: 500 };
45
+ }
46
+ Object.assign(body, { file: filePath });
47
+ }
48
+
49
+ const { file: relPath } = body;
50
+
51
+ if (relPath) {
52
+ const filepath = path.join(rootDir, relPath);
53
+ const exists = existsSync(filepath);
54
+
55
+ if (!exists) {
56
+ return { message: 'Файл з вихідними даними не знайдено', status: 404 };
57
+ }
58
+
59
+ const json = await file2json({ filepath });
60
+
61
+ // excel sheets fix?
62
+ const data1 = (['.xls', '.xlsx'].includes(path.extname(filepath)) && !Array.isArray(json)) ? json[Object.keys(json)[0]] : json;
63
+ const data = path.extname(filepath) === '.json' && body?.key ? data1?.[body.key] : data1;
64
+
65
+ const features = ['.csv', '.xlsx', '.xls'].includes(path.extname(filepath))
66
+ ? data?.map?.((el) => ({ type: 'Feature', properties: Object.keys(el).reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }))
67
+ : data?.features || data?.map?.((el) => ({ type: 'Feature', geometry: el.geom, properties: Object.keys(el).filter((key) => key !== 'geom').reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }));
68
+
69
+ if (!Array.isArray(features) || !features?.length) {
70
+ return { message: 'Файл з вихідними даними порожній', status: 400 };
71
+ }
72
+
73
+ Object.assign(data, { features });
74
+
75
+ const fileColumns = Object.keys(data?.features[0]?.properties)
76
+ ?.filter((el) => !['editor_date', 'cdate', 'uid', 'editor_id', 'files'].includes(el.toLowerCase()))
77
+ ?.map((el) => ({ title: el, format: 'text' }));
78
+
79
+ const { sql, pkey, table } = createTableQuery(fileColumns, name);
80
+
81
+ const { datasetId, error } = await executeQuery({
82
+ sql,
83
+ data,
84
+ name,
85
+ table,
86
+ relPath,
87
+ columns: fileColumns,
88
+ pkey,
89
+ source: datasetUrl ? 'url' : 'file',
90
+ url: datasetUrl,
91
+ user,
92
+ dataKey: body?.key,
93
+ });
94
+
95
+ if (error) return { error, status: 500 };
96
+
97
+ pgClients.client.pk[table] = pkey;
98
+ pgClients.client.tlist.push(table);
99
+ return { message: { id: datasetId, table, source: datasetUrl ? 'url' : 'file' }, status: 200 };
100
+ }
101
+
102
+ if (existingTable) {
103
+ await pg.query(`alter table ${existingTable} add column if not exists geom public.geometry;
104
+ alter table ${existingTable} add column if not exists files json`);
105
+ const args = [name, existingTable, null, null, pg.pk?.[existingTable], JSON.stringify({ type: 'table' }), user?.uid];
106
+ const datasetId = await pg.query(insertDataset, args).then(el => el.rows?.[0]?.dataset_id);
107
+ return { message: { id: datasetId, table: existingTable, source: 'table' }, status: 200 };
108
+ }
109
+
110
+ if (!columns?.length) {
111
+ return { message: 'У даній заяві відсутні налаштування структури набору даних', status: 400 };
112
+ }
113
+
114
+ const { sql, pkey, table } = createTableQuery(columns, name);
115
+
116
+ const { datasetId, error } = await executeQuery({
117
+ sql,
118
+ name,
119
+ table,
120
+ relPath,
121
+ columns,
122
+ pkey,
123
+ source: 'newtable',
124
+ user,
125
+ });
126
+
127
+ if (error) return { error, status: 500 };
128
+
129
+ pgClients.client.pk[table] = pkey;
130
+ pgClients.client.tlist.push(table);
131
+ return { message: { id: datasetId, table, source: 'newtable' }, status: 200 };
132
+ }
import {
  config, pgClients, getMeta, getFilterSQL, setToken, getToken,
} from '@opengis/fastify-table/utils.js';

import metaFormat from '../utils/metaFormat.js';
import datasetFormData from './table.js';

// hard cap on page size regardless of query.limit
const maxLimit = 100;

/**
 * Returns a page of rows for a dataset, with filtering, search, optional
 * lateral SQL joins, row-level edit tokens, and an add token.
 *
 * params.id is either a dataset id or a write-mode token (token path delegates
 * to datasetFormData). Responds with { rows, columns, total, filtered, pk, ... }.
 */
export default async function datasetData(req, reply) {
  const {
    pg = pgClients.client, params = {}, query = {}, user = {},
  } = req;
  const time = Date.now();
  const { uid, user_type: userType = 'regular' } = user;

  if (!uid) {
    return { message: 'access restricted', status: 403 };
  }

  if (!params?.id) {
    return { message: 'not enough params: id', status: 404 };
  }

  // params.id may be a signed token instead of a raw dataset id
  const tokenData = await getToken({
    uid: user?.uid,
    token: params.id,
    mode: 'w',
    json: 1,
  });

  // token referencing a concrete row -> single-record form data path
  if (tokenData?.dataset && tokenData?.id) {
    return datasetFormData(req, reply);
  }

  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, filter_list as filters,
    sql_list as sql, style, form_setting, pk, access_level from bi.dataset where dataset_id=$1`, [params.id]).then(el => el.rows[0] || {});

  if (!dataset?.id) {
    return { message: 'dataset not found', status: 404 };
  }

  // all known tables/views — used to verify the stored table_name still exists
  const tlist = await pg.query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist
    from pg_class where relkind in ('r','v')`).then(el => el.rows[0]?.tlist || []);

  if (!dataset.table || !tlist.includes(dataset.table.replace(/"/g, '')) || (!pg.pk?.[dataset.table.replace(/"/g, '')] && !dataset.pk)) {
    return { message: `table not found: ${dataset.table}`, status: 404 };
  }

  const { pk = dataset.pk, columns: dbColumns = [] } = await getMeta(dataset.table) || {};
  // configured column projection (geom handled separately below)
  const cols = dataset.columns?.filter((el) => el.name !== 'geom')?.map((el) => el.name || el)?.join(',');

  const limit = Math.min(maxLimit, +(query.limit || 20));
  const offset = query.page && query.page > 0 ? ` offset ${(query.page - 1) * limit}` : '';

  const checkFilter = [query.filter, query.search].filter((el) => el).length;

  // getFilterSQL builds a WHERE fragment (fData.q) from filter/search params
  const fData = checkFilter ? await getFilterSQL({
    table: dataset.table,
    filter: query.filter,
    search: query.search,
    json: 1,
  }) : {};

  // NOTE(review): dataset.query is not among the selected columns above, so it
  // is presumably always undefined here — confirm whether a `query` column was
  // meant to be selected from bi.dataset
  const where = [dataset.query, fData.q].filter((el) => el);

  // optional configured lateral joins; {{uid}} placeholder substituted below
  const sqlTable = dataset.sql
    ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
    ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', uid)}) ${el.name || `t${i}`} on 1=1 `)
    ?.join('') || '';

  const columnList = dbColumns.map((el) => el.name || el).join(',');
  const order = columnList.includes('cdate') ? (`order by cdate ${query.desc ? 'desc' : ''}`) : '';

  // when lateral joins exist, filtering moves to the outer query (inner stays
  // `true`) so filters can reference joined columns; otherwise it is applied in
  // both layers (inner filter + outer repeat is redundant but harmless)
  const q = `select
    ${cols || '*'}
    ${pk ? `,"${pk}" as id` : ''}
    ${dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text') ? ',files' : ''}
    ${dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
    from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')} ${order}) t
    ${sqlTable} where ${where.join(' and ') || 'true'} ${order} ${offset} limit ${limit}`.replace(/{{uid}}/g, uid);

  // debug hook: return the raw SQL instead of executing it
  if (query.sql === '1' && (config.debug || userType.includes('admin'))) { return q; }

  // console.log(pg.options.database, q);
  const { rows = [] } = await pg.query(q);

  // total = unfiltered count; filtered = count matching the search/filter only
  const qCount = `select
    count(*)::int as total,
    count(*) FILTER(WHERE ${[fData.q].filter(el => el).join(' and ') || 'true'})::int as filtered
    from ${dataset.table} t ${sqlTable}
    where ${[dataset.query].filter(el => el).join(' and ') || 'true'} `.replace(/{{uid}}/g, uid);

  const { total, filtered } = await pg.queryCache(qCount).then(el => el?.rows?.[0] || {});

  // in-place row formatting according to configured column metadata
  await metaFormat({ rows, columns: dataset.columns });

  // attach a per-row write token so the client can edit/delete that record
  if (rows?.length && uid) {
    rows.filter((row) => row[pk] || row.id).forEach((row) => Object.assign(row, {
      token: setToken({
        ids: [JSON.stringify({
          id: row[pk] || row.id,
          table: dataset.table,
          form: `${params.id}.form`,
          dataset: params.id,
        })],
        uid,
        array: 1,
      })[0],
    }));
  }
  const res = {
    time: Date.now() - time,
    access: dataset.access_level,
    card: dataset.card,
    actions: ['add', 'edit', 'del', 'get'],
    total,
    filtered,
    count: rows.length,
    pk,
    table: dataset.table,
    form: `${params.id}.form`,
    rows,
    columns: dataset.columns || dbColumns.map(col => ({ name: col.name, format: pg.pgType[col.dataTypeID], title: col.name })),
    filters: dataset.filters,
  };

  // table-level token (no row id) authorizes inserting new records
  if (uid) {
    const addTokens = setToken({
      ids: [JSON.stringify({
        table: dataset.table,
        form: `${params.id}.form`,
        dataset: dataset.id,
      })],
      uid,
      array: 1,
    });

    Object.assign(res, {
      addToken: addTokens[0],
    });
  }

  return res;
}
@@ -16,28 +16,21 @@ export default async function dbTablePreview({ pg, params = {}, query = {} }) {
16
16
  if (query.sql) return q;
17
17
  try {
18
18
  const { table, columns } = await pg
19
- .query(q, [params.name])
20
- .then((res) => res.rows?.[0] || {});
19
+ .query(q, [params.name.replace(/"/g, '')])
20
+ .then(el => el.rows?.[0] || {});
21
21
  if (!table) {
22
22
  return { message: 'table not found', status: 404 };
23
23
  }
24
24
 
25
- const { count = 0 } = await pg
26
- .query(
27
- `select reltuples as count from pg_class where oid = to_regclass($1)`,
28
- [params.name]
29
- )
30
- .then((res) => res.rows?.[0] || {});
25
+ const { count = 0 } = await pg.query('select reltuples as count from pg_class where oid = to_regclass($1)', [params.name])
26
+ .then(el => el.rows?.[0] || {});
31
27
  const geom = columns.find((el) => el.type === 'geometry')?.name;
32
28
  const { bounds, extentStr } = geom
33
- ? await pg
34
- .query(
35
- `select count(*),
36
- st_asgeojson(st_extent(${geom}))::json as bounds,
37
- replace(regexp_replace(st_extent(${geom})::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
38
- from ${params.name}`
39
- )
40
- .then((res) => res.rows?.[0] || {})
29
+ ? await pg.query(`select
30
+ count(*),
31
+ st_asgeojson(st_extent(${geom}))::json as bounds,
32
+ replace(regexp_replace(st_extent(${geom})::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
33
+ from ${params.name}`).then(el => el.rows?.[0] || {})
41
34
  : {};
42
35
  const extent = extentStr ? extentStr.split(',') : undefined;
43
36
 
@@ -49,15 +42,15 @@ export default async function dbTablePreview({ pg, params = {}, query = {} }) {
49
42
  'editor_id',
50
43
  geom,
51
44
  ];
52
- const columnList = columns
53
- .map((el) => el?.name)
54
- .filter((el) => !systemColumns.includes(el));
55
- const { rows = [] } = await pg.query(
56
- `select ${columnList.join(',')} ${geom ? `, st_asgeojson(geom)::json as geom` : ''} from ${table} limit 10`
57
- );
58
45
 
59
- return { count, geom: !!geom, bounds, extent, columns, rows };
60
- } catch (err) {
46
+ const columnList = columns.map((el) => el?.name).filter((el) => !systemColumns.includes(el));
47
+ const { rows = [] } = await pg.query(`select ${columnList.map(el => `"${el.replace(/'/g, "''")}"`).join(',')} ${geom ? ', st_asgeojson(geom)::json as geom' : ''} from ${params.name.replace(/'/g, '')} limit 10`);
48
+
49
+ return {
50
+ count, geom: !!geom, bounds, extent, columns, rows,
51
+ };
52
+ }
53
+ catch (err) {
61
54
  return { error: err.toString(), status: 500 };
62
55
  }
63
56
  }
@@ -1,6 +1,6 @@
1
1
  export default async function dbTables({ pg, query = {} }) {
2
2
  const q = `select
3
- t.table_schema ||'.'|| t.table_name as table,
3
+ t.table_schema ||'."'|| t.table_name ||'"' as table,
4
4
  obj_description(to_regclass(t.table_schema ||'."'|| t.table_name||'"')) as description,
5
5
  t.table_schema as schema,
6
6
  (select reltuples from pg_class where oid = to_regclass(t.table_schema ||'."'|| t.table_name||'"') ) as total,
@@ -22,15 +22,11 @@ export default async function dbTables({ pg, query = {} }) {
22
22
  INNER JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
23
23
  WHERE nsp.nspname = t.table_schema AND rel.relname = t.table_name and contype='p'
24
24
  )
25
- and ${query.schema ? `t.table_schema=$1` : '1=1'} order by total desc`;
25
+ and isgeom
26
+ order by total desc`;
27
+
26
28
  if (query.sql) return q;
27
- try {
28
- const { rows = [] } = await pg.query(
29
- q,
30
- [query.schema].filter((el) => el)
31
- );
32
- return { rows };
33
- } catch (err) {
34
- return { error: err.toString(), status: 500 };
35
- }
29
+
30
+ const { rows = [] } = await pg.queryCache(q, { time: 0 });
31
+ return { rows };
36
32
  }
import {
  config, getToken, dataDelete, pgClients,
} from '@opengis/fastify-table/utils.js';

/**
 * Deletes a single dataset record, authorized by an admin-mode token
 * (params.id). In local/debug mode a raw dataset id plus params.object_id is
 * accepted without a token.
 *
 * @returns {Object} { rowCount, msg } — msg carries the raw result on failure
 */
export default async function datasetDataDelete({
  pg = pgClients.client, params = {}, user = {},
}) {
  const uid = user?.uid;
  if (!uid) {
    return { message: 'access restricted', status: 403 };
  }

  // params.id is normally a signed token; resolve it to { dataset, table, id }
  const auth = await getToken({
    uid,
    token: params.id,
    mode: 'a',
    json: 1,
  });

  const bypassToken = config.local || config.debug;
  if (!auth && !bypassToken) {
    return { message: 'token not allow', status: 403 };
  }

  const datasetId = auth?.dataset || params.id;
  const dataset = await pg.query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [datasetId])
    .then(el => el.rows[0] || {});

  if (!dataset.id) {
    return { message: 'dataset not found', status: 404 };
  }

  // token table wins over the stored table_name
  const targetTable = auth?.table || dataset.table;
  const recordId = auth?.id || params?.object_id;

  const res = await dataDelete({
    id: recordId,
    table: targetTable,
    uid,
  });

  return { rowCount: res.rowCount, msg: !res.rowCount ? res : null };
}
@@ -1,4 +1,4 @@
1
- import { dataUpdate, pgClients, getPG, initPG } from "@opengis/fastify-table/utils.js";
1
+ import { dataUpdate, pgClients } from '@opengis/fastify-table/utils.js';
2
2
 
3
3
  const columnTypeMatch = {
4
4
  text: 'text',
@@ -12,12 +12,12 @@ const columnTypeMatch = {
12
12
  };
13
13
 
14
14
  /**
15
- * Додавання нового набору даних BI
15
+ * Редагування структури набору даних
16
16
  *
17
- * @method POST
18
- * @summary Додавання нового набору даних BI
17
+ * @method PUT
18
+ * @summary Редагування структури набору даних
19
19
  * @priority 4
20
- * @alias biDatasetAdd
20
+ * @alias editDataset
21
21
  * @type api
22
22
  * @tag bi
23
23
  * @param {Object} params.id Dashboard ID
@@ -27,47 +27,53 @@ const columnTypeMatch = {
27
27
  * @returns {Object} rows Масив з колонками таблиці
28
28
  */
29
29
 
30
- export default async function biDatasetEdit(req) {
31
- const { params = {}, body = {}, session = {} } = req;
32
- const { uid } = session?.passport?.user || {};
33
-
34
- if (!uid) {
30
+ export default async function editDataset(req) {
31
+ const {
32
+ pg = pgClients.client, query = {}, body = {}, user = {},
33
+ } = req;
34
+ if (!user?.uid) {
35
35
  return { message: 'access restricted', status: 403 };
36
36
  }
37
37
 
38
- if (!body.columns?.length) {
38
+ if (!query?.id) {
39
+ return { message: 'not enough params: id', status: 404 };
40
+ }
41
+
42
+ if (!body.column_list?.length) {
39
43
  return { message: 'not enough params: columns', status: 400 };
40
44
  }
41
45
 
42
- const dataset = await pgClients.client.query('select db, dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [params.id])
43
- .then(el => el.rows[0]);
46
+ const dataset = await pg.query('select dataset_id as id, table_name as table, column_list as columns from bi.dataset where dataset_id=$1', [query.id])
47
+ .then(el => el.rows[0] || {});
44
48
 
45
- if (!dataset.id) {
49
+ if (!dataset?.id) {
46
50
  return { message: 'dataset not found', status: 404 };
47
51
  }
48
52
 
49
- const pg = dataset.db ? getPG({ db: dataset.db }) : pgClients.client;
50
- if (!pg.pk) await initPG(pg);
51
-
52
53
  if (!dataset.table || !pg.pk?.[dataset.table]) {
53
54
  return { message: `table not found: ${dataset.table}`, status: 404 };
54
55
  }
55
56
 
57
+ if (!dataset.table.startsWith('data_user.')) {
58
+ return { message: 'access restricted: source', status: 403 };
59
+ }
60
+
56
61
  const { fields = [] } = await pg.query(`select * from ${dataset.table} limit 0`);
57
62
  const columnList = fields.map((col) => col.name);
58
63
 
59
- const columns = (dataset.columns || body.columns).map((col, idx) => ({
60
- ...col,
61
- name: col.name && columnList.includes(col.name) ? col.name : `col_${idx}`,
62
- disabled: col.name && !body.columns.find((item) => item.name === col.name),
63
- }));
64
+ const columns = (dataset.columns || [])
65
+ .concat(body.column_list.filter((col) => !col.name || !columnList.includes(col.name)))
66
+ .map((col, idx) => ({
67
+ ...col,
68
+ name: col.name && columnList.includes(col.name) ? col.name : `col_${idx}`,
69
+ disabled: col.name && !body.column_list.find((item) => item.name === col.name),
70
+ }));
64
71
 
65
72
  await dataUpdate({
66
- pg: pgClients.client,
67
73
  table: 'bi.dataset',
68
74
  data: { column_list: columns },
69
75
  id: dataset.id,
70
- uid,
76
+ uid: user.uid,
71
77
  });
72
78
 
73
79
  const sqlList = columns
@@ -79,5 +85,5 @@ export default async function biDatasetEdit(req) {
79
85
  await pg.query(sqlList.join(';'));
80
86
  }
81
87
 
82
- return { message: { id: dataset?.id, action: 'edit', columns }, status: 200 };
83
- };
88
+ return { message: { id: dataset?.id, table: dataset.table, columns }, status: 200 };
89
+ }