@opengis/bi 1.2.0 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +42 -42
- package/dist/import-file-D8jh74Dz.js +3543 -0
- package/dist/{vs-funnel-bar-C_TceUrc.js → vs-funnel-bar-T330oJNS.js} +3 -3
- package/dist/{vs-list-DyhLUIPb.js → vs-list-DeHF_Oaf.js} +109 -109
- package/dist/{vs-map-BtQJNN4L.js → vs-map-Skt608pM.js} +8 -8
- package/dist/{vs-map-cluster-BbPUosvt.js → vs-map-cluster-BRUiY_90.js} +21 -21
- package/dist/{vs-number-D2GkU586.js → vs-number-Dd_21nn-.js} +3 -3
- package/dist/{vs-table-D_Yn9QqB.js → vs-table-BwC29Zyc.js} +6 -6
- package/dist/{vs-text-BivVd6cY.js → vs-text-DEJjWxDu.js} +32 -39
- package/package.json +77 -76
- package/plugin.js +22 -0
- package/server/helpers/mdToHTML.js +17 -0
- package/server/migrations/bi.dataset.sql +46 -0
- package/server/migrations/bi.sql +112 -0
- package/server/plugins/docs.js +48 -0
- package/server/plugins/hook.js +89 -0
- package/server/plugins/vite.js +69 -0
- package/server/routes/dashboard/controllers/dashboard.import.js +103 -0
- package/server/routes/dashboard/controllers/dashboard.js +157 -0
- package/server/routes/dashboard/controllers/dashboard.list.js +40 -0
- package/server/routes/dashboard/controllers/utils/yaml.js +11 -0
- package/server/routes/dashboard/index.mjs +26 -0
- package/server/routes/data/controllers/data.js +230 -0
- package/server/routes/data/controllers/util/chartSQL.js +49 -0
- package/server/routes/data/controllers/util/normalizeData.js +65 -0
- package/server/routes/data/index.mjs +32 -0
- package/server/routes/dataset/controllers/bi.dataset.list.js +29 -0
- package/server/routes/dataset/controllers/bi.db.list.js +19 -0
- package/server/routes/dataset/controllers/comment.js +55 -0
- package/server/routes/dataset/controllers/createDatasetPost.js +134 -0
- package/server/routes/dataset/controllers/data.js +149 -0
- package/server/routes/dataset/controllers/dbTablePreview.js +58 -0
- package/server/routes/dataset/controllers/dbTables.js +34 -0
- package/server/routes/dataset/controllers/delete.js +40 -0
- package/server/routes/dataset/controllers/deleteDataset.js +52 -0
- package/server/routes/dataset/controllers/editDataset.js +90 -0
- package/server/routes/dataset/controllers/export.js +214 -0
- package/server/routes/dataset/controllers/form.js +99 -0
- package/server/routes/dataset/controllers/format.js +46 -0
- package/server/routes/dataset/controllers/insert.js +47 -0
- package/server/routes/dataset/controllers/table.js +68 -0
- package/server/routes/dataset/controllers/update.js +43 -0
- package/server/routes/dataset/index.mjs +132 -0
- package/server/routes/dataset/utils/convertJSONToCSV.js +17 -0
- package/server/routes/dataset/utils/convertJSONToXls.js +47 -0
- package/server/routes/dataset/utils/createTableQuery.js +59 -0
- package/server/routes/dataset/utils/datasetForms.js +1 -0
- package/server/routes/dataset/utils/descriptionList.js +46 -0
- package/server/routes/dataset/utils/downloadRemoteFile.js +58 -0
- package/server/routes/dataset/utils/executeQuery.js +46 -0
- package/server/routes/dataset/utils/getLayersData.js +107 -0
- package/server/routes/dataset/utils/getTableData.js +47 -0
- package/server/routes/dataset/utils/insertDataQuery.js +12 -0
- package/server/routes/dataset/utils/metaFormat.js +24 -0
- package/server/routes/edit/controllers/dashboard.add.js +36 -0
- package/server/routes/edit/controllers/dashboard.delete.js +39 -0
- package/server/routes/edit/controllers/dashboard.edit.js +61 -0
- package/server/routes/edit/controllers/widget.add.js +78 -0
- package/server/routes/edit/controllers/widget.del.js +58 -0
- package/server/routes/edit/controllers/widget.edit.js +106 -0
- package/server/routes/edit/index.mjs +33 -0
- package/server/routes/map/controllers/cluster.js +125 -0
- package/server/routes/map/controllers/clusterVtile.js +166 -0
- package/server/routes/map/controllers/geojson.js +127 -0
- package/server/routes/map/controllers/heatmap.js +118 -0
- package/server/routes/map/controllers/map.js +69 -0
- package/server/routes/map/controllers/utils/downloadClusterData.js +45 -0
- package/server/routes/map/controllers/vtile.js +183 -0
- package/server/routes/map/index.mjs +32 -0
- package/server/templates/page/login.html +59 -0
- package/server/utils/getWidget.js +117 -0
- package/utils.js +12 -0
- package/dist/import-file-Bx4xpxVb.js +0 -3493
|
@@ -0,0 +1,230 @@
|
|
|
1
|
+
import yaml from 'js-yaml';
|
|
2
|
+
|
|
3
|
+
import {
|
|
4
|
+
config,
|
|
5
|
+
autoIndex,
|
|
6
|
+
pgClients,
|
|
7
|
+
getSelect,
|
|
8
|
+
getSelectVal,
|
|
9
|
+
getFilterSQL,
|
|
10
|
+
getMeta,
|
|
11
|
+
logger,
|
|
12
|
+
} from '@opengis/fastify-table/utils.js';
|
|
13
|
+
|
|
14
|
+
import chartSQL from './util/chartSQL.js';
|
|
15
|
+
import normalizeData from './util/normalizeData.js';
|
|
16
|
+
|
|
17
|
+
import { getWidget } from '../../../../utils.js';
|
|
18
|
+
|
|
19
|
+
const maxLimit = 100;
|
|
20
|
+
|
|
21
|
+
/**
 * GET /bi-data handler: resolves a widget definition, builds the chart SQL
 * for it, runs the query and returns rows plus rendering metadata.
 *
 * @param {object} req - fastify request ({ query, user, pg, unittest }).
 * @param {object} reply - fastify reply (only referenced by a commented-out
 *   early-error path below).
 * @returns {object} widget payload ({ source, dimensions, type, ... }) or an
 *   error object ({ message, status }).
 */
export default async function dataAPI(req, reply) {
  const time = Date.now();

  const { query = {}, user = {}, unittest } = req;

  // Repeated ?metric= params arrive as an array; keep only the last one.
  query.metric = Array.isArray(query.metric) ? query.metric.pop() : query.metric;

  const { dashboard, widget, filter, search, samples } = query;

  const widgetData = await getWidget({ pg: req.pg, dashboard, widget });

  // getWidget signals errors by returning { status, message }.
  if (widgetData.status) return widgetData;

  const { type, text, data = {}, controls, style, options } = widgetData;

  const pg = widgetData.pg || req.pg || pgClients.client;

  // Probe the source table with "limit 0" to learn column names/types without
  // fetching rows; a failure is recorded in error1 instead of throwing.
  const error1 = {};
  const { fields: cols = [] } = await pg.query(
    `select * from ${data.table} t ${widgetData.tableSQL || data.tableSQL || ''} limit 0`
  ).catch(err => Object.assign(error1, { error: err.toString() })) || {};
  const columnTypes = cols?.map?.((el) => ({
    name: el.name,
    type: pg.pgType?.[el.dataTypeID],
  }));

  // data param
  const { x, cls, groupbyCls, metric, table, where, tableSQL, groupby, xName, yName, xType, yType, error = error1 } =
    normalizeData(widgetData, query, columnTypes);

  const limit = Math.min(query.limit || widgetData.limit || maxLimit, maxLimit);
  // if (error) { return reply.status(400).send(error); }

  // auto Index: fire-and-forget index creation on the columns used below.
  if (pg.pk?.[data.table]) {
    autoIndex({
      table: data.table,
      pg,
      columns: [data?.time]
        .concat([xName])
        .concat([groupby])
        .filter((el) => el),
    }).catch((err) => console.log(err));
  }

  const { pk, columns = [], view } = await getMeta({ pg, table: data.table });

  if (!view && !pk) {
    return { message: `table not found: ${data.table} (${pg.options?.database})`, status: 404 };
  }

  // const columnList = columns.map(col => col.name);
  const groupbyColumnNotExists = groupby?.split?.(',')?.filter?.(el => !columnTypes.map(el => el.name).includes(el.trim()));

  if (groupby && groupbyColumnNotExists?.length) {
    return { message: `groupby column not found: ${groupbyColumnNotExists} (${data.table}/${pg.options?.database})`, status: 404 };
  }

  // get group: top-20 values of the groupby column, used to pivot metrics.
  // FIX: the original query read "order by count(*) desc 20" — a syntax error
  // (missing the "limit" keyword) that broke every grouped widget.
  const groupData = groupby
    ? await pg
      .query(
        `select ${groupby} as name ,count(*) from ${tableSQL || table} group by ${groupby} order by count(*) desc limit 20`
      )
      .then((el) => el.rows)
    : null;

  // Debug mode: ?sql=2 returns resolved params instead of data.
  if (query.sql === '2') return { x, metric, table, tableSQL, data, groupData };

  const order = data.order || (type === 'listbar' && cols.find(el => el.name === 'metric') ? 'metric desc' : null);

  const fData =
    filter || search
      ? await getFilterSQL({
        pg,
        table,
        filter,
        search,
        filterList: widgetData.filters,
      })
      : {};

  // NOTE(review): `fData?.q && false` is always false, so the filter clause is
  // never appended to a custom widget SQL — confirm whether intentional.
  const optimizedSQL = widgetData?.sql
    ? `${widgetData.sql} ${fData?.q && false ? fData?.q : ''} limit ${limit}`
    : (fData?.optimizedSQL || `select * from ${tableSQL || table}`);

  if (type?.includes('bar') && !metric?.length) {
    return { message: 'empty widget params: metrics', status: 400 };
  }

  // Pick the SQL builder for this widget type; fall back to the generic chart.
  const sql = widgetData.sql ? optimizedSQL : (chartSQL[type] || chartSQL.chart)({
    where: config.local && user?.user_type === 'superadmin' ? 'true' : where, // test
    metric,
    yType, // metric type
    columns: widgetData.columns,
    table: `(${optimizedSQL})q`,
    x,
    groupData,
    groupby,
    order,
    samples,
    limit,
    xType,
    fx: widgetData.fx,
  });

  // Debug mode: any other ?sql value returns the generated SQL text.
  if (query.sql) return sql;

  if (!sql || sql?.includes('undefined')) {
    return {
      message: {
        error: 'invalid sql',
        type,
        sql,
        where,
        metric,
        table: `(${optimizedSQL})q`,
        x,
        groupData,
        groupby,
      },
      status: 500,
    };
  }

  if (config.trace) console.log(sql, user?.uid);

  const { rows = [], fields = [], errorSql } = await pg.query(sql.replace('{{uid}}', user?.uid)).catch(err => {
    logger.file('bi/data', { error: err.toString(), sql });
    return { errorSql: err.toString() };
  }); // test with limit

  // Replace pivoted column keys (class ids) with their display text.
  if (groupbyCls) {
    const { arr = [] } = await getSelect(groupbyCls, pg) || {};
    if (arr.length) {
      const ids = arr.map(el => el.id);
      const text = arr.reduce((acc, curr) => ({ ...acc, [curr.id]: curr.text }), {});
      rows.forEach(row => {
        ids.reduce((acc, curr) => {
          Object.assign(row, { [text[curr]]: row[curr] });
          delete row[curr];
          return acc;
        }, {});
      });
    }
  }

  // Replace x-axis class ids with their display text.
  if (cls) {
    const values = rows
      .map((row) => row[x])
      ?.filter((el, idx, arr) => el && arr.indexOf(el) === idx);
    const vals = await getSelectVal({ pg, name: cls, values });
    rows
      .filter((row) => row[x])
      .forEach((row) => {
        Object.assign(row, { [x]: vals?.[row[x]]?.text || vals?.[row[x]] || row[x] });
      });
  }

  const metaTitles = columns.reduce((acc, curr) => Object.assign(acc, { [curr.name]: curr.title || curr.ua }), {});
  const titles = Array.isArray(widgetData?.columns)
    ? widgetData.columns.reduce((acc, curr) => Object.assign(acc, { [curr.name]: curr.title || curr.ua }), {})
    : Object.keys(widgetData?.columns || {}).reduce((acc, curr) => Object.assign(acc, { [curr]: widgetData?.columns?.[curr] }), {});

  // Table widgets get human-readable column headers instead of raw names.
  const rows1 = type === 'table' ? rows.map(row => Object.keys(row || {}).reduce((acc, curr) => Object.assign(acc, { [titles?.[curr] || metaTitles?.[curr] || curr]: row?.[curr] }), {})) : rows;

  const yml = widgetData.yml || yaml.dump(extractYml(widgetData));
  const dimensions = fields.map((el) => el.name);

  const res = {
    time: Date.now() - time,
    error: error || errorSql || (!widgetData.sql ? widgetData.error : undefined),
    dimensions,
    filter: xName,
    dimensionsType: [xType, yType].filter((el) => el)?.length
      ? [xType, yType].filter((el) => el)
      : fields.map((el) => pg.pgType?.[el.dataTypeID]),
    type,

    text: text || widgetData?.title || data.text,
    // data: query.format === 'data' ? dimensions.map(el => rows.map(r => r[el])) : undefined,
    source:
      query.format === 'array'
        ? dimensions.map((el) => rows1.map((r) => r[el]))
        : rows1,
    style,
    options,
    controls,
    yml,
    data: widgetData.data,
    id: query.widget,
    columns: columnTypes.map(el => Object.assign(el, { title: titles[el.name] || metaTitles?.[el.name] || el.name })),
    // Raw query params are only exposed locally / under unit tests.
    params: config?.local || unittest ? {
      x,
      cls,
      metric,
      table,
      tableSQL,
      where,
      groupby,
      sql,
    } : undefined,
  };
  return res;
}
|
|
226
|
+
|
|
227
|
+
// Project the widget record down to the whitelisted fields that make up its
// YAML representation. Missing fields are kept as own keys with `undefined`,
// matching shorthand-destructuring semantics.
function extractYml(sourceData) {
  const keys = ['title', 'description', 'type', 'data', 'style', 'controls'];
  return keys.reduce((acc, key) => Object.assign(acc, { [key]: sourceData[key] }), {});
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
// SQL for a "number" widget: a single aggregate expression (fx wins over
// metric) over the filtered table; sampling mode caps the scan at 10 rows.
function number({ metric, where, table, samples, fx }) {
  const expr = fx || metric;
  const tail = samples ? 'limit 10' : '';
  return `select ${expr} from ${table} where ${where} ${tail}`;
}
|
|
5
|
+
// SQL for a "table" widget. `columns` may be an array (of names or
// { name } objects) or a map keyed by column name; names are double-quoted
// with single quotes doubled. Falls back to `*` when no columns resolve.
function table({ columns = [], table, where, samples }) {
  const names = Array.isArray(columns)
    ? columns.map((el) => el.name || el)
    : Object.keys(columns);
  const cols = names.map((name) => `"${name.replace(/'/g, "''")}"`).join(',');
  const rowCap = samples ? 'limit 10' : 'limit 20';
  return `select ${cols || '*'} from ${table} where ${where} ${rowCap} `;
}
|
|
11
|
+
|
|
12
|
+
// Generic chart SQL builder: optional x-axis (array columns are unnested),
// either a plain aggregate or one pivoted "filter (where ...)" aggregate per
// groupData value, grouped/ordered by x. Single quotes in user-facing values
// are doubled before interpolation.
function chart({
  metric,
  yType, // metric type
  where,
  table,
  x,
  groupby,
  groupData,
  order,
  samples,
  limit = 100,
  xType,
  fx, // aggregate expression override
}) {
  const isArrayX = Boolean(x && xType?.includes('[]'));
  const xCol = isArrayX ? `unnest(${x})` : x;

  const esc = (v) => v.toString().replace(/'/g, "''");

  const pivoted = groupData
    ?.filter((el) => el.name)
    ?.map((el) => {
      const name = esc(el.name);
      const rhs = yType?.includes('[]') ? `any(${esc(groupby)}::text[])` : esc(groupby);
      return `${metric} filter (where '${name}'=${rhs}) as "${name}"`;
    })
    .join(',');

  const metricData = pivoted || `${fx || metric} as metric`;

  return `select ${xCol} ${isArrayX ? `as ${x}` : ''}, ${metricData}
  from ${table}
  where ${where}
  ${xCol ? `group by ${xCol}` : ''}
  ${order || xCol ? `order by ${order || xCol}` : ''}
  ${samples ? 'limit 10' : `limit ${limit}`}`;
}
|
|
44
|
+
|
|
45
|
+
// "text" widgets have no data query at all.
function text() {
  return undefined;
}

// NOTE(review): `text` is not included in the export map, so a widget of
// type 'text' falls back to the `chart` builder in the consumer — confirm
// whether that omission is intentional before adding it here.
export default { number, chart, table };
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
// Validate and resolve widget/query parameters against the table's actual
// columns, producing the pieces the SQL builders consume.
//
// Side effect: unknown or invalid `x` / `groupby` / `metric` keys are DELETED
// from `query` (granularity and the sentinel groupby='null' are kept).
// Returns { x, cls, groupbyCls, metric, table, where, tableSQL, groupby,
//           xName, xType, yName, yType, error }.
function normalizeData(data, query = {}, columnTypes = []) {
  const skip = [];
  // Loose == on purpose: query values are strings, column names may not be.
  const knownColumn = (name) => columnTypes.some((col) => col.name == name);

  ['x', 'groupby', 'granularity'].forEach((el) => {
    if (knownColumn(query[el])) return;
    if (query[el] && query[el] !== 'null') {
      if (el === 'granularity' && !['week', 'month', 'quarter', 'year'].includes(query[el])) {
        skip.push(`invalid granularity option: ${query[el]}`);
      } else if (el !== 'granularity') {
        skip.push(`column not found: ${query[el]}`);
      }
    }
    const keep = el === 'granularity' || (el === 'groupby' && query[el] === 'null');
    if (!keep) delete query[el];
  });

  // A metric override is only honoured for numeric columns.
  const numericMetric = columnTypes.find(
    (col) => col.type === 'numeric' && col.name == query.metric
  );
  if (!numericMetric) {
    delete query.metric;
  }

  const xName = query.x || (Array.isArray(data.x) ? data.x[0] : data.x);
  const xType = columnTypes.find((el) => el.name == xName)?.type;

  // Temporal x-axes get bucketed; default bucket is a year.
  const isTemporal = xType === 'date' || xType?.includes('timestamp');
  const granularity = isTemporal
    ? query.granularity || data.granularity || 'year'
    : null;

  const x = granularity
    ? `date_trunc('${granularity}',${xName})::date::text`
    : xName;

  const rawMetrics = data.metrics || data.metric;
  const metrics = Array.isArray(rawMetrics) ? rawMetrics : [rawMetrics];

  const aggregated = metrics
    ?.filter((el) => el && columnTypes.find((col) => col.name == (el?.name || el)))
    ?.map((el) => el.fx || `${el.operator || 'sum'}(${el.name || el})`)
    ?.join(',');

  const metric =
    (query.metric ? `sum(${query.metric})` : null) ||
    (metrics.length ? (aggregated || 'count(*)') : 'count(*)');

  const yName = metrics?.[0]?.name || metrics?.[0];
  const yType = columnTypes.find((el) => el.name == yName)?.type;

  const { cls, groupbyCls, table, filterCustom } = data;
  // 'null' is the explicit "no grouping" sentinel sent by the client.
  const groupbyRaw = query.groupby || data.groupby;
  const groupby = groupbyRaw === 'null' ? null : groupbyRaw;
  // const orderby = query.orderby || data.orderby || 'count(*)';

  // Resolve named custom filters to their SQL fragments, AND-joined.
  const custom = query?.filterCustom
    ?.split(',')
    ?.map((el) => filterCustom?.find((item) => item?.name === el)?.sql)
    ?.filter((el) => el)
    ?.join(' and ');
  const where = `${data.query || '1=1'} and ${custom || 'true'}`;

  const tableSQL = data.tableSQL?.length
    ? `(select * from ${data?.table} t ${data.tableSQL || ''} where ${where})q`
    : undefined;

  return { x, cls, groupbyCls, metric, table, where, tableSQL, groupby, xName, xType, yName, yType, error: skip.length ? skip.join(',') : undefined };
}
export default normalizeData;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import data from './controllers/data.js';
|
|
2
|
+
|
|
3
|
+
// Identifier pattern shared by the id-like query params.
const idPattern = '^([\\d\\w_]+)$';

// Request schema for GET /bi-data.
const biSchema = {
  type: 'object',
  properties: {
    querystring: {
      widget: { type: 'string', pattern: idPattern },
      dashboard: { type: 'string', pattern: idPattern },
      sql: { type: 'string', pattern: '^([\\d])$' },
      // metric: { type: 'string', pattern: idPattern },
      x: { type: 'string', pattern: idPattern },
      granularity: { type: 'string', pattern: '^(week|month|quarter|year)$' },
      groupby: { type: 'string', pattern: idPattern },
      filterCustom: { type: 'string', pattern: '^([\\d\\w_,]+)$' },
    },
    params: {
      id: { type: 'string', pattern: '^([\\d\\w]+)$' },
    },
  },
};

const policy = ['public'];

/**
 * Register the GET /bi-data route serving widget data.
 *
 * @param {object} fastify - fastify instance.
 * @param {object} opts - plugin options (unused).
 */
export default async function route(fastify, opts) {
  fastify.route({
    method: 'GET',
    url: '/bi-data',
    schema: biSchema,
    config: { policy },
    handler: data,
  });
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { pgClients } from "@opengis/fastify-table/utils.js";
|
|
2
|
+
|
|
3
|
+
const maxLimit = 100;

/**
 * List BI datasets.
 *
 * @method GET
 * @summary List BI datasets
 * @priority 4
 * @alias biDatasetList
 * @type api
 * @tag bi
 * @errors 400,500
 * @returns {Number} status error code
 * @returns {String} error error description
 * @returns {Object} rows dataset rows
 */
export default async function biDatasetList({ pg = pgClients.client, query = {} }) {
  // FIX: guard against non-numeric query params. The original
  // `+(query.limit || 20)` produced NaN for e.g. ?limit=abc, which leaked
  // into the SQL as "limit NaN"; same for page/offset.
  const requested = Number(query.limit);
  const limit = Math.min(maxLimit, Number.isFinite(requested) && requested > 0 ? requested : 20);

  const page = Number(query.page);
  const offset = Number.isFinite(page) && page > 0 ? (page - 1) * limit : 0;

  const { rows = [] } = await pg.query(`select dataset_id as id, name, source_type as source, table_name as table, dashboard_list
    from bi.dataset order by name limit ${limit} offset ${offset}`);

  return { rows };
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { config } from '@opengis/fastify-table/utils.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Список БД
|
|
5
|
+
*
|
|
6
|
+
* @method POST
|
|
7
|
+
* @priority 4
|
|
8
|
+
* @alias biDbList
|
|
9
|
+
* @type api
|
|
10
|
+
* @tag bi
|
|
11
|
+
* @errors 400,500
|
|
12
|
+
* @returns {Number} status Номер помилки
|
|
13
|
+
* @returns {String} error Опис помилки
|
|
14
|
+
* @returns {Object} rows Масив з колонками таблиці
|
|
15
|
+
*/
|
|
16
|
+
|
|
17
|
+
// Return the configured databases as { id, text } options for the UI;
// an empty list when config.db_list is absent.
export default async function biDbList(req) {
  const options = config.db_list?.map((el) => ({ id: el.key, text: el.title }));
  return { db_list: options || [] };
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { pgClients } from '@opengis/fastify-table/utils.js';
|
|
2
|
+
|
|
3
|
+
// Apply column comments to a dataset's backing table (COMMENT ON COLUMN) and
// mirror the resulting titles into bi.dataset.setting.title.
//
// body.data: array of { name, comment } entries; entries whose name is not an
// actual column of the table are silently dropped.
// Returns the updated { id, setting } row, or { message, status } on error.
export default async function datasetEditComment({
  pg = pgClients.client,
  body = {},
  params = {},
}) {
  // params.id is the bi.dataset primary key.
  if (!params?.id) {
    return { message: 'not enough params: id', status: 400 };
  }

  if (!Array.isArray(body.data) || !body.data?.length) {
    return { message: 'invalid param: body.data not an array / empty', status: 400 };
  }

  const dataset = await pg.query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [params.id])
    .then(el => el.rows?.[0] || {});

  if (!dataset?.table) {
    return { message: dataset?.id ? 'dataset table not set' : 'dataset not found', status: 404 };
  }

  // "limit 0" probe: fetch the column list without reading any rows; only
  // attempted when the table is known to the pg client's pk registry.
  const { fields = [] } = pg.pk?.[dataset.table] ? await pg.query(`select * from ${dataset.table} limit 0`) : {};

  if (!fields.length) {
    return { message: `table not found: ${dataset.table}`, status: 404 };
  }

  // Keep only entries that name a real column and carry a comment.
  const columnList = fields.map(el => el.name);
  const validData = body.data?.filter?.(el => el?.name && el?.comment && columnList.includes(el.name));

  // NOTE(review): identifiers are double-quoted but only single quotes are
  // doubled; a column name containing '"' would break the statement — the
  // columnList check above restricts names to actual columns, so confirm
  // whether that is considered sufficient.
  const q1 = validData.map(el => `COMMENT ON COLUMN ${dataset.table}."${el.name.replace(/'/g, "''")}" IS '${el.comment.replace(/'/g, "''")}'`).join(';');
  if (q1) await pg.query(q1);

  // Read back every column comment from the catalog (all tables; filtered to
  // this dataset's table below via attrelid::regclass).
  const q2 = `select
    attrelid::regclass,
    attname,
    pg_catalog.col_description(attrelid,attnum) as title
  from pg_catalog.pg_attribute a
  where attnum > 0
  and attname not in ('editor_id','editor_date','cdate','geom','id','uid','cdate')
  and atttypid::regtype not in ('json','geometry')
  and pg_catalog.col_description(attrelid,attnum) is not null`;

  const { rows = [] } = await pg.query(q2);

  // Map column name -> comment text for this table only.
  const title = rows
    .filter(el => el.attrelid === dataset.table)
    .reduce((p, el) => ({ ...p, [el.attname]: el.title }), {});

  // Merge the titles into the dataset's jsonb settings (existing keys kept).
  const res = await pg.query(`update bi.dataset set setting=coalesce(setting::jsonb, '{}'::jsonb)||$1::jsonb
    where dataset_id=$2 returning dataset_id as id, setting`, [{ title }, dataset.id]);

  return res.rows?.[0];
}
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import { existsSync } from 'node:fs';
|
|
3
|
+
|
|
4
|
+
import { config, pgClients, getFolder, file2json } from '@opengis/fastify-table/utils.js';
|
|
5
|
+
|
|
6
|
+
import createTableQuery from '../utils/createTableQuery.js';
|
|
7
|
+
import executeQuery from '../utils/executeQuery.js';
|
|
8
|
+
import downloadRemoteFile from '../utils/downloadRemoteFile.js';
|
|
9
|
+
|
|
10
|
+
// Registration insert shared by all three creation paths below.
const insertDataset = `insert into bi.dataset
  (name, table_name, dataset_file_path, column_list, pk, data_source, uid)
  values($1,$2,$3,$4,$5,$6,$7) returning dataset_id`;

// Create a BI dataset from one of three sources, tried in this order:
//   1. an uploaded/downloaded file (body.file or body.dataset_url),
//   2. an existing database table (body.table_name),
//   3. an explicit column list (body.column_list) -> brand-new table.
// Returns { message: { id, table, source }, status: 200 } on success or
// { message|error, status } on failure. Requires an authenticated user.
export default async function createDatasetPost({
  pg = pgClients.client, body = {}, user = {},
}) {
  if (!user?.uid) {
    return { message: 'access restricted', status: 403 };
  }

  if (!body?.name) {
    return { message: 'not enough query params: name', status: 400 };
  }

  // At least one source must be supplied.
  if (!body?.table_name && !body?.file && !body?.column_list?.length && !body?.dataset_url) {
    return { message: 'not enough query params: table / file / column_list/ url', status: 400 };
  }

  const {
    name,
    table_name: existingTable,
    column_list: columns = [],
    dataset_url: datasetUrl,
    encoding,
  } = body;

  const rootDir = getFolder(config, 'local');

  // Remote URL source: download first, then continue through the file path.
  if (datasetUrl) {
    const { filePath, error } = await downloadRemoteFile({
      rootDir, url: datasetUrl, table: name || datasetUrl,
    });
    if (error || !filePath) {
      return { message: error || 'file request URL error', status: 500 };
    }
    Object.assign(body, { file: filePath });
  }

  const { file: relPath } = body;

  // --- Path 1: file-backed dataset ---
  if (relPath) {
    const filepath = path.join(rootDir, relPath);
    const exists = existsSync(filepath);

    if (!exists) {
      return { message: 'Файл з вихідними даними не знайдено', status: 404 };
    }

    const json = await file2json({ filepath, encoding });

    // excel sheets fix?
    // Multi-sheet workbooks come back as an object keyed by sheet name; take
    // the first sheet. For .json sources an optional body.key selects a
    // nested payload.
    const data1 = (['.xls', '.xlsx'].includes(path.extname(filepath)) && !Array.isArray(json)) ? json[Object.keys(json)[0]] : json;
    const data = path.extname(filepath) === '.json' && body?.key ? data1?.[body.key] : data1;

    // Normalize every source into GeoJSON-like Feature objects: tabular
    // formats get properties only; JSON rows may carry a `geom` geometry.
    const features = ['.csv', '.xlsx', '.xls'].includes(path.extname(filepath))
      ? data?.map?.((el) => ({ type: 'Feature', properties: Object.keys(el).reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }))
      : data?.features || data?.map?.((el) => ({ type: 'Feature', geometry: el.geom, properties: Object.keys(el).filter((key) => key !== 'geom').reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }));

    if (!Array.isArray(features) || !features?.length) {
      return { message: 'Файл з вихідними даними порожній', status: 400 };
    }

    Object.assign(data, { features });

    // Derive the column set from the first feature, dropping service columns.
    const fileColumns = Object.keys(data?.features[0]?.properties)
      ?.filter((el) => !['editor_date', 'cdate', 'uid', 'editor_id', 'files'].includes(el.toLowerCase()))
      ?.map((el) => ({ title: el, format: 'text' }));

    const { sql, pkey, table } = createTableQuery(fileColumns, name);

    const { datasetId, error } = await executeQuery({
      pg,
      sql,
      data,
      name,
      table,
      relPath,
      columns: fileColumns,
      pkey,
      source: datasetUrl ? 'url' : 'file',
      url: datasetUrl,
      user,
      dataKey: body?.key,
    });

    if (error) return { error, status: 500 };

    // Register the new table with the pg client's metadata caches.
    pg.pk[table] = pkey;
    pg.tlist.push(table);
    return { message: { id: datasetId, table, source: datasetUrl ? 'url' : 'file' }, status: 200 };
  }

  // --- Path 2: wrap an existing table ---
  if (existingTable) {
    // Ensure the service columns BI relies on exist on the wrapped table.
    await pg.query(`alter table ${existingTable} add column if not exists geom public.geometry;
      alter table ${existingTable} add column if not exists files json`);
    const args = [name, existingTable, null, null, pg.pk?.[existingTable], JSON.stringify({ type: 'table' }), user?.uid];
    const datasetId = await pg.query(insertDataset, args).then(el => el.rows?.[0]?.dataset_id);
    return { message: { id: datasetId, table: existingTable, source: 'table' }, status: 200 };
  }

  // --- Path 3: brand-new table from an explicit column list ---
  if (!columns?.length) {
    return { message: 'У даній заяві відсутні налаштування структури набору даних', status: 400 };
  }

  const { sql, pkey, table } = createTableQuery(columns, name);

  const { datasetId, error } = await executeQuery({
    pg,
    sql,
    name,
    table,
    relPath,
    columns,
    pkey,
    source: 'newtable',
    user,
  });

  if (error) return { error, status: 500 };

  // Register the new table with the pg client's metadata caches.
  pg.pk[table] = pkey;
  pg.tlist.push(table);
  return { message: { id: datasetId, table, source: 'newtable' }, status: 200 };
}
|