@opengis/bi 1.0.21 → 1.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +109 -106
- package/dist/{import-file-C8BY90-b.js → import-file-DgBd_UN1.js} +20484 -18794
- package/dist/{map-component-mixin-CFtShPun.js → map-component-mixin-NewmNy_M.js} +502 -483
- package/dist/style.css +1 -1
- package/dist/{vs-calendar-B9vXdsaG.js → vs-calendar-CPXj4hBh.js} +1 -1
- package/dist/vs-donut-CUmi2ir5.js +148 -0
- package/dist/{vs-funnel-bar-Cj0O8tIf.js → vs-funnel-bar-B3DpbtUl.js} +1 -1
- package/dist/{vs-heatmap-C9oFph_f.js → vs-heatmap-COwT3bHE.js} +1 -1
- package/dist/{vs-map-WOn0RAU7.js → vs-map-DwyQHLpN.js} +2 -2
- package/dist/{vs-map-cluster-RJa6sNfI.js → vs-map-cluster-CnZ9g6k-.js} +2 -2
- package/dist/{vs-number-BG0szZL-.js → vs-number-LwROg9Oe.js} +5 -5
- package/dist/vs-table-Dt_MSaCC.js +68 -0
- package/dist/{vs-text-Kwl3-0yy.js → vs-text-DgAf3Ids.js} +2 -2
- package/package.json +6 -5
- package/plugin.js +1 -1
- package/server/helpers/mdToHTML.js +17 -0
- package/server/migrations/bi.dataset.sql +13 -0
- package/server/migrations/bi.sql +2 -0
- package/server/routes/dashboard/controllers/dashboard.js +10 -11
- package/server/routes/dashboard/controllers/dashboard.list.js +6 -6
- package/server/routes/data/controllers/data.js +14 -6
- package/server/routes/data/controllers/util/chartSQL.js +6 -3
- package/server/routes/dataset/controllers/bi.dataset.list.js +3 -1
- package/server/routes/dataset/controllers/comment.js +55 -0
- package/server/routes/dataset/controllers/createDatasetPost.js +132 -0
- package/server/routes/dataset/controllers/data.js +145 -0
- package/server/routes/{db → dataset}/controllers/dbTablePreview.js +17 -24
- package/server/routes/{db → dataset}/controllers/dbTables.js +7 -11
- package/server/routes/dataset/controllers/delete.js +39 -0
- package/server/routes/dataset/controllers/{bi.dataset.edit.js → editDataset.js} +32 -26
- package/server/routes/dataset/controllers/export.js +213 -0
- package/server/routes/dataset/controllers/form.js +99 -0
- package/server/routes/dataset/controllers/format.js +44 -0
- package/server/routes/dataset/controllers/insert.js +46 -0
- package/server/routes/dataset/controllers/table.js +69 -0
- package/server/routes/dataset/controllers/update.js +42 -0
- package/server/routes/dataset/index.mjs +88 -43
- package/server/routes/dataset/utils/convertJSONToCSV.js +17 -0
- package/server/routes/dataset/utils/convertJSONToXls.js +49 -0
- package/server/routes/dataset/utils/createTableQuery.js +59 -0
- package/server/routes/dataset/utils/datasetForms.js +1 -0
- package/server/routes/dataset/utils/descriptionList.js +46 -0
- package/server/routes/dataset/utils/downloadRemoteFile.js +58 -0
- package/server/routes/dataset/utils/executeQuery.js +46 -0
- package/server/routes/dataset/utils/getLayersData.js +107 -0
- package/server/routes/dataset/utils/getTableData.js +47 -0
- package/server/routes/dataset/utils/insertDataQuery.js +12 -0
- package/server/routes/dataset/utils/metaFormat.js +24 -0
- package/server/routes/edit/controllers/dashboard.add.js +3 -3
- package/server/routes/edit/controllers/widget.add.js +8 -3
- package/server/routes/edit/controllers/widget.edit.js +23 -5
- package/server/routes/map/controllers/cluster.js +41 -41
- package/server/routes/map/controllers/clusterVtile.js +5 -5
- package/server/routes/map/controllers/geojson.js +6 -6
- package/server/routes/map/controllers/heatmap.js +118 -0
- package/server/routes/map/controllers/map.js +3 -3
- package/server/routes/map/controllers/utils/downloadClusterData.js +6 -4
- package/server/routes/map/controllers/vtile.js +3 -3
- package/server/routes/map/index.mjs +2 -0
- package/server/utils/getWidget.js +10 -6
- package/server/routes/dataset/controllers/bi.dataset.add.js +0 -86
- package/server/routes/dataset/controllers/bi.dataset.data.add.js +0 -49
- package/server/routes/dataset/controllers/bi.dataset.data.del.js +0 -54
- package/server/routes/dataset/controllers/bi.dataset.data.edit.js +0 -55
- package/server/routes/dataset/controllers/bi.dataset.data.list.js +0 -71
- package/server/routes/dataset/controllers/bi.dataset.del.js +0 -48
- package/server/routes/dataset/controllers/bi.dataset.demo.add.js +0 -97
- package/server/routes/dataset/controllers/util/create.table.js +0 -21
- package/server/routes/dataset/controllers/util/prepare.data.js +0 -49
- package/server/routes/db/index.mjs +0 -17
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
/* eslint-disable no-await-in-loop */
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { createHash } from 'node:crypto';
|
|
4
|
+
import { existsSync, rmSync } from 'node:fs';
|
|
5
|
+
|
|
6
|
+
import { readFile, writeFile, mkdir } from 'node:fs/promises';
|
|
7
|
+
|
|
8
|
+
import {
|
|
9
|
+
config, pgClients, getMeta, getFolder, getFilterSQL, eventStream, logger,
|
|
10
|
+
} from '@opengis/fastify-table/utils.js';
|
|
11
|
+
|
|
12
|
+
import convertJSONToXls from '../utils/convertJSONToXls.js';
|
|
13
|
+
import convertJSONToCSV from '../utils/convertJSONToCSV.js';
|
|
14
|
+
import metaFormat from '../utils/metaFormat.js';
|
|
15
|
+
|
|
16
|
+
/**
 * Exports dataset rows to a downloadable file (json / csv / xlsx / geojson)
 * with hourly on-disk caching. Rows are paged out of Postgres in chunks and
 * appended to a JSON file, then optionally converted to csv/xlsx. Progress is
 * streamed to the client via eventStream for large exports (>= 10k rows);
 * small exports and unit tests log to the console instead.
 *
 * @param {object} req   fastify request: pg, params.id (dataset id),
 *   params.object_id (optional single-row export), query (format, filter,
 *   search, limit, nocache, sql), user, unittest
 * @param {object} reply fastify reply — used for cached-file download and SSE
 * @returns {object|string} status object, raw SQL (admin/debug), or file send
 */
export default async function datasetDataExport(req, reply) {
  const {
    pg = pgClients.client, params = {}, query = {}, user = {}, unittest,
  } = req;

  if (!user?.uid) {
    return { message: 'access restricted', status: 403 };
  }

  const { user_type: userType = 'regular' } = user;

  if (!params?.id) {
    return { message: 'not enough params: id', status: 404 };
  }

  const { format = 'json' } = query;

  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns,
  export_columns as "exportColumns", sql_list as sql from bi.dataset where dataset_id=$1`, [params.id])
    .then(el => el.rows[0] || {});

  if (!dataset?.id || !dataset.table) {
    return { message: 'dataset not found', status: 404 };
  }

  // NOTE(review): dataset.pk and dataset.query are referenced below but are
  // not selected by the query above, so they are always undefined here —
  // confirm whether pk/query columns should be added to the select list.
  const { pk = dataset.pk, view, columns: dbColumns = [] } = await getMeta(dataset.table);

  if (!dataset.table || (!pg.pk?.[dataset.table] && !pk && !view)) {
    return { message: `table not found: ${dataset.table}`, status: 404 };
  }

  const rootDir = getFolder(req, 'local');

  // Cache key = filter state + current hour: a repeated export with the same
  // filters within the same hour reuses the file already on disk.
  const date = new Date();
  const sufixName = `${query.filter || '1=1'}-${query.col || 'all'}-${query.search || '1=1'}-${query.limit || 'unlimited'}`;
  const sufixDate = [date.getFullYear(), date.getMonth(), date.getDate(), date.getHours()].join('-');

  const objInfo = createHash('md5').update([sufixName, sufixDate].join('-')).digest('hex');
  const fileName = dataset.table.concat('_').concat(objInfo).concat('.').concat(format);
  const filepath = path.join(rootDir, '/files/temp', fileName);

  if (existsSync(filepath) && query.nocache) {
    rmSync(filepath);
  }

  // Cached file of the requested format — send it straight away.
  if (existsSync(filepath) && !query?.sql) {
    const encoding = path.extname(filepath) === '.xlsx' ? 'base64' : 'utf8';
    const data = await readFile(filepath, encoding);
    return reply.headers({ 'Content-Disposition': `attachment; filename=${path.basename(filepath)}` }).send(Buffer.from(data, encoding));
  }

  const fData = await getFilterSQL({
    filter: query.filter,
    search: query?.search,
    table: dataset.table,
    json: 1,
  });

  const where = [(params.object_id ? ` "${pk}" = $1` : null), dataset.query, fData.q].filter((el) => el);

  const cols = dataset.exportColumns
    || dataset.columns?.map((el) => el.name);

  if (!cols?.length) {
    return { message: `empty export columns: ${dataset.id}`, status: 400 };
  }

  // Optional per-dataset lateral joins; {{uid}} is templated per user.
  const sqlTable = dataset.sql
    ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
    ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', user?.uid)}) ${el.name || `t${i}`} on 1=1 `)
    ?.join('') || '';

  // fix: object_id was string-interpolated into the SQL (injection risk);
  // pass it as a bind parameter like the main query below does.
  const { total = '0' } = await pg.query(`select count(*) as total from ${dataset.table}
  where ${dataset.query || 'true'} and ${params.object_id ? `"${pk}" = $1` : '1=1'}`, (params.object_id ? [params.object_id] : undefined))
    .then(el => el.rows?.[0] || {});

  if (total === '0') {
    return { message: 'Немає даних, які можна експортувати', status: 200 };
  }

  const ext = path.extname(filepath);
  const filepathJSON = ['csv', 'xlsx'].includes(format) ? filepath.replace(ext, '.json') : filepath;

  const cacheFileJSON = existsSync(filepathJSON);

  const send = +total < 10000 || unittest
    ? console.log
    : eventStream(reply);

  // A cached intermediate JSON exists: skip data paging, convert directly.
  if (cacheFileJSON && !query.sql) {
    if (query.nocache) rmSync(filepathJSON); // delete old file, prevent append
    if (!query.nocache && ['xlsx', 'csv'].includes(format)) {
      send(`Знайдено файл формату json. Пропуск обробки даних. Початок конвертації в ${format}...`);
      if (format === 'xlsx') {
        return convertJSONToXls({
          filepathJSON,
          colmodel: dataset.columns,
          domain: req.hostname,
          source: dataset.table,
          send,
        });
      }
      if (format === 'csv') {
        return convertJSONToCSV({ send, filepathJSON });
      }
    }
  }

  const limit = Math.min(query.limit, 10000) || 10000;

  const optimizedSQL = `select ${pk ? `"${pk}" as id,` : ''} *
  ${['geojson', 'shp'].includes(format) && dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
  from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')}) t
  ${sqlTable} where ${where.join(' and ') || 'true'} offset 0 limit ${limit}`.replace(/{{uid}}/g, user?.uid);

  if (query.sql === '1' && (config.debug || userType.includes('admin'))) return optimizedSQL;

  const res = {};
  let offset = 0;
  let seq = 0;

  send(`Всього в реєстрі: ${total}`);

  // Page through the data in `limit`-sized chunks, appending to the JSON file.
  while ((+total - offset > 0) && !res?.error) {
    try {
      const q = optimizedSQL.replace(/offset 0/g, `offset ${offset}`);
      const { rows = [] } = await pg.query(q, (params.object_id ? [params.object_id] : null));

      send(`Оброблено: ${offset}/${total}`);
      send(`seq: ${++seq}`);
      send(`Обробка ${rows.length} об'єктів...`);

      if (!rows.length) {
        send('Обробка даних успішно завершена');
        break;
      }

      await metaFormat({ rows, columns: dataset.columns });

      // skip non present after metaFormat
      const allowColumnList = []; // 'id'
      if (['geojson', 'shp'].includes(format)) {
        allowColumnList.push('geom');
      }
      rows.forEach((row) => {
        Object.keys(row)
          // fix: keys are strings, so the original `el.name` was always
          // undefined and the geom allow-list never took effect
          .filter((el) => !allowColumnList.includes(el) && !cols.includes(el))
          .forEach((key) => delete row[key]);
      });

      // convert csv / xlsx from json
      if (!existsSync(filepathJSON)) { // if json not exists
        await mkdir(path.dirname(filepath), { recursive: true });
        await writeFile(filepathJSON, JSON.stringify(rows));
      }
      else { // if json exists
        // fix: fallback must be an array literal — the parsed value is
        // concat-ed below, and `{}` has no .concat
        const jsonData = JSON.parse(await readFile(filepathJSON) || '[]');
        const moreData = jsonData.concat(rows); // rewrite to appendFile?
        await writeFile(filepathJSON, JSON.stringify(moreData));
      }

      offset += rows.length;
    }
    catch (err) {
      Object.assign(res, { error: err.toString(), status: 500 });
      send(`error: ${err.toString()}`, 1);
      logger.error({
        name: 'export/table',
        filepathJSON,
        total,
        offset,
        result: res,
        error: err.toString(),
      });
      return err.toString();
    }
  }

  if (!res?.error && format === 'csv') {
    send('Сформовано файл формату json. Початок конвертації в csv...');
    return convertJSONToCSV({ send, filepathJSON });
  }

  if (!res?.error && format === 'xlsx') {
    send('Сформовано файл формату json. Початок конвертації в xlsx...');
    return convertJSONToXls({
      filepathJSON,
      colmodel: dataset.columns,
      domain: req.hostname,
      source: dataset.table,
      send,
    });
  }

  const message = res.error || 'Файл успішно сформовано. Натистіть кнопку ще раз для завантаження даних';
  send(message, 1);
  return { message, status: 200 };
}
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import {
|
|
2
|
+
config, pgClients, getMeta, getToken,
|
|
3
|
+
} from '@opengis/fastify-table/utils.js';
|
|
4
|
+
|
|
5
|
+
import datasetForms from '../utils/datasetForms.js';
|
|
6
|
+
|
|
7
|
+
// Maps a dataset column format/type to its form input component name.
const inputType = {
  text: 'Text',
  autocomplete: 'Autocomplete',
  select: 'Autocomplete',
  date: 'DatePicker',
  'yes/no': 'Switcher',
  badge: 'Select',
  number: 'Number',
  tags: 'Tags',
  file: 'File',
};

// System-managed columns that are never exposed on user forms.
const systemColumns = [
  'uid',
  'cdate',
  'editor_id',
  'editor_date',
  'files',
];

/**
 * Builds (and caches in datasetForms) the form schema for a dataset: either
 * the stored form_setting or a schema generated from the dataset/table
 * columns, plus a geom field and, when the table has a files column, a
 * FileList field.
 *
 * @param {object} req destructured request: pg client, params.id (dataset id
 *   or access token), query.nocache (force rebuild), user
 * @returns {object} form schema keyed by field name, or {message, status}
 */
export default async function datasetForm({
  pg = pgClients.client, params = {}, query = {}, user = {},
}) {
  if (!user?.uid) {
    return { message: 'access restricted', status: 403 };
  }

  if (!params?.id) {
    return { message: 'not enough params: id', status: 404 };
  }

  const tokenData = await getToken({
    uid: user?.uid,
    token: params.id,
    mode: 'a',
    json: 1,
  });

  if (!tokenData && !config.local && !config.debug) {
    return { message: 'token not allow', status: 403 };
  }

  const {
    id, columns, formSetting, table, style,
  } = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, style,
  form_setting as "formSetting" from bi.dataset where dataset_id=$1`, [tokenData?.dataset || params.id])
    .then(el => el.rows?.[0] || {});

  if (!id) {
    return { message: 'dataset not found', status: 404 };
  }

  // Serve the cached schema unless the caller forces a rebuild.
  if (datasetForms[id] && !query.nocache) {
    return datasetForms[id];
  }

  const { columns: dbColumns = [] } = await getMeta(table) || {};
  const isFilesColumn = dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text');

  const formSchema = formSetting
    || ((columns || dbColumns).filter((col) => !systemColumns.includes(col.name))).reduce((acc, curr) => Object.assign(acc, {
      [curr.name]: {
        type: inputType[curr?.format || curr?.type || 'text'] || 'Text',
        ua: curr?.description || curr?.title || curr?.name,
        validators: curr?.is_required ? ['required'] : null,
        options: curr?.values?.length ? curr?.values : null,
        data: curr?.data,
        // fix: `curr?.type || curr?.format === 'number'` parsed as
        // `(curr?.type || …)`, setting min=0 for every typed column;
        // only numeric columns should get a minimum of 0
        min: (curr?.type === 'number' || curr?.format === 'number') ? 0 : null,
      },
    }), {});

  Object.assign(formSchema, {
    geom: {
      type: 'Geom',
      ua: 'Геометрія',
      height: '500',
      // NOTE(review): key "geom_edit_controol" looks misspelled — confirm the
      // consumer expects this exact key before renaming it
      geom_edit_controol: [style?.type ? [style.type] : null],
    },
  });

  if (isFilesColumn) {
    Object.assign(formSchema, {
      files: {
        type: 'FileList',
        ua: 'Файли',
      },
    });
  }

  datasetForms[id] = formSchema;

  return formSchema;
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import getTableData from "../utils/getTableData.js";
|
|
2
|
+
import getLayersData from "../utils/getLayersData.js";
|
|
3
|
+
|
|
4
|
+
/**
 * Resolves feature info for a map interaction: by layer + lat/lng (identify)
 * or by table + id (direct lookup). Returns matched rows, rendered html and
 * per-stage timings collected in the shared `time` array by the helpers.
 *
 * @param {object} req destructured request: query {layer, table, id, lat, lng}, pg
 * @returns {object} {time, id, table, layers, data, rows, html, template, body}
 *   or a {message, status} error object
 */
export default async function gisFormat({ query = {}, pg }) {
  const time = [Date.now()];
  const { layer, table, id, lat, lng } = query;

  if (!table && !layer) {
    return { message: 'not enough params', status: 400 };
  }

  if (layer && !(lat && lng)) {
    return { message: 'not enough params: lat/lng', status: 400 };
  }

  if (table && !(id || layer)) {
    return { message: 'not enough params: id', status: 400 };
  }

  // fix: default rows to [] so a helper that resolves without rows (and
  // without a message) does not crash the response assembly below
  const { message, template, rows = [], layers, html, body } = layer
    ? await getLayersData({ pg, layer, table, id, lat, lng, time })
    : await getTableData({ pg, table, id, time });

  if (message) return { message, status: 400 };

  return {
    // stage timings pushed into `time` by the helper calls above
    time: {
      layer: time[1] - time[0],
      data: time[2] - time[1],
      html: time[3] - time[2],
      total: time[3] - time[0],
    },

    id: rows[0]?.id,
    table,
    layers,

    data: { geom: rows[0]?.geom },
    rows,
    html,
    template,
    body,
  };
}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import {
|
|
2
|
+
config, getToken, dataInsert, pgClients,
|
|
3
|
+
} from '@opengis/fastify-table/utils.js';
|
|
4
|
+
|
|
5
|
+
import datasetForms from '../utils/datasetForms.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Inserts a row into a dataset's backing table. Access is granted either by
 * an add-mode token or, in local/debug mode, directly by dataset id.
 *
 * @param {object} req destructured request: pg client, params.id (token or
 *   dataset id), body (row data), user
 * @returns {object} {rows} on success, or a {message[, status]} object
 */
export default async function datasetDataInsert({
  pg = pgClients.client, params = {}, body = {}, user = {},
}) {
  if (!user?.uid) {
    return { message: 'access restricted', status: 403 };
  }

  const token = await getToken({
    uid: user?.uid,
    token: params.id,
    mode: 'a',
    json: 1,
  });

  // local/debug deployments may insert without a token
  const bypassToken = config.local || config.debug;
  if (!token && !bypassToken) {
    return { message: 'token not allow', status: 403 };
  }

  const dataset = await pg
    .query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [token?.dataset || params.id])
    .then((result) => result.rows[0] || {});

  // Token carries the target form/table; fall back to the dataset's own.
  const { form, table: targetTable } = token
    || { form: `${dataset.id}.form`, table: dataset.table };

  if (!form || !dataset.id || !datasetForms[dataset.id]) {
    return { message: 'dataset not found', status: 404 };
  }

  const inserted = await dataInsert({
    table: targetTable,
    data: body,
    uid: user?.uid,
  });

  return inserted ? { rows: inserted.rows } : { message: 'nothing added' };
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import {
|
|
2
|
+
config, pgClients, getMeta, setToken, getToken,
|
|
3
|
+
} from '@opengis/fastify-table/utils.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Loads a single dataset row for form editing and attaches a one-time token
 * (covering the row id, table and form) for subsequent write calls.
 *
 * @param {object} req destructured request: pg client, params.id (token or
 *   dataset id), params.object_id, query.sql (admin SQL debug), user
 * @returns {object|string} the row (with .token), raw SQL for admins, or a
 *   {message, status} error object
 */
export default async function datasetFormData({
  pg = pgClients.client, params = {}, query = {}, user = {},
}) {
  // Guard clauses: required id param, authenticated user.
  if (!params?.id) {
    return { message: 'not enough params: id', status: 400 };
  }

  if (!user?.uid) {
    return { message: 'access restricted', status: 403 };
  }

  const { user_type: userType = 'regular' } = user;

  const token = await getToken({
    uid: user?.uid,
    token: params.id,
    mode: 'w',
    json: 1,
  });

  // Without a token the caller must address the row explicitly.
  if (!token && !params?.object_id) {
    return { message: 'not enough params: object_id', status: 400 };
  }

  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, filter_list as filters,
  sql_list as sql, style, form_setting, pk, access_level from bi.dataset where dataset_id=$1`, [token?.dataset || params.id]).then(el => el.rows[0] || {});

  if (!dataset?.id) {
    return { message: 'dataset not found', status: 404 };
  }

  // All tables/views visible to the connection, as schema-qualified names.
  const tlist = await pg.query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist
  from pg_class where relkind in ('r','v')`).then(el => el.rows[0]?.tlist || []);

  const bareTable = dataset.table?.replace(/"/g, '');
  if (!dataset.table || !tlist.includes(bareTable) || (!pg.pk?.[bareTable] && !dataset.pk)) {
    return { message: `table not found: ${dataset.table}`, status: 404 };
  }

  const { pk = dataset.pk, columns: dbColumns = [] } = await getMeta(dataset.table) || {};

  const cols = dataset.columns?.filter((el) => el.name !== 'geom')?.map((el) => el.name || el)?.join(',');

  // NOTE(review): dataset.query is not in the select list above, so this is
  // always just the pk clause — confirm whether a query column should be read.
  const where = [`"${pk}" = $1`, dataset.query].filter((el) => el);

  const hasGeom = dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry');
  const hasFiles = dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text');
  const geom = hasGeom ? ',st_asgeojson(geom)::json as geom' : '';
  const files = hasFiles ? ',files' : '';

  const sql = `select "${pk}" as id, ${cols || '*'} ${files} ${geom} from ${dataset.table} t where ${where.join(' and ') || 'true'} limit 1`;

  // Debug: expose the generated SQL to admins only.
  if (query.sql === '1' && (config.debug || userType.includes('admin'))) { return sql; }

  const row = await pg.query(sql, [token?.id || params.object_id]).then(el => el.rows?.[0] || {});

  row.token = setToken({
    ids: [JSON.stringify({
      id: row.id,
      table: dataset.table,
      form: `${dataset.id}.form`,
      dataset: dataset.id,
    })],
    uid: user?.uid,
    array: 1,
  })[0];

  return row;
}
|
|
69
|
+
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import {
|
|
2
|
+
config, getToken, dataUpdate, pgClients,
|
|
3
|
+
} from '@opengis/fastify-table/utils.js';
|
|
4
|
+
|
|
5
|
+
import datasetForms from '../utils/datasetForms.js';
|
|
6
|
+
|
|
7
|
+
/**
 * Updates a dataset row. The target row/table come from an add-mode token
 * when present, otherwise from the dataset record and params.object_id.
 *
 * @param {object} req destructured request: pg client, params.id (token or
 *   dataset id), params.object_id, body (row data), user
 * @returns {object} result of dataUpdate, or a {message, status} error
 */
export default async function datasetDataUpdate({
  pg = pgClients.client, params = {}, body = {}, user = {},
}) {
  if (!user?.uid) {
    return { message: 'access restricted', status: 403 };
  }

  const token = await getToken({
    uid: user?.uid,
    token: params.id,
    mode: 'a',
    json: 1,
  });

  // local/debug deployments may update without a token
  const bypassToken = config.local || config.debug;
  if (!token && !bypassToken) {
    return { message: 'token not allow', status: 403 };
  }

  const dataset = await pg
    .query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [token?.dataset || params.id])
    .then((result) => result.rows[0] || {});

  const targetTable = token?.table || dataset.table;

  if (!dataset.id || !datasetForms[dataset.id]) {
    return { message: 'dataset not found', status: 404 };
  }

  return dataUpdate({
    id: token?.id || params?.object_id,
    table: targetTable,
    data: body,
    uid: user?.uid,
  });
}
|
|
@@ -1,75 +1,120 @@
|
|
|
1
|
-
import
|
|
2
|
-
import
|
|
3
|
-
import biDatasetList from './controllers/bi.dataset.list.js'; // get dataset list
|
|
4
|
-
import biDatasetAdd from './controllers/bi.dataset.add.js'; // assign table / import data from body (vue sheet) or file (relative path)
|
|
5
|
-
import biDatasetDel from './controllers/bi.dataset.del.js'; // delete dataset + data
|
|
6
|
-
import biDatasetEdit from './controllers/bi.dataset.edit.js'; // edit dataset structure
|
|
7
|
-
import biDbList from './controllers/bi.db.list.js'; // list db
|
|
8
|
-
import biDatasetDataAdd from './controllers/bi.dataset.data.add.js';
|
|
9
|
-
import biDatasetDataEdit from './controllers/bi.dataset.data.edit.js';
|
|
10
|
-
import biDatasetDataDel from './controllers/bi.dataset.data.del.js';
|
|
1
|
+
import dbTables from './controllers/dbTables.js';
|
|
2
|
+
import dbTablePreview from './controllers/dbTablePreview.js';
|
|
11
3
|
|
|
12
|
-
|
|
4
|
+
import createDatasetPost from './controllers/createDatasetPost.js';
|
|
5
|
+
import editDataset from './controllers/editDataset.js';
|
|
6
|
+
import datasetData from './controllers/data.js';
|
|
7
|
+
import datasetFormData from './controllers/table.js';
|
|
8
|
+
import datasetDataInsert from './controllers/insert.js';
|
|
9
|
+
import datasetDataUpdate from './controllers/update.js';
|
|
10
|
+
import datasetDataDelete from './controllers/delete.js';
|
|
11
|
+
import datasetDataExport from './controllers/export.js';
|
|
12
|
+
import datasetForm from './controllers/form.js';
|
|
13
|
+
import format from './controllers/format.js';
|
|
14
|
+
import datasetEditComment from './controllers/comment.js';
|
|
15
|
+
import biDbList from './controllers/bi.db.list.js'; // list db
|
|
16
|
+
import biDatasetList from './controllers/bi.dataset.list.js'; // list datasets
|
|
13
17
|
|
|
14
18
|
/**
 * Registers all dataset/BI routes on the fastify instance from a declarative
 * table, preserving registration order.
 *
 * @param {object} fastify fastify instance
 * @param {object} opts    plugin options (unused)
 */
export default async function route(fastify, opts) {
  const routes = [
    { method: 'GET', url: '/bi-db-list', schema: {}, handler: biDbList },
    { method: 'GET', url: '/bi-datasets', schema: {}, handler: biDatasetList },
    { method: 'GET', url: '/gis-format', config: { policy: ['site'] }, handler: format },
    { method: 'GET', url: '/db-tables', schema: {}, config: { policy: ['site'] }, handler: dbTables },
    { method: 'GET', url: '/db-tables/:name', schema: {}, config: { policy: ['site'] }, handler: dbTablePreview },
    { method: 'POST', url: '/dataset', schema: {}, config: { policy: ['site'] }, handler: createDatasetPost },
    { method: 'PUT', url: '/dataset', schema: {}, config: { policy: ['site'] }, handler: editDataset },
    { method: 'GET', url: '/dataset-data/:id', schema: {}, config: { policy: ['site'] }, handler: datasetData },
    { method: 'GET', url: '/dataset-data/:id/:object_id', schema: {}, config: { policy: ['site'] }, handler: datasetFormData },
    { method: 'POST', url: '/dataset-data/:id', schema: {}, config: { policy: ['site'] }, handler: datasetDataInsert },
    { method: 'PUT', url: '/dataset-data/:id/:object_id?', schema: {}, config: { policy: ['site'] }, handler: datasetDataUpdate },
    { method: 'DELETE', url: '/dataset-data/:id/:object_id?', schema: {}, config: { policy: ['site'] }, handler: datasetDataDelete },
    { method: 'GET', url: '/dataset-form/:id', schema: {}, config: { policy: ['site'] }, handler: datasetForm },
    { method: 'GET', url: '/dataset-export/:id', schema: {}, config: { policy: ['site'] }, handler: datasetDataExport },
    { method: 'POST', url: '/dataset-comment/:id', config: { policy: ['site'] }, handler: datasetEditComment },
  ];

  routes.forEach((options) => fastify.route(options));
}
|