@opengis/bi 1.2.30 → 1.2.32
This diff shows the published contents of these package versions as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +92 -92
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +88 -86
- package/dist/{import-file-B4o9p2_2.js → import-file-D-ISqB7l.js} +1772 -1593
- package/dist/style.css +1 -1
- package/dist/{vs-funnel-bar-BOWwPnjW.js → vs-funnel-bar-aoZzvriV.js} +3 -3
- package/dist/{vs-list-D8jGusRT.js → vs-list-CBkyJSBj.js} +53 -32
- package/dist/{vs-map-BGplOwpB.js → vs-map-C3C11qmT.js} +102 -91
- package/dist/{vs-map-cluster-CHQJV2As.js → vs-map-cluster-BsPmHIMx.js} +91 -81
- package/dist/{vs-number-jPqxFQ6d.js → vs-number-d58ftpH5.js} +3 -3
- package/dist/{vs-table-BDgIvJbY.js → vs-table-BHa5Velm.js} +6 -6
- package/dist/{vs-text-DcHOffy9.js → vs-text-Bq87gMTx.js} +4 -4
- package/package.json +77 -75
- package/plugin.js +21 -22
- package/server/helpers/mdToHTML.js +17 -17
- package/server/migrations/bi.dataset.sql +46 -46
- package/server/migrations/bi.sql +115 -112
- package/server/plugins/docs.js +48 -48
- package/server/plugins/hook.js +89 -89
- package/server/plugins/vite.js +81 -81
- package/server/routes/dashboard/controllers/dashboard.import.js +103 -103
- package/server/routes/dashboard/controllers/dashboard.js +158 -157
- package/server/routes/dashboard/controllers/dashboard.list.js +60 -60
- package/server/routes/dashboard/controllers/utils/yaml.js +11 -11
- package/server/routes/dashboard/index.mjs +26 -26
- package/server/routes/data/controllers/data.js +230 -229
- package/server/routes/data/controllers/util/chartSQL.js +49 -49
- package/server/routes/data/controllers/util/normalizeData.js +65 -65
- package/server/routes/data/index.mjs +38 -38
- package/server/routes/dataset/controllers/bi.dataset.list.js +29 -29
- package/server/routes/dataset/controllers/bi.db.list.js +19 -19
- package/server/routes/dataset/controllers/comment.js +55 -55
- package/server/routes/dataset/controllers/createDatasetPost.js +134 -134
- package/server/routes/dataset/controllers/data.js +149 -149
- package/server/routes/dataset/controllers/dbTablePreview.js +58 -58
- package/server/routes/dataset/controllers/dbTables.js +34 -34
- package/server/routes/dataset/controllers/delete.js +40 -40
- package/server/routes/dataset/controllers/deleteDataset.js +52 -52
- package/server/routes/dataset/controllers/editDataset.js +90 -90
- package/server/routes/dataset/controllers/export.js +214 -214
- package/server/routes/dataset/controllers/form.js +99 -99
- package/server/routes/dataset/controllers/format.js +46 -46
- package/server/routes/dataset/controllers/insert.js +47 -47
- package/server/routes/dataset/controllers/table.js +68 -68
- package/server/routes/dataset/controllers/update.js +43 -43
- package/server/routes/dataset/index.mjs +132 -132
- package/server/routes/dataset/utils/convertJSONToCSV.js +17 -17
- package/server/routes/dataset/utils/convertJSONToXls.js +47 -47
- package/server/routes/dataset/utils/createTableQuery.js +59 -59
- package/server/routes/dataset/utils/datasetForms.js +1 -1
- package/server/routes/dataset/utils/descriptionList.js +45 -45
- package/server/routes/dataset/utils/downloadRemoteFile.js +58 -58
- package/server/routes/dataset/utils/executeQuery.js +46 -46
- package/server/routes/dataset/utils/getLayersData.js +106 -106
- package/server/routes/dataset/utils/getTableData.js +46 -46
- package/server/routes/dataset/utils/insertDataQuery.js +12 -12
- package/server/routes/dataset/utils/metaFormat.js +24 -24
- package/server/routes/edit/controllers/dashboard.add.js +36 -36
- package/server/routes/edit/controllers/dashboard.delete.js +39 -39
- package/server/routes/edit/controllers/dashboard.edit.js +61 -61
- package/server/routes/edit/controllers/widget.add.js +78 -78
- package/server/routes/edit/controllers/widget.del.js +58 -58
- package/server/routes/edit/controllers/widget.edit.js +115 -115
- package/server/routes/edit/index.mjs +33 -33
- package/server/routes/map/controllers/cluster.js +125 -125
- package/server/routes/map/controllers/clusterVtile.js +166 -166
- package/server/routes/map/controllers/geojson.js +127 -127
- package/server/routes/map/controllers/heatmap.js +118 -118
- package/server/routes/map/controllers/map.js +69 -69
- package/server/routes/map/controllers/utils/downloadClusterData.js +44 -44
- package/server/routes/map/controllers/vtile.js +183 -183
- package/server/routes/map/index.mjs +32 -32
- package/server/templates/page/login.html +58 -58
- package/server/utils/getWidget.js +118 -117
- package/utils.js +12 -12

package/server/routes/dataset/controllers/createDatasetPost.js
@@ -1,134 +1,134 @@
-import path from 'node:path';
-import { existsSync } from 'node:fs';
-
-import { config, pgClients, getFolder, file2json } from '@opengis/fastify-table/utils.js';
-
-import createTableQuery from '../utils/createTableQuery.js';
-import executeQuery from '../utils/executeQuery.js';
-import downloadRemoteFile from '../utils/downloadRemoteFile.js';
-
-const insertDataset = `insert into bi.dataset
-(name, table_name, dataset_file_path, column_list, pk, data_source, uid)
-values($1,$2,$3,$4,$5,$6,$7) returning dataset_id`;
-
-export default async function createDatasetPost({
-  pg = pgClients.client, body = {}, user = {},
-}) {
-  if (!user?.uid) {
-    return { message: 'access restricted', status: 403 };
-  }
-
-  if (!body?.name) {
-    return { message: 'not enough query params: name', status: 400 };
-  }
-
-  if (!body?.table_name && !body?.file && !body?.column_list?.length && !body?.dataset_url) {
-    return { message: 'not enough query params: table / file / column_list/ url', status: 400 };
-  }
-
-  const {
-    name,
-    table_name: existingTable,
-    column_list: columns = [],
-    dataset_url: datasetUrl,
-    encoding,
-  } = body;
-
-  const rootDir = getFolder(config, 'local');
-
-  if (datasetUrl) {
-    const { filePath, error } = await downloadRemoteFile({
-      rootDir, url: datasetUrl, table: name || datasetUrl,
-    });
-    if (error || !filePath) {
-      return { message: error || 'file request URL error', status: 500 };
-    }
-    Object.assign(body, { file: filePath });
-  }
-
-  const { file: relPath } = body;
-
-  if (relPath) {
-    const filepath = path.join(rootDir, relPath);
-    const exists = existsSync(filepath);
-
-    if (!exists) {
-      return { message: 'Файл з вихідними даними не знайдено', status: 404 };
-    }
-
-    const json = await file2json({ filepath, encoding });
-
-    // excel sheets fix?
-    const data1 = (['.xls', '.xlsx'].includes(path.extname(filepath)) && !Array.isArray(json)) ? json[Object.keys(json)[0]] : json;
-    const data = path.extname(filepath) === '.json' && body?.key ? data1?.[body.key] : data1;
-
-    const features = ['.csv', '.xlsx', '.xls'].includes(path.extname(filepath))
-      ? data?.map?.((el) => ({ type: 'Feature', properties: Object.keys(el).reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }))
-      : data?.features || data?.map?.((el) => ({ type: 'Feature', geometry: el.geom, properties: Object.keys(el).filter((key) => key !== 'geom').reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }));
-
-    if (!Array.isArray(features) || !features?.length) {
-      return { message: 'Файл з вихідними даними порожній', status: 400 };
-    }
-
-    Object.assign(data, { features });
-
-    const fileColumns = Object.keys(data?.features[0]?.properties)
-      ?.filter((el) => !['editor_date', 'cdate', 'uid', 'editor_id', 'files'].includes(el.toLowerCase()))
-      ?.map((el) => ({ title: el, format: 'text' }));
-
-    const { sql, pkey, table } = createTableQuery(fileColumns, name);
-
-    const { datasetId, error } = await executeQuery({
-      pg,
-      sql,
-      data,
-      name,
-      table,
-      relPath,
-      columns: fileColumns,
-      pkey,
-      source: datasetUrl ? 'url' : 'file',
-      url: datasetUrl,
-      user,
-      dataKey: body?.key,
-    });
-
-    if (error) return { error, status: 500 };
-
-    pg.pk[table] = pkey;
-    pg.tlist.push(table);
-    return { message: { id: datasetId, table, source: datasetUrl ? 'url' : 'file' }, status: 200 };
-  }
-
-  if (existingTable) {
-    await pg.query(`alter table ${existingTable} add column if not exists geom public.geometry;
-alter table ${existingTable} add column if not exists files json`);
-    const args = [name, existingTable, null, null, pg.pk?.[existingTable], JSON.stringify({ type: 'table' }), user?.uid];
-    const datasetId = await pg.query(insertDataset, args).then(el => el.rows?.[0]?.dataset_id);
-    return { message: { id: datasetId, table: existingTable, source: 'table' }, status: 200 };
-  }
-
-  if (!columns?.length) {
-    return { message: 'У даній заяві відсутні налаштування структури набору даних', status: 400 };
-  }
-
-  const { sql, pkey, table } = createTableQuery(columns, name);
-
-  const { datasetId, error } = await executeQuery({
-    pg,
-    sql,
-    name,
-    table,
-    relPath,
-    columns,
-    pkey,
-    source: 'newtable',
-    user,
-  });
-
-  if (error) return { error, status: 500 };
-
-  pg.pk[table] = pkey;
-  pg.tlist.push(table);
-  return { message: { id: datasetId, table, source: 'newtable' }, status: 200 };
-}
+import path from 'node:path';
+import { existsSync } from 'node:fs';
+
+import { config, pgClients, getFolder, file2json } from '@opengis/fastify-table/utils.js';
+
+import createTableQuery from '../utils/createTableQuery.js';
+import executeQuery from '../utils/executeQuery.js';
+import downloadRemoteFile from '../utils/downloadRemoteFile.js';
+
+const insertDataset = `insert into bi.dataset
+(name, table_name, dataset_file_path, column_list, pk, data_source, uid)
+values($1,$2,$3,$4,$5,$6,$7) returning dataset_id`;
+
+export default async function createDatasetPost({
+  pg = pgClients.client, body = {}, user = {},
+}) {
+  if (!user?.uid) {
+    return { message: 'access restricted', status: 403 };
+  }
+
+  if (!body?.name) {
+    return { message: 'not enough query params: name', status: 400 };
+  }
+
+  if (!body?.table_name && !body?.file && !body?.column_list?.length && !body?.dataset_url) {
+    return { message: 'not enough query params: table / file / column_list/ url', status: 400 };
+  }
+
+  const {
+    name,
+    table_name: existingTable,
+    column_list: columns = [],
+    dataset_url: datasetUrl,
+    encoding,
+  } = body;
+
+  const rootDir = getFolder(config, 'local');
+
+  if (datasetUrl) {
+    const { filePath, error } = await downloadRemoteFile({
+      rootDir, url: datasetUrl, table: name || datasetUrl,
+    });
+    if (error || !filePath) {
+      return { message: error || 'file request URL error', status: 500 };
+    }
+    Object.assign(body, { file: filePath });
+  }
+
+  const { file: relPath } = body;
+
+  if (relPath) {
+    const filepath = path.join(rootDir, relPath);
+    const exists = existsSync(filepath);
+
+    if (!exists) {
+      return { message: 'Файл з вихідними даними не знайдено', status: 404 };
+    }
+
+    const json = await file2json({ filepath, encoding });
+
+    // excel sheets fix?
+    const data1 = (['.xls', '.xlsx'].includes(path.extname(filepath)) && !Array.isArray(json)) ? json[Object.keys(json)[0]] : json;
+    const data = path.extname(filepath) === '.json' && body?.key ? data1?.[body.key] : data1;
+
+    const features = ['.csv', '.xlsx', '.xls'].includes(path.extname(filepath))
+      ? data?.map?.((el) => ({ type: 'Feature', properties: Object.keys(el).reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }))
+      : data?.features || data?.map?.((el) => ({ type: 'Feature', geometry: el.geom, properties: Object.keys(el).filter((key) => key !== 'geom').reduce((acc, curr) => Object.assign(acc, { [curr]: el[curr] }), {}) }));
+
+    if (!Array.isArray(features) || !features?.length) {
+      return { message: 'Файл з вихідними даними порожній', status: 400 };
+    }
+
+    Object.assign(data, { features });
+
+    const fileColumns = Object.keys(data?.features[0]?.properties)
+      ?.filter((el) => !['editor_date', 'cdate', 'uid', 'editor_id', 'files'].includes(el.toLowerCase()))
+      ?.map((el) => ({ title: el, format: 'text' }));
+
+    const { sql, pkey, table } = createTableQuery(fileColumns, name);
+
+    const { datasetId, error } = await executeQuery({
+      pg,
+      sql,
+      data,
+      name,
+      table,
+      relPath,
+      columns: fileColumns,
+      pkey,
+      source: datasetUrl ? 'url' : 'file',
+      url: datasetUrl,
+      user,
+      dataKey: body?.key,
+    });
+
+    if (error) return { error, status: 500 };
+
+    pg.pk[table] = pkey;
+    pg.tlist.push(table);
+    return { message: { id: datasetId, table, source: datasetUrl ? 'url' : 'file' }, status: 200 };
+  }
+
+  if (existingTable) {
+    await pg.query(`alter table ${existingTable} add column if not exists geom public.geometry;
+alter table ${existingTable} add column if not exists files json`);
+    const args = [name, existingTable, null, null, pg.pk?.[existingTable], JSON.stringify({ type: 'table' }), user?.uid];
+    const datasetId = await pg.query(insertDataset, args).then(el => el.rows?.[0]?.dataset_id);
+    return { message: { id: datasetId, table: existingTable, source: 'table' }, status: 200 };
+  }
+
+  if (!columns?.length) {
+    return { message: 'У даній заяві відсутні налаштування структури набору даних', status: 400 };
+  }
+
+  const { sql, pkey, table } = createTableQuery(columns, name);
+
+  const { datasetId, error } = await executeQuery({
+    pg,
+    sql,
+    name,
+    table,
+    relPath,
+    columns,
+    pkey,
+    source: 'newtable',
+    user,
+  });
+
+  if (error) return { error, status: 500 };
+
+  pg.pk[table] = pkey;
+  pg.tlist.push(table);
+  return { message: { id: datasetId, table, source: 'newtable' }, status: 200 };
+}
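
Both sides of this hunk are line-for-line identical, so the 134-line rewrite of createDatasetPost.js is most likely a line-ending or formatting normalization rather than a behavior change. For orientation, the sketch below shows one way the controller could be called directly. The argument shape ({ pg, body, user }), the branch behavior, and the { message, status } return come from the code above; the import path and the column_list payload are illustrative assumptions, not part of the package.

// Hedged usage sketch (not part of the package): exercises the
// column_list branch of createDatasetPost. Assumes a host app where
// @opengis/fastify-table's pgClients.client is already configured.
import createDatasetPost from './server/routes/dataset/controllers/createDatasetPost.js';

const result = await createDatasetPost({
  body: {
    name: 'city_parks',                        // required, else status 400
    column_list: [                             // takes the 'newtable' branch
      { title: 'park_name', format: 'text' },
      { title: 'area_ha', format: 'text' },
    ],
  },
  user: { uid: 42 },                           // required, else status 403
});
// on success: { message: { id, table, source: 'newtable' }, status: 200 }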

package/server/routes/dataset/controllers/data.js
@@ -1,149 +1,149 @@
-import {
-  config, pgClients, getMeta, getFilterSQL, setToken, getToken,
-} from '@opengis/fastify-table/utils.js';
-
-import metaFormat from '../utils/metaFormat.js';
-import datasetFormData from './table.js';
-import datasetFormBody from './form.js';
-
-const maxLimit = 100;
-
-export default async function datasetData(req, reply) {
-  const {
-    pg = pgClients.client, params = {}, query = {}, user = {},
-  } = req;
-  const time = Date.now();
-  const { uid, user_type: userType = 'regular' } = user;
-
-  if (!uid) {
-    // return { message: 'access restricted', status: 403 };
-  }
-
-  if (!params?.id) {
-    return { message: 'not enough params: id', status: 404 };
-  }
-
-  const tokenData = await getToken({
-    uid: user?.uid,
-    token: params.id,
-    mode: 'w',
-    json: 1,
-  });
-
-  if (tokenData?.dataset && tokenData?.id) {
-    return datasetFormData(req, reply);
-  }
-
-  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, filter_list as filters,
-sql_list as sql, style, form_setting, pk, access_level, dashboard_list from bi.dataset where dataset_id=$1`, [params.id]).then(el => el.rows[0] || {});
-
-  if (!dataset?.id) {
-    return { message: 'dataset not found', status: 404 };
-  }
-
-  const tlist = await pg.query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist
-from pg_class where relkind in ('r','v')`).then(el => el.rows[0]?.tlist || []);
-
-  if (!dataset.table || !tlist.includes(dataset.table.replace(/"/g, '')) || (!pg.pk?.[dataset.table.replace(/"/g, '')] && !dataset.pk)) {
-    return { message: `table not found: ${dataset.table}`, status: 404 };
-  }
-
-  const { pk = dataset.pk, columns: dbColumns = [] } = await getMeta(dataset.table) || {};
-  const cols = dataset.columns?.filter((el) => el.name !== 'geom')?.map((el) => el.name || el)?.join(',');
-
-  const limit = Math.min(maxLimit, +(query.limit || 20));
-  const offset = query.page && query.page > 0 ? ` offset ${(query.page - 1) * limit}` : '';
-
-  const checkFilter = [query.filter, query.search].filter((el) => el).length;
-
-  const fData = checkFilter ? await getFilterSQL({
-    table: dataset.table,
-    filter: query.filter,
-    search: query.search,
-    json: 1,
-  }) : {};
-
-  const where = [dataset.query, fData.q].filter((el) => el);
-
-  const sqlTable = dataset.sql
-    ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
-    ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', uid || '')}) ${el.name || `t${i}`} on 1=1 `)
-    ?.join('') || '';
-
-  const columnList = dbColumns.map((el) => el.name || el).join(',');
-  const order = columnList.includes('cdate') ? (`order by cdate ${query.desc ? 'desc' : ''}`) : '';
-
-  const q = `select
-${cols || '*'}
-${pk ? `,"${pk}" as id` : ''}
-${dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text') ? ',files' : ''}
-${dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
-from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')} ${order}) t
-${sqlTable} where ${where.join(' and ') || 'true'} ${order} ${offset} limit ${limit}`.replace(/{{uid}}/g, uid || '');
-
-  if (query.sql === '1' && (config.debug || userType.includes('admin'))) { return q; }
-
-  // console.log(pg.options.database, q);
-  const { rows = [] } = await pg.query(q);
-
-  const qCount = `select
-count(*)::int as total,
-count(*) FILTER(WHERE ${[fData.q].filter(el => el).join(' and ') || 'true'})::int as filtered
-from ${dataset.table} t ${sqlTable}
-where ${[dataset.query].filter(el => el).join(' and ') || 'true'} `.replace(/{{uid}}/g, uid || '');
-
-  const { total, filtered } = await pg.queryCache(qCount).then(el => el?.rows?.[0] || {});
-
-  await metaFormat({ rows, columns: dataset.columns });
-
-  if (rows?.length && uid) {
-    rows.filter((row) => row[pk] || row.id).forEach((row) => Object.assign(row, {
-      token: setToken({
-        ids: [JSON.stringify({
-          id: row[pk] || row.id,
-          table: dataset.table,
-          form: `${params.id}.form`,
-          dataset: params.id,
-        })],
-        uid,
-        array: 1,
-      })[0],
-    }));
-  }
-  const res = {
-    time: Date.now() - time,
-    access: dataset.access_level,
-    card: dataset.card,
-    actions: ['add', 'edit', 'del', 'get'],
-    total,
-    filtered,
-    count: rows.length,
-    pk,
-    table: dataset.table,
-    form: `${params.id}.form`,
-    dashboardList: dataset.dashboard_list,
-    rows,
-    columns: dataset.columns || dbColumns.map(col => ({ name: col.name, type: pg.pgType[col.dataTypeID], title: col.name })),
-    filters: dataset.filters,
-  };
-
-  if (uid) {
-    const addTokens = setToken({
-      ids: [JSON.stringify({
-        table: dataset.table,
-        form: `${params.id}.form`,
-        dataset: dataset.id,
-      })],
-      uid,
-      array: 1,
-    });
-
-    Object.assign(res, {
-      addToken: addTokens[0],
-    });
-    Object.assign(req, { params: { id: addTokens[0] } });
-    await datasetFormBody(req); // for inline edit
-  }
-
-  return res;
-}
+import {
+  config, pgClients, getMeta, getFilterSQL, setToken, getToken,
+} from '@opengis/fastify-table/utils.js';
+
+import metaFormat from '../utils/metaFormat.js';
+import datasetFormData from './table.js';
+import datasetFormBody from './form.js';
+
+const maxLimit = 100;
+
+export default async function datasetData(req, reply) {
+  const {
+    pg = pgClients.client, params = {}, query = {}, user = {},
+  } = req;
+  const time = Date.now();
+  const { uid, user_type: userType = 'regular' } = user;
+
+  if (!uid) {
+    // return { message: 'access restricted', status: 403 };
+  }
+
+  if (!params?.id) {
+    return { message: 'not enough params: id', status: 404 };
+  }
+
+  const tokenData = await getToken({
+    uid: user?.uid,
+    token: params.id,
+    mode: 'w',
+    json: 1,
+  });
+
+  if (tokenData?.dataset && tokenData?.id) {
+    return datasetFormData(req, reply);
+  }
+
+  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, filter_list as filters,
+sql_list as sql, style, form_setting, pk, access_level, dashboard_list from bi.dataset where dataset_id=$1`, [params.id]).then(el => el.rows[0] || {});
+
+  if (!dataset?.id) {
+    return { message: 'dataset not found', status: 404 };
+  }
+
+  const tlist = await pg.query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist
+from pg_class where relkind in ('r','v')`).then(el => el.rows[0]?.tlist || []);
+
+  if (!dataset.table || !tlist.includes(dataset.table.replace(/"/g, '')) || (!pg.pk?.[dataset.table.replace(/"/g, '')] && !dataset.pk)) {
+    return { message: `table not found: ${dataset.table}`, status: 404 };
+  }
+
+  const { pk = dataset.pk, columns: dbColumns = [] } = await getMeta(dataset.table) || {};
+  const cols = dataset.columns?.filter((el) => el.name !== 'geom')?.map((el) => el.name || el)?.join(',');
+
+  const limit = Math.min(maxLimit, +(query.limit || 20));
+  const offset = query.page && query.page > 0 ? ` offset ${(query.page - 1) * limit}` : '';
+
+  const checkFilter = [query.filter, query.search].filter((el) => el).length;
+
+  const fData = checkFilter ? await getFilterSQL({
+    table: dataset.table,
+    filter: query.filter,
+    search: query.search,
+    json: 1,
+  }) : {};
+
+  const where = [dataset.query, fData.q].filter((el) => el);
+
+  const sqlTable = dataset.sql
+    ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
+    ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', uid || '')}) ${el.name || `t${i}`} on 1=1 `)
+    ?.join('') || '';
+
+  const columnList = dbColumns.map((el) => el.name || el).join(',');
+  const order = columnList.includes('cdate') ? (`order by cdate ${query.desc ? 'desc' : ''}`) : '';
+
+  const q = `select
+${cols || '*'}
+${pk ? `,"${pk}" as id` : ''}
+${dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text') ? ',files' : ''}
+${dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
+from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')} ${order}) t
+${sqlTable} where ${where.join(' and ') || 'true'} ${order} ${offset} limit ${limit}`.replace(/{{uid}}/g, uid || '');
+
+  if (query.sql === '1' && (config.debug || userType.includes('admin'))) { return q; }
+
+  // console.log(pg.options.database, q);
+  const { rows = [] } = await pg.query(q);
+
+  const qCount = `select
+count(*)::int as total,
+count(*) FILTER(WHERE ${[fData.q].filter(el => el).join(' and ') || 'true'})::int as filtered
+from ${dataset.table} t ${sqlTable}
+where ${[dataset.query].filter(el => el).join(' and ') || 'true'} `.replace(/{{uid}}/g, uid || '');
+
+  const { total, filtered } = await pg.queryCache(qCount).then(el => el?.rows?.[0] || {});
+
+  await metaFormat({ rows, columns: dataset.columns });
+
+  if (rows?.length && uid) {
+    rows.filter((row) => row[pk] || row.id).forEach((row) => Object.assign(row, {
+      token: setToken({
+        ids: [JSON.stringify({
+          id: row[pk] || row.id,
+          table: dataset.table,
+          form: `${params.id}.form`,
+          dataset: params.id,
+        })],
+        uid,
+        array: 1,
+      })[0],
+    }));
+  }
+  const res = {
+    time: Date.now() - time,
+    access: dataset.access_level,
+    card: dataset.card,
+    actions: ['add', 'edit', 'del', 'get'],
+    total,
+    filtered,
+    count: rows.length,
+    pk,
+    table: dataset.table,
+    form: `${params.id}.form`,
+    dashboardList: dataset.dashboard_list,
+    rows,
+    columns: dataset.columns || dbColumns.map(col => ({ name: col.name, type: pg.pgType[col.dataTypeID], title: col.name })),
+    filters: dataset.filters,
+  };
+
+  if (uid) {
+    const addTokens = setToken({
+      ids: [JSON.stringify({
+        table: dataset.table,
+        form: `${params.id}.form`,
+        dataset: dataset.id,
+      })],
+      uid,
+      array: 1,
+    });
+
+    Object.assign(res, {
+      addToken: addTokens[0],
+    });
+    Object.assign(req, { params: { id: addTokens[0] } });
+    await datasetFormBody(req); // for inline edit
+  }
+
+  return res;
+}
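
As with the previous file, the old and new sides of this hunk match line for line. The sketch below is a hedged illustration of calling the datasetData handler directly; the req fields mirror what the handler destructures (params.id, paging in query, user), while the concrete values and the import path are assumptions. Route registration and auth wiring live in the surrounding index.mjs, which this diff does not reproduce here.

// Hedged usage sketch (not part of the package): calls the handler with a
// minimal req object. pgClients.client must already be configured by the
// host @opengis/fastify-table app for the default pg to work.
import datasetData from './server/routes/dataset/controllers/data.js';

const req = {
  params: { id: '17' },                        // bi.dataset primary key (or a token)
  query: { limit: '200', page: 2, desc: 1 },   // limit is clamped to maxLimit (100)
  user: { uid: 42, user_type: 'regular' },     // 'admin' types (or config.debug) may pass ?sql=1
};

const res = await datasetData(req /* reply is only forwarded to table.js */);
// => { time, total, filtered, count, rows, columns, pk, table, form, ... }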