@opengis/bi 1.2.0 → 1.2.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as published in their public registry.
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +42 -42
- package/dist/import-file-D8jh74Dz.js +3543 -0
- package/dist/{vs-funnel-bar-C_TceUrc.js → vs-funnel-bar-T330oJNS.js} +3 -3
- package/dist/{vs-list-DyhLUIPb.js → vs-list-DeHF_Oaf.js} +109 -109
- package/dist/{vs-map-BtQJNN4L.js → vs-map-Skt608pM.js} +8 -8
- package/dist/{vs-map-cluster-BbPUosvt.js → vs-map-cluster-BRUiY_90.js} +21 -21
- package/dist/{vs-number-D2GkU586.js → vs-number-Dd_21nn-.js} +3 -3
- package/dist/{vs-table-D_Yn9QqB.js → vs-table-BwC29Zyc.js} +6 -6
- package/dist/{vs-text-BivVd6cY.js → vs-text-DEJjWxDu.js} +32 -39
- package/package.json +77 -76
- package/plugin.js +22 -0
- package/server/helpers/mdToHTML.js +17 -0
- package/server/migrations/bi.dataset.sql +46 -0
- package/server/migrations/bi.sql +112 -0
- package/server/plugins/docs.js +48 -0
- package/server/plugins/hook.js +89 -0
- package/server/plugins/vite.js +69 -0
- package/server/routes/dashboard/controllers/dashboard.import.js +103 -0
- package/server/routes/dashboard/controllers/dashboard.js +157 -0
- package/server/routes/dashboard/controllers/dashboard.list.js +40 -0
- package/server/routes/dashboard/controllers/utils/yaml.js +11 -0
- package/server/routes/dashboard/index.mjs +26 -0
- package/server/routes/data/controllers/data.js +230 -0
- package/server/routes/data/controllers/util/chartSQL.js +49 -0
- package/server/routes/data/controllers/util/normalizeData.js +65 -0
- package/server/routes/data/index.mjs +32 -0
- package/server/routes/dataset/controllers/bi.dataset.list.js +29 -0
- package/server/routes/dataset/controllers/bi.db.list.js +19 -0
- package/server/routes/dataset/controllers/comment.js +55 -0
- package/server/routes/dataset/controllers/createDatasetPost.js +134 -0
- package/server/routes/dataset/controllers/data.js +149 -0
- package/server/routes/dataset/controllers/dbTablePreview.js +58 -0
- package/server/routes/dataset/controllers/dbTables.js +34 -0
- package/server/routes/dataset/controllers/delete.js +40 -0
- package/server/routes/dataset/controllers/deleteDataset.js +52 -0
- package/server/routes/dataset/controllers/editDataset.js +90 -0
- package/server/routes/dataset/controllers/export.js +214 -0
- package/server/routes/dataset/controllers/form.js +99 -0
- package/server/routes/dataset/controllers/format.js +46 -0
- package/server/routes/dataset/controllers/insert.js +47 -0
- package/server/routes/dataset/controllers/table.js +68 -0
- package/server/routes/dataset/controllers/update.js +43 -0
- package/server/routes/dataset/index.mjs +132 -0
- package/server/routes/dataset/utils/convertJSONToCSV.js +17 -0
- package/server/routes/dataset/utils/convertJSONToXls.js +47 -0
- package/server/routes/dataset/utils/createTableQuery.js +59 -0
- package/server/routes/dataset/utils/datasetForms.js +1 -0
- package/server/routes/dataset/utils/descriptionList.js +46 -0
- package/server/routes/dataset/utils/downloadRemoteFile.js +58 -0
- package/server/routes/dataset/utils/executeQuery.js +46 -0
- package/server/routes/dataset/utils/getLayersData.js +107 -0
- package/server/routes/dataset/utils/getTableData.js +47 -0
- package/server/routes/dataset/utils/insertDataQuery.js +12 -0
- package/server/routes/dataset/utils/metaFormat.js +24 -0
- package/server/routes/edit/controllers/dashboard.add.js +36 -0
- package/server/routes/edit/controllers/dashboard.delete.js +39 -0
- package/server/routes/edit/controllers/dashboard.edit.js +61 -0
- package/server/routes/edit/controllers/widget.add.js +78 -0
- package/server/routes/edit/controllers/widget.del.js +58 -0
- package/server/routes/edit/controllers/widget.edit.js +106 -0
- package/server/routes/edit/index.mjs +33 -0
- package/server/routes/map/controllers/cluster.js +125 -0
- package/server/routes/map/controllers/clusterVtile.js +166 -0
- package/server/routes/map/controllers/geojson.js +127 -0
- package/server/routes/map/controllers/heatmap.js +118 -0
- package/server/routes/map/controllers/map.js +69 -0
- package/server/routes/map/controllers/utils/downloadClusterData.js +45 -0
- package/server/routes/map/controllers/vtile.js +183 -0
- package/server/routes/map/index.mjs +32 -0
- package/server/templates/page/login.html +59 -0
- package/server/utils/getWidget.js +117 -0
- package/utils.js +12 -0
- package/dist/import-file-Bx4xpxVb.js +0 -3493

package/server/routes/dataset/controllers/data.js
@@ -0,0 +1,149 @@
+import {
+  config, pgClients, getMeta, getFilterSQL, setToken, getToken,
+} from '@opengis/fastify-table/utils.js';
+
+import metaFormat from '../utils/metaFormat.js';
+import datasetFormData from './table.js';
+import datasetFormBody from './form.js';
+
+const maxLimit = 100;
+
+export default async function datasetData(req, reply) {
+  const {
+    pg = pgClients.client, params = {}, query = {}, user = {},
+  } = req;
+  const time = Date.now();
+  const { uid, user_type: userType = 'regular' } = user;
+
+  if (!uid) {
+    // return { message: 'access restricted', status: 403 };
+  }
+
+  if (!params?.id) {
+    return { message: 'not enough params: id', status: 404 };
+  }
+
+  const tokenData = await getToken({
+    uid: user?.uid,
+    token: params.id,
+    mode: 'w',
+    json: 1,
+  });
+
+  if (tokenData?.dataset && tokenData?.id) {
+    return datasetFormData(req, reply);
+  }
+
+  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, filter_list as filters,
+    sql_list as sql, style, form_setting, pk, access_level, dashboard_list from bi.dataset where dataset_id=$1`, [params.id]).then(el => el.rows[0] || {});
+
+  if (!dataset?.id) {
+    return { message: 'dataset not found', status: 404 };
+  }
+
+  const tlist = await pg.query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist
+    from pg_class where relkind in ('r','v')`).then(el => el.rows[0]?.tlist || []);
+
+  if (!dataset.table || !tlist.includes(dataset.table.replace(/"/g, '')) || (!pg.pk?.[dataset.table.replace(/"/g, '')] && !dataset.pk)) {
+    return { message: `table not found: ${dataset.table}`, status: 404 };
+  }
+
+  const { pk = dataset.pk, columns: dbColumns = [] } = await getMeta(dataset.table) || {};
+  const cols = dataset.columns?.filter((el) => el.name !== 'geom')?.map((el) => el.name || el)?.join(',');
+
+  const limit = Math.min(maxLimit, +(query.limit || 20));
+  const offset = query.page && query.page > 0 ? ` offset ${(query.page - 1) * limit}` : '';
+
+  const checkFilter = [query.filter, query.search].filter((el) => el).length;
+
+  const fData = checkFilter ? await getFilterSQL({
+    table: dataset.table,
+    filter: query.filter,
+    search: query.search,
+    json: 1,
+  }) : {};
+
+  const where = [dataset.query, fData.q].filter((el) => el);
+
+  const sqlTable = dataset.sql
+    ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
+    ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', uid || '')}) ${el.name || `t${i}`} on 1=1 `)
+    ?.join('') || '';
+
+  const columnList = dbColumns.map((el) => el.name || el).join(',');
+  const order = columnList.includes('cdate') ? (`order by cdate ${query.desc ? 'desc' : ''}`) : '';
+
+  const q = `select
+    ${cols || '*'}
+    ${pk ? `,"${pk}" as id` : ''}
+    ${dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text') ? ',files' : ''}
+    ${dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
+    from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')} ${order}) t
+    ${sqlTable} where ${where.join(' and ') || 'true'} ${order} ${offset} limit ${limit}`.replace(/{{uid}}/g, uid || '');
+
+  if (query.sql === '1' && (config.debug || userType.includes('admin'))) { return q; }
+
+  // console.log(pg.options.database, q);
+  const { rows = [] } = await pg.query(q);
+
+  const qCount = `select
+    count(*)::int as total,
+    count(*) FILTER(WHERE ${[fData.q].filter(el => el).join(' and ') || 'true'})::int as filtered
+    from ${dataset.table} t ${sqlTable}
+    where ${[dataset.query].filter(el => el).join(' and ') || 'true'} `.replace(/{{uid}}/g, uid || '');
+
+  const { total, filtered } = await pg.queryCache(qCount).then(el => el?.rows?.[0] || {});
+
+  await metaFormat({ rows, columns: dataset.columns });
+
+  if (rows?.length && uid) {
+    rows.filter((row) => row[pk] || row.id).forEach((row) => Object.assign(row, {
+      token: setToken({
+        ids: [JSON.stringify({
+          id: row[pk] || row.id,
+          table: dataset.table,
+          form: `${params.id}.form`,
+          dataset: params.id,
+        })],
+        uid,
+        array: 1,
+      })[0],
+    }));
+  }
+  const res = {
+    time: Date.now() - time,
+    access: dataset.access_level,
+    card: dataset.card,
+    actions: ['add', 'edit', 'del', 'get'],
+    total,
+    filtered,
+    count: rows.length,
+    pk,
+    table: dataset.table,
+    form: `${params.id}.form`,
+    dashboardList: dataset.dashboard_list,
+    rows,
+    columns: dataset.columns || dbColumns.map(col => ({ name: col.name, type: pg.pgType[col.dataTypeID], title: col.name })),
+    filters: dataset.filters,
+  };
+
+  if (uid) {
+    const addTokens = setToken({
+      ids: [JSON.stringify({
+        table: dataset.table,
+        form: `${params.id}.form`,
+        dataset: dataset.id,
+      })],
+      uid,
+      array: 1,
+    });
+
+    Object.assign(res, {
+      addToken: addTokens[0],
+    });
+    Object.assign(req, { params: { id: addTokens[0] } });
+    await datasetFormBody(req); // for inline edit
+  }
+
+  return res;
+}
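
For orientation, here is a minimal client-side sketch of reading one page of dataset rows. The route path and dataset id are assumptions (routes are registered in package/server/routes/dataset/index.mjs, which this diff does not expand); the query parameters (limit, page, filter, search, desc, sql) and the response fields are taken from the handler above.

// Hypothetical URL; only the query parameters and response fields are
// grounded in datasetData() above.
const res = await fetch('/api/bi/dataset/DATASET_ID/data?limit=20&page=1', {
  headers: { accept: 'application/json' },
});
const data = await res.json();

console.log(data.total, data.filtered, data.count); // counts from qCount / rows.length
console.log(data.pk, data.table, data.form);        // metadata echoed back
console.table(data.rows);                           // at most maxLimit (100) rows per page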

package/server/routes/dataset/controllers/dbTablePreview.js
@@ -0,0 +1,58 @@
+import { pgClients } from "@opengis/fastify-table/utils.js";
+
+const q = `select nspname||'.'||relname as table, json_agg(json_build_object('name',attname, 'type', a.atttypid::regtype, 'description', coalesce(col_description(attrelid, attnum),attname))) as columns
+from pg_attribute a
+left join pg_catalog.pg_attrdef d ON (a.attrelid, a.attnum) = (d.adrelid, d.adnum)
+JOIN pg_class AS i
+ON i.oid = a.attrelid
+JOIN pg_namespace AS NS ON i.relnamespace = NS.OID
+where a.attnum > 0 and nspname||'.'||relname = $1
+and not a.attisdropped
+group by nspname||'.'||relname limit 1`;
+
+export default async function dbTablePreview({ pg = pgClients.client, params = {}, query = {} }) {
+  if (!params?.name) {
+    return { message: 'not enough params: name', status: 400 };
+  }
+
+  if (query.sql) return q;
+  try {
+    const { table, columns } = await pg
+      .query(q, [params.name.replace(/"/g, '')])
+      .then(el => el.rows?.[0] || {});
+    if (!table) {
+      return { message: 'table not found', status: 404 };
+    }
+
+    const { count = 0 } = await pg.query('select reltuples as count from pg_class where oid = to_regclass($1)', [params.name])
+      .then(el => el.rows?.[0] || {});
+    const geom = columns.find((el) => el.type === 'geometry')?.name;
+    const { bounds, extentStr } = geom
+      ? await pg.query(`select
+        count(*),
+        st_asgeojson(st_extent(${geom}))::json as bounds,
+        replace(regexp_replace(st_extent(${geom})::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
+        from ${params.name}`).then(el => el.rows?.[0] || {})
+      : {};
+    const extent = extentStr ? extentStr.split(',') : undefined;
+
+    const systemColumns = [
+      'uid',
+      'files',
+      'editor_date',
+      'cdate',
+      'editor_id',
+      geom,
+    ];
+
+    const columnList = columns.map((el) => el?.name).filter((el) => !systemColumns.includes(el));
+    const { rows = [] } = await pg.query(`select ${columnList.map(el => `"${el.replace(/'/g, "''")}"`).join(',')} ${geom ? ', st_asgeojson(geom)::json as geom' : ''} from ${params.name.replace(/'/g, '')} limit 10`);
+
+    return {
+      count, geom: !!geom, bounds, extent, columns, rows,
+    };
+  }
+  catch (err) {
+    return { error: err.toString(), status: 500 };
+  }
+}
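
As a standalone illustration of the catalog query above, the sketch below runs the same column lookup with the plain node-postgres driver. The pg dependency, DATABASE_URL and the table name are assumptions; the query body mirrors the handler's.

// Minimal sketch, assuming the 'pg' npm package and a DATABASE_URL env var.
import pg from 'pg';

const client = new pg.Client({ connectionString: process.env.DATABASE_URL });
await client.connect();

// Same pg_attribute lookup the handler uses to describe a table's columns.
const { rows } = await client.query(
  `select json_agg(json_build_object(
      'name', attname,
      'type', a.atttypid::regtype,
      'description', coalesce(col_description(attrelid, attnum), attname))) as columns
     from pg_attribute a
     join pg_class i on i.oid = a.attrelid
     join pg_namespace ns on i.relnamespace = ns.oid
    where a.attnum > 0 and not a.attisdropped
      and nspname || '.' || relname = $1`,
  ['data_user.my_table'], // hypothetical table name
);
console.log(rows[0]?.columns);
await client.end();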

package/server/routes/dataset/controllers/dbTables.js
@@ -0,0 +1,34 @@
+import { pgClients } from "@opengis/fastify-table/utils.js";
+
+export default async function dbTables({ pg = pgClients.client, query = {} }) {
+  const q = `select
+    t.table_schema ||'."'|| t.table_name ||'"' as table,
+    obj_description(to_regclass(t.table_schema ||'."'|| t.table_name||'"')) as description,
+    t.table_schema as schema,
+    (select reltuples from pg_class where oid = to_regclass(t.table_schema ||'."'|| t.table_name||'"') ) as total,
+    coalesce(isgeom,false) as isgeom
+
+    from information_schema.tables t
+    left join lateral(
+      select true as isgeom from information_schema.columns c
+      where c.table_name = t.table_name
+      and c.table_schema = t.table_schema and 'geometry'=c.udt_name limit 1
+    )c on 1=1
+
+    where t.table_type = 'BASE TABLE'
+    and t.table_schema not in ('public','log','admin','feature_ir','gis', 'setting')
+    and t.table_name not like '%.%'
+    and regexp_replace(t.table_name, '^[[:digit:]]', '', 'g') = t.table_name
+    and 1=(SELECT count(*) FROM pg_catalog.pg_constraint con
+      INNER JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid
+      INNER JOIN pg_catalog.pg_namespace nsp ON nsp.oid = connamespace
+      WHERE nsp.nspname = t.table_schema AND rel.relname = t.table_name and contype='p'
+    )
+    and isgeom
+    order by total desc`;
+
+  if (query.sql) return q;
+
+  const { rows = [] } = await pg.queryCache(q, { time: 0 });
+  return { rows };
+}
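
A hedged usage sketch: the route path is an assumption, but the sql flag and the shape of each returned row ({ table, description, schema, total, isgeom }) follow directly from the handler.

// List candidate source tables (hypothetical path).
const { rows } = await fetch('/api/bi/db-tables').then((r) => r.json());
console.log(rows.filter((t) => t.isgeom).map((t) => t.table));

// Any truthy `sql` query value returns the generated catalog query instead of
// rows, which is handy for debugging.
const sql = await fetch('/api/bi/db-tables?sql=1').then((r) => r.text());
console.log(sql);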

package/server/routes/dataset/controllers/delete.js
@@ -0,0 +1,40 @@
+import {
+  config, getToken, dataDelete, pgClients,
+} from '@opengis/fastify-table/utils.js';
+
+export default async function datasetDataDelete({
+  pg = pgClients.client, params = {}, user = {},
+}) {
+  if (!user?.uid) {
+    return { message: 'access restricted', status: 403 };
+  }
+
+  const tokenData = await getToken({
+    uid: user?.uid,
+    token: params.id,
+    mode: 'a',
+    json: 1,
+  });
+
+  if (!tokenData && !config.local && !config.debug) {
+    return { message: 'token not allow', status: 403 };
+  }
+
+  const dataset = await pg.query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [tokenData?.dataset || params.id])
+    .then(el => el.rows[0] || {});
+
+  const table = tokenData?.table || dataset.table;
+
+  if (!dataset.id) {
+    return { message: 'dataset not found', status: 404 };
+  }
+
+  const res = await dataDelete({
+    pg,
+    id: tokenData?.id || params?.object_id,
+    table,
+    uid: user?.uid,
+  });
+
+  return { rowCount: res.rowCount, msg: !res.rowCount ? res : null };
+}
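
A minimal sketch of a single-row delete, assuming the route path; per the handler, params.id is expected to be an access token that resolves to the dataset, table and row id, such as the per-row token attached by data.js above.

const rowToken = 'ROW_TOKEN'; // token issued per row by the list endpoint (hypothetical value)
const result = await fetch(`/api/bi/dataset-data/${rowToken}`, { method: 'DELETE' })
  .then((r) => r.json());

// rowCount is 1 on success; msg carries the dataDelete() result when nothing was removed.
console.log(result.rowCount, result.msg);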

package/server/routes/dataset/controllers/deleteDataset.js
@@ -0,0 +1,52 @@
+import { dataDelete, pgClients } from '@opengis/fastify-table/utils.js';
+
+/**
+ * Видалення набору даних
+ *
+ * @method DELETE
+ * @summary Видалення набору даних
+ * @priority 4
+ * @alias deleteDataset
+ * @type api
+ * @tag bi
+ * @param {Object} query.id Dataset ID
+ * @errors 400,500
+ * @returns {Number} status Номер помилки
+ * @returns {String} error Опис помилки
+ * @returns {Object} rows Масив з колонками таблиці
+ */
+
+export default async function deleteDataset({
+  pg = pgClients.client, query = {}, user = {},
+}) {
+  if (!user?.uid) {
+    return { message: 'access restricted', status: 403 };
+  }
+
+  if (!query?.id) {
+    return { message: 'not enough params: id', status: 404 };
+  }
+
+  const dataset = await pg.query('select dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [query.id])
+    .then(el => el.rows[0] || {});
+
+  if (!dataset.id) {
+    return { message: 'dataset not found', status: 404 };
+  }
+
+  const table = dataset.table && pg.pk[dataset.table] ? dataset.table : dataset.table?.replace(/"/g, '');
+
+  const dropTable = table && pg.pk?.[table] && table.startsWith('data_user.');
+  if (dropTable) {
+    await pg.query(`drop table if exists ${table}`);
+  }
+
+  await dataDelete({
+    pg,
+    table: 'bi.dataset',
+    id: dataset.id,
+    uid: user.uid,
+  });
+
+  return { message: { id: dataset?.id, table: dropTable ? dataset.table : undefined }, status: 200 };
+}
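
A hedged call sketch; only the id query parameter, the response shape, and the rule that backing tables are dropped only inside the data_user schema come from the handler, while the path itself is an assumption.

const res = await fetch('/api/bi/dataset?id=DATASET_ID', { method: 'DELETE' })
  .then((r) => r.json());

// On success: { message: { id, table? }, status: 200 }; `table` is present only
// when the backing data_user.* table was dropped together with the record.
console.log(res);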

package/server/routes/dataset/controllers/editDataset.js
@@ -0,0 +1,90 @@
+import { dataUpdate, pgClients } from '@opengis/fastify-table/utils.js';
+
+const columnTypeMatch = {
+  text: 'text',
+  select: 'text',
+  date: 'date',
+  'yes/no': 'boolean',
+  badge: 'text',
+  number: 'numeric',
+  tags: 'text[]',
+  geom: 'geom',
+};
+
+/**
+ * Редагування структури набору даних
+ *
+ * @method PUT
+ * @summary Редагування структури набору даних
+ * @priority 4
+ * @alias editDataset
+ * @type api
+ * @tag bi
+ * @param {Object} query.id Видалення ID
+ * @errors 400,500
+ * @returns {Number} status Номер помилки
+ * @returns {String} error Опис помилки
+ * @returns {Object} rows Масив з колонками таблиці
+ */
+
+export default async function editDataset(req) {
+  const {
+    pg = pgClients.client, query = {}, body = {}, user = {},
+  } = req;
+  if (!user?.uid) {
+    return { message: 'access restricted', status: 403 };
+  }
+
+  if (!query?.id) {
+    return { message: 'not enough params: id', status: 404 };
+  }
+
+  if (!body.column_list?.length) {
+    return { message: 'not enough params: columns', status: 400 };
+  }
+
+  const dataset = await pg.query('select dataset_id as id, table_name as table, column_list as columns from bi.dataset where dataset_id=$1', [query.id])
+    .then(el => el.rows[0] || {});
+
+  if (!dataset?.id) {
+    return { message: 'dataset not found', status: 404 };
+  }
+
+  if (!dataset.table || !pg.pk?.[dataset.table]) {
+    return { message: `table not found: ${dataset.table}`, status: 404 };
+  }
+
+  if (!dataset.table.startsWith('data_user.')) {
+    return { message: 'access restricted: source', status: 403 };
+  }
+
+  const { fields = [] } = await pg.query(`select * from ${dataset.table} limit 0`);
+  const columnList = fields.map((col) => col.name);
+
+  const columns = (dataset.columns || [])
+    .concat(body.column_list.filter((col) => !col.name || !columnList.includes(col.name)))
+    .map((col, idx) => ({
+      ...col,
+      name: col.name && columnList.includes(col.name) ? col.name : `col_${idx}`,
+      disabled: col.name && !body.column_list.find((item) => item.name === col.name),
+    }));
+
+  await dataUpdate({
+    pg,
+    table: 'bi.dataset',
+    data: { column_list: columns },
+    id: dataset.id,
+    uid: user.uid,
+  });
+
+  const sqlList = columns
+    .filter((col) => col.name.startsWith('col_') || (col.title && dataset.columns?.find?.((item) => item.name === col.name)?.title !== col.title))
+    .map((col) => `alter table ${dataset.table} add column if not exists ${col.name} ${columnTypeMatch[col.type] || 'text'};
+      comment on column ${dataset.table}."${col.name}" is '${(col.title || col.name).replace(/'/g, "''")}'`);
+
+  if (sqlList.length) {
+    await pg.query(sqlList.join(';'));
+  }
+
+  return { message: { id: dataset?.id, table: dataset.table, columns }, status: 200 };
+}
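
A sketch of the structure-edit payload, assuming the route path; the column `type` values are the keys of columnTypeMatch above, and columns that do not match an existing table column are materialized as col_N via alter table add column.

// Illustrative payload; column names are hypothetical and `type` must be one
// of the columnTypeMatch keys (text, select, date, yes/no, badge, number, tags, geom).
const body = {
  column_list: [
    { name: 'road_name', title: 'Road name', type: 'text' },
    { name: 'length_km', title: 'Length, km', type: 'number' },
    { title: 'Commissioned', type: 'date' }, // unnamed columns are stored as col_N
  ],
};

await fetch('/api/bi/dataset?id=DATASET_ID', {
  method: 'PUT',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify(body),
});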

package/server/routes/dataset/controllers/export.js
@@ -0,0 +1,214 @@
+/* eslint-disable no-await-in-loop */
+import path from 'node:path';
+import { createHash } from 'node:crypto';
+import { existsSync, rmSync } from 'node:fs';
+
+import { readFile, writeFile, mkdir } from 'node:fs/promises';
+
+import {
+  config, pgClients, getMeta, getFolder, getFilterSQL, eventStream, logger,
+} from '@opengis/fastify-table/utils.js';
+
+// import convertJSONToXls from '../utils/convertJSONToXls.js';
+import convertJSONToCSV from '../utils/convertJSONToCSV.js';
+import metaFormat from '../utils/metaFormat.js';
+
+export default async function datasetDataExport(req, reply) {
+  const {
+    pg = pgClients.client, params = {}, query = {}, user = {}, unittest,
+  } = req;
+
+  if (!user?.uid) {
+    return { message: 'access restricted', status: 403 };
+  }
+
+  const { user_type: userType = 'regular' } = user;
+
+  if (!params?.id) {
+    return { message: 'not enough params: id', status: 404 };
+  }
+
+  const { format = 'json' } = query;
+
+  const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns,
+    export_columns as "exportColumns", sql_list as sql from bi.dataset where dataset_id=$1`, [params.id])
+    .then(el => el.rows[0] || {});
+
+  if (!dataset?.id || !dataset.table) {
+    return { message: 'dataset not found', status: 404 };
+  }
+
+  const { pk = dataset.pk, view, columns: dbColumns = [] } = await getMeta(dataset.table);
+
+  if (!dataset.table || (!pg.pk?.[dataset.table] && !pk && !view)) {
+    return { message: `table not found: ${dataset.table}`, status: 404 };
+  }
+
+  const rootDir = getFolder(req, 'local');
+
+  const date = new Date();
+  const sufixName = `${query.filter || '1=1'}-${query.col || 'all'}-${query.search || '1=1'}-${query.limit || 'unlimited'}`;
+  const sufixDate = [date.getFullYear(), date.getMonth(), date.getDate(), date.getHours()].join('-');
+
+  const objInfo = createHash('md5').update([sufixName, sufixDate].join('-')).digest('hex');
+  const fileName = dataset.table.concat('_').concat(objInfo).concat('.').concat(format);
+  const filepath = path.join(rootDir, '/files/temp', fileName);
+
+  if (existsSync(filepath) && query.nocache) {
+    rmSync(filepath);
+  }
+
+  if (existsSync(filepath) && !query?.sql) {
+    const encoding = path.extname(filepath) === '.xlsx' ? 'base64' : 'utf8';
+    const data = await readFile(filepath, encoding);
+    return reply.headers({ 'Content-Disposition': `attachment; filename=${path.basename(filepath)}` }).send(Buffer.from(data, encoding));
+  }
+
+  const fData = await getFilterSQL({
+    filter: query.filter,
+    search: query?.search,
+    table: dataset.table,
+    json: 1,
+  });
+
+  const where = [(params.object_id ? ` "${pk}" = $1` : null), dataset.query, fData.q].filter((el) => el);
+
+  const cols = dataset.exportColumns
+    || dataset.columns?.map((el) => el.name);
+
+  if (!cols?.length) {
+    return { message: `empty export columns: ${dataset.id}`, status: 400 };
+  }
+
+  const sqlTable = dataset.sql
+    ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
+    ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', user?.uid)}) ${el.name || `t${i}`} on 1=1 `)
+    ?.join('') || '';
+
+  const { total = '0' } = await pg.query(`select count(*) as total from ${dataset.table}
+    where ${dataset.query || 'true'} and ${params.object_id ? `${pk}='${params.object_id}'` : '1=1'}`)
+    .then(el => el.rows?.[0] || {});
+
+  if (total === '0') {
+    return { message: 'Немає даних, які можна експортувати', status: 200 };
+  }
+
+  const ext = path.extname(filepath);
+  const filepathJSON = ['csv', 'xlsx'].includes(format) ? filepath.replace(ext, '.json') : filepath;
+
+  const cacheFileJSON = existsSync(filepathJSON);
+
+  const send = +total < 10000 || unittest
+    ? console.log
+    : eventStream(reply);
+
+  if (cacheFileJSON && !query.sql) {
+    if (query.nocache) rmSync(filepathJSON); // delete old file, prevent append
+    if (!query.nocache && ['xlsx', 'csv'].includes(format)) {
+      send(`Знайдено файл формату json. Пропуск обробки даних. Початок конвертації в ${format}...`);
+      if (format === 'xlsx') {
+        return 'temporary not work';
+        return convertJSONToXls({
+          filepathJSON,
+          colmodel: dataset.columns,
+          domain: req.hostname,
+          source: dataset.table,
+          send,
+        });
+      }
+      if (format === 'csv') {
+        return convertJSONToCSV({ send, filepathJSON });
+      }
+    }
+  }
+
+  const limit = Math.min(query.limit, 10000) || 10000;
+
+  const optimizedSQL = `select ${pk ? `"${pk}" as id,` : ''} *
    ${['geojson', 'shp'].includes(format) && dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
+    from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')}) t
+    ${sqlTable} where ${where.join(' and ') || 'true'} offset 0 limit ${limit}`.replace(/{{uid}}/g, user?.uid);
+
+  if (query.sql === '1' && (config.debug || userType.includes('admin'))) return optimizedSQL;
+
+  const res = {};
+  let offset = 0;
+  let seq = 0;
+
+  send(`Всього в реєстрі: ${total}`);
+
+  while ((+total - offset > 0) && !res?.error) {
+    try {
+      const q = optimizedSQL.replace(/offset 0/g, `offset ${offset}`);
+      const { rows = [] } = await pg.query(q, (params.object_id ? [params.object_id] : null));
+
+      send(`Оброблено: ${offset}/${total}`);
+      send(`seq: ${++seq}`);
+      send(`Обробка ${rows.length} об'єктів...`);
+
+      if (!rows.length) {
+        send('Обробка даних успішно завершена');
+        break;
+      }
+
+      await metaFormat({ rows, columns: dataset.columns });
+
+      // skip non present after metaFormat
+      const allowColumnList = []; // 'id'
+      if (['geojson', 'shp'].includes(format)) {
+        allowColumnList.push('geom');
+      }
+      rows.forEach((row) => {
+        Object.keys(row)
+          .filter((el) => !allowColumnList.includes(el.name) && !cols.includes(el))
+          .forEach((key) => delete row[key]);
+      });
+
+      // convert csv / xlsx from json
+      if (!existsSync(filepathJSON)) { // if json not exists
+        await mkdir(path.dirname(filepath), { recursive: true });
+        await writeFile(filepathJSON, JSON.stringify(rows));
+      }
+      else { // if json exists
+        const jsonData = JSON.parse(await readFile(filepathJSON) || '{}');
+        const moreData = jsonData.concat(rows); // rewrite to appendFile?
+        await writeFile(filepathJSON, JSON.stringify(moreData));
+      }
+
+      offset += rows.length;
+    }
+    catch (err) {
+      Object.assign(res, { error: err.toString(), status: 500 });
+      send(`error: ${err.toString()}`, 1);
+      logger.error({
+        name: 'export/table',
+        filepathJSON,
+        total,
+        offset,
+        result: res,
+        error: err.toString(),
+      });
+      return err.toString();
+    }
+  }
+
+  if (!res?.error && format === 'csv') {
+    send('Сформовано файл формату json. Початок конвертації в csv...');
+    return convertJSONToCSV({ send, filepathJSON });
+  }
+
+  if (!res?.error && format === 'xlsx') {
+    send('Сформовано файл формату json. Початок конвертації в xlsx...');
+    return convertJSONToXls({
+      filepathJSON,
+      colmodel: dataset.columns,
+      domain: req.hostname,
+      source: dataset.table,
+      send,
+    });
+  }
+
+  const message = res.error || 'Файл успішно сформовано. Натистіть кнопку ще раз для завантаження даних';
+  send(message, 1);
+  return { message, status: 200 };
+}
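
Finally, a hedged sketch of driving the export endpoint. The path is assumed; the format/filter/nocache parameters, the cached-file attachment branch, and the progress streaming for large tables (10 000+ rows go through eventStream) are taken from the handler above.

const res = await fetch('/api/bi/dataset/DATASET_ID/export?format=csv');

if (res.headers.get('content-disposition')) {
  // A previously built file is returned directly as an attachment.
  console.log((await res.text()).slice(0, 200));
} else {
  // First run: the handler pages through the table, writes a temp JSON/CSV file
  // and answers with a status message (or streams progress for large exports).
  console.log(await res.text());
}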