@opengis/bi 1.0.21 → 1.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +101 -101
- package/dist/{import-file-C8BY90-b.js → import-file-r0dN1aVl.js} +19761 -18120
- package/dist/{map-component-mixin-CFtShPun.js → map-component-mixin-DU9YFNY4.js} +1 -1
- package/dist/style.css +1 -1
- package/dist/{vs-calendar-B9vXdsaG.js → vs-calendar-B64GoLWu.js} +1 -1
- package/dist/{vs-funnel-bar-Cj0O8tIf.js → vs-funnel-bar-DV5vXI3k.js} +1 -1
- package/dist/{vs-heatmap-C9oFph_f.js → vs-heatmap-Ox5uspv9.js} +1 -1
- package/dist/{vs-map-WOn0RAU7.js → vs-map-9t4WlfUa.js} +2 -2
- package/dist/{vs-map-cluster-RJa6sNfI.js → vs-map-cluster-DSZGPUt8.js} +2 -2
- package/dist/{vs-number-BG0szZL-.js → vs-number-O3_Cvwaw.js} +1 -1
- package/dist/vs-table-C_CsDsZN.js +68 -0
- package/dist/{vs-text-Kwl3-0yy.js → vs-text-I3gRlw-X.js} +2 -2
- package/package.json +5 -4
- package/plugin.js +1 -1
- package/server/helpers/mdToHTML.js +17 -0
- package/server/migrations/bi.dataset.sql +13 -0
- package/server/routes/dashboard/controllers/dashboard.js +3 -3
- package/server/routes/data/controllers/data.js +12 -13
- package/server/routes/data/controllers/util/chartSQL.js +6 -3
- package/server/routes/dataset/controllers/bi.dataset.list.js +3 -1
- package/server/routes/dataset/controllers/comment.js +55 -0
- package/server/routes/dataset/controllers/createDatasetPost.js +132 -0
- package/server/routes/dataset/controllers/data.js +145 -0
- package/server/routes/{db → dataset}/controllers/dbTablePreview.js +17 -24
- package/server/routes/{db → dataset}/controllers/dbTables.js +7 -11
- package/server/routes/dataset/controllers/delete.js +39 -0
- package/server/routes/dataset/controllers/{bi.dataset.edit.js → editDataset.js} +32 -26
- package/server/routes/dataset/controllers/export.js +213 -0
- package/server/routes/dataset/controllers/form.js +99 -0
- package/server/routes/dataset/controllers/format.js +44 -0
- package/server/routes/dataset/controllers/insert.js +46 -0
- package/server/routes/dataset/controllers/table.js +69 -0
- package/server/routes/dataset/controllers/update.js +42 -0
- package/server/routes/dataset/index.mjs +88 -43
- package/server/routes/dataset/utils/convertJSONToCSV.js +17 -0
- package/server/routes/dataset/utils/convertJSONToXls.js +49 -0
- package/server/routes/dataset/utils/createTableQuery.js +59 -0
- package/server/routes/dataset/utils/datasetForms.js +1 -0
- package/server/routes/dataset/utils/descriptionList.js +46 -0
- package/server/routes/dataset/utils/downloadRemoteFile.js +58 -0
- package/server/routes/dataset/utils/executeQuery.js +46 -0
- package/server/routes/dataset/utils/getLayersData.js +107 -0
- package/server/routes/dataset/utils/getTableData.js +47 -0
- package/server/routes/dataset/utils/insertDataQuery.js +12 -0
- package/server/routes/dataset/utils/metaFormat.js +24 -0
- package/server/routes/edit/controllers/widget.edit.js +23 -5
- package/server/routes/map/controllers/heatmap.js +118 -0
- package/server/routes/map/index.mjs +2 -0
- package/server/utils/getWidget.js +5 -2
- package/server/routes/dataset/controllers/bi.dataset.add.js +0 -86
- package/server/routes/dataset/controllers/bi.dataset.data.add.js +0 -49
- package/server/routes/dataset/controllers/bi.dataset.data.del.js +0 -54
- package/server/routes/dataset/controllers/bi.dataset.data.edit.js +0 -55
- package/server/routes/dataset/controllers/bi.dataset.data.list.js +0 -71
- package/server/routes/dataset/controllers/bi.dataset.del.js +0 -48
- package/server/routes/dataset/controllers/bi.dataset.demo.add.js +0 -97
- package/server/routes/dataset/controllers/util/create.table.js +0 -21
- package/server/routes/dataset/controllers/util/prepare.data.js +0 -49
- package/server/routes/db/index.mjs +0 -17
|
import path from 'node:path';
import { existsSync } from 'node:fs';
import { rm, mkdir, writeFile } from 'node:fs/promises';
import { createHash } from 'node:crypto';

import { config, logger } from '@opengis/fastify-table/utils.js';

/**
 * Downloads a remote file and caches it under `<rootDir>/files/tmp/`.
 *
 * The cache file name is the md5 of `url,table`, so the same url/table pair
 * always maps to the same path. The extension is taken from the response
 * Content-Type (subtype, with '+', '-' and '.' stripped), except for
 * text/plain responses, where the url's own extension is used.
 *
 * @param {Object} opts
 * @param {string} opts.rootDir - Filesystem root the relative path is joined to.
 * @param {string} opts.url - Remote file location.
 * @param {string} opts.table - Dataset table name (part of the cache key).
 * @returns {Promise<Object>} `{ filePath }` (path relative to rootDir) on
 *   success, or `{ error|message, status }` on failure.
 */
export default async function downloadRemoteFile({ rootDir, url, table }) {
  // Required-parameter guards, checked in a fixed order so the first missing
  // one is reported (rootDir, then url, then table).
  for (const [param, value] of Object.entries({ rootDir, url, table })) {
    if (!value) {
      return { error: `param ${param} is required`, status: 400 };
    }
  }

  try {
    const response = await fetch(url);

    if (response?.status !== 200) {
      return { message: 'file not found', status: response?.status };
    }

    const buffer = Buffer.from(await response.arrayBuffer());
    const contentType = response.headers.get('Content-Type');

    // Derive an extension: for text/plain trust the url, otherwise build one
    // from the Content-Type subtype (e.g. "application/geo+json" -> ".geojson").
    // eslint-disable-next-line newline-per-chained-call
    const extName = contentType === 'text/plain'
      ? path.extname(url)
      : `.${contentType?.split(';')?.shift()?.split('/')?.pop()?.replace(/\+|\-|\./g, '')}`;

    const digest = createHash('md5').update([url, table].join()).digest('hex');
    const filePath = `/files/tmp/${digest}${extName}`;
    const fullPath = path.join(rootDir, filePath);

    if (config?.local) {
      console.log(url, fullPath, existsSync(fullPath));
    }

    await mkdir(path.dirname(fullPath), { recursive: true });

    // In local/dev mode always refresh the cached copy.
    if (existsSync(fullPath) && config?.local) {
      await rm(fullPath);
    }

    if (!existsSync(fullPath)) {
      await writeFile(fullPath, buffer);
    }

    return { filePath };
  }
  catch (err) {
    logger.file('dataset/create/error', { url, error: err.toString() });
    return { error: err.toString(), status: 500 };
  }
}
|
import { pgClients } from '@opengis/fastify-table/utils.js';

import insertDataQuery from './insertDataQuery.js';

// Registers a new dataset row; returns the generated dataset_id.
const insertDatasetSQL = `insert into bi.dataset
(name, table_name, dataset_file_path, column_list, pk, data_source, uid)
values($1,$2,$3,$4,$5,$6,$7) returning dataset_id`;

// Links the created dataset back to the originating public appeal.
const updateAppealSQL = 'update datasets_appeal.appeal set dataset_id=$1, data_key=$2 where ap_id=$3';

/**
 * Creates a dataset table, optionally loads GeoJSON data into it, and
 * registers the dataset in bi.dataset — all inside a single transaction.
 *
 * @param {Object} opts
 * @param {string} opts.sql - DDL creating the dataset table.
 * @param {Object} [opts.data] - GeoJSON FeatureCollection to insert.
 * @param {string} [opts.name] - Dataset display name (falls back to `id`).
 * @param {string} [opts.id] - Appeal id; when present the appeal row is updated.
 * @param {string} opts.table - Target table name.
 * @param {string} [opts.relPath] - Stored source-file path.
 * @param {Array}  [opts.columns] - Column descriptors (serialized to JSON).
 * @param {string} [opts.pkey] - Primary key column name.
 * @param {string} [opts.source] - Data source type tag.
 * @param {Object} [opts.user] - Current user (uid recorded on the dataset).
 * @param {string} [opts.url] - Remote source url, stored in data_source.
 * @param {string} [opts.dataKey] - Data key written onto the appeal.
 * @returns {Promise<Object>} `{ datasetId }` or `{ error, status: 500 }`.
 */
export default async function executeQuery({
  sql, data, name, id, table, relPath, columns, pkey, source, user, url, dataKey,
}) {
  // Dedicated client: the whole sequence must share one connection so the
  // transaction spans table creation, data load and registration.
  const client = await pgClients.client.connect();
  try {
    await client.query('BEGIN');

    // create table
    await client.query(sql);

    // insert data
    if (data) {
      await client.query(insertDataQuery(columns, data, table));
    }

    // create dataset
    const datasetArgs = [
      name || id,
      table,
      relPath,
      columns ? JSON.stringify(columns) : null,
      pkey,
      JSON.stringify({ type: source, appeal: id, url }),
      user?.uid,
    ];
    const insertRes = await client.query(insertDatasetSQL, datasetArgs);
    const datasetId = insertRes.rows?.[0]?.dataset_id;

    if (id) {
      // update appeal for public only, admin w/o appeal
      await client.query(updateAppealSQL, [datasetId, dataKey, id]);
    }

    await client.query('COMMIT');
    return { datasetId };
  }
  catch (err) {
    await client.query('ROLLBACK');
    return { error: err.toString(), status: 500 };
  }
  finally {
    client.release();
  }
}
|
import descriptionList from "./descriptionList.js";
import metaFormat from "./metaFormat.js";

/**
 * Resolves the map feature(s) under a clicked point (lat/lng) for one or
 * more layers (gis.style styles and/or bi.dataset datasets), and builds an
 * HTML description card for the first hit.
 *
 * @param {Object} opts
 * @param {Object} opts.pg - pg client; also provides pk/queryCache metadata.
 * @param {string} opts.layer - Comma-separated style/dataset ids.
 * @param {string} [opts.table] - Restrict hits to this physical table.
 * @param {string} [opts.id] - Match one specific row instead of spatial hit-test.
 * @param {number} opts.lat - Click latitude (EPSG:4326).
 * @param {number} opts.lng - Click longitude (EPSG:4326).
 * @param {number[]} [opts.time] - Mutated in place with Date.now() timings.
 * @returns {Promise<Object>} `{ id, layers, template, data, rows, html }` or
 *   `{ message, status }` on lookup failure.
 */
async function getDataByLatLng({ pg, layer, table, id, lat, lng, time = [] }) {
  // const rclient = getRedis();
  // const redisKey = `${pg.options.database}:gis-format:${layer}:${lat}:${lng}`;
  // const cacheData = await rclient.get(redisKey);
  // if (cacheData && !config.local) return JSON.parse(cacheData);

  // Load layer definitions from both sources in one round trip: gis styles
  // keep a `dataset` JSON array; bi datasets are wrapped into the same shape.
  const { rows: styles = [] } = await pg.query(`select style_id as id, dataset
from gis.style where style_id=any($1::text[])
union all
select dataset_id as id, json_build_array(json_build_object('pk', pk, 'table', table_name, 'query', query, 'card', setting->>'card', 'columns', column_list)) as dataset
from bi.dataset a where dataset_id=any($1::text[])`, [layer.split(',')]);
  time.push(Date.now());

  if (!styles?.length) {
    return { message: `style not found: ${layer}`, status: 400 };
  }

  // Known SRIDs, used below to pick a hit-test tolerance (`step`).
  const { srids } = await pg.queryCache?.('select json_agg(srid) as srids from spatial_ref_sys').then((res1) => res1.rows?.[0] || {}) || {};

  // Flatten style/dataset definitions into one queryable layer list.
  // Entries without a resolvable table or primary key are dropped.
  const layers = styles.reduce((acc, curr) => {
    curr.dataset?.filter((el) => el.table && (el.pk || pg.pk?.[el.table]))
      ?.forEach((el) => acc.push({
        layer: curr.id,
        pk: el.pk || pg.pk?.[el.table],
        key: el.key,
        table: el.table,
        query: el.query,
        template: el.card,
        column_list: el.column_list,
        columns: el.columns || el.column_list?.map((el) => el.name)?.join(','),
        srid: el.srid || '4326',
        geom: el.geom || 'geom',
        // Tolerance: 10 units for known projected SRIDs, ~20m in degrees otherwise.
        step: srids?.includes(el.srid - 0) && el.srid !== '4326' ? 10 : 0.0002,
      }));
    return acc;
  }, [])
    .filter((el) => el.pk && el.layer)
    .filter((el) => table ? el.table === table : true);

  if (!layers.length) {
    return { message: 'data not found', status: 404 };
  }

  const point = `srid=4326;point(${lng} ${lat})`;

  // ${el.columns?.split?.filter?.((el) => el) ? `,${el.columns?.split?.filter?.((el) => el) || ''}` : ''}

  // One UNION ALL query across all candidate layers: polygons are matched by
  // buffered intersection, lines by distance; `id`, when given, pins one row
  // (single quotes are doubled to keep the literal safe).
  const q = layers.map((el) => `select ${el.pk}, '${el.layer}' as layer,
row_to_json(t) as data,
st_asgeojson(${el.geom})::json as geom from ${el.table} t
where ${el.query || '1=1'} and ${id ? `${el.pk}::text='${id.replace(/'/g, "''")}'` : '1=1'}
and case
when ST_GeometryType(geom) in ('ST_Polygon','ST_MultiPolygon')
then st_intersects(geom,st_buffer('${point}',${el.step}))

when ST_GeometryType(geom) in ('ST_Line','ST_MultiLineString')
then st_distance(geom,'${point}') < ${el.step}
else false end`).join(' union all ');

  // if (config.local && config.debug) console.log(q);
  const { rows: objects = [] } = await pg.query(q) || {};
  time.push(Date.now());

  // Merge each hit's row data with its layer metadata into a flat record.
  const rows = objects.map((row) => {
    const layerData = layers.find((el) => el.layer === row?.layer) || {};
    // NOTE(review): `columns?.split?.filter?.(...)` never calls split(',') —
    // `.filter` on the split *function* is undefined, so this condition is
    // always falsy and rowData is always row.data. Probably meant
    // `columns?.split(',')?.filter(...)` — confirm intent before changing.
    const rowData = layerData?.columns?.split?.filter?.((el) => el) ? layerData?.columns?.split?.filter?.((el) => el).reduce((acc, curr) => Object.assign(acc, { [curr]: row.data?.[curr] }), {}) : row.data;
    return {
      ...rowData,
      ...row,
      ...layerData,
      data: undefined,
      id: layerData.pk ? row?.[layerData.pk] : undefined,
      geom: row.geom,
    };
  });

  const { pk, layer: layerId, card, template } = rows[0] || {};

  const dataset = layers.find((el) => el.layer === rows?.[0]?.layer) || {};

  // name -> human title map for the description card (column_list entries).
  const columnList = (dataset.column_list || dataset.columns)?.reduce?.((acc, curr) => Object.assign(acc, { [curr.name]: curr.title || curr.ua || curr.name }), {}) || {};

  // "Title| key|Title| key|..." spec consumed by the descriptionList helper.
  const columns = card
    ? Object.keys(card).map(el => `${el}| ${card[el]}`).join('|')
    : Object.keys(columnList)?.map((key) => `${columnList[key]}| ${key}`).join('|');

  const html = columns && rows?.[0] ? await descriptionList(rows[0], { hash: { columns } }) : undefined;
  time.push(Date.now());

  // Resolve select/dictionary values to display text in place.
  // NOTE(review): the callback ignores its element and always uses
  // rows[0]'s layer, so the same metaFormat call runs once per row —
  // presumably intended to use each row's own layer; verify.
  await Promise.all(rows.map(async () => {
    const layerData = layers.find((el) => el.layer === rows?.[0]?.layer) || {};
    const columns = layerData.column_list || layerData.columns;
    if (columns?.length && Array.isArray(columns)) {
      await metaFormat({ rows, columns });
    }
  }));
  time.push(Date.now());

  const resp = { id: rows?.[0]?.[pk], layers: layerId, template, data: rows?.[0], rows, html };

  // await rclient.set(redisKey, JSON.stringify(resp), 'EX', 5 * 60);
  return resp;
}
export default getDataByLatLng
|
import { handlebars } from "@opengis/fastify-table/utils.js";

import descriptionList from "./descriptionList.js";
// import metaFormat from "./metaFormat.js";

/**
 * Loads one row of a bi.dataset by primary key and renders its description
 * card: either a stored handlebars template (card_type 'html') or an
 * auto-generated definition list from the table's column metadata.
 *
 * @param {Object} opts
 * @param {Object} opts.pg - pg client; pg.pk maps table -> primary key.
 * @param {string} opts.table - Dataset id (bi.dataset.dataset_id).
 * @param {string} opts.id - Row primary-key value.
 * @param {number[]} [opts.time] - Mutated in place with Date.now() timings.
 * @returns {Promise<Object>} `{ id, template, rows, html, body }` or
 *   `{ message, status: 400 }` when the dataset is missing/invalid.
 */
async function getDataById({ pg, table, id, time = [] }) {
  const dataset = await pg.query(`select dataset_id as id, pk, query, table_name, column_list, setting
from bi.dataset where dataset_id=$1`, [table]).then((res) => res.rows[0] || {});
  time.push(Date.now());

  if (!dataset?.id) {
    return { message: `dataset not found: ${table}`, status: 400 };
  }

  if (!dataset?.table_name) {
    return { message: 'invalid dataset: empty params table', status: 400 };
  }

  // Dataset-declared pk wins; otherwise fall back to pg metadata.
  const pk = dataset.pk || pg.pk?.[dataset.table_name];

  // Fetch the row (geometry serialized to JSON). table_name/query come from
  // bi.dataset, not from user input; only `id` is parameterized.
  const { rows = [] } = pk ? await pg.query(`select *, ${pk}, geom::json from ${dataset.table_name} where ${dataset.query || '1=1'} and ${pk}=$1`, [id]) || {} : {};
  // Expose the pk value under a uniform `id` alias for consumers.
  if (pk) rows.forEach((row) => Object.assign(row, { id: row?.[pk] }));
  time.push(Date.now());

  // sql columns: names, comments (titles) and types from pg_catalog,
  // excluding service columns and json/geometry types.
  const sql = `select attname as name, pg_catalog.col_description(attrelid,attnum) as title, atttypid::regtype as type from pg_catalog.pg_attribute a
where attrelid='${dataset.table_name}'::regclass and attnum>0
and attname not in ('editor_id','editor_date','cdate','geom','id','uid','cdate')
and atttypid::regtype not in ('json','geometry')`;

  // card or auto: 'list' limits columns to setting.card_list.
  const type = dataset.setting?.card_type || 'auto';
  const list = dataset.setting?.card_list || [];
  const { rows: all } = await pg.query(sql).then(el => el);
  // "Title| name|Title| name|..." spec consumed by descriptionList.
  const columns = all.filter(el => type === 'list' ? list.includes(el.name) : true)
    .map(el => `${el.title || el.name}| ${el.name}`).join('|')


  // card_type 'html': load the stored handlebars template body by title.
  const { body } = type === 'html' && dataset?.setting?.card ? await pg.query(`select body->>'body' as body from admin.doc_template where title=$1`, [dataset?.setting?.card]).then(el => el.rows[0] || {}) : {}

  // Render with the template when present, else with the auto column list.
  const html = body ? await handlebars.compile(body)(rows[0]) : await descriptionList(rows[0], { hash: { columns } });
  time.push(Date.now());


  return { id: rows?.[0]?.id, template: dataset?.setting?.card, rows, html, body };
}
export default getDataById
|
/**
 * Builds a single `INSERT ... SELECT` statement that loads the features of a
 * GeoJSON FeatureCollection into `table`: geometry from each feature's
 * `geometry`, plus one column per descriptor in `columns`.
 *
 * NOTE(review): target columns use `el.name` while property values are read
 * via `el.title` — presumably `title` holds the original source header and
 * `name` the sanitized column; confirm against the column builder.
 *
 * @param {Array<{name: string, title: string}>} [columns] - Column descriptors.
 * @param {{features?: Array}} [data] - GeoJSON FeatureCollection.
 * @param {string} table - Fully-qualified target table name.
 * @returns {string|null} SQL statement, or null when there are no features.
 */
export default function insertDataQuery(columns, data, table) {
  const features = data?.features || [];
  if (!features?.length) return null;

  // Double single quotes so titles / feature JSON cannot terminate the SQL
  // string literals they are embedded in (the features JSON was already
  // escaped before; titles were not — that was an injection hole).
  const escapeQuotes = (s) => String(s).replace(/'/g, "''");

  const columnList = columns?.map((el) => el.name)?.join(',');

  const valueExprs = columns?.length
    ? ','.concat(columns.map((el) => `features->'properties'->>'${escapeQuotes(el.title)}'`).join(','))
    : '';

  const insertData = `INSERT into ${table} (geom ${columns?.length ? ','.concat(columnList) : ''}) SELECT public.ST_GeomFromGeoJSON((features->>'geometry')::json) AS geom
${valueExprs} FROM
(SELECT json_array_elements('${escapeQuotes(JSON.stringify(features))}'::json) AS features)q`;

  // console.log(insertData);
  return insertData;
}
|
import { getSelectVal } from '@opengis/fastify-table/utils.js';

/**
 * Resolves dictionary-backed ("select") columns of `rows` to display values
 * in place: for every column descriptor that carries a `data` dictionary
 * name, each row gains `<name>_text` (plain text) or `<name>_data` (value
 * object with a color) alongside the raw value.
 *
 * @param {Object} opts
 * @param {Array<Object>} opts.rows - Result rows; mutated in place.
 * @param {Array<Object>} opts.columns - Column descriptors (`name`, `data`).
 * @returns {Promise<Array<Object>>} The same (mutated) rows array.
 */
export default async function metaFormat({ rows, columns }) {
  const dictColumns = columns?.filter?.((el) => el.data);
  if (!dictColumns?.length) return rows;

  // Resolve all dictionaries in parallel; each task annotates the rows for
  // one column.
  await Promise.all(dictColumns.map(async (attr) => {
    // Distinct non-empty values present in this column (arrays flattened;
    // booleans kept even though false is falsy).
    const distinct = [...new Set(rows?.map((el) => el[attr.name]).flat())]
      .filter((el) => (typeof el === 'boolean' ? true : el));
    if (!distinct.length) return null;

    const dict = await getSelectVal({ name: attr.data, values: distinct });
    if (!dict) return null;

    for (const row of rows) {
      const raw = row[attr.name];
      // Array values map element-wise; scalars fall through dictionary
      // lookups (string key first) down to the raw value.
      const resolved = raw?.map?.(c => dict[c.toString()] || dict[c] || c)
        || dict[raw?.toString()]
        || dict[raw]
        || raw;
      if (!resolved) continue;
      const key = resolved?.color ? `${attr.name}_data` : `${attr.name}_text`;
      Object.assign(row, { [key]: (resolved.color ? resolved : resolved.text || resolved) });
    }

    return null;
  }));

  return rows;
}
|
@@ -5,7 +5,8 @@ import { getWidget, yamlSafe } from '../../../../utils.js';
|
|
|
5
5
|
export default async function widgetEdit({ pg, body, params, }) {
|
|
6
6
|
const { widget: widgetName, name: dashboardName } = params;
|
|
7
7
|
const data = body.yml && !body.style ? yamlSafe.load(body.yml) : body;
|
|
8
|
-
|
|
8
|
+
const yml = body.yml ?? yamlSafe.dump(body.data);
|
|
9
|
+
console.log(body.data, data, body.yml, yml);
|
|
9
10
|
|
|
10
11
|
|
|
11
12
|
if (!widgetName || !dashboardName) {
|
|
@@ -15,8 +16,8 @@ export default async function widgetEdit({ pg, body, params, }) {
|
|
|
15
16
|
};
|
|
16
17
|
}
|
|
17
18
|
|
|
18
|
-
const { widget_id: widgetId } = await pg.query(
|
|
19
|
-
`select a.widget_id , b.dashboard_id from bi.widget a, bi.dashboard b
|
|
19
|
+
const { widget_id: widgetId, dashboard_id: dashboardId, widgets } = await pg.query(
|
|
20
|
+
`select a.widget_id , b.dashboard_id, b.widgets from bi.widget a, bi.dashboard b
|
|
20
21
|
where $2 in (a.widget_id, a.name) and $1 in (b.dashboard_id, b.name) order by 1,2`,
|
|
21
22
|
[dashboardName, widgetName]).then(res1 => res1.rows?.[0] || {});
|
|
22
23
|
|
|
@@ -35,8 +36,8 @@ export default async function widgetEdit({ pg, body, params, }) {
|
|
|
35
36
|
return { message: 'bad params: table ' + tableName, status: 400 };
|
|
36
37
|
}
|
|
37
38
|
|
|
38
|
-
if (
|
|
39
|
-
Object.assign(widgetData, { yml
|
|
39
|
+
if (yml) {
|
|
40
|
+
Object.assign(widgetData, { yml })
|
|
40
41
|
}
|
|
41
42
|
|
|
42
43
|
Object.assign(widgetData, { table_name: tableName });
|
|
@@ -47,6 +48,23 @@ export default async function widgetEdit({ pg, body, params, }) {
|
|
|
47
48
|
data: widgetData,
|
|
48
49
|
});
|
|
49
50
|
|
|
51
|
+
const idx = widgets.findIndex(el => el.name === widgetName);
|
|
52
|
+
if (widgetData.hasOwnProperty('data')) {
|
|
53
|
+
widgets[idx].data = widgetData.data
|
|
54
|
+
}
|
|
55
|
+
if (widgetData.hasOwnProperty('yml')) {
|
|
56
|
+
widgets[idx].yml = widgetData.yml;
|
|
57
|
+
}
|
|
58
|
+
if (widgetData.hasOwnProperty('style')) {
|
|
59
|
+
widgets[idx].style = widgetData.style;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
await dataUpdate({
|
|
63
|
+
table: 'bi.dashboard',
|
|
64
|
+
id: dashboardId,
|
|
65
|
+
data: { widgets },
|
|
66
|
+
});
|
|
67
|
+
|
|
50
68
|
return {
|
|
51
69
|
message: rows,
|
|
52
70
|
status: 200,
|
|
import path from 'node:path';
import { createHash } from 'node:crypto';
import { existsSync } from 'node:fs';
import { readFile, writeFile, mkdir, stat } from 'node:fs/promises';

import { pgClients, getFilterSQL, getPGAsync, getMeta, getFolder } from '@opengis/fastify-table/utils.js';

import { getWidget } from '../../../../utils.js';

const hourMs = 3.6e6; // one hour in milliseconds
const maxLimit = 2500; // hard cap on returned hexagons

/**
 * GET /bi-heatmap — aggregates a widget's geometry table into a hexagonal
 * heatmap, returned as a GeoJSON FeatureCollection with a `metric` property
 * per hexagon (sum/min/max/avg of a metric column, or count(*) when no
 * metric is configured).
 *
 * Results are cached on disk for 24h, keyed by table + search/filter/limit.
 * Admins may pass ?sql=1|2 to get the generated SQL instead of data.
 *
 * @param {Object} req - Fastify request (query: widget, dashboard, filter,
 *   search, size, limit, metric, operator, nocache, sql).
 * @param {Object} reply - Fastify reply (unused).
 * @returns {Promise<Object>} GeoJSON FeatureCollection, SQL string (admin
 *   debug), or `{ message, status }` on error.
 */
export default async function heatmap(req, reply) {
  const { query = {}, user = {} } = req;

  const { widget, dashboard, filter, search, size = 0.1 } = query;

  if (query.size && (+query.size || 0) <= 0) {
    return { message: 'param size is invalid', status: 400 };
  }

  if (!dashboard || !widget) {
    return { message: 'not enough params: dashboard / widget', status: 400 };
  }

  const { data } = await getWidget({ widget, dashboard });

  if (!data?.table) {
    return { message: 'widget not found: ' + widget, status: 400 };
  }

  const limit = Math.min(+query.limit || maxLimit, maxLimit);
  const hash = [search, filter, limit].filter((el) => el).join();

  const root = getFolder(req, 'local');
  // NOTE(review): a base64 digest may contain '/' and '+', which is awkward
  // in a file path — 'hex' would be safer, but changing it would orphan
  // existing cache entries; left as-is.
  const file = path.join(
    root,
    `/map/geojson/heatmap/${data.table}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}.geojson`
  );

  if (existsSync(file) && !query.nocache && !query.sql) {
    const timeNow = Date.now();
    const stats = await stat(file);
    const birthTime = new Date(stats.birthtime).getTime();
    // Serve the cache only while it is younger than 24 hours. (The previous
    // check `!(birthTime - timeNow > hourMs * 24)` was always true, since a
    // file's birth time never exceeds "now", so the cache never expired.)
    if (timeNow - birthTime < hourMs * 24) {
      // Fall back to '{}' so an empty/truncated cache file does not make
      // JSON.parse throw (the old `|| {}` stringified to "[object Object]").
      const geojson = JSON.parse((await readFile(file, 'utf-8')) || '{}');
      return geojson;
    }
  }

  const pg = data.db ? await getPGAsync(data.db) : pgClients.client;

  if (!pg.pk?.[data.table]) {
    return { message: `table not found: ${data.table}`, status: 404 };
  }

  // Metric column: explicit query param, else first configured widget metric
  // (object form `{name}` or bare string); undefined means count(*).
  const metric = query.metric || data.metrics?.[0]?.name || (Array.isArray(data.metrics) ? data.metrics?.[0] : data.metrics);

  const operator = metric
    ? (['sum', 'min', 'max', 'avg'].find(el => el === query.operator) || 'sum')
    : undefined;

  const aggregator = metric
    ? `${operator}(${metric})`
    : 'count(*)';

  const { geom, columns } = await getMeta({ pg, table: data.table });

  const { dataTypeID } = columns.find(col => col.name === metric) || {};

  if (metric && !dataTypeID) {
    return { message: `metric column not found: ${metric}`, status: 404 };
  }

  // Validate the metric's type only when a metric is in use — count(*)
  // heatmaps have no metric column. (Previously this check ran
  // unconditionally, so every metric-less request was rejected.)
  if (metric && !['integer', 'numeric', 'double precision'].includes(pg.pgType[dataTypeID])) {
    return { message: `metric column invalid type: ${metric} (${pg.pgType[dataTypeID]})`, status: 404 };
  }

  if (!geom) {
    return { message: `geometry column not found: ${data.table}`, status: 404 };
  }

  // Apply user search/filter only when one was given; otherwise scan the table.
  const { optimizedSQL = `select * from ${data.table} where 1=1` } = hash ? await getFilterSQL({ pg, table: data.table, filter, search }) : {};

  // Hex grid over the data extent, joined back to the points and clipped to
  // the points' convex hull so empty border hexagons are dropped.
  const subQuery = `SELECT ${aggregator} AS metric, hex.geom FROM (
SELECT ST_SetSRID( (ST_HexagonGrid(${size}, ST_Extent(q.${geom})) ).geom, 4326 ) as geom FROM ( ${optimizedSQL})q
)hex
LEFT JOIN ( ${optimizedSQL} )pts
ON ST_Within(pts.${geom}, hex.geom)

JOIN ( SELECT ST_ConvexHull(ST_Collect(${geom})) AS mask FROM ( ${optimizedSQL} )q )point_mask
ON ST_Intersects(hex.geom, point_mask.mask)

WHERE 1=1 /*and pts.${geom} is not null AND st_srid(pts.${geom}) > 0*/
GROUP BY hex.geom
limit ${limit}`;

  if (query.sql === '1' && user?.user_type?.includes('admin')) return subQuery;

  const q = `SELECT 'FeatureCollection' As type, json_agg(f) As features FROM (
SELECT
'Feature' As type,
row_number() over() as id,
st_asgeojson(geom, 6, 0)::json as geometry,
json_build_object( 'metric', metric ) as properties
from (${subQuery})sq
)f`;

  if (query.sql === '2' && user?.user_type?.includes('admin')) return q;

  const geojson = await pg.query(q)
    .then(el => el.rows?.[0] || {});

  // Persist for the 24h disk cache checked above.
  await mkdir(path.dirname(file), { recursive: true });
  await writeFile(file, JSON.stringify(geojson));

  return geojson;
}
|
@@ -4,6 +4,7 @@ import vtile from './controllers/vtile.js';
|
|
|
4
4
|
|
|
5
5
|
import cluster from './controllers/cluster.js';
|
|
6
6
|
import clusterVtile from './controllers/clusterVtile.js';
|
|
7
|
+
import heatmap from './controllers/heatmap.js';
|
|
7
8
|
|
|
8
9
|
const biSchema = {
|
|
9
10
|
querystring: {
|
|
@@ -22,4 +23,5 @@ export default async function route(fastify, opts) {
|
|
|
22
23
|
fastify.get('/bi-vtile/:z/:y/:x', { schema: biSchema }, vtile);
|
|
23
24
|
fastify.get('/bi-cluster', { schema: biSchema }, cluster);
|
|
24
25
|
fastify.get('/bi-cluster-vtile/:z/:y/:x', { schema: biSchema }, clusterVtile);
|
|
26
|
+
fastify.get('/bi-heatmap', {}, heatmap);
|
|
25
27
|
}
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
import { getTemplate, pgClients } from '@opengis/fastify-table/utils.js';
|
|
2
2
|
|
|
3
|
+
import mdToHTML from '../helpers/mdToHTML.js';
|
|
4
|
+
|
|
3
5
|
const pg = pgClients.client;
|
|
4
6
|
|
|
5
7
|
async function getWidget({ dashboard, widget }) {
|
|
@@ -33,7 +35,8 @@ async function getWidget({ dashboard, widget }) {
|
|
|
33
35
|
? dashboardData?.find((el) => el[2] === (widget || 'index'))?.[1]
|
|
34
36
|
: await getTemplate('widget', widget);
|
|
35
37
|
if (typeof widgetData === 'string') {
|
|
36
|
-
|
|
38
|
+
const html = mdToHTML(widgetData);
|
|
39
|
+
return { source: html, status: 200 };
|
|
37
40
|
}
|
|
38
41
|
if (!id && !dashboardData && !widgetData) {
|
|
39
42
|
return { message: `not found ${widget} ${dashboard}`, status: 404 };
|
|
@@ -65,7 +68,7 @@ async function getWidget({ dashboard, widget }) {
|
|
|
65
68
|
widgetData?.table_name ||
|
|
66
69
|
dashboardIndex?.table ||
|
|
67
70
|
dashboardIndex?.table_name,
|
|
68
|
-
db: dashboardIndex?.db || pgClients.client?.options?.database,
|
|
71
|
+
db: dashboardIndex?.db || widgetData?.db || pgClients.client?.options?.database,
|
|
69
72
|
});
|
|
70
73
|
const main = { ...(dashboardIndex || {}), ...widgetData, ...data, ...data?.data || {} };
|
|
71
74
|
|
|
@@ -1,86 +0,0 @@
|
|
|
1
|
-
import { randomBytes } from 'node:crypto';
|
|
2
|
-
|
|
3
|
-
import { dataInsert, getPG, pgClients, initPG } from '@opengis/fastify-table/utils.js';
|
|
4
|
-
|
|
5
|
-
import createTableFunc from './util/create.table.js';
|
|
6
|
-
import prepareData from './util/prepare.data.js';
|
|
7
|
-
|
|
8
|
-
async function updatePGMeta(pg, table) {
|
|
9
|
-
if (!pg.tlist) await initPG(pg);
|
|
10
|
-
if (!pg.tlist.includes(table)) {
|
|
11
|
-
const { pks } = await pg.query(`SELECT json_object_agg(c.conrelid::regclass, a.attname) as pks FROM pg_constraint c
|
|
12
|
-
left join pg_attribute a on c.conrelid=a.attrelid and a.attnum = c.conkey[1] WHERE c.contype='p'::"char"`).then(el => el.rows[0]) || {};
|
|
13
|
-
pg.pk[table] = pks[table];
|
|
14
|
-
pg.tlist.push(table);
|
|
15
|
-
}
|
|
16
|
-
}
|
|
17
|
-
|
|
18
|
-
/**
|
|
19
|
-
* Додавання нового набору даних BI
|
|
20
|
-
*
|
|
21
|
-
* @method POST
|
|
22
|
-
* @summary Додавання нового набору даних BI
|
|
23
|
-
* @priority 4
|
|
24
|
-
* @alias biDatasetAdd
|
|
25
|
-
* @type api
|
|
26
|
-
* @tag bi
|
|
27
|
-
* @param {Object} params.id Dashboard ID
|
|
28
|
-
* @errors 400,500
|
|
29
|
-
* @returns {Number} status Номер помилки
|
|
30
|
-
* @returns {String} error Опис помилки
|
|
31
|
-
* @returns {Object} rows Масив з колонками таблиці
|
|
32
|
-
*/
|
|
33
|
-
|
|
34
|
-
export default async function biDatasetAdd(req) {
|
|
35
|
-
const { body = {}, session = {} } = req;
|
|
36
|
-
const { uid } = session?.passport?.user || {};
|
|
37
|
-
|
|
38
|
-
if (!uid) {
|
|
39
|
-
return { message: 'access restricted', status: 403 };
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
if (body.table_name) {
|
|
43
|
-
const result = await dataInsert({
|
|
44
|
-
table: 'bi.dataset',
|
|
45
|
-
data: { ...body, source_type: 'db' },
|
|
46
|
-
uid,
|
|
47
|
-
}).then(el => el.rows?.[0]);
|
|
48
|
-
|
|
49
|
-
const pg = body.db ? getPG({ db: body.db }) : pgClients.client;
|
|
50
|
-
await updatePGMeta(pg,body.table_name);
|
|
51
|
-
|
|
52
|
-
return { message: { id: result?.dataset_id, action: 'assign', table: body.table_name }, status: 200 };
|
|
53
|
-
}
|
|
54
|
-
|
|
55
|
-
if (!Array.isArray(body.data) && body.data?.features?.[0]?.type !== 'Feature' && !body.file_path) {
|
|
56
|
-
return { message: 'body data param is invalid', status: 400 };
|
|
57
|
-
}
|
|
58
|
-
|
|
59
|
-
const tableName = randomBytes(64).toString('hex').substring(0, 24).replace(/^\d+/, '');
|
|
60
|
-
const pkey = tableName.concat('_id');
|
|
61
|
-
const table = `bi_data.${tableName}`;
|
|
62
|
-
const { columns, insertData } = await prepareData({ table, file_path: body.file_path, data: body?.data });
|
|
63
|
-
const createTable = createTableFunc({ table, pkey, columns, name: body.name });
|
|
64
|
-
|
|
65
|
-
const q = ['create extension if not exists postgis;create schema if not exists bi_data', createTable, insertData].join(';');
|
|
66
|
-
|
|
67
|
-
const pg = body.db ? getPG({ db: body.db }) : pgClients.client;
|
|
68
|
-
if (!pg.pk) await initPG(pg);
|
|
69
|
-
|
|
70
|
-
const results = await pg.query(q);
|
|
71
|
-
|
|
72
|
-
const result = await dataInsert({
|
|
73
|
-
table: 'bi.dataset',
|
|
74
|
-
data: { ...body, table_name: table, source_type: body.data ? 'user' : 'file' },
|
|
75
|
-
uid,
|
|
76
|
-
}).then(el => el.rows?.[0]);
|
|
77
|
-
|
|
78
|
-
await updatePGMeta(pg,table);
|
|
79
|
-
|
|
80
|
-
return {
|
|
81
|
-
message: {
|
|
82
|
-
id: result?.dataset_id, action: 'import', total: results?.find((el) => el.command === 'INSERT')?.rowCount, table,
|
|
83
|
-
},
|
|
84
|
-
status: 200,
|
|
85
|
-
};
|
|
86
|
-
};
|
|
@@ -1,49 +0,0 @@
|
|
|
1
|
-
import { dataInsert, getPG, initPG, pgClients } from "@opengis/fastify-table/utils.js";
|
|
2
|
-
|
|
3
|
-
/**
|
|
4
|
-
* Внесення даних до набору BI
|
|
5
|
-
*
|
|
6
|
-
* @method POST
|
|
7
|
-
* @summary Внесення даних до набору BI
|
|
8
|
-
* @priority 4
|
|
9
|
-
* @alias biDatasetDataAdd
|
|
10
|
-
* @type api
|
|
11
|
-
* @tag bi
|
|
12
|
-
* @param {Object} params.id Dataset ID
|
|
13
|
-
* @errors 400,500
|
|
14
|
-
* @returns {Number} status Номер помилки
|
|
15
|
-
* @returns {String} error Опис помилки
|
|
16
|
-
* @returns {Object} rows Масив з колонками таблиці
|
|
17
|
-
*/
|
|
18
|
-
|
|
19
|
-
export default async function biDatasetDataAdd(req) {
|
|
20
|
-
const { params = {}, session = {}, body = {} } = req;
|
|
21
|
-
const { uid } = session?.passport?.user || {};
|
|
22
|
-
|
|
23
|
-
if (!uid) {
|
|
24
|
-
return { message: 'access restricted', status: 403 };
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
if (!params?.id) {
|
|
28
|
-
return { message: 'not enough params: id', status: 400 };
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
const { id, table, db } = await pgClients.client.query('select db, dataset_id as id, table_name as table from bi.dataset where dataset_id=$1', [params.id])
|
|
32
|
-
.then(el => el.rows[0]);
|
|
33
|
-
|
|
34
|
-
if (!id) {
|
|
35
|
-
return { message: 'dataset not found', status: 404 };
|
|
36
|
-
}
|
|
37
|
-
|
|
38
|
-
const pg = db ? getPG({ db }) : pgClients.client;
|
|
39
|
-
if (!pg.pk) await initPG(pg);
|
|
40
|
-
|
|
41
|
-
const result = await dataInsert({
|
|
42
|
-
pg,
|
|
43
|
-
table,
|
|
44
|
-
data: body,
|
|
45
|
-
uid,
|
|
46
|
-
}).then(el => el.rows[0]);
|
|
47
|
-
|
|
48
|
-
return { id, action: 'insert', result };
|
|
49
|
-
};
|