@opengis/bi 1.2.0 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +42 -42
- package/dist/import-file-D8jh74Dz.js +3543 -0
- package/dist/{vs-funnel-bar-C_TceUrc.js → vs-funnel-bar-T330oJNS.js} +3 -3
- package/dist/{vs-list-DyhLUIPb.js → vs-list-DeHF_Oaf.js} +109 -109
- package/dist/{vs-map-BtQJNN4L.js → vs-map-Skt608pM.js} +8 -8
- package/dist/{vs-map-cluster-BbPUosvt.js → vs-map-cluster-BRUiY_90.js} +21 -21
- package/dist/{vs-number-D2GkU586.js → vs-number-Dd_21nn-.js} +3 -3
- package/dist/{vs-table-D_Yn9QqB.js → vs-table-BwC29Zyc.js} +6 -6
- package/dist/{vs-text-BivVd6cY.js → vs-text-DEJjWxDu.js} +32 -39
- package/package.json +77 -76
- package/plugin.js +22 -0
- package/server/helpers/mdToHTML.js +17 -0
- package/server/migrations/bi.dataset.sql +46 -0
- package/server/migrations/bi.sql +112 -0
- package/server/plugins/docs.js +48 -0
- package/server/plugins/hook.js +89 -0
- package/server/plugins/vite.js +69 -0
- package/server/routes/dashboard/controllers/dashboard.import.js +103 -0
- package/server/routes/dashboard/controllers/dashboard.js +157 -0
- package/server/routes/dashboard/controllers/dashboard.list.js +40 -0
- package/server/routes/dashboard/controllers/utils/yaml.js +11 -0
- package/server/routes/dashboard/index.mjs +26 -0
- package/server/routes/data/controllers/data.js +230 -0
- package/server/routes/data/controllers/util/chartSQL.js +49 -0
- package/server/routes/data/controllers/util/normalizeData.js +65 -0
- package/server/routes/data/index.mjs +32 -0
- package/server/routes/dataset/controllers/bi.dataset.list.js +29 -0
- package/server/routes/dataset/controllers/bi.db.list.js +19 -0
- package/server/routes/dataset/controllers/comment.js +55 -0
- package/server/routes/dataset/controllers/createDatasetPost.js +134 -0
- package/server/routes/dataset/controllers/data.js +149 -0
- package/server/routes/dataset/controllers/dbTablePreview.js +58 -0
- package/server/routes/dataset/controllers/dbTables.js +34 -0
- package/server/routes/dataset/controllers/delete.js +40 -0
- package/server/routes/dataset/controllers/deleteDataset.js +52 -0
- package/server/routes/dataset/controllers/editDataset.js +90 -0
- package/server/routes/dataset/controllers/export.js +214 -0
- package/server/routes/dataset/controllers/form.js +99 -0
- package/server/routes/dataset/controllers/format.js +46 -0
- package/server/routes/dataset/controllers/insert.js +47 -0
- package/server/routes/dataset/controllers/table.js +68 -0
- package/server/routes/dataset/controllers/update.js +43 -0
- package/server/routes/dataset/index.mjs +132 -0
- package/server/routes/dataset/utils/convertJSONToCSV.js +17 -0
- package/server/routes/dataset/utils/convertJSONToXls.js +47 -0
- package/server/routes/dataset/utils/createTableQuery.js +59 -0
- package/server/routes/dataset/utils/datasetForms.js +1 -0
- package/server/routes/dataset/utils/descriptionList.js +46 -0
- package/server/routes/dataset/utils/downloadRemoteFile.js +58 -0
- package/server/routes/dataset/utils/executeQuery.js +46 -0
- package/server/routes/dataset/utils/getLayersData.js +107 -0
- package/server/routes/dataset/utils/getTableData.js +47 -0
- package/server/routes/dataset/utils/insertDataQuery.js +12 -0
- package/server/routes/dataset/utils/metaFormat.js +24 -0
- package/server/routes/edit/controllers/dashboard.add.js +36 -0
- package/server/routes/edit/controllers/dashboard.delete.js +39 -0
- package/server/routes/edit/controllers/dashboard.edit.js +61 -0
- package/server/routes/edit/controllers/widget.add.js +78 -0
- package/server/routes/edit/controllers/widget.del.js +58 -0
- package/server/routes/edit/controllers/widget.edit.js +106 -0
- package/server/routes/edit/index.mjs +33 -0
- package/server/routes/map/controllers/cluster.js +125 -0
- package/server/routes/map/controllers/clusterVtile.js +166 -0
- package/server/routes/map/controllers/geojson.js +127 -0
- package/server/routes/map/controllers/heatmap.js +118 -0
- package/server/routes/map/controllers/map.js +69 -0
- package/server/routes/map/controllers/utils/downloadClusterData.js +45 -0
- package/server/routes/map/controllers/vtile.js +183 -0
- package/server/routes/map/index.mjs +32 -0
- package/server/templates/page/login.html +59 -0
- package/server/utils/getWidget.js +117 -0
- package/utils.js +12 -0
- package/dist/import-file-Bx4xpxVb.js +0 -3493
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import { getFilterSQL, logger, pgClients, getMeta } from '@opengis/fastify-table/utils.js';
|
|
2
|
+
|
|
3
|
+
import { getWidget } from '../../../../utils.js';
|
|
4
|
+
|
|
5
|
+
import downloadClusterData from './utils/downloadClusterData.js';
|
|
6
|
+
|
|
7
|
+
// Per-process cache of cluster types already imported into bi.cluster.
const clusterExists = {};

/**
 * Cluster controller: aggregates a widget metric per cluster polygon
 * (bi.cluster by default) and returns rows plus quartile "sizes" for the
 * map legend.
 *
 * Query params: widget (required), dashboard, filter, search,
 * metric (comma-separated column list, validated against table meta),
 * sql ('1' returns the generated SQL instead of data).
 *
 * @param {object} req - fastify request
 * @param {object} reply - fastify reply (used only for the invalid-metric 404)
 * @returns {object|string} result payload, raw SQL string, or
 *   { message|error, status } on failure
 */
export default async function cluster(req, reply) {
  const { query = {} } = req;
  const { widget, filter, dashboard, search } = query;

  if (!widget) {
    return { message: 'not enough params: widget', status: 400 };
  }

  const { pg = req.pg || pgClients.client, data, style, controls } = await getWidget({ pg: req.pg, dashboard, widget });

  const pkey = pg.pk?.[data?.table];

  if (!pkey) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
      status: 400,
    };
  }

  // data param
  const {
    table,
    query: where = '1=1',
    metrics = [],
    cluster,
    clusterTable = {},
  } = data;

  if (!cluster) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: cluster column not specified`,
      status: 400,
    };
  }

  if (!metrics.length) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: metric columns not found`,
      status: 400,
    };
  }

  // Default cluster reference table when the widget does not configure one.
  if (!clusterTable?.name) {
    Object.assign(clusterTable, {
      name: 'bi.cluster',
      title: 'title',
      query: `type='${cluster}'`,
    });
  }

  try {
    // Lazily import cluster geometries once per process.
    if (cluster && !clusterExists[cluster]) {
      const res = await downloadClusterData({ pg, cluster });
      if (res) return res; // truthy result is an error payload — propagate it
      clusterExists[cluster] = 1;
    }

    if (clusterTable?.name && !pg.pk?.[clusterTable?.name]) {
      return {
        message: 'invalid widget params: clusterTable pkey not found',
        status: 404,
      };
    }

    // Overall bounds / extent of the widget's source table.
    const { bounds, extentStr } = await pg.query(`select count(*),
    st_asgeojson(st_extent(geom))::json as bounds,
    replace(regexp_replace(st_extent(geom)::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
    from ${table} where ${where || '1=1'}`).then((res) => res.rows?.[0] || {});
    const extent = extentStr ? extentStr.split(',') : undefined;

    // get sql
    const { optimizedSQL } =
      filter || search
        ? await getFilterSQL({ pg, table, filter, search })
        : {};

    const { columns = [] } = await getMeta({ pg, table });
    const columnList = columns.map(el => el.name);

    // Whitelist-validate user-supplied metric names against the table meta
    // before they are interpolated into SQL.
    if (query.metric && typeof query.metric === 'string') {
      const checkInvalid = query.metric.split(',').find(el => !columnList.includes(el) && el !== 'count');
      if (checkInvalid) {
        return reply.status(404).send(`invalid query metric value: ${checkInvalid}`);
      }
    }

    const multipleMetrics = query.metric ? query.metric.split(',').map(el => el === 'count' ? 'count(*)' : `sum(${el.replace(/'/g, "''")})::float as ${el}`).join(',') : null;
    const multipleMetricsOrder = query.metric ? query.metric.split(',').map(el => el === 'count' ? 'count(*)' : `sum(${el.replace(/'/g, "''")})::float`).join(',') : null;
    const metricFunc = multipleMetrics
      || `${clusterTable?.operator || 'sum'}("${metrics[0]}")::float`;

    const q = `select b.*, ${metricFunc} ${multipleMetrics ? '' : 'as metric'}
    from ${optimizedSQL ? `(${optimizedSQL})` : table} q
    left join lateral (select "${pg.pk?.[clusterTable?.name]}" as id, ${clusterTable?.column || cluster} as name, ${clusterTable?.title} as title from ${clusterTable?.name} where ${clusterTable?.codifierColumn || 'codifier'}=q."${clusterTable?.column || cluster}" limit 1)b on 1=1
    where ${where} group by b.id, b.name, b.title order by ${multipleMetricsOrder || metricFunc} desc`;

    if (query.sql === '1') return q;

    // auto Index
    // autoIndex({ table, columns: (metrics || []).concat([cluster]) });

    const { rows = [] } = await pg.query(q);

    // Legend sizes: min / quartiles / max of the metric.
    // Fix: consider only finite values, so an empty result set (or the
    // multi-metric mode, where rows carry no `metric` column) yields []
    // instead of [undefined, NaN, ...].
    const vals = rows
      .map((el) => el.metric - 0)
      .filter((v) => Number.isFinite(v))
      .sort((a, b) => a - b);
    const len = vals.length;
    const sizes = len
      ? [
        vals[0],
        vals[Math.floor(len / 4)],
        vals[Math.floor(len / 2)],
        vals[Math.floor(len * 0.75)],
        vals[len - 1],
      ]
      : [];
    return { sizes, style, controls, metrics, rows, columns: columns.map(({ name, title, dataTypeID }) => ({ name, title, type: pg.pgType[dataTypeID] })), bounds, extent, count: rows.length, total: rows?.reduce((acc, curr) => (curr.metric || 0) + acc, 0) };
  } catch (err) {
    logger.file('bi/cluster/error', { error: err.toString(), query });
    return { error: err.toString(), status: 500 };
  }
}
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import Sphericalmercator from '@mapbox/sphericalmercator';
|
|
2
|
+
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import { createHash } from 'crypto';
|
|
5
|
+
import { writeFile, mkdir } from 'fs/promises';
|
|
6
|
+
|
|
7
|
+
import { logger, getFolder, getFilterSQL, autoIndex, pgClients } from '@opengis/fastify-table/utils.js';
|
|
8
|
+
|
|
9
|
+
import { getWidget } from '../../../../utils.js';
|
|
10
|
+
|
|
11
|
+
import downloadClusterData from './utils/downloadClusterData.js';
|
|
12
|
+
|
|
13
|
+
const mercator = new Sphericalmercator({ size: 256 });

// Per-process cache of cluster types already imported into bi.cluster.
const clusterExists = {};

/**
 * Vector-tile (MVT) controller: renders per-cluster aggregated metrics for a
 * widget as a protobuf tile, writing a disk cache unless ?nocache.
 *
 * Route params: z / x / y (x may carry a file extension, e.g. "5.mvt").
 * Query: widget (required), dashboard, filter, search, pointZoom, nocache,
 * sql ('1'/'2'/'3' return intermediate SQL/rows for debugging).
 *
 * @returns {*} the protobuf buffer via reply, a SQL string in debug mode,
 *   or { message|error, status } on failure
 */
export default async function clusterVtile(req, reply) {
  const { params = {}, query = {} } = req;
  const { z, y } = params;
  const x = params.x?.split('.')[0] - 0;

  // Fix: 0 is a valid tile coordinate (x=0, y=0 and zoom 0 all occur), so
  // reject only missing / non-numeric values instead of the previous falsy
  // check. This also stops non-numeric y/z from reaching the SQL below.
  if ([x, +y, +z].some((v) => !Number.isFinite(v))) {
    return { message: 'not enough params: xyz', status: 400 };
  }

  const { widget, filter, dashboard, search, nocache, pointZoom } = query;

  if (!widget) {
    return { message: 'not enough params: widget', status: 400 };
  }

  const { pg = req.pg || pgClients.client, data } = await getWidget({ pg: req.pg, dashboard, widget });

  const headers = {
    'Content-Type': 'application/x-protobuf',
    'Cache-Control':
      nocache || query.sql ? 'no-cache' : 'public, max-age=86400',
  };

  const hash = [pointZoom, filter].filter((el) => el).join();

  const root = getFolder(req);
  // NOTE(review): a base64 sha1 digest may contain '/' characters, creating
  // extra path segments under the cache dir — consider base64url; verify.
  const file = path.join(
    root,
    `/map/vtile/${widget}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}${z}/${x}/${y}.mvt`
  );

  try {
    if (!data?.table) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table not specified`,
        status: 400,
      };
    }

    const pkey = pg.pk?.[data?.table];

    if (!pkey) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
        status: 400,
      };
    }

    // data param
    const {
      table,
      query: where = '1=1',
      metrics = [],
      cluster,
      clusterTable = {},
    } = data;

    // Default cluster reference table when the widget does not configure one.
    if (!clusterTable?.name) {
      Object.assign(clusterTable, {
        name: 'bi.cluster',
        title: 'title',
        query: `type='${cluster}'`,
      });
    }

    // Lazily import cluster geometries once per process.
    if (cluster && !clusterExists[cluster]) {
      const res = await downloadClusterData({ pg, cluster });
      if (res) return res; // truthy result is an error payload — propagate it
      clusterExists[cluster] = 1;
    }

    if (!cluster) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: cluster column not specified`,
        status: 400,
      };
    }

    if (!metrics.length) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: metric columns not found`,
        status: 400,
      };
    }

    // get sql
    const { optimizedSQL } =
      filter || search
        ? await getFilterSQL({ pg, table, filter, search })
        : {};

    const q = `select ${clusterTable?.column || cluster} as name, ${clusterTable?.operator || 'sum'}("${metrics[0]}")::float as metric, b.*
    from ${optimizedSQL ? `(${optimizedSQL})` : table} q
    left join lateral (select "${pg.pk?.[clusterTable?.name]}" as id, ${clusterTable?.title} as title,
    ${clusterTable?.geom || 'geom'} as geom from ${clusterTable?.name}
    where ${clusterTable?.query || '1=1'} and ${clusterTable?.codifierColumn || 'codifier'}=q."${clusterTable?.column || cluster}" limit 1
    )b on 1=1
    where ${where} group by
    q."${clusterTable?.column || cluster}", b.id, b.title, b.geom`;

    if (query.sql === '1') return q;

    // Below pointZoom the cluster polygons collapse to centroids.
    const geomCol =
      parseInt(z, 10) < parseInt(pointZoom, 10)
        ? `ST_Centroid(${clusterTable?.geom || data?.geom || 'geom'})`
        : clusterTable?.geom || data?.geom || 'geom';

    // NOTE(review): arguments are passed as (y, x, z) — sphericalmercator's
    // bbox expects (x, y, z); confirm the route's param naming before changing.
    const bbox = mercator.bbox(+y, +x, +z, false /* , '900913' */);
    const bbox2d = `'BOX(${bbox[0]} ${bbox[1]},${bbox[2]} ${bbox[3]})'::box2d`;

    const q1 = `SELECT ST_AsMVT(q, 'bi', 4096, 'geom','row') as tile
    FROM (
    SELECT
    floor(random() * 100000 + 1)::int + row_number() over() as row,
    ${pg.pk?.[clusterTable?.name] ? 'id,' : ''} name, metric, title,
    ST_AsMVTGeom(st_transform(${geomCol}, 3857),ST_TileEnvelope(${z},${y},${x})::box2d,4096,256,false) as geom
    FROM (select * from (${q})q where geom && ${bbox2d}
    and geom is not null and st_srid(geom) >0
    and ST_GeometryType(geom) = any ('{ "ST_Polygon", "ST_MultiPolygon" }')
    limit 3000)q
    ) q`;

    if (query.sql === '2') return q1;

    // auto Index
    autoIndex({ table, columns: (metrics || []).concat([cluster]) });

    const { rows = [] } = await pg.query(q1);

    if (query.sql === '3') return rows.map((el) => el.tile);

    const buffer = Buffer.concat(rows.map((el) => Buffer.from(el.tile)));

    if (!nocache) {
      await mkdir(path.dirname(file), { recursive: true });
      await writeFile(file, buffer, 'binary');
    }

    return reply.headers(headers).send(buffer);
  } catch (err) {
    logger.file('bi/clusterVtile/error', { error: err.toString(), query, params });
    return { error: err.toString(), status: 500 };
  }
}
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import { createHash } from 'crypto';
|
|
3
|
+
import { writeFile, mkdir, readFile, stat } from 'fs/promises';
|
|
4
|
+
import { existsSync, /* readdirSync, */ readFileSync } from 'fs';
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
import { getFolder, getFilterSQL, autoIndex, logger, pgClients } from '@opengis/fastify-table/utils.js';
|
|
8
|
+
|
|
9
|
+
import normalizeData from '../../data/controllers/util/normalizeData.js';
|
|
10
|
+
|
|
11
|
+
import { getWidget } from '../../../../utils.js';
|
|
12
|
+
|
|
13
|
+
// Geometry-type whitelists used by the ?type= filter.
const types = {
  point: 'ST_Point' /* ,ST_MultiPoint */,
  polygon: 'ST_Polygon,ST_MultiPolygon',
};
// One hour in milliseconds (cache TTL is hourMs * 24).
const hourMs = 3.6e6;

/**
 * GeoJSON controller: returns a widget's rows as a FeatureCollection,
 * cached on disk for 24 hours.
 *
 * Query: widget or dashboard (one required), filter, sql ('1'/'2' return the
 * generated SQL), type ('centroid' | key of `types`), nocache, id, geom,
 * pointZoom, srid, extent.
 *
 * @returns {object|string} FeatureCollection, raw SQL string, or
 *   { message|error, status } on failure
 */
export default async function geojson(req, reply) {
  const { query = {} } = req;

  const {
    filter,
    widget,
    sql,
    type,
    nocache,
    id,
    dashboard,
    geom = 'geom',
    pointZoom = 0,
  } = query;

  if (!widget && !dashboard) {
    return { message: 'not enough params: widget', status: 400 };
  }

  const data = await getWidget({ pg: req.pg, dashboard, widget });
  if (data.status) return data; // getWidget returned an error payload

  const pg = data.pg || req.pg || pgClients.client;
  const hash = [pointZoom, filter].filter((el) => el).join();

  const root = getFolder(req);
  const file = path.join(
    root,
    `/map/geojson/${widget}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}.geojson`
  );

  if (existsSync(file) && !nocache) {
    const stats = await stat(file);
    const ageMs = Date.now() - new Date(stats.birthtime).getTime();
    // Fix: the previous comparison (birthTime - now > 24h) was inverted and
    // never expired the cache; serve the file only while younger than 24h.
    if (ageMs <= hourMs * 24) {
      // Fix: fall back to the string '{}' — the old `|| {}` made JSON.parse
      // receive "[object Object]" and throw on an empty cache file.
      return JSON.parse((await readFile(file, 'utf-8')) || '{}');
    }
  }

  try {
    const pkey = pg.pk?.[data?.table];
    if (!pkey) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
        status: 400,
      };
    }

    // data param
    const { table, where = '1=1', xName, x } = normalizeData(data, query);

    if (!xName && !x) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: x axis column not specified`,
        status: 400,
      };
    }

    // get sql
    // Fix: getFilterSQL resolves to an object (siblings destructure
    // optimizedSQL); interpolating the whole result injected
    // "[object Object]" into the query.
    const { optimizedSQL } = filter
      ? await getFilterSQL({ pg, table, filter, query })
      : {};
    const q = `select "${pkey}", "${xName || x}", /* st_asgeojson(geom)::json as */ ${geom} as geom from ${optimizedSQL ? `(${optimizedSQL})` : table} q where ${where}`;

    if (sql === '1') return q;

    // Dominant geometry type decides the feature limit below.
    const { st_geometrytype: geomType = 'point' } = await pg
      .query(
        `select st_geometrytype(${geom}), count(*) from ${table}
        where ${where} group by st_geometrytype(${geom})`
      )
      .then((res) => res.rows?.[0] || {});

    // Fix: escape single quotes in the user-supplied id before it is
    // interpolated into SQL.
    const idSafe = id ? String(id).replace(/'/g, "''") : id;

    const q1 = `SELECT 'FeatureCollection' As type, json_agg(f) As features FROM (
    SELECT 'Feature' As type, row_number() over() as id,
    st_asgeojson(st_force2d(${query.srid
      ? `st_transform(${type === 'centroid' ? `st_centroid(${geom})` : geom},${query.srid})`
      : `${type === 'centroid' || query.point || query.centroid ? `st_centroid(${geom})` : geom}`
    }), 6, 0)::json as geometry,
    (select row_to_json(tc) from (select ${xName ? `${xName},` : ''}
    ${data.style?.colorAttr ? `${data.style.colorAttr},` : ''}
    ${pkey} as id,(select file_path from crm.files
    where entity_id=q.${pkey}::text and file_status <>'3' and ext in ('png','jpg') limit 1) as image
    )tc) as properties
    from (${q})q where ${idSafe && pkey ? ` ${pkey} = '${idSafe}' and ` : ''} ${geom} is not null
    ${data.query ? ` and ${data.query}` : ''}
    ${query.extent ? `and ${geom} && 'BOX(${query.extent.split(',').reduce((p, el, i) => p + el + (i % 2 ? ',' : ' '), '')})'::box2d` : ''}
    ${types[type] ? ` and ST_GeometryType(${geom}) = any ('{ ${types[type]} }') ` : ''}
    limit ${geomType?.toLowerCase()?.includes('point') ? '15000' : '2500'})f`;

    if (sql === '2') return q1;

    // auto Index
    autoIndex({ table, columns: [xName] });

    const res = await pg.query(q1).then((r) => r.rows?.[0] || {});

    await mkdir(path.dirname(file), { recursive: true });
    await writeFile(file, JSON.stringify(res));

    return res;
  } catch (err) {
    logger.file('bi/geojson', { level: 'ERROR', error: err.toString(), query });
    return { error: err.toString(), status: 500 };
  }
}
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import { createHash } from 'node:crypto';
|
|
3
|
+
import { existsSync } from 'node:fs';
|
|
4
|
+
import { readFile, writeFile, mkdir, stat } from 'node:fs/promises';
|
|
5
|
+
|
|
6
|
+
import { pgClients, getFilterSQL, getMeta, getFolder } from '@opengis/fastify-table/utils.js';
|
|
7
|
+
|
|
8
|
+
import { getWidget } from '../../../../utils.js';
|
|
9
|
+
|
|
10
|
+
// One hour in milliseconds (cache TTL is hourMs * 24).
const hourMs = 3.6e6;
// Hard cap on the number of hexagons returned.
const maxLimit = 2500;

/**
 * Heatmap controller: aggregates a widget metric over an ST_HexagonGrid and
 * returns a GeoJSON FeatureCollection, cached on disk for 24 hours.
 *
 * Query: widget + dashboard (required), filter, search, size (hexagon size),
 * metric, operator (sum|min|max|avg), limit, nocache,
 * sql ('1'/'2' return the generated SQL, admin users only).
 *
 * @returns {object|string} FeatureCollection, raw SQL string, or
 *   { message|error, status } on failure
 */
export default async function heatmap(req, reply) {
  const { query = {}, user = {} } = req;

  const { widget, dashboard, filter, search, size = 0.1 } = query;

  // (+'abc' || 0) is 0, so non-numeric sizes are rejected here before `size`
  // is interpolated into SQL below.
  if (query.size && (+query.size || 0) <= 0) {
    return { message: 'param size is invalid', status: 400 };
  }

  if (!dashboard || !widget) {
    return { message: 'not enough params: dashboard / widget', status: 400 };
  }

  const { data } = await getWidget({ pg: req.pg, widget, dashboard });

  if (!data?.table) {
    return { message: 'widget not found: ' + widget, status: 400 };
  }

  const limit = Math.min(+query.limit || maxLimit, maxLimit);
  const hash = [search, filter, limit].filter((el) => el).join();

  const root = getFolder(req, 'local');
  const file = path.join(
    root,
    `/map/geojson/heatmap/${data.table}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}.geojson`
  );

  if (existsSync(file) && !query.nocache && !query.sql) {
    const stats = await stat(file);
    const ageMs = Date.now() - new Date(stats.birthtime).getTime();
    // Fix: the previous comparison (birthTime - now > 24h) was inverted and
    // never expired the cache; serve the file only while younger than 24h.
    if (ageMs <= hourMs * 24) {
      // Fix: fall back to the string '{}' — the old `|| {}` made JSON.parse
      // receive "[object Object]" and throw on an empty cache file.
      return JSON.parse((await readFile(file, 'utf-8')) || '{}');
    }
  }

  const pg = data.pg || req.pg || pgClients.client;

  if (!pg.pk?.[data.table]) {
    return { message: `table not found: ${data.table}`, status: 404 };
  }

  try {
    // Metric precedence: explicit query param, then widget config, which may
    // be [{ name }], a bare array, or a scalar.
    const metric = query.metric || data.metrics?.[0]?.name || (Array.isArray(data.metrics) ? data.metrics?.[0] : data.metrics);

    const operator = metric
      ? (['sum', 'min', 'max', 'avg'].find(el => el === query.operator) || 'sum')
      : undefined;

    const aggregator = metric
      ? `${operator}(${metric})`
      : 'count(*)';

    const { geom, columns } = await getMeta({ pg, table: data.table });

    const { dataTypeID } = columns.find(col => col.name === metric) || {};

    // The column lookup above also whitelists `metric` before it reaches SQL.
    if (metric && !dataTypeID) {
      return { message: `metric column not found: ${metric}`, status: 404 };
    }

    // Fix: only validate the column type when a metric was requested — the
    // unguarded check made the count(*) (no-metric) path always fail.
    if (metric && !['integer', 'numeric', 'double precision'].includes(pg.pgType[dataTypeID])) {
      return { message: `metric column invalid type: ${metric} (${pg.pgType[dataTypeID]})`, status: 404 };
    }

    if (!geom) {
      return { message: `geometry column not found: ${data.table}`, status: 404 };
    }

    const { optimizedSQL = `select * from ${data.table} where 1=1` } = hash ? await getFilterSQL({ pg, table: data.table, filter, search }) : {};

    const subQuery = `SELECT ${aggregator} AS metric, hex.geom FROM (
    SELECT ST_SetSRID( (ST_HexagonGrid(${size}, ST_Extent(q.${geom})) ).geom, 4326 ) as geom FROM ( ${optimizedSQL})q
    )hex
    LEFT JOIN ( ${optimizedSQL} )pts
    ON ST_Within(pts.${geom}, hex.geom)
    JOIN ( SELECT ST_ConvexHull(ST_Collect(${geom})) AS mask FROM ( ${optimizedSQL} )q )point_mask
    ON ST_Intersects(hex.geom, point_mask.mask)
    WHERE 1=1 /*and pts.${geom} is not null AND st_srid(pts.${geom}) > 0*/
    GROUP BY hex.geom
    limit ${limit}`;

    if (query.sql === '1' && user?.user_type?.includes('admin')) return subQuery;

    const q = `SELECT 'FeatureCollection' As type, json_agg(f) As features FROM (
    SELECT
    'Feature' As type,
    row_number() over() as id,
    st_asgeojson(geom, 6, 0)::json as geometry,
    json_build_object( 'metric', metric ) as properties
    from (${subQuery})sq
    )f`;

    if (query.sql === '2' && user?.user_type?.includes('admin')) return q;

    const geojson = await pg.query(q)
      .then(el => el.rows?.[0] || {});

    await mkdir(path.dirname(file), { recursive: true });
    await writeFile(file, JSON.stringify(geojson));

    return geojson;
  } catch (err) {
    // Consistency fix: sibling controllers return a 500 payload instead of
    // letting pg/fs errors propagate as unhandled rejections.
    return { error: err.toString(), status: 500 };
  }
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import { pgClients, getFilterSQL, getSelectVal } from '@opengis/fastify-table/utils.js';
|
|
2
|
+
|
|
3
|
+
import { getWidget } from '../../../../utils.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Map widget summary controller: returns legend colors (per data.color),
 * legend size quantiles (per first metric), table bounds/extent and the
 * widget's layer/style/controls config.
 *
 * @param {object} req - fastify request; uses req.query { dashboard, widget, filter }
 * @returns {object} legend/config payload or { message, status } on failure
 */
export default async function map(req) {
  const { query = {} } = req;
  const { dashboard, widget } = query;

  const { pg = req.pg || pgClients.client, data, type, layers, style, controls } = await getWidget({ pg: req.pg, dashboard, widget });

  if (!['map'].includes(type)) {
    return { message: 'access restricted: invalid widget type', status: 403 };
  }
  if (!data?.table) {
    return { message: 'invalid widget: param table is required', status: 400 };
  }
  // Fix: optional-chain the pk lookup like the sibling controllers — a
  // missing pg.pk previously threw a TypeError instead of returning the 400.
  if (!pg.pk?.[data?.table]) {
    return { message: 'invalid widget: table pkey not found', status: 400 };
  }

  const { q = '' } = await getFilterSQL({
    pg,
    table: data?.table,
    filter: query.filter,
  });

  const res = {};
  if (data?.color) {
    const { rows = [] } = await pg.query(
      `select count(*), "${data.color}" as val from ${data.table} where ${data.query || '1=1'} group by "${data.color}"`
    );
    if (data?.cls) {
      // Resolve class-list ids to display text; fall back to the raw value.
      const vals = await getSelectVal({
        pg, name: data.cls, values: rows.map(el => el.val), ar: true,
      });
      rows.forEach(row => Object.assign(row, { ...vals?.find?.(el => el.id === row.val) || { text: row.val } }));
    }
    Object.assign(res, { colors: rows }); // legend colors
  }
  if (data?.metrics?.length) {
    const metric = data?.metrics[0];
    // Quantiles (0/25/50/75/100) of the first metric drive legend sizing.
    const q1 = `select PERCENTILE_CONT(0) WITHIN GROUP (ORDER BY "${metric}") as "0",
    PERCENTILE_CONT(0.25) WITHIN GROUP (ORDER BY "${metric}") as "25",
    PERCENTILE_CONT(0.50) WITHIN GROUP (ORDER BY "${metric}") as "50",
    PERCENTILE_CONT(0.75) WITHIN GROUP (ORDER BY "${metric}") as "75",
    PERCENTILE_CONT(1) WITHIN GROUP (ORDER BY "${metric}") as "100" from ${data.table} where ${data.query || '1=1'} and ${q || '1=1'}`;
    const sizes = await pg
      .query(q1)
      .then(el => Object.values(el.rows?.[0] || {}));
    Object.assign(res, { sizes }); // legend sizes
  }
  const { bounds, extentStr } = await pg.query(`select count(*),
  st_asgeojson(st_extent(geom))::json as bounds,
  replace(regexp_replace(st_extent(geom)::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
  from ${data.table} where ${data.query || '1=1'}`).then(el => el.rows?.[0] || {});
  const extent = extentStr ? extentStr.split(',') : undefined;

  Object.assign(res, {
    layers,
    style,
    controls,
    columns: data.columns,
    bounds, // map bounds
    extent,
    top: [], // top 10
    bottom: [], // bottom 10
  });
  return res;
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { config, logger, pgClients } from '@opengis/fastify-table/utils.js';
|
|
2
|
+
|
|
3
|
+
/**
 * Ensures bi.cluster holds the reference polygons for a cluster type by
 * downloading `<cluster>-ua.geojson` from the CDN and upserting when the
 * stored row count differs (or always, when config.debug is set).
 *
 * @param {object} args
 * @param {object} [args.pg=pgClients.client] - pg client with .query
 * @param {string} args.cluster - cluster type identifier
 * @returns {object|null|undefined} null on missing args, an
 *   { message, status } error payload on failure, undefined on success
 *   (callers treat any truthy return as an error).
 */
export default async function downloadClusterData({ pg = pgClients.client, cluster }) {
  if (!pg || !cluster) return null;
  try {
    // Fix: fetch moved inside try — a network failure previously escaped as
    // an unhandled rejection instead of the 500 payload below.
    const res = await fetch(`https://cdn.softpro.ua/data/bi/${cluster}-ua.geojson`);
    if (res?.status !== 200) {
      return {
        // Fix: report the actual remote filename (.geojson, not .json).
        message: `cluster file not found: ${cluster}-ua.geojson`,
        status: 404,
      };
    }
    const geojson = await res.json();

    // Keep only the first occurrence of each feature name, and only features
    // that carry a geometry. A Set replaces the previous O(n²) map/indexOf
    // scan while preserving the keep-first semantics (names are marked seen
    // even for geometry-less features, as before).
    const seen = new Set();
    const features = geojson?.features?.filter((el) => {
      const name = el?.properties?.name;
      const isFirst = !seen.has(name);
      seen.add(name);
      return Boolean(el?.geometry) && isFirst;
    });
    if (!features?.length) {
      return {
        message: `cluster file empty: ${cluster}-ua.geojson`,
        status: 400,
      };
    }
    const { count = 0 } = await pg.query('select count(*)::int from bi.cluster where type=$1', [cluster])
      .then((res1) => res1.rows?.[0] || {});
    if (count !== features.length || config.debug) {
      // Values are string-built; quotes in codifier/name are doubled to keep
      // the literal well-formed.
      const values = features.map((el) => `('${el.properties.codifier?.replace(/'/g, "''") || ''}','${el.properties.name?.replace(/'/g, "''") || ''}', '${cluster}', ST_GeomFromGeoJSON('${JSON.stringify(el.geometry)}')::geometry)`).join(',');

      const { rowCount } = await pg.query(`insert into bi.cluster (codifier,title,type,geom)
      values ${values} on conflict(title,type) do update set codifier=excluded.codifier, geom=excluded.geom`);
      logger.file('bi/clusterVtile', { cluster, rowCount });
    }
  } catch (err) {
    logger.file('bi/clusterVtile/error', {
      error: err.toString(),
      filename: `${cluster}-ua.geojson`,
    });
    return {
      message: `cluster file import error: ${cluster}-ua.geojson`,
      status: 500,
    };
  }
}
|