@opengis/bi 1.0.28 → 1.0.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config.js +12 -12
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +110 -123
- package/dist/{import-file-nBKuPZWn.js → import-file-CGUzCAix.js} +17348 -17513
- package/dist/{map-component-mixin-eyru7mUO.js → map-component-mixin-BkEQzSdJ.js} +4839 -4839
- package/dist/{vs-donut-QxB7xUy1.js → vs-donut-DiNanKYo.js} +1 -1
- package/dist/{vs-funnel-bar-BBWwOerT.js → vs-funnel-bar-BM7WMuDM.js} +1 -1
- package/dist/{vs-map-DYBDyDP3.js → vs-map-DLALMtkA.js} +2 -2
- package/dist/{vs-map-cluster-BM_cd8Xs.js → vs-map-cluster-DLJPVkGz.js} +2 -2
- package/dist/{vs-number-amN4sYu6.js → vs-number-EfOaU6-B.js} +1 -1
- package/dist/{vs-table-Bm5TzNWN.js → vs-table-BdxjaLf1.js} +1 -1
- package/dist/{vs-text-DJpxMHE3.js → vs-text-DhA-8Fxf.js} +4 -4
- package/package.json +2 -2
- package/server/plugins/docs.js +48 -48
- package/server/plugins/hook.js +89 -89
- package/server/plugins/vite.js +69 -69
- package/server/routes/dashboard/controllers/utils/yaml.js +11 -11
- package/server/routes/data/controllers/data.js +1 -0
- package/server/routes/data/controllers/util/chartSQL.js +2 -1
- package/server/routes/data/controllers/util/normalizeData.js +61 -61
- package/server/routes/map/controllers/cluster.js +110 -110
- package/server/routes/map/controllers/clusterVtile.js +166 -166
- package/server/routes/map/controllers/geojson.js +127 -127
- package/server/routes/map/controllers/map.js +61 -61
- package/server/routes/map/controllers/utils/downloadClusterData.js +44 -44
- package/server/routes/map/controllers/vtile.js +183 -183
- package/utils.js +12 -12
|
@@ -1,61 +1,61 @@
|
|
|
1
|
-
/**
 * Normalize widget chart parameters into the pieces needed to build a SQL query.
 *
 * Validates the request's x / groupby / granularity / metric overrides against
 * the table's actual columns (mutating `query` in place: unknown overrides are
 * deleted), then derives the SELECT expressions, WHERE clause and axis metadata.
 *
 * @param {object} data - widget config (table, x, metrics/metric, groupby, query, filterCustom, tableSQL, cls, granularity)
 * @param {object} [query] - per-request overrides; MUTATED: invalid keys are removed
 * @param {Array<{name: string, type: string}>} [columnTypes] - known table columns
 * @returns {object} { x, cls, metric, table, where, tableSQL, groupby, xName, xType, yName, yType, error }
 */
function normalizeData(data, query = {}, columnTypes = []) {
  // Reject request overrides that do not name a real column (mutates `query`).
  const missing = [];
  for (const key of ['x', 'groupby', 'granularity']) {
    const requested = query[key];
    if (columnTypes.find((col) => col.name == requested)) continue;
    if (requested && requested !== 'null') missing.push(`column not found: ${requested}`);
    // groupby='null' is an explicit "no grouping" request — keep it in query.
    if (!(key === 'groupby' && requested === 'null')) delete query[key];
  }

  // A metric override must name a numeric column, otherwise drop it.
  if (!columnTypes.find((col) => col.type === 'numeric' && col.name == query.metric)) {
    delete query.metric;
  }

  const xName = query.x || (Array.isArray(data.x) ? data.x[0] : data.x);
  const xType = columnTypes.find((col) => col.name == xName)?.type;

  // Temporal x axes get bucketed by granularity (default: year).
  const temporal = xType === 'date' || xType?.includes('timestamp');
  const granularity = temporal ? query.granularity || data.granularity || 'year' : null;
  const x = granularity ? `date_trunc('${granularity}',${xName})::date::text` : xName;

  // Widget metrics may come as a single value or an array; normalize to array.
  const metricSource = data.metrics || data.metric;
  const metrics = Array.isArray(metricSource) ? metricSource : [metricSource];

  // Aggregate SELECT list: request override wins, then the configured metrics
  // that exist as real columns, then count(*) as the last resort.
  let metric;
  if (query.metric) {
    metric = `sum(${query.metric})`;
  } else if (metrics.length) {
    const exprs = metrics
      .filter((m) => m && columnTypes.find((col) => col.name == (m?.name || m)))
      .map((m) => m.fx || `${m.operator || 'sum'}(${m.name || m})`);
    metric = exprs.join(',') || 'count(*)';
  } else {
    metric = 'count(*)';
  }

  const firstMetric = metrics?.[0];
  const yName = firstMetric?.name || firstMetric;
  const yType = columnTypes.find((col) => col.name == yName)?.type;

  const { cls, table, filterCustom } = data;
  const groupbyRaw = query.groupby || data.groupby;
  const groupby = groupbyRaw === 'null' ? null : groupbyRaw;

  // Resolve requested named custom filters into their configured SQL snippets.
  const custom = query?.filterCustom
    ?.split(',')
    ?.map((name) => filterCustom?.find((item) => item?.name === name)?.sql)
    ?.filter(Boolean)
    ?.join(' and ');
  const where = `${data.query || '1=1'} and ${custom || 'true'}`;

  const tableSQL = data.tableSQL?.length
    ? `(select * from ${data?.table} t ${data.tableSQL.join(' \n ')} where ${where})q`
    : undefined;

  return {
    x,
    cls,
    metric,
    table,
    where,
    tableSQL,
    groupby,
    xName,
    xType,
    yName,
    yType,
    error: missing.length ? missing.join(',') : undefined,
  };
}
export default normalizeData;
|
|
1
|
+
/**
 * Normalize widget chart parameters into the pieces needed to build a SQL query.
 *
 * Validates the request's x / groupby / granularity / metric overrides against
 * the table's actual columns (mutating `query` in place: unknown overrides are
 * deleted), then derives the SELECT expressions, WHERE clause and axis metadata.
 *
 * @param {object} data - widget config (table, x, metrics/metric, groupby, query, filterCustom, tableSQL, cls, granularity)
 * @param {object} [query] - per-request overrides; MUTATED: invalid keys are removed
 * @param {Array<{name: string, type: string}>} [columnTypes] - known table columns
 * @returns {object} { x, cls, metric, table, where, tableSQL, groupby, xName, xType, yName, yType, error }
 */
function normalizeData(data, query = {}, columnTypes = []) {
  // Reject request overrides that do not name a real column (mutates `query`).
  const missing = [];
  for (const key of ['x', 'groupby', 'granularity']) {
    const requested = query[key];
    if (columnTypes.find((col) => col.name == requested)) continue;
    if (requested && requested !== 'null') missing.push(`column not found: ${requested}`);
    // groupby='null' is an explicit "no grouping" request — keep it in query.
    if (!(key === 'groupby' && requested === 'null')) delete query[key];
  }

  // A metric override must name a numeric column, otherwise drop it.
  if (!columnTypes.find((col) => col.type === 'numeric' && col.name == query.metric)) {
    delete query.metric;
  }

  const xName = query.x || (Array.isArray(data.x) ? data.x[0] : data.x);
  const xType = columnTypes.find((col) => col.name == xName)?.type;

  // Temporal x axes get bucketed by granularity (default: year).
  const temporal = xType === 'date' || xType?.includes('timestamp');
  const granularity = temporal ? query.granularity || data.granularity || 'year' : null;
  const x = granularity ? `date_trunc('${granularity}',${xName})::date::text` : xName;

  // Widget metrics may come as a single value or an array; normalize to array.
  const metricSource = data.metrics || data.metric;
  const metrics = Array.isArray(metricSource) ? metricSource : [metricSource];

  // Aggregate SELECT list: request override wins, then the configured metrics
  // that exist as real columns, then count(*) as the last resort.
  let metric;
  if (query.metric) {
    metric = `sum(${query.metric})`;
  } else if (metrics.length) {
    const exprs = metrics
      .filter((m) => m && columnTypes.find((col) => col.name == (m?.name || m)))
      .map((m) => m.fx || `${m.operator || 'sum'}(${m.name || m})`);
    metric = exprs.join(',') || 'count(*)';
  } else {
    metric = 'count(*)';
  }

  const firstMetric = metrics?.[0];
  const yName = firstMetric?.name || firstMetric;
  const yType = columnTypes.find((col) => col.name == yName)?.type;

  const { cls, table, filterCustom } = data;
  const groupbyRaw = query.groupby || data.groupby;
  const groupby = groupbyRaw === 'null' ? null : groupbyRaw;

  // Resolve requested named custom filters into their configured SQL snippets.
  const custom = query?.filterCustom
    ?.split(',')
    ?.map((name) => filterCustom?.find((item) => item?.name === name)?.sql)
    ?.filter(Boolean)
    ?.join(' and ');
  const where = `${data.query || '1=1'} and ${custom || 'true'}`;

  const tableSQL = data.tableSQL?.length
    ? `(select * from ${data?.table} t ${data.tableSQL.join(' \n ')} where ${where})q`
    : undefined;

  return {
    x,
    cls,
    metric,
    table,
    where,
    tableSQL,
    groupby,
    xName,
    xType,
    yName,
    yType,
    error: missing.length ? missing.join(',') : undefined,
  };
}
export default normalizeData;
|
|
@@ -1,110 +1,110 @@
|
|
|
1
|
-
import { getFilterSQL, logger, pgClients } from '@opengis/fastify-table/utils.js';

import { getWidget } from '../../../../utils.js';

import downloadClusterData from './utils/downloadClusterData.js';

// Per-process cache of cluster types whose reference data has already been
// downloaded. NOTE(review): never invalidated — presumably cluster reference
// data is static for the process lifetime; confirm.
const clusterExists = {};

/**
 * Cluster aggregation endpoint.
 *
 * Resolves the widget config, then aggregates the widget's first metric
 * column per cluster value, joining each cluster name to its row id in the
 * cluster reference table. Returns the per-cluster rows together with
 * quartile size breakpoints, the layer's bounding box, and the metric total.
 *
 * @param {object} req - request; req.query supports: widget (required),
 *   dashboard, filter, search, and sql='1' (debug: returns the raw SQL text).
 * @returns {object|string} { sizes, rows, bounds, extent, count, total } on
 *   success, a { message|error, status } object on failure, or the SQL string
 *   when query.sql === '1'.
 */
export default async function cluster(req) {
  const { query = {} } = req;
  const { widget, filter, dashboard, search } = query;

  if (!widget) {
    return { message: 'not enough params: widget', status: 400 };
  }

  // Widget config lookup; pg falls back to the request client, then the shared one.
  const { pg = req.pg || pgClients.client, data } = await getWidget({ pg: req.pg, dashboard, widget });

  // pg.pk is assumed to map table name -> primary-key column — TODO confirm.
  const pkey = pg.pk?.[data?.table];

  if (!pkey) {
    return {
      // NOTE(review): `widget` is always truthy here (guarded above), so this
      // ternary always yields 'widget'.
      message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
      status: 400,
    };
  }

  // data param
  const {
    table,
    query: where = '1=1', // widget-level WHERE fragment
    metrics = [],
    cluster, // cluster column name; shadows this function's own name below
    clusterTable = {},
  } = data;

  if (!cluster) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: cluster column not specified`,
      status: 400,
    };
  }

  if (!metrics.length) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: metric columns not found`,
      status: 400,
    };
  }

  // Fall back to the default cluster reference table when none is configured.
  if (!clusterTable?.name) {
    Object.assign(clusterTable, {
      name: 'bi.cluster',
      title: 'title',
      query: `type='${cluster}'`,
    });
  }

  try {
    // Lazily download cluster reference data once per process; a truthy
    // result from downloadClusterData is returned as-is (treated as an error).
    if (cluster && !clusterExists[cluster]) {
      const res = await downloadClusterData({ pg, cluster });
      if (res) return res;
      clusterExists[cluster] = 1;
    }

    if (clusterTable?.name && !pg.pk?.[clusterTable?.name]) {
      return {
        message: 'invalid widget params: clusterTable pkey not found',
        status: 404,
      };
    }

    // Layer extent: GeoJSON bbox plus a "minx,miny,maxx,maxy" string.
    const { bounds, extentStr } = await pg.query(`select count(*),
st_asgeojson(st_extent(geom))::json as bounds,
replace(regexp_replace(st_extent(geom)::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
from ${table} where ${where || '1=1'}`).then((res) => res.rows?.[0] || {});
    const extent = extentStr ? extentStr.split(',') : undefined;

    // get sql
    const { optimizedSQL } =
      filter || search
        ? await getFilterSQL({ pg, table, filter, search })
        : {};

    // Aggregate first metric per cluster value; the lateral join resolves each
    // cluster name to its id in the reference table.
    // NOTE(review): `${cluster}` is quoted in SELECT but not in GROUP BY, and
    // `where` is re-applied even when the optimized subquery is used — confirm
    // both are intentional.
    const q = `select "${cluster}" as name, sum("${metrics[0]}")::float as metric
from ${optimizedSQL ? `(${optimizedSQL})` : table} q
left join lateral (select "${pg.pk?.[clusterTable?.name]}" as id from ${clusterTable?.name} where ${clusterTable?.title}=q."${cluster}" limit 1)b on 1=1
where ${where} group by ${cluster}, b.id order by sum("${metrics[0]}")::float desc`;

    // Debug escape hatch: return the SQL text instead of executing it.
    if (query.sql === '1') return q;

    // auto Index
    // autoIndex({ table, columns: (metrics || []).concat([cluster]) });

    const { rows = [] } = await pg.query(q);
    // Quartile breakpoints (min, q1, median, q3, max) for symbol sizing.
    const vals = rows.map((el) => el.metric - 0).sort((a, b) => a - b);
    const len = vals.length;
    const sizes = [
      vals[0],
      vals[Math.floor(len / 4)],
      vals[Math.floor(len / 2)],
      vals[Math.floor(len * 0.75)],
      vals[len - 1],
    ];
    return { sizes, rows, bounds, extent, count: rows.length, total: rows?.reduce((acc, curr) => (curr.metric || 0) + acc, 0) };
  } catch (err) {
    logger.file('bi/cluster/error', { error: err.toString(), query });
    return { error: err.toString(), status: 500 };
  }
}
|
|
1
|
+
import { getFilterSQL, logger, pgClients } from '@opengis/fastify-table/utils.js';

import { getWidget } from '../../../../utils.js';

import downloadClusterData from './utils/downloadClusterData.js';

// Per-process cache of cluster types whose reference data has already been
// downloaded. NOTE(review): never invalidated — presumably cluster reference
// data is static for the process lifetime; confirm.
const clusterExists = {};

/**
 * Cluster aggregation endpoint.
 *
 * Resolves the widget config, then aggregates the widget's first metric
 * column per cluster value, joining each cluster name to its row id in the
 * cluster reference table. Returns the per-cluster rows together with
 * quartile size breakpoints, the layer's bounding box, and the metric total.
 *
 * @param {object} req - request; req.query supports: widget (required),
 *   dashboard, filter, search, and sql='1' (debug: returns the raw SQL text).
 * @returns {object|string} { sizes, rows, bounds, extent, count, total } on
 *   success, a { message|error, status } object on failure, or the SQL string
 *   when query.sql === '1'.
 */
export default async function cluster(req) {
  const { query = {} } = req;
  const { widget, filter, dashboard, search } = query;

  if (!widget) {
    return { message: 'not enough params: widget', status: 400 };
  }

  // Widget config lookup; pg falls back to the request client, then the shared one.
  const { pg = req.pg || pgClients.client, data } = await getWidget({ pg: req.pg, dashboard, widget });

  // pg.pk is assumed to map table name -> primary-key column — TODO confirm.
  const pkey = pg.pk?.[data?.table];

  if (!pkey) {
    return {
      // NOTE(review): `widget` is always truthy here (guarded above), so this
      // ternary always yields 'widget'.
      message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
      status: 400,
    };
  }

  // data param
  const {
    table,
    query: where = '1=1', // widget-level WHERE fragment
    metrics = [],
    cluster, // cluster column name; shadows this function's own name below
    clusterTable = {},
  } = data;

  if (!cluster) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: cluster column not specified`,
      status: 400,
    };
  }

  if (!metrics.length) {
    return {
      message: `invalid ${widget ? 'widget' : 'dashboard'}: metric columns not found`,
      status: 400,
    };
  }

  // Fall back to the default cluster reference table when none is configured.
  if (!clusterTable?.name) {
    Object.assign(clusterTable, {
      name: 'bi.cluster',
      title: 'title',
      query: `type='${cluster}'`,
    });
  }

  try {
    // Lazily download cluster reference data once per process; a truthy
    // result from downloadClusterData is returned as-is (treated as an error).
    if (cluster && !clusterExists[cluster]) {
      const res = await downloadClusterData({ pg, cluster });
      if (res) return res;
      clusterExists[cluster] = 1;
    }

    if (clusterTable?.name && !pg.pk?.[clusterTable?.name]) {
      return {
        message: 'invalid widget params: clusterTable pkey not found',
        status: 404,
      };
    }

    // Layer extent: GeoJSON bbox plus a "minx,miny,maxx,maxy" string.
    const { bounds, extentStr } = await pg.query(`select count(*),
st_asgeojson(st_extent(geom))::json as bounds,
replace(regexp_replace(st_extent(geom)::box2d::text,'BOX\\(|\\)','','g'),' ',',') as "extentStr"
from ${table} where ${where || '1=1'}`).then((res) => res.rows?.[0] || {});
    const extent = extentStr ? extentStr.split(',') : undefined;

    // get sql
    const { optimizedSQL } =
      filter || search
        ? await getFilterSQL({ pg, table, filter, search })
        : {};

    // Aggregate first metric per cluster value; the lateral join resolves each
    // cluster name to its id in the reference table.
    // NOTE(review): `${cluster}` is quoted in SELECT but not in GROUP BY, and
    // `where` is re-applied even when the optimized subquery is used — confirm
    // both are intentional.
    const q = `select "${cluster}" as name, sum("${metrics[0]}")::float as metric
from ${optimizedSQL ? `(${optimizedSQL})` : table} q
left join lateral (select "${pg.pk?.[clusterTable?.name]}" as id from ${clusterTable?.name} where ${clusterTable?.title}=q."${cluster}" limit 1)b on 1=1
where ${where} group by ${cluster}, b.id order by sum("${metrics[0]}")::float desc`;

    // Debug escape hatch: return the SQL text instead of executing it.
    if (query.sql === '1') return q;

    // auto Index
    // autoIndex({ table, columns: (metrics || []).concat([cluster]) });

    const { rows = [] } = await pg.query(q);
    // Quartile breakpoints (min, q1, median, q3, max) for symbol sizing.
    const vals = rows.map((el) => el.metric - 0).sort((a, b) => a - b);
    const len = vals.length;
    const sizes = [
      vals[0],
      vals[Math.floor(len / 4)],
      vals[Math.floor(len / 2)],
      vals[Math.floor(len * 0.75)],
      vals[len - 1],
    ];
    return { sizes, rows, bounds, extent, count: rows.length, total: rows?.reduce((acc, curr) => (curr.metric || 0) + acc, 0) };
  } catch (err) {
    logger.file('bi/cluster/error', { error: err.toString(), query });
    return { error: err.toString(), status: 500 };
  }
}
|