@opengis/bi 1.2.2 → 1.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bi.js +1 -1
- package/dist/bi.umd.cjs +40 -40
- package/dist/import-file-MEpI7PGd.js +3509 -0
- package/dist/{vs-funnel-bar-T330oJNS.js → vs-funnel-bar-BdVcgrYG.js} +1 -1
- package/dist/{vs-list-DeHF_Oaf.js → vs-list-_1Ub562I.js} +1 -1
- package/dist/{vs-map-Skt608pM.js → vs-map-BQwf_Vmm.js} +2 -2
- package/dist/{vs-map-cluster-BRUiY_90.js → vs-map-cluster-Bu0fgucv.js} +2 -2
- package/dist/{vs-number-Dd_21nn-.js → vs-number-Dke5ThxE.js} +1 -1
- package/dist/{vs-table-BwC29Zyc.js → vs-table-C95epuWz.js} +1 -1
- package/dist/{vs-text-DEJjWxDu.js → vs-text-DAq6rCjJ.js} +1 -1
- package/package.json +3 -2
- package/server/plugins/vite.js +69 -69
- package/server/routes/dashboard/controllers/utils/yaml.js +11 -11
- package/server/routes/data/controllers/data.js +2 -2
- package/server/routes/data/index.mjs +7 -1
- package/server/routes/map/controllers/cluster.js +125 -125
- package/server/routes/map/controllers/clusterVtile.js +166 -166
- package/server/routes/map/controllers/geojson.js +127 -127
- package/server/routes/map/controllers/map.js +69 -69
- package/server/routes/map/controllers/utils/downloadClusterData.js +44 -44
- package/server/routes/map/controllers/vtile.js +183 -183
- package/utils.js +12 -12
- package/dist/import-file-D8jh74Dz.js +0 -3543
|
@@ -1,166 +1,166 @@
|
|
|
1
|
-
// Route controller: clustered vector tiles (Mapbox Vector Tile / MVT) for BI
// map widgets. Joins the widget's data table to a cluster-polygon table,
// aggregates the first metric per cluster, and renders the tile with PostGIS.
import Sphericalmercator from '@mapbox/sphericalmercator';

import path from 'path';
import { createHash } from 'crypto';
import { writeFile, mkdir } from 'fs/promises';

import { logger, getFolder, getFilterSQL, autoIndex, pgClients } from '@opengis/fastify-table/utils.js';

import { getWidget } from '../../../../utils.js';

import downloadClusterData from './utils/downloadClusterData.js';

// Converts z/x/y tile coordinates into a spherical-mercator bounding box.
const mercator = new Sphericalmercator({ size: 256 });

// Per-process memo so cluster reference data is fetched at most once per key.
const clusterExists = {};

/**
 * Fastify handler: builds and returns one MVT tile for a widget's cluster layer.
 *
 * @param {object} req   uses req.params {z, x, y} and req.query
 *                       {widget, filter, dashboard, search, clusterZoom, nocache, pointZoom, sql}
 * @param {object} reply Fastify reply; used to send the protobuf buffer
 * @returns protobuf tile via reply; debug SQL/rows when query.sql is '1'|'2'|'3';
 *          or a plain { message|error, status } object on failure
 */
export default async function clusterVtile(req, reply) {
  const { params = {}, query = {} } = req;
  const { z, y } = params;
  // Strip a ".mvt"-style extension from the x segment and coerce to a number.
  const x = params.x?.split('.')[0] - 0;

  // NOTE(review): x === 0 is a valid tile column but fails this truthiness
  // check — confirm whether the x=0 column of the tile grid must be served.
  if (!x || !y || !z) {
    return { message: 'not enough params: xyz', status: 400 };
  }

  const { widget, filter, dashboard, search, clusterZoom, nocache, pointZoom } =
    query;

  if (!widget) {
    return { message: 'not enough params: widget', status: 400 };
  }

  // Resolve the widget configuration and the pg client it should run against.
  const { pg = req.pg || pgClients.client, data } = await getWidget({ pg: req.pg, dashboard, widget });

  const headers = {
    'Content-Type': 'application/x-protobuf',
    // Debug (?sql=...) and nocache requests must not be cached downstream.
    'Cache-Control':
      nocache || query.sql ? 'no-cache' : 'public, max-age=86400',
  };

  // Cache key varies by pointZoom and filter only.
  const hash = [pointZoom, filter].filter((el) => el).join();

  const root = getFolder(req);
  const file = path.join(
    root,
    `/map/vtile/${widget}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}${z}/${x}/${y}.mvt`
  );

  try {
    if (!data?.table) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table not specified`,
        status: 400,
      };
    }

    const pkey = pg.pk?.[data?.table];

    if (!pkey) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
        status: 400,
      };
    }

    // data param
    const {
      table,
      query: where = '1=1',
      metrics = [],
      cluster,
      clusterTable = {},
    } = data;
    // Default cluster-polygon source when the widget does not configure one.
    if (!clusterTable?.name) {
      Object.assign(clusterTable, {
        name: 'bi.cluster',
        title: 'title',
        query: `type='${data.cluster}'`,
      });
    }

    // Lazily download the cluster reference data once per process.
    if (cluster && !clusterExists[data.cluster]) {
      const res = await downloadClusterData({ pg, cluster });
      if (res) return res;
      clusterExists[cluster] = 1;
    }

    if (!cluster) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: cluster column not specified`,
        status: 400,
      };
    }

    if (!metrics.length) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: metric columns not found`,
        status: 400,
      };
    }

    // get sql
    const { optimizedSQL } =
      filter || search
        ? await getFilterSQL({ pg, table, filter, search })
        : {};

    // Aggregate the first metric per cluster code, laterally joined to the
    // matching cluster polygon (id/title/geom).
    // NOTE(review): table/column names are interpolated directly into SQL —
    // presumably they come from widget config, not end users; verify.
    const q = `select ${clusterTable?.column || cluster} as name, ${clusterTable?.operator || 'sum'}("${metrics[0]}")::float as metric, b.*
from ${optimizedSQL ? `(${optimizedSQL})` : table} q
left join lateral (select "${pg.pk?.[clusterTable?.name]}" as id, ${clusterTable?.title} as title,
${clusterTable?.geom || 'geom'} as geom from ${clusterTable?.name}
where ${clusterTable?.query || '1=1'} and ${clusterTable?.codifierColumn || 'codifier'}=q."${clusterTable?.column || cluster}" limit 1
)b on 1=1
where ${where} group by
q."${clusterTable?.column || cluster}", b.id, b.title, b.geom`;

    // ?sql=1 returns the inner aggregation SQL for debugging.
    if (query.sql === '1') return q;

    // Below pointZoom render centroids instead of full polygons.
    const geomCol =
      parseInt(z, 10) < parseInt(pointZoom, 10)
        ? `ST_Centroid(${clusterTable?.geom || data?.geom || 'geom'})`
        : clusterTable?.geom || data?.geom || 'geom';

    // NOTE(review): y is passed in the column position and x in the row
    // position, matching ST_TileEnvelope(${z},${y},${x}) below — looks like a
    // deliberate axis swap in this route's params; confirm against callers.
    const bbox = mercator.bbox(+y, +x, +z, false /* , '900913' */);
    const bbox2d = `'BOX(${bbox[0]} ${bbox[1]},${bbox[2]} ${bbox[3]})'::box2d`;

    // Render the tile: random offset + row_number gives ST_AsMVT a unique
    // feature id per row; only valid (multi)polygon geometries are kept.
    const q1 = `SELECT ST_AsMVT(q, 'bi', 4096, 'geom','row') as tile
FROM (
SELECT
floor(random() * 100000 + 1)::int + row_number() over() as row,

${pg.pk?.[clusterTable?.name] ? 'id,' : ''} name, metric, title,

ST_AsMVTGeom(st_transform(${geomCol}, 3857),ST_TileEnvelope(${z},${y},${x})::box2d,4096,256,false) as geom

FROM (select * from (${q})q where geom && ${bbox2d}

and geom is not null and st_srid(geom) >0

and ST_GeometryType(geom) = any ('{ "ST_Polygon", "ST_MultiPolygon" }')

limit 3000)q
) q`;

    // ?sql=2 returns the tile-building SQL for debugging.
    if (query.sql === '2') return q1;

    // auto Index
    autoIndex({ table, columns: (metrics || []).concat([cluster]) });

    const { rows = [] } = await pg.query(q1);

    // ?sql=3 returns the raw tile rows for debugging.
    if (query.sql === '3') return rows.map((el) => el.tile);

    const buffer = Buffer.concat(rows.map((el) => Buffer.from(el.tile)));

    // Persist into the on-disk tile cache unless caching is disabled.
    if (!nocache) {
      await mkdir(path.dirname(file), { recursive: true });
      await writeFile(file, buffer, 'binary');
    }

    return reply.headers(headers).send(buffer);
  } catch (err) {
    logger.file('bi/clusterVtile/error', { error: err.toString(), query, params });
    return { error: err.toString(), status: 500 };
  }
}
|
|
1
|
+
// Route controller: clustered vector tiles (Mapbox Vector Tile / MVT) for BI
// map widgets. Joins the widget's data table to a cluster-polygon table,
// aggregates the first metric per cluster, and renders the tile with PostGIS.
import Sphericalmercator from '@mapbox/sphericalmercator';

import path from 'path';
import { createHash } from 'crypto';
import { writeFile, mkdir } from 'fs/promises';

import { logger, getFolder, getFilterSQL, autoIndex, pgClients } from '@opengis/fastify-table/utils.js';

import { getWidget } from '../../../../utils.js';

import downloadClusterData from './utils/downloadClusterData.js';

// Converts z/x/y tile coordinates into a spherical-mercator bounding box.
const mercator = new Sphericalmercator({ size: 256 });

// Per-process memo so cluster reference data is fetched at most once per key.
const clusterExists = {};

/**
 * Fastify handler: builds and returns one MVT tile for a widget's cluster layer.
 *
 * @param {object} req   uses req.params {z, x, y} and req.query
 *                       {widget, filter, dashboard, search, clusterZoom, nocache, pointZoom, sql}
 * @param {object} reply Fastify reply; used to send the protobuf buffer
 * @returns protobuf tile via reply; debug SQL/rows when query.sql is '1'|'2'|'3';
 *          or a plain { message|error, status } object on failure
 */
export default async function clusterVtile(req, reply) {
  const { params = {}, query = {} } = req;
  const { z, y } = params;
  // Strip a ".mvt"-style extension from the x segment and coerce to a number.
  // A missing params.x yields NaN here (undefined - 0).
  const x = params.x?.split('.')[0] - 0;

  // FIX: x === 0 is a valid tile column, so a truthiness check (`!x`) wrongly
  // rejected the western-most column. Number.isFinite accepts 0 and still
  // rejects the NaN produced by a missing/non-numeric params.x.
  if (!Number.isFinite(x) || !y || !z) {
    return { message: 'not enough params: xyz', status: 400 };
  }

  const { widget, filter, dashboard, search, clusterZoom, nocache, pointZoom } =
    query;

  if (!widget) {
    return { message: 'not enough params: widget', status: 400 };
  }

  // Resolve the widget configuration and the pg client it should run against.
  const { pg = req.pg || pgClients.client, data } = await getWidget({ pg: req.pg, dashboard, widget });

  const headers = {
    'Content-Type': 'application/x-protobuf',
    // Debug (?sql=...) and nocache requests must not be cached downstream.
    'Cache-Control':
      nocache || query.sql ? 'no-cache' : 'public, max-age=86400',
  };

  // Cache key varies by pointZoom and filter only.
  const hash = [pointZoom, filter].filter((el) => el).join();

  const root = getFolder(req);
  const file = path.join(
    root,
    `/map/vtile/${widget}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}${z}/${x}/${y}.mvt`
  );

  try {
    if (!data?.table) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table not specified`,
        status: 400,
      };
    }

    const pkey = pg.pk?.[data?.table];

    if (!pkey) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
        status: 400,
      };
    }

    // data param
    const {
      table,
      query: where = '1=1',
      metrics = [],
      cluster,
      clusterTable = {},
    } = data;
    // Default cluster-polygon source when the widget does not configure one.
    if (!clusterTable?.name) {
      Object.assign(clusterTable, {
        name: 'bi.cluster',
        title: 'title',
        query: `type='${data.cluster}'`,
      });
    }

    // Lazily download the cluster reference data once per process.
    if (cluster && !clusterExists[data.cluster]) {
      const res = await downloadClusterData({ pg, cluster });
      if (res) return res;
      clusterExists[cluster] = 1;
    }

    if (!cluster) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: cluster column not specified`,
        status: 400,
      };
    }

    if (!metrics.length) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: metric columns not found`,
        status: 400,
      };
    }

    // get sql
    const { optimizedSQL } =
      filter || search
        ? await getFilterSQL({ pg, table, filter, search })
        : {};

    // Aggregate the first metric per cluster code, laterally joined to the
    // matching cluster polygon (id/title/geom).
    // NOTE(review): table/column names are interpolated directly into SQL —
    // presumably they come from widget config, not end users; verify.
    const q = `select ${clusterTable?.column || cluster} as name, ${clusterTable?.operator || 'sum'}("${metrics[0]}")::float as metric, b.*
from ${optimizedSQL ? `(${optimizedSQL})` : table} q
left join lateral (select "${pg.pk?.[clusterTable?.name]}" as id, ${clusterTable?.title} as title,
${clusterTable?.geom || 'geom'} as geom from ${clusterTable?.name}
where ${clusterTable?.query || '1=1'} and ${clusterTable?.codifierColumn || 'codifier'}=q."${clusterTable?.column || cluster}" limit 1
)b on 1=1
where ${where} group by
q."${clusterTable?.column || cluster}", b.id, b.title, b.geom`;

    // ?sql=1 returns the inner aggregation SQL for debugging.
    if (query.sql === '1') return q;

    // Below pointZoom render centroids instead of full polygons.
    const geomCol =
      parseInt(z, 10) < parseInt(pointZoom, 10)
        ? `ST_Centroid(${clusterTable?.geom || data?.geom || 'geom'})`
        : clusterTable?.geom || data?.geom || 'geom';

    // NOTE(review): y is passed in the column position and x in the row
    // position, matching ST_TileEnvelope(${z},${y},${x}) below — this axis
    // swap appears intentional for this route's params; confirm with callers.
    const bbox = mercator.bbox(+y, +x, +z, false /* , '900913' */);
    const bbox2d = `'BOX(${bbox[0]} ${bbox[1]},${bbox[2]} ${bbox[3]})'::box2d`;

    // Render the tile: random offset + row_number gives ST_AsMVT a unique
    // feature id per row; only valid (multi)polygon geometries are kept.
    const q1 = `SELECT ST_AsMVT(q, 'bi', 4096, 'geom','row') as tile
FROM (
SELECT
floor(random() * 100000 + 1)::int + row_number() over() as row,

${pg.pk?.[clusterTable?.name] ? 'id,' : ''} name, metric, title,

ST_AsMVTGeom(st_transform(${geomCol}, 3857),ST_TileEnvelope(${z},${y},${x})::box2d,4096,256,false) as geom

FROM (select * from (${q})q where geom && ${bbox2d}

and geom is not null and st_srid(geom) >0

and ST_GeometryType(geom) = any ('{ "ST_Polygon", "ST_MultiPolygon" }')

limit 3000)q
) q`;

    // ?sql=2 returns the tile-building SQL for debugging.
    if (query.sql === '2') return q1;

    // auto Index
    autoIndex({ table, columns: (metrics || []).concat([cluster]) });

    const { rows = [] } = await pg.query(q1);

    // ?sql=3 returns the raw tile rows for debugging.
    if (query.sql === '3') return rows.map((el) => el.tile);

    const buffer = Buffer.concat(rows.map((el) => Buffer.from(el.tile)));

    // Persist into the on-disk tile cache unless caching is disabled.
    if (!nocache) {
      await mkdir(path.dirname(file), { recursive: true });
      await writeFile(file, buffer, 'binary');
    }

    return reply.headers(headers).send(buffer);
  } catch (err) {
    logger.file('bi/clusterVtile/error', { error: err.toString(), query, params });
    return { error: err.toString(), status: 500 };
  }
}
|
|
@@ -1,127 +1,127 @@
|
|
|
1
|
-
// Route controller: renders a widget's map layer as a GeoJSON FeatureCollection,
// built entirely in PostGIS (st_asgeojson + json_agg) and cached on disk.
import path from 'path';
import { createHash } from 'crypto';
import { writeFile, mkdir, readFile, stat } from 'fs/promises';
import { existsSync, /* readdirSync, */ readFileSync } from 'fs';


import { getFolder, getFilterSQL, autoIndex, logger, pgClients } from '@opengis/fastify-table/utils.js';

import normalizeData from '../../data/controllers/util/normalizeData.js';

import { getWidget } from '../../../../utils.js';

// Maps the ?type= query value to the PostGIS geometry types it allows through.
const types = {
  point: 'ST_Point' /* ,ST_MultiPoint */,
  polygon: 'ST_Polygon,ST_MultiPolygon',
};
// One hour in milliseconds; the cache lifetime below is hourMs * 24.
const hourMs = 3.6e6;

/**
 * Fastify handler: returns the widget's features as GeoJSON.
 *
 * @param {object} req   uses req.query {filter, widget, sql, type, nocache, id,
 *                       dashboard, geom, pointZoom, srid, point, centroid, extent}
 * @param {object} reply unused; the handler returns plain objects
 * @returns GeoJSON FeatureCollection object; debug SQL when sql is '1'|'2';
 *          or a plain { message|error, status } object on failure
 */
export default async function geojson(req, reply) {
  const { query = {} } = req;

  const {
    filter,
    widget,
    sql,
    type,
    nocache,
    id,
    dashboard,
    geom = 'geom',
    pointZoom = 0,
  } = query;

  if (!widget && !dashboard) {
    return { message: 'not enough params: widget', status: 400 };
  }

  // getWidget returns either the widget config or an error object with .status.
  const data = await getWidget({ pg: req.pg, dashboard, widget });
  if (data.status) return data;

  const pg = data.pg || req.pg || pgClients.client;
  // Cache key varies by pointZoom and filter only.
  const hash = [pointZoom, filter].filter((el) => el).join();

  const root = getFolder(req);
  const file = path.join(
    root,
    `/map/geojson/${widget}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}.geojson`
  );

  // Serve from the on-disk cache when present and not explicitly bypassed.
  if (existsSync(file)) {
    const timeNow = Date.now();
    const stats = await stat(file);
    const birthTime = new Date(stats.birthtime).getTime();
    // NOTE(review): birthTime - timeNow is negative for any existing file, so
    // this condition is always true and the cache never expires; the intended
    // age check looks like timeNow - birthTime > hourMs * 24 — confirm.
    if (!(birthTime - timeNow > hourMs * 24) && !nocache) {
      // NOTE(review): the `|| {}` fallback feeds an object to JSON.parse,
      // which throws on "[object Object]"; `|| '{}'` seems intended — confirm.
      const res = JSON.parse((await readFile(file, 'utf-8')) || {});
      return res;
    }
  }

  try {
    const pkey = pg.pk?.[data?.table];
    if (!pkey) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
        status: 400,
      };
    }

    // data param
    const { table, where = '1=1', xName, x } = normalizeData(data, query);

    if (!xName && !x) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: x axis column not specified`,
        status: 400,
      };
    }

    // get sql
    const filterQ = filter
      ? await getFilterSQL({ pg, table, filter, query })
      : undefined;
    // Base row source: pk, x-axis column and geometry, optionally pre-filtered.
    const q = `select "${pkey}", "${xName || x}", /* st_asgeojson(geom)::json as */ ${geom} as geom from ${filterQ ? `(${filterQ})` : table} q where ${where}`;

    // ?sql=1 returns the base SQL for debugging.
    if (sql === '1') return q;

    // Sample the dominant geometry type to pick the feature limit below.
    const { st_geometrytype: geomType = 'point' } = await pg
      .query(
        `select st_geometrytype(${geom}), count(*) from ${table}
where ${where} group by st_geometrytype(${geom})`
      )
      .then((res) => res.rows?.[0] || {});

    // Build the FeatureCollection in SQL: geometry (optionally centroid and/or
    // reprojected), plus properties (x column, color attr, id, first image).
    const q1 = `SELECT 'FeatureCollection' As type, json_agg(f) As features FROM (
SELECT 'Feature' As type, row_number() over() as id,
st_asgeojson(st_force2d(${query.srid
      ? `st_transform(${type === 'centroid' ? `st_centroid(${geom})` : geom},${query.srid})`
      : `${type === 'centroid' || query.point || query.centroid ? `st_centroid(${geom})` : geom}`
    }), 6, 0)::json as geometry,
(select row_to_json(tc) from (select ${'' ? `${''} as status, ` : ''}
${xName ? `${xName},` : ''}
${data.style?.colorAttr ? `${data.style.colorAttr},` : ''}
${pkey} as id,(select file_path from crm.files
where entity_id=q.${pkey}::text and file_status <>'3' and ext in ('png','jpg') limit 1) as image
)tc) as properties
from (${q})q where ${id && pkey ? ` ${pkey} = '${id}' and ` : ''} ${geom} is not null
${data.query ? ` and ${data.query}` : ''}
${query.extent ? `and ${geom} && 'BOX(${query.extent.split(',').reduce((p, el, i) => p + el + (i % 2 ? ',' : ' '), '')})'::box2d` : ''}
${types[type] ? ` and ST_GeometryType(${geom}) = any ('{ ${types[type]} }') ` : ''}
limit ${geomType?.toLowerCase()?.includes('point') ? '15000' : '2500'})f`;

    // ?sql=2 returns the FeatureCollection SQL for debugging.
    if (sql === '2') return q1;

    // auto Index
    autoIndex({ table, columns: [xName] });

    const res = await pg.query(q1).then((res) => res.rows?.[0] || {});

    // Refresh the on-disk cache with the newly built collection.
    await mkdir(path.dirname(file), { recursive: true });
    await writeFile(file, JSON.stringify(res));

    return res;
  } catch (err) {
    logger.file('bi/geojson', { level: 'ERROR', error: err.toString(), query });
    return { error: err.toString(), status: 500 };
  }
}
|
|
1
|
+
// Route controller: renders a widget's map layer as a GeoJSON FeatureCollection,
// built entirely in PostGIS (st_asgeojson + json_agg) and cached on disk.
import path from 'path';
import { createHash } from 'crypto';
import { writeFile, mkdir, readFile, stat } from 'fs/promises';
import { existsSync, /* readdirSync, */ readFileSync } from 'fs';


import { getFolder, getFilterSQL, autoIndex, logger, pgClients } from '@opengis/fastify-table/utils.js';

import normalizeData from '../../data/controllers/util/normalizeData.js';

import { getWidget } from '../../../../utils.js';

// Maps the ?type= query value to the PostGIS geometry types it allows through.
const types = {
  point: 'ST_Point' /* ,ST_MultiPoint */,
  polygon: 'ST_Polygon,ST_MultiPolygon',
};
// One hour in milliseconds; the cache lifetime below is hourMs * 24.
const hourMs = 3.6e6;

/**
 * Fastify handler: returns the widget's features as GeoJSON.
 *
 * @param {object} req   uses req.query {filter, widget, sql, type, nocache, id,
 *                       dashboard, geom, pointZoom, srid, point, centroid, extent}
 * @param {object} reply unused; the handler returns plain objects
 * @returns GeoJSON FeatureCollection object; debug SQL when sql is '1'|'2';
 *          or a plain { message|error, status } object on failure
 */
export default async function geojson(req, reply) {
  const { query = {} } = req;

  const {
    filter,
    widget,
    sql,
    type,
    nocache,
    id,
    dashboard,
    geom = 'geom',
    pointZoom = 0,
  } = query;

  if (!widget && !dashboard) {
    return { message: 'not enough params: widget', status: 400 };
  }

  // getWidget returns either the widget config or an error object with .status.
  const data = await getWidget({ pg: req.pg, dashboard, widget });
  if (data.status) return data;

  const pg = data.pg || req.pg || pgClients.client;
  // Cache key varies by pointZoom and filter only.
  const hash = [pointZoom, filter].filter((el) => el).join();

  const root = getFolder(req);
  const file = path.join(
    root,
    `/map/geojson/${widget}/${hash ? `${createHash('sha1').update(hash).digest('base64')}/` : ''}.geojson`
  );

  // Serve from the on-disk cache while the file is younger than 24 hours.
  if (existsSync(file)) {
    const timeNow = Date.now();
    const stats = await stat(file);
    const birthTime = new Date(stats.birthtime).getTime();
    // FIX: the age must be timeNow - birthTime. The previous
    // !(birthTime - timeNow > hourMs * 24) was always true (the difference is
    // negative for any existing file), so stale caches were never rebuilt.
    if (timeNow - birthTime < hourMs * 24 && !nocache) {
      // FIX: fall back to the *string* '{}' — the old `|| {}` handed an object
      // to JSON.parse, which throws on "[object Object]" for an empty file.
      const res = JSON.parse((await readFile(file, 'utf-8')) || '{}');
      return res;
    }
  }

  try {
    const pkey = pg.pk?.[data?.table];
    if (!pkey) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: table pk not found (${data?.table})`,
        status: 400,
      };
    }

    // data param
    const { table, where = '1=1', xName, x } = normalizeData(data, query);

    if (!xName && !x) {
      return {
        message: `invalid ${widget ? 'widget' : 'dashboard'}: x axis column not specified`,
        status: 400,
      };
    }

    // get sql
    const filterQ = filter
      ? await getFilterSQL({ pg, table, filter, query })
      : undefined;
    // Base row source: pk, x-axis column and geometry, optionally pre-filtered.
    // NOTE(review): identifiers are interpolated directly into SQL — presumably
    // they come from widget config, not end users; verify.
    const q = `select "${pkey}", "${xName || x}", /* st_asgeojson(geom)::json as */ ${geom} as geom from ${filterQ ? `(${filterQ})` : table} q where ${where}`;

    // ?sql=1 returns the base SQL for debugging.
    if (sql === '1') return q;

    // Sample the dominant geometry type to pick the feature limit below.
    const { st_geometrytype: geomType = 'point' } = await pg
      .query(
        `select st_geometrytype(${geom}), count(*) from ${table}
where ${where} group by st_geometrytype(${geom})`
      )
      .then((res) => res.rows?.[0] || {});

    // Build the FeatureCollection in SQL: geometry (optionally centroid and/or
    // reprojected), plus properties (x column, color attr, id, first image).
    // (A dead `${'' ? ... : ''}` interpolation — always '' — was removed; the
    // generated SQL string is unchanged.)
    const q1 = `SELECT 'FeatureCollection' As type, json_agg(f) As features FROM (
SELECT 'Feature' As type, row_number() over() as id,
st_asgeojson(st_force2d(${query.srid
      ? `st_transform(${type === 'centroid' ? `st_centroid(${geom})` : geom},${query.srid})`
      : `${type === 'centroid' || query.point || query.centroid ? `st_centroid(${geom})` : geom}`
    }), 6, 0)::json as geometry,
(select row_to_json(tc) from (select ${''}
${xName ? `${xName},` : ''}
${data.style?.colorAttr ? `${data.style.colorAttr},` : ''}
${pkey} as id,(select file_path from crm.files
where entity_id=q.${pkey}::text and file_status <>'3' and ext in ('png','jpg') limit 1) as image
)tc) as properties
from (${q})q where ${id && pkey ? ` ${pkey} = '${id}' and ` : ''} ${geom} is not null
${data.query ? ` and ${data.query}` : ''}
${query.extent ? `and ${geom} && 'BOX(${query.extent.split(',').reduce((p, el, i) => p + el + (i % 2 ? ',' : ' '), '')})'::box2d` : ''}
${types[type] ? ` and ST_GeometryType(${geom}) = any ('{ ${types[type]} }') ` : ''}
limit ${geomType?.toLowerCase()?.includes('point') ? '15000' : '2500'})f`;

    // ?sql=2 returns the FeatureCollection SQL for debugging.
    if (sql === '2') return q1;

    // auto Index
    autoIndex({ table, columns: [xName] });

    const res = await pg.query(q1).then((res) => res.rows?.[0] || {});

    // Refresh the on-disk cache with the newly built collection.
    await mkdir(path.dirname(file), { recursive: true });
    await writeFile(file, JSON.stringify(res));

    return res;
  } catch (err) {
    logger.file('bi/geojson', { level: 'ERROR', error: err.toString(), query });
    return { error: err.toString(), status: 500 };
  }
}
|