@opengis/bi 1.2.30 → 1.2.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/README.md +92 -92
  2. package/dist/bi.js +1 -1
  3. package/dist/bi.umd.cjs +88 -86
  4. package/dist/{import-file-B4o9p2_2.js → import-file-D-ISqB7l.js} +1772 -1593
  5. package/dist/style.css +1 -1
  6. package/dist/{vs-funnel-bar-BOWwPnjW.js → vs-funnel-bar-aoZzvriV.js} +3 -3
  7. package/dist/{vs-list-D8jGusRT.js → vs-list-CBkyJSBj.js} +53 -32
  8. package/dist/{vs-map-BGplOwpB.js → vs-map-C3C11qmT.js} +102 -91
  9. package/dist/{vs-map-cluster-CHQJV2As.js → vs-map-cluster-BsPmHIMx.js} +91 -81
  10. package/dist/{vs-number-jPqxFQ6d.js → vs-number-d58ftpH5.js} +3 -3
  11. package/dist/{vs-table-BDgIvJbY.js → vs-table-BHa5Velm.js} +6 -6
  12. package/dist/{vs-text-DcHOffy9.js → vs-text-Bq87gMTx.js} +4 -4
  13. package/package.json +77 -75
  14. package/plugin.js +21 -22
  15. package/server/helpers/mdToHTML.js +17 -17
  16. package/server/migrations/bi.dataset.sql +46 -46
  17. package/server/migrations/bi.sql +115 -112
  18. package/server/plugins/docs.js +48 -48
  19. package/server/plugins/hook.js +89 -89
  20. package/server/plugins/vite.js +81 -81
  21. package/server/routes/dashboard/controllers/dashboard.import.js +103 -103
  22. package/server/routes/dashboard/controllers/dashboard.js +158 -157
  23. package/server/routes/dashboard/controllers/dashboard.list.js +60 -60
  24. package/server/routes/dashboard/controllers/utils/yaml.js +11 -11
  25. package/server/routes/dashboard/index.mjs +26 -26
  26. package/server/routes/data/controllers/data.js +230 -229
  27. package/server/routes/data/controllers/util/chartSQL.js +49 -49
  28. package/server/routes/data/controllers/util/normalizeData.js +65 -65
  29. package/server/routes/data/index.mjs +38 -38
  30. package/server/routes/dataset/controllers/bi.dataset.list.js +29 -29
  31. package/server/routes/dataset/controllers/bi.db.list.js +19 -19
  32. package/server/routes/dataset/controllers/comment.js +55 -55
  33. package/server/routes/dataset/controllers/createDatasetPost.js +134 -134
  34. package/server/routes/dataset/controllers/data.js +149 -149
  35. package/server/routes/dataset/controllers/dbTablePreview.js +58 -58
  36. package/server/routes/dataset/controllers/dbTables.js +34 -34
  37. package/server/routes/dataset/controllers/delete.js +40 -40
  38. package/server/routes/dataset/controllers/deleteDataset.js +52 -52
  39. package/server/routes/dataset/controllers/editDataset.js +90 -90
  40. package/server/routes/dataset/controllers/export.js +214 -214
  41. package/server/routes/dataset/controllers/form.js +99 -99
  42. package/server/routes/dataset/controllers/format.js +46 -46
  43. package/server/routes/dataset/controllers/insert.js +47 -47
  44. package/server/routes/dataset/controllers/table.js +68 -68
  45. package/server/routes/dataset/controllers/update.js +43 -43
  46. package/server/routes/dataset/index.mjs +132 -132
  47. package/server/routes/dataset/utils/convertJSONToCSV.js +17 -17
  48. package/server/routes/dataset/utils/convertJSONToXls.js +47 -47
  49. package/server/routes/dataset/utils/createTableQuery.js +59 -59
  50. package/server/routes/dataset/utils/datasetForms.js +1 -1
  51. package/server/routes/dataset/utils/descriptionList.js +45 -45
  52. package/server/routes/dataset/utils/downloadRemoteFile.js +58 -58
  53. package/server/routes/dataset/utils/executeQuery.js +46 -46
  54. package/server/routes/dataset/utils/getLayersData.js +106 -106
  55. package/server/routes/dataset/utils/getTableData.js +46 -46
  56. package/server/routes/dataset/utils/insertDataQuery.js +12 -12
  57. package/server/routes/dataset/utils/metaFormat.js +24 -24
  58. package/server/routes/edit/controllers/dashboard.add.js +36 -36
  59. package/server/routes/edit/controllers/dashboard.delete.js +39 -39
  60. package/server/routes/edit/controllers/dashboard.edit.js +61 -61
  61. package/server/routes/edit/controllers/widget.add.js +78 -78
  62. package/server/routes/edit/controllers/widget.del.js +58 -58
  63. package/server/routes/edit/controllers/widget.edit.js +115 -115
  64. package/server/routes/edit/index.mjs +33 -33
  65. package/server/routes/map/controllers/cluster.js +125 -125
  66. package/server/routes/map/controllers/clusterVtile.js +166 -166
  67. package/server/routes/map/controllers/geojson.js +127 -127
  68. package/server/routes/map/controllers/heatmap.js +118 -118
  69. package/server/routes/map/controllers/map.js +69 -69
  70. package/server/routes/map/controllers/utils/downloadClusterData.js +44 -44
  71. package/server/routes/map/controllers/vtile.js +183 -183
  72. package/server/routes/map/index.mjs +32 -32
  73. package/server/templates/page/login.html +58 -58
  74. package/server/utils/getWidget.js +118 -117
  75. package/utils.js +12 -12
@@ -1,214 +1,214 @@
1
- /* eslint-disable no-await-in-loop */
2
- import path from 'node:path';
3
- import { createHash } from 'node:crypto';
4
- import { existsSync, rmSync } from 'node:fs';
5
-
6
- import { readFile, writeFile, mkdir } from 'node:fs/promises';
7
-
8
- import {
9
- config, pgClients, getMeta, getFolder, getFilterSQL, eventStream, logger,
10
- } from '@opengis/fastify-table/utils.js';
11
-
12
- // import convertJSONToXls from '../utils/convertJSONToXls.js';
13
- import convertJSONToCSV from '../utils/convertJSONToCSV.js';
14
- import metaFormat from '../utils/metaFormat.js';
15
-
16
- export default async function datasetDataExport(req, reply) {
17
- const {
18
- pg = pgClients.client, params = {}, query = {}, user = {}, unittest,
19
- } = req;
20
-
21
- if (!user?.uid) {
22
- return { message: 'access restricted', status: 403 };
23
- }
24
-
25
- const { user_type: userType = 'regular' } = user;
26
-
27
- if (!params?.id) {
28
- return { message: 'not enough params: id', status: 404 };
29
- }
30
-
31
- const { format = 'json' } = query;
32
-
33
- const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns,
34
- export_columns as "exportColumns", sql_list as sql from bi.dataset where dataset_id=$1`, [params.id])
35
- .then(el => el.rows[0] || {});
36
-
37
- if (!dataset?.id || !dataset.table) {
38
- return { message: 'dataset not found', status: 404 };
39
- }
40
-
41
- const { pk = dataset.pk, view, columns: dbColumns = [] } = await getMeta(dataset.table);
42
-
43
- if (!dataset.table || (!pg.pk?.[dataset.table] && !pk && !view)) {
44
- return { message: `table not found: ${dataset.table}`, status: 404 };
45
- }
46
-
47
- const rootDir = getFolder(req, 'local');
48
-
49
- const date = new Date();
50
- const sufixName = `${query.filter || '1=1'}-${query.col || 'all'}-${query.search || '1=1'}-${query.limit || 'unlimited'}`;
51
- const sufixDate = [date.getFullYear(), date.getMonth(), date.getDate(), date.getHours()].join('-');
52
-
53
- const objInfo = createHash('md5').update([sufixName, sufixDate].join('-')).digest('hex');
54
- const fileName = dataset.table.concat('_').concat(objInfo).concat('.').concat(format);
55
- const filepath = path.join(rootDir, '/files/temp', fileName);
56
-
57
- if (existsSync(filepath) && query.nocache) {
58
- rmSync(filepath);
59
- }
60
-
61
- if (existsSync(filepath) && !query?.sql) {
62
- const encoding = path.extname(filepath) === '.xlsx' ? 'base64' : 'utf8';
63
- const data = await readFile(filepath, encoding);
64
- return reply.headers({ 'Content-Disposition': `attachment; filename=${path.basename(filepath)}` }).send(Buffer.from(data, encoding));
65
- }
66
-
67
- const fData = await getFilterSQL({
68
- filter: query.filter,
69
- search: query?.search,
70
- table: dataset.table,
71
- json: 1,
72
- });
73
-
74
- const where = [(params.object_id ? ` "${pk}" = $1` : null), dataset.query, fData.q].filter((el) => el);
75
-
76
- const cols = dataset.exportColumns
77
- || dataset.columns?.map((el) => el.name);
78
-
79
- if (!cols?.length) {
80
- return { message: `empty export columns: ${dataset.id}`, status: 400 };
81
- }
82
-
83
- const sqlTable = dataset.sql
84
- ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
85
- ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', user?.uid)}) ${el.name || `t${i}`} on 1=1 `)
86
- ?.join('') || '';
87
-
88
- const { total = '0' } = await pg.query(`select count(*) as total from ${dataset.table}
89
- where ${dataset.query || 'true'} and ${params.object_id ? `${pk}='${params.object_id}'` : '1=1'}`)
90
- .then(el => el.rows?.[0] || {});
91
-
92
- if (total === '0') {
93
- return { message: 'Немає даних, які можна експортувати', status: 200 };
94
- }
95
-
96
- const ext = path.extname(filepath);
97
- const filepathJSON = ['csv', 'xlsx'].includes(format) ? filepath.replace(ext, '.json') : filepath;
98
-
99
- const cacheFileJSON = existsSync(filepathJSON);
100
-
101
- const send = +total < 10000 || unittest
102
- ? console.log
103
- : eventStream(reply);
104
-
105
- if (cacheFileJSON && !query.sql) {
106
- if (query.nocache) rmSync(filepathJSON); // delete old file, prevent append
107
- if (!query.nocache && ['xlsx', 'csv'].includes(format)) {
108
- send(`Знайдено файл формату json. Пропуск обробки даних. Початок конвертації в ${format}...`);
109
- if (format === 'xlsx') {
110
- return 'temporary not work';
111
- return convertJSONToXls({
112
- filepathJSON,
113
- colmodel: dataset.columns,
114
- domain: req.hostname,
115
- source: dataset.table,
116
- send,
117
- });
118
- }
119
- if (format === 'csv') {
120
- return convertJSONToCSV({ send, filepathJSON });
121
- }
122
- }
123
- }
124
-
125
- const limit = Math.min(query.limit, 10000) || 10000;
126
-
127
- const optimizedSQL = `select ${pk ? `"${pk}" as id,` : ''} *
128
- ${['geojson', 'shp'].includes(format) && dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
129
- from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')}) t
130
- ${sqlTable} where ${where.join(' and ') || 'true'} offset 0 limit ${limit}`.replace(/{{uid}}/g, user?.uid);
131
-
132
- if (query.sql === '1' && (config.debug || userType.includes('admin'))) return optimizedSQL;
133
-
134
- const res = {};
135
- let offset = 0;
136
- let seq = 0;
137
-
138
- send(`Всього в реєстрі: ${total}`);
139
-
140
- while ((+total - offset > 0) && !res?.error) {
141
- try {
142
- const q = optimizedSQL.replace(/offset 0/g, `offset ${offset}`);
143
- const { rows = [] } = await pg.query(q, (params.object_id ? [params.object_id] : null));
144
-
145
- send(`Оброблено: ${offset}/${total}`);
146
- send(`seq: ${++seq}`);
147
- send(`Обробка ${rows.length} об'єктів...`);
148
-
149
- if (!rows.length) {
150
- send('Обробка даних успішно завершена');
151
- break;
152
- }
153
-
154
- await metaFormat({ rows, columns: dataset.columns });
155
-
156
- // skip non present after metaFormat
157
- const allowColumnList = []; // 'id'
158
- if (['geojson', 'shp'].includes(format)) {
159
- allowColumnList.push('geom');
160
- }
161
- rows.forEach((row) => {
162
- Object.keys(row)
163
- .filter((el) => !allowColumnList.includes(el.name) && !cols.includes(el))
164
- .forEach((key) => delete row[key]);
165
- });
166
-
167
- // convert csv / xlsx from json
168
- if (!existsSync(filepathJSON)) { // if json not exists
169
- await mkdir(path.dirname(filepath), { recursive: true });
170
- await writeFile(filepathJSON, JSON.stringify(rows));
171
- }
172
- else { // if json exists
173
- const jsonData = JSON.parse(await readFile(filepathJSON) || '{}');
174
- const moreData = jsonData.concat(rows); // rewrite to appendFile?
175
- await writeFile(filepathJSON, JSON.stringify(moreData));
176
- }
177
-
178
- offset += rows.length;
179
- }
180
- catch (err) {
181
- Object.assign(res, { error: err.toString(), status: 500 });
182
- send(`error: ${err.toString()}`, 1);
183
- logger.error({
184
- name: 'export/table',
185
- filepathJSON,
186
- total,
187
- offset,
188
- result: res,
189
- error: err.toString(),
190
- });
191
- return err.toString();
192
- }
193
- }
194
-
195
- if (!res?.error && format === 'csv') {
196
- send('Сформовано файл формату json. Початок конвертації в csv...');
197
- return convertJSONToCSV({ send, filepathJSON });
198
- }
199
-
200
- if (!res?.error && format === 'xlsx') {
201
- send('Сформовано файл формату json. Початок конвертації в xlsx...');
202
- return convertJSONToXls({
203
- filepathJSON,
204
- colmodel: dataset.columns,
205
- domain: req.hostname,
206
- source: dataset.table,
207
- send,
208
- });
209
- }
210
-
211
- const message = res.error || 'Файл успішно сформовано. Натистіть кнопку ще раз для завантаження даних';
212
- send(message, 1);
213
- return { message, status: 200 };
214
- }
1
+ /* eslint-disable no-await-in-loop */
2
+ import path from 'node:path';
3
+ import { createHash } from 'node:crypto';
4
+ import { existsSync, rmSync } from 'node:fs';
5
+
6
+ import { readFile, writeFile, mkdir } from 'node:fs/promises';
7
+
8
+ import {
9
+ config, pgClients, getMeta, getFolder, getFilterSQL, eventStream, logger,
10
+ } from '@opengis/fastify-table/utils.js';
11
+
12
+ // import convertJSONToXls from '../utils/convertJSONToXls.js';
13
+ import convertJSONToCSV from '../utils/convertJSONToCSV.js';
14
+ import metaFormat from '../utils/metaFormat.js';
15
+
16
+ export default async function datasetDataExport(req, reply) {
17
+ const {
18
+ pg = pgClients.client, params = {}, query = {}, user = {}, unittest,
19
+ } = req;
20
+
21
+ if (!user?.uid) {
22
+ return { message: 'access restricted', status: 403 };
23
+ }
24
+
25
+ const { user_type: userType = 'regular' } = user;
26
+
27
+ if (!params?.id) {
28
+ return { message: 'not enough params: id', status: 404 };
29
+ }
30
+
31
+ const { format = 'json' } = query;
32
+
33
+ const dataset = await pg.query(`select dataset_id as id, table_name as table, column_list as columns,
34
+ export_columns as "exportColumns", sql_list as sql from bi.dataset where dataset_id=$1`, [params.id])
35
+ .then(el => el.rows[0] || {});
36
+
37
+ if (!dataset?.id || !dataset.table) {
38
+ return { message: 'dataset not found', status: 404 };
39
+ }
40
+
41
+ const { pk = dataset.pk, view, columns: dbColumns = [] } = await getMeta(dataset.table);
42
+
43
+ if (!dataset.table || (!pg.pk?.[dataset.table] && !pk && !view)) {
44
+ return { message: `table not found: ${dataset.table}`, status: 404 };
45
+ }
46
+
47
+ const rootDir = getFolder(req, 'local');
48
+
49
+ const date = new Date();
50
+ const sufixName = `${query.filter || '1=1'}-${query.col || 'all'}-${query.search || '1=1'}-${query.limit || 'unlimited'}`;
51
+ const sufixDate = [date.getFullYear(), date.getMonth(), date.getDate(), date.getHours()].join('-');
52
+
53
+ const objInfo = createHash('md5').update([sufixName, sufixDate].join('-')).digest('hex');
54
+ const fileName = dataset.table.concat('_').concat(objInfo).concat('.').concat(format);
55
+ const filepath = path.join(rootDir, '/files/temp', fileName);
56
+
57
+ if (existsSync(filepath) && query.nocache) {
58
+ rmSync(filepath);
59
+ }
60
+
61
+ if (existsSync(filepath) && !query?.sql) {
62
+ const encoding = path.extname(filepath) === '.xlsx' ? 'base64' : 'utf8';
63
+ const data = await readFile(filepath, encoding);
64
+ return reply.headers({ 'Content-Disposition': `attachment; filename=${path.basename(filepath)}` }).send(Buffer.from(data, encoding));
65
+ }
66
+
67
+ const fData = await getFilterSQL({
68
+ filter: query.filter,
69
+ search: query?.search,
70
+ table: dataset.table,
71
+ json: 1,
72
+ });
73
+
74
+ const where = [(params.object_id ? ` "${pk}" = $1` : null), dataset.query, fData.q].filter((el) => el);
75
+
76
+ const cols = dataset.exportColumns
77
+ || dataset.columns?.map((el) => el.name);
78
+
79
+ if (!cols?.length) {
80
+ return { message: `empty export columns: ${dataset.id}`, status: 400 };
81
+ }
82
+
83
+ const sqlTable = dataset.sql
84
+ ?.filter?.((el) => !el?.disabled && el?.sql?.replace)
85
+ ?.map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', user?.uid)}) ${el.name || `t${i}`} on 1=1 `)
86
+ ?.join('') || '';
87
+
88
+ const { total = '0' } = await pg.query(`select count(*) as total from ${dataset.table}
89
+ where ${dataset.query || 'true'} and ${params.object_id ? `${pk}='${params.object_id}'` : '1=1'}`)
90
+ .then(el => el.rows?.[0] || {});
91
+
92
+ if (total === '0') {
93
+ return { message: 'Немає даних, які можна експортувати', status: 200 };
94
+ }
95
+
96
+ const ext = path.extname(filepath);
97
+ const filepathJSON = ['csv', 'xlsx'].includes(format) ? filepath.replace(ext, '.json') : filepath;
98
+
99
+ const cacheFileJSON = existsSync(filepathJSON);
100
+
101
+ const send = +total < 10000 || unittest
102
+ ? console.log
103
+ : eventStream(reply);
104
+
105
+ if (cacheFileJSON && !query.sql) {
106
+ if (query.nocache) rmSync(filepathJSON); // delete old file, prevent append
107
+ if (!query.nocache && ['xlsx', 'csv'].includes(format)) {
108
+ send(`Знайдено файл формату json. Пропуск обробки даних. Початок конвертації в ${format}...`);
109
+ if (format === 'xlsx') {
110
+ return 'temporary not work';
111
+ return convertJSONToXls({
112
+ filepathJSON,
113
+ colmodel: dataset.columns,
114
+ domain: req.hostname,
115
+ source: dataset.table,
116
+ send,
117
+ });
118
+ }
119
+ if (format === 'csv') {
120
+ return convertJSONToCSV({ send, filepathJSON });
121
+ }
122
+ }
123
+ }
124
+
125
+ const limit = Math.min(query.limit, 10000) || 10000;
126
+
127
+ const optimizedSQL = `select ${pk ? `"${pk}" as id,` : ''} *
128
+ ${['geojson', 'shp'].includes(format) && dbColumns.find((el) => el.name === 'geom' && pg.pgType[el.dataTypeID] === 'geometry') ? ',st_asgeojson(geom)::json as geom' : ''}
129
+ from (select * from ${dataset.table} where ${sqlTable ? 'true' : (where.join(' and ') || 'true')}) t
130
+ ${sqlTable} where ${where.join(' and ') || 'true'} offset 0 limit ${limit}`.replace(/{{uid}}/g, user?.uid);
131
+
132
+ if (query.sql === '1' && (config.debug || userType.includes('admin'))) return optimizedSQL;
133
+
134
+ const res = {};
135
+ let offset = 0;
136
+ let seq = 0;
137
+
138
+ send(`Всього в реєстрі: ${total}`);
139
+
140
+ while ((+total - offset > 0) && !res?.error) {
141
+ try {
142
+ const q = optimizedSQL.replace(/offset 0/g, `offset ${offset}`);
143
+ const { rows = [] } = await pg.query(q, (params.object_id ? [params.object_id] : null));
144
+
145
+ send(`Оброблено: ${offset}/${total}`);
146
+ send(`seq: ${++seq}`);
147
+ send(`Обробка ${rows.length} об'єктів...`);
148
+
149
+ if (!rows.length) {
150
+ send('Обробка даних успішно завершена');
151
+ break;
152
+ }
153
+
154
+ await metaFormat({ rows, columns: dataset.columns });
155
+
156
+ // skip non present after metaFormat
157
+ const allowColumnList = []; // 'id'
158
+ if (['geojson', 'shp'].includes(format)) {
159
+ allowColumnList.push('geom');
160
+ }
161
+ rows.forEach((row) => {
162
+ Object.keys(row)
163
+ .filter((el) => !allowColumnList.includes(el.name) && !cols.includes(el))
164
+ .forEach((key) => delete row[key]);
165
+ });
166
+
167
+ // convert csv / xlsx from json
168
+ if (!existsSync(filepathJSON)) { // if json not exists
169
+ await mkdir(path.dirname(filepath), { recursive: true });
170
+ await writeFile(filepathJSON, JSON.stringify(rows));
171
+ }
172
+ else { // if json exists
173
+ const jsonData = JSON.parse(await readFile(filepathJSON) || '{}');
174
+ const moreData = jsonData.concat(rows); // rewrite to appendFile?
175
+ await writeFile(filepathJSON, JSON.stringify(moreData));
176
+ }
177
+
178
+ offset += rows.length;
179
+ }
180
+ catch (err) {
181
+ Object.assign(res, { error: err.toString(), status: 500 });
182
+ send(`error: ${err.toString()}`, 1);
183
+ logger.error({
184
+ name: 'export/table',
185
+ filepathJSON,
186
+ total,
187
+ offset,
188
+ result: res,
189
+ error: err.toString(),
190
+ });
191
+ return err.toString();
192
+ }
193
+ }
194
+
195
+ if (!res?.error && format === 'csv') {
196
+ send('Сформовано файл формату json. Початок конвертації в csv...');
197
+ return convertJSONToCSV({ send, filepathJSON });
198
+ }
199
+
200
+ if (!res?.error && format === 'xlsx') {
201
+ send('Сформовано файл формату json. Початок конвертації в xlsx...');
202
+ return convertJSONToXls({
203
+ filepathJSON,
204
+ colmodel: dataset.columns,
205
+ domain: req.hostname,
206
+ source: dataset.table,
207
+ send,
208
+ });
209
+ }
210
+
211
+ const message = res.error || 'Файл успішно сформовано. Натистіть кнопку ще раз для завантаження даних';
212
+ send(message, 1);
213
+ return { message, status: 200 };
214
+ }
@@ -1,99 +1,99 @@
1
- import {
2
- config, pgClients, getMeta, getToken,
3
- } from '@opengis/fastify-table/utils.js';
4
-
5
- import datasetForms from '../utils/datasetForms.js';
6
-
7
- const inputType = {
8
- text: 'Text',
9
- autocomplete: 'Autocomplete',
10
- select: 'Autocomplete',
11
- date: 'DatePicker',
12
- 'yes/no': 'Switcher',
13
- badge: 'Select',
14
- number: 'Number',
15
- tags: 'Tags',
16
- file: 'File',
17
- };
18
-
19
- const systemColumns = [
20
- 'uid',
21
- 'cdate',
22
- 'editor_id',
23
- 'editor_date',
24
- 'files',
25
- ];
26
-
27
- export default async function datasetForm({
28
- pg = pgClients.client, params = {}, query = {}, user = {},
29
- }) {
30
- if (!user?.uid) {
31
- return { message: 'access restricted', status: 403 };
32
- }
33
-
34
- if (!params?.id) {
35
- return { message: 'not enough params: id', status: 404 };
36
- }
37
-
38
- const tokenData = await getToken({
39
- uid: user?.uid,
40
- token: params.id,
41
- mode: 'a',
42
- json: 1,
43
- });
44
-
45
- if (!tokenData && !config.local && !config.debug) {
46
- return { message: 'token not allow', status: 403 };
47
- }
48
-
49
- const {
50
- id, columns, formSetting, table, style,
51
- } = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, style,
52
- form_setting as "formSetting" from bi.dataset where dataset_id=$1`, [tokenData?.dataset || params.id])
53
- .then(el => el.rows?.[0] || {});
54
-
55
- if (!id) {
56
- return { message: 'dataset not found', status: 404 };
57
- }
58
-
59
- if (datasetForms[id] && !query.nocache) {
60
- return datasetForms[id];
61
- }
62
-
63
- const { columns: dbColumns = [] } = await getMeta(table) || {};
64
- const isFilesColumn = dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text');
65
-
66
- const formSchema = formSetting
67
- || ((columns || dbColumns).filter((col) => !systemColumns.includes(col.name))).reduce((acc, curr) => Object.assign(acc, {
68
- [curr.name]: {
69
- type: inputType[curr?.format || curr?.type || 'text'] || 'Text',
70
- ua: curr?.description || curr?.title || curr?.name,
71
- validators: curr?.is_required ? ['required'] : null,
72
- options: curr?.values?.length ? curr?.values : null,
73
- data: curr?.data,
74
- min: curr?.type || curr?.format === 'number' ? 0 : null,
75
- },
76
- }), {});
77
-
78
- Object.assign(formSchema, {
79
- geom: {
80
- type: 'Geom',
81
- ua: 'Геометрія',
82
- height: '500',
83
- geom_edit_controol: [style?.type ? [style.type] : null],
84
- },
85
- });
86
-
87
- if (isFilesColumn) {
88
- Object.assign(formSchema, {
89
- files: {
90
- type: 'FileList',
91
- ua: 'Файли',
92
- },
93
- });
94
- }
95
-
96
- datasetForms[id] = formSchema;
97
-
98
- return formSchema;
99
- }
1
+ import {
2
+ config, pgClients, getMeta, getToken,
3
+ } from '@opengis/fastify-table/utils.js';
4
+
5
+ import datasetForms from '../utils/datasetForms.js';
6
+
7
+ const inputType = {
8
+ text: 'Text',
9
+ autocomplete: 'Autocomplete',
10
+ select: 'Autocomplete',
11
+ date: 'DatePicker',
12
+ 'yes/no': 'Switcher',
13
+ badge: 'Select',
14
+ number: 'Number',
15
+ tags: 'Tags',
16
+ file: 'File',
17
+ };
18
+
19
+ const systemColumns = [
20
+ 'uid',
21
+ 'cdate',
22
+ 'editor_id',
23
+ 'editor_date',
24
+ 'files',
25
+ ];
26
+
27
+ export default async function datasetForm({
28
+ pg = pgClients.client, params = {}, query = {}, user = {},
29
+ }) {
30
+ if (!user?.uid) {
31
+ return { message: 'access restricted', status: 403 };
32
+ }
33
+
34
+ if (!params?.id) {
35
+ return { message: 'not enough params: id', status: 404 };
36
+ }
37
+
38
+ const tokenData = await getToken({
39
+ uid: user?.uid,
40
+ token: params.id,
41
+ mode: 'a',
42
+ json: 1,
43
+ });
44
+
45
+ if (!tokenData && !config.local && !config.debug) {
46
+ return { message: 'token not allow', status: 403 };
47
+ }
48
+
49
+ const {
50
+ id, columns, formSetting, table, style,
51
+ } = await pg.query(`select dataset_id as id, table_name as table, column_list as columns, style,
52
+ form_setting as "formSetting" from bi.dataset where dataset_id=$1`, [tokenData?.dataset || params.id])
53
+ .then(el => el.rows?.[0] || {});
54
+
55
+ if (!id) {
56
+ return { message: 'dataset not found', status: 404 };
57
+ }
58
+
59
+ if (datasetForms[id] && !query.nocache) {
60
+ return datasetForms[id];
61
+ }
62
+
63
+ const { columns: dbColumns = [] } = await getMeta(table) || {};
64
+ const isFilesColumn = dbColumns.find((el) => el.name === 'files' && pg.pgType[el.dataTypeID] === 'text');
65
+
66
+ const formSchema = formSetting
67
+ || ((columns || dbColumns).filter((col) => !systemColumns.includes(col.name))).reduce((acc, curr) => Object.assign(acc, {
68
+ [curr.name]: {
69
+ type: inputType[curr?.format || curr?.type || 'text'] || 'Text',
70
+ ua: curr?.description || curr?.title || curr?.name,
71
+ validators: curr?.is_required ? ['required'] : null,
72
+ options: curr?.values?.length ? curr?.values : null,
73
+ data: curr?.data,
74
+ min: curr?.type || curr?.format === 'number' ? 0 : null,
75
+ },
76
+ }), {});
77
+
78
+ Object.assign(formSchema, {
79
+ geom: {
80
+ type: 'Geom',
81
+ ua: 'Геометрія',
82
+ height: '500',
83
+ geom_edit_controol: [style?.type ? [style.type] : null],
84
+ },
85
+ });
86
+
87
+ if (isFilesColumn) {
88
+ Object.assign(formSchema, {
89
+ files: {
90
+ type: 'FileList',
91
+ ua: 'Файли',
92
+ },
93
+ });
94
+ }
95
+
96
+ datasetForms[id] = formSchema;
97
+
98
+ return formSchema;
99
+ }