@opengis/fastify-table 1.2.59 → 1.2.61

package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@opengis/fastify-table",
-  "version": "1.2.59",
+  "version": "1.2.61",
   "type": "module",
   "description": "core-plugins",
   "keywords": [
@@ -24,6 +24,7 @@ function getPG(param) {
     host: host || config.pg?.host,
     port: port || config.pg?.port,
     database: db || database || config.pg?.db || config.pg?.database,
+    statement_timeout: config.pg?.statement_timeout || 10000,
   };
 
   pgClients[name] = new pg.Pool(dbConfig);
@@ -25,6 +25,7 @@ async function getPGAsync(param) {
     host: host || config.pg?.host,
     port: port || config.pg?.port,
     database: db || database || config.pg?.db || config.pg?.database,
+    statement_timeout: config.pg?.statement_timeout || 10000,
   };
 
   pgClients[name] = new pg.Pool(dbConfig);
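
Both pool factories now pass statement_timeout through to new pg.Pool(dbConfig), defaulting to 10000 ms, so Postgres aborts any single statement that runs longer than the limit; node-postgres forwards this option to each connection it opens. A minimal sketch of how a deployment might override the default via the plugin's pg config block; only the statement_timeout key is grounded in this diff, the file shape and values are illustrative:

// Hypothetical config read by getPG()/getPGAsync(); host/database values are assumptions.
export default {
  pg: {
    host: 'db.internal',
    port: 5432,
    database: 'gis',
    statement_timeout: 30000, // overrides the new 10000 ms default
  },
};
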
@@ -66,7 +66,7 @@ export default function checkPolicy(req, reply) {
     return null;
   }
 
-  const validToken = (req.ip === '193.239.152.181' || req.ip === '127.0.0.1' || req.ip.startsWith('192.168.') || config.debug)
+  const validToken = (req.ip === '193.239.152.181' || req.ip === '127.0.0.1' || req.ip?.startsWith?.('192.168.') || config.debug)
     && req.headers?.uid
     && req.headers?.token
     && config.auth?.tokens?.includes?.(headers.token);
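
The only change here is req.ip.startsWith(...) becoming req.ip?.startsWith?.(...), which keeps checkPolicy from throwing when Fastify has no client address to report (req.ip undefined, e.g. in some test or proxy setups). A minimal illustration, not code from the package:

// With a missing ip the old expression threw; the new one just evaluates falsy.
const req = { ip: undefined, headers: {} };
// req.ip.startsWith('192.168.')           -> TypeError: Cannot read properties of undefined
console.log(req.ip?.startsWith?.('192.168.')); // undefined, so validToken stays false
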
@@ -1,4 +1,4 @@
-import { logger, autoIndex, getSelect, getFilterSQL, getTemplate, getSelectVal, pgClients } from '../../../../utils.js';
+import { config, logger, autoIndex, getSelect, getFilterSQL, getTemplate, getSelectVal, pgClients } from '../../../../utils.js';
 
 export default async function filterAPI(req) {
   const time = Date.now();
@@ -32,14 +32,14 @@ export default async function filterAPI(req) {
   const filters = (loadTable?.filter_list || loadTable?.filters || loadTable?.filterList || []).concat(loadTable?.filterSql || []);
 
   // admin.custom_column - user filter NA-165
-  const { rows: properties = [] } = await pg.query(`select column_id, name, title, format, data from admin.custom_column where entity=$1 and uid=$2 and filter`, [params.name, user?.uid]);
+  const { rows: properties = [] } = await pg.query(`select column_id, name, title, format, data from admin.custom_column where entity=$1 and uid=$2 and filter`, [params.table, user?.uid]);
   properties.forEach((row) => filters.push({ id: row.name, name: row.name, ua: row.title, type: row.format, data: row.data }));
 
   // KRYVYIRIH-231
   autoIndex({ table: loadTable.table, columns: filters.filter((el) => columns?.find?.((item) => item?.name === el.name)) })
     .catch(err => {
       console.error(err.toString());
-      logger.file('autoindex/error', { name: params?.name, error: err.toString(), stack: err.stack });
+      logger.file('autoindex/error', { name: params?.table, error: err.toString(), stack: err.stack });
     });
 
   filters?.forEach?.(el => Object.assign(el, { id: el.id || el.name }));
@@ -59,27 +59,43 @@ export default async function filterAPI(req) {
       Object.assign(el, { options });
     }
 
-    const countArr = pg.pgType[dataTypeID]?.includes('[]')
-      ? await pg.queryCache(`select unnest(${el.id})::text as id,count(*) from (${optimizedSQL})q group by unnest(${el.id})`, { table: loadTable.table, time: 5 })
-      : await pg.queryCache(`select ${el.id}::text as id,count(*) from (${optimizedSQL})q group by ${el.id}`, { table: loadTable.table, time: 5 });
-    const ids = countArr.rows.map(el1 => el1.id);
+    const q = pg.pgType[dataTypeID]?.includes('[]')
+      ? `select unnest(${el.id})::text as id,count(*) from (${optimizedSQL})q group by unnest(${el.id}) limit 500`
+      : `select ${el.id}::text as id,count(*) from (${optimizedSQL})q group by ${el.id} limit 500`;
 
-    const clsData = await getSelectVal({ pg, values: ids, name: el.data });
+    try {
+      if (config.debug) console.log('filter options start', el.id, 'array', pg.pgType[dataTypeID]?.includes('[]'));
+      const countArr = await pg.queryCache(q, { table: loadTable.table, time: config.local ? 0 : undefined });
+      if (config.debug) console.log('filter options finish', el.id, 'array', pg.pgType[dataTypeID]?.includes('[]'));
 
-    const options = countArr.rows.map(cel => {
-      const data = cls?.arr?.find(c => c.id === cel.id) || { text: clsData[cel.id] };
-      return { ...cel, ...data };
-    });
-    Object.assign(el, { options });
+      const ids = countArr.rows.map(el1 => el1.id);
+
+      const clsData = await getSelectVal({ pg, values: ids, name: el.data });
+
+      const options = countArr.rows.map(cel => {
+        const data = cls?.arr?.find(c => c.id === cel.id) || { text: clsData[cel.id] };
+        return { ...cel, ...data };
+      });
+      Object.assign(el, { options });
+    } catch (err) {
+      Object.assign(el, { timeout: true });
+      logger.file('timeout', { table: params?.table, filter: el.id, stack: err.stack });
+      console.log('filter query timeout', params.table, el.id);
+    }
   }));
 
   const q = ((loadTable?.filterState || []).concat(loadTable?.filterCustom || [])).filter((el) => el.name && el.sql).map((el) => `select count(*), '${el.name}' as name from (${optimizedSQL})q where ${el.sql}`).join(' union all ');
-  const { rows = [] } = q ? await pg.query(q) : {};
-  if (rows?.length) {
-    ((loadTable?.filterState || []).concat(loadTable?.filterCustom || [])).filter((el) => el.name && el.sql).forEach((el) => {
-      const { count } = rows.find((row) => row.name === el.name) || {};
-      Object.assign(el, { count, sql: undefined });
-    });
+
+  try {
+    const { rows = [] } = q ? await pg.query(q) : {};
+    if (rows?.length) {
+      ((loadTable?.filterState || []).concat(loadTable?.filterCustom || [])).filter((el) => el.name && el.sql).forEach((el) => {
+        const { count } = rows.find((row) => row.name === el.name) || {};
+        Object.assign(el, { count, sql: undefined });
+      });
+    }
+  } catch (err) {
+    logger.file('timeout', { table: params?.table, filter: 'sql', stack: err.stack });
   }
 
   const sqlList = loadTable?.sql
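
Two changes drive this hunk: the distinct-value queries are capped with limit 500 and wrapped in try/catch, so a statement_timeout abort now marks the filter with timeout: true instead of failing the whole request, and queryCache is called with time: config.local ? 0 : undefined, which reads like a cache bypass for local runs. queryCache is internal to this package and its implementation is not part of the diff; the standalone sketch below only illustrates the TTL-keyed caching idea the calls imply, and its names, default TTL, and units are assumptions:

// NOT the package's implementation - a sketch of a TTL query cache with a similar
// calling shape, where time: 0 bypasses the cache and a missing time uses a default.
const cache = new Map();
async function queryCacheSketch(pg, sql, { table, time } = {}) {
  const ttlMs = (time ?? 5) * 1000; // default and unit are guesses
  const key = `${table}:${sql}`;
  const hit = cache.get(key);
  if (ttlMs > 0 && hit && Date.now() - hit.at < ttlMs) return hit.result;
  const result = await pg.query(sql); // still subject to the pool's statement_timeout
  if (ttlMs > 0) cache.set(key, { at: Date.now(), result });
  return result;
}
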
@@ -89,24 +105,35 @@ export default async function filterAPI(req) {
 
   // percentile_cont - alternative
   await Promise.all(filters.filter((el) => el.name && el.type === 'Range' && fields?.find?.((item) => item?.name === el.name)).map(async (el) => {
-    const data = await pg.queryCache(`select array[
+    try {
+      const data = await pg.queryCache(`select array[
       min(${el.name}),
       percentile_disc(0.25) within group (order by ${el.name}),
      percentile_disc(0.5) within group (order by ${el.name}),
      percentile_disc(0.75) within group (order by ${el.name}),
      max(${el.name})
-    ] as range from ${loadTable.table} ${sqlList && false ? ` t ${sqlList}` : ''} where ${loadTable.query || '1=1'}`, { table: loadTable.table }).then(el => el.rows?.[0]?.range);
-    Object.assign(el, { data });
+    ] as range from ${loadTable.table} ${sqlList && false ? ` t ${sqlList}` : ''} where ${loadTable.query || '1=1'}`,
+      { table: loadTable.table, time: config.local ? 0 : undefined }).then(el => el.rows?.[0]?.range);
+      Object.assign(el, { data });
+    } catch (err) {
+      Object.assign(el, { timeout: 1 });
+      logger.file('timeout', { table: params?.table, filter: 'sql', stack: err.stack });
+    }
   }));
 
   const sqlFilters = (loadTable?.filterCustom || []).filter((el) => el.name && el.sql);
   const q1 = sqlFilters.map((el) => `select count(*), '${el.name}' as name from ${loadTable.table} where ${loadTable.query || '1=1'} and ${el.sql}`).join(' union all ');
-  const { rows: sqlRows = [] } = q1 ? await pg.queryCache(q1, { table: loadTable.table }) : {};
-  if (sqlRows?.length) {
-    sqlFilters.forEach((el) => {
-      const { count } = sqlRows.find((row) => row.name === el.name) || {};
-      Object.assign(el, { count, sql: undefined });
-    });
+
+  try {
+    const { rows: sqlRows = [] } = q1 ? await pg.queryCache(q1, { table: loadTable.table, time: config.local ? 0 : undefined }) : {};
+    if (sqlRows?.length) {
+      sqlFilters.forEach((el) => {
+        const { count } = sqlRows.find((row) => row.name === el.name) || {};
+        Object.assign(el, { count, sql: undefined });
+      });
+    }
+  } catch (err) {
+    logger.file('timeout', { table: params?.table, filter: 'custom', stack: err.stack });
  }
 
   return {
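
Taken together, the filterAPI hunks mean a slow aggregation no longer takes the whole endpoint down: the affected filter comes back flagged (timeout: true for option lists, timeout: 1 for Range bounds) and is logged via logger.file('timeout', ...), while the rest of the response is built as usual. A hedged sketch of how a caller might react to the flag; the endpoint URL and the response envelope beyond options/timeout are assumptions, not defined by this diff:

// Hypothetical client of the filter endpoint (URL and response shape assumed).
const res = await fetch('/api/table-filter/roads').then((r) => r.json());
for (const filter of res.filters || []) {
  if (filter.timeout) {
    // options/range were skipped because the query hit statement_timeout
    console.warn('filter timed out, hiding facet:', filter.id);
  } else {
    console.log(filter.id, (filter.options || []).length, 'options');
  }
}
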