@opengis/fastify-table 1.3.63 → 1.3.65
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/server/plugins/cron/funcs/runCron.js +4 -3
- package/server/plugins/cron/funcs/verifyUnique.js +2 -1
- package/server/plugins/cron/index.js +13 -11
- package/server/plugins/crud/funcs/dataDelete.js +9 -4
- package/server/plugins/crud/funcs/dataInsert.js +12 -5
- package/server/plugins/crud/funcs/dataUpdate.js +28 -5
- package/server/plugins/crud/funcs/getOpt.js +3 -2
- package/server/plugins/crud/funcs/getToken.js +3 -2
- package/server/plugins/crud/funcs/setOpt.js +4 -4
- package/server/plugins/crud/funcs/setToken.js +1 -2
- package/server/plugins/logger/getLogger.js +1 -0
- package/server/plugins/metric/loggerSystem.js +15 -13
- package/server/plugins/metric/systemMetricsFifthly.js +4 -3
- package/server/plugins/migration/exec.sql.js +2 -2
- package/server/plugins/pg/funcs/getMeta.js +1 -0
- package/server/plugins/pg/funcs/getPG.js +2 -2
- package/server/plugins/pg/funcs/getPGAsync.js +2 -2
- package/server/plugins/pg/funcs/init.js +5 -5
- package/server/plugins/policy/funcs/checkPolicy.js +6 -6
- package/server/plugins/table/funcs/metaFormat/getSelectVal.js +6 -5
- package/server/routes/crud/controllers/table.js +2 -2
- package/server/routes/dblist/controllers/readItems.js +11 -4
- package/server/routes/dblist/controllers/setItem.js +2 -2
- package/server/routes/table/controllers/data.js +57 -39
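Most of the hunks below share one theme: Redis and Postgres become optional, so direct rclient calls are gated behind config.redis (and Postgres access behind config.pg or optional chaining). A minimal sketch of that degrade-gracefully shape, with config, rclient and pg standing in for the package's own objects and the query used purely as a placeholder, might look like:

  // sketch only: cache read that quietly degrades when Redis is disabled
  async function readWithOptionalCache(config, rclient, pg, key) {
    const db = pg?.options?.database;                         // optional chaining: no crash when no PG client exists
    const cached = config.redis ? await rclient.get(key) : null;
    if (cached) return JSON.parse(cached);
    return db ? (await pg.query('select 1 as ok')).rows : []; // placeholder query, illustration only
  }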
package/package.json
CHANGED
package/server/plugins/cron/funcs/runCron.js
CHANGED

@@ -9,15 +9,16 @@ export default async function runCron({
  const unique = await verifyUnique(name);
  if (!unique) return;

- const db = pg
+ const db = pg?.options?.database;
+ if (!db) return;

  try {
  const data = await func({ pg });
- const subdir =
+ const subdir = data ? 'cron' : 'cron/null';
  logger.file(subdir, { db, name, result: data });
  }
  catch (err) {
  logger.file('cron', { db, name, error: err.toString() });
  logger.error(err);
  }
- }
+ }
package/server/plugins/cron/funcs/verifyUnique.js
CHANGED

@@ -9,6 +9,7 @@ const md5 = (string) => createHash('md5').update(string).digest('hex');
  const rclient = getRedis();

  export default async function verifyUnique(name) {
+ if (!config.redis) return true; // skip if redis is disabled
  const cronId = config.port || 3000 + md5(name);
  // one per node check
  const key = `cron:unique:${cronId}`;

@@ -19,4 +20,4 @@ export default async function verifyUnique(name) {
  }
  await rclient.expire(key, 20);
  return true;
- }
+ }
package/server/plugins/cron/index.js
CHANGED

@@ -1,37 +1,39 @@
  import { createHash } from 'node:crypto';

- import config from
+ import config from '../../../config.js';

- import getPG from
- import pgClients from
+ import getPG from '../pg/funcs/getPG.js';
+ import pgClients from '../pg/pgClients.js';
  import getRedis from '../redis/funcs/getRedis.js';
- import logger from
- import interval2ms from
+ import logger from '../logger/getLogger.js';
+ import interval2ms from './funcs/interval2ms.js';

  const rclient = getRedis();

  async function runCron({
  pg = pgClients.client, query, name,
  }) {
- const db = pg
+ const db = pg?.options?.database;

  // verifyUnique
  const key = `cron:unique:${name}`;
- const unique = await rclient.setnx(key, 1);
- const ttl = await rclient.ttl(key);
+ const unique = config.redis ? await rclient.setnx(key, 1) : null;
+ const ttl = config.redis ? await rclient.ttl(key) : -1;

  if (!unique && ttl !== -1) {
  // if (config.trace) console.log(name, db, query, 'skip unique');
  return;
  }

-
+ if (config.redis) {
+ await rclient.expire(key, 20);
+ }

  try {
- if (!pg.pk) await pg.init();
+ if (!pg.pk && config.pg) { await pg.init(); }

  if (config.trace) console.time(`${db}:${query}`);
- const { command, rows = [], rowCount } = await pg.query(query);
+ const { command, rows = [], rowCount } = pg?.pk ? await pg.query(query) : {};
  if (config.trace) console.timeEnd(`${db}:${query}`);

  logger.file('cron', { db, name, result: { command, rows, rowCount } });
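The de-duplication above rests on Redis SETNX plus a short TTL: the first process to set cron:unique:<name> runs the job, the rest skip until the key expires. A standalone sketch of that single-flight pattern, assuming an ioredis-style client and a hypothetical runJob callback, could look like this:

  // sketch only: single-flight cron guard via SETNX + TTL (assumes an ioredis-style client)
  async function runOnce(rclient, name, runJob, ttlSeconds = 20) {
    const key = `cron:unique:${name}`;
    const acquired = await rclient.setnx(key, 1); // 1 if this process took the lock, 0 otherwise
    const ttl = await rclient.ttl(key);           // -1 means the key exists but has no expiry yet
    if (!acquired && ttl !== -1) return;          // someone else holds a live lock, skip this run
    await rclient.expire(key, ttlSeconds);        // arm the expiry so the lock frees itself
    await runJob();
  }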
package/server/plugins/crud/funcs/dataDelete.js
CHANGED

@@ -2,6 +2,7 @@ import getPG from '../../pg/funcs/getPG.js';
  import getMeta from '../../pg/funcs/getMeta.js';
  import getRedis from '../../redis/funcs/getRedis.js';
  import pgClients from '../../pg/pgClients.js';
+ import config from '../../../../config.js';

  import extraData from '../../extra/extraData.js';
  import logChanges from './utils/logChanges.js';

@@ -15,7 +16,7 @@ export default async function dataDelete({
  const pg = pg1 || getPG({ name: 'client' });

  // pg client single transaction support
- if (!pg.
+ if (!pg?.pk && config.pg) {
  pg.options = pgClients.client?.options;
  pg.tlist = pgClients.client?.tlist;
  pg.pgType = pgClients.client?.pgType;

@@ -25,7 +26,9 @@ export default async function dataDelete({

  const { pk } = await getMeta({ pg, table })
  .catch(err => {
- logger.file('crud/delete', {
+ logger.file('crud/delete', {
+ error: err.toString(), stack: err.stack, table, id, referer, uid,
+ });
  throw new Error(err.toString());
  });
  const table1 = table.replace(/"/g, '');

@@ -38,7 +41,9 @@ export default async function dataDelete({

  const res = await pg.query(delQuery, [id])
  .catch(err => {
- logger.file('crud/delete', {
+ logger.file('crud/delete', {
+ error: err.toString(), stack: err.stack, table, id, referer, uid, q: delQuery,
+ });
  throw new Error(err.toString());
  })
  .then(el => el.rows?.[0] || {});

@@ -46,6 +51,6 @@ export default async function dataDelete({
  await logChanges({
  pg, table, tokenData, referer, id, uid, type: 'DELETE',
  });
- rclient.incr(`pg:${table}:crud`);
+ if (config.redis) { rclient.incr(`pg:${table}:crud`); }
  return { ...res, ...extraRes || {} };
  }
package/server/plugins/crud/funcs/dataInsert.js
CHANGED

@@ -2,6 +2,7 @@ import getPG from '../../pg/funcs/getPG.js';
  import getMeta from '../../pg/funcs/getMeta.js';
  import getRedis from '../../redis/funcs/getRedis.js';
  import pgClients from '../../pg/pgClients.js';
+ import config from '../../../../config.js';

  import logChanges from './utils/logChanges.js';
  import logger from '../../logger/getLogger.js';

@@ -15,7 +16,7 @@ export default async function dataInsert({
  const pg = pg1 || getPG({ name: 'client' });

  // pg client single transaction support
- if (!pg.
+ if (!pg?.pk && config.pg) {
  pg.options = pgClients.client?.options;
  pg.tlist = pgClients.client?.tlist;
  pg.pgType = pgClients.client?.pgType;

@@ -32,7 +33,9 @@ export default async function dataInsert({
  Object.assign(data, {
  ...(id && pg.pk?.[table] ? { [pg.pk?.[table]]: id } : {}),
  ...(table !== 'admin.users' ? { uid } : {}),
- editor_id: uid,
+ editor_id: uid,
+ created_by: uid,
+ updated_by: uid,
  // editor_id: uid,
  });

@@ -52,13 +55,17 @@ export default async function dataInsert({

  const res = await pg.query(insertQuery, [...filterData.map((el) => (typeof el[1] === 'object' && (!Array.isArray(el[1]) || typeof el[1]?.[0] === 'object') ? JSON.stringify(el[1]) : el[1]))])
  .catch(err => {
- logger.file('crud/insert', {
+ logger.file('crud/insert', {
+ error: err.toString(), stack: err.stack, table, id, referer, uid, q: insertQuery,
+ });
  throw new Error(err.toString());
  }).then(el => el || {});

  const table1 = pg.pk[table] ? table : table.replace(/"/g, '');

- const extraRes = await extraData({
+ const extraRes = await extraData({
+ table, form: tokenData?.form, id: res.rows?.[0]?.[pg.pk[table1]], data, uid,
+ }, pg);
  if (extraRes && res?.rows?.[0]) {
  Object.assign(res.rows[0], { ...extraRes });
  }

@@ -74,6 +81,6 @@ export default async function dataInsert({
  type: 'INSERT',
  });

- rclient.incr(`pg:${table}:crud`);
+ if (config.redis) { rclient.incr(`pg:${table}:crud`); }
  return res;
  }
package/server/plugins/crud/funcs/dataUpdate.js
CHANGED

@@ -2,6 +2,7 @@ import getPG from '../../pg/funcs/getPG.js';
  import getMeta from '../../pg/funcs/getMeta.js';
  import getRedis from '../../redis/funcs/getRedis.js';
  import pgClients from '../../pg/pgClients.js';
+ import config from '../../../../config.js';

  import extraData from '../../extra/extraData.js';
  import logChanges from './utils/logChanges.js';

@@ -28,7 +29,7 @@ export default async function dataUpdate({
  const pg = pg1 || getPG({ name: 'client' });

  // pg client single transaction support
- if (!pg.
+ if (!pg?.pk && config.pg) {
  pg.options = pgClients.client?.options;
  pg.tlist = pgClients.client?.tlist;
  pg.pgType = pgClients.client?.pgType;

@@ -72,17 +73,39 @@ export default async function dataUpdate({
  // console.log(updateQuery, filterValue);
  const res = await pg.query(updateQuery, [id, ...filterValue])
  .catch(err => {
- logger.file('crud/update', {
+ logger.file('crud/update', {
+ error: err.toString(),
+ stack: err.stack,
+ table,
+ id,
+ referer,
+ uid,
+ data,
+ q: updateQuery,
+ });
  throw new Error(err.toString());
  })
  .then(el => el?.rows?.[0]) || {};

- const extraRes = await extraData({
+ const extraRes = await extraData({
+ table,
+ form: tokenData?.form,
+ id,
+ data,
+ uid,
+ }, pg);

  await logChanges({
- pg,
+ pg,
+ table,
+ tokenData,
+ referer,
+ data,
+ id,
+ uid,
+ type: 'UPDATE',
  });

- rclient.incr(`pg:${table}:crud`);
+ if (config.redis) { rclient.incr(`pg:${table}:crud`); }
  return { ...res, ...extraRes || {} };
  }
package/server/plugins/crud/funcs/getOpt.js
CHANGED

@@ -1,9 +1,10 @@
+ import config from '../../../../config.js';
  import getRedis from '../../redis/funcs/getRedis.js';

-
+ const rclient = getRedis({ db: 0 });

  export default async function getOpt(token, uid = 0) {
-
+ if (!config.redis) return null;

  const key = `opt:${uid}:${token}`;
  // console.log(key);
package/server/plugins/crud/funcs/getToken.js
CHANGED

@@ -15,14 +15,15 @@ const keys = {
  e: '%s:token:exec:%s',
  };

+ const rclient = getRedis({ db: 0 });
+
  async function getToken({
  uid, token, json,
  }) {
+ if (!config.redis) return null;
  const mode = 'w';
  // if (mode === 'r') return token;

- const rclient = getRedis({ db: 0 });
-
  const key = sprintf(keys[mode], config?.pg?.database, uid?.toString());
  const id = await rclient.hget(key, token);
  // console.log(key, token);
package/server/plugins/crud/funcs/setOpt.js
CHANGED

@@ -1,21 +1,21 @@
  import { createHash, randomUUID } from 'crypto';

-
-
+ import config from '../../../../config.js';
  import getRedis from '../../redis/funcs/getRedis.js';

-
+ const random = randomUUID();
+ const rclient = getRedis({ db: 0 });

  function md5(string) {
  return createHash('md5').update(string).digest('hex');
  }

  export default function setOpt(params, uid = 0) {
+ if (!config.redis) return null;
  const token = Buffer.from(md5(typeof params === 'object' ? JSON.stringify(params) : params) + random, 'hex').toString('base64').replace(/[+-=]+/g, '');
  // const token = md5(params);
  const key = `opt:${uid}:${token}`;

- const rclient = getRedis({ db: 0 });
  rclient.set(key, JSON.stringify(params), 'EX', 60 * 60);
  return token;
  }
package/server/plugins/crud/funcs/setToken.js
CHANGED

@@ -23,8 +23,7 @@ const generateCodes = (ids, userToken) => {
  function setToken({
  ids: idsOrigin, uid, array,
  }) {
-
-
+ if (!config.redis) return null;
  if (!uid) return { user: 'empty' };
  if (!Object.keys(idsOrigin).length) return { ids: 'empty' };
package/server/plugins/logger/getLogger.js
CHANGED

@@ -49,6 +49,7 @@ if (config.debug) {
  logger.metrics = function metrics(key, val, dbName) {
  const dbname = dbName || config.pg?.database;
  if (!dbname && !isServer) return;
+ if (!config.redis) return;
  rclient2.hincrby(`${dbname}:system_metrics`, key, val || 1);
  };
package/server/plugins/metric/loggerSystem.js
CHANGED

@@ -3,7 +3,9 @@ import process from 'process';
  import util from 'node:util';
  import { exec } from 'node:child_process';

- import {
+ import {
+ config, getFolder, pgClients, getRedis,
+ } from '../../../utils.js';

  const execAsync = util.promisify(exec);
  const platform = os.platform();

@@ -21,9 +23,9 @@ const sqlQuery = `select datname as dbname, application_name as app, client_addr

  export default async function loggerSystem(req) {
  const { pg = pgClients.client } = req;
- const dbName = pg
+ const dbName = pg?.options?.database;

- const dbsize = await rclient.get(`${dbName}:content:dbsize:${redisKey}`);
+ const dbsize = config.redis ? await rclient.get(`${dbName}:content:dbsize:${redisKey}`) : null;

  const dbVerion = await pg.query('select version();').then(el => el.rows?.[0]?.version);

@@ -46,7 +48,7 @@ export default async function loggerSystem(req) {
  const resource = process.resourceUsage();

  const currentCpuUsage = (((process.cpuUsage().user + process.cpuUsage().system) * 1000) / totalCpu) * 100;
- const redisInfo = await rclient.info();
+ const redisInfo = config.redis ? await rclient.info() : '';

  const lines = redisInfo.split('\r\n').filter((el) => el && el.split);
  const redis = {};

@@ -58,21 +60,21 @@ export default async function loggerSystem(req) {
  }
  }

- await rclient.set(`${dbName}:content:dbsize:${redisKey}`, dbSize, 'EX', 30 * 60);
+ if (config.redis) { await rclient.set(`${dbName}:content:dbsize:${redisKey}`, dbSize, 'EX', 30 * 60); }

- const latency = await rclient.latency('latest');
+ const latency = config.redis ? await rclient.latency('latest') : null;

- const uptime = await pg.query(
+ const uptime = await pg.query('select extract(\'epoch\' from current_timestamp - pg_postmaster_start_time())::int as uptime')
  .then(el => el.rows?.[0]?.uptime);

- const metric5 = await rclient2.hgetall(`${dbName}:system_metrics`);
+ const metric5 = config.redis ? await rclient2.hgetall(`${dbName}:system_metrics`) : {};

  Object.keys(metric5 || {}).filter((el) => el.startsWith('time')).forEach((m) => {
  metric5[m] /= 1000.0;
  });

  const metricSort = metric5 ? JSON.parse(JSON.stringify(metric5, Object.keys(metric5).sort())) : undefined;
- const userOnline = await rclient2.scan(['0', 'match', `${dbName}:user:*`, 'count', '10000']);
+ const userOnline = config.redis ? await rclient2.scan(['0', 'match', `${dbName}:user:*`, 'count', '10000']) : [];
  // const userOnline = await rclient10.scan(['0', 'match', 'sess:*', 'count', '10000']);

  return {

@@ -80,8 +82,8 @@ export default async function loggerSystem(req) {
  root: process.cwd(),
  processFolder,
  saveDirectory: filesFolder,
- dbhost: pg
- dbport: pg
+ dbhost: pg?.options?.host,
+ dbport: pg?.options?.port,
  dbname: dbName,
  },

@@ -101,7 +103,7 @@ export default async function loggerSystem(req) {
  'node.cpu.time': resource.systemCPUTime,
  'node.cpu.load': os.loadavg()[0],
  'redis.ram': redis.used_memory_human,
- 'redis.latency': latency?.join(','),
+ 'redis.latency': latency?.join?.(','),
  'redis.ram.peak': redis.used_memory_peak_human,
  'redis.clients': redis.connected_clients,
  'redis.db0': redis.db0?.split(',')?.[0]?.substring(5),

@@ -126,4 +128,4 @@ export default async function loggerSystem(req) {
  top: topProcess,
  query,
  };
- }
+ }
package/server/plugins/metric/systemMetricsFifthly.js
CHANGED

@@ -1,6 +1,7 @@
  import pgClients from '../pg/pgClients.js';
  import logger from '../logger/getLogger.js';
  import getRedis from '../redis/funcs/getRedis.js';
+ import config from '../../../config.js';

  import loggerSystem from './loggerSystem.js';

@@ -8,9 +9,9 @@ const rclient2 = getRedis({ db: 2 });

  export default async function systemMetricsFifthly({ pg = pgClients.client }) {
  const system = await loggerSystem({ pg });
- const dbName = pg
+ const dbName = pg?.options?.database;

- await rclient2.del(`${dbName}:system_metrics`);
+ if (config.redis) { await rclient2.del(`${dbName}:system_metrics`); }

  logger.file('metric', {
  dbname: dbName,

@@ -20,4 +21,4 @@ export default async function systemMetricsFifthly({ pg = pgClients.client }) {
  });

  return { message: 'Saved Fifthly' };
- }
+ }
package/server/plugins/migration/exec.sql.js
CHANGED

@@ -31,7 +31,7 @@ export default async function execSql(filepath, pg = pgClients.client) {
  const sql = readFileSync(filepath, 'utf-8');

  const hash = createHash('md5').update(sql).digest('hex');
- const hashes = await rclient.hgetall(`${pg
+ const hashes = config.redis ? await rclient.hgetall(`${pg?.options?.database}:migration-hashes`).then(obj => Object.keys(obj)) : [];

  if (hashes.includes(hash) && !config.disableCache) {
  console.log(filename, 'skip equal hash', Date.now() - start);

@@ -41,7 +41,7 @@ export default async function execSql(filepath, pg = pgClients.client) {
  try {
  console.log(filename, 'start', Date.now() - start);
  await pg.query(sql);
- if (!config.disableCache) await rclient.hset(`${pg
+ if (!config.disableCache && config.redis) await rclient.hset(`${pg?.options?.database}:migration-hashes`, hash, 1);
  console.log(filename, 'finish', Date.now() - start);
  logger.file('migration/success', {
  filepath,
package/server/plugins/pg/funcs/getMeta.js
CHANGED

@@ -5,6 +5,7 @@ const data = {};
  // decorator
  export default async function getMeta(opt, nocache) {
  const pg = opt?.pg || getPG({ name: 'client' });
+ if (!pg) return { error: 'pg connection not established', status: 400 };
  const table = opt?.table || opt;

  if (pg?.options?.database && data[pg.options.database]?.[table] && !nocache) return data[pg.options.database][table];
package/server/plugins/pg/funcs/getPG.js
CHANGED

@@ -20,8 +20,8 @@ function getPG(param) {
  if (pgClients[name]) return pgClients[name];

  const dbConfig = {
- user: user || config.pg?.user,
- password: password || config.pg?.password,
+ user: user || config.pg?.user || 'postgres',
+ password: password || config.pg?.password || 'postgres',
  host: host || config.pg?.host,
  port: port || config.pg?.port,
  database: db || database || config.pg?.db || config.pg?.database,
package/server/plugins/pg/funcs/getPGAsync.js
CHANGED

@@ -21,8 +21,8 @@ async function getPGAsync(param) {
  if (pgClients[name]?.tlist) return pgClients[name];

  const dbConfig = {
- user: user || config.pg?.user,
- password: password || config.pg?.password,
+ user: user || config.pg?.user || 'postgres',
+ password: password || config.pg?.password || 'postgres',
  host: host || config.pg?.host,
  port: port || config.pg?.port,
  database: db || database || config.pg?.db || config.pg?.database,
package/server/plugins/pg/funcs/init.js
CHANGED

@@ -7,7 +7,7 @@ import logger from '../../logger/getLogger.js';
  const rclient = getRedis({ db: 0 });

  async function init(client) {
- if (!client
+ if (!client?.options?.database) {
  return;
  }
  const textQuery = `select

@@ -23,7 +23,7 @@ async function init(client) {
  from pg_class where relkind in ('r','v')`);
  const relkinds = rows.reduce((acc, curr) => Object.assign(acc, { [curr.tname]: curr.relkind }), {});

- async function query(q, args = [], isstream) {
+ async function query(q, args = [], isstream = false) {
  try {
  if (isstream) {
  await client.query('set statement_timeout to 100000000');

@@ -83,13 +83,13 @@ async function init(client) {

  // CRUD table state
  const keyCacheTable = `pg:${table}:crud`;
- const crudInc = table ? await rclient.get(keyCacheTable) || 0 : 0;
+ const crudInc = table && config.redis ? (await rclient.get(keyCacheTable) || 0) : 0;

  //
  const hash = createHash('sha1').update([q, JSON.stringify(args)].join()).digest('base64');
  const keyCache = `pg:${hash}:${crudInc}`;

- const cacheData = await rclient.get(keyCache);
+ const cacheData = config.redis ? await rclient.get(keyCache) : null;

  if (cacheData && !config.local) {
  // console.log('from cache', table, query);

@@ -98,7 +98,7 @@ async function init(client) {

  const data = await query(q, args || []);

- if (seconds > 0) {
+ if (seconds > 0 && config.redis) {
  rclient.set(keyCache, JSON.stringify(data), 'EX', seconds);
  }
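The query cache in init.js keys each result on a SHA-1 of the SQL plus a per-table pg:<table>:crud counter, and the CRUD functions above bump that counter on every write, so readers simply move to a fresh cache key instead of needing explicit invalidation. A rough sketch of that version-stamped-key idea, with rclient and runQuery as stand-ins rather than the package's real objects, might look like:

  // sketch only: version-stamped cache keys; the counter bump on writes makes stale entries unreachable
  import { createHash } from 'node:crypto';

  async function cachedQuery(rclient, runQuery, table, sql, args, seconds = 30) {
    const version = (await rclient.get(`pg:${table}:crud`)) || 0; // incremented by insert/update/delete
    const hash = createHash('sha1').update([sql, JSON.stringify(args)].join()).digest('base64');
    const key = `pg:${hash}:${version}`;
    const cached = await rclient.get(key);
    if (cached) return JSON.parse(cached);
    const data = await runQuery(sql, args);                       // hypothetical query runner
    if (seconds > 0) await rclient.set(key, JSON.stringify(data), 'EX', seconds);
    return data;
  }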
package/server/plugins/policy/funcs/checkPolicy.js
CHANGED

@@ -75,12 +75,12 @@ export default function checkPolicy(req, reply) {
  }

  /* === policy: public === */
- if (policy.includes('public') || skipCheckPolicy(path)) {
+ if (policy.includes('public') || skipCheckPolicy(path) || !config.pg || config.auth?.disable || config.local || config.debug || unittest) {
  return null;
  }

  /* === 0. policy: unauthorized access from admin URL === */
- if (!validToken && !user?.uid &&
+ if (!validToken && !user?.uid && isAdmin && !policy.includes('public')) {
  logger.file('policy/unauthorized', {
  path, method, params, query, body, token: headers?.token, userId: headers?.uid, ip: req.ip, headers, message: 'unauthorized',
  });

@@ -88,7 +88,7 @@ export default function checkPolicy(req, reply) {
  }

  /* === 3. policy: user === */
- if (!validToken && !user && policy.includes('user') &&
+ if (!validToken && !user && policy.includes('user') && !skipCheckPolicy(path)) {
  logger.file('policy/user', {
  path, method, params, query, body, message: 'access restricted: 3',
  });

@@ -96,7 +96,7 @@ export default function checkPolicy(req, reply) {
  }

  /* === 4. policy: referer === */
- if (!validToken && !headers?.referer?.includes?.(hostname) && policy.includes('referer')
+ if (!validToken && !headers?.referer?.includes?.(hostname) && policy.includes('referer')) {
  logger.file('policy/referer', {
  path, method, params, query, body, message: 'access restricted: 4', uid: user?.uid,
  });

@@ -104,7 +104,7 @@ export default function checkPolicy(req, reply) {
  }

  /* === 5. policy: site auth === */
- if (!validToken && !policy.includes('site') && !isAdmin
+ if (!validToken && !policy.includes('site') && !isAdmin) {
  logger.file('policy/site', {
  path, method, params, query, body, message: 'access restricted: 5', uid: user?.uid,
  });

@@ -112,7 +112,7 @@ export default function checkPolicy(req, reply) {
  }

  /* === 6. base policy: block non-public api w/ out authorization === */
- if (!validToken && isAdmin && !isUser && isServer
+ if (!validToken && isAdmin && !isUser && isServer) {
  logger.file('policy/api', {
  path, method, params, query, body, message: 'access restricted: 6', uid: user?.uid,
  });
package/server/plugins/table/funcs/metaFormat/getSelectVal.js
CHANGED

@@ -1,13 +1,14 @@
  import getSelect from '../getSelect.js';
  import pgClients from '../../../pg/pgClients.js';
- import
+ import rclient from '../../../redis/client.js';
+ import config from '../../../../../config.js';

  const selectIds = {};
  export default async function getSelectVal({
  pg = pgClients.client, name, values: valuesOrigin, ar = false,
  }) {
  if (!valuesOrigin?.length) return null;
- const values = valuesOrigin.filter(el => typeof el === 'boolean' ? true : el).map(el => el.toString());
+ const values = valuesOrigin.filter(el => (typeof el === 'boolean' ? true : el)).map(el => el.toString());
  const cls = await getSelect(name, pg);

  // === array ===

@@ -26,7 +27,7 @@ export default async function getSelectVal({

  // cache
  const key = `select:${name}`;
- const cache = values?.length ? (await
+ const cache = values?.length && config.redis ? (await rclient.hmget(key, values)).reduce((p, el, i) => ({ ...p, [values[i]]: el }), {}) : {};
  const filteredValues = values.filter(el => !cache[el]);

  // query select

@@ -37,8 +38,8 @@

  const clsObj = { ...cache, ...data.reduce((p, el) => ({ ...p, [el.id.toString()]: el.color ? el : el.text }), {}) };

- if (data?.length) {
-
+ if (data?.length && config.redis) {
+ rclient.hmset(key, clsObj);
  }

  if (ar) {
package/server/routes/crud/controllers/table.js
CHANGED

@@ -75,7 +75,7 @@ export default async function tableAPI(req) {
  if (!data) return { message: 'not found', status: 404 };

  Object.keys(schema).filter(key => schema[key]?.type === 'DataTable').forEach(key => {
- if (data[key] && !Array.isArray(data[key])) { data[key] = null }
+ if (data[key] && !Array.isArray(data[key])) { data[key] = null; }
  });

  if (extraKeys?.length) {

@@ -93,7 +93,7 @@ export default async function tableAPI(req) {
  ids: [JSON.stringify({ id, table: tableName, form: loadTable.form })],
  uid: user.uid,
  array: 1,
- })[0];
+ })?.[0];
  }

  await extraDataGet({ rows: [data], table: tableName, form: hookData?.form || tokenData?.form || loadTable.form }, pg);
package/server/routes/dblist/controllers/readItems.js
CHANGED

@@ -1,4 +1,4 @@
- import { dblist, getRedis } from '../../../../utils.js';
+ import { config, dblist, getRedis } from '../../../../utils.js';

  import formatData from '../utils/formatData.js';

@@ -12,10 +12,17 @@ export default async function readItemList(req) {
  || '2';

  const key = `current-db:${uid}`;
- const ttl = await rclient.ttl(key);
- const currentId = await rclient.get(key);
+ const ttl = config.redis ? await rclient.ttl(key) : null;
+ const currentId = config.redis ? await rclient.get(key) : null;
  rclient.setex(key, 60 * 60 * 10000, currentId);

  const { originalMaxAge, expires } = req.session?.cookie || {};
- return {
+ return {
+ ttl,
+ current: currentId || rows[0]?.id,
+ rows,
+ user: {
+ ...req.user, originalMaxAge, expires, uid,
+ },
+ };
  }
package/server/routes/dblist/controllers/setItem.js
CHANGED

@@ -1,4 +1,4 @@
- import { dblist, getRedis } from '../../../../utils.js';
+ import { config, dblist, getRedis } from '../../../../utils.js';

  const rclient = getRedis();

@@ -19,7 +19,7 @@ export default async function setItem(req) {
  || req.session?.passport?.user?.username // login passwd
  || '2';

- await rclient.setex(`current-db:${uid}`, 60 * 60 * 10000, id);
+ if (config.redis) { await rclient.setex(`current-db:${uid}`, 60 * 60 * 10000, id); }

  return { current: id };
  }
package/server/routes/table/controllers/data.js
CHANGED

@@ -9,8 +9,8 @@ import locales from './utils/locales.js';
  import conditions from './utils/conditions.js';

  const components = {
- 'vs-widget-file':
- 'vs-widget-comments':
+ 'vs-widget-file': 'select \'vs-widget-file\' as component, count(*) from crm.files where entity_id=$1 and file_status<>3',
+ 'vs-widget-comments': 'select \'vs-widget-comments\' as component, count(*) from crm.communications where entity_id=$1',
  };

  const checkInline = {};
@@ -42,40 +42,38 @@ export default async function dataAPI(req, reply, called) {
  if (!checkInline[params?.table] && loadTable?.sql?.length && loadTable.table) {
  const filterSql = loadTable.sql.filter(el => !el?.disabled && (el.inline ?? true));
  const sqlTable = filterSql.map((el, i) => ` left join lateral (${el.sql}) ${el.name || `t${i}`} on 1=1 `)?.join('') || '';
- const d = await Promise.all(filterSql.map((el, i) => pg.query(`select ${el.name || `t${i}`}.* from(select * from ${loadTable.table})t ${sqlTable} limit 0`).then(
+ const d = await Promise.all(filterSql.map((el, i) => pg.query(`select ${el.name || `t${i}`}.* from(select * from ${loadTable.table})t ${sqlTable} limit 0`).then(item => item.fields)));
  d.forEach((el, i) => {
- filterSql[i].inline = el.length
+ filterSql[i].inline = el.length === 1;
  filterSql[i].fields = el.map(f => f.name);
  });
  checkInline[params?.table] = loadTable.sql;
- }
-
+ }
+ else if (checkInline[params?.table]) {
+ loadTable.sql = checkInline[params?.table];
  }

  if (query.sql === '0') return loadTable;

+ if (!config.pg) { return reply.status(500).send('empty pg'); }
+
  if (!loadTable && !(tokenData?.table && pg.pk?.[tokenData?.table]) && !(called && pg.pk?.[params?.table])) {
- return
+ return reply.status(404).send('template not found');
  }

  const id = tokenData?.id || hookData?.id || params?.id;
  const { actions = [], query: accessQuery } = await getAccess({ table: tokenData?.table || hookData?.table || params.table, id, user }, pg) || {};

  if (!actions.includes('view') && !config?.local && !called) {
- return
+ return reply.status(403).send('access restricted');
  }

  const {
  table, columns = [], sql, cardSql, filters, form, meta, sqlColumns, public: ispublic, editable = false,
  } = loadTable || tokenData || params;

- const columns1 = columns || dbColumns.map(({ name, title, dataTypeID }) => ({ name, title, type: pg.pgType[dataTypeID] }));
- columns1.forEach(col => {
- Object.assign(col, locales[`${table || params.table}.${col.name}`] || {});
- });
-
  const tableMeta = await getMeta({ pg, table });
- timeArr.push(Date.now())
+ timeArr.push(Date.now());
  if (tableMeta?.view) {
  if (!loadTable?.key && !tokenData?.key) return { message: `key not found: ${table}`, status: 404 };
  Object.assign(tableMeta, { pk: loadTable?.key || tokenData?.key });

@@ -83,7 +81,14 @@ export default async function dataAPI(req, reply, called) {

  const { pk, columns: dbColumns = [] } = tableMeta || {};

-
+ const columns1 = columns || dbColumns.map(({ name, title, dataTypeID }) => ({ name, title, type: pg.pgType[dataTypeID] }));
+ columns1.forEach(col => {
+ Object.assign(col, locales[`${table || params.table}.${col.name}`] || {});
+ });
+
+ if (!pk) {
+ return reply.status(404).send(`table not found: ${table}`);
+ }

  const columnList = dbColumns.map((el) => el.name || el).join(',');
  const sqlTable = sql?.filter?.((el) => !el?.disabled && !el.inline && el?.sql?.replace && (!el.sql.includes('{{uid}}') || uid)).map((el, i) => ` left join lateral (${el.sql.replace('{{uid}}', uid)}) ${el.name || `t${i}`} on 1=1 `)?.join('') || '';
@@ -179,7 +184,7 @@ export default async function dataAPI(req, reply, called) {
  throw new Error(err.toString());
  });

- timeArr.push(Date.now())
+ timeArr.push(Date.now());

  if (uid && rows.length && editable) {
  rows.forEach(row => {

@@ -187,7 +192,7 @@ export default async function dataAPI(req, reply, called) {
  ids: [JSON.stringify({ id: row.id, table: tokenData?.table || hookData?.table || params.table, form: loadTable?.form })],
  uid,
  array: 1,
- })[0];
+ })?.[0];
  });
  }
@@ -209,14 +214,14 @@ export default async function dataAPI(req, reply, called) {
  ? { total: rows.length, filtered: rows.length }
  : await pg.queryCache?.(qCount, { table: loadTable?.table || tokenData?.table, time: 5 }).then(el => el?.rows[0] || {});

- timeArr.push(Date.now())
+ timeArr.push(Date.now());
  const { total, filtered } = counts || {};
  const agg = Object.keys(counts).filter(el => !['total', 'filtered'].includes(el)).reduce((acc, el) => ({ ...acc, [el]: counts[el] }), {});

  await extraDataGet({ rows, table: loadTable?.table, form }, pg);

  await metaFormat({ rows, table: tokenData?.table || hookData?.table || params.table, sufix }, pg);
- timeArr.push(Date.now())
+ timeArr.push(Date.now());
  const status = [];
  if (loadTable?.meta?.status) {
  const statusColumn = loadTable.meta?.cls?.[loadTable.meta?.status]
@@ -237,19 +242,19 @@ export default async function dataAPI(req, reply, called) {

  const tokens = {};
  if (template && objectId) {
-
  // tokens result
  if (index?.tokens && typeof index?.tokens === 'object' && !Array.isArray(index?.tokens)) {
  Object.keys(index.tokens || {})
  .filter(key => index?.tokens[key]?.public
  || actions?.includes?.('edit')
  || actions?.includes?.('add')
- || !index?.tokens[key]?.table
- )
+ || !index?.tokens[key]?.table)
  .forEach(key => {
  const item = index?.tokens[key];
  Object.keys(item).filter(el => item[el]?.includes?.('{{')).forEach(el => {
- item[el] = handlebarsSync.compile(item[el])({
+ item[el] = handlebarsSync.compile(item[el])({
+ user, uid: user?.uid, id, data: rows[0],
+ });
  });

  const token = item.form && item.table ? setToken({
@@ -261,43 +266,47 @@ export default async function dataAPI(req, reply, called) {
  });
  }

- // conditions
+ // conditions
  panels?.filter(el => el.items).forEach(el => {
- el.items = el.items?.filter(
+ el.items = el.items?.filter(item => conditions(item.conditions, rows[0]));
  });

  // title, count
  panels?.filter(el => el.items).forEach(async el => {
- const
- const data = await Promise.all(
-
- Object.assign(
+ const filtered1 = el.items.filter(item => item.count?.toLowerCase?.().includes('select'));
+ const data = await Promise.all(filtered1.map(item => pg.query(item.count).then(item1 => item1.rows[0] || {})));
+ filtered1.forEach((el1, i) => {
+ Object.assign(el1, data[i] || {}, data[i].count ? {} : { count: undefined });
  });
- const
- const
+ const q1 = el.items.map((item) => (item.component ? components[item.component] : null)).filter(item => item).join(' union all ');
+ const counts1 = q1 && id
  ? await pg.query(q, [id])
  .then(e => e.rows.reduce((acc, curr) => Object.assign(acc, { [curr.component]: curr.count }), {}))
  : {};
- el.items?.filter?.(item => item.component)?.forEach(item => Object.assign(item, { count:
+ el.items?.filter?.(item => item.component)?.forEach(item => Object.assign(item, { count: counts1?.[item.component] }));
  });

  // data result
  const data = {};
- const route = pg.pk?.['admin.routes'] ? await pg.query(
+ const route = pg.pk?.['admin.routes'] ? await pg.query('select route_id as path, title from admin.routes where enabled and alias=$1 limit 1', [table])
  .then(el => el.rows?.[0] || {}) : {};
  Object.assign(route, { tableTitle: loadTable?.title });
  if (index?.data && index?.data?.[0]?.name) {
  await Promise.all(index.data.filter((el) => el?.name && el?.sql).map(async (el) => {
- const
-
+ const q2 = handlebarsSync.compile(el.sql)({
+ data: rows[0], user, uid: user?.uid, id,
+ });
+ const { rows: sqlData } = await pg.query(q2);
  data[el.name] = sqlData;
  }));
  }

  // html
  await Promise.all(template.filter(el => el[0].includes('.hbs')).map(async (el) => {
- const htmlContent = await handlebars.compile(el[1])({
-
+ const htmlContent = await handlebars.compile(el[1])({
+ ...rows[0], user, data, tokens,
+ });
+ const name = el[0].substring(0, el[0].lastIndexOf('.'));
  html[name] = htmlContent;
  }));
  }
@@ -338,7 +347,11 @@ export default async function dataAPI(req, reply, called) {
  // console.log({ add: loadTable.table, form: loadTable.form });
  if (uid && actions.includes('add')) {
  const addTokens = setToken({
- ids: [
+ ids: [
+ JSON.stringify({
+ table: tokenData?.table || hookData?.table || params.table,
+ form: loadTable?.form,
+ })],
  uid,
  array: 1,
  });
@@ -346,7 +359,12 @@ export default async function dataAPI(req, reply, called) {
  }

  const result = await applyHook('afterData', {
- pg,
+ pg,
+ table: loadTable?.table || tokenData?.table,
+ id: tokenData?.id || hookData?.id || params.id,
+ template: tokenData?.table || hookData?.table || params.table,
+ payload: res,
+ user,
  });

  return result || res;