@opengis/fastify-table 1.2.19 → 1.2.21

@@ -1,118 +1,118 @@
-import getTemplate from '../../..//table/funcs/getTemplate.js';
-import { metaFormat } from '@opengis/fastify-table/utils.js';
-
-const defaultTitles = {
-  editor_date: 'Дата оновлення',
-  editor_id: 'Редактор',
-  сdate: 'Дата створення',
-  uid: 'Автор',
-  body: 'Зміст',
-  entity_id: 'ID Сутності',
-  entity_type: 'Таблиця сутності',
-  file_path: 'Шлях до файлу',
-  uploaded_name: 'Назва файлу',
-  size: 'Розмір файлу',
-  ext: 'Розширення файлу',
-};
-
-function getValue(val, tableName) {
-  if (!val) return null;
-  if (['crm.files'].includes(tableName)) {
-    return typeof val === 'object'
-      ? JSON.stringify(val)
-      : val;
-  }
-  return typeof val === 'object'
-    ? JSON.stringify(val)?.substring?.(0, 30)
-    : val?.toString?.()?.substring?.(0, 30);
-}
-
-// extract titles and cls from form schema
-// alt: extract table template from referer -> form
-export default async function logChanges({
-  pg, table: table1, tokenData, referer, id, data, uid = 1, type,
-}) {
-  const table = table1.replace(/"/g, '');
-  if (!id) {
-    console.error('param id is required');
-    return null;
-  }
-  if (!table || !pg.pk?.[table]) {
-    console.error('table not found');
-    return null;
-  }
-  if (!pg.pk?.['log.table_changes'] || !pg.pk?.['log.table_changes_data']) {
-    console.error('log table not found');
-    return null;
-  }
-  if (!type) {
-    console.error('invalid type');
-    return null;
-  }
-
-  try {
-    const { change_id: changeId } = await pg.query(`insert into log.table_changes(change_date,change_type,change_user_id,entity_type,entity_id)
-    values(CURRENT_DATE, $1, $2, $3, $4) returning change_id`, [type, uid, table, id]).then((res) => res.rows?.[0] || {});
-
-    const q = `select json_object_agg(entity_key, value_new) from (
-      select
-        entity_key,
-        value_new,
-        ( rank() over (partition by entity_key order by cdate desc) = 1 ) as is_latest
-      from log.table_changes_data
-
-      where change_id in (
-        select
-          change_id
-        from log.table_changes
-        where entity_id=$1
-        and entity_type=$2
-      )
-
-    )q where is_latest`;
-    // console.log(q, type, id);
-
-    const old = type !== 'INSERT' ? await pg.query(q, [id, table]).then(el => el.rows?.[0]?.json_object_agg || {}) : {};
-
-    const body = await getTemplate('form', tokenData?.form);
-    const schema = body?.schema || body || {};
-    const titles = Object.keys(schema)
-      .reduce((acc, curr) => Object.assign(acc, { [curr]: schema[curr].title || schema[curr].ua }), {});
-    const cls = Object.keys(schema)
-      .filter(el => schema[el]?.data)
-      .reduce((acc, curr) => Object.assign(acc, { [curr]: schema[curr].data }), {});
-
-    if (data) {
-      await metaFormat({ rows: [data], cls, sufix: false });
-    }
-
-    const newObj = Object.fromEntries(Object.entries(data || {}).map(el => ([[titles[el[0]] || defaultTitles[el[0]] || el[0]], el[1]])));
-    const changesData = Object.keys(newObj || {}).map(el => ({
-      change_id: changeId,
-      entity_key: el,
-      value_old: getValue(old?.[el], table),
-      value_new: type === 'DELETE' ? null : getValue(newObj?.[el], table),
-      uid,
-    })).filter(el => el?.value_new !== el?.value_old);
-
-    const res = await Promise.all(changesData.map(async (el) => {
-      const insertQuery = `insert into log.table_changes_data (${Object.entries(el)?.map((key) => `"${key[0]}"`).join(',')})
-      values (${Object.entries(el)?.map((key, i) => `$${i + 1}`).join(',')}) returning *`;

-      const { rows = [] } = await pg.query(insertQuery, [...Object.entries(el).map((el1) => (el1[1] && typeof el1[1] === 'object' && (!Array.isArray(el1[1]) || typeof el1[1]?.[0] === 'object') ? JSON.stringify(el1[1]) : el1[1]))]) || {};
-      return rows[0];
-    }));
-
-    const newData = type === 'DELETE' ? {} : (Array.isArray(res) ? res : [res]).reduce((acc, curr) => Object.assign(acc, { [curr.entity_key]: curr.value_new }), {});
-    // console.log('logChanges OK', type);
-    return {
-      change_id: changeId, entity_type: table, entity_id: id, uid, change_type: type, old, new: newData,
-    };
-  }
-  catch (err) {
-    console.error('logChanges error', type, table, id, data, err.toString());
-    return {
-      error: err.toString(), entity_type: table, entity_id: id, uid, change_type: type,
-    };
-  }
-}
+import getTemplate from '../../..//table/funcs/getTemplate.js';
+import { metaFormat } from '@opengis/fastify-table/utils.js';
+
+const defaultTitles = {
+  editor_date: 'Дата оновлення',
+  editor_id: 'Редактор',
+  сdate: 'Дата створення',
+  uid: 'Автор',
+  body: 'Зміст',
+  entity_id: 'ID Сутності',
+  entity_type: 'Таблиця сутності',
+  file_path: 'Шлях до файлу',
+  uploaded_name: 'Назва файлу',
+  size: 'Розмір файлу',
+  ext: 'Розширення файлу',
+};
+
+function getValue(val, tableName) {
+  if (!val) return null;
+  if (['crm.files'].includes(tableName)) {
+    return typeof val === 'object'
+      ? JSON.stringify(val)
+      : val;
+  }
+  return typeof val === 'object'
+    ? JSON.stringify(val)?.substring?.(0, 30)
+    : val?.toString?.()?.substring?.(0, 30);
+}
+
+// extract titles and cls from form schema
+// alt: extract table template from referer -> form
+export default async function logChanges({
+  pg, table: table1, tokenData, referer, id, data, uid = 1, type,
+}) {
+  const table = table1.replace(/"/g, '');
+  if (!id) {
+    console.error('param id is required');
+    return null;
+  }
+  if (!table || !pg.pk?.[table]) {
+    console.error('table not found');
+    return null;
+  }
+  if (!pg.pk?.['log.table_changes'] || !pg.pk?.['log.table_changes_data']) {
+    console.error('log table not found');
+    return null;
+  }
+  if (!type) {
+    console.error('invalid type');
+    return null;
+  }
+
+  try {
+    const { change_id: changeId } = await pg.query(`insert into log.table_changes(change_date,change_type,change_user_id,entity_type,entity_id)
+    values(CURRENT_DATE, $1, $2, $3, $4) returning change_id`, [type, uid, table, id]).then((res) => res.rows?.[0] || {});
+
+    const q = `select json_object_agg(entity_key, value_new) from (
+      select
+        entity_key,
+        value_new,
+        ( rank() over (partition by entity_key order by cdate desc) = 1 ) as is_latest
+      from log.table_changes_data
+
+      where change_id in (
+        select
+          change_id
+        from log.table_changes
+        where entity_id=$1
+        and entity_type=$2
+      )
+
+    )q where is_latest`;
+    // console.log(q, type, id);
+
+    const old = type !== 'INSERT' ? await pg.query(q, [id, table]).then(el => el.rows?.[0]?.json_object_agg || {}) : {};
+
+    const body = await getTemplate('form', tokenData?.form);
+    const schema = body?.schema || body || {};
+    const titles = Object.keys(schema)
+      .reduce((acc, curr) => Object.assign(acc, { [curr]: schema[curr].title || schema[curr].ua }), {});
+    const cls = Object.keys(schema)
+      .filter(el => schema[el]?.data)
+      .reduce((acc, curr) => Object.assign(acc, { [curr]: schema[curr].data }), {});
+
+    if (data) {
+      await metaFormat({ rows: [data], cls, sufix: false });
+    }
+
+    const newObj = Object.fromEntries(Object.entries(data || {}).map(el => ([[titles[el[0]] || defaultTitles[el[0]] || el[0]], el[1]])));
+    const changesData = Object.keys(newObj || {}).map(el => ({
+      change_id: changeId,
+      entity_key: el,
+      value_old: getValue(old?.[el], table),
+      value_new: type === 'DELETE' ? null : getValue(newObj?.[el], table),
+      uid,
+    })).filter(el => el?.value_new !== el?.value_old);
+
+    const res = await Promise.all(changesData.map(async (el) => {
+      const insertQuery = `insert into log.table_changes_data (${Object.entries(el)?.map((key) => `"${key[0]}"`).join(',')})
+      values (${Object.entries(el)?.map((key, i) => `$${i + 1}`).join(',')}) returning *`;
+
+      const { rows = [] } = await pg.query(insertQuery, [...Object.entries(el).map((el1) => (el1[1] && typeof el1[1] === 'object' && (!Array.isArray(el1[1]) || typeof el1[1]?.[0] === 'object') ? JSON.stringify(el1[1]) : el1[1]))]) || {};
+      return rows[0];
+    }));
+
+    const newData = type === 'DELETE' ? {} : (Array.isArray(res) ? res : [res]).reduce((acc, curr) => Object.assign(acc, { [curr.entity_key]: curr.value_new }), {});
+    // console.log('logChanges OK', type);
+    return {
+      change_id: changeId, entity_type: table, entity_id: id, uid, change_type: type, old, new: newData,
+    };
+  }
+  catch (err) {
+    console.error('logChanges error', type, table, id, data, err.toString());
+    return {
+      error: err.toString(), entity_type: table, entity_id: id, uid, change_type: type,
+    };
+  }
+}
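
The hunk above removes and re-adds logChanges.js with content that is identical as rendered, which usually points to a line-ending or encoding normalization rather than a functional change. For orientation, a hedged usage sketch of the exported function: the argument names come from the diff, while the concrete values, import path, and form name are hypothetical, and the `pg` client with its `pg.pk` metadata is assumed to come from the package's own bootstrap.

```js
import logChanges from './logChanges.js'; // hypothetical path

// type is one of INSERT | UPDATE | DELETE and drives how old/new values are logged
const result = await logChanges({
  pg,                                  // pg client exposing pg.pk table metadata
  table: 'crm.files',
  id: 42,
  uid: 7,
  type: 'UPDATE',
  data: { uploaded_name: 'report.pdf', size: 1024 },
  tokenData: { form: 'fileForm' },     // titles resolved via getTemplate('form', ...)
});
// -> { change_id, entity_type, entity_id, uid, change_type, old, new }
```

On success the function writes one row to log.table_changes and one row per changed field to log.table_changes_data, skipping fields whose truncated old and new values compare equal.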
@@ -1,14 +1,18 @@
 import pino from 'pino';
 // import path from 'node:path';
 
-import config from '../../../config.js';
-
-if (!config.log) config.log = {};
+import { config, getRedis } from '../../../utils.js';
 
 // utils
 import getHooks from './getHooks.js';
 import serializers from './serializers.js';
 
+const isServer = process.argv[2];
+
+const rclient2 = getRedis({ db: 2 });
+
+if (!config.log) config.log = {};
+
 const level = config.log?.level || process.env.PINO_LOG_LEVEL || 'info';
 console.log(`log level: ${level}`);
 const options = {
@@ -32,4 +36,10 @@ const logger = pino(options);
 logger.file = function userFile(logfolder, msg, req) {
   logger.info({ logfolder, ...(typeof msg === 'string' ? { msg } : msg) }, req);
 };
+logger.metrics = function metrics(key, val, dbName) {
+  const dbname = dbName || config.pg?.database;
+  if (!dbname && !isServer) return;
+  rclient2.hincrby(`${dbname}:system_metrics`, key, val || 1);
+};
+
 export default logger;
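
The substantive change in this file is the new `logger.metrics` helper: a fire-and-forget Redis `HINCRBY` that accumulates named counters in a single `<dbname>:system_metrics` hash in Redis DB 2, which the new metrics collector below reads back every cycle. A minimal standalone sketch of the same pattern, assuming an ioredis-compatible client of the kind `getRedis` appears to return:

```js
import Redis from 'ioredis';

const rclient2 = new Redis({ db: 2 }); // stand-in for getRedis({ db: 2 })

// bump one counter field in the per-database hash, e.g.
// mydb:system_metrics -> { 'http.requests': '42', 'time.render': '1250' }
function metric(dbname, key, val = 1) {
  rclient2.hincrby(`${dbname}:system_metrics`, key, val);
}

metric('mydb', 'http.requests');     // +1
metric('mydb', 'time.render', 125);  // accumulate milliseconds
```

Counters whose names start with `time` are divided by 1000 when read back in loggerSystem, so millisecond accumulations are reported as seconds.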
@@ -0,0 +1,8 @@
+import addCron from '../cron/funcs/addCron.js';
+import systemMetricsFifthly from './systemMetricsFifthly.js';
+
+async function plugin(fastify) {
+  addCron(systemMetricsFifthly, 60 * 15);
+}
+
+export default plugin;
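
This new plugin does one thing: it registers the collector with the package's cron facility. Assuming `addCron(fn, seconds)` schedules `fn` at the given interval, `60 * 15` runs it every 15 minutes (despite the `Fifthly` naming). A plain-timer stand-in under that assumption:

```js
// hypothetical equivalent of addCron(fn, seconds) using a plain timer
function addCron(fn, seconds) {
  setInterval(() => {
    fn({}).catch((err) => console.error('cron task failed', err));
  }, seconds * 1000);
}

addCron(systemMetricsFifthly, 60 * 15); // every 15 minutes
```

Calling with an empty object works here because systemMetricsFifthly defaults its `pg` parameter to `pgClients.client`.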
@@ -0,0 +1,129 @@
+import os from 'os';
+import process from 'process';
+import util from 'node:util';
+import { exec } from 'node:child_process';
+
+import { config, getFolder, pgClients, getRedis } from '../../../utils.js';
+
+const execAsync = util.promisify(exec);
+const platform = os.platform();
+const processFolder = os.homedir();
+const formatMemoryUsage = (data) => `${(data / 1024 / 1024).toFixed(2)} MB`;
+
+const rclient = getRedis();
+const rclient2 = getRedis({ db: 2 });
+// const rclient10 = getRedis({ db: 10 });
+const filesFolder = getFolder(config);
+
+const redisKey = 'logger-process-online';
+const sqlQuery = `select datname as dbname, application_name as app, client_addr as client_ip,
+  (now()-backend_start)::text as msec, state, query from pg_stat_activity where datname=$1`;
+
+export default async function loggerSystem(req) {
+  const { pg = pgClients.client } = req;
+  const dbName = pg.options?.database;
+
+  const dbsize = await rclient.get(`${dbName}:content:dbsize:${redisKey}`);
+
+  const dbVerion = await pg.query('select version();').then(el => el.rows?.[0]?.version);
+
+  const dbSize = dbsize
+    || (await pg.query(`select pg_size_pretty(pg_database_size('${dbName}'));`).then(el => el.rows?.[0]?.pg_size_pretty));
+
+  const { rows: query = [] } = await pg.query(sqlQuery, [dbName]);
+
+  const { stdout: topProcess } = platform === 'win32'
+    ? { stdout: 'Cant show top on this system type' }
+    : await execAsync('top -b -n 1');
+
+  const osInfo = `${os.version()} ${os.machine?.() || ''}`;
+  const cpuInfo = os.cpus();
+
+  const totalCpu = (Object.values(cpuInfo[0].times).reduce((acc, tv) => acc + tv, 0)) * cpuInfo.length;
+  const totalMemory = os.totalmem();
+
+  const memory = process.memoryUsage();
+  const resource = process.resourceUsage();
+
+  const currentCpuUsage = (((process.cpuUsage().user + process.cpuUsage().system) * 1000) / totalCpu) * 100;
+  const redisInfo = await rclient.info();
+
+  const lines = redisInfo.split('\r\n').filter((el) => el && el.split);
+  const redis = {};
+
+  for (let i = 0; i < lines.length; i += 1) {
+    const [key, val] = lines[i].split(':');
+    if (val) {
+      redis[key] = val;
+    }
+  }
+
+  await rclient.set(`${dbName}:content:dbsize:${redisKey}`, dbSize, 'EX', 30 * 60);
+
+  const latency = await rclient.latency('latest');
+
+  const uptime = await pg.query(`select extract('epoch' from current_timestamp - pg_postmaster_start_time())::int as uptime`)
+    .then(el => el.rows?.[0]?.uptime);
+
+  const metric5 = await rclient2.hgetall(`${dbName}:system_metrics`);
+
+  Object.keys(metric5 || {}).filter((el) => el.startsWith('time')).forEach((m) => {
+    metric5[m] /= 1000.0;
+  });
+
+  const metricSort = metric5 ? JSON.parse(JSON.stringify(metric5, Object.keys(metric5).sort())) : undefined;
+  const userOnline = await rclient2.scan(['0', 'match', `${dbName}:user:*`, 'count', '10000']);
+  // const userOnline = await rclient10.scan(['0', 'match', 'sess:*', 'count', '10000']);
+
+  return {
+    process: {
+      root: process.cwd(),
+      processFolder,
+      saveDirectory: filesFolder,
+      dbhost: pg.options?.host,
+      dbport: pg.options?.port,
+      dbname: dbName,
+    },
+
+    uptime: {
+      node: Math.round(process.uptime()),
+      system: os.uptime(),
+      pg: uptime,
+      redis: redis.uptime_in_seconds - 0,
+    },
+    metric: {
+      cpu: `${Math.round(currentCpuUsage)} %`,
+      memory: `${Math.round(((memory.rss / totalMemory) * 100) * 100) / 100} %`,
+      'user.online': userOnline?.[1]?.length,
+      'node.total': formatMemoryUsage(memory.heapTotal),
+      'node.used': formatMemoryUsage(memory.heapUsed),
+      'node.rss': formatMemoryUsage(memory.rss),
+      'node.cpu.time': resource.systemCPUTime,
+      'node.cpu.load': os.loadavg()[0],
+      'redis.ram': redis.used_memory_human,
+      'redis.latency': latency?.join(','),
+      'redis.ram.peak': redis.used_memory_peak_human,
+      'redis.clients': redis.connected_clients,
+      'redis.db0': redis.db0?.split(',')?.[0]?.substring(5),
+      'redis.db1': redis.db1?.split(',')?.[0]?.substring(5),
+      'redis.db2': redis.db2?.split(',')?.[0]?.substring(5),
+      'redis.db10': redis.db10?.split(',')?.[0]?.substring(5),
+      'pg.connection': query?.length,
+      dbsize: dbSize,
+      filesize: 0,
+    },
+    metricSort,
+    system: {
+      os: osInfo,
+      release: os.release(),
+      core: cpuInfo.length,
+      cpu: cpuInfo[0].model,
+      ram: formatMemoryUsage(totalMemory),
+      db: dbVerion,
+      redis: rclient.server_info?.redis_version,
+      node: process.version,
+    },
+    top: topProcess,
+    query,
+  };
+};
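
Most of loggerSystem is straightforward probing (`pg_stat_activity`, `pg_database_size`, `os.cpus()`, `top -b -n 1`); the one real parsing step is the Redis `INFO` reply, a CRLF-separated block of `key:value` lines interleaved with `#` section headers, which the loop above folds into a flat object. The same parse in isolation:

```js
// fold a Redis INFO reply into a flat { key: value } object;
// '#' section headers contain no ':' and are skipped
function parseRedisInfo(info) {
  const out = {};
  for (const line of info.split('\r\n')) {
    const idx = line.indexOf(':');
    if (idx > 0) out[line.slice(0, idx)] = line.slice(idx + 1);
  }
  return out;
}

// parseRedisInfo('# Memory\r\nused_memory_human:1.21M\r\nconnected_clients:3')
// -> { used_memory_human: '1.21M', connected_clients: '3' }
```

Note that the diff's version uses `split(':')`, which truncates values that themselves contain a colon; `indexOf` sidesteps that.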
@@ -0,0 +1,23 @@
+import pgClients from '../pg/pgClients.js';
+import logger from '../logger/getLogger.js';
+import getRedis from '../redis/funcs/getRedis.js';
+
+import loggerSystem from './loggerSystem.js';
+
+const rclient2 = getRedis({ db: 2 });
+
+export default async function systemMetricsFifthly({ pg = pgClients.client }) {
+  const system = await loggerSystem({ pg });
+  const dbName = pg.options?.database;
+
+  await rclient2.del(`${dbName}:system_metrics`);
+
+  logger.file('metric', {
+    dbname: dbName,
+    ...system.metric,
+    uptime: system.uptime,
+    ...system.metricSort,
+  });
+
+  return { message: 'Saved Fifthly' };
+}
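
Taken together with `logger.metrics`, this gives windowed counters: loggerSystem snapshots the accumulated hash via `HGETALL`, then this collector `DEL`s it, so each 15-minute log entry starts from zero. The read-and-reset cycle in isolation, with the key layout from the diff:

```js
// snapshot accumulated counters, then reset them for the next window
async function snapshotAndReset(rclient2, dbName) {
  const key = `${dbName}:system_metrics`;
  const counters = await rclient2.hgetall(key); // {} when the hash is empty
  await rclient2.del(key);
  return counters;
}
```

Increments that land between the HGETALL and the DEL are lost; if that ever mattered, a MULTI/EXEC transaction or a RENAME-then-read would make the swap atomic.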
@@ -1,37 +1,37 @@
-import path from 'node:path';
-import { readdirSync, existsSync } from 'node:fs';
-
-import config from '../../../config.js';
-import execSql from './exec.sql.js';
-// import getCallerDir from './get.caller.dir.js';
-
-const time = Date.now();
-
-export default async function execMigrations(dirPath, iscore) {
-  if (
-    !dirPath
-    || (config.migrationsCore === false && iscore)
-    || config.migrations === false
-  ) {
-    console.log('migrations skip', 'core: ', !!iscore, 'dir: ', dirPath || 'not specified');
-    return;
-  }
-
-  console.log('migrations start', dirPath, Date.now() - time);
-
-  const exists = existsSync(dirPath);
-
-  if (exists) {
-    // get directory sql file list
-    const content = readdirSync(dirPath, { withFileTypes: true })
-      ?.filter((el) => el.isFile() && path.extname(el.name) === '.sql')
-      ?.map((el) => el.name) || [];
-
-    // execute sql files
-    if (content?.length) {
-      await content.reduce((promise, filename) => promise.then(() => execSql(path.join(dirPath, filename))), Promise.resolve());
-    }
-  }
-
-  console.log('migrations finish', dirPath, exists, Date.now() - time);
-}
+import path from 'node:path';
+import { readdirSync, existsSync } from 'node:fs';
+
+import config from '../../../config.js';
+import execSql from './exec.sql.js';
+// import getCallerDir from './get.caller.dir.js';
+
+const time = Date.now();
+
+export default async function execMigrations(dirPath, iscore) {
+  if (
+    !dirPath
+    || (config.migrationsCore === false && iscore)
+    || config.migrations === false
+  ) {
+    console.log('migrations skip', 'core: ', !!iscore, 'dir: ', dirPath || 'not specified');
+    return;
+  }
+
+  console.log('migrations start', dirPath, Date.now() - time);
+
+  const exists = existsSync(dirPath);
+
+  if (exists) {
+    // get directory sql file list
+    const content = readdirSync(dirPath, { withFileTypes: true })
+      ?.filter((el) => el.isFile() && path.extname(el.name) === '.sql')
+      ?.map((el) => el.name) || [];
+
+    // execute sql files
+    if (content?.length) {
+      await content.reduce((promise, filename) => promise.then(() => execSql(path.join(dirPath, filename))), Promise.resolve());
+    }
+  }
+
+  console.log('migrations finish', dirPath, exists, Date.now() - time);
+}
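
This hunk is another remove/re-add with identical rendered content, again consistent with line-ending normalization. One detail of the migration runner is still worth calling out: it chains the `execSql` calls through a `reduce` over promises rather than `Promise.all`, so the .sql files run strictly in directory order and each migration sees the schema its predecessor left behind. The pattern in isolation (`runOne` is a hypothetical task runner):

```js
// run tasks strictly in sequence; Promise.all would start them all at once
function runSequentially(items, runOne) {
  return items.reduce(
    (chain, item) => chain.then(() => runOne(item)),
    Promise.resolve(),
  );
}

// e.g. apply migrations in filename order, with execSql as in the diff
await runSequentially(['001-init.sql', '002-add-index.sql'], execSql);
```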
@@ -1,12 +1,12 @@
-import checkPolicy from './funcs/checkPolicy.js';
-
-async function plugin(fastify) {
-  fastify.addHook('preParsing', async (request, reply) => {
-    const resp = checkPolicy(request, reply);
-    if (resp) {
-      return resp;
-    }
-  });
-}
-
-export default plugin;
+import checkPolicy from './funcs/checkPolicy.js';
+
+async function plugin(fastify) {
+  fastify.addHook('preParsing', async (request, reply) => {
+    const resp = checkPolicy(request, reply);
+    if (resp) {
+      return resp;
+    }
+  });
+}
+
+export default plugin;