@opengis/fastify-table 2.0.16 → 2.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/server/plugins/logger/checkUserAccess.js +22 -0
- package/dist/server/plugins/logger/createFileStream.js +5 -21
- package/dist/server/plugins/logger/getLogger.js +0 -2
- package/dist/server/plugins/logger/getRootDir.js +22 -0
- package/dist/server/plugins/pg/funcs/getPG.js +3 -2
- package/dist/server/plugins/pg/funcs/getPGAsync.js +3 -2
- package/dist/server/plugins/pg/funcs/init.js +26 -16
- package/dist/server/plugins/table/funcs/getFilterSQL/index.js +1 -1
- package/dist/server/routes/logger/controllers/logger.file.js +2 -2
- package/package.json +1 -1
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import config from "../../../config.js";

// SECURITY(review): hardcoded fallback token is published with the package —
// anyone reading the source can present it and be granted access. Require
// config.auth.accessToken to be set explicitly; confirm before removing the
// fallback, since existing deployments may rely on it.
const { accessToken = "0NWcGQxKRP8AsRxD" } = config.auth || {};

/**
 *
 * @summary check user access to logger interface - per admin user type or user group
 * @param {Object} opts
 * @param {Object} [opts.user={}] - authenticated user; `user_type` is checked for "admin"
 * @param {string} [opts.token] - shared access token, compared against config.auth.accessToken
 * @returns {Object} message, status
 */
export default function checkUserAccess({ user = {}, token, }) {
    // Token-based access: an exact token match grants access regardless of user type.
    // NOTE(review): `===` on a secret is not timing-safe; consider
    // crypto.timingSafeEqual on equal-length buffers if this token guards anything sensitive.
    if (token && token === accessToken) {
        return { message: "access granted", status: 200 };
    }
    // Non-admin users are rejected unless running locally (config.local)
    // or authentication is globally disabled (config.auth.disable).
    if (!user.user_type?.includes?.("admin") &&
        !config?.local &&
        !config.auth?.disable) {
        return { message: "access restricted", status: 403 };
    }
    return { message: "access granted", status: 200 };
}
|
|
@@ -1,30 +1,14 @@
|
|
|
1
|
-
/* eslint-disable no-console */
|
|
2
|
-
const streams = {};
|
|
3
|
-
import build from "pino-abstract-transport";
|
|
4
1
|
import fs from "node:fs";
|
|
2
|
+
import build from "pino-abstract-transport";
|
|
5
3
|
import labels from "./labels.js";
|
|
6
|
-
import
|
|
7
|
-
|
|
8
|
-
const
|
|
9
|
-
const { dir = "log", interval = "1d", compress = "gzip", // maxFiles = 90, local: teeToStdout,
|
|
10
|
-
} = config?.log || {};
|
|
4
|
+
import getRootDir from "./getRootDir.js";
|
|
5
|
+
const dir = getRootDir();
|
|
6
|
+
const streams = {};
|
|
11
7
|
function createFileStream({ level, status }) {
|
|
12
|
-
console.log(dir, level, generator()(), interval, compress);
|
|
13
|
-
/* const params = {
|
|
14
|
-
maxFiles, // logs to save limit
|
|
15
|
-
history: 'history', // history file name
|
|
16
|
-
interval, // rotate daily
|
|
17
|
-
compress, // compress rotated files
|
|
18
|
-
teeToStdout, // debug / logs to stdout
|
|
19
|
-
path: `${dir}/${level}`, // absolute path (root directory)
|
|
20
|
-
intervalBoundary: true, // true - log name with lower boundary of rotation interval
|
|
21
|
-
initialRotation: true, // true - log rotation check on init
|
|
22
|
-
// intervalUTC: true, // local tz -> utc
|
|
23
|
-
};
|
|
24
|
-
return createStream(generator({ interval }), params); */
|
|
25
8
|
const dt = new Date().toISOString().split("T")[0];
|
|
26
9
|
const fileName = `${dir}/${level}/${dt}${status ? `_${status}` : ""}.log`;
|
|
27
10
|
fs.mkdirSync(`${dir}/${level}`, { recursive: true });
|
|
11
|
+
console.log("creating log stream: " + fileName);
|
|
28
12
|
const stream = fs.createWriteStream(fileName, {
|
|
29
13
|
encoding: "utf8",
|
|
30
14
|
flags: "a+",
|
|
@@ -8,8 +8,6 @@ import serializers from "./serializers.js";
|
|
|
8
8
|
import timestampWithTimeZone from "./timestampWithTimeZone.js";
|
|
9
9
|
const isServer = process.argv[2];
|
|
10
10
|
const rclient2 = getRedis({ db: 2 });
|
|
11
|
-
if (!config.log)
|
|
12
|
-
config.log = {};
|
|
13
11
|
const level = config.log?.level || process.env.PINO_LOG_LEVEL || "info";
|
|
14
12
|
console.log(`log level: ${level}`);
|
|
15
13
|
const options = {
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/* eslint-disable no-console */
import fs from "node:fs";
import path from "node:path";
import config from "../../../config.js";

// Memoized resolved log directory — computed once per process.
let logDir = null;

/**
 * Resolve the root directory that log files are written under.
 *
 * Precedence:
 *   1. config.logDir — explicit absolute / relative path override
 *   2. previously memoized value
 *   3. derived: if config.json was found under /data/local, root is
 *      /data/local; otherwise process.cwd(); joined with config.log.dir or "log"
 *
 * @returns {string} directory path for logs
 */
export default function getRootDir() {
    // absolute / relative path
    if (config.logDir) {
        console.log(`logging to: ${config.logDir}`);
        return config.logDir;
    }
    if (logDir) {
        console.log(`logging to: ${logDir}`);
        return logDir;
    }
    // .find expects a boolean predicate — existsSync alone suffices
    // (the original `existsSync(el) ? el : null` ternary was redundant).
    const file = ["config.json", "/data/local/config.json"].find((el) => fs.existsSync(el));
    // .env / config.json => process.cwd()
    const root = file && file.startsWith("/data/local") ? "/data/local" : process.cwd();
    logDir = path.join(root, config.log?.dir || "log");
    console.log(`logging to: ${logDir}`);
    return logDir;
}
|
|
@@ -13,7 +13,8 @@ function getPG(param = {}) {
|
|
|
13
13
|
const dbListParams = dblist.find((el) => el.key === param?.key) ||
|
|
14
14
|
dblist.find((el) => el.database === (param?.db || param?.database || param) &&
|
|
15
15
|
el.port === param?.port);
|
|
16
|
-
const { user, password, host, port, db, database, name: origin,
|
|
16
|
+
const { user, password, host, port, db, database, name: origin, statement_timeout: timeout, // explicit
|
|
17
|
+
} = dbListParams ??
|
|
17
18
|
(typeof param === "string" ? getDBParams(param) : param || {});
|
|
18
19
|
const name = origin || db || database || param || "client";
|
|
19
20
|
if (pgClients[name])
|
|
@@ -24,7 +25,7 @@ function getPG(param = {}) {
|
|
|
24
25
|
host: host || config.pg?.host,
|
|
25
26
|
port: port || config.pg?.port,
|
|
26
27
|
database: db || database || config.pg?.db || config.pg?.database,
|
|
27
|
-
statement_timeout: config.pg?.statement_timeout || 10000,
|
|
28
|
+
statement_timeout: timeout || config.pg?.statement_timeout || 10000,
|
|
28
29
|
};
|
|
29
30
|
if (!dbConfig.database) {
|
|
30
31
|
return null;
|
|
@@ -12,7 +12,8 @@ async function getPGAsync(param) {
|
|
|
12
12
|
return null;
|
|
13
13
|
const dbListParams = dblist.find((el) => el.key === param?.key) ||
|
|
14
14
|
dblist.find((el) => el.database === (param?.db || param?.database || param));
|
|
15
|
-
const { user, password, host, port, db, database, name: origin,
|
|
15
|
+
const { user, password, host, port, db, database, name: origin, statement_timeout: timeout, // explicit
|
|
16
|
+
} = dbListParams ??
|
|
16
17
|
(typeof param === "string" ? getDBParams(param) : param || {});
|
|
17
18
|
const name = origin || db || database || param || "client";
|
|
18
19
|
if (pgClients[name]?.tlist)
|
|
@@ -23,7 +24,7 @@ async function getPGAsync(param) {
|
|
|
23
24
|
host: host || config.pg?.host,
|
|
24
25
|
port: port || config.pg?.port,
|
|
25
26
|
database: db || database || config.pg?.db || config.pg?.database,
|
|
26
|
-
statement_timeout: config.pg?.statement_timeout || 10000,
|
|
27
|
+
statement_timeout: timeout || config.pg?.statement_timeout || 10000,
|
|
27
28
|
};
|
|
28
29
|
if (!dbConfig.database) {
|
|
29
30
|
return null;
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import pg from "pg";
|
|
1
2
|
import { createHash } from "node:crypto";
|
|
2
3
|
import config from "../../../../config.js";
|
|
3
4
|
import getRedis from "../../redis/funcs/getRedis.js";
|
|
@@ -68,34 +69,43 @@ async function init(client) {
|
|
|
68
69
|
WHERE
|
|
69
70
|
relkind IN ('r', 'v')`);
|
|
70
71
|
const relkinds = rows.reduce((acc, curr) => Object.assign(acc, { [curr.tname]: curr.relkind }), {});
|
|
71
|
-
async function query(q, args = []
|
|
72
|
-
let timeoutWasSet;
|
|
72
|
+
async function query(q, args = []) {
|
|
73
73
|
try {
|
|
74
|
-
if (isstream) {
|
|
75
|
-
await client.query(`set statement_timeout to ${timeout}`);
|
|
76
|
-
timeoutWasSet = true;
|
|
77
|
-
}
|
|
78
74
|
const data = await client.query(q, args);
|
|
79
75
|
return data;
|
|
80
76
|
}
|
|
81
77
|
catch (err) {
|
|
82
|
-
|
|
78
|
+
// canceling statement due to statement timeout
|
|
79
|
+
if (err.code === "57014") {
|
|
83
80
|
logger.file("timeout/query", { q, stack: err.stack });
|
|
81
|
+
}
|
|
82
|
+
throw err;
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
async function querySafe(q, param) {
|
|
86
|
+
const pg1 = new pg.Pool({
|
|
87
|
+
...client.options,
|
|
88
|
+
statement_timeout: param?.timeout || 100000000,
|
|
89
|
+
});
|
|
90
|
+
try {
|
|
91
|
+
const args = Array.isArray(param) ? param : param?.args || [];
|
|
92
|
+
const data = await pg1.query(q, args);
|
|
93
|
+
console.log("pg.querySafe ok", q);
|
|
94
|
+
return data;
|
|
95
|
+
}
|
|
96
|
+
catch (err) {
|
|
97
|
+
if (err.code === "57014") {
|
|
98
|
+
console.error("pg.querySafe timeout", q);
|
|
84
99
|
return { rows: [], timeout: true };
|
|
85
100
|
}
|
|
86
|
-
|
|
101
|
+
console.error("pg.querySafe error", q);
|
|
102
|
+
throw err;
|
|
87
103
|
}
|
|
88
104
|
finally {
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
}
|
|
105
|
+
pg1.end();
|
|
106
|
+
console.log("pg.querySafe released", q);
|
|
92
107
|
}
|
|
93
108
|
}
|
|
94
|
-
async function querySafe(q, param) {
|
|
95
|
-
const args = Array.isArray(param) ? param : param?.args || [];
|
|
96
|
-
const data = await query(q, args, true, param?.timeout);
|
|
97
|
-
return data;
|
|
98
|
-
}
|
|
99
109
|
async function one(q, param) {
|
|
100
110
|
const data = await query(q, Array.isArray(param) ? param : param?.args || []);
|
|
101
111
|
const result = ((Array.isArray(data) ? data.pop() : data)?.rows || [])[0] || {};
|
|
@@ -77,7 +77,7 @@ export default async function getFilterSQL({ table, filter, pg = pgClients.clien
|
|
|
77
77
|
const { fields = [] } = await pg.query(fieldQuery);
|
|
78
78
|
const autoSearchColumn = fields
|
|
79
79
|
?.filter((el) => pg.pgType?.[el.dataTypeID] === "text")
|
|
80
|
-
?.map((el) => el.name)
|
|
80
|
+
?.map((el) => `"${el.name}"`)
|
|
81
81
|
.join(",");
|
|
82
82
|
const searchColumn = body?.search_column || body?.meta?.search || autoSearchColumn;
|
|
83
83
|
const fieldsList = (fieldsModel || fields)?.map((el) => el.name);
|
|
@@ -2,8 +2,8 @@ import path from "node:path";
|
|
|
2
2
|
import { lstat, readdir, readFile } from "node:fs/promises";
|
|
3
3
|
import { createReadStream, existsSync } from "node:fs";
|
|
4
4
|
import readline from "node:readline";
|
|
5
|
-
import checkUserAccess from "
|
|
6
|
-
import getRootDir from "
|
|
5
|
+
import checkUserAccess from "../../../plugins/logger/checkUserAccess.js";
|
|
6
|
+
import getRootDir from "../../../plugins/logger/getRootDir.js";
|
|
7
7
|
const limit = 200000;
|
|
8
8
|
const rootDir = getRootDir();
|
|
9
9
|
/**
|