@opengis/fastify-table 2.0.106 → 2.0.108
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/config.d.ts.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -0
- package/dist/script/adduser +15 -0
- package/dist/script/dump.js +176 -0
- package/dist/script/migrate.js +25 -0
- package/dist/server/plugins/auth/funcs/verifyPassword.d.ts.map +1 -1
- package/dist/server/plugins/auth/funcs/verifyPassword.js +3 -1
- package/dist/server/plugins/crud/funcs/dataInsert.js +2 -2
- package/dist/server/plugins/crud/funcs/dataUpdate.d.ts.map +1 -1
- package/dist/server/plugins/crud/funcs/dataUpdate.js +9 -6
- package/dist/server/plugins/crud/funcs/validateData.js +1 -1
- package/dist/server/plugins/file/providers/fs.js +2 -2
- package/dist/server/plugins/file/providers/s3/funcs/downloadFile.d.ts.map +1 -1
- package/dist/server/plugins/file/providers/s3/funcs/downloadFile.js +1 -2
- package/dist/server/plugins/file/providers/s3/funcs/fileExists.d.ts.map +1 -1
- package/dist/server/plugins/file/providers/s3/funcs/fileExists.js +1 -2
- package/dist/server/plugins/file/providers/s3/funcs/uploadFile.d.ts.map +1 -1
- package/dist/server/plugins/file/providers/s3/funcs/uploadFile.js +1 -2
- package/dist/server/plugins/file/providers/s3/index.js +4 -4
- package/dist/server/plugins/logger/getLogger.d.ts.map +1 -1
- package/dist/server/plugins/logger/getLogger.js +14 -11
- package/dist/server/plugins/logger/index.d.ts.map +1 -1
- package/dist/server/plugins/logger/index.js +5 -4
- package/dist/server/plugins/migration/exec.migrations.js +6 -6
- package/dist/server/plugins/pg/funcs/getMeta.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/getMeta.js +3 -5
- package/dist/server/plugins/pg/funcs/getPG.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/getPG.js +2 -1
- package/dist/server/plugins/pg/funcs/getPGAsync.js +2 -2
- package/dist/server/plugins/pg/funcs/init.d.ts +1 -1
- package/dist/server/plugins/pg/funcs/init.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/init.js +43 -36
- package/dist/server/plugins/pg/funcs/pool.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/pool.js +12 -18
- package/dist/server/plugins/pg/index.d.ts.map +1 -1
- package/dist/server/plugins/pg/index.js +3 -2
- package/dist/server/plugins/redis/funcs/getRedis.d.ts.map +1 -1
- package/dist/server/plugins/redis/funcs/getRedis.js +7 -5
- package/dist/server/plugins/redis/index.d.ts.map +1 -1
- package/dist/server/plugins/redis/index.js +4 -1
- package/dist/server/plugins/sqlite/index.d.ts.map +1 -1
- package/dist/server/plugins/sqlite/index.js +7 -3
- package/dist/server/plugins/table/funcs/getFilter.d.ts +1 -1
- package/dist/server/plugins/table/funcs/getFilter.d.ts.map +1 -1
- package/dist/server/plugins/table/funcs/getFilter.js +14 -1
- package/dist/server/plugins/table/funcs/getSelectMeta.d.ts.map +1 -1
- package/dist/server/plugins/table/funcs/getSelectMeta.js +2 -4
- package/dist/server/plugins/table/funcs/gisIRColumn.d.ts +2 -2
- package/dist/server/plugins/table/funcs/gisIRColumn.js +1 -1
- package/dist/server/plugins/upload/finishUpload.d.ts +9 -0
- package/dist/server/plugins/upload/finishUpload.d.ts.map +1 -0
- package/dist/server/plugins/upload/finishUpload.js +33 -0
- package/dist/server/plugins/upload/getUploadStatus.d.ts +5 -0
- package/dist/server/plugins/upload/getUploadStatus.d.ts.map +1 -0
- package/dist/server/plugins/upload/getUploadStatus.js +36 -0
- package/dist/server/plugins/upload/index.d.ts +6 -0
- package/dist/server/plugins/upload/index.d.ts.map +1 -0
- package/dist/server/plugins/upload/index.js +12 -0
- package/dist/server/plugins/upload/startUpload.d.ts +8 -0
- package/dist/server/plugins/upload/startUpload.d.ts.map +1 -0
- package/dist/server/plugins/upload/startUpload.js +53 -0
- package/dist/server/plugins/upload/uploadChunk.d.ts +9 -0
- package/dist/server/plugins/upload/uploadChunk.d.ts.map +1 -0
- package/dist/server/plugins/upload/uploadChunk.js +47 -0
- package/dist/server/plugins/util/funcs/unflattenObject.d.ts.map +1 -1
- package/dist/server/plugins/util/funcs/unflattenObject.js +5 -3
- package/dist/server/routes/access/controllers/access.group.d.ts +2 -2
- package/dist/server/routes/access/controllers/access.group.d.ts.map +1 -1
- package/dist/server/routes/access/controllers/access.group.js +0 -1
- package/dist/server/routes/access/controllers/access.group.post.d.ts +2 -2
- package/dist/server/routes/access/controllers/access.group.post.d.ts.map +1 -1
- package/dist/server/routes/access/controllers/access.group.post.js +0 -1
- package/dist/server/routes/auth/controllers/2factor/providers/totp.d.ts.map +1 -1
- package/dist/server/routes/auth/controllers/2factor/providers/totp.js +1 -1
- package/dist/server/routes/auth/controllers/core/registration.d.ts +1 -1
- package/dist/server/routes/auth/controllers/core/registration.d.ts.map +1 -1
- package/dist/server/routes/auth/controllers/core/registration.js +37 -9
- package/dist/server/routes/auth/controllers/core/updateUserInfo.js +1 -1
- package/dist/server/routes/auth/controllers/jwt/authorize.js +5 -5
- package/dist/server/routes/auth/controllers/jwt/token.d.ts.map +1 -1
- package/dist/server/routes/auth/controllers/jwt/token.js +10 -12
- package/dist/server/routes/cron/controllers/cronApi.d.ts +1 -1
- package/dist/server/routes/cron/controllers/cronApi.d.ts.map +1 -1
- package/dist/server/routes/cron/controllers/cronApi.js +5 -3
- package/dist/server/routes/crud/controllers/insert.d.ts +1 -4
- package/dist/server/routes/crud/controllers/insert.d.ts.map +1 -1
- package/dist/server/routes/crud/controllers/insert.js +24 -16
- package/dist/server/routes/crud/controllers/table.d.ts.map +1 -1
- package/dist/server/routes/crud/controllers/table.js +13 -6
- package/dist/server/routes/crud/controllers/update.d.ts.map +1 -1
- package/dist/server/routes/crud/controllers/update.js +23 -15
- package/dist/server/routes/file/controllers/delete.d.ts +1 -15
- package/dist/server/routes/file/controllers/delete.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/delete.js +13 -20
- package/dist/server/routes/file/controllers/download.d.ts +2 -2
- package/dist/server/routes/file/controllers/download.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/download.js +39 -30
- package/dist/server/routes/file/controllers/files.d.ts +2 -1
- package/dist/server/routes/file/controllers/files.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/files.js +15 -11
- package/dist/server/routes/file/controllers/resize.d.ts +1 -2
- package/dist/server/routes/file/controllers/resize.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/resize.js +17 -6
- package/dist/server/routes/file/controllers/upload.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/upload.js +17 -16
- package/dist/server/routes/file/controllers/uploadImage.d.ts +11 -13
- package/dist/server/routes/file/controllers/uploadImage.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/uploadImage.js +13 -15
- package/dist/server/routes/logger/controllers/logger.file.js +1 -1
- package/dist/server/routes/menu/controllers/interfaces.d.ts +1 -7
- package/dist/server/routes/menu/controllers/interfaces.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/card.d.ts +1 -1
- package/dist/server/routes/table/controllers/card.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/card.js +15 -9
- package/dist/server/routes/table/controllers/filter.d.ts +1 -1
- package/dist/server/routes/table/controllers/filter.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/filter.js +9 -2
- package/dist/server/routes/table/controllers/form.d.ts +1 -1
- package/dist/server/routes/table/controllers/form.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/form.js +8 -5
- package/dist/server/routes/table/controllers/search.d.ts +1 -1
- package/dist/server/routes/table/controllers/search.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/search.js +5 -6
- package/dist/server/routes/table/controllers/suggest.d.ts +1 -1
- package/dist/server/routes/table/controllers/suggest.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/suggest.js +30 -15
- package/dist/server/routes/table/functions/getData.d.ts +1 -1
- package/dist/server/routes/table/functions/getData.d.ts.map +1 -1
- package/dist/server/routes/table/functions/getData.js +60 -45
- package/dist/server/routes/upload/index.d.ts +2 -0
- package/dist/server/routes/upload/index.d.ts.map +1 -0
- package/dist/server/routes/upload/index.js +72 -0
- package/dist/server/types/core.d.ts +7 -1
- package/dist/server/types/core.d.ts.map +1 -1
- package/dist/utils.d.ts +5 -0
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +5 -0
- package/package.json +3 -3
package/dist/config.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../config.ts"],"names":[],"mappings":"AAWA,QAAA,MAAM,MAAM,KAA8D,CAAC;
+{"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../config.ts"],"names":[],"mappings":"AAWA,QAAA,MAAM,MAAM,KAA8D,CAAC;AAqC3E,eAAe,MAAM,CAAC"}
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../index.ts"],"names":[],"mappings":"AAuFA,iBAAS,MAAM,CAAC,OAAO,EAAE,GAAG,QA6J3B;;AACD,wBAA0B"}
package/dist/index.js
CHANGED
@@ -46,6 +46,7 @@ import templatesRoutes from "./server/routes/templates/index.js";
 import widgetRoutes from "./server/routes/widget/index.js";
 import authRoutes from "./server/routes/auth/index.js";
 import fileRoutes from "./server/routes/file/index.js";
+import uploadChunkRoutes from "./server/routes/upload/index.js";
 import grpcRoutes from "./server/routes/grpc/index.js";
 import notificationsRoutes from "./server/routes/notifications/index.js";
 const filename = fileURLToPath(import.meta.url);
@@ -115,6 +116,9 @@ function plugin(fastify) {
 keyGenerator: (req) => `${req.ip}-${req.raw.url.split("?")[0]}`,
 });
 }
+else {
+console.log("⚠️ rate limit is disabled");
+}
 // add multipart parser before any route registration
 fastify.register(import("@fastify/multipart"), {
 limits: {
@@ -138,6 +142,7 @@ function plugin(fastify) {
 fastify.register(authRoutes); // from fastify-auth
 // from fastify-file
 fastify.register(fileRoutes);
+fastify.register(uploadChunkRoutes);
 fastify.register(grpcRoutes, opt);
 // from admin
 fastify.register(notificationsRoutes, opt);
package/dist/script/adduser
ADDED
@@ -0,0 +1,15 @@
+#!/bin/bash
+rootDir=`echo $(dirname "$0")`
+echo "$rootDir/passwd"
+
+if [ -e "$rootDir/../passwd" ]
+then
+password=`echo -n "$2" | sha1sum | awk '{print $1}'`
+echo "$1:$password"
+echo "$1:$password" >> "$rootDir/../passwd"
+else
+> passwd
+password=`echo -n "$2" | sha1sum | awk '{print $1}'`
+echo "$1:$password"
+echo "$1:$password" > "$rootDir/../passwd"
+fi
package/dist/script/dump.js
ADDED
@@ -0,0 +1,176 @@
+// Dump the database schema
+// Usage examples:
+// bun .\script\dump.js --table=bpmn.tasks
+// bun .\script\dump.js --schema=bpmn
+
+import path from 'node:path';
+import { existsSync } from 'node:fs';
+import { mkdir, writeFile, rm, rmdir, stat } from 'node:fs/promises';
+
+import { config, handlebars, pgClients, getTemplate } from '../utils.js';
+
+import { build, teardown } from '../helper.js';
+
+const app = build();
+app.addHook('onClose', async () => teardown());
+dumpMigrateSQL();
+// app.close();
+
+const debug = false;
+
+export default async function dumpMigrateSQL() {
+try {
+// const { database, host, port, user, password } = config.pg;
+
+if (!config.pg) {
+console.error('empty config.pg, skip...');
+return null;
+}
+
+if (!Bun.argv[2]) {
+console.error('missing schema / table name, skip...');
+}
+
+const [key, value] = Bun.argv[2].substring(2).split('=');
+const tableName = key === 'table' ? value : null;
+const schemaName = key === 'schema' ? value : value.split('.').shift();
+
+const pg = pgClients.client;
+// const pg = await getPGAsync({ database, host, port, user, password });
+await pg.query(`select 1`);
+
+const schemaExists = await pg.query(`SELECT 1 FROM information_schema.schemata WHERE schema_name = $1`, [schemaName]).then(el => el.rowCount);
+
+if (!schemaExists) {
+console.error('Вказаної схеми не існує', config.pg?.database);
+return null;
+}
+
+// if (tableName && !pg.pk?.[tableName]) {
+// console.error('Вказаної таблиці не існує', config.pg?.database);
+// return null;
+// }
+
+const dump = await schemaItem({
+pg,
+table: tableName,
+schema: schemaName,
+debug,
+is_erd: false
+});
+
+if (debug) {
+console.log(dump);
+return null;
+}
+
+const filepath = await saveFile(dump, tableName || schemaName);
+console.log('sucess', filepath);
+} catch (err) {
+console.error(err);
+} finally {
+app.close();
+}
+}
+
+async function saveFile(data, filename) {
+if (!data) throw new Error(`no data - ${filename}`);
+
+const filepath = path.join('log/dump', `${filename}.sql`);
+const fileExists = existsSync(filepath);
+
+// overwrite old file
+if (fileExists) {
+const stats = await stat(filepath);
+if (stats.isDirectory()) {
+await rmdir(filepath, { force: true, recursive: true });
+} else {
+await rm(filepath)
+}
+}
+
+await mkdir(path.dirname(filepath), { recursive: true });
+await writeFile(filepath, Buffer.from(data, 'utf-8'));
+
+return filepath;
+}
+
+async function schemaItem({
+pg, table, schema, debug
+}) {
+if (!schema && !table) return new Error('param schema is required');
+
+const { rows: schemaInfo } = await pg.query(`select c.oid,relname,nspname,obj_description(c.oid) as description,
+(
+select json_agg(row_to_json(q))
+from (
+select
+column_name,
+case
+when data_type='USER-DEFINED' AND udt_name='geometry' THEN 'geometry'
+when data_type='ARRAY' AND udt_name='_text' THEN 'text[]'
+when data_type='ARRAY' AND udt_name='_int4' THEN 'integer[]'
+else data_type
+end as data_type,
+ordinal_position,
+column_default,
+is_nullable,
+case
+when column_name='uid' then 'ідентифікатор автора запису в БД'
+when column_name='cdate' then 'Дата створення запису в БД'
+when column_name='editor_id' then 'Ідентифікатор автора, який останій вніс зміни в запис'
+when column_name='editor_date' then 'Час останії зміни в записі'
+when column_name='files' then 'Системна колонка'
+when column_name='doc_status' then 'Статус документа'
+when column_name='reg_status' then 'Статус реєстрації'
+when column_name='obj_version' then 'Версія запису'
+else col_description(a.attrelid,ordinal_position)
+end as description
+from information_schema.columns col
+LEFT JOIN pg_attribute a ON col.column_name=a.attname and c.oid = a.attrelid
+where col.table_schema=nspname and col.table_name=relname
+)q
+) as columns
+from pg_class c
+LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
+where ${table ? `nspname||'.'||relname='${table}'` : `'${schema}'=nspname`} and relam=2
+order by nspname,relname`);
+
+if (!schemaInfo?.length) throw new Error('invalid params');
+
+const { rows: constraints } = await pg.query(`select con.conrelid::regclass as constraint_table, a.column_name,
+con.conname as constraint_name,contype as constraint_type, con.confrelid::regclass as foreign_table,
+con.confupdtype, con.confdeltype, con.confmatchtype, u.column_name as foreign_column from pg_constraint con
+left join pg_class c ON c.oid = con.conrelid
+left join pg_namespace n ON n.oid = c.relnamespace
+left join lateral (
+select string_agg(a.attname,',') as column_name from pg_attribute a
+where con.conrelid = a.attrelid and a.attnum = any(con.conkey) limit 1
+)a on 1=1
+left join lateral (
+select column_name from information_schema.constraint_column_usage u
+where conname=u.constraint_name limit 1
+)u on 1=1
+where ${table ? `conrelid::regclass::text = '${table}'` : `nspname = '${schema}'`}`);
+
+// add table constraints, mermaid
+schemaInfo?.forEach((row) => {
+// constraint type to column
+row?.columns?.forEach((col) => {
+const { constraint_type } = constraints?.find((con) => con?.column_name === col?.column_name && con.constraint_table === `${row.nspname}.${row.relname}`) || {};
+Object.assign(col, { constraint_type });
+});
+
+// table relations
+const tableConstraints = constraints?.filter((el) => el?.constraint_table === `${row.nspname}.${row.relname}`);
+Object.assign(row, { constraints: tableConstraints });
+});
+
+if (debug) return schemaInfo;
+
+const body = await getTemplate('pt', 'schemaItem.pt');
+
+const schemaContent = await handlebars.compile(body?.hbs || 'template not found: schemaItem.pt')({ nspname: schema, rows: schemaInfo, constraints });
+
+return schemaContent.replace(/'/g, "'");
+}
package/dist/script/migrate.js
ADDED
@@ -0,0 +1,25 @@
+import Fastify from 'fastify';
+
+import config from '../config.js';
+import plugin from '../index.js';
+
+const timeoutMs = +(config.migrationTimeout || 5000);
+
+process.env.PORT = process.env.PORT || config.port || 3000;
+
+const app = Fastify();
+
+app.register(plugin, config);
+
+app.listen({ host: '0.0.0.0', port: process.env.PORT }, (err) => {
+console.log(`Server started via port: ${process.env.PORT}`);
+setTimeout(() => {
+console.log('Server closed after timeout', timeoutMs);
+app.close();
+process.exit(0);
+}, timeoutMs);
+if (err) {
+console.error('migrations error', err.toString());
+process.exit(1);
+}
+});
package/dist/server/plugins/auth/funcs/verifyPassword.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"verifyPassword.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/auth/funcs/verifyPassword.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAWpD,wBAA8B,cAAc,CAAC,EAC3C,EAAE,EACF,QAAQ,EACR,QAAQ,GACT,EAAE;IACD,EAAE,EAAE,UAAU,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;CAClB;;;;;;
+{"version":3,"file":"verifyPassword.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/auth/funcs/verifyPassword.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAWpD,wBAA8B,cAAc,CAAC,EAC3C,EAAE,EACF,QAAQ,EACR,QAAQ,GACT,EAAE;IACD,EAAE,EAAE,UAAU,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;CAClB;;;;;;eAiCA"}
package/dist/server/plugins/auth/funcs/verifyPassword.js
CHANGED
@@ -12,7 +12,9 @@ export default async function verifyPassword({ pg, username, password, }) {
 if (!password || password === "")
 return { message: "not enough params: password" };
 const query = "select * from admin.users where $1 in (login,email,phone) and enabled limit 1";
-const json = await pg
+const json = await pg
+.query(query, [username])
+.then((el) => el.rows?.[0] || {});
 if (!json)
 return { message: "user not found" };
 let hash = "";
package/dist/server/plugins/crud/funcs/dataInsert.js
CHANGED
@@ -103,13 +103,13 @@ export default async function dataInsert({ id, table: table1, referer, data, pg:
 uid,
 type: "INSERT",
 });
-if (config.redis) {
+if (config.redis && rclient?.status !== "end") {
 rclient.incr(`pg:${table}:crud`);
 }
 if (!isClient) {
 await client.query("commit;");
 }
-return res;
+return { ...res, id: id1, data: res.rows[0] };
 }
 catch (err) {
 logger.file("crud/insert", {
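Note on the dataInsert hunk above: 2.0.108 now resolves with the generated id and the inserted row instead of only the raw query result. A minimal consumer sketch, assuming the function is imported from its dist path and a default pg client is already configured (both assumptions, not shown in this diff; table and field names are illustrative):

    // Hypothetical usage of the new dataInsert return shape in 2.0.108.
    import dataInsert from "@opengis/fastify-table/dist/server/plugins/crud/funcs/dataInsert.js"; // assumed import path

    const res = await dataInsert({ table: "admin.users", data: { login: "demo" }, uid: 1 });
    console.log(res.id, res.data); // 2.0.106 returned only the raw pg result (`return res;`)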
package/dist/server/plugins/crud/funcs/dataUpdate.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"dataUpdate.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/crud/funcs/dataUpdate.ts"],"names":[],"mappings":"AAWA,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAepD,wBAA8B,UAAU,CAAC,EACvC,KAAK,EACL,SAAS,EACT,OAAO,EACP,EAAE,EACF,IAAI,EACJ,EAAE,EAAE,GAAG,EACP,GAAG,GACJ,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAChC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,EAAE,EAAE,MAAM,GAAG,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC1B,EAAE,CAAC,EAAE,UAAU,CAAC;IAChB,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,
+{"version":3,"file":"dataUpdate.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/crud/funcs/dataUpdate.ts"],"names":[],"mappings":"AAWA,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAepD,wBAA8B,UAAU,CAAC,EACvC,KAAK,EACL,SAAS,EACT,OAAO,EACP,EAAE,EACF,IAAI,EACJ,EAAE,EAAE,GAAG,EACP,GAAG,GACJ,EAAE;IACD,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAChC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,EAAE,EAAE,MAAM,GAAG,MAAM,CAAC;IACpB,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC1B,EAAE,CAAC,EAAE,UAAU,CAAC;IAChB,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,gBAkPA"}
package/dist/server/plugins/crud/funcs/dataUpdate.js
CHANGED
@@ -20,11 +20,13 @@ function assignValue(key, i, srid = 4326, columnType = "text") {
 return `"${key}"=$${i + 2}`;
 }
 export default async function dataUpdate({ table, tokenData, referer, id, data, pg: pg1, uid, }) {
-if (!data || !table || !id)
+if (!data || !table || !id) {
 return null;
+}
 const pg = pg1 || getPG({ name: "client" });
-if (!pg)
+if (!pg) {
 return null;
+}
 // pg client single transaction support
 if (!pg?.pk && config.pg) {
 pg.options = pgClients.client?.options;
@@ -34,8 +36,9 @@ export default async function dataUpdate({ table, tokenData, referer, id, data,
 pg.pk = pgClients.client?.pk;
 }
 const { columns, pk } = await getMeta({ pg, table });
-if (!columns)
+if (!columns) {
 return null;
+}
 const names = columns.map((el) => el.name);
 const types = columns.reduce((acc, { name, dataTypeID }) => ({
 ...acc,
@@ -73,7 +76,7 @@ export default async function dataUpdate({ table, tokenData, referer, id, data,
 json_object_agg(f_geometry_column, case when srid = 0 then 4326 else srid end) as rel
 from public.geometry_columns group by f_table_schema||'.'||f_table_name
 )q`)
-.then((
+.then((el) => el.rows?.[0] || {});
 Object.assign(srids, srids1);
 }
 const updateQuery = `UPDATE ${table} SET ${systemColumns ? `${systemColumns}${filterData?.length ? "," : ""}` : ""}
@@ -167,13 +170,13 @@ export default async function dataUpdate({ table, tokenData, referer, id, data,
 uid,
 type: "UPDATE",
 });
-if (config.redis) {
+if (config.redis && rclient?.status !== "end") {
 rclient.incr(`pg:${table}:crud`);
 }
 if (!isClient) {
 await client.query("commit;");
 }
-return res || {};
+return { ...(res || {}), id };
 }
 catch (err) {
 logger.file("crud/update", {
package/dist/server/plugins/crud/funcs/validateData.js
CHANGED
@@ -81,7 +81,7 @@ function checkBody({ body = {}, arr = [], idx }) {
 }, []);
 const invalidField = res.find((el) => el?.error);
 if (invalidField) {
-console.warn("invalid field: ", invalidField?.key, invalidField?.error);
+console.warn("⚠️ invalid field: ", invalidField?.key, invalidField?.error);
 return invalidField;
 }
 return { message: "ok" };
package/dist/server/plugins/file/providers/fs.js
CHANGED
@@ -51,12 +51,12 @@ const uploadFile = () => async (fp, data, opt = {}) => {
 }
 await fsp.mkdir(path.dirname(filepath), { recursive: true });
 try {
-const exists = await isFileExists(
+const exists = await isFileExists(filepath);
 if (!exists) {
 await fsp.writeFile(filepath, validData, opt);
 }
 else if (isBuffer(validData) || isReadableStream(validData)) {
-await fsp.
+await fsp.appendFile(filepath, validData, opt);
 }
 else {
 await fsp.copyFile(validData, filepath);
package/dist/server/plugins/file/providers/s3/funcs/downloadFile.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"downloadFile.d.ts","sourceRoot":"","sources":["../../../../../../../server/plugins/file/providers/s3/funcs/downloadFile.ts"],"names":[],"mappings":"AAgBA,QAAA,MAAM,aAAa,GAChB,aAAa,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,MAC1B,IAAI,MAAM,EAAE,UAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,
+{"version":3,"file":"downloadFile.d.ts","sourceRoot":"","sources":["../../../../../../../server/plugins/file/providers/s3/funcs/downloadFile.ts"],"names":[],"mappings":"AAgBA,QAAA,MAAM,aAAa,GAChB,aAAa,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,MAC1B,IAAI,MAAM,EAAE,UAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,iBA0CnD,CAAC;AAEJ,eAAe,aAAa,CAAC"}
package/dist/server/plugins/file/providers/s3/funcs/downloadFile.js
CHANGED
@@ -11,8 +11,7 @@ import getS3FilePath from "./utils/getS3FilePath.js";
 const getFileStream = (s3Settings) => async (fp, options = {}) => {
 const filepath = getS3FilePath(fp, s3Settings);
 const bucketParams = {
-Bucket:
-.containerName,
+Bucket: s3Settings?.containerName || config.s3?.containerName || "work",
 Key: filepath[0] === "/" ? filepath?.slice(1) : filepath,
 Range: options.Range,
 };
package/dist/server/plugins/file/providers/s3/funcs/fileExists.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"fileExists.d.ts","sourceRoot":"","sources":["../../../../../../../server/plugins/file/providers/s3/funcs/fileExists.ts"],"names":[],"mappings":"AAWA,QAAA,MAAM,eAAe,GAClB,aAAa,GAAG,MACV,IAAI,GAAG,EAAE,UAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,
+{"version":3,"file":"fileExists.d.ts","sourceRoot":"","sources":["../../../../../../../server/plugins/file/providers/s3/funcs/fileExists.ts"],"names":[],"mappings":"AAWA,QAAA,MAAM,eAAe,GAClB,aAAa,GAAG,MACV,IAAI,GAAG,EAAE,UAAS,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,iBAkBhD,CAAC;AAEJ,eAAe,eAAe,CAAC"}
package/dist/server/plugins/file/providers/s3/funcs/fileExists.js
CHANGED
@@ -8,8 +8,7 @@ import isFileExists from "../../../utils/isFileExists.js";
 const getFileMetadata = (s3Settings) => async (fp, options = {}) => {
 const filepath = getS3FilePath(fp, s3Settings);
 const bucketParams = {
-Bucket:
-.containerName,
+Bucket: s3Settings?.containerName || config.s3?.containerName || "work",
 Key: filepath[0] === "/" ? filepath?.slice(1) : filepath,
 };
 try {
package/dist/server/plugins/file/providers/s3/funcs/uploadFile.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"uploadFile.d.ts","sourceRoot":"","sources":["../../../../../../../server/plugins/file/providers/s3/funcs/uploadFile.ts"],"names":[],"mappings":"AAaA,QAAA,MAAM,UAAU,GAAI,aAAa,GAAG,MAAY,IAAI,GAAG,EAAE,MAAM,GAAG,+
+{"version":3,"file":"uploadFile.d.ts","sourceRoot":"","sources":["../../../../../../../server/plugins/file/providers/s3/funcs/uploadFile.ts"],"names":[],"mappings":"AAaA,QAAA,MAAM,UAAU,GAAI,aAAa,GAAG,MAAY,IAAI,GAAG,EAAE,MAAM,GAAG,+EA6BjE,CAAC;AAEF,eAAe,UAAU,CAAC"}
package/dist/server/plugins/file/providers/s3/funcs/uploadFile.js
CHANGED
@@ -13,8 +13,7 @@ const uploadFile = (s3Settings) => async (fp, data) => {
 const type = getMimeType(filepath);
 try {
 const bucketParams = {
-Bucket:
-.containerName,
+Bucket: s3Settings?.containerName || config.s3?.containerName || "work",
 Key: filepath,
 ContentLength: size,
 ContentType: type,
package/dist/server/plugins/file/providers/s3/index.js
CHANGED
@@ -1,9 +1,9 @@
-import downloadFile from
-import fileExists from
-import uploadFile from
+import downloadFile from "./funcs/downloadFile.js";
+import fileExists from "./funcs/fileExists.js";
+import uploadFile from "./funcs/uploadFile.js";
 export default function s3Storage(opt) {
 return {
-name:
+name: "s3",
 downloadFile: downloadFile(opt),
 uploadFile: uploadFile(opt),
 fileExists: fileExists(opt),
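The s3/index.js hunk restores the import specifiers and provider name that shipped truncated in 2.0.106. A minimal sketch of the provider object it builds, assuming s3Storage is the module's default export and containerName is the relevant option (taken from the hunks above; paths and values are illustrative):

    import s3Storage from "./index.js"; // i.e. dist/server/plugins/file/providers/s3/index.js

    const storage = s3Storage({ containerName: "work" });
    await storage.uploadFile("docs/report.pdf", Buffer.from("hello"));
    const meta = await storage.fileExists("docs/report.pdf"); // wraps getFileMetadata above
    const stream = await storage.downloadFile("docs/report.pdf", { Range: "bytes=0-1023" });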
package/dist/server/plugins/logger/getLogger.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"getLogger.d.ts","sourceRoot":"","sources":["../../../../server/plugins/logger/getLogger.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,SAAS,CAAC;AAE9C,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAE5B,UAAU,cAAe,SAAQ,IAAI,CAAC,MAAM;IAC1C,IAAI,EAAE,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,cAAc,KAAK,IAAI,CAAC;IAClE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,IAAI,CAAC;CAC5D;
+{"version":3,"file":"getLogger.d.ts","sourceRoot":"","sources":["../../../../server/plugins/logger/getLogger.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,cAAc,EAAE,MAAM,SAAS,CAAC;AAE9C,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAE5B,UAAU,cAAe,SAAQ,IAAI,CAAC,MAAM;IAC1C,IAAI,EAAE,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,EAAE,cAAc,KAAK,IAAI,CAAC;IAClE,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,EAAE,MAAM,KAAK,IAAI,CAAC;CAC5D;AAmDD,QAAA,MAAM,MAAM,EAAE,cAAgD,CAAC;AAiC/D,eAAe,MAAM,CAAC"}
package/dist/server/plugins/logger/getLogger.js
CHANGED
@@ -9,16 +9,19 @@ import timestampWithTimeZone from "./timestampWithTimeZone.js";
 const isServer = process.argv[2];
 const level = process.env.LOG_LEVEL || "info";
 console.log(`log level: ${level}`);
-
-
-
-
-
-
-
-
-
-
+// skip logging during npx vitest run
+const targets = process.env.VITEST
+? []
+: [
+{
+target: "./createFileStream.js", // path.resolve('utils/createFileStream.js')
+},
+{
+level: "error",
+target: "pino/file",
+options: { destination: 1 },
+},
+];
 // push trace logs to console (those would not be saved to file)
 if (["trace", "debug"].includes(level)) {
 targets.push({
@@ -56,6 +59,6 @@ logger.metrics = function metrics(key, val, dbName) {
 return;
 rclient2
 .hincrby(`${dbname}:system_metrics`, key, val || 1)
-.catch((err) => console.warn("logger metrics error", err.toString()));
+.catch((err) => console.warn("⚠️ logger metrics error", err.toString()));
 };
 export default logger;
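The getLogger.js hunk builds a transport targets array (emptied entirely under Vitest) with a file-stream target plus a "pino/file" target for errors. A sketch of how such an array is typically consumed, assuming the logger is pino-based (suggested by the "pino/file" target; the actual wiring is not shown in this diff and the values below are illustrative):

    import pino from "pino";

    const transport = pino.transport({
        targets: [{ target: "pino/file", level: "error", options: { destination: 1 } }],
    });
    const logger = pino({ level: process.env.LOG_LEVEL || "info" }, transport);
    logger.error("transport smoke test");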
package/dist/server/plugins/logger/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/logger/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAgC,MAAM,SAAS,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/logger/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,eAAe,EAAgC,MAAM,SAAS,CAAC;AAyC7E,iBAAe,MAAM,CAAC,OAAO,EAAE,eAAe,iBAwC7C;AACD,eAAe,MAAM,CAAC"}
package/dist/server/plugins/logger/index.js
CHANGED
@@ -6,7 +6,7 @@ process.on("uncaughtException", (err) => {
 if (err.message === "Connection terminated unexpectedly" ||
 err.code === "ECONNRESET") {
 logger.file("pg", { error: err.toString() });
-console.warn("Ignored pg connection drop:", err.toString());
+console.warn("⚠️ Ignored pg connection drop:", err.toString());
 return;
 }
 throw err; // keep normal behavior
@@ -18,12 +18,13 @@ process.on("unhandledRejection", (err) => {
 "Connection terminated due to connection timeout",
 ].includes(err.message)) {
 logger.file("pg", { error: err.message });
-console.warn("Ignored pg connection timeout / close:", err.toString());
+console.warn("⚠️ Ignored pg connection timeout / close:", err.toString());
 return;
 }
-if (err.message === "Connection is closed."
+if (err.message === "Connection is closed." ||
+err.message.includes("maxRetriesPerRequest")) {
 logger.file("redis", { error: err.message });
-console.warn("Ignored redis connection close:", err.toString());
+console.warn("⚠️ Ignored redis connection close:", err.toString());
 return;
 }
 throw err;
package/dist/server/plugins/migration/exec.migrations.js
CHANGED
@@ -10,19 +10,19 @@ export default async function execMigrations(dirPath, pg = pgClients.client, isc
 if (!dirPath) {
 const txt = "migrations skip: path not specified";
 if (debug)
-console.warn(txt);
+console.warn(`⚠️ ${txt}`);
 return txt;
 }
 if (config.migrationsCore === false && iscore) {
 const txt = `migrations skip: core - ${dirPath}`;
 if (debug)
-console.
+console.warn(`⚠️ ${txt}`);
 return txt;
 }
 if (config.migrations === false && !iscore) {
 const txt = `migrations skip: path - ${dirPath}`;
 if (debug)
-console.
+console.warn(`⚠️ ${txt}`);
 return txt;
 }
 if (process.env.NODE_ENV !== "production" &&
@@ -30,7 +30,7 @@ export default async function execMigrations(dirPath, pg = pgClients.client, isc
 !(iscore ? config.migrationsCore : config.migrations)) {
 const txt = `migrations skip: not a production environment - ${iscore ? "core" : "path"} : ${dirPath}`;
 if (debug)
-console.
+console.warn(`⚠️ ${txt}`);
 return txt;
 }
 if (debug)
@@ -39,7 +39,7 @@ export default async function execMigrations(dirPath, pg = pgClients.client, isc
 if (!exists) {
 const txt = `migrations skip: directory not found - ${dirPath}`;
 if (debug)
-console.warn(txt);
+console.warn(`⚠️ ${txt}`);
 return txt;
 }
 // get directory sql file list
@@ -50,7 +50,7 @@ export default async function execMigrations(dirPath, pg = pgClients.client, isc
 if (!content?.length) {
 const txt = `migrations skip: no sql in specified directory - ${dirPath}`;
 if (debug)
-console.warn(txt);
+console.warn(`⚠️ ${txt}`);
 return txt;
 }
 await content.reduce((promise, filename) => promise.then(() => execSql(path.join(dirPath, filename), pg)), Promise.resolve());
package/dist/server/plugins/pg/funcs/getMeta.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"getMeta.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/getMeta.ts"],"names":[],"mappings":"AAKA,wBAA8B,OAAO,CAAC,GAAG,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,GAAG,
+{"version":3,"file":"getMeta.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/getMeta.ts"],"names":[],"mappings":"AAKA,wBAA8B,OAAO,CAAC,GAAG,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE,GAAG,gBAuE5D"}
package/dist/server/plugins/pg/funcs/getMeta.js
CHANGED
@@ -6,9 +6,7 @@ export default async function getMeta(opt, nocache) {
 if (!pg)
 return { error: "pg connection not established", status: 400 };
 // reconnect if at start of process connection was unavailable
-if (!pg.pk) {
-pg.connectionAttempt = 0;
-pg.init = init(pg);
+if (!pg.pk || nocache) {
 await init(pg);
 }
 const table1 = opt?.table || opt;
@@ -27,8 +25,8 @@ export default async function getMeta(opt, nocache) {
 WHERE c.contype='p'::"char" and c.conrelid::regclass = $1::regclass`, [table])
 .then((el) => el.rows[0].pks1 || {});
 const pk = table.startsWith("public.")
-? pks1[table.replace("public.", "")]
-: pks1[table]
+? pks1[table.replace("public.", "")]
+: pks1[table];
 const geomColumns = fields.filter((el) => pg.pgType?.[el.dataTypeID] === "geometry");
 const geomAttr = geomColumns.find((el) => el.name === "geom_4326") || geomColumns[0];
 const dbColumns = await pg
package/dist/server/plugins/pg/funcs/getPG.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"getPG.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/getPG.ts"],"names":[],"mappings":"AAQA,iBAAS,KAAK,CAAC,KAAK,GAAE,GAAQ,
+{"version":3,"file":"getPG.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/getPG.ts"],"names":[],"mappings":"AAQA,iBAAS,KAAK,CAAC,KAAK,GAAE,GAAQ,OAyD7B;AAMD,eAAe,KAAK,CAAC"}
package/dist/server/plugins/pg/funcs/getPG.js
CHANGED
@@ -5,6 +5,7 @@ import dblist from "../../../../dblist.js";
 import pgClients from "../pgClients.js";
 import getDBParams from "./getDBParams.js";
 function getPG(param = {}) {
+// console.log("config.pg", config.pg, process.env); // ! process.env.VITEST always in uppercase, debug to properly test via npx vitest run
 if (!config.pg)
 return null;
 const dbListParams = dblist.find((el) => el.key === param?.key) ||
@@ -35,7 +36,7 @@ function getPG(param = {}) {
 pgClients[name] = pool(dbConfig);
 pgClients[name].init = () => init(pgClients[name]); // for compatibility
 // force init
-init(pgClients[name]).catch((err) => console.warn("pg client init error", host, port, dbConfig.database));
+init(pgClients[name]).catch((err) => console.warn("⚠️ pg client init error", host, port, dbConfig.database));
 return pgClients[name];
 }
 if (config.pg) {
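The pg hunks above make getMeta re-run init() whenever primary-key metadata is missing or the nocache flag is passed, and prefix the client-init warnings. A small sketch of the call pattern these helpers use elsewhere in this diff (import paths and the table name are illustrative, not part of the diff):

    import getPG from "./getPG.js"; // dist/server/plugins/pg/funcs
    import getMeta from "./getMeta.js";

    const pg = getPG({ name: "client" }); // same call used by dataUpdate.js above
    const { columns, pk } = await getMeta({ pg, table: "admin.users" }, true); // nocache=true now forces a re-init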
package/dist/server/plugins/pg/funcs/getPGAsync.js
CHANGED
@@ -2,10 +2,10 @@ import init from "./init.js";
 import getPG from "./getPG.js";
 async function getPGAsync(param) {
 const client = getPG(param);
-const { host, port, database } = client
+const { host, port, database } = client?.options || {};
 if (client && !client.tlist) {
 // force init
-await init(client).catch((err) => console.warn("PG client init error", host, port, database));
+await init(client).catch((err) => console.warn("⚠️ PG client init error", host, port, database));
 }
 return client;
 }
package/dist/server/plugins/pg/funcs/init.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"init.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/init.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAIpD,iBAAe,IAAI,CAAC,MAAM,EAAE,UAAU,
+{"version":3,"file":"init.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/init.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAIpD,iBAAe,IAAI,CAAC,MAAM,CAAC,EAAE,UAAU,iBAsNtC;AAGD,eAAe,IAAI,CAAC"}