@opengis/fastify-table 1.4.29 → 1.4.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/config.js CHANGED
@@ -1,5 +1,4 @@
  import dotenv from 'dotenv';
- import path from 'node:path';

  import { existsSync, readFileSync } from 'node:fs';

@@ -8,46 +7,21 @@ import unflattenObject from './server/plugins/util/funcs/unflattenObject.js';
  const fileName = ['config.json', '/data/local/config.json'].find(el => (existsSync(el) ? el : null));
  const config = fileName ? JSON.parse(readFileSync(fileName)) : {};

+ const { skipKeys = ['windir'] } = config;
+
  // npm run dev === cross-env NODE_ENV=development
  // alt: node --env=development
  Object.assign(config, {
    storageList: {},
    allTemplates: config?.allTemplates || {},
    skipCheckPolicyRoutes: [],
-   env: process.env?.NODE_ENV || process.argv[2]?.split?.('=')?.pop?.(),
+   env: process.env?.NODE_ENV,
  });

  function loadEnvConfig() {
-   if (config.env && existsSync(`.env.${config.env}`)) {
-     const { parsed } = dotenv.config({ path: `.env.${config.env}` });
-     if (parsed) {
-       console.log('start with env:', config.env);
-
-       const obj = unflattenObject(parsed);
-
-       Object.keys(obj || {})
-         .filter(key => typeof obj[key] === 'string'
-           && (obj[key].startsWith('[') || ['true', 'false'].includes(obj[key]))) // json array / boolean
-         .forEach(key => {
-           try {
-             obj[key] = JSON.parse(obj[key]);
-           }
-           catch (err) {
-             console.warn(`Invalid JSON for key "${key}": ${obj[key]}`);
-           }
-         });
-       if (obj) {
-         Object.assign(config, obj);
-         console.log('env init success', config.env, config.pg?.database);
-       }
-       else {
-         console.log('env init error', config.env, config.pg?.database);
-       }
-     }
-     else {
-       console.error('env init error: malformed file', config.env);
-     }
-   }
+   // node --env-file-if-exists=.env.dev --env-file-if-exists=.env server
+   const configKeys = Object.keys(process.env).filter(key => !skipKeys.includes(key) && key.charAt(0) === key.charAt(0)?.toLowerCase?.() || key.includes('.')).reduce((acc, curr) => ({ ...acc, [curr]: process.env[curr] }), {});
+   Object.assign(config, unflattenObject(configKeys));
  }

  loadEnvConfig();
package/index.js CHANGED
@@ -5,6 +5,8 @@ import { fileURLToPath } from 'node:url';

  import config from './config.js';

+ const { maxFileSize = 512 } = config;
+
  // helpers
  // import helperPlugin from './server/helpers/index.js';

@@ -113,7 +115,11 @@ async function plugin(fastify, opt) {
    templatesRoutes(fastify, opt);

    // from fastify-file
-   await fastify.register(import('@fastify/multipart')); // content parser, await before adding upload routes
+   await fastify.register(import('@fastify/multipart'), {
+     limits: {
+       fileSize: maxFileSize * 1024 * 1024,
+     },
+   }); // content parser, await before adding upload routes
    fastify.register(import('./server/routes/file/index.mjs'), opt);
    fastify.register(import('./server/routes/grpc/index.mjs'), opt);

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@opengis/fastify-table",
-   "version": "1.4.29",
+   "version": "1.4.31",
    "type": "module",
    "description": "core-plugins",
    "keywords": [
@@ -26,7 +26,9 @@
      "test:routes": "node --test .\\test\\routes",
      "test:functions": "node --test .\\test\\functions",
      "compress": "node compress.js",
-     "dev": "set NODE_ENV=development&& node server.js"
+     "dev1": "set NODE_ENV=dev&& node server.js",
+     "dev": "node --env-file=.env.dev --env-file=.env server",
+     "start": "node --env-file=.env.dev server"
    },
    "dependencies": {
      "@aws-sdk/client-s3": "3.554.0",
@@ -37,7 +39,6 @@
      "dotenv": "16.5.0",
      "fastify": "5.3.3",
      "fastify-plugin": "5.0.1",
-     "formidable": "3.5.1",
      "handlebars": "4.7.8",
      "image-size": "1.2.0",
      "ioredis": "5.3.2",
@@ -1,12 +1,12 @@
  create schema if not exists admin;
  create schema if not exists crm;
- create schema if not exists setting;

  CREATE EXTENSION if not exists pg_trgm SCHEMA public VERSION "1.5";

  -- drop old
- DROP TABLE IF EXISTS setting.extra_data;
+ DROP TABLE IF EXISTS admin.user_properties;
  DROP TABLE IF EXISTS admin.table_properties;
+ DROP SCHEMA IF EXISTS setting cascade;

  CREATE TABLE IF NOT EXISTS admin.properties();
  ALTER TABLE admin.properties DROP CONSTRAINT IF EXISTS admin_properties_property_id_pkey;
@@ -37,40 +37,6 @@ ALTER TABLE admin.properties ADD CONSTRAINT admin_properties_property_id_pkey PR

  COMMENT ON TABLE admin.properties IS 'Налаштування';

- CREATE TABLE IF NOT EXISTS admin.user_properties();
- ALTER TABLE admin.user_properties DROP CONSTRAINT IF EXISTS admin_user_properties_property_id_pkey;
- alter table admin.user_properties drop constraint if exists user_properties_key_uid_unique;
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS property_id text NOT NULL DEFAULT next_id();
-
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS property_key text;
- COMMENT ON COLUMN admin.user_properties.property_key IS 'Ключ';
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS property_json json;
- COMMENT ON COLUMN admin.user_properties.property_json IS 'Налаштування json';
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS property_text text;
- COMMENT ON COLUMN admin.user_properties.property_text IS 'Налаштування text';
-
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS property_entity text;
- COMMENT ON COLUMN admin.user_properties.property_entity IS 'Сутність';
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS property_title text;
- COMMENT ON COLUMN admin.user_properties.property_title IS 'Назва';
-
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS private boolean;
- update admin.user_properties set private = true where private is null;
- ALTER TABLE admin.user_properties ALTER COLUMN private SET NOT NULL;
- ALTER TABLE admin.user_properties ALTER COLUMN private SET default true;
- COMMENT ON COLUMN admin.user_properties.private IS 'Доступ лише для користувача, який додав дане налаштування';
-
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS uid text NOT NULL DEFAULT '1'::text;
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS editor_id text;
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS editor_date timestamp without time zone;
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS cdate timestamp without time zone DEFAULT now();
- ALTER TABLE admin.user_properties ADD COLUMN IF NOT EXISTS files json;
-
- ALTER TABLE admin.user_properties ADD CONSTRAINT admin_user_properties_property_id_pkey PRIMARY KEY(property_id);
- ALTER TABLE admin.user_properties ADD CONSTRAINT user_properties_key_uid_unique UNIQUE (property_key,property_entity,uid);
-
- COMMENT ON TABLE admin.user_properties IS 'Налаштування користувача';
-
  CREATE TABLE IF NOT EXISTS admin.custom_column();
  ALTER TABLE admin.custom_column DROP CONSTRAINT IF EXISTS admin_custom_column_column_id_pkey cascade;
  alter table admin.custom_column drop constraint if exists admin_custom_column_name_entity_uid_unique;
@@ -147,42 +113,3 @@ CREATE INDEX IF NOT EXISTS crm_extra_data_cdate_btree_idx ON crm.extra_data USIN
  CREATE INDEX IF NOT EXISTS crm_extra_data_property_id_btree_idx ON crm.extra_data USING btree (property_id COLLATE pg_catalog."default");
  CREATE INDEX IF NOT EXISTS crm_extra_data_property_entity_btree_idx ON crm.extra_data USING btree (property_entity COLLATE pg_catalog."default");
  CREATE INDEX IF NOT EXISTS crm_extra_data_object_id_btree_idx ON crm.extra_data USING btree (object_id COLLATE pg_catalog."default");
-
-
- CREATE TABLE IF NOT EXISTS setting.property ();
- ALTER TABLE setting.property DROP CONSTRAINT IF EXISTS setting_property_pkey;
-
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS property_id text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS property_entity text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS property_key text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS property_text text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS property_json json;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS property_int integer;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS level text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS object_id text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS category text;
-
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS uid text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS editor_id text;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS editor_date timestamp without time zone;
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS cdate timestamp without time zone;
- ALTER TABLE setting.property ALTER COLUMN cdate SET DEFAULT date_trunc('seconds'::text, now());
- ALTER TABLE setting.property ADD COLUMN IF NOT EXISTS files json;
-
- ALTER TABLE setting.property ADD CONSTRAINT setting_property_pkey PRIMARY KEY (property_id);
-
- COMMENT ON TABLE setting.property IS 'Налаштування';
- COMMENT ON COLUMN setting.property.property_entity IS 'Сутність';
- COMMENT ON COLUMN setting.property.property_key IS 'Ключ';
- COMMENT ON COLUMN setting.property.property_text IS 'Текстове значення налаштування';
- COMMENT ON COLUMN setting.property.property_json IS 'Значення налаштування';
- COMMENT ON COLUMN setting.property.level IS 'Рівень (user/system)';
- COMMENT ON COLUMN setting.property.object_id IS 'ID Об''єкту';
- COMMENT ON COLUMN setting.property.category IS 'Категорія (filter/menu/...)';
- COMMENT ON COLUMN setting.property.property_int IS 'Цілочислове значения';
-
- CREATE INDEX if not exists setting_property_object_id_gin_idx ON setting.property USING gin (object_id COLLATE pg_catalog."default" gin_trgm_ops);
- CREATE INDEX if not exists setting_property_property_entity_gin_idx ON setting.property USING gin (property_entity COLLATE pg_catalog."default" gin_trgm_ops);
- CREATE INDEX if not exists setting_property_property_int_btree_idx ON setting.property USING btree (property_int);
- CREATE INDEX if not exists setting_property_property_key_gin_idx ON setting.property USING gin (property_key COLLATE pg_catalog."default" gin_trgm_ops);
- CREATE INDEX if not exists setting_property_property_text_gin_idx ON setting.property USING gin (property_text COLLATE pg_catalog."default" gin_trgm_ops);
@@ -47,7 +47,7 @@ async function runCron({

  async function plugin(fastify) {
    if (config.cronList?.length) {
-     config.cronList?.filter(el => el.query && !el.disabled)?.forEach?.((el, idx) => {
+     config.cronList?.filter?.(el => el.query && !el.disabled)?.forEach?.((el, idx) => {
        const { interval, db, query } = el;
        const name = createHash('md5').update(`${config.port || 3000}:${db}:${query}`).digest('hex');
        const pg = getPG(db);
@@ -32,59 +32,55 @@ export default async function dataDelete({

    const delQuery = `delete from ${table} WHERE ${pk}::text = $1::text returning *`;

-   // for transactions
-   const isClient = typeof pg.query === 'function' && typeof pg.release === 'function';
-   const client = isClient ? pg : await pg.connect();
+   // for transactions
+   const isClient = typeof pg.query === 'function' && typeof pg.release === 'function';
+   const client = isClient ? pg : await pg.connect();
+
+   if (isClient || !client.pk) {
+     client.options = pg.options;
+     client.tlist = pg.tlist;
+     client.pgType = pg.pgType;
+     client.relkinds = pg.relkinds;
+     client.pk = pg.pk;
+   }

+   try {
      if (!isClient) {
-     client.caller = 'dataDelete';
-   }
-
-   if (isClient || !client.pk) {
-     client.options = pg.options;
-     client.tlist = pg.tlist;
-     client.pgType = pg.pgType;
-     client.relkinds = pg.relkinds;
-     client.pk = pg.pk;
+       await client.query('begin;');
      }

-   try {
-     if (client.caller === 'dataDelete') {
-       await client.query('begin;');
-     }
+     const row = {};
+     await extraData({
+       table, form: tokenData?.form, id, uid, row,
+     }, client);

-     const row = {};
-     await extraData({
-       table, form: tokenData?.form, id, uid, row,
-     }, client);
+     const res = await client.query(delQuery, [id])
+       .then(el => (el.rows?.[0] ? { rowCount: 1, ...el.rows[0] } : {}));

-     const res = await client.query(delQuery, [id])
-       .then(el => (el.rows?.[0] ? { rowCount: 1, ...el.rows[0] } : {}));
+     await logChanges({
+       pg: client, table, tokenData, referer, id, uid, type: 'DELETE',
+     });

-     await logChanges({
-       pg: client, table, tokenData, referer, id, uid, type: 'DELETE',
-     });
+     if (config.redis) { rclient.incr(`pg:${table}:crud`); }

-     if (config.redis) { rclient.incr(`pg:${table}:crud`); }
-
-     if (client.caller === 'dataDelete') {
-       await client.query('commit;');
-     }
-
-     return { ...res, ...row };
+     if (!isClient) {
+       await client.query('commit;');
      }
-   catch (err) {
-     logger.file('crud/delete', {
-       error: err.toString(), stack: err.stack, table, id, referer, uid, form: tokenData?.form,
-     });
-     if (client.caller === 'dataDelete') {
-       await client.query('rollback;');
-     }
-     throw err;
+
+     return { ...res, ...row };
+   }
+   catch (err) {
+     logger.file('crud/delete', {
+       error: err.toString(), stack: err.stack, table, id, referer, uid, form: tokenData?.form,
+     });
+     if (!isClient) {
+       await client.query('rollback;');
      }
-   finally {
-     if (client.caller === 'dataDelete') {
-       await client.query('begin;');
-     }
+     throw err;
+   }
+   finally {
+     if (!isClient) {
+       await client.query('begin;');
      }
+   }
  }
@@ -79,10 +79,6 @@ export default async function dataUpdate({
    const isClient = typeof pg.query === 'function' && typeof pg.release === 'function';
    const client = isClient ? pg : await pg.connect();

-   if (!isClient) {
-     client.caller = 'dataUpdate';
-   }
-
    if (isClient || !client.pk) {
      client.options = pg.options;
      client.tlist = pg.tlist;
@@ -92,7 +88,7 @@ export default async function dataUpdate({
    }

    try {
-     if (client.caller === 'dataUpdate') {
+     if (!isClient) {
        await client.query('begin;');
      }
      const res = await client.query(updateQuery, [id, ...filterValue])
@@ -161,7 +157,7 @@ export default async function dataUpdate({

      if (config.redis) { rclient.incr(`pg:${table}:crud`); }

-     if (client.caller === 'dataUpdate') {
+     if (!isClient) {
        await client.query('commit;');
      }
      return res || {};
@@ -170,13 +166,13 @@ export default async function dataUpdate({
      logger.file('crud/update', {
        error: err.toString(), stack: err.stack, table, id, referer, uid, form: tokenData?.form,
      });
-     if (client.caller === 'dataUpdate') {
+     if (!isClient) {
        await client.query('rollback;');
      }
      throw err;
    }
    finally {
-     if (client.caller === 'dataUpdate') {
+     if (!isClient) {
        client.release();
      }
    }
@@ -1,62 +1,100 @@
  import path from 'node:path';
- import { mkdir } from 'node:fs/promises';
+ import { mkdir, writeFile } from 'node:fs/promises';
+ import { randomUUID } from 'node:crypto';
+ import { imageSize } from 'image-size';

  import config from '../../../config.js';

  import providers from './providers/index.js';

- import uploadFileDisk from './utils/uploadFileDisk.js';
-
  import { all, images } from './utils/allowedExtensions.js';

+ import grpc from '../grpc/grpc.js';
+
+ const { resizeImage } = grpc();
+
+ const { resizeImageMinSize = 5 } = config; // resize images >= 5 MB by default
+
+ async function writeFileToDisk(file, buffer) {
+   if (!file?.filepath || !file.extension || !buffer) { return null; }
+
+   // resize big images
+   if (images.find(el => el === file.extension) && file.size >= (resizeImageMinSize * 1024 * 1024)) {
+     const { width = 320, height = 240 } = imageSize(buffer) || {};
+
+     const ratio = width / height;
+
+     const resizeWidth = Math.min(width, 2048);
+     const resizeHeight = resizeWidth / ratio;
+
+     const { result } = await resizeImage({
+       base64: buffer.toString('base64'),
+       width: resizeWidth,
+       height: resizeHeight,
+       quality: 75,
+     });
+
+     await writeFile(`${file.filepath.replace(`.${file.extension}`, `_original.${file.extension}`)}`, buffer);
+     await writeFile(file.filepath, Buffer.from(result, 'base64'));
+     return null;
+   }
+
+   await writeFile(file.filepath, buffer);
+   return null;
+ }
+
  export default async function uploadMultiPart(req) {
    const allowedExtensions = {
      '/file/upload-image/*': images,
    }[req.routeOptions?.url || ''] || all;

-   const dir = req.params?.['*'] || 'uploads';
-   const yearMonthDay = (new Date()).toISOString().split('T')[0];
-   const dbname = req.pg?.options?.database || req.pg?.database || config.pg?.database; // request / config params / default config params
-   const folder = path.join(config.root || `/data/local/${dbname || ''}`, config.folder || '', 'files', dir, yearMonthDay);
-   await mkdir(folder, { recursive: true });
-
-   const file = await uploadFileDisk({ req, folder }) || {};
-   if (!file?.filepath) throw new Error('upload error');
+   const parts = req.parts();
+   const part = await parts.next();

-   // originalFilepath for auto-resized images
-   const { filepath, originalFilepath } = file;
+   if (!part?.value?.filename) {
+     throw new Error('upload error');
+   }

-   const extName = path.extname(filepath).slice(1).toLowerCase();
+   const ext = path.extname(part.value.filename).toLowerCase();

    // check extension
-   if (allowedExtensions.indexOf(extName) === -1) {
+   if (!allowedExtensions.includes(ext.substring(1))) {
      throw new Error('file extension is not allowed');
    }

-   Object.assign(file, {
-     relativeOriginalFilepath: originalFilepath ? path.join(
-       '/files',
-       dir,
-       yearMonthDay,
-       path.relative(path.join(folder, 'files'), originalFilepath).replace(/\\/g, '/').replace(/\.\.\//g, ''),
-     ) : undefined,
-     relativeFilepath: path.join(
-       '/files',
-       dir,
-       yearMonthDay,
-       path.relative(path.join(folder, 'files'), filepath).replace(/\\/g, '/').replace(/\.\.\//g, ''),
-     ),
-   });
-   // console.log(path.join(folder, 'files), filepath);
+   const buffer = await part.value.toBuffer();
+
+   if (!buffer?.length) {
+     throw new Error('file buffer is empty');
+   }
+
+   const dir = req.params?.['*'] || 'uploads';
+   const yearMonthDay = (new Date()).toISOString().split('T')[0];
+
+   const dbname = req.pg?.options?.database || req.pg?.database || config.pg?.database; // request / config params / default config params
+
+   const rootDir = config.root || `/data/local/${dbname || ''}`;
+   const reldirpath = path.join('/files', dir, yearMonthDay);
+   const folder = path.join(rootDir, config.folder || '', reldirpath);
+
+   const newFilename = `${randomUUID()}${ext}`;
+
+   const file = {
+     originalFilename: part.value.filename,
+     filepath: path.join(folder, newFilename).replace(/\\/g, '/'),
+     relativeFilepath: path.join(reldirpath, newFilename).replace(/\\/g, '/'),
+     size: Buffer.byteLength(buffer),
+     mimetype: part.value.mimetype,
+     extension: ext.substring(1),
+   };
+
+   await mkdir(folder, { recursive: true });
+   await writeFileToDisk(file, buffer);

    // move file to s3
    if (config.s3?.endpoint) {
-     const fp = providers({ provider: 'fs' });
-     const data = await fp.downloadFile(filepath, { buffer: true });
-     if (typeof data === 'function') throw new Error('data is function!');
-     if (!data) throw new Error('upload error');
      const s3 = providers();
-     await s3.uploadFile(file.relativeFilepath, data);
+     await s3.uploadFile(file.relativeFilepath, buffer);
      if (config.trace) console.log('upload to s3', file.relativeFilepath);
    }

@@ -1,14 +1,28 @@
+ /* eslint-disable no-param-reassign */
  export default function unflattenObject(flatObj) {
    return Object.keys(flatObj).reduce((acc, key) => {
      const keys = key.split('.');
      keys.reduce((nestedObj, part, index) => {
        if (index === keys.length - 1) {
-         nestedObj[part] = flatObj[key];
-       } else {
+         // json array
+         if (typeof flatObj[key] === 'string' && flatObj[key].startsWith('[')) {
+           try {
+             nestedObj[part] = JSON.parse(flatObj[key]);
+           }
+           catch (err) {
+             console.error(`Error parsing JSON for key ${key}:`, err);
+             nestedObj[part] = flatObj[key]; // fallback to original value if parsing fails
+           }
+         }
+         else {
+           nestedObj[part] = ['true', 'false'].includes(flatObj[key]) ? JSON.parse(flatObj[key]) : flatObj[key];
+         }
+       }
+       else {
          nestedObj[part] = nestedObj[part] || {};
        }
        return nestedObj[part];
      }, acc);
      return acc;
    }, {});
- }
+ }
@@ -1,17 +1,33 @@
- export default async function getExtraProperties({
-   pg, params = {},
- }) {
-   const { id } = params;
-   if (!id) {
-     return { message: 'not enougn params', status: 400 };
-   }
-
-   const { rows = [] } = pg.pk?.['crm.properties']
-     ? await pg.query(`select property_key, property_type, property_text, property_int,
-     property_json, property_date from crm.properties where property_key is not null and object_id=$1`, [id])
-     : {};
-   if (!rows.length) return {};
-
-   const data = rows.reduce((acc, curr) => Object.assign(acc, { [curr.property_key]: curr[`property_${curr.property_type}`] }), {});
-   return { message: data, status: 200 };
- }
+ /* eslint-disable camelcase */
+ import pgClients from '../../../plugins/pg/pgClients.js';
+
+ export default async function getAppSettings({
+   pg = pgClients.client, query = {}, params = {}, user = {},
+ }, reply) {
+   const t1 = Date.now();
+
+   if (!pg) {
+     return reply.status(500).send('empty pg');
+   }
+
+   if (!pg.pk?.['admin.properties']) {
+     return reply.status(404).send('properties table not found');
+   }
+
+   if (params.entity === 'user' && !user.uid) {
+     return reply.status(401).send('unauthorized');
+   }
+
+   const { uid } = user;
+
+   const args = params.entity === 'user'
+     ? [user.uid]
+     : [params.entity || 'app'];
+
+   const { rows = [] } = await pg.query('select property_key as key, property_text, property_json from admin.properties where property_entity=$1', args);
+
+   const settings = rows.filter(row => (typeof query.keys === 'string' ? query.keys.includes(row.key) : true))
+     .reduce((acc, { key, property_text, property_json }) => ({ ...acc, [key]: property_text || property_json }), {});
+
+   return reply.status(200).send({ time: Date.now() - t1, uid, settings });
+ }
@@ -0,0 +1,74 @@
+ import { logger, dataInsert, pgClients } from '../../../../utils.js';
+
+ function checkValueType(val) {
+   if (val && typeof val === 'object') {
+     return 'property_json';
+   }
+   if (val && typeof val === 'number' && (!/\D/.test(val.toString()) && val.toString().length < 10)) {
+     return 'property_int';
+   }
+   return 'property_text';
+ }
+
+ export default async function postAppSettings({
+   pg = pgClients.client, body = {}, user = {}, params = {},
+ }, reply) {
+   const { uid } = user;
+
+   if ((!params.entity || params.entity === 'app') && !user?.user_type?.includes?.('admin')) {
+     return reply.status(403).send('access restricted');
+   }
+
+   if (!pg) {
+     return reply.status(500).send('empty pg');
+   }
+
+   if (!pg?.pk?.['admin.properties']) {
+     return reply.status(404).send('table not found');
+   }
+
+   const { key, val } = body;
+
+   if ((!key || !val) && !Object.keys(body).length) {
+     return reply.status(400).send('not enough body params');
+   }
+
+   const keys = Object.keys(body);
+   const entity = params.entity === 'user' ? user.uid : (params.entity || 'app');
+
+   const client = await pg.connect();
+
+   try {
+     await client.query('begin;');
+
+     await client.query('delete from admin.properties where property_entity=$1 and property_key=any($2)', [entity, keys]);
+
+     await Promise.all(keys.filter(el => body[el]).map(async (el) => {
+       const columnType = checkValueType(body[el]);
+
+       await dataInsert({
+         pg: client,
+         table: 'admin.properties',
+         data: {
+           property_key: el,
+           [columnType]: body[el],
+           property_entity: entity,
+         },
+         uid,
+       });
+     }));
+
+     await client.query('commit;');
+
+     return reply.status(200).send('ok');
+   }
+   catch (err) {
+     logger.file('properties/error', {
+       error: err.toString(), stack: err.stack, body, user, entity,
+     });
+     return reply.status(500).send(err.toString());
+   }
+   finally {
+     client.release();
+   }
+ }