@opengis/fastify-table 1.4.54 → 1.4.55

This diff shows the content of publicly available package versions as released to a supported public registry; it is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@opengis/fastify-table",
-  "version": "1.4.54",
+  "version": "1.4.55",
   "type": "module",
   "description": "core-plugins",
   "keywords": [
@@ -1,18 +1,18 @@
-import setToken from '../../plugins/crud/funcs/setToken.js';
-
-export default function tokenFunc(params) {
-  const { data, hash } = params;
-
-  if (!data?.root?.user?.uid && !hash.uid) return '-';
-  if (!hash || typeof hash !== 'object') return '-';
-
-  // const id = hash?.edit ? hash?.id : data?.root?.id;
-  // console.log(hash)
-  const [token] = setToken({
-    ids: [JSON.stringify(hash)],
-
-    uid: data?.root?.user?.uid || hash.uid,
-    array: 1,
-  });
-  return token;
-}
+import setToken from '../../plugins/crud/funcs/setToken.js';
+
+export default function tokenFunc(params) {
+  const { data, hash } = params;
+
+  if (!data?.root?.user?.uid && !hash.uid) return '-';
+  if (!hash || typeof hash !== 'object') return '-';
+
+  // const id = hash?.edit ? hash?.id : data?.root?.id;
+  // console.log(hash)
+  const [token] = setToken({
+    ids: [JSON.stringify(hash)],
+
+    uid: data?.root?.user?.uid || hash.uid,
+    array: 1,
+  });
+  return token;
+}
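
Note on tokenFunc (the removed and re-added sides above are byte-identical, so this rewrite is whitespace or line endings only): it signs the serialized hash for the current user via setToken. Also note that `hash.uid` is read in the first guard, before the `typeof hash !== 'object'` check, so a null `hash` throws instead of returning '-'. A minimal call sketch, with the params shape inferred from the destructuring above (nothing here comes from package docs):

    // hypothetical call site; data.root.user.uid carries the session user
    const token = tokenFunc({
      data: { root: { user: { uid: 42 } } }, // assumed shape
      hash: { table: 'users', id: 7 },       // row context to sign
    });
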
@@ -1,52 +1,52 @@
-import config from '../../../../config.js';
-
-import logger from '../../logger/getLogger.js';
-import pgClients from '../../pg/pgClients.js';
-
-import cronList from '../cronList.js';
-import runCron from './runCron.js';
-import interval2ms from './interval2ms.js';
-
-/**
- * interval:
- * - 02:54 - every day
- * - 2:03 - every day
- * - *1:43 - 2 times a day
- * - *12:03 - 2 times a day
- * - **:54 - every hour
- * - **:*3 - every 10 minutes
- * - 60 - every minute
- * - 10 * 60 - every 10 minutes
- */
-
-export default async function addCron(func, interval, pg = pgClients.client) {
-  const { time = {}, disabled = [] } = config.cron || {};
-
-  const name = func.name || func.toString().split('/').at(-1).split('\'')[0];
-
-  // if (!config.isServer) return;
-
-  if (disabled.includes(name)) {
-    logger.file('cron', { name, message: 'cron disabled' });
-    return;
-  }
-
-  cronList[name] = func;
-
-  const userInterval = time[name] || interval;
-  const [waitMs, intervalMs] = interval2ms[typeof interval](userInterval);
-
-  if (intervalMs < 1000) {
-    logger.file('cron', { name, error: `interval ${interval} too small` });
-    return;
-  }
-
-  // setTimeout to wait for the start time
-  setTimeout(() => {
-    runCron({ pg, func, name });
-    // interval
-    setInterval(() => {
-      runCron({ pg, func, name });
-    }, intervalMs);
-  }, waitMs);
-}
+import config from '../../../../config.js';
+
+import logger from '../../logger/getLogger.js';
+import pgClients from '../../pg/pgClients.js';
+
+import cronList from '../cronList.js';
+import runCron from './runCron.js';
+import interval2ms from './interval2ms.js';
+
+/**
+ * interval:
+ * - 02:54 - every day
+ * - 2:03 - every day
+ * - *1:43 - 2 times a day
+ * - *12:03 - 2 times a day
+ * - **:54 - every hour
+ * - **:*3 - every 10 minutes
+ * - 60 - every minute
+ * - 10 * 60 - every 10 minutes
+ */
+
+export default async function addCron(func, interval, pg = pgClients.client) {
+  const { time = {}, disabled = [] } = config.cron || {};
+
+  const name = func.name || func.toString().split('/').at(-1).split('\'')[0];
+
+  // if (!config.isServer) return;
+
+  if (disabled.includes(name)) {
+    logger.file('cron', { name, message: 'cron disabled' });
+    return;
+  }
+
+  cronList[name] = func;
+
+  const userInterval = time[name] || interval;
+  const [waitMs, intervalMs] = interval2ms[typeof interval](userInterval);
+
+  if (intervalMs < 1000) {
+    logger.file('cron', { name, error: `interval ${interval} too small` });
+    return;
+  }
+
+  // setTimeout to wait for the start time
+  setTimeout(() => {
+    runCron({ pg, func, name });
+    // interval
+    setInterval(() => {
+      runCron({ pg, func, name });
+    }, intervalMs);
+  }, waitMs);
+}
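
A hedged usage sketch for addCron, following the interval grammar documented in the comment above (job name and import path are hypothetical). Per the code, config.cron.time can override an interval by job name and config.cron.disabled skips jobs entirely; note also that interval2ms is selected by the type of the original interval argument, so a config override of a different type would be parsed with the wrong converter:

    import addCron from './addCron.js'; // relative path assumed

    async function cleanupSessions(pg) { /* hypothetical cron job */ }

    addCron(cleanupSessions, '02:54'); // string pattern: every day at 02:54
    addCron(cleanupSessions, 10 * 60); // number: seconds, here every 10 minutes
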
@@ -1,14 +1,14 @@
-import config from '../../../../config.js';
-import getRedis from '../../redis/funcs/getRedis.js';
-
-const rclient = getRedis({ db: 0 });
-
-export default async function getOpt(token, uid = 0) {
-  if (!config.redis) return null;
-
-  const key = `opt:${uid}:${token}`;
-  // console.log(key);
-  const data = await rclient.get(key);
-  if (!data) return null;
-  return JSON.parse(data);
-}
+import config from '../../../../config.js';
+import getRedis from '../../redis/funcs/getRedis.js';
+
+const rclient = getRedis({ db: 0 });
+
+export default async function getOpt(token, uid = 0) {
+  if (!config.redis) return null;
+
+  const key = `opt:${uid}:${token}`;
+  // console.log(key);
+  const data = await rclient.get(key);
+  if (!data) return null;
+  return JSON.parse(data);
+}
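
getOpt reads one-time options from Redis db 0 under the key `opt:<uid>:<token>`, returning the parsed JSON or null when Redis is unconfigured or the key is absent. A sketch of the matching writer (the setter and TTL are assumptions, only the reader appears in this diff; ioredis-style SET signature assumed):

    import getRedis from '../../redis/funcs/getRedis.js'; // same helper the module uses

    const rclient = getRedis({ db: 0 });
    await rclient.set('opt:42:a1b2c3', JSON.stringify({ otp: '123456' }), 'EX', 300);

    const data = await getOpt('a1b2c3', 42); // -> { otp: '123456' }, or null on a miss
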
@@ -1,11 +1,11 @@
-import path from 'node:path';
-
-import config from '../../../../../config.js';
-
-export default function getFolder(req, type = 'server') {
-  if (!['server', 'local'].includes(type)) throw new Error('params type is invalid');
-  const types = { local: req.root || config.root, server: req.mapServerRoot || config.mapServerRoot };
-  const dbname = req.pg?.options?.database || req.pg?.database || config.pg?.database; // request / config params / default config params
-  const filepath = path.posix.join(types[type] || `/data/local/${dbname || ''}`, req.folder || config.folder || '');
-  return filepath;
-}
+import path from 'node:path';
+
+import config from '../../../../../config.js';
+
+export default function getFolder(req, type = 'server') {
+  if (!['server', 'local'].includes(type)) throw new Error('params type is invalid');
+  const types = { local: req.root || config.root, server: req.mapServerRoot || config.mapServerRoot };
+  const dbname = req.pg?.options?.database || req.pg?.database || config.pg?.database; // request / config params / default config params
+  const filepath = path.posix.join(types[type] || `/data/local/${dbname || ''}`, req.folder || config.folder || '');
+  return filepath;
+}
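
getFolder picks the root per type ('server' → mapServerRoot, 'local' → root), falls back to `/data/local/<dbname>` when no root is configured, then appends the folder setting. An illustrative call (values assumed, not from the package):

    import getFolder from './getFolder.js'; // path assumed

    // with req.root and config.root unset, and a pg database named 'gis':
    getFolder({ pg: { database: 'gis' }, folder: 'files' }, 'local');
    // -> '/data/local/gis/files'
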
@@ -46,7 +46,7 @@ async function writeFileToDisk(file, buffer) {
   return null;
 }
 
-export default async function uploadMultiPart(req, { subdir, originalFilename = false } = {}) {
+export default async function uploadMultiPart(req, { buffer, subdir, originalFilename = false } = {}) {
   const allowedExtensions = {
     '/file/upload-image/*': images,
   }[req.routeOptions?.url || ''] || all;
@@ -72,17 +72,19 @@ export default async function uploadMultiPart(req, { subdir, originalFilename =
     throw new Error('file extension is not allowed');
   }
 
-  const buffer = await new Promise((res, rej) => {
+  const fileBuffer = await new Promise((res, rej) => {
     const chunks = [];
     value.file.on('data', chunk => chunks.push(chunk));
     value.file.on('end', () => res(Buffer.concat(chunks)));
     value.file.on('error', rej);
   });
 
-  if (!buffer?.length) {
+  if (!fileBuffer?.length) {
    throw new Error('file buffer is empty');
   }
 
+  if (buffer) return fileBuffer; // return buffer for custom upload
+
   const dir = subdir != null ? subdir : (req.params?.['*'] || 'uploads');
   const yearMonthDay = subdir != null ? '' : (new Date()).toISOString().split('T')[0];
 
@@ -104,18 +106,18 @@ export default async function uploadMultiPart(req, { subdir, originalFilename =
     filepath: path.join(folder, newFilename).replace(/\\/g, '/'),
     filetype: getFileType(newFilename),
     relativeFilepath: path.join(reldirpath, newFilename).replace(/\\/g, '/'),
-    size: Buffer.byteLength(buffer),
+    size: Buffer.byteLength(fileBuffer),
     mimetype: value.mimetype,
     extension: ext.substring(1),
   };
 
   await mkdir(folder, { recursive: true });
-  await writeFileToDisk(file, buffer);
+  await writeFileToDisk(file, fileBuffer);
 
   // move file to s3
   if (config.s3?.endpoint) {
     const s3 = providers();
-    await s3.uploadFile(file.relativeFilepath, buffer);
+    await s3.uploadFile(file.relativeFilepath, fileBuffer);
     if (config.trace) console.log('upload to s3', file.relativeFilepath);
   }
 
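This is the functional change of the release: uploadMultiPart gains a `buffer` option. The streamed upload is now collected into `fileBuffer`, and when `buffer` is truthy the function returns that Buffer right after the emptiness check, skipping the disk write and the optional S3 upload so the caller can store the bytes itself. A hedged route sketch (URL and handler are hypothetical):

    // hypothetical Fastify route using the new option
    fastify.post('/file/custom-upload', async (req) => {
      const fileBuffer = await uploadMultiPart(req, { buffer: true });
      // caller-managed storage, e.g. scan first, then push to a custom backend
      return { size: fileBuffer.length };
    });
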
@@ -1,19 +1,19 @@
-import applyHookSync from '../hook/funcs/applyHookSync.js';
-
-function errorStatus(error) {
-  const hook = applyHookSync('errorStatus', error);
-  if (hook) return hook;
-
-  if (error.routine === 'exec_stmt_raise' && error.file === 'pl_exec.c') {
-    return 601;
-  }
-  if (error.routine === 'ExecConstraints') {
-    return 602;
-  }
-  if (error.type === 'DatabaseError') {
-    return 600;
-  }
-
-  return 500;
-}
-export default errorStatus;
+import applyHookSync from '../hook/funcs/applyHookSync.js';
+
+function errorStatus(error) {
+  const hook = applyHookSync('errorStatus', error);
+  if (hook) return hook;
+
+  if (error.routine === 'exec_stmt_raise' && error.file === 'pl_exec.c') {
+    return 601;
+  }
+  if (error.routine === 'ExecConstraints') {
+    return 602;
+  }
+  if (error.type === 'DatabaseError') {
+    return 600;
+  }
+
+  return 500;
+}
+export default errorStatus;
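
errorStatus lets an `errorStatus` hook claim the error first, then maps database error shapes to custom status codes: a PL/pgSQL RAISE → 601, constraint violations → 602, other database errors → 600, everything else → 500. Illustrative inputs (the field names are exactly those the function checks):

    errorStatus({ routine: 'exec_stmt_raise', file: 'pl_exec.c' }); // -> 601
    errorStatus({ routine: 'ExecConstraints' });                    // -> 602
    errorStatus({ type: 'DatabaseError' });                         // -> 600
    errorStatus(new Error('boom'));                                 // -> 500 (no hook override)
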
@@ -1,8 +1,8 @@
-import redisClients from './funcs/redisClients.js';
-import getRedis from './funcs/getRedis.js';
-
-if (!redisClients[0]) {
-  getRedis({ db: 0 });
-}
-
-export default redisClients[0];
+import redisClients from './funcs/redisClients.js';
+import getRedis from './funcs/getRedis.js';
+
+if (!redisClients[0]) {
+  getRedis({ db: 0 });
+}
+
+export default redisClients[0];
@@ -1,3 +1,3 @@
-const redisClients = {};
-
-export default redisClients;
+const redisClients = {};
+
+export default redisClients;
@@ -1,13 +1,13 @@
-async function getCustomQuery({
-  pg, table, customFilter,
-}) {
-  if (!customFilter) return null;
-  const customFilterList = customFilter?.split(',')?.map((el) => el?.split('_').pop());
-  const { property_json: customFilterSQL } = await pg.one(`select json_agg(json_build_object('id',property_id,'name',property_key,'query',property_text)
-  ) as property_json from admin.properties where property_key is not null and property_entity='customQuery' and object_id=$1`, [table]);
-  const data = customFilterSQL?.length ? customFilterSQL.filter((el) => customFilterList.includes(el.id)) || [] : [];
-  const customQuery = data?.map((el) => el.query).join(' and ');
-  return `${customQuery}`;
-}
-
-export default getCustomQuery;
+async function getCustomQuery({
+  pg, table, customFilter,
+}) {
+  if (!customFilter) return null;
+  const customFilterList = customFilter?.split(',')?.map((el) => el?.split('_').pop());
+  const { property_json: customFilterSQL } = await pg.one(`select json_agg(json_build_object('id',property_id,'name',property_key,'query',property_text)
+  ) as property_json from admin.properties where property_key is not null and property_entity='customQuery' and object_id=$1`, [table]);
+  const data = customFilterSQL?.length ? customFilterSQL.filter((el) => customFilterList.includes(el.id)) || [] : [];
+  const customQuery = data?.map((el) => el.query).join(' and ');
+  return `${customQuery}`;
+}
+
+export default getCustomQuery;
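
getCustomQuery loads saved filter fragments from admin.properties (property_entity = 'customQuery') for a table, keeps those whose property_id matches the trailing `_`-suffix of each comma-separated customFilter token, and joins their property_text with ' and '. An illustrative call (table and filter values assumed, and pg is a connected client):

    // 'report_12,city_7' -> ids ['12', '7'] matched against property_id
    const where = await getCustomQuery({ pg, table: 'gis.roads', customFilter: 'report_12,city_7' });
    // -> the matching property_text snippets joined with ' and ' (an SQL fragment; '' if none match)
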
@@ -1,34 +1,34 @@
-function getTable(table) {
-  const result = table?.toLowerCase()?.replace(/[\n\r]+/g, ' ')?.split(' from ')?.filter((el) => /^[a-z0-9_]+\.[a-z0-9_]+/.test(el))
-    ?.map((el) => el.split(/[ )]/)[0]);
-  return result;
-}
-
-/**
- * @param {Number} opt.json - (1|0) 1 - result as an Object, 0 - as a String
- * @param {String} opt.query - query against the table
- * @param {String} opt.hash - hash data for the request
- */
-const tableSql = {};
-async function getTableSql({
-  pg, body, table, fields,
-}) {
-  if (tableSql[table]) return tableSql[table];
-
-  const fieldList = fields.map((el) => el.name);
-
-  const tableList = body?.sql?.map((el) => getTable(el.sql)).reduce((acc, el) => acc.concat(el), []).filter((el) => fieldList.includes(pg.pk[el]));
-
-  if (!tableList) { tableSql[table] = []; return []; }
-
-  const data = await Promise.all(tableList?.map(async (tableEl) => {
-    const { fields: fieldsEl } = await pg.query(`select * from ${tableEl} limit 0`);
-    return fieldsEl.map((el) => ({ name: el.name, table: tableEl, pk: pg.pk[tableEl] }));
-  }));
-
-  tableSql[table] = data.reduce((acc, el) => acc.concat(el), []);
-
-  return tableSql[table];
-}
-
-export default getTableSql;
+function getTable(table) {
+  const result = table?.toLowerCase()?.replace(/[\n\r]+/g, ' ')?.split(' from ')?.filter((el) => /^[a-z0-9_]+\.[a-z0-9_]+/.test(el))
+    ?.map((el) => el.split(/[ )]/)[0]);
+  return result;
+}
+
+/**
+ * @param {Number} opt.json - (1|0) 1 - result as an Object, 0 - as a String
+ * @param {String} opt.query - query against the table
+ * @param {String} opt.hash - hash data for the request
+ */
+const tableSql = {};
+async function getTableSql({
+  pg, body, table, fields,
+}) {
+  if (tableSql[table]) return tableSql[table];
+
+  const fieldList = fields.map((el) => el.name);
+
+  const tableList = body?.sql?.map((el) => getTable(el.sql)).reduce((acc, el) => acc.concat(el), []).filter((el) => fieldList.includes(pg.pk[el]));
+
+  if (!tableList) { tableSql[table] = []; return []; }
+
+  const data = await Promise.all(tableList?.map(async (tableEl) => {
+    const { fields: fieldsEl } = await pg.query(`select * from ${tableEl} limit 0`);
+    return fieldsEl.map((el) => ({ name: el.name, table: tableEl, pk: pg.pk[tableEl] }));
+  }));
+
+  tableSql[table] = data.reduce((acc, el) => acc.concat(el), []);
+
+  return tableSql[table];
+}
+
+export default getTableSql;
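
getTableSql caches, per template table, the column metadata of every schema-qualified table referenced in the template's SQL whose primary key (via pg.pk) appears in the result-set field list. A sketch with the argument shapes inferred from the code (names illustrative):

    // pg.pk is assumed to map 'schema.table' -> primary-key column name
    const cols = await getTableSql({
      pg,
      table: 'roads',                                      // cache key
      fields: [{ name: 'road_id' }],                       // result-set fields
      body: { sql: [{ sql: 'select * from gis.roads' }] }, // template sql entries
    });
    // -> [{ name, table: 'gis.roads', pk: 'road_id' }, ...] for each column of gis.roads
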
@@ -1,19 +1,19 @@
-import fs from 'fs';
-import path from 'path';
-
-import config from '../../../../config.js';
-
-const loadTemplate = {};
-
-export default async function getTemplateDir(type) {
-  if (!type) return null;
-
-  const cwd = process.cwd();
-  const typeDir = path.join(cwd, (config.templateDir || 'server/templates'), type);
-
-  if (!loadTemplate[type]) {
-    const typeList = fs.existsSync(typeDir) ? fs.readdirSync(typeDir) : [];
-    loadTemplate[type] = typeList;
-  }
-  return loadTemplate[type];
-}
+import fs from 'fs';
+import path from 'path';
+
+import config from '../../../../config.js';
+
+const loadTemplate = {};
+
+export default async function getTemplateDir(type) {
+  if (!type) return null;
+
+  const cwd = process.cwd();
+  const typeDir = path.join(cwd, (config.templateDir || 'server/templates'), type);
+
+  if (!loadTemplate[type]) {
+    const typeList = fs.existsSync(typeDir) ? fs.readdirSync(typeDir) : [];
+    loadTemplate[type] = typeList;
+  }
+  return loadTemplate[type];
+}
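
getTemplateDir memoizes the listing of `<cwd>/<templateDir>/<type>` in module scope, so each type's directory is read from disk at most once per process:

    const tables = await getTemplateDir('table');
    // first call: fs.readdirSync of e.g. server/templates/table (or [] if missing)
    // later calls: the cached array; file names returned depend on the project
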
@@ -1,82 +1,82 @@
-import getSelect from './getSelect.js';
-
-import getFilterSQL from './getFilterSQL/index.js';
-import getTemplate from './getTemplate.js';
-import pgClients from '../../pg/pgClients.js';
-import config from '../../../../config.js';
-import getSelectVal from './metaFormat/getSelectVal.js';
-
-export default async function gisIRColumn({
-  pg = pgClients.client, layer, column, sql, query = '1=1', filter, state, search, custom,
-}) {
-  const time = Date.now();
-
-  const sel = await getSelect(query.cls || column, pg);
-
-  const body = await getTemplate('table', layer);
-  const fData = await getFilterSQL({
-    table: layer,
-    filter,
-    state,
-    search,
-    custom,
-  });
-
-  const { tlist } = await pg.one(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist from pg_class
-  where relkind in ('r','v','m')`);
-
-  const tableName = body?.table || layer;
-  if (!tlist.includes(tableName)) return { error: `table not found: ${tableName}`, status: 400 };
-
-  // eslint-disable-next-line max-len
-  const { fields } = await pg.query(`select * from (${fData?.optimizedSQL || `select * from ${tableName}`})q limit 0`);
-
-  const col = fields.find((el) => el.name === column);
-
-  if (!col) return { status: 404, message: 'not found' };
-  const colField = pg.pgType[col.dataTypeID]?.includes('[]') ? `unnest(${column})` : column;
-
-  const q = `select ${colField} as id, count(*)::int from (
-    ${fData?.optimizedSQL || `select * from ${tableName} where ${body?.query || 'true'}`}
-  )t group by ${colField} order by count desc limit 15`;
-
-  if (sql) return q;
-
-  if (!body?.columns?.length) {
-    const { rows } = await pg.query(q);
-    if (sel?.arr?.length) {
-      rows.forEach((el) => {
-        const data = sel?.find((item) => item.id?.toString() === el.id?.toString());
-        Object.assign(el, data || {});
-      });
-    }
-    return {
-      count: rows?.reduce((acc, el) => acc + el.count, 0),
-      sql: config.local ? q : undefined,
-      rows,
-    };
-  }
-
-  const { rows } = await pg.query(q);
-  const cls = query.cls || body?.columns?.find((el) => el.name === column)?.data || col.data || col.option;
-  const select = await getSelectVal({
-    pg, name: cls, values: rows.map((el) => el.id), ar: 1,
-  });
-  rows.forEach((el) => {
-    if (Array.isArray(select)) {
-      Object.assign(el, select.find((item) => item.id?.toString() === el.id?.toString()) || {});
-    }
-    else if (typeof select?.[el.id] === 'string') {
-      Object.assign(el, { text: select?.[el.id] });
-    }
-    else {
-      Object.assign(el, select?.[el.id] || {});
-    }
-  });
-  return {
-    time: Date.now() - time,
-    count: rows.reduce((acc, el) => acc + el.count, 0),
-    sql: config.local ? q : undefined,
-    rows,
-  };
-}
+import getSelect from './getSelect.js';
+
+import getFilterSQL from './getFilterSQL/index.js';
+import getTemplate from './getTemplate.js';
+import pgClients from '../../pg/pgClients.js';
+import config from '../../../../config.js';
+import getSelectVal from './metaFormat/getSelectVal.js';
+
+export default async function gisIRColumn({
+  pg = pgClients.client, layer, column, sql, query = '1=1', filter, state, search, custom,
+}) {
+  const time = Date.now();
+
+  const sel = await getSelect(query.cls || column, pg);
+
+  const body = await getTemplate('table', layer);
+  const fData = await getFilterSQL({
+    table: layer,
+    filter,
+    state,
+    search,
+    custom,
+  });
+
+  const { tlist } = await pg.one(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist from pg_class
+  where relkind in ('r','v','m')`);
+
+  const tableName = body?.table || layer;
+  if (!tlist.includes(tableName)) return { error: `table not found: ${tableName}`, status: 400 };
+
+  // eslint-disable-next-line max-len
+  const { fields } = await pg.query(`select * from (${fData?.optimizedSQL || `select * from ${tableName}`})q limit 0`);
+
+  const col = fields.find((el) => el.name === column);
+
+  if (!col) return { status: 404, message: 'not found' };
+  const colField = pg.pgType[col.dataTypeID]?.includes('[]') ? `unnest(${column})` : column;
+
+  const q = `select ${colField} as id, count(*)::int from (
+    ${fData?.optimizedSQL || `select * from ${tableName} where ${body?.query || 'true'}`}
+  )t group by ${colField} order by count desc limit 15`;
+
+  if (sql) return q;
+
+  if (!body?.columns?.length) {
+    const { rows } = await pg.query(q);
+    if (sel?.arr?.length) {
+      rows.forEach((el) => {
+        const data = sel?.find((item) => item.id?.toString() === el.id?.toString());
+        Object.assign(el, data || {});
+      });
+    }
+    return {
+      count: rows?.reduce((acc, el) => acc + el.count, 0),
+      sql: config.local ? q : undefined,
+      rows,
+    };
+  }
+
+  const { rows } = await pg.query(q);
+  const cls = query.cls || body?.columns?.find((el) => el.name === column)?.data || col.data || col.option;
+  const select = await getSelectVal({
+    pg, name: cls, values: rows.map((el) => el.id), ar: 1,
+  });
+  rows.forEach((el) => {
+    if (Array.isArray(select)) {
+      Object.assign(el, select.find((item) => item.id?.toString() === el.id?.toString()) || {});
+    }
+    else if (typeof select?.[el.id] === 'string') {
+      Object.assign(el, { text: select?.[el.id] });
+    }
+    else {
+      Object.assign(el, select?.[el.id] || {});
+    }
+  });
+  return {
+    time: Date.now() - time,
+    count: rows.reduce((acc, el) => acc + el.count, 0),
+    sql: config.local ? q : undefined,
+    rows,
+  };
+}
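
gisIRColumn produces a top-15 value histogram for one column of a layer: it validates the target against pg_class, wraps the (optionally filter-optimized) source query in a GROUP BY over the column (unnest()-ing array-typed columns), and decorates each value with display text from the layer's dictionary. An illustrative call (layer and column names assumed; requires a reachable Postgres via pgClients):

    const stats = await gisIRColumn({ layer: 'gis.roads', column: 'surface_type' });
    // -> { time, count, rows: [{ id, count, text? }], sql (included only when config.local) }
    // pass sql: 1 to get the generated GROUP BY statement back instead of executing it
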
@@ -1 +1 @@
-export default {};
+export default {};
@@ -1 +1 @@
-export default {};
+export default {};
@@ -1 +1 @@
-export default [];
+export default [];