@opengis/fastify-table 2.0.106 → 2.0.108

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. package/dist/config.d.ts.map +1 -1
  2. package/dist/index.d.ts.map +1 -1
  3. package/dist/index.js +5 -0
  4. package/dist/script/adduser +15 -0
  5. package/dist/script/dump.js +176 -0
  6. package/dist/script/migrate.js +25 -0
  7. package/dist/server/plugins/auth/funcs/verifyPassword.d.ts.map +1 -1
  8. package/dist/server/plugins/auth/funcs/verifyPassword.js +3 -1
  9. package/dist/server/plugins/crud/funcs/dataInsert.js +2 -2
  10. package/dist/server/plugins/crud/funcs/dataUpdate.d.ts.map +1 -1
  11. package/dist/server/plugins/crud/funcs/dataUpdate.js +9 -6
  12. package/dist/server/plugins/crud/funcs/validateData.js +1 -1
  13. package/dist/server/plugins/file/providers/fs.js +2 -2
  14. package/dist/server/plugins/file/providers/s3/funcs/downloadFile.d.ts.map +1 -1
  15. package/dist/server/plugins/file/providers/s3/funcs/downloadFile.js +1 -2
  16. package/dist/server/plugins/file/providers/s3/funcs/fileExists.d.ts.map +1 -1
  17. package/dist/server/plugins/file/providers/s3/funcs/fileExists.js +1 -2
  18. package/dist/server/plugins/file/providers/s3/funcs/uploadFile.d.ts.map +1 -1
  19. package/dist/server/plugins/file/providers/s3/funcs/uploadFile.js +1 -2
  20. package/dist/server/plugins/file/providers/s3/index.js +4 -4
  21. package/dist/server/plugins/logger/getLogger.d.ts.map +1 -1
  22. package/dist/server/plugins/logger/getLogger.js +14 -11
  23. package/dist/server/plugins/logger/index.d.ts.map +1 -1
  24. package/dist/server/plugins/logger/index.js +5 -4
  25. package/dist/server/plugins/migration/exec.migrations.js +6 -6
  26. package/dist/server/plugins/pg/funcs/getMeta.d.ts.map +1 -1
  27. package/dist/server/plugins/pg/funcs/getMeta.js +3 -5
  28. package/dist/server/plugins/pg/funcs/getPG.d.ts.map +1 -1
  29. package/dist/server/plugins/pg/funcs/getPG.js +2 -1
  30. package/dist/server/plugins/pg/funcs/getPGAsync.js +2 -2
  31. package/dist/server/plugins/pg/funcs/init.d.ts +1 -1
  32. package/dist/server/plugins/pg/funcs/init.d.ts.map +1 -1
  33. package/dist/server/plugins/pg/funcs/init.js +43 -36
  34. package/dist/server/plugins/pg/funcs/pool.d.ts.map +1 -1
  35. package/dist/server/plugins/pg/funcs/pool.js +12 -18
  36. package/dist/server/plugins/pg/index.d.ts.map +1 -1
  37. package/dist/server/plugins/pg/index.js +3 -2
  38. package/dist/server/plugins/redis/funcs/getRedis.d.ts.map +1 -1
  39. package/dist/server/plugins/redis/funcs/getRedis.js +7 -5
  40. package/dist/server/plugins/redis/index.d.ts.map +1 -1
  41. package/dist/server/plugins/redis/index.js +4 -1
  42. package/dist/server/plugins/sqlite/index.d.ts.map +1 -1
  43. package/dist/server/plugins/sqlite/index.js +7 -3
  44. package/dist/server/plugins/table/funcs/getFilter.d.ts +1 -1
  45. package/dist/server/plugins/table/funcs/getFilter.d.ts.map +1 -1
  46. package/dist/server/plugins/table/funcs/getFilter.js +14 -1
  47. package/dist/server/plugins/table/funcs/getSelectMeta.d.ts.map +1 -1
  48. package/dist/server/plugins/table/funcs/getSelectMeta.js +2 -4
  49. package/dist/server/plugins/table/funcs/gisIRColumn.d.ts +2 -2
  50. package/dist/server/plugins/table/funcs/gisIRColumn.js +1 -1
  51. package/dist/server/plugins/upload/finishUpload.d.ts +9 -0
  52. package/dist/server/plugins/upload/finishUpload.d.ts.map +1 -0
  53. package/dist/server/plugins/upload/finishUpload.js +33 -0
  54. package/dist/server/plugins/upload/getUploadStatus.d.ts +5 -0
  55. package/dist/server/plugins/upload/getUploadStatus.d.ts.map +1 -0
  56. package/dist/server/plugins/upload/getUploadStatus.js +36 -0
  57. package/dist/server/plugins/upload/index.d.ts +6 -0
  58. package/dist/server/plugins/upload/index.d.ts.map +1 -0
  59. package/dist/server/plugins/upload/index.js +12 -0
  60. package/dist/server/plugins/upload/startUpload.d.ts +8 -0
  61. package/dist/server/plugins/upload/startUpload.d.ts.map +1 -0
  62. package/dist/server/plugins/upload/startUpload.js +53 -0
  63. package/dist/server/plugins/upload/uploadChunk.d.ts +9 -0
  64. package/dist/server/plugins/upload/uploadChunk.d.ts.map +1 -0
  65. package/dist/server/plugins/upload/uploadChunk.js +47 -0
  66. package/dist/server/plugins/util/funcs/unflattenObject.d.ts.map +1 -1
  67. package/dist/server/plugins/util/funcs/unflattenObject.js +5 -3
  68. package/dist/server/routes/access/controllers/access.group.d.ts +2 -2
  69. package/dist/server/routes/access/controllers/access.group.d.ts.map +1 -1
  70. package/dist/server/routes/access/controllers/access.group.js +0 -1
  71. package/dist/server/routes/access/controllers/access.group.post.d.ts +2 -2
  72. package/dist/server/routes/access/controllers/access.group.post.d.ts.map +1 -1
  73. package/dist/server/routes/access/controllers/access.group.post.js +0 -1
  74. package/dist/server/routes/auth/controllers/2factor/providers/totp.d.ts.map +1 -1
  75. package/dist/server/routes/auth/controllers/2factor/providers/totp.js +1 -1
  76. package/dist/server/routes/auth/controllers/core/registration.d.ts +1 -1
  77. package/dist/server/routes/auth/controllers/core/registration.d.ts.map +1 -1
  78. package/dist/server/routes/auth/controllers/core/registration.js +37 -9
  79. package/dist/server/routes/auth/controllers/core/updateUserInfo.js +1 -1
  80. package/dist/server/routes/auth/controllers/jwt/authorize.js +5 -5
  81. package/dist/server/routes/auth/controllers/jwt/token.d.ts.map +1 -1
  82. package/dist/server/routes/auth/controllers/jwt/token.js +10 -12
  83. package/dist/server/routes/cron/controllers/cronApi.d.ts +1 -1
  84. package/dist/server/routes/cron/controllers/cronApi.d.ts.map +1 -1
  85. package/dist/server/routes/cron/controllers/cronApi.js +5 -3
  86. package/dist/server/routes/crud/controllers/insert.d.ts +1 -4
  87. package/dist/server/routes/crud/controllers/insert.d.ts.map +1 -1
  88. package/dist/server/routes/crud/controllers/insert.js +24 -16
  89. package/dist/server/routes/crud/controllers/table.d.ts.map +1 -1
  90. package/dist/server/routes/crud/controllers/table.js +13 -6
  91. package/dist/server/routes/crud/controllers/update.d.ts.map +1 -1
  92. package/dist/server/routes/crud/controllers/update.js +23 -15
  93. package/dist/server/routes/file/controllers/delete.d.ts +1 -15
  94. package/dist/server/routes/file/controllers/delete.d.ts.map +1 -1
  95. package/dist/server/routes/file/controllers/delete.js +13 -20
  96. package/dist/server/routes/file/controllers/download.d.ts +2 -2
  97. package/dist/server/routes/file/controllers/download.d.ts.map +1 -1
  98. package/dist/server/routes/file/controllers/download.js +39 -30
  99. package/dist/server/routes/file/controllers/files.d.ts +2 -1
  100. package/dist/server/routes/file/controllers/files.d.ts.map +1 -1
  101. package/dist/server/routes/file/controllers/files.js +15 -11
  102. package/dist/server/routes/file/controllers/resize.d.ts +1 -2
  103. package/dist/server/routes/file/controllers/resize.d.ts.map +1 -1
  104. package/dist/server/routes/file/controllers/resize.js +17 -6
  105. package/dist/server/routes/file/controllers/upload.d.ts.map +1 -1
  106. package/dist/server/routes/file/controllers/upload.js +17 -16
  107. package/dist/server/routes/file/controllers/uploadImage.d.ts +11 -13
  108. package/dist/server/routes/file/controllers/uploadImage.d.ts.map +1 -1
  109. package/dist/server/routes/file/controllers/uploadImage.js +13 -15
  110. package/dist/server/routes/logger/controllers/logger.file.js +1 -1
  111. package/dist/server/routes/menu/controllers/interfaces.d.ts +1 -7
  112. package/dist/server/routes/menu/controllers/interfaces.d.ts.map +1 -1
  113. package/dist/server/routes/table/controllers/card.d.ts +1 -1
  114. package/dist/server/routes/table/controllers/card.d.ts.map +1 -1
  115. package/dist/server/routes/table/controllers/card.js +15 -9
  116. package/dist/server/routes/table/controllers/filter.d.ts +1 -1
  117. package/dist/server/routes/table/controllers/filter.d.ts.map +1 -1
  118. package/dist/server/routes/table/controllers/filter.js +9 -2
  119. package/dist/server/routes/table/controllers/form.d.ts +1 -1
  120. package/dist/server/routes/table/controllers/form.d.ts.map +1 -1
  121. package/dist/server/routes/table/controllers/form.js +8 -5
  122. package/dist/server/routes/table/controllers/search.d.ts +1 -1
  123. package/dist/server/routes/table/controllers/search.d.ts.map +1 -1
  124. package/dist/server/routes/table/controllers/search.js +5 -6
  125. package/dist/server/routes/table/controllers/suggest.d.ts +1 -1
  126. package/dist/server/routes/table/controllers/suggest.d.ts.map +1 -1
  127. package/dist/server/routes/table/controllers/suggest.js +30 -15
  128. package/dist/server/routes/table/functions/getData.d.ts +1 -1
  129. package/dist/server/routes/table/functions/getData.d.ts.map +1 -1
  130. package/dist/server/routes/table/functions/getData.js +60 -45
  131. package/dist/server/routes/upload/index.d.ts +2 -0
  132. package/dist/server/routes/upload/index.d.ts.map +1 -0
  133. package/dist/server/routes/upload/index.js +72 -0
  134. package/dist/server/types/core.d.ts +7 -1
  135. package/dist/server/types/core.d.ts.map +1 -1
  136. package/dist/utils.d.ts +5 -0
  137. package/dist/utils.d.ts.map +1 -1
  138. package/dist/utils.js +5 -0
  139. package/package.json +3 -3

package/dist/server/plugins/pg/funcs/init.js
@@ -5,38 +5,40 @@ import getRedis from "../../redis/funcs/getRedis.js";
  import logger from "../../logger/getLogger.js";
  const rclient = getRedis({ db: 0 });
  async function init(client) {
- if (!client?.options?.database) {
+ if (!client)
+ return;
+ // for unit tests
+ const options = client.options || {
+ database: client.database,
+ user: client.user,
+ password: client.password,
+ port: client.port,
+ host: client.host,
+ };
+ if (!client || !client.query || !client.connect) {
  return;
  }
- const { pgType, pk } = await client
- .query(`SELECT
- (
- SELECT
- json_object_agg(
- conrelid:: regclass,
- (
- SELECT
- attname
- FROM
- pg_attribute
- WHERE
- attrelid = c.conrelid
- AND attnum = c.conkey [1]
- )
- )
- FROM
- pg_constraint c
- WHERE
- contype = 'p'
- AND connamespace:: regnamespace:: text NOT IN ('sde')
- ) AS pk,
- (
- SELECT
- json_object_agg(t.oid:: text, pg_catalog.format_type(t.oid, NULL))
- FROM
- pg_catalog.pg_type t
- ) AS "pgType"`)
- .then((d) => d.rows[0]);
+ const pgType = await client
+ .query(`SELECT json_object_agg(t.oid:: text, pg_catalog.format_type(t.oid, NULL)) FROM pg_catalog.pg_type t`)
+ .then((el) => el.rows[0]?.json_object_agg || {});
+ const pks = await client
+ .query(`
+ SELECT
+ connamespace::regnamespace::text,
+ conrelid::regclass,
+ (
+ SELECT attname FROM pg_attribute WHERE attrelid = c.conrelid AND attnum = c.conkey[1]
+ ) as pk
+ FROM pg_constraint c
+ WHERE contype = 'p'
+ AND connamespace::regnamespace::text NOT IN ('sde')`)
+ .then((el) => el.rows || []);
+ const pk = pks.reduce((acc, curr) => ({
+ ...acc,
+ [curr.connamespace === "public"
+ ? `${curr.connamespace}.${curr.conrelid}`
+ : curr.conrelid]: curr.pk,
+ }), {});
  const tlist = await client
  .query(`SELECT
  array_agg(
@@ -72,6 +74,8 @@ async function init(client) {
  .then((el) => el.rows || []);
  const relkinds = rows.reduce((acc, curr) => Object.assign(acc, { [curr.tname]: curr.relkind }), {});
  async function query(q, args = []) {
+ if (!client)
+ throw new Error("empty pg client");
  try {
  const data = await client.query(q, args);
  return data;
@@ -86,7 +90,7 @@ async function init(client) {
  }
  async function querySafe(q, param) {
  const pg1 = new pg.Pool({
- ...client.options,
+ ...options,
  statement_timeout: param?.timeout || 100000000,
  });
  try {
@@ -97,10 +101,10 @@ async function init(client) {
  }
  catch (err) {
  if (err.code === "57014") {
- console.warn("pg.querySafe timeout", q);
+ console.warn("⚠️ pg.querySafe timeout", q);
  return { rows: [], timeout: true };
  }
- console.warn("pg.querySafe error", q);
+ console.warn("⚠️ pg.querySafe error", q);
  throw err;
  }
  finally {
@@ -114,6 +118,8 @@ async function init(client) {
  return result;
  }
  async function queryNotice(q, args, cb = () => { }) {
+ if (!client)
+ throw new Error("empty pg client");
  const clientCb = await client.connect();
  clientCb.on("notice", (e) => {
  cb(e.message);
@@ -159,6 +165,8 @@ async function init(client) {
  return data;
  }
  Object.assign(client, {
+ ...options,
+ options,
  one,
  pgType,
  pk,
@@ -168,11 +176,10 @@ async function init(client) {
  queryNotice,
  querySafe,
  });
- // client.init = undefined;
- console.log("New client init finished", client.options?.database);
+ console.log("New client init finished", client.database);
  logger.file("pg", {
  message: "client init finished",
- database: client.options?.database,
+ database: client.database,
  });
  }
  // export default client;
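
The rewritten init() splits the old nested query in two and assembles the primary-key map in JS. Because conrelid::regclass prints search_path tables without a schema prefix, the reduce prepends "public." for public-schema rows so that keys come out as "schema.table". A minimal sketch of the shape the new reduce appears to produce (table and column names are made up for illustration):

    // Hypothetical shape of the pk map built in init.js
    const pk = {
      "public.users": "id",        // regclass printed just "users", so "public." was prepended
      "data.measurements": "gid",  // non-public schemas typically print as schema.table already
    };
    // a later lookup would be pk[`${schema}.${table}`]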

package/dist/server/plugins/pg/funcs/pool.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/pool.ts"],"names":[],"mappings":"AAcA,eAAO,MAAM,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,CAAC;yBAK7B,QAAO,GAAQ;AAA/B,wBAsEE"}
+ {"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/pool.ts"],"names":[],"mappings":"AAgBA,eAAO,MAAM,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,CAAC;yBAE7B,QAAO,GAAQ;AAA/B,wBAgEE"}

package/dist/server/plugins/pg/funcs/pool.js
@@ -6,9 +6,8 @@ types.setTypeParser(1114, (stringValue) => stringValue);
  import pgClients from "../pgClients.js";
  import init from "./init.js";
  import config from "../../../../config.js";
+ config.ready = config.ready || {};
  export const Pools = {};
- const errored = {};
- const inited = {};
  export default (param = {}) => {
  if (!config.pg)
  return null;
@@ -26,24 +25,24 @@ export default (param = {}) => {
  }
  });
  pool.on("error", (err) => {
- console.warn("Unexpected error on idle client", param.database);
- errored[param.database] = 1;
+ console.warn("⚠️ Unexpected error on idle client", param.database);
+ config.ready[`pg:${param.database}`] = false;
  logger.file("pg", {
  error: err.toString(),
  database: param.database,
  });
  if (config.trace) {
- console.warn("Unexpected error on idle client details:", err.toString(), err.stack);
+ console.warn("⚠️ Unexpected error on idle client details:", err.toString(), err.stack);
  }
  });
- pool.on("connect", (client) => {
+ pool.on("connect", () => {
  // skip auto drops and reconnects handled by pg internally
- if (!inited[param.database] || errored[param.database]) {
+ if (!config.ready[`pg:${param.database}`]) {
  logger.file("pg", {
  message: "client connected",
  database: param.database,
  });
- inited[param.database] = 1;
+ config.ready[`pg:${param.database}`] = true;
  }
  if (config.trace) {
  console.log("PG client connected", param.database);
@@ -52,17 +51,12 @@ export default (param = {}) => {
  if (pgClients[name] && !pgClients[name]?.tlist) {
  init(Pools[name]);
  }
- Pools[name] = client;
+ Pools[name] = pool;
  });
- pool.on("acquire", () => {
- if (config.trace) {
- console.log("PG client acquired", param.database);
- }
- });
- pool.on("remove", () => {
- if (config.trace) {
+ if (config.trace) {
+ pool.on("remove", () => {
  console.log("PG Client removed from the pool.", param.database);
- }
- });
+ });
+ }
  return pool;
  };
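
pool.js drops the module-local inited/errored maps in favour of shared config.ready flags keyed "pg:<database>", set to true on "connect" and to false when an idle client errors, and it now stores the pool itself (not an individual client) in Pools[name]. A hypothetical sketch of how those flags could be read elsewhere; this helper is not part of the package:

    // Hypothetical readiness helper built on the new config.ready flags
    function pgReady(config, database) {
      return config.ready?.[`pg:${database}`] === true;
    }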

package/dist/server/plugins/pg/index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/pg/index.ts"],"names":[],"mappings":"AA+BA,iBAAS,MAAM,CAAC,OAAO,EAAE,GAAG,QAwB3B;AAED,eAAe,MAAM,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/pg/index.ts"],"names":[],"mappings":"AAgCA,iBAAS,MAAM,CAAC,OAAO,EAAE,GAAG,QAwB3B;AAED,eAAe,MAAM,CAAC"}

package/dist/server/plugins/pg/index.js
@@ -3,8 +3,9 @@ import pgClients from "./pgClients.js";
  import getPGAsync from "./funcs/getPGAsync.js";
  import logger from "../logger/getLogger.js";
  function close() {
- Object.keys(pgClients).forEach((el) => {
- pgClients[el].end();
+ Object.keys(pgClients).forEach((key) => {
+ console.log("Closing pg client", key);
+ pgClients[key].end();
  });
  }
  async function getHeadersPG(req) {

package/dist/server/plugins/redis/funcs/getRedis.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"getRedis.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/redis/funcs/getRedis.ts"],"names":[],"mappings":"AAQA,iBAAS,QAAQ,CACf,EACE,EAAE,EACF,IAAI,EACJ,IAAI,EACJ,WAAkB,EAClB,aAAwB,EACxB,kBAAyB,EACzB,WAAkB,EAClB,cAAqB,EACrB,oBAAwB,GACzB,GAAE;IACD,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,aAAa,CAAC,EAAE,GAAG,CAAC;IACpB,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,oBAAoB,CAAC,EAAE,MAAM,CAAC;CACnB,OAwCd;AAED,eAAe,QAAQ,CAAC"}
+ {"version":3,"file":"getRedis.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/redis/funcs/getRedis.ts"],"names":[],"mappings":"AAQA,iBAAS,QAAQ,CACf,EACE,EAAE,EACF,IAAI,EACJ,IAAI,EACJ,WAAmB,EACnB,aAA8D,EAC9D,kBAAyB,EACzB,WAAkB,EAClB,cAAqB,EACrB,oBAAwB,GACzB,GAAE;IACD,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,aAAa,CAAC,EAAE,GAAG,CAAC;IACpB,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,oBAAoB,CAAC,EAAE,MAAM,CAAC;CACnB,OA6Cd;AAED,eAAe,QAAQ,CAAC"}

package/dist/server/plugins/redis/funcs/getRedis.js
@@ -3,10 +3,15 @@ import config from "../../../../config.js";
  import logger from "../../logger/getLogger.js";
  import redisClients from "./redisClients.js";
  config.ready = config.ready || {};
- function getRedis({ db, host, port, closeClient = true, retryStrategy = () => { }, enableOfflineQueue = true, lazyConnect = true, connectTimeout = 2000, maxRetriesPerRequest = 0, } = { db: 0 }) {
+ function getRedis({ db, host, port, closeClient = false, retryStrategy = (times) => Math.min(times * 500, 5000), enableOfflineQueue = true, lazyConnect = true, connectTimeout = 2000, maxRetriesPerRequest = 1, } = { db: 0 }) {
  if (!config.redis && !host)
  return null;
  const key = host || port ? [host, port, db].join("-") : db;
+ // try to reconnect after connection error / disconnect
+ if (redisClients[key]?.status === "end" &&
+ typeof redisClients[key].connect === "function") {
+ redisClients[key].connect();
+ }
  if (redisClients[key]) {
  return redisClients[key];
  }
@@ -28,11 +33,8 @@ function getRedis({ db, host, port, closeClient = true, retryStrategy = () => {
  config.ready[`redis:${key}`] = true;
  });
  redisClients[key].on("error", (err) => {
- console.warn("Ignored redis error:", err.message);
+ console.warn("⚠️ Ignored redis error:", err.message);
  logger.file("redis", { error: err.toString() });
- if (err.code === "ETIMEDOUT") {
- // redisClients[key].disconnect();
- }
  });
  console.log("redis connected", db, host, port);
  return redisClients[key];
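
getRedis() changes its defaults: closeClient is now false, maxRetriesPerRequest is 1, the retry strategy backs off linearly with a five-second cap, and a cached client whose status is "end" is reconnected on the next call. The delays the new strategy produces are easy to verify:

    // (times) => Math.min(times * 500, 5000)
    [1, 2, 5, 10, 20].map((times) => Math.min(times * 500, 5000)); // [500, 1000, 2500, 5000, 5000] ms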

package/dist/server/plugins/redis/index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/redis/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAC;AAQ/C,iBAAe,MAAM,CAAC,OAAO,EAAE,eAAe,iBAE7C;AAED,eAAe,MAAM,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/redis/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAC;AAW/C,iBAAe,MAAM,CAAC,OAAO,EAAE,eAAe,iBAE7C;AAED,eAAe,MAAM,CAAC"}

package/dist/server/plugins/redis/index.js
@@ -1,6 +1,9 @@
  import redisClients from "./funcs/redisClients.js";
  function close() {
- Object.keys(redisClients).forEach((key) => redisClients[key].quit());
+ Object.keys(redisClients).forEach((key) => {
+ console.log("Closing redis client", key);
+ redisClients[key].quit();
+ });
  }
  async function plugin(fastify) {
  fastify.addHook("onClose", close);

package/dist/server/plugins/sqlite/index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/sqlite/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAM1C,iBAAe,QAAQ,CAAC,GAAG,EAAE,eAAe,iBAI3C;;AAED,wBAA4B"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/sqlite/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAa1C,iBAAe,QAAQ,CAAC,GAAG,EAAE,eAAe,iBAE3C;;AAED,wBAA4B"}

package/dist/server/plugins/sqlite/index.js
@@ -1,8 +1,12 @@
  import fp from "fastify-plugin";
  import sqliteClients from "./sqliteClients.js";
- async function dbPlugin(app) {
- app.addHook("onClose", async () => {
- Object.keys(sqliteClients).forEach((key) => sqliteClients[key].close());
+ function close() {
+ Object.keys(sqliteClients).forEach((key) => {
+ console.log("Closing sqlite client", key);
+ sqliteClients[key].close();
  });
  }
+ async function dbPlugin(app) {
+ app.addHook("onClose", close);
+ }
  export default fp(dbPlugin);

package/dist/server/plugins/table/funcs/getFilter.d.ts
@@ -7,5 +7,5 @@ export default function getFilter({ pg, table, filter, custom, state, search, us
  state?: string;
  search?: string;
  user?: Record<string, any>;
- }, reply: any): Promise<any>;
+ }, reply?: any): Promise<any>;
  //# sourceMappingURL=getFilter.d.ts.map

package/dist/server/plugins/table/funcs/getFilter.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"getFilter.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getFilter.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAGzD,wBAA8B,SAAS,CACrC,EACE,EAAqB,EACrB,KAAK,EACL,MAAM,EACN,MAAM,EACN,KAAK,EACL,MAAM,EACN,IAAI,GACL,EAAE;IACD,EAAE,EAAE,UAAU,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B,EACD,KAAK,EAAE,GAAG,gBAoBX"}
+ {"version":3,"file":"getFilter.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getFilter.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAmBzD,wBAA8B,SAAS,CACrC,EACE,EAAqB,EACrB,KAAK,EACL,MAAM,EACN,MAAM,EACN,KAAK,EACL,MAAM,EACN,IAAI,GACL,EAAE;IACD,EAAE,EAAE,UAAU,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B,EACD,KAAK,CAAC,EAAE,GAAG,gBAoBZ"}

package/dist/server/plugins/table/funcs/getFilter.js
@@ -1,5 +1,18 @@
  import routeFilter from "../../../routes/table/controllers/filter.js";
  import pgClients from "../../pg/pgClients.js";
+ const reply1 = {
+ response: {},
+ // redirect: (txt) => txt,
+ redirect: (txt) => Object.assign(reply1.response, {
+ body: txt,
+ statusCode: reply1.response.statusCode || 200,
+ }),
+ status: (statusCode) => {
+ Object.assign(reply1.response, { status: statusCode });
+ return reply1;
+ },
+ send: (txt) => Object.assign(reply1.response, txt),
+ };
  export default async function getFilter({ pg = pgClients.client, table, filter, custom, state, search, user, }, reply) {
  const params = { table };
  const query = {
@@ -13,6 +26,6 @@ export default async function getFilter({ pg = pgClients.client, table, filter,
  params,
  query,
  user,
- }, reply, 1);
+ }, reply || reply1, 1);
  return result;
  }
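
The new reply1 object stubs just enough of Fastify's reply interface (status, send, redirect) for routeFilter to run outside an HTTP request, which matches reply becoming optional in getFilter.d.ts above. A hedged sketch of a programmatic call; the table name and user object are made-up examples:

    // Hypothetical programmatic use of the updated getFilter
    const filters = await getFilter({ table: "public.users", user: { uid: 1 } });
    // with no reply argument, the reply1 stub collects status/send/redirect output in reply1.response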

package/dist/server/plugins/table/funcs/getSelectMeta.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"getSelectMeta.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getSelectMeta.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAWzD,wBAA8B,aAAa,CAAC,EAC1C,IAAI,EACJ,UAAU,EACV,GAAG,EACH,OAAO,EACP,MAAM,EACN,EAAqB,GACtB,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,EAAE,CAAC,EAAE,UAAU,CAAC;CACjB,gBA6GA"}
+ {"version":3,"file":"getSelectMeta.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getSelectMeta.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAWzD,wBAA8B,aAAa,CAAC,EAC1C,IAAI,EACJ,UAAU,EACV,GAAG,EACH,OAAO,EACP,MAAM,EACN,EAAqB,GACtB,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,EAAE,CAAC,EAAE,UAAU,CAAC;CACjB,gBA4GA"}

package/dist/server/plugins/table/funcs/getSelectMeta.js
@@ -1,6 +1,6 @@
  import logger from "../../logger/getLogger.js";
  import autoIndex from "../../pg/funcs/autoIndex.js";
- import getPG from "../../pg/funcs/getPG.js";
+ import getPGAsync from "../../pg/funcs/getPGAsync.js";
  import pgClients from "../../pg/pgClients.js";
  import getSelect from "./getSelect.js";
  const limit = 50;
@@ -17,9 +17,7 @@ export default async function getSelectMeta({ name, startsWith, key, nocache, pa
  }
  const cls = await getSelect(name, pg, nocache);
  const db = typeof cls?.db === "string" ? { database: cls.db } : cls?.db;
- const pg1 = cls?.db ? getPG(db) : pg;
- if (!pg1?.pk)
- await pg1?.init?.();
+ const pg1 = cls?.db ? await getPGAsync(db) : pg;
  if (!cls)
  return null;
  if (cls.arr)

package/dist/server/plugins/table/funcs/gisIRColumn.d.ts
@@ -28,7 +28,7 @@ export default function gisIRColumn({ pg, layer, column, sql, query, filter, sta
  } | {
  count: any;
  sql: string | undefined;
- rows: any[];
+ rows: any;
  error?: undefined;
  status?: undefined;
  message?: undefined;
@@ -37,7 +37,7 @@ export default function gisIRColumn({ pg, layer, column, sql, query, filter, sta
  time: number;
  count: any;
  sql: string | undefined;
- rows: any[];
+ rows: any;
  error?: undefined;
  status?: undefined;
  message?: undefined;

package/dist/server/plugins/table/funcs/gisIRColumn.js
@@ -19,7 +19,7 @@ export default async function gisIRColumn({ pg = pgClients.client, layer, column
  const { tlist } = await pg
  .query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist from pg_class
  where relkind in ('r','v','m')`)
- .then((el) => el.rows[0]);
+ .then((el) => el.rows?.[0] || {});
  const tableName = body?.table || layer;
  if (!tlist.includes(tableName))
  return { error: `table not found: ${tableName}`, status: 400 };

package/dist/server/plugins/upload/finishUpload.d.ts
@@ -0,0 +1,9 @@
+ export default function finishUpload({ host, id, }: {
+ host?: string;
+ id: string;
+ }): Promise<{
+ error?: string;
+ success?: boolean;
+ code: number;
+ }>;
+ //# sourceMappingURL=finishUpload.d.ts.map

package/dist/server/plugins/upload/finishUpload.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"finishUpload.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/finishUpload.ts"],"names":[],"mappings":"AAMA,wBAA8B,YAAY,CAAC,EACzC,IAAI,EACJ,EAAE,GACH,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;CACZ,GAAG,OAAO,CAAC;IAAE,KAAK,CAAC,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,OAAO,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAC,CAqC/D"}

package/dist/server/plugins/upload/finishUpload.js
@@ -0,0 +1,33 @@
+ import path from "node:path";
+ import { existsSync } from "node:fs";
+ import { readFile, writeFile } from "node:fs/promises";
+ import { prefix, metaDir, fetchTimeoutMs } from "./index.js";
+ export default async function finishUpload({ host, id, }) {
+ if (!host) {
+ const metaExists = existsSync(path.join(metaDir, `${id}.json`));
+ if (!metaExists) {
+ return { error: "upload not found: " + id, code: 404 };
+ }
+ const meta = JSON.parse(await readFile(path.join(metaDir, `${id}.json`), "utf8"));
+ if (meta.uploaded !== meta.size) {
+ return { error: "Upload not complete", code: 400 };
+ }
+ const metaData = {
+ ...meta,
+ uploadDate: new Date().toISOString(),
+ exists: true,
+ finished: true,
+ };
+ const fileBytes = Buffer.from(JSON.stringify(metaData, null, 2));
+ // save metadata
+ await writeFile(path.join(metaDir, `${id}.json`), fileBytes);
+ return { success: true, code: 200 };
+ }
+ const res = await fetch(`${host}/${prefix}/${id}/finish`, {
+ method: "POST",
+ signal: AbortSignal.timeout(fetchTimeoutMs),
+ }).catch((err) => ({
+ json: () => Promise.resolve({ error: err.toString(), code: 501 }),
+ }));
+ return res.json();
+ }

package/dist/server/plugins/upload/getUploadStatus.d.ts
@@ -0,0 +1,5 @@
+ export default function getUploadStatus({ host, id, }: {
+ host?: string;
+ id: string;
+ }): Promise<any>;
+ //# sourceMappingURL=getUploadStatus.d.ts.map

package/dist/server/plugins/upload/getUploadStatus.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"getUploadStatus.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/getUploadStatus.ts"],"names":[],"mappings":"AAaA,wBAA8B,eAAe,CAAC,EAC5C,IAAI,EACJ,EAAE,GACH,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;CACZ,gBAoCA"}

package/dist/server/plugins/upload/getUploadStatus.js
@@ -0,0 +1,36 @@
+ import path from "node:path";
+ import { existsSync } from "node:fs";
+ import { readFile } from "node:fs/promises";
+ import { prefix, metaDir, uploadChunkDirectory, fetchTimeoutMs, } from "./index.js";
+ import isFileExists from "../file/isFileExists.js";
+ export default async function getUploadStatus({ host, id, }) {
+ // return local file metadata
+ if (!host) {
+ const metaExists = existsSync(path.join(metaDir, `${id}.json`));
+ // check file upload status: finished/inprogress
+ const meta = metaExists
+ ? JSON.parse(await readFile(path.join(metaDir, `${id}.json`), "utf8"))
+ : {};
+ // check if file exists
+ // const fileExists = existsSync(
+ // path.join(fileDir, `${id}.${meta.extension}`)
+ // );
+ const fileExists = await isFileExists(path
+ .join(uploadChunkDirectory, `${id}.${meta.extension}`)
+ .replace(/\\/g, "/"));
+ return {
+ ...meta,
+ // metaPath: path.join(metaDir, `${id}.json`),
+ uploadChunkDirectory,
+ exists: !!fileExists,
+ finished: meta.uploaded && meta.size && meta.uploaded === meta.size,
+ };
+ }
+ // request remote file upload status
+ const resp = await fetch(`${host}/${prefix}/${id}`, {
+ signal: AbortSignal.timeout(fetchTimeoutMs),
+ }).catch((err) => ({
+ json: () => Promise.resolve({ error: err.toString(), code: 501 }),
+ }));
+ return resp.json();
+ }

package/dist/server/plugins/upload/index.d.ts
@@ -0,0 +1,6 @@
+ export declare const fetchTimeoutMs: number;
+ export declare const prefix = "file/upload2";
+ export declare const uploadChunkDirectory = "/files/uploads";
+ export declare const fileDir: string;
+ export declare const metaDir: string;
+ //# sourceMappingURL=index.d.ts.map

package/dist/server/plugins/upload/index.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/index.ts"],"names":[],"mappings":"AASA,eAAO,MAAM,cAAc,QAAmC,CAAC;AAE/D,eAAO,MAAM,MAAM,iBAAiB,CAAC;AAErC,eAAO,MAAM,oBAAoB,mBAAmB,CAAC;AAErD,eAAO,MAAM,OAAO,QAEE,CAAC;AAEvB,eAAO,MAAM,OAAO,QAA4B,CAAC"}

package/dist/server/plugins/upload/index.js
@@ -0,0 +1,12 @@
+ import path from "node:path";
+ import config from "../../../config.js";
+ import getFolder from "../crud/funcs/utils/getFolder.js";
+ config.chunkSize = +(config.chunkSize || 1048576); // 1 MB per chunk by default
+ const rootDir = getFolder(config, "local");
+ export const fetchTimeoutMs = +(config.fetchTimeoutMs || 5000);
+ export const prefix = "file/upload2";
+ export const uploadChunkDirectory = "/files/uploads";
+ export const fileDir = path
+ .join(rootDir, uploadChunkDirectory)
+ .replace(/\\/g, "/");
+ export const metaDir = path.join(fileDir, "tmp");
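
upload/index.js pins the defaults the other upload helpers rely on: a 1 MB chunk size (1048576 bytes) and a 5 s fetch timeout. uploadChunk (further down) validates each chunk against a Content-Range of the form "bytes offset-end/size" and requires body.length === end - offset + 1, so callers have to slice on exactly those boundaries. A small hypothetical helper that produces matching slices:

    // Hypothetical chunking helper; chunkSize mirrors the 1 MB default above
    function* chunks(buffer, chunkSize = 1048576) {
      for (let offset = 0; offset < buffer.length; offset += chunkSize) {
        const end = Math.min(offset + chunkSize, buffer.length) - 1; // inclusive last byte of this chunk
        yield { offset, end, body: buffer.subarray(offset, end + 1) };
      }
    }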

package/dist/server/plugins/upload/startUpload.d.ts
@@ -0,0 +1,8 @@
+ export default function startUpload({ host, id, fileName, size, subdir, }: {
+ host?: string;
+ id?: string;
+ fileName: string;
+ size?: number;
+ subdir?: string;
+ }): Promise<unknown>;
+ //# sourceMappingURL=startUpload.d.ts.map

package/dist/server/plugins/upload/startUpload.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"startUpload.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/startUpload.ts"],"names":[],"mappings":"AAaA,wBAA8B,WAAW,CAAC,EACxC,IAAI,EACJ,EAAE,EACF,QAAQ,EACR,IAAI,EACJ,MAAM,GACP,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,oBA2DA"}

package/dist/server/plugins/upload/startUpload.js
@@ -0,0 +1,53 @@
+ import path from "node:path";
+ import { randomUUID } from "node:crypto";
+ import { mkdir, writeFile } from "node:fs/promises";
+ import getUploadStatus from "./getUploadStatus.js";
+ import { prefix, fileDir, metaDir, uploadChunkDirectory, fetchTimeoutMs, } from "./index.js";
+ export default async function startUpload({ host, id, fileName, size, subdir, }) {
+ if (!host) {
+ const extension = path.extname(fileName).substring(1);
+ const id1 = id || randomUUID();
+ const key = id1.split("-").pop();
+ const meta = {
+ id: id1,
+ key,
+ fileName,
+ size,
+ extension,
+ uploaded: 0,
+ exists: false,
+ };
+ // check current upload state
+ if (id) {
+ const status = await getUploadStatus({ id });
+ Object.assign(meta, status);
+ }
+ // resume file upload
+ if (meta.uploaded > 0) {
+ return meta;
+ }
+ // if not started - start new upload
+ const relativeDirpath = path
+ .join(uploadChunkDirectory, subdir || "")
+ .replace(/\\/g, "/");
+ await mkdir(path.join(fileDir, "tmp"), { recursive: true });
+ // create metadata for resumable upload
+ const metaData = {
+ ...meta,
+ subdir,
+ metaPath: undefined,
+ relativeDirpath,
+ };
+ await writeFile(path.join(metaDir, `${id1}.json`), JSON.stringify(metaData, null, 2));
+ return { ...meta, metaPath: undefined };
+ }
+ const res = await fetch(`${host}/${prefix}/start`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ fileName, size, subdir, id }),
+ signal: AbortSignal.timeout(fetchTimeoutMs),
+ }).catch((err) => ({
+ json: () => Promise.resolve({ error: err.toString(), code: 501 }),
+ }));
+ return res.json();
+ }

package/dist/server/plugins/upload/uploadChunk.d.ts
@@ -0,0 +1,9 @@
+ export default function uploadChunk({ host, id, body, offset, end, size, }: {
+ host?: string;
+ id: string;
+ body: any;
+ offset: number;
+ end: number;
+ size: number;
+ }): Promise<any>;
+ //# sourceMappingURL=uploadChunk.d.ts.map

package/dist/server/plugins/upload/uploadChunk.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/uploadChunk.ts"],"names":[],"mappings":"AAaA,wBAA8B,WAAW,CAAC,EACxC,IAAI,EACJ,EAAE,EACF,IAAI,EACJ,MAAM,EACN,GAAG,EACH,IAAI,GACL,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,GAAG,CAAC;IACV,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;CACd,gBA6DA"}

package/dist/server/plugins/upload/uploadChunk.js
@@ -0,0 +1,47 @@
+ import path from "node:path";
+ import { existsSync } from "node:fs";
+ import { readFile, writeFile } from "node:fs/promises";
+ import { prefix, fileDir, metaDir, fetchTimeoutMs, uploadChunkDirectory, } from "./index.js";
+ import uploadFile from "../file/uploadFile.js";
+ export default async function uploadChunk({ host, id, body, offset, end, size, }) {
+ if (!host) {
+ const metaExists = existsSync(path.join(metaDir, `${id}.json`));
+ if (!metaExists) {
+ return { error: "File not found", code: 404 };
+ }
+ const meta = JSON.parse(await readFile(path.join(metaDir, `${id}.json`), "utf8"));
+ if (size !== meta.size) {
+ return { error: "Total size mismatch", code: 400 };
+ }
+ if (offset !== meta.uploaded) {
+ return { error: "Wrong offset", expected: meta.uploaded, code: 409 };
+ }
+ if (body?.length !== end - offset + 1) {
+ return { error: "Chunk size mismatch", code: 400 };
+ }
+ // append chunk to existing file
+ const filePath = path.join(fileDir, `${id}.${meta.extension}`);
+ // await appendFile(filePath, body);
+ await uploadFile(path
+ .join(uploadChunkDirectory, `${id}.${meta.extension}`)
+ .replace(/\\/g, "/"), body);
+ meta.uploaded = end + 1;
+ // update metadata for resumable upload
+ await writeFile(path.join(metaDir, `${id}.json`), JSON.stringify(meta, null, 2));
+ return { uploaded: meta.uploaded, finished: meta.uploaded === meta.size };
+ }
+ if (!end || !size) {
+ return { error: "not enough params: offset/end/size", code: 400 };
+ }
+ const res = await fetch(`${host}/${prefix}/${id}`, {
+ method: "PATCH",
+ headers: {
+ "Content-Range": `bytes ${offset || 0}-${end}/${size}`,
+ },
+ body,
+ signal: AbortSignal.timeout(fetchTimeoutMs),
+ }).catch((err) => ({
+ json: () => Promise.resolve({ error: err.toString(), code: 501 }),
+ }));
+ return res.json();
+ }
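
Taken together, startUpload, uploadChunk and finishUpload form a resumable upload: startUpload creates (or restores) the metadata file and reports how many bytes are already uploaded, uploadChunk accepts one contiguous chunk at the expected offset, and finishUpload checks uploaded === size before stamping the metadata. A hedged sketch of driving the three functions locally (no host, so the local branches run); the source file is made up and the import paths assume the sketch sits next to the plugin files:

    // Hypothetical local end-to-end flow using the new upload plugin
    import { readFile } from "node:fs/promises";
    import startUpload from "./startUpload.js";
    import uploadChunk from "./uploadChunk.js";
    import finishUpload from "./finishUpload.js";

    const buffer = await readFile("./example.zip");                  // made-up input file
    const meta = await startUpload({ fileName: "example.zip", size: buffer.length });
    let offset = meta.uploaded || 0;                                 // resume point if the upload already existed
    const chunkSize = 1048576;
    while (offset < buffer.length) {
      const end = Math.min(offset + chunkSize, buffer.length) - 1;
      await uploadChunk({ id: meta.id, body: buffer.subarray(offset, end + 1), offset, end, size: buffer.length });
      offset = end + 1;
    }
    console.log(await finishUpload({ id: meta.id }));                // { success: true, code: 200 } once complete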