@opengis/fastify-table 2.0.110 → 2.0.112
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/server/plugins/redis/index.d.ts.map +1 -1
- package/dist/server/plugins/redis/index.js +3 -1
- package/dist/server/routes/file/controllers/export.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/export.js +40 -29
- package/dist/server/routes/file/controllers/resize.d.ts +1 -0
- package/dist/server/routes/file/controllers/resize.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/resize.js +5 -1
- package/dist/server/routes/file/controllers/utils/jsonlToJsonFile.d.ts +2 -0
- package/dist/server/routes/file/controllers/utils/jsonlToJsonFile.d.ts.map +1 -0
- package/dist/server/routes/file/controllers/utils/jsonlToJsonFile.js +32 -0
- package/dist/server/routes/file/controllers/utils/pubsub.d.ts +2 -0
- package/dist/server/routes/file/controllers/utils/pubsub.d.ts.map +1 -0
- package/dist/server/routes/file/controllers/utils/pubsub.js +112 -0
- package/package.json +1 -1
package/dist/server/plugins/redis/index.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/redis/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/redis/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAC;AAa/C,iBAAe,MAAM,CAAC,OAAO,EAAE,eAAe,iBAE7C;AAED,eAAe,MAAM,CAAC"}

package/dist/server/plugins/redis/index.js
@@ -1,6 +1,8 @@
 import redisClients from "./funcs/redisClients.js";
 function close() {
-    Object.keys(redisClients)
+    Object.keys(redisClients)
+        .filter((key) => redisClients[key]?.status !== "end")
+        .forEach((key) => {
         console.log("Closing redis client", key);
         redisClients[key].quit();
     });

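The change to close() above adds a status guard so quit() is only called on Redis clients that have not already ended. A minimal standalone sketch of the same guard (the redisClients map below is a stand-in, not the package's own module):

    // Stand-in client map; ioredis clients expose a `status` that becomes "end" once closed.
    const redisClients = {
        cache: { status: "ready", quit: () => console.log("quit cache") },
        sub: { status: "end", quit: () => console.log("quit sub") }, // already closed, skipped
    };

    function close() {
        Object.keys(redisClients)
            .filter((key) => redisClients[key]?.status !== "end")
            .forEach((key) => {
                console.log("Closing redis client", key);
                redisClients[key].quit();
            });
    }

    close(); // logs only "Closing redis client cache"
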
package/dist/server/routes/file/controllers/export.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"export.d.ts","sourceRoot":"","sources":["../../../../../server/routes/file/controllers/export.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;
+{"version":3,"file":"export.d.ts","sourceRoot":"","sources":["../../../../../server/routes/file/controllers/export.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAC5C,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAgCzD;;;;;;;;;;;;;;;;;;;;GAoBG;AAEH,wBAA8B,WAAW,CACvC,EACE,EAAqB,EACrB,OAAO,EACP,IAAI,EACJ,OAAO,EAAE,QAAQ,EACjB,GAAG,EACH,KAAU,EACV,IAAkB,EAClB,QAAQ,EACR,UAAU,GACX,EAAE;IACD,EAAE,EAAE,UAAU,CAAC;IACf,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC9B,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC3B,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,GAAG,CAAC,EAAE,GAAG,CAAC;IACV,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC5B,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB,EACD,KAAK,EAAE,YAAY,gBAsYpB"}

package/dist/server/routes/file/controllers/export.js
@@ -6,7 +6,7 @@
 import path from "node:path";
 import { createHash } from "node:crypto";
 import { existsSync } from "node:fs";
-import { mkdir, readFile, rm, writeFile } from "node:fs/promises";
+import { appendFile, mkdir, readFile, rm, writeFile } from "node:fs/promises";
 import config from "../../../../config.js";
 import logger from "../../../plugins/logger/getLogger.js";
 import getTemplate from "../../../plugins/table/funcs/getTemplate.js";

@@ -19,6 +19,8 @@ import metaFormat from "../../../plugins/table/funcs/metaFormat/index.js";
 import jsonToXls from "./utils/jsonToXls.js";
 import jsonToCsv from "./utils/jsonToCsv.js";
 import formatResult from "./utils/formatResult.js";
+import jsonlToJsonFile from "./utils/jsonlToJsonFile.js";
+import pubsub from "./utils/pubsub.js";
 const startStreamWithTotal = 10000;
 const rootDir = getFolder(config, "local");
 /**

@@ -83,13 +85,6 @@ export default async function exportTable({ pg = pgClients.client, headers, user
             reply,
         });
     }
-    // delete old file, prevent append
-    if (nocache || config.disableCache) {
-        if (cacheFile)
-            await rm(filePath);
-        if (cacheFileJSON && format !== "json")
-            await rm(filePathJSON);
-    }
     const loadTable = await getTemplate("table", table);
     const meta = await getMeta({ pg, table: loadTable?.table || table });
     const viewSql = await getTemplate("view", loadTable?.table || table);

@@ -165,12 +160,22 @@ export default async function exportTable({ pg = pgClients.client, headers, user
         .split(".")
         .pop()}` // check for json data
     : el.name);
-    const
-    process.env.NODE_ENV !== "test"
-
-
-
-
+    const isStream = ((!slice && +filtered > startStreamWithTotal) || stream) &&
+        process.env.NODE_ENV !== "test" &&
+        !process.env.VITEST;
+    const sendOriginal = isStream ? eventStream(reply) : () => { };
+    const redisKey = `exportTable:${fileName}`;
+    const send = isStream
+        ? await pubsub(redisKey, (msg, finishStream = false) => {
+            sendOriginal(msg, finishStream);
+        }, query.reload && user?.user_type?.includes?.("admin"))
+        : () => { };
+    // delete old file, prevent append
+    if (cacheFile && (nocache || config.disableCache)) {
+        await rm(filePath);
+    }
+    // create directory if not exists
+    await mkdir(path.dirname(filePath), { recursive: true });
     // export xlsx / csv / json
     const source = loadTable?.title || loadTable?.ua || table || sourceName;
     const interval = setInterval(async () => {

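The added block above routes progress reporting through a shared channel: exports stream when the filtered row count exceeds startStreamWithTotal (or a stream is requested), sendOriginal writes to the caller's event stream, and send publishes through the new pubsub helper under the exportTable:<fileName> key so parallel requests for the same file share one progress feed. A rough sketch of that composition, with hypothetical stand-ins for eventStream and pubsub (these are not the package's implementations):

    // Hypothetical stand-ins to illustrate how `send` wraps `sendOriginal`.
    const eventStream = (reply) => (msg, finish = false) => {
        reply.raw.write(`data: ${msg}\n\n`); // server-sent-events style write
        if (finish) reply.raw.end();
    };
    const pubsub = async (key, cb) => (msg, finish = false) => cb(msg, finish);

    async function buildSend({ reply, fileName, isStream }) {
        const sendOriginal = isStream ? eventStream(reply) : () => { };
        const redisKey = `exportTable:${fileName}`;
        const send = isStream
            ? await pubsub(redisKey, (msg, finishStream = false) => sendOriginal(msg, finishStream))
            : () => { };
        return { send, sendOriginal };
    }
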
@@ -183,6 +188,19 @@ export default async function exportTable({ pg = pgClients.client, headers, user
     let seq = 0;
     send(`Всього в реєстрі: ${result.total} (${filtered} з урахуванням фільтрів)`);
     if (!cacheFileJSON || nocache || config.disableCache) {
+        // delete old json line file, prevent append
+        if (existsSync(filePathJSON.replace(/.json$/, ".jsonl"))) {
+            await rm(filePathJSON.replace(/.json$/, ".jsonl"), {
+                recursive: true,
+                force: true,
+            });
+        }
+        // delete old file, prevent append
+        if (existsSync(filePathJSON)) {
+            await rm(filePathJSON, { recursive: true, force: true });
+        }
+        // create empty temporary json lines file to append to
+        await writeFile(filePathJSON.replace(/.json$/, ".jsonl"), "");
         while (+filtered - offset > 0 && !res?.error) {
             try {
                 send(`Оброблено: ${offset}/${filtered}`);

@@ -211,19 +229,8 @@ export default async function exportTable({ pg = pgClients.client, headers, user
                     .forEach((key) => delete row[key]);
             });
         }
-        const
-
-        if (!jsonFileExists) {
-            // if json not exists
-            await mkdir(path.dirname(filePath), { recursive: true });
-            await writeFile(filePathJSON, JSON.stringify(rows));
-        }
-        else {
-            // if json exists
-            const jsonData = JSON.parse((await readFile(filePathJSON, "utf8")) || "{}");
-            const moreData = jsonData.concat(rows); // rewrite to appendFile?
-            await writeFile(filePathJSON, JSON.stringify(moreData));
-        }
+        const data = rows.map((row) => JSON.stringify(row)).join("\n") + "\n";
+        await appendFile(filePathJSON.replace(/.json$/, ".jsonl"), data);
         offset += rows.length;
         page++;
     }

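The replaced block above drops the read-parse-concat-rewrite of the cached JSON file in favour of appending each page of rows as JSON Lines, so memory use no longer grows with the size of the export. A standalone sketch of the append pattern (the paths and rows are illustrative, not the package's values):

    import { appendFile, writeFile } from "node:fs/promises";

    // Illustrative paths; export.js derives filePathJSON from the cache file name.
    const filePathJSON = "/tmp/export.json";
    const jsonlPath = filePathJSON.replace(/.json$/, ".jsonl");

    await writeFile(jsonlPath, ""); // start from an empty temporary JSON Lines file

    for (const rows of [[{ id: 1 }, { id: 2 }], [{ id: 3 }]]) {
        // One JSON document per line, so each batch is a cheap append.
        const data = rows.map((row) => JSON.stringify(row)).join("\n") + "\n";
        await appendFile(jsonlPath, data);
    }
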
@@ -242,9 +249,12 @@ export default async function exportTable({ pg = pgClients.client, headers, user
             }
         }
     }
+    // convert json line to json array
+    await jsonlToJsonFile(filePathJSON.replace(/.json$/, ".jsonl"));
     clearInterval(interval);
     if (res.error) {
-        send(
+        send("finish");
+        sendOriginal(res.error, 1);
         return reply.status(500).send(res.error);
     }
     logger.file("export/table", {

@@ -298,7 +308,8 @@ export default async function exportTable({ pg = pgClients.client, headers, user
     if (resp.error) {
         return reply.status(resp.status || 500).send(resp.error);
     }
-    send("
+    send("finish");
+    sendOriginal("Файл успішно сформовано. Натистіть кнопку ще раз для завантаження даних", 1);
     return formatResult({
         filePath,
         formatAnswer,

package/dist/server/routes/file/controllers/resize.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"resize.d.ts","sourceRoot":"","sources":["../../../../../server/routes/file/controllers/resize.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;
+{"version":3,"file":"resize.d.ts","sourceRoot":"","sources":["../../../../../server/routes/file/controllers/resize.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AAoC5C;;GAEG;AAEH,wBAA8B,MAAM,CAClC,EACE,KAAK,GACN,EAAE;IACD,KAAK,EAAE;QACL,QAAQ,EAAE,MAAM,CAAC;QACjB,OAAO,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;QAC1B,IAAI,EAAE,MAAM,CAAC;QACb,CAAC,CAAC,EAAE,MAAM,CAAC;QACX,CAAC,CAAC,EAAE,MAAM,CAAC;QACX,OAAO,CAAC,EAAE,GAAG,CAAC;QACd,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,OAAO,CAAC,EAAE,GAAG,CAAC;KACf,CAAC;CACH,EACD,KAAK,EAAE,YAAY,kBAwIpB"}

package/dist/server/routes/file/controllers/resize.js
@@ -15,11 +15,15 @@ const getHeight = (width, size, ratio = 1) => {
     return defaultHeight;
 };
 const { resizeImage } = grpc();
+const transparentGif = Buffer.from("R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7", "base64");
 /**
  * Апі використовується для зміни розміру фото за шляхом
  */
 export default async function resize({ query, }, reply) {
-    const { filepath, quality, size, w, h, nocache, format } = query || {};
+    const { filepath, quality, size, w, h, nocache, format, noimage } = query || {};
+    if (noimage) {
+        return reply.headers({ "Content-Type": "image/gif" }).send(transparentGif);
+    }
     if (!filepath) {
         return reply
             .status(400)

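resize.js now accepts a noimage query flag and answers it immediately with a 1×1 transparent GIF decoded from the new base64 constant, skipping the file lookup and resize work entirely. A small sketch of that short-circuit (the handler below is a simplified stand-in for the real route):

    // The constant added above decodes to a 1x1 transparent GIF (the buffer starts with "GIF89a").
    const transparentGif = Buffer.from("R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7", "base64");

    // Simplified stand-in for the route handler; `reply` is a Fastify reply.
    async function resizeSketch({ query }, reply) {
        const { noimage } = query || {};
        if (noimage) {
            return reply.headers({ "Content-Type": "image/gif" }).send(transparentGif);
        }
        // ...otherwise continue with the normal filepath/resize handling
    }
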
package/dist/server/routes/file/controllers/utils/jsonlToJsonFile.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"jsonlToJsonFile.d.ts","sourceRoot":"","sources":["../../../../../../server/routes/file/controllers/utils/jsonlToJsonFile.ts"],"names":[],"mappings":"AAKA,wBAA8B,eAAe,CAAC,SAAS,EAAE,MAAM,mBAiC9D"}

package/dist/server/routes/file/controllers/utils/jsonlToJsonFile.js
@@ -0,0 +1,32 @@
+import { once } from "node:events";
+import { rm } from "node:fs/promises";
+import { createInterface } from "node:readline";
+import { createReadStream, createWriteStream } from "node:fs";
+export default async function jsonlToJsonFile(inputPath) {
+    const outputPath = inputPath.replace(/.jsonl$/g, ".json");
+    const input = createReadStream(inputPath);
+    const output = createWriteStream(outputPath);
+    const stream = createInterface({ input, crlfDelay: Infinity });
+    output.write("[\n");
+    let first = true;
+    stream.on("line", (line) => {
+        if (!line.trim())
+            return;
+        if (!first)
+            output.write(",\n");
+        output.write(JSON.stringify(JSON.parse(line)));
+        first = false;
+    });
+    // await stream to close /until convert finished
+    await once(stream, "close");
+    output.write("\n]");
+    output.end();
+    // wait for write stream to finish flushing
+    await once(output, "finish");
+    // rm temporary json line
+    await rm(inputPath, {
+        recursive: true,
+        force: true,
+    });
+    return outputPath;
+}

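The new jsonlToJsonFile helper streams a temporary .jsonl file line by line into a JSON array file and deletes the .jsonl when done; export.js calls it once after the paging loop. A short usage sketch (the paths and data are illustrative):

    import { writeFile } from "node:fs/promises";
    import jsonlToJsonFile from "./utils/jsonlToJsonFile.js"; // path as imported from export.js

    // Illustrative temporary file with one JSON document per line.
    await writeFile("/tmp/export.jsonl", '{"id":1}\n{"id":2}\n');

    // Produces /tmp/export.json containing a JSON array and removes /tmp/export.jsonl.
    const jsonPath = await jsonlToJsonFile("/tmp/export.jsonl");
    console.log(jsonPath); // /tmp/export.json
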
package/dist/server/routes/file/controllers/utils/pubsub.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"pubsub.d.ts","sourceRoot":"","sources":["../../../../../../server/routes/file/controllers/utils/pubsub.ts"],"names":[],"mappings":"AAkDA,wBAA8B,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,gBA2EzE"}

package/dist/server/routes/file/controllers/utils/pubsub.js
@@ -0,0 +1,112 @@
+const keyCacheSubscribe = "emit_cache";
+const listenter = {}; // callback send message listener
+const promise = {}; // promise await for subscribe
+const resolve = {}; // resolve
+const publish = {}; // publish
+import config from "../../../../../config.js";
+import getRedis from "../../../../plugins/redis/funcs/getRedis.js";
+// getRedisPublisher
+function getRedisPublisher() {
+    const rst = getRedis({
+        name: "rst",
+        ...(config.redis || {}),
+    });
+    const subscriber = getRedis({
+        name: "sub",
+        ...(config.redis || {}),
+    });
+    subscriber.unsubscribe(keyCacheSubscribe);
+    subscriber.subscribe(keyCacheSubscribe, () => { });
+    subscriber.on("message", async (channel, text) => {
+        const [code, msg] = text.split("|||");
+        if (msg === "finish" && resolve[code]) {
+            resolve[code]();
+            resolve[code] = null;
+        }
+        listenter[code].forEach((cb1, i) => {
+            cb1(`${msg} - ${i}`);
+        });
+    });
+    return rst;
+}
+// native publisher
+function getPublisher() {
+    const rst = {
+        publish(key, text) {
+            const [code, msg] = text.split("|||");
+            listenter[code]?.forEach((cb1, i) => {
+                cb1?.(`${msg} - ${i}`);
+            });
+        },
+    };
+    return rst;
+}
+// !!! work non cluster mode for cluster subscribe redis
+export default async function subscribe(code, cb, reload) {
+    try {
+        const rclient = getRedis();
+        const { isRedis, timeout = 150 } = config?.redis?.pubsub || {}; // test!
+        const publisher = isRedis ? getRedisPublisher() : getPublisher();
+        const keyCacheCode = `${keyCacheSubscribe}:${code}`;
+        // check code is run
+        const isNotRun = await rclient.setnx(keyCacheCode, 1);
+        const ttl = await rclient.ttl(keyCacheCode);
+        // if Run
+        if (!isNotRun && !reload && ttl > 0) {
+            // add listener
+            cb(`==== run as slave ttl: ${ttl}==== `);
+            const alldata = await rclient.lrange(keyCacheCode + ":list", 0, -1);
+            alldata.forEach((el) => {
+                cb(el);
+            });
+            // for redis
+            if (!listenter[code]) {
+                listenter[code] = [];
+                promise[code] = new Promise((rsv) => {
+                    resolve[code] = rsv;
+                });
+            }
+            // check master finish
+            const interval = setInterval(async () => {
+                const ttl1 = await rclient.ttl(keyCacheCode);
+                if (ttl1 < 0) {
+                    clearInterval(interval);
+                    cb(`finish master timeout ${timeout}`, 1);
+                }
+            }, 5000);
+            listenter[code].push(cb);
+            return promise[code];
+        }
+        await rclient.expire(keyCacheCode, timeout);
+        // if not run
+        // create return promise
+        promise[code] = new Promise((rsv) => {
+            resolve[code] = rsv;
+        });
+        listenter[code] = [cb];
+        // listenter[code].push(cb);
+        // create publish function
+        publish[code] = (msg) => {
+            rclient.rpush(keyCacheCode + ":list", msg);
+            rclient.expire(keyCacheCode, timeout);
+            // send message to all listener
+            publisher.publish(keyCacheSubscribe, `${code}|||${msg}`); // redis
+            // finish and clear
+            if (msg === "finish") {
+                // clear code to run again
+                rclient.del(keyCacheCode);
+                rclient.del(keyCacheCode + ":list");
+                // resolve promise
+                resolve[code]();
+                // clear
+                resolve[code] = null;
+                promise[code] = null;
+                listenter[code] = [];
+            }
+        };
+        return publish[code];
+    }
+    catch (err) {
+        console.error(err.toString());
+    }
+}

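The new pubsub helper (exported as subscribe(code, cb, reload)) makes the first caller for a given code the master: it gets back a publish function whose messages are pushed to a Redis list and fanned out to listeners, through Redis pub/sub when config.redis.pubsub.isRedis is set or in-process otherwise, and publishing "finish" clears the key and resolves the waiters. Later callers with the same code attach as followers: they replay the buffered messages and receive a promise that resolves when the master finishes (export.js only passes reload for admins requesting a fresh run). A hedged usage sketch with illustrative messages:

    import pubsub from "./utils/pubsub.js"; // as imported by export.js

    const code = "exportTable:report.xlsx"; // illustrative key

    // First caller becomes the master and gets back a publish function.
    const publish = await pubsub(code, (msg) => console.log("master:", msg));
    publish("processed 100/2000"); // buffered in the Redis list and fanned out to listeners
    publish("finish");             // clears the Redis key/list and resolves follower promises

    // A concurrent caller with the same code would attach as a follower instead:
    // await pubsub(code, (msg) => console.log("follower:", msg)); // resolves after "finish"
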