@opengis/fastify-table 2.0.106 → 2.0.108
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/config.d.ts.map +1 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -0
- package/dist/script/adduser +15 -0
- package/dist/script/dump.js +176 -0
- package/dist/script/migrate.js +25 -0
- package/dist/server/plugins/auth/funcs/verifyPassword.d.ts.map +1 -1
- package/dist/server/plugins/auth/funcs/verifyPassword.js +3 -1
- package/dist/server/plugins/crud/funcs/dataInsert.js +2 -2
- package/dist/server/plugins/crud/funcs/dataUpdate.d.ts.map +1 -1
- package/dist/server/plugins/crud/funcs/dataUpdate.js +9 -6
- package/dist/server/plugins/crud/funcs/validateData.js +1 -1
- package/dist/server/plugins/file/providers/fs.js +2 -2
- package/dist/server/plugins/file/providers/s3/funcs/downloadFile.d.ts.map +1 -1
- package/dist/server/plugins/file/providers/s3/funcs/downloadFile.js +1 -2
- package/dist/server/plugins/file/providers/s3/funcs/fileExists.d.ts.map +1 -1
- package/dist/server/plugins/file/providers/s3/funcs/fileExists.js +1 -2
- package/dist/server/plugins/file/providers/s3/funcs/uploadFile.d.ts.map +1 -1
- package/dist/server/plugins/file/providers/s3/funcs/uploadFile.js +1 -2
- package/dist/server/plugins/file/providers/s3/index.js +4 -4
- package/dist/server/plugins/logger/getLogger.d.ts.map +1 -1
- package/dist/server/plugins/logger/getLogger.js +14 -11
- package/dist/server/plugins/logger/index.d.ts.map +1 -1
- package/dist/server/plugins/logger/index.js +5 -4
- package/dist/server/plugins/migration/exec.migrations.js +6 -6
- package/dist/server/plugins/pg/funcs/getMeta.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/getMeta.js +3 -5
- package/dist/server/plugins/pg/funcs/getPG.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/getPG.js +2 -1
- package/dist/server/plugins/pg/funcs/getPGAsync.js +2 -2
- package/dist/server/plugins/pg/funcs/init.d.ts +1 -1
- package/dist/server/plugins/pg/funcs/init.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/init.js +43 -36
- package/dist/server/plugins/pg/funcs/pool.d.ts.map +1 -1
- package/dist/server/plugins/pg/funcs/pool.js +12 -18
- package/dist/server/plugins/pg/index.d.ts.map +1 -1
- package/dist/server/plugins/pg/index.js +3 -2
- package/dist/server/plugins/redis/funcs/getRedis.d.ts.map +1 -1
- package/dist/server/plugins/redis/funcs/getRedis.js +7 -5
- package/dist/server/plugins/redis/index.d.ts.map +1 -1
- package/dist/server/plugins/redis/index.js +4 -1
- package/dist/server/plugins/sqlite/index.d.ts.map +1 -1
- package/dist/server/plugins/sqlite/index.js +7 -3
- package/dist/server/plugins/table/funcs/getFilter.d.ts +1 -1
- package/dist/server/plugins/table/funcs/getFilter.d.ts.map +1 -1
- package/dist/server/plugins/table/funcs/getFilter.js +14 -1
- package/dist/server/plugins/table/funcs/getSelectMeta.d.ts.map +1 -1
- package/dist/server/plugins/table/funcs/getSelectMeta.js +2 -4
- package/dist/server/plugins/table/funcs/gisIRColumn.d.ts +2 -2
- package/dist/server/plugins/table/funcs/gisIRColumn.js +1 -1
- package/dist/server/plugins/upload/finishUpload.d.ts +9 -0
- package/dist/server/plugins/upload/finishUpload.d.ts.map +1 -0
- package/dist/server/plugins/upload/finishUpload.js +33 -0
- package/dist/server/plugins/upload/getUploadStatus.d.ts +5 -0
- package/dist/server/plugins/upload/getUploadStatus.d.ts.map +1 -0
- package/dist/server/plugins/upload/getUploadStatus.js +36 -0
- package/dist/server/plugins/upload/index.d.ts +6 -0
- package/dist/server/plugins/upload/index.d.ts.map +1 -0
- package/dist/server/plugins/upload/index.js +12 -0
- package/dist/server/plugins/upload/startUpload.d.ts +8 -0
- package/dist/server/plugins/upload/startUpload.d.ts.map +1 -0
- package/dist/server/plugins/upload/startUpload.js +53 -0
- package/dist/server/plugins/upload/uploadChunk.d.ts +9 -0
- package/dist/server/plugins/upload/uploadChunk.d.ts.map +1 -0
- package/dist/server/plugins/upload/uploadChunk.js +47 -0
- package/dist/server/plugins/util/funcs/unflattenObject.d.ts.map +1 -1
- package/dist/server/plugins/util/funcs/unflattenObject.js +5 -3
- package/dist/server/routes/access/controllers/access.group.d.ts +2 -2
- package/dist/server/routes/access/controllers/access.group.d.ts.map +1 -1
- package/dist/server/routes/access/controllers/access.group.js +0 -1
- package/dist/server/routes/access/controllers/access.group.post.d.ts +2 -2
- package/dist/server/routes/access/controllers/access.group.post.d.ts.map +1 -1
- package/dist/server/routes/access/controllers/access.group.post.js +0 -1
- package/dist/server/routes/auth/controllers/2factor/providers/totp.d.ts.map +1 -1
- package/dist/server/routes/auth/controllers/2factor/providers/totp.js +1 -1
- package/dist/server/routes/auth/controllers/core/registration.d.ts +1 -1
- package/dist/server/routes/auth/controllers/core/registration.d.ts.map +1 -1
- package/dist/server/routes/auth/controllers/core/registration.js +37 -9
- package/dist/server/routes/auth/controllers/core/updateUserInfo.js +1 -1
- package/dist/server/routes/auth/controllers/jwt/authorize.js +5 -5
- package/dist/server/routes/auth/controllers/jwt/token.d.ts.map +1 -1
- package/dist/server/routes/auth/controllers/jwt/token.js +10 -12
- package/dist/server/routes/cron/controllers/cronApi.d.ts +1 -1
- package/dist/server/routes/cron/controllers/cronApi.d.ts.map +1 -1
- package/dist/server/routes/cron/controllers/cronApi.js +5 -3
- package/dist/server/routes/crud/controllers/insert.d.ts +1 -4
- package/dist/server/routes/crud/controllers/insert.d.ts.map +1 -1
- package/dist/server/routes/crud/controllers/insert.js +24 -16
- package/dist/server/routes/crud/controllers/table.d.ts.map +1 -1
- package/dist/server/routes/crud/controllers/table.js +13 -6
- package/dist/server/routes/crud/controllers/update.d.ts.map +1 -1
- package/dist/server/routes/crud/controllers/update.js +23 -15
- package/dist/server/routes/file/controllers/delete.d.ts +1 -15
- package/dist/server/routes/file/controllers/delete.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/delete.js +13 -20
- package/dist/server/routes/file/controllers/download.d.ts +2 -2
- package/dist/server/routes/file/controllers/download.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/download.js +39 -30
- package/dist/server/routes/file/controllers/files.d.ts +2 -1
- package/dist/server/routes/file/controllers/files.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/files.js +15 -11
- package/dist/server/routes/file/controllers/resize.d.ts +1 -2
- package/dist/server/routes/file/controllers/resize.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/resize.js +17 -6
- package/dist/server/routes/file/controllers/upload.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/upload.js +17 -16
- package/dist/server/routes/file/controllers/uploadImage.d.ts +11 -13
- package/dist/server/routes/file/controllers/uploadImage.d.ts.map +1 -1
- package/dist/server/routes/file/controllers/uploadImage.js +13 -15
- package/dist/server/routes/logger/controllers/logger.file.js +1 -1
- package/dist/server/routes/menu/controllers/interfaces.d.ts +1 -7
- package/dist/server/routes/menu/controllers/interfaces.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/card.d.ts +1 -1
- package/dist/server/routes/table/controllers/card.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/card.js +15 -9
- package/dist/server/routes/table/controllers/filter.d.ts +1 -1
- package/dist/server/routes/table/controllers/filter.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/filter.js +9 -2
- package/dist/server/routes/table/controllers/form.d.ts +1 -1
- package/dist/server/routes/table/controllers/form.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/form.js +8 -5
- package/dist/server/routes/table/controllers/search.d.ts +1 -1
- package/dist/server/routes/table/controllers/search.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/search.js +5 -6
- package/dist/server/routes/table/controllers/suggest.d.ts +1 -1
- package/dist/server/routes/table/controllers/suggest.d.ts.map +1 -1
- package/dist/server/routes/table/controllers/suggest.js +30 -15
- package/dist/server/routes/table/functions/getData.d.ts +1 -1
- package/dist/server/routes/table/functions/getData.d.ts.map +1 -1
- package/dist/server/routes/table/functions/getData.js +60 -45
- package/dist/server/routes/upload/index.d.ts +2 -0
- package/dist/server/routes/upload/index.d.ts.map +1 -0
- package/dist/server/routes/upload/index.js +72 -0
- package/dist/server/types/core.d.ts +7 -1
- package/dist/server/types/core.d.ts.map +1 -1
- package/dist/utils.d.ts +5 -0
- package/dist/utils.d.ts.map +1 -1
- package/dist/utils.js +5 -0
- package/package.json +3 -3
|
@@ -5,38 +5,40 @@ import getRedis from "../../redis/funcs/getRedis.js";
|
|
|
5
5
|
import logger from "../../logger/getLogger.js";
|
|
6
6
|
const rclient = getRedis({ db: 0 });
|
|
7
7
|
async function init(client) {
|
|
8
|
-
if (!client
|
|
8
|
+
if (!client)
|
|
9
|
+
return;
|
|
10
|
+
// for unit tests
|
|
11
|
+
const options = client.options || {
|
|
12
|
+
database: client.database,
|
|
13
|
+
user: client.user,
|
|
14
|
+
password: client.password,
|
|
15
|
+
port: client.port,
|
|
16
|
+
host: client.host,
|
|
17
|
+
};
|
|
18
|
+
if (!client || !client.query || !client.connect) {
|
|
9
19
|
return;
|
|
10
20
|
}
|
|
11
|
-
const
|
|
12
|
-
.query(`SELECT
|
|
13
|
-
(
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
) AS pk,
|
|
33
|
-
(
|
|
34
|
-
SELECT
|
|
35
|
-
json_object_agg(t.oid:: text, pg_catalog.format_type(t.oid, NULL))
|
|
36
|
-
FROM
|
|
37
|
-
pg_catalog.pg_type t
|
|
38
|
-
) AS "pgType"`)
|
|
39
|
-
.then((d) => d.rows[0]);
|
|
21
|
+
const pgType = await client
|
|
22
|
+
.query(`SELECT json_object_agg(t.oid:: text, pg_catalog.format_type(t.oid, NULL)) FROM pg_catalog.pg_type t`)
|
|
23
|
+
.then((el) => el.rows[0]?.json_object_agg || {});
|
|
24
|
+
const pks = await client
|
|
25
|
+
.query(`
|
|
26
|
+
SELECT
|
|
27
|
+
connamespace::regnamespace::text,
|
|
28
|
+
conrelid::regclass,
|
|
29
|
+
(
|
|
30
|
+
SELECT attname FROM pg_attribute WHERE attrelid = c.conrelid AND attnum = c.conkey[1]
|
|
31
|
+
) as pk
|
|
32
|
+
FROM pg_constraint c
|
|
33
|
+
WHERE contype = 'p'
|
|
34
|
+
AND connamespace::regnamespace::text NOT IN ('sde')`)
|
|
35
|
+
.then((el) => el.rows || []);
|
|
36
|
+
const pk = pks.reduce((acc, curr) => ({
|
|
37
|
+
...acc,
|
|
38
|
+
[curr.connamespace === "public"
|
|
39
|
+
? `${curr.connamespace}.${curr.conrelid}`
|
|
40
|
+
: curr.conrelid]: curr.pk,
|
|
41
|
+
}), {});
|
|
40
42
|
const tlist = await client
|
|
41
43
|
.query(`SELECT
|
|
42
44
|
array_agg(
|
|
@@ -72,6 +74,8 @@ async function init(client) {
|
|
|
72
74
|
.then((el) => el.rows || []);
|
|
73
75
|
const relkinds = rows.reduce((acc, curr) => Object.assign(acc, { [curr.tname]: curr.relkind }), {});
|
|
74
76
|
async function query(q, args = []) {
|
|
77
|
+
if (!client)
|
|
78
|
+
throw new Error("empty pg client");
|
|
75
79
|
try {
|
|
76
80
|
const data = await client.query(q, args);
|
|
77
81
|
return data;
|
|
@@ -86,7 +90,7 @@ async function init(client) {
|
|
|
86
90
|
}
|
|
87
91
|
async function querySafe(q, param) {
|
|
88
92
|
const pg1 = new pg.Pool({
|
|
89
|
-
...
|
|
93
|
+
...options,
|
|
90
94
|
statement_timeout: param?.timeout || 100000000,
|
|
91
95
|
});
|
|
92
96
|
try {
|
|
@@ -97,10 +101,10 @@ async function init(client) {
|
|
|
97
101
|
}
|
|
98
102
|
catch (err) {
|
|
99
103
|
if (err.code === "57014") {
|
|
100
|
-
console.warn("pg.querySafe timeout", q);
|
|
104
|
+
console.warn("⚠️ pg.querySafe timeout", q);
|
|
101
105
|
return { rows: [], timeout: true };
|
|
102
106
|
}
|
|
103
|
-
console.warn("pg.querySafe error", q);
|
|
107
|
+
console.warn("⚠️ pg.querySafe error", q);
|
|
104
108
|
throw err;
|
|
105
109
|
}
|
|
106
110
|
finally {
|
|
@@ -114,6 +118,8 @@ async function init(client) {
|
|
|
114
118
|
return result;
|
|
115
119
|
}
|
|
116
120
|
async function queryNotice(q, args, cb = () => { }) {
|
|
121
|
+
if (!client)
|
|
122
|
+
throw new Error("empty pg client");
|
|
117
123
|
const clientCb = await client.connect();
|
|
118
124
|
clientCb.on("notice", (e) => {
|
|
119
125
|
cb(e.message);
|
|
@@ -159,6 +165,8 @@ async function init(client) {
|
|
|
159
165
|
return data;
|
|
160
166
|
}
|
|
161
167
|
Object.assign(client, {
|
|
168
|
+
...options,
|
|
169
|
+
options,
|
|
162
170
|
one,
|
|
163
171
|
pgType,
|
|
164
172
|
pk,
|
|
@@ -168,11 +176,10 @@ async function init(client) {
|
|
|
168
176
|
queryNotice,
|
|
169
177
|
querySafe,
|
|
170
178
|
});
|
|
171
|
-
|
|
172
|
-
console.log("New client init finished", client.options?.database);
|
|
179
|
+
console.log("New client init finished", client.database);
|
|
173
180
|
logger.file("pg", {
|
|
174
181
|
message: "client init finished",
|
|
175
|
-
database: client.
|
|
182
|
+
database: client.database,
|
|
176
183
|
});
|
|
177
184
|
}
|
|
178
185
|
// export default client;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/pool.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/pg/funcs/pool.ts"],"names":[],"mappings":"AAgBA,eAAO,MAAM,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAM,CAAC;yBAE7B,QAAO,GAAQ;AAA/B,wBAgEE"}
|
|
@@ -6,9 +6,8 @@ types.setTypeParser(1114, (stringValue) => stringValue);
|
|
|
6
6
|
import pgClients from "../pgClients.js";
|
|
7
7
|
import init from "./init.js";
|
|
8
8
|
import config from "../../../../config.js";
|
|
9
|
+
config.ready = config.ready || {};
|
|
9
10
|
export const Pools = {};
|
|
10
|
-
const errored = {};
|
|
11
|
-
const inited = {};
|
|
12
11
|
export default (param = {}) => {
|
|
13
12
|
if (!config.pg)
|
|
14
13
|
return null;
|
|
@@ -26,24 +25,24 @@ export default (param = {}) => {
|
|
|
26
25
|
}
|
|
27
26
|
});
|
|
28
27
|
pool.on("error", (err) => {
|
|
29
|
-
console.warn("Unexpected error on idle client", param.database);
|
|
30
|
-
|
|
28
|
+
console.warn("⚠️ Unexpected error on idle client", param.database);
|
|
29
|
+
config.ready[`pg:${param.database}`] = false;
|
|
31
30
|
logger.file("pg", {
|
|
32
31
|
error: err.toString(),
|
|
33
32
|
database: param.database,
|
|
34
33
|
});
|
|
35
34
|
if (config.trace) {
|
|
36
|
-
console.warn("Unexpected error on idle client details:", err.toString(), err.stack);
|
|
35
|
+
console.warn("⚠️ Unexpected error on idle client details:", err.toString(), err.stack);
|
|
37
36
|
}
|
|
38
37
|
});
|
|
39
|
-
pool.on("connect", (
|
|
38
|
+
pool.on("connect", () => {
|
|
40
39
|
// skip auto drops and reconnects handled by pg internally
|
|
41
|
-
if (!
|
|
40
|
+
if (!config.ready[`pg:${param.database}`]) {
|
|
42
41
|
logger.file("pg", {
|
|
43
42
|
message: "client connected",
|
|
44
43
|
database: param.database,
|
|
45
44
|
});
|
|
46
|
-
|
|
45
|
+
config.ready[`pg:${param.database}`] = true;
|
|
47
46
|
}
|
|
48
47
|
if (config.trace) {
|
|
49
48
|
console.log("PG client connected", param.database);
|
|
@@ -52,17 +51,12 @@ export default (param = {}) => {
|
|
|
52
51
|
if (pgClients[name] && !pgClients[name]?.tlist) {
|
|
53
52
|
init(Pools[name]);
|
|
54
53
|
}
|
|
55
|
-
Pools[name] =
|
|
54
|
+
Pools[name] = pool;
|
|
56
55
|
});
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
console.log("PG client acquired", param.database);
|
|
60
|
-
}
|
|
61
|
-
});
|
|
62
|
-
pool.on("remove", () => {
|
|
63
|
-
if (config.trace) {
|
|
56
|
+
if (config.trace) {
|
|
57
|
+
pool.on("remove", () => {
|
|
64
58
|
console.log("PG Client removed from the pool.", param.database);
|
|
65
|
-
}
|
|
66
|
-
}
|
|
59
|
+
});
|
|
60
|
+
}
|
|
67
61
|
return pool;
|
|
68
62
|
};
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/pg/index.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/pg/index.ts"],"names":[],"mappings":"AAgCA,iBAAS,MAAM,CAAC,OAAO,EAAE,GAAG,QAwB3B;AAED,eAAe,MAAM,CAAC"}
|
|
@@ -3,8 +3,9 @@ import pgClients from "./pgClients.js";
|
|
|
3
3
|
import getPGAsync from "./funcs/getPGAsync.js";
|
|
4
4
|
import logger from "../logger/getLogger.js";
|
|
5
5
|
function close() {
|
|
6
|
-
Object.keys(pgClients).forEach((
|
|
7
|
-
|
|
6
|
+
Object.keys(pgClients).forEach((key) => {
|
|
7
|
+
console.log("Closing pg client", key);
|
|
8
|
+
pgClients[key].end();
|
|
8
9
|
});
|
|
9
10
|
}
|
|
10
11
|
async function getHeadersPG(req) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"getRedis.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/redis/funcs/getRedis.ts"],"names":[],"mappings":"AAQA,iBAAS,QAAQ,CACf,EACE,EAAE,EACF,IAAI,EACJ,IAAI,EACJ,
|
|
1
|
+
{"version":3,"file":"getRedis.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/redis/funcs/getRedis.ts"],"names":[],"mappings":"AAQA,iBAAS,QAAQ,CACf,EACE,EAAE,EACF,IAAI,EACJ,IAAI,EACJ,WAAmB,EACnB,aAA8D,EAC9D,kBAAyB,EACzB,WAAkB,EAClB,cAAqB,EACrB,oBAAwB,GACzB,GAAE;IACD,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,aAAa,CAAC,EAAE,GAAG,CAAC;IACpB,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,oBAAoB,CAAC,EAAE,MAAM,CAAC;CACnB,OA6Cd;AAED,eAAe,QAAQ,CAAC"}
|
|
@@ -3,10 +3,15 @@ import config from "../../../../config.js";
|
|
|
3
3
|
import logger from "../../logger/getLogger.js";
|
|
4
4
|
import redisClients from "./redisClients.js";
|
|
5
5
|
config.ready = config.ready || {};
|
|
6
|
-
function getRedis({ db, host, port, closeClient =
|
|
6
|
+
function getRedis({ db, host, port, closeClient = false, retryStrategy = (times) => Math.min(times * 500, 5000), enableOfflineQueue = true, lazyConnect = true, connectTimeout = 2000, maxRetriesPerRequest = 1, } = { db: 0 }) {
|
|
7
7
|
if (!config.redis && !host)
|
|
8
8
|
return null;
|
|
9
9
|
const key = host || port ? [host, port, db].join("-") : db;
|
|
10
|
+
// try to reconnect after connection error / disconnect
|
|
11
|
+
if (redisClients[key]?.status === "end" &&
|
|
12
|
+
typeof redisClients[key].connect === "function") {
|
|
13
|
+
redisClients[key].connect();
|
|
14
|
+
}
|
|
10
15
|
if (redisClients[key]) {
|
|
11
16
|
return redisClients[key];
|
|
12
17
|
}
|
|
@@ -28,11 +33,8 @@ function getRedis({ db, host, port, closeClient = true, retryStrategy = () => {
|
|
|
28
33
|
config.ready[`redis:${key}`] = true;
|
|
29
34
|
});
|
|
30
35
|
redisClients[key].on("error", (err) => {
|
|
31
|
-
console.warn("Ignored redis error:", err.message);
|
|
36
|
+
console.warn("⚠️ Ignored redis error:", err.message);
|
|
32
37
|
logger.file("redis", { error: err.toString() });
|
|
33
|
-
if (err.code === "ETIMEDOUT") {
|
|
34
|
-
// redisClients[key].disconnect();
|
|
35
|
-
}
|
|
36
38
|
});
|
|
37
39
|
console.log("redis connected", db, host, port);
|
|
38
40
|
return redisClients[key];
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/redis/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAC;
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/redis/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,eAAe,EAAE,MAAM,SAAS,CAAC;AAW/C,iBAAe,MAAM,CAAC,OAAO,EAAE,eAAe,iBAE7C;AAED,eAAe,MAAM,CAAC"}
|
|
@@ -1,6 +1,9 @@
|
|
|
1
1
|
import redisClients from "./funcs/redisClients.js";
|
|
2
2
|
function close() {
|
|
3
|
-
Object.keys(redisClients).forEach((key) =>
|
|
3
|
+
Object.keys(redisClients).forEach((key) => {
|
|
4
|
+
console.log("Closing redis client", key);
|
|
5
|
+
redisClients[key].quit();
|
|
6
|
+
});
|
|
4
7
|
}
|
|
5
8
|
async function plugin(fastify) {
|
|
6
9
|
fastify.addHook("onClose", close);
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/sqlite/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/sqlite/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAa1C,iBAAe,QAAQ,CAAC,GAAG,EAAE,eAAe,iBAE3C;;AAED,wBAA4B"}
|
|
@@ -1,8 +1,12 @@
|
|
|
1
1
|
import fp from "fastify-plugin";
|
|
2
2
|
import sqliteClients from "./sqliteClients.js";
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
3
|
+
function close() {
|
|
4
|
+
Object.keys(sqliteClients).forEach((key) => {
|
|
5
|
+
console.log("Closing sqlite client", key);
|
|
6
|
+
sqliteClients[key].close();
|
|
6
7
|
});
|
|
7
8
|
}
|
|
9
|
+
async function dbPlugin(app) {
|
|
10
|
+
app.addHook("onClose", close);
|
|
11
|
+
}
|
|
8
12
|
export default fp(dbPlugin);
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"getFilter.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getFilter.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;
|
|
1
|
+
{"version":3,"file":"getFilter.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getFilter.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAmBzD,wBAA8B,SAAS,CACrC,EACE,EAAqB,EACrB,KAAK,EACL,MAAM,EACN,MAAM,EACN,KAAK,EACL,MAAM,EACN,IAAI,GACL,EAAE;IACD,EAAE,EAAE,UAAU,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5B,EACD,KAAK,CAAC,EAAE,GAAG,gBAoBZ"}
|
|
@@ -1,5 +1,18 @@
|
|
|
1
1
|
import routeFilter from "../../../routes/table/controllers/filter.js";
|
|
2
2
|
import pgClients from "../../pg/pgClients.js";
|
|
3
|
+
const reply1 = {
|
|
4
|
+
response: {},
|
|
5
|
+
// redirect: (txt) => txt,
|
|
6
|
+
redirect: (txt) => Object.assign(reply1.response, {
|
|
7
|
+
body: txt,
|
|
8
|
+
statusCode: reply1.response.statusCode || 200,
|
|
9
|
+
}),
|
|
10
|
+
status: (statusCode) => {
|
|
11
|
+
Object.assign(reply1.response, { status: statusCode });
|
|
12
|
+
return reply1;
|
|
13
|
+
},
|
|
14
|
+
send: (txt) => Object.assign(reply1.response, txt),
|
|
15
|
+
};
|
|
3
16
|
export default async function getFilter({ pg = pgClients.client, table, filter, custom, state, search, user, }, reply) {
|
|
4
17
|
const params = { table };
|
|
5
18
|
const query = {
|
|
@@ -13,6 +26,6 @@ export default async function getFilter({ pg = pgClients.client, table, filter,
|
|
|
13
26
|
params,
|
|
14
27
|
query,
|
|
15
28
|
user,
|
|
16
|
-
}, reply, 1);
|
|
29
|
+
}, reply || reply1, 1);
|
|
17
30
|
return result;
|
|
18
31
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"getSelectMeta.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getSelectMeta.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAWzD,wBAA8B,aAAa,CAAC,EAC1C,IAAI,EACJ,UAAU,EACV,GAAG,EACH,OAAO,EACP,MAAM,EACN,EAAqB,GACtB,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,EAAE,CAAC,EAAE,UAAU,CAAC;CACjB,
|
|
1
|
+
{"version":3,"file":"getSelectMeta.d.ts","sourceRoot":"","sources":["../../../../../server/plugins/table/funcs/getSelectMeta.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAWzD,wBAA8B,aAAa,CAAC,EAC1C,IAAI,EACJ,UAAU,EACV,GAAG,EACH,OAAO,EACP,MAAM,EACN,EAAqB,GACtB,EAAE;IACD,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,EAAE,CAAC,EAAE,UAAU,CAAC;CACjB,gBA4GA"}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import logger from "../../logger/getLogger.js";
|
|
2
2
|
import autoIndex from "../../pg/funcs/autoIndex.js";
|
|
3
|
-
import
|
|
3
|
+
import getPGAsync from "../../pg/funcs/getPGAsync.js";
|
|
4
4
|
import pgClients from "../../pg/pgClients.js";
|
|
5
5
|
import getSelect from "./getSelect.js";
|
|
6
6
|
const limit = 50;
|
|
@@ -17,9 +17,7 @@ export default async function getSelectMeta({ name, startsWith, key, nocache, pa
|
|
|
17
17
|
}
|
|
18
18
|
const cls = await getSelect(name, pg, nocache);
|
|
19
19
|
const db = typeof cls?.db === "string" ? { database: cls.db } : cls?.db;
|
|
20
|
-
const pg1 = cls?.db ?
|
|
21
|
-
if (!pg1?.pk)
|
|
22
|
-
await pg1?.init?.();
|
|
20
|
+
const pg1 = cls?.db ? await getPGAsync(db) : pg;
|
|
23
21
|
if (!cls)
|
|
24
22
|
return null;
|
|
25
23
|
if (cls.arr)
|
|
@@ -28,7 +28,7 @@ export default function gisIRColumn({ pg, layer, column, sql, query, filter, sta
|
|
|
28
28
|
} | {
|
|
29
29
|
count: any;
|
|
30
30
|
sql: string | undefined;
|
|
31
|
-
rows: any
|
|
31
|
+
rows: any;
|
|
32
32
|
error?: undefined;
|
|
33
33
|
status?: undefined;
|
|
34
34
|
message?: undefined;
|
|
@@ -37,7 +37,7 @@ export default function gisIRColumn({ pg, layer, column, sql, query, filter, sta
|
|
|
37
37
|
time: number;
|
|
38
38
|
count: any;
|
|
39
39
|
sql: string | undefined;
|
|
40
|
-
rows: any
|
|
40
|
+
rows: any;
|
|
41
41
|
error?: undefined;
|
|
42
42
|
status?: undefined;
|
|
43
43
|
message?: undefined;
|
|
@@ -19,7 +19,7 @@ export default async function gisIRColumn({ pg = pgClients.client, layer, column
|
|
|
19
19
|
const { tlist } = await pg
|
|
20
20
|
.query(`select array_agg((select nspname from pg_namespace where oid=relnamespace)||'.'||relname) tlist from pg_class
|
|
21
21
|
where relkind in ('r','v','m')`)
|
|
22
|
-
.then((el) => el.rows[0]);
|
|
22
|
+
.then((el) => el.rows?.[0] || {});
|
|
23
23
|
const tableName = body?.table || layer;
|
|
24
24
|
if (!tlist.includes(tableName))
|
|
25
25
|
return { error: `table not found: ${tableName}`, status: 400 };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"finishUpload.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/finishUpload.ts"],"names":[],"mappings":"AAMA,wBAA8B,YAAY,CAAC,EACzC,IAAI,EACJ,EAAE,GACH,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;CACZ,GAAG,OAAO,CAAC;IAAE,KAAK,CAAC,EAAE,MAAM,CAAC;IAAC,OAAO,CAAC,EAAE,OAAO,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAC,CAqC/D"}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { existsSync } from "node:fs";
|
|
3
|
+
import { readFile, writeFile } from "node:fs/promises";
|
|
4
|
+
import { prefix, metaDir, fetchTimeoutMs } from "./index.js";
|
|
5
|
+
export default async function finishUpload({ host, id, }) {
|
|
6
|
+
if (!host) {
|
|
7
|
+
const metaExists = existsSync(path.join(metaDir, `${id}.json`));
|
|
8
|
+
if (!metaExists) {
|
|
9
|
+
return { error: "upload not found: " + id, code: 404 };
|
|
10
|
+
}
|
|
11
|
+
const meta = JSON.parse(await readFile(path.join(metaDir, `${id}.json`), "utf8"));
|
|
12
|
+
if (meta.uploaded !== meta.size) {
|
|
13
|
+
return { error: "Upload not complete", code: 400 };
|
|
14
|
+
}
|
|
15
|
+
const metaData = {
|
|
16
|
+
...meta,
|
|
17
|
+
uploadDate: new Date().toISOString(),
|
|
18
|
+
exists: true,
|
|
19
|
+
finished: true,
|
|
20
|
+
};
|
|
21
|
+
const fileBytes = Buffer.from(JSON.stringify(metaData, null, 2));
|
|
22
|
+
// save metadata
|
|
23
|
+
await writeFile(path.join(metaDir, `${id}.json`), fileBytes);
|
|
24
|
+
return { success: true, code: 200 };
|
|
25
|
+
}
|
|
26
|
+
const res = await fetch(`${host}/${prefix}/${id}/finish`, {
|
|
27
|
+
method: "POST",
|
|
28
|
+
signal: AbortSignal.timeout(fetchTimeoutMs),
|
|
29
|
+
}).catch((err) => ({
|
|
30
|
+
json: () => Promise.resolve({ error: err.toString(), code: 501 }),
|
|
31
|
+
}));
|
|
32
|
+
return res.json();
|
|
33
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"getUploadStatus.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/getUploadStatus.ts"],"names":[],"mappings":"AAaA,wBAA8B,eAAe,CAAC,EAC5C,IAAI,EACJ,EAAE,GACH,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;CACZ,gBAoCA"}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { existsSync } from "node:fs";
|
|
3
|
+
import { readFile } from "node:fs/promises";
|
|
4
|
+
import { prefix, metaDir, uploadChunkDirectory, fetchTimeoutMs, } from "./index.js";
|
|
5
|
+
import isFileExists from "../file/isFileExists.js";
|
|
6
|
+
export default async function getUploadStatus({ host, id, }) {
|
|
7
|
+
// return local file metadata
|
|
8
|
+
if (!host) {
|
|
9
|
+
const metaExists = existsSync(path.join(metaDir, `${id}.json`));
|
|
10
|
+
// check file upload status: finished/inprogress
|
|
11
|
+
const meta = metaExists
|
|
12
|
+
? JSON.parse(await readFile(path.join(metaDir, `${id}.json`), "utf8"))
|
|
13
|
+
: {};
|
|
14
|
+
// check if file exists
|
|
15
|
+
// const fileExists = existsSync(
|
|
16
|
+
// path.join(fileDir, `${id}.${meta.extension}`)
|
|
17
|
+
// );
|
|
18
|
+
const fileExists = await isFileExists(path
|
|
19
|
+
.join(uploadChunkDirectory, `${id}.${meta.extension}`)
|
|
20
|
+
.replace(/\\/g, "/"));
|
|
21
|
+
return {
|
|
22
|
+
...meta,
|
|
23
|
+
// metaPath: path.join(metaDir, `${id}.json`),
|
|
24
|
+
uploadChunkDirectory,
|
|
25
|
+
exists: !!fileExists,
|
|
26
|
+
finished: meta.uploaded && meta.size && meta.uploaded === meta.size,
|
|
27
|
+
};
|
|
28
|
+
}
|
|
29
|
+
// request remote file upload status
|
|
30
|
+
const resp = await fetch(`${host}/${prefix}/${id}`, {
|
|
31
|
+
signal: AbortSignal.timeout(fetchTimeoutMs),
|
|
32
|
+
}).catch((err) => ({
|
|
33
|
+
json: () => Promise.resolve({ error: err.toString(), code: 501 }),
|
|
34
|
+
}));
|
|
35
|
+
return resp.json();
|
|
36
|
+
}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
export declare const fetchTimeoutMs: number;
|
|
2
|
+
export declare const prefix = "file/upload2";
|
|
3
|
+
export declare const uploadChunkDirectory = "/files/uploads";
|
|
4
|
+
export declare const fileDir: string;
|
|
5
|
+
export declare const metaDir: string;
|
|
6
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/index.ts"],"names":[],"mappings":"AASA,eAAO,MAAM,cAAc,QAAmC,CAAC;AAE/D,eAAO,MAAM,MAAM,iBAAiB,CAAC;AAErC,eAAO,MAAM,oBAAoB,mBAAmB,CAAC;AAErD,eAAO,MAAM,OAAO,QAEE,CAAC;AAEvB,eAAO,MAAM,OAAO,QAA4B,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import config from "../../../config.js";
|
|
3
|
+
import getFolder from "../crud/funcs/utils/getFolder.js";
|
|
4
|
+
config.chunkSize = +(config.chunkSize || 1048576); // 1 MB per chunk by default
|
|
5
|
+
const rootDir = getFolder(config, "local");
|
|
6
|
+
export const fetchTimeoutMs = +(config.fetchTimeoutMs || 5000);
|
|
7
|
+
export const prefix = "file/upload2";
|
|
8
|
+
export const uploadChunkDirectory = "/files/uploads";
|
|
9
|
+
export const fileDir = path
|
|
10
|
+
.join(rootDir, uploadChunkDirectory)
|
|
11
|
+
.replace(/\\/g, "/");
|
|
12
|
+
export const metaDir = path.join(fileDir, "tmp");
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"startUpload.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/startUpload.ts"],"names":[],"mappings":"AAaA,wBAA8B,WAAW,CAAC,EACxC,IAAI,EACJ,EAAE,EACF,QAAQ,EACR,IAAI,EACJ,MAAM,GACP,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,oBA2DA"}
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { randomUUID } from "node:crypto";
|
|
3
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
4
|
+
import getUploadStatus from "./getUploadStatus.js";
|
|
5
|
+
import { prefix, fileDir, metaDir, uploadChunkDirectory, fetchTimeoutMs, } from "./index.js";
|
|
6
|
+
/**
 * Start (or resume) a resumable upload.
 *
 * Without `host`, the upload is handled locally: metadata is created (or
 * merged from an existing upload's status) under `metaDir` and returned.
 * With `host`, the request is proxied to `${host}/${prefix}/start`; network
 * failures are converted into a `{ error, code: 501 }` JSON payload.
 *
 * @param {Object} p
 * @param {string} [p.host]   - remote upload host; when absent, run locally
 * @param {string} [p.id]     - existing upload id to resume; new UUID otherwise
 * @param {string} p.fileName - original file name (extension is derived from it)
 * @param {number} [p.size]   - total file size in bytes
 * @param {string} [p.subdir] - optional subdirectory below the chunk directory
 * @returns {Promise<Object>} upload metadata, or an { error, code } object
 */
export default async function startUpload({ host, id, fileName, size, subdir, }) {
    if (!host) {
        const extension = path.extname(fileName).substring(1);
        const id1 = id || randomUUID();
        // short key: last UUID segment
        const key = id1.split("-").pop();
        const meta = {
            id: id1,
            key,
            fileName,
            size,
            extension,
            uploaded: 0,
            exists: false,
        };
        // check current upload state
        if (id) {
            const status = await getUploadStatus({ id });
            Object.assign(meta, status);
        }
        // resume file upload
        if (meta.uploaded > 0) {
            return meta;
        }
        // if not started - start new upload
        const relativeDirpath = path
            .join(uploadChunkDirectory, subdir || "")
            .replace(/\\/g, "/");
        // metaDir is exactly path.join(fileDir, "tmp") (see ./index.js);
        // use the shared constant instead of re-deriving the path here.
        await mkdir(metaDir, { recursive: true });
        // create metadata for resumable upload
        const metaData = {
            ...meta,
            subdir,
            metaPath: undefined,
            relativeDirpath,
        };
        await writeFile(path.join(metaDir, `${id1}.json`), JSON.stringify(metaData, null, 2));
        return { ...meta, metaPath: undefined };
    }
    // Proxy mode: forward the start request to the remote upload host.
    const res = await fetch(`${host}/${prefix}/start`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ fileName, size, subdir, id }),
        signal: AbortSignal.timeout(fetchTimeoutMs),
    }).catch((err) => ({
        // Shape the failure like a Response so the caller's res.json() works.
        json: () => Promise.resolve({ error: err.toString(), code: 501 }),
    }));
    return res.json();
}
|
|
44
|
+
const res = await fetch(`${host}/${prefix}/start`, {
|
|
45
|
+
method: "POST",
|
|
46
|
+
headers: { "Content-Type": "application/json" },
|
|
47
|
+
body: JSON.stringify({ fileName, size, subdir, id }),
|
|
48
|
+
signal: AbortSignal.timeout(fetchTimeoutMs),
|
|
49
|
+
}).catch((err) => ({
|
|
50
|
+
json: () => Promise.resolve({ error: err.toString(), code: 501 }),
|
|
51
|
+
}));
|
|
52
|
+
return res.json();
|
|
53
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../server/plugins/upload/uploadChunk.ts"],"names":[],"mappings":"AAaA,wBAA8B,WAAW,CAAC,EACxC,IAAI,EACJ,EAAE,EACF,IAAI,EACJ,MAAM,EACN,GAAG,EACH,IAAI,GACL,EAAE;IACD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,GAAG,CAAC;IACV,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;CACd,gBA6DA"}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { existsSync } from "node:fs";
|
|
3
|
+
import { readFile, writeFile } from "node:fs/promises";
|
|
4
|
+
import { prefix, fileDir, metaDir, fetchTimeoutMs, uploadChunkDirectory, } from "./index.js";
|
|
5
|
+
import uploadFile from "../file/uploadFile.js";
|
|
6
|
+
/**
 * Upload one chunk of a resumable upload.
 *
 * Without `host`, the chunk is validated against the locally stored metadata
 * (total size, expected offset, chunk length) and appended via uploadFile;
 * metadata is then advanced. With `host`, the chunk is proxied as a PATCH
 * with a Content-Range header; network failures become { error, code: 501 }.
 *
 * @param {Object} p
 * @param {string} [p.host] - remote upload host; when absent, handle locally
 * @param {string} p.id     - upload id (metadata key)
 * @param {*}      p.body   - chunk bytes (length-checked against end - offset + 1)
 * @param {number} p.offset - first byte index of this chunk
 * @param {number} p.end    - last byte index of this chunk (inclusive; may be 0)
 * @param {number} p.size   - total file size in bytes
 * @returns {Promise<Object>} progress { uploaded, finished } or { error, code }
 */
export default async function uploadChunk({ host, id, body, offset, end, size, }) {
    if (!host) {
        // Metadata file is the source of truth for upload progress.
        const metaPath = path.join(metaDir, `${id}.json`);
        if (!existsSync(metaPath)) {
            return { error: "File not found", code: 404 };
        }
        const meta = JSON.parse(await readFile(metaPath, "utf8"));
        if (size !== meta.size) {
            return { error: "Total size mismatch", code: 400 };
        }
        // Chunks must arrive strictly in order.
        if (offset !== meta.uploaded) {
            return { error: "Wrong offset", expected: meta.uploaded, code: 409 };
        }
        // Content-Range end is inclusive, hence the +1.
        if (body?.length !== end - offset + 1) {
            return { error: "Chunk size mismatch", code: 400 };
        }
        // append chunk to existing file
        await uploadFile(path
            .join(uploadChunkDirectory, `${id}.${meta.extension}`)
            .replace(/\\/g, "/"), body);
        meta.uploaded = end + 1;
        // update metadata for resumable upload
        await writeFile(metaPath, JSON.stringify(meta, null, 2));
        return { uploaded: meta.uploaded, finished: meta.uploaded === meta.size };
    }
    // `end` may legitimately be 0 (a one-byte chunk at offset 0), so test for
    // absence rather than falsiness; a missing/zero `size` is still rejected.
    if (end == null || !size) {
        return { error: "not enough params: offset/end/size", code: 400 };
    }
    // Proxy mode: forward the chunk to the remote upload host.
    const res = await fetch(`${host}/${prefix}/${id}`, {
        method: "PATCH",
        headers: {
            "Content-Range": `bytes ${offset || 0}-${end}/${size}`,
        },
        body,
        signal: AbortSignal.timeout(fetchTimeoutMs),
    }).catch((err) => ({
        // Shape the failure like a Response so the caller's res.json() works.
        json: () => Promise.resolve({ error: err.toString(), code: 501 }),
    }));
    return res.json();
}
|