geonix 1.23.8 → 1.30.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +1 -1
- package/README.md +348 -4
- package/exports.js +0 -2
- package/index.d.ts +292 -237
- package/package.json +12 -11
- package/src/Codec.js +21 -8
- package/src/Connection.js +164 -53
- package/src/Crypto.js +117 -0
- package/src/Gateway.js +172 -87
- package/src/Logger.js +101 -11
- package/src/Registry.js +136 -18
- package/src/Remote.js +21 -8
- package/src/Request.js +140 -87
- package/src/RequestOptions.js +11 -8
- package/src/Service.js +176 -113
- package/src/Stream.js +78 -18
- package/src/Util.js +229 -188
- package/src/WebServer.js +29 -22
- package/.claude/settings.local.json +0 -10
- package/.vscode/settings.json +0 -11
- package/PROJECT.md +0 -164
- package/REVIEW.md +0 -372
package/src/Util.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { createHash, randomBytes } from "crypto";
|
|
2
|
-
import { URL
|
|
3
|
-
import {
|
|
2
|
+
import { URL } from "url";
|
|
3
|
+
import { createRequire } from "module";
|
|
4
4
|
import { join } from "path";
|
|
5
5
|
import { Transform } from "node:stream";
|
|
6
6
|
import { networkInterfaces } from "os";
|
|
@@ -14,22 +14,22 @@ import { tmpdir } from "os";
|
|
|
14
14
|
|
|
15
15
|
/**
|
|
16
16
|
* Wait for {delay} ms
|
|
17
|
-
* @param {number} delay
|
|
18
|
-
* @returns
|
|
17
|
+
* @param {number} delay
|
|
18
|
+
* @returns
|
|
19
19
|
*/
|
|
20
|
-
export const sleep = delay => new Promise(resolve => setTimeout(resolve, delay));
|
|
20
|
+
export const sleep = (delay) => {
  // Resolve after `delay` milliseconds; never rejects.
  return new Promise((resolve) => {
    setTimeout(resolve, delay);
  });
};
|
|
21
21
|
|
|
22
22
|
/**
|
|
23
23
|
* Wait for next tick
|
|
24
|
-
*
|
|
25
|
-
* @returns
|
|
24
|
+
*
|
|
25
|
+
* @returns
|
|
26
26
|
*/
|
|
27
|
-
export const
|
|
27
|
+
export const yieldToEventLoop = () => {
  // Defer continuation until the next event-loop iteration via setImmediate.
  return new Promise((resolve) => setImmediate(resolve));
};
|
|
28
28
|
|
|
29
29
|
/**
|
|
30
30
|
* Parse nats:// URL
|
|
31
|
-
* @param {string} url
|
|
32
|
-
* @returns
|
|
31
|
+
* @param {string} url
|
|
32
|
+
* @returns
|
|
33
33
|
*/
|
|
34
34
|
export function parseURL(url) {
|
|
35
35
|
const parsed = new URL(url);
|
|
@@ -38,35 +38,54 @@ export function parseURL(url) {
|
|
|
38
38
|
servers: `${parsed.hostname}:${parsed.port || 4222}`,
|
|
39
39
|
user: parsed.password ? parsed.username : "",
|
|
40
40
|
pass: parsed.password,
|
|
41
|
-
token: parsed.username && !parsed.password ? parsed.username : undefined
|
|
41
|
+
token: parsed.username && !parsed.password ? parsed.username : undefined,
|
|
42
42
|
};
|
|
43
43
|
|
|
44
44
|
return {
|
|
45
45
|
...basic,
|
|
46
|
-
...Object.fromEntries(parsed.searchParams)
|
|
46
|
+
...Object.fromEntries(parsed.searchParams),
|
|
47
47
|
};
|
|
48
48
|
}
|
|
49
49
|
|
|
50
|
+
const BASE62 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
const LOG256_LOG62 = Math.log(256) / Math.log(62); // ≈ 1.3437 digits per byte

/**
 * Encode a Buffer as a fixed-length Base62 string.
 *
 * The output length is derived from the byte count (ceil(bytes · log62(256))),
 * so leading zero bytes show up as leading "A" characters rather than being lost.
 *
 * @param {Buffer} buffer - Bytes to encode.
 * @returns {string} Base62 representation; `""` for an empty buffer.
 */
export function encodeBase62(buffer) {
  if (buffer.length === 0) {
    return "";
  }
  const digitCount = Math.ceil(buffer.length * LOG256_LOG62);
  let n = BigInt("0x" + buffer.toString("hex"));
  let out = "";
  // Build the string least-significant digit first, prepending as we go.
  for (let remaining = digitCount; remaining > 0; remaining--) {
    out = BASE62[Number(n % 62n)] + out;
    n /= 62n;
  }
  return out;
}
|
|
66
|
+
|
|
50
67
|
/**
|
|
51
|
-
*
|
|
52
|
-
*
|
|
53
|
-
*
|
|
68
|
+
* Generates a cryptographically random Base62-encoded ID string.
|
|
69
|
+
* Exported as `randomID` in the public API.
|
|
70
|
+
*
|
|
71
|
+
* @param {number} [size=16] - Number of random bytes to encode (more bytes → longer, more unique ID).
|
|
72
|
+
* @returns {string} URL-safe, Base62-encoded random string.
|
|
54
73
|
*/
|
|
55
|
-
export const picoid = (size =
|
|
74
|
+
export const picoid = (size = 16) => {
  // `size` random bytes → Base62 text; length grows ~1.34 chars per byte.
  const entropy = randomBytes(size);
  return encodeBase62(entropy);
};
|
|
56
75
|
|
|
57
76
|
/**
|
|
58
77
|
* Get SHA256 hash of a string or a buffer
|
|
59
|
-
* @param {string|Buffer} data
|
|
60
|
-
* @returns
|
|
78
|
+
* @param {string|Buffer} data
|
|
79
|
+
* @returns
|
|
61
80
|
*/
|
|
62
81
|
export const hash = (data) => {
  // Hex-encoded SHA-256 digest of a string or Buffer.
  const digest = createHash("sha256");
  digest.update(data);
  return digest.digest("hex");
};
|
|
63
82
|
|
|
64
83
|
/**
|
|
65
84
|
* Create TCP or HTTP server at specified port
|
|
66
|
-
* @param {number} port
|
|
67
|
-
* @param {Object} pkg
|
|
68
|
-
* @param {Function} handler
|
|
69
|
-
* @returns
|
|
85
|
+
* @param {number} port
|
|
86
|
+
* @param {Object} pkg
|
|
87
|
+
* @param {Function} handler
|
|
88
|
+
* @returns
|
|
70
89
|
*/
|
|
71
90
|
export const createServerAtPort = (port, pkg, handler) =>
|
|
72
91
|
new Promise((resolve) => {
|
|
@@ -81,39 +100,24 @@ export const createServerAtPort = (port, pkg, handler) =>
|
|
|
81
100
|
});
|
|
82
101
|
|
|
83
102
|
/**
|
|
84
|
-
* Create TCP or HTTP server at
|
|
85
|
-
* @param {Object} pkg
|
|
86
|
-
* @param {Function} handler
|
|
87
|
-
* @
|
|
88
|
-
* @param {number} poolSize
|
|
89
|
-
* @returns
|
|
90
|
-
*/
|
|
91
|
-
export const createServerAtFreePort = async (pkg, handler, start = 30000, poolSize = 20000) => {
|
|
92
|
-
for (let port = start; port < start + poolSize; port++) {
|
|
93
|
-
try {
|
|
94
|
-
const result = await createServerAtPort(port, pkg, handler);
|
|
95
|
-
if (result) {
|
|
96
|
-
return result;
|
|
97
|
-
}
|
|
98
|
-
} catch {
|
|
99
|
-
// silenty ignore errors
|
|
100
|
-
}
|
|
101
|
-
}
|
|
102
|
-
};
|
|
103
|
-
|
|
104
|
-
/**
|
|
105
|
-
* Create TCP server at random port
|
|
106
|
-
* @param {Function} handler
|
|
107
|
-
* @returns
|
|
103
|
+
* Create TCP or HTTP server at an OS-assigned free port
|
|
104
|
+
* @param {Object} pkg
|
|
105
|
+
* @param {Function} handler
|
|
106
|
+
* @returns
|
|
108
107
|
*/
|
|
109
|
-
export const
|
|
108
|
+
export const createServerAtFreePort = (pkg, handler) => {
  // Listening on port 0 lets the OS pick any free ephemeral port.
  return new Promise((resolve, reject) => {
    const instance = pkg.createServer(handler);
    instance.on("error", reject);
    instance.listen(0, () => {
      const { port } = instance.address();
      resolve({ server: instance, port });
    });
  });
};
|
|
110
114
|
|
|
111
115
|
/**
|
|
112
|
-
* Create
|
|
113
|
-
* @param {Function} handler
|
|
114
|
-
* @returns
|
|
116
|
+
* Create TCP server at an OS-assigned free port
|
|
117
|
+
* @param {Function} handler
|
|
118
|
+
* @returns
|
|
115
119
|
*/
|
|
116
|
-
export const
|
|
120
|
+
export const createTCPServer = function (handler) {
  // Convenience wrapper: TCP flavor of createServerAtFreePort.
  return createServerAtFreePort(net, handler);
};
|
|
117
121
|
|
|
118
122
|
/**
|
|
119
123
|
* Return number of seconds passed from the start of the day (0-86399)
|
|
@@ -124,45 +128,12 @@ export const getSecondsSinceMidnight = () => {
|
|
|
124
128
|
return Math.floor((date.getTime() - date.setHours(0, 0, 0, 0)) / 1000);
|
|
125
129
|
};
|
|
126
130
|
|
|
127
|
-
/**
|
|
128
|
-
* Parse function body and return array of param names
|
|
129
|
-
* @param {*} fn
|
|
130
|
-
* @returns string[]
|
|
131
|
-
*/
|
|
132
|
-
export const getFunctionParams = (fn) => {
|
|
133
|
-
const code = fn.toString();
|
|
134
|
-
const endParenthesisPosition = code.indexOf(")");
|
|
135
|
-
let params;
|
|
136
|
-
|
|
137
|
-
if (endParenthesisPosition != -1) {
|
|
138
|
-
params = code.substring(code.indexOf("(") + 1, endParenthesisPosition);
|
|
139
|
-
} else {
|
|
140
|
-
params = code.substring(0, code.indexOf("=>"));
|
|
141
|
-
}
|
|
142
|
-
|
|
143
|
-
params = params
|
|
144
|
-
// cleanup spaces
|
|
145
|
-
.replaceAll(" ", "")
|
|
146
|
-
// split into array
|
|
147
|
-
.split(",");
|
|
148
|
-
|
|
149
|
-
// remove potential default values
|
|
150
|
-
for (let index = 0; index < params.length; index++) {
|
|
151
|
-
const defaultValueAssignmentPosition = params[index].indexOf("=");
|
|
152
|
-
if (defaultValueAssignmentPosition != -1) {
|
|
153
|
-
params[index] = params[index].substring(0, defaultValueAssignmentPosition - 1);
|
|
154
|
-
}
|
|
155
|
-
}
|
|
156
|
-
|
|
157
|
-
return params;
|
|
158
|
-
};
|
|
159
|
-
|
|
160
131
|
export const proxyHttp = (target, req, res) =>
|
|
161
132
|
new Promise((resolve, reject) => {
|
|
162
133
|
const remoteTarget = `${target}${req.originalUrl}`;
|
|
163
134
|
const options = {
|
|
164
135
|
method: req.method,
|
|
165
|
-
headers: req.headers
|
|
136
|
+
headers: req.headers,
|
|
166
137
|
};
|
|
167
138
|
|
|
168
139
|
const protocol = req.protocol === "https" ? https : http;
|
|
@@ -182,43 +153,48 @@ export const proxyHttp = (target, req, res) =>
|
|
|
182
153
|
|
|
183
154
|
/**
|
|
184
155
|
* Create a object proxy that overlays overlay object
|
|
185
|
-
* @param {*} object
|
|
186
|
-
* @param {*} overlay
|
|
187
|
-
* @returns
|
|
156
|
+
* @param {*} object
|
|
157
|
+
* @param {*} overlay
|
|
158
|
+
* @returns
|
|
188
159
|
*/
|
|
189
|
-
export const OverlayObject = (object, overlay) =>
|
|
190
|
-
|
|
191
|
-
let _geonix_version = "N/A";
|
|
192
|
-
try {
|
|
193
|
-
const __dirname = fileURLToPath(new URL("..", import.meta.url));
|
|
194
|
-
const local = JSON.parse(await readFile(join(__dirname, "package.json")));
|
|
195
|
-
_geonix_version = local.version;
|
|
196
|
-
} catch {
|
|
197
|
-
// ignore errors
|
|
198
|
-
}
|
|
160
|
+
export const OverlayObject = (object, overlay) => {
  const handler = {
    get(base, prop) {
      // Overlay wins unless its value is undefined; then fall through to the base.
      const override = overlay[prop];
      return override === undefined ? base[prop] : override;
    },
  };
  return new Proxy(object, handler);
};
|
|
199
162
|
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
done();
|
|
214
|
-
};
|
|
215
|
-
|
|
216
|
-
chunker._flush = function (done) {
|
|
217
|
-
done();
|
|
218
|
-
};
|
|
163
|
+
/**
 * Version string of the installed Geonix package, resolved once at module load
 * by reading the package's own `package.json` via `createRequire`. Falls back
 * to `"N/A"` when the file cannot be resolved or lacks a `version` field.
 *
 * @type {string}
 */
export const GeonixVersion = (() => {
  let version = "N/A";
  try {
    const requireFromHere = createRequire(import.meta.url);
    const pkg = requireFromHere("../package.json");
    version = pkg.version ?? "N/A";
  } catch {
    // package.json not resolvable — keep the "N/A" fallback.
  }
  return version;
})();
|
|
219
176
|
|
|
220
|
-
|
|
221
|
-
|
|
177
|
+
/**
 * Create a Transform stream that re-slices incoming data into chunks of at
 * most `chunkSize` bytes, preserving order and content.
 *
 * Fix: use `Buffer.prototype.subarray` instead of the deprecated
 * `Buffer.prototype.slice` (Node DEP0158). Both return a zero-copy view of the
 * same memory, so output is byte-identical.
 *
 * @param {number} [chunkSize=65536] - Maximum size in bytes of each emitted chunk.
 * @returns {import('stream').Transform} Transform emitting chunks of ≤ chunkSize bytes.
 */
export const StreamChunker = (chunkSize = 65536) =>
  new Transform({
    transform(chunk, _encoding, done) {
      let offset = 0;
      while (offset < chunk.length) {
        const sliceSize = Math.min(chunkSize, chunk.length - offset);
        // subarray is a view, not a copy — same semantics as the old slice call.
        this.push(chunk.subarray(offset, offset + sliceSize));
        offset += sliceSize;
      }
      done();
    },
    flush(done) {
      done();
    },
  });
|
|
222
198
|
|
|
223
199
|
export async function getFirstItemFromAsyncIterable(asyncIterable) {
|
|
224
200
|
const iterator = asyncIterable[Symbol.asyncIterator]();
|
|
@@ -250,22 +226,31 @@ export function isIterable(obj) {
|
|
|
250
226
|
}
|
|
251
227
|
|
|
252
228
|
/**
|
|
253
|
-
*
|
|
254
|
-
*
|
|
255
|
-
* @
|
|
256
|
-
*
|
|
257
|
-
*
|
|
229
|
+
* Parses a `multipart/form-data` request body into an array of part objects. Each part
|
|
230
|
+
* exposes parsed headers (e.g. `content-disposition`), a `name`, an optional `filename`,
|
|
231
|
+
* and a `body` {@link import('stream').Readable}.
|
|
232
|
+
*
|
|
233
|
+
* By default parts are streamed through temporary files on disk; set `options.useMemory` to
|
|
234
|
+
* `true` to buffer them in memory instead.
|
|
235
|
+
*
|
|
236
|
+
* @param {import('http').IncomingMessage} req - Incoming HTTP request with a `multipart/form-data` content-type.
|
|
237
|
+
* @param {object} [_options] - Parsing options.
|
|
238
|
+
* @param {boolean} [_options.useMemory=false] - Buffer parts in memory instead of temp files.
|
|
239
|
+
* @param {number} [_options.maxFileSize] - Maximum allowed size in bytes for a single part.
|
|
240
|
+
* @param {number} [_options.maxFiles] - Maximum number of parts allowed.
|
|
241
|
+
* @returns {Promise<Array<{ name: string|null, filename: string|null, headers: object, body: import('stream').Readable, size: number }>>}
|
|
242
|
+
* @throws {Error} If the content-type is not `multipart/form-data` or a size/count limit is exceeded.
|
|
258
243
|
*/
|
|
259
244
|
export async function parseMultipart(req, _options) {
|
|
260
245
|
if (!req.headers["content-type"]?.startsWith("multipart/form-data")) {
|
|
261
|
-
throw
|
|
246
|
+
throw Error("Invalid content type (multipart/form-data expected)");
|
|
262
247
|
}
|
|
263
248
|
|
|
264
249
|
const BUFFER_SIZE = 1024 * 1024;
|
|
265
250
|
const END_OF_HEADERS = Buffer.from("\r\n\r\n");
|
|
266
251
|
const options = {
|
|
267
252
|
useMemory: false,
|
|
268
|
-
..._options
|
|
253
|
+
..._options,
|
|
269
254
|
};
|
|
270
255
|
const parts = [];
|
|
271
256
|
let stream = req;
|
|
@@ -276,15 +261,41 @@ export async function parseMultipart(req, _options) {
|
|
|
276
261
|
options.useMemory = true;
|
|
277
262
|
}
|
|
278
263
|
|
|
279
|
-
const
|
|
264
|
+
const boundaryValue = req.headers["content-type"].match(/boundary=([^;,\s]+)/)?.[1];
|
|
265
|
+
if (!boundaryValue) {
|
|
266
|
+
throw Error("parseMultipart: missing boundary in content-type");
|
|
267
|
+
}
|
|
268
|
+
const boundary = Buffer.from("\r\n--" + boundaryValue);
|
|
280
269
|
|
|
281
|
-
await
|
|
270
|
+
await yieldToEventLoop();
|
|
282
271
|
|
|
283
272
|
let lastChunk = Buffer.from("\r\n");
|
|
284
273
|
let activePart;
|
|
285
|
-
|
|
274
|
+
|
|
275
|
+
const cleanup = async () => {
|
|
276
|
+
for (const part of parts) {
|
|
277
|
+
if (part.bodyFile) {
|
|
278
|
+
try {
|
|
279
|
+
part.body.destroy();
|
|
280
|
+
} catch {
|
|
281
|
+
// ignore errors
|
|
282
|
+
}
|
|
283
|
+
try {
|
|
284
|
+
await unlink(part.bodyFile);
|
|
285
|
+
} catch {
|
|
286
|
+
// ignore errors
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
}
|
|
290
|
+
};
|
|
286
291
|
|
|
287
292
|
const write = (chunk) => {
|
|
293
|
+
if (options.maxFileSize !== undefined) {
|
|
294
|
+
activePart.size += chunk.length;
|
|
295
|
+
if (activePart.size > options.maxFileSize) {
|
|
296
|
+
throw Error(`parseMultipart: part exceeds maxFileSize of ${options.maxFileSize} bytes`);
|
|
297
|
+
}
|
|
298
|
+
}
|
|
288
299
|
if (options.useMemory) {
|
|
289
300
|
activePart.body.push(chunk);
|
|
290
301
|
} else {
|
|
@@ -293,75 +304,98 @@ export async function parseMultipart(req, _options) {
|
|
|
293
304
|
};
|
|
294
305
|
|
|
295
306
|
const newPart = () => {
|
|
296
|
-
|
|
307
|
+
if (options.maxFiles !== undefined && parts.length >= options.maxFiles) {
|
|
308
|
+
throw Error(`parseMultipart: exceeded maxFiles limit of ${options.maxFiles}`);
|
|
309
|
+
}
|
|
297
310
|
const bodyFile = tempFilename();
|
|
298
311
|
activePart = {
|
|
299
312
|
headers: {},
|
|
300
313
|
bodyFile: options.useMemory ? undefined : bodyFile,
|
|
301
|
-
body: options.useMemory ? [] : createWriteStream(bodyFile, { flags: "wx" })
|
|
314
|
+
body: options.useMemory ? [] : createWriteStream(bodyFile, { flags: "wx" }),
|
|
315
|
+
size: 0,
|
|
302
316
|
};
|
|
303
317
|
parts.push(activePart);
|
|
304
318
|
};
|
|
305
319
|
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
320
|
+
try {
|
|
321
|
+
while (stream.readable) {
|
|
322
|
+
// next next chunk
|
|
323
|
+
let chunk = stream.read(BUFFER_SIZE);
|
|
309
324
|
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
325
|
+
if (!chunk) {
|
|
326
|
+
await yieldToEventLoop();
|
|
327
|
+
continue;
|
|
328
|
+
}
|
|
314
329
|
|
|
315
|
-
|
|
330
|
+
let combined = Buffer.concat([lastChunk, chunk]);
|
|
331
|
+
let lookbehindSet = false;
|
|
332
|
+
|
|
333
|
+
while (combined.length >= boundary.length + 2) {
|
|
334
|
+
const boundaryIndex = combined.indexOf(boundary);
|
|
335
|
+
|
|
336
|
+
if (boundaryIndex === -1) {
|
|
337
|
+
// Keep only the last boundary.length-1 bytes as lookbehind so a
|
|
338
|
+
// boundary that straddles a read boundary is not split across chunks.
|
|
339
|
+
const safeLength = combined.length - (boundary.length - 1);
|
|
340
|
+
if (activePart && safeLength > 0) {
|
|
341
|
+
write(combined.subarray(0, safeLength));
|
|
342
|
+
}
|
|
343
|
+
lastChunk = combined.subarray(safeLength);
|
|
344
|
+
lookbehindSet = true;
|
|
345
|
+
break;
|
|
346
|
+
}
|
|
316
347
|
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
348
|
+
const isLastBoundary =
|
|
349
|
+
combined[boundaryIndex + boundary.length] === 45 &&
|
|
350
|
+
combined[boundaryIndex + boundary.length + 1] === 45;
|
|
320
351
|
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
}
|
|
352
|
+
if (boundaryIndex > 0) {
|
|
353
|
+
write(combined.subarray(0, boundaryIndex));
|
|
354
|
+
}
|
|
325
355
|
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
356
|
+
if (isLastBoundary) {
|
|
357
|
+
break;
|
|
358
|
+
}
|
|
329
359
|
|
|
330
|
-
|
|
331
|
-
combined = combined.subarray(boundaryIndex + boundary.length + 2);
|
|
332
|
-
done = true;
|
|
333
|
-
break;
|
|
334
|
-
}
|
|
360
|
+
newPart();
|
|
335
361
|
|
|
336
|
-
|
|
362
|
+
const endOfHeaders = combined.indexOf(END_OF_HEADERS, boundaryIndex);
|
|
337
363
|
|
|
338
|
-
|
|
364
|
+
if (endOfHeaders === -1) {
|
|
365
|
+
throw Error("parseMultipart: malformed part — missing header terminator");
|
|
366
|
+
}
|
|
339
367
|
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
368
|
+
activePart.headers = combined
|
|
369
|
+
.subarray(boundaryIndex + boundary.length + 2, endOfHeaders)
|
|
370
|
+
.toString()
|
|
371
|
+
.split("\r\n")
|
|
372
|
+
.reduce((acc, val) => {
|
|
373
|
+
const [header, value] = val.split(": ");
|
|
374
|
+
acc[header.toLowerCase()] = value;
|
|
375
|
+
return acc;
|
|
376
|
+
}, Object.create(null));
|
|
348
377
|
|
|
349
|
-
|
|
378
|
+
combined = combined.subarray(endOfHeaders + END_OF_HEADERS.length);
|
|
350
379
|
|
|
351
|
-
|
|
352
|
-
|
|
380
|
+
lastChunk = combined;
|
|
381
|
+
}
|
|
353
382
|
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
383
|
+
// Carry the unprocessed remainder into the next read so it gets
|
|
384
|
+
// prepended to the next chunk. Skip when the lookbehind was already
|
|
385
|
+
// set inside the boundaryIndex === -1 branch above.
|
|
386
|
+
if (!lookbehindSet) {
|
|
387
|
+
lastChunk = combined;
|
|
388
|
+
}
|
|
358
389
|
}
|
|
390
|
+
} catch (e) {
|
|
391
|
+
await cleanup();
|
|
392
|
+
throw e;
|
|
359
393
|
}
|
|
360
394
|
|
|
361
395
|
for (const part of parts) {
|
|
362
396
|
// extract name and filename from content-disposition header
|
|
363
397
|
if (part.headers["content-disposition"]) {
|
|
364
|
-
const [, name] = part.headers["content-disposition"].match(/name="([^"]+)"/);
|
|
398
|
+
const [, name] = part.headers["content-disposition"].match(/name="([^"]+)"/) || [];
|
|
365
399
|
const [, filename] = part.headers["content-disposition"].match(/filename="([^"]+)"/) || [];
|
|
366
400
|
part.name = name ?? null;
|
|
367
401
|
part.filename = filename ?? null;
|
|
@@ -377,7 +411,7 @@ export async function parseMultipart(req, _options) {
|
|
|
377
411
|
try {
|
|
378
412
|
await unlink(part.bodyFile);
|
|
379
413
|
} catch {
|
|
380
|
-
// ignore errors
|
|
414
|
+
// ignore errors
|
|
381
415
|
}
|
|
382
416
|
});
|
|
383
417
|
}
|
|
@@ -387,24 +421,13 @@ export async function parseMultipart(req, _options) {
|
|
|
387
421
|
}
|
|
388
422
|
|
|
389
423
|
/**
 * Build a unique path in the OS temp directory using a random Base62 name
 * with a `.gxtmp` extension. Does not create the file.
 *
 * @returns {string} Absolute path to a (not yet existing) temp file.
 */
export function tempFilename() {
  const name = `${picoid(12)}.gxtmp`;
  return join(tmpdir(), name);
}
|
|
403
426
|
|
|
404
427
|
export function deepMerge(target, ...source) {
|
|
405
428
|
for (const src of source) {
|
|
406
|
-
for (const key
|
|
407
|
-
if (src[key] instanceof Object) {
|
|
429
|
+
for (const key of Object.keys(src)) {
|
|
430
|
+
if (src[key] instanceof Object && !Array.isArray(src[key])) {
|
|
408
431
|
if (!target[key]) {
|
|
409
432
|
target[key] = {};
|
|
410
433
|
}
|
|
@@ -427,4 +450,22 @@ export function cleanupWebsocketUrl(url) {
|
|
|
427
450
|
} catch {
|
|
428
451
|
return url;
|
|
429
452
|
}
|
|
430
|
-
}
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
/**
 * `fetch` with a hard deadline: aborts the request via AbortController once
 * `timeout` ms elapse. The timer is always cleared, success or failure.
 *
 * NOTE(review): a caller-supplied `options.signal` is replaced by the internal
 * one (same as before this rewrite) — confirm callers never pass their own signal.
 *
 * @param {string|URL} url - Target URL.
 * @param {object} [options={}] - Standard fetch options.
 * @param {number} [timeout=500] - Deadline in milliseconds.
 * @returns {Promise<Response>} The fetch response; rejects with AbortError on timeout.
 */
export async function fetchWithTimeout(url, options = {}, timeout = 500) {
  const controller = new AbortController();
  const abortTimer = setTimeout(() => controller.abort(), timeout);
  try {
    const response = await fetch(url, { ...options, signal: controller.signal });
    return response;
  } finally {
    clearTimeout(abortTimer);
  }
}
|
|
464
|
+
|
|
465
|
+
/**
 * Race a promise against a deadline. Rejects with `Error("Timeout")` if
 * `promise` has not settled within `timeout` ms; the timer is cleared either way.
 *
 * @param {Promise<*>} promise - Promise to guard.
 * @param {number} timeout - Deadline in milliseconds.
 * @returns {Promise<*>} Settles like `promise`, or rejects on timeout.
 */
export function withTimeout(promise, timeout) {
  let pendingTimer;
  const expiry = new Promise((_, reject) => {
    pendingTimer = setTimeout(() => reject(new Error("Timeout")), timeout);
  });
  const race = Promise.race([promise, expiry]);
  return race.finally(() => clearTimeout(pendingTimer));
}
|