@rpcbase/server 0.472.0 → 0.474.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/dist/handler-CGko2pJM.js +500 -0
  2. package/dist/handler-D6oN38vE.js +122 -0
  3. package/dist/handler-ybYk2VTq.js +109 -0
  4. package/dist/{index-BSIupjlE.js → index-dlSIqvl2.js} +1 -1
  5. package/dist/index.js +1 -1
  6. package/dist/rts/api/changes/handler.d.ts.map +1 -1
  7. package/dist/rts.js +2 -2
  8. package/dist/{handler-DHunTqwt.js → schemas-CyxqObur.js} +6 -106
  9. package/dist/shared-8XhCreJo.js +119 -0
  10. package/dist/uploads/api/file-uploads/handler.d.ts +5 -0
  11. package/dist/uploads/api/file-uploads/handler.d.ts.map +1 -0
  12. package/dist/uploads/api/file-uploads/handlers/completeUpload.d.ts +5 -0
  13. package/dist/uploads/api/file-uploads/handlers/completeUpload.d.ts.map +1 -0
  14. package/dist/uploads/api/file-uploads/handlers/getStatus.d.ts +5 -0
  15. package/dist/uploads/api/file-uploads/handlers/getStatus.d.ts.map +1 -0
  16. package/dist/uploads/api/file-uploads/handlers/initUpload.d.ts +5 -0
  17. package/dist/uploads/api/file-uploads/handlers/initUpload.d.ts.map +1 -0
  18. package/dist/uploads/api/file-uploads/handlers/uploadChunk.d.ts +9 -0
  19. package/dist/uploads/api/file-uploads/handlers/uploadChunk.d.ts.map +1 -0
  20. package/dist/uploads/api/file-uploads/index.d.ts +42 -0
  21. package/dist/uploads/api/file-uploads/index.d.ts.map +1 -0
  22. package/dist/uploads/api/file-uploads/middleware/rawBodyParser.d.ts +5 -0
  23. package/dist/uploads/api/file-uploads/middleware/rawBodyParser.d.ts.map +1 -0
  24. package/dist/uploads/api/file-uploads/shared.d.ts +29 -0
  25. package/dist/uploads/api/file-uploads/shared.d.ts.map +1 -0
  26. package/dist/uploads/api/files/handler.d.ts +4 -0
  27. package/dist/uploads/api/files/handler.d.ts.map +1 -0
  28. package/dist/uploads/api/files/handlers/deleteFile.d.ts +9 -0
  29. package/dist/uploads/api/files/handlers/deleteFile.d.ts.map +1 -0
  30. package/dist/uploads/api/files/handlers/getFile.d.ts +3 -0
  31. package/dist/uploads/api/files/handlers/getFile.d.ts.map +1 -0
  32. package/dist/uploads/api/files/handlers/getFile.test.d.ts +2 -0
  33. package/dist/uploads/api/files/handlers/getFile.test.d.ts.map +1 -0
  34. package/dist/uploads/api/files/index.d.ts +4 -0
  35. package/dist/uploads/api/files/index.d.ts.map +1 -0
  36. package/dist/uploads/routes.d.ts +2 -0
  37. package/dist/uploads/routes.d.ts.map +1 -0
  38. package/dist/uploads.d.ts +2 -0
  39. package/dist/uploads.d.ts.map +1 -0
  40. package/dist/uploads.js +9 -0
  41. package/package.json +6 -1
@@ -0,0 +1,500 @@
1
+ import { loadModel, getTenantFilesystemDb } from "@rpcbase/db";
2
+ import { GridFSBucket, ObjectId } from "mongodb";
3
+ import { g as getTenantId, a as getModelCtx, b as getOwnershipSelector, e as ensureUploadIndexes, c as getBucketName, d as getUserId, f as getChunkSizeBytes, h as getSessionTtlMs, i as computeSha256Hex, t as toBufferPayload, n as normalizeSha256Hex, j as getMaxClientUploadBytesPerSecond, k as getRawBodyLimitBytes } from "./shared-8XhCreJo.js";
4
+ import { randomBytes } from "node:crypto";
5
+ import { o as object, n as number, s as string, b as boolean, a as array, _ as _enum } from "./schemas-CyxqObur.js";
6
+ const waitForStreamFinished = async (stream) => new Promise((resolve, reject) => {
7
+ stream.once("finish", resolve);
8
+ stream.once("error", reject);
9
+ });
10
+ const writeToStream = async (stream, chunk) => {
11
+ const ok = stream.write(chunk);
12
+ if (ok) return;
13
+ await new Promise((resolve, reject) => {
14
+ const onDrain = () => {
15
+ cleanup();
16
+ resolve();
17
+ };
18
+ const onError = (error) => {
19
+ cleanup();
20
+ reject(error);
21
+ };
22
+ const cleanup = () => {
23
+ stream.off("drain", onDrain);
24
+ stream.off("error", onError);
25
+ };
26
+ stream.on("drain", onDrain);
27
+ stream.on("error", onError);
28
+ });
29
+ };
30
+ const abortUploadStream = async (stream) => {
31
+ if (!stream) return;
32
+ if (typeof stream.abort === "function") {
33
+ try {
34
+ await stream.abort();
35
+ return;
36
+ } catch {
37
+ }
38
+ }
39
+ try {
40
+ stream.destroy();
41
+ } catch {
42
+ }
43
+ };
44
+ const completeUpload = async (_payload, ctx) => {
45
+ const tenantId = getTenantId(ctx);
46
+ if (!tenantId) {
47
+ ctx.res.status(400);
48
+ return { ok: false, error: "tenant_missing" };
49
+ }
50
+ const uploadId = String(ctx.req.params?.uploadId ?? "").trim();
51
+ if (!uploadId) {
52
+ ctx.res.status(400);
53
+ return { ok: false, error: "invalid_upload_id" };
54
+ }
55
+ const modelCtx = getModelCtx(ctx, tenantId);
56
+ const [UploadSession, UploadChunk] = await Promise.all([
57
+ loadModel("RBUploadSession", modelCtx),
58
+ loadModel("RBUploadChunk", modelCtx)
59
+ ]);
60
+ const existing = await UploadSession.findOne({ _id: uploadId }).lean();
61
+ if (!existing) {
62
+ ctx.res.status(404);
63
+ return { ok: false, error: "not_found" };
64
+ }
65
+ const ownershipSelector = getOwnershipSelector(ctx, existing);
66
+ if (!ownershipSelector) {
67
+ ctx.res.status(401);
68
+ return { ok: false, error: "unauthorized" };
69
+ }
70
+ if (existing.status === "done" && existing.fileId) {
71
+ return { ok: true, fileId: existing.fileId };
72
+ }
73
+ const locked = await UploadSession.findOneAndUpdate(
74
+ { _id: uploadId, ...ownershipSelector, status: "uploading" },
75
+ { $set: { status: "assembling" }, $unset: { error: "" } },
76
+ { new: true }
77
+ ).lean();
78
+ if (!locked) {
79
+ ctx.res.status(409);
80
+ return { ok: false, error: "not_uploading" };
81
+ }
82
+ await ensureUploadIndexes(UploadSession, UploadChunk);
83
+ const fsDb = await getTenantFilesystemDb(tenantId);
84
+ const nativeDb = fsDb.db;
85
+ if (!nativeDb) {
86
+ await UploadSession.updateOne(
87
+ { _id: uploadId, ...ownershipSelector },
88
+ { $set: { status: "error", error: "filesystem_db_unavailable" } }
89
+ );
90
+ ctx.res.status(500);
91
+ return { ok: false, error: "assembly_failed" };
92
+ }
93
+ const bucketName = getBucketName();
94
+ const bucket = new GridFSBucket(nativeDb, { bucketName });
95
+ const lockedUserId = typeof locked.userId === "string" ? locked.userId : void 0;
96
+ const uploadStream = bucket.openUploadStream(locked.filename, {
97
+ metadata: {
98
+ uploadId,
99
+ tenantId,
100
+ mimeType: locked.mimeType,
101
+ totalSize: locked.totalSize,
102
+ ...lockedUserId ? { userId: lockedUserId } : {}
103
+ }
104
+ });
105
+ try {
106
+ const cursor = UploadChunk.find({ uploadId }).sort({ index: 1 }).cursor();
107
+ let expectedIndex = 0;
108
+ try {
109
+ for await (const doc of cursor) {
110
+ const chunkDoc = doc;
111
+ if (chunkDoc.index !== expectedIndex) {
112
+ throw new Error("missing_chunks");
113
+ }
114
+ await writeToStream(uploadStream, chunkDoc.data);
115
+ expectedIndex += 1;
116
+ }
117
+ } finally {
118
+ try {
119
+ await cursor.close();
120
+ } catch {
121
+ }
122
+ }
123
+ if (expectedIndex !== locked.chunksTotal) {
124
+ throw new Error("missing_chunks");
125
+ }
126
+ const finished = waitForStreamFinished(uploadStream);
127
+ uploadStream.end();
128
+ await finished;
129
+ const fileId = String(uploadStream.id ?? "");
130
+ if (!fileId) {
131
+ throw new Error("missing_file_id");
132
+ }
133
+ await UploadSession.updateOne(
134
+ { _id: uploadId, ...ownershipSelector },
135
+ { $set: { status: "done", fileId }, $unset: { error: "" } }
136
+ );
137
+ try {
138
+ await UploadChunk.deleteMany({ uploadId });
139
+ } catch {
140
+ }
141
+ return { ok: true, fileId };
142
+ } catch (error) {
143
+ const message = error instanceof Error ? error.message : String(error);
144
+ await abortUploadStream(uploadStream);
145
+ if (message === "missing_chunks") {
146
+ await UploadSession.updateOne(
147
+ { _id: uploadId, ...ownershipSelector },
148
+ { $set: { status: "uploading" } }
149
+ );
150
+ ctx.res.status(409);
151
+ return { ok: false, error: "missing_chunks" };
152
+ }
153
+ await UploadSession.updateOne(
154
+ { _id: uploadId, ...ownershipSelector },
155
+ { $set: { status: "error", error: message } }
156
+ );
157
+ ctx.res.status(500);
158
+ return { ok: false, error: "assembly_failed" };
159
+ }
160
+ };
161
+ const getStatus = async (_payload, ctx) => {
162
+ const tenantId = getTenantId(ctx);
163
+ if (!tenantId) {
164
+ ctx.res.status(400);
165
+ return { ok: false, error: "tenant_missing" };
166
+ }
167
+ const uploadId = String(ctx.req.params?.uploadId ?? "").trim();
168
+ if (!uploadId) {
169
+ ctx.res.status(400);
170
+ return { ok: false, error: "invalid_upload_id" };
171
+ }
172
+ const modelCtx = getModelCtx(ctx, tenantId);
173
+ const [UploadSession, UploadChunk] = await Promise.all([
174
+ loadModel("RBUploadSession", modelCtx),
175
+ loadModel("RBUploadChunk", modelCtx)
176
+ ]);
177
+ const session = await UploadSession.findOne({ _id: uploadId }).lean();
178
+ if (!session) {
179
+ ctx.res.status(404);
180
+ return { ok: false, error: "not_found" };
181
+ }
182
+ const ownershipSelector = getOwnershipSelector(ctx, session);
183
+ if (!ownershipSelector) {
184
+ ctx.res.status(401);
185
+ return { ok: false, error: "unauthorized" };
186
+ }
187
+ const receivedDocs = await UploadChunk.find(
188
+ { uploadId },
189
+ { index: 1, _id: 0 }
190
+ ).sort({ index: 1 }).lean();
191
+ const received = receivedDocs.map((d) => Number(d?.index ?? -1)).filter((n) => Number.isInteger(n) && n >= 0);
192
+ return {
193
+ ok: true,
194
+ status: session.status,
195
+ chunkSize: session.chunkSize,
196
+ chunksTotal: session.chunksTotal,
197
+ received,
198
+ ...session.fileId ? { fileId: session.fileId } : {}
199
+ };
200
+ };
201
+ const InitRoute = "/api/rb/file-uploads";
202
+ const ChunkRoute = "/api/rb/file-uploads/:uploadId/chunks/:index";
203
+ const StatusRoute = "/api/rb/file-uploads/:uploadId/status";
204
+ const CompleteRoute = "/api/rb/file-uploads/:uploadId/complete";
205
+ const initRequestSchema = object({
206
+ filename: string().min(1),
207
+ mimeType: string().min(1),
208
+ totalSize: number().int().min(1)
209
+ });
210
+ object({
211
+ ok: boolean(),
212
+ error: string().optional(),
213
+ uploadId: string().optional(),
214
+ uploadKey: string().optional(),
215
+ chunkSize: number().int().optional(),
216
+ chunksTotal: number().int().optional()
217
+ });
218
+ object({
219
+ ok: boolean(),
220
+ error: string().optional(),
221
+ status: _enum(["uploading", "assembling", "done", "error"]).optional(),
222
+ chunkSize: number().int().optional(),
223
+ chunksTotal: number().int().optional(),
224
+ received: array(number().int().min(0)).optional(),
225
+ fileId: string().optional()
226
+ });
227
+ object({
228
+ ok: boolean(),
229
+ error: string().optional(),
230
+ fileId: string().optional()
231
+ });
232
+ const initUpload = async (payload, ctx) => {
233
+ const tenantId = getTenantId(ctx);
234
+ if (!tenantId) {
235
+ ctx.res.status(400);
236
+ return { ok: false, error: "tenant_missing" };
237
+ }
238
+ const userId = getUserId(ctx);
239
+ const parsed = initRequestSchema.safeParse(payload ?? {});
240
+ if (!parsed.success) {
241
+ ctx.res.status(400);
242
+ return { ok: false, error: "invalid_payload" };
243
+ }
244
+ const chunkSize = getChunkSizeBytes();
245
+ const { filename, mimeType, totalSize } = parsed.data;
246
+ const chunksTotal = Math.ceil(totalSize / chunkSize);
247
+ const modelCtx = getModelCtx(ctx, tenantId);
248
+ const [UploadSession, UploadChunk] = await Promise.all([
249
+ loadModel("RBUploadSession", modelCtx),
250
+ loadModel("RBUploadChunk", modelCtx)
251
+ ]);
252
+ await ensureUploadIndexes(UploadSession, UploadChunk);
253
+ const uploadId = new ObjectId().toString();
254
+ const now = Date.now();
255
+ const expiresAt = new Date(now + getSessionTtlMs());
256
+ const uploadKey = userId ? null : randomBytes(32).toString("base64url");
257
+ const ownerKeyHash = uploadKey ? computeSha256Hex(Buffer.from(uploadKey)) : void 0;
258
+ await UploadSession.create({
259
+ _id: uploadId,
260
+ ...userId ? { userId } : {},
261
+ ...ownerKeyHash ? { ownerKeyHash } : {},
262
+ filename,
263
+ mimeType,
264
+ totalSize,
265
+ chunkSize,
266
+ chunksTotal,
267
+ status: "uploading",
268
+ createdAt: new Date(now),
269
+ expiresAt
270
+ });
271
+ return {
272
+ ok: true,
273
+ uploadId,
274
+ chunkSize,
275
+ chunksTotal,
276
+ ...uploadKey ? { uploadKey } : {}
277
+ };
278
+ };
279
+ const uploadChunk = async (payload, ctx) => {
280
+ const tenantId = getTenantId(ctx);
281
+ if (!tenantId) {
282
+ ctx.res.status(400);
283
+ return { ok: false, error: "tenant_missing" };
284
+ }
285
+ const uploadId = String(ctx.req.params?.uploadId ?? "").trim();
286
+ const indexRaw = String(ctx.req.params?.index ?? "").trim();
287
+ const index = Number(indexRaw);
288
+ if (!uploadId || !Number.isInteger(index) || index < 0) {
289
+ ctx.res.status(400);
290
+ return { ok: false, error: "invalid_chunk_ref" };
291
+ }
292
+ const modelCtx = getModelCtx(ctx, tenantId);
293
+ const [UploadSession, UploadChunk] = await Promise.all([
294
+ loadModel("RBUploadSession", modelCtx),
295
+ loadModel("RBUploadChunk", modelCtx)
296
+ ]);
297
+ const session = await UploadSession.findOne({ _id: uploadId }).lean();
298
+ if (!session) {
299
+ ctx.res.status(404);
300
+ return { ok: false, error: "not_found" };
301
+ }
302
+ const ownershipSelector = getOwnershipSelector(ctx, session);
303
+ if (!ownershipSelector) {
304
+ ctx.res.status(401);
305
+ return { ok: false, error: "unauthorized" };
306
+ }
307
+ if (session.status !== "uploading") {
308
+ ctx.res.status(409);
309
+ return { ok: false, error: "not_uploading" };
310
+ }
311
+ if (index >= session.chunksTotal) {
312
+ ctx.res.status(400);
313
+ return { ok: false, error: "index_out_of_range" };
314
+ }
315
+ const data = toBufferPayload(payload);
316
+ if (!data) {
317
+ ctx.res.status(400);
318
+ return { ok: false, error: "invalid_body" };
319
+ }
320
+ const expectedSize = index === session.chunksTotal - 1 ? session.totalSize - session.chunkSize * (session.chunksTotal - 1) : session.chunkSize;
321
+ if (data.length > expectedSize) {
322
+ ctx.res.status(413);
323
+ return { ok: false, error: "chunk_too_large" };
324
+ }
325
+ if (data.length !== expectedSize) {
326
+ ctx.res.status(400);
327
+ return { ok: false, error: "invalid_chunk_size" };
328
+ }
329
+ const checksumHeader = ctx.req.get("X-Chunk-SHA256");
330
+ const sha256 = checksumHeader ? computeSha256Hex(data) : void 0;
331
+ if (checksumHeader) {
332
+ const expectedSha256 = normalizeSha256Hex(checksumHeader);
333
+ if (sha256 !== expectedSha256) {
334
+ ctx.res.status(400);
335
+ return { ok: false, error: "checksum_mismatch" };
336
+ }
337
+ }
338
+ await ensureUploadIndexes(UploadSession, UploadChunk);
339
+ await UploadChunk.updateOne(
340
+ { uploadId, index },
341
+ {
342
+ $set: {
343
+ uploadId,
344
+ index,
345
+ data,
346
+ size: data.length,
347
+ sha256,
348
+ expiresAt: session.expiresAt
349
+ },
350
+ $setOnInsert: {
351
+ createdAt: /* @__PURE__ */ new Date()
352
+ }
353
+ },
354
+ { upsert: true }
355
+ );
356
+ ctx.res.status(204);
357
+ return { ok: true };
358
+ };
359
+ const rawBodyParser = ({
360
+ limitBytes,
361
+ maxClientBytesPerSecond
362
+ }) => {
363
+ return (req, res, next) => {
364
+ const contentType = typeof req?.headers?.["content-type"] === "string" ? String(req.headers["content-type"]) : "";
365
+ if (!contentType.includes("application/octet-stream")) {
366
+ next();
367
+ return;
368
+ }
369
+ let total = 0;
370
+ const chunks = [];
371
+ let done = false;
372
+ let paused = false;
373
+ let throttleTimeout = null;
374
+ const rateBytesPerSecond = typeof maxClientBytesPerSecond === "number" && maxClientBytesPerSecond > 0 ? maxClientBytesPerSecond : null;
375
+ const cleanup = () => {
376
+ req.off("data", onData);
377
+ req.off("end", onEnd);
378
+ req.off("error", onError);
379
+ req.off("aborted", onAborted);
380
+ if (throttleTimeout) {
381
+ clearTimeout(throttleTimeout);
382
+ throttleTimeout = null;
383
+ }
384
+ };
385
+ const finish = (error) => {
386
+ if (done) return;
387
+ done = true;
388
+ cleanup();
389
+ if (error) {
390
+ next(error);
391
+ return;
392
+ }
393
+ req.body = Buffer.concat(chunks, total);
394
+ next();
395
+ };
396
+ const onData = (chunk) => {
397
+ if (done) return;
398
+ const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
399
+ total += buffer.length;
400
+ if (total > limitBytes) {
401
+ done = true;
402
+ cleanup();
403
+ req.destroy();
404
+ res.status(413).json({ ok: false, error: "chunk_too_large" });
405
+ return;
406
+ }
407
+ chunks.push(buffer);
408
+ if (!rateBytesPerSecond) return;
409
+ const now = Date.now();
410
+ const clientKey = getClientKey(req);
411
+ const state = getClientRateState(clientKey, rateBytesPerSecond, now);
412
+ const waitMs = consumeRateBudget(state, buffer.length, rateBytesPerSecond, now);
413
+ if (waitMs > 0 && !paused) {
414
+ paused = true;
415
+ req.pause();
416
+ throttleTimeout = setTimeout(() => {
417
+ throttleTimeout = null;
418
+ paused = false;
419
+ if (done) return;
420
+ try {
421
+ req.resume();
422
+ } catch {
423
+ }
424
+ }, waitMs);
425
+ }
426
+ };
427
+ const onEnd = () => finish();
428
+ const onError = (err) => finish(err);
429
+ const onAborted = () => finish(new Error("request_aborted"));
430
+ req.on("data", onData);
431
+ req.on("end", onEnd);
432
+ req.on("error", onError);
433
+ req.on("aborted", onAborted);
434
+ };
435
+ };
436
+ const MAX_BURST_SECONDS = 1;
437
+ const STALE_CLIENT_MS = 15 * 60 * 1e3;
438
+ const clientRateStates = /* @__PURE__ */ new Map();
439
+ let lastCleanupMs = 0;
440
+ const getClientKey = (req) => {
441
+ const rawClientIp = typeof req?.clientIp === "string" ? req.clientIp : "";
442
+ if (rawClientIp.trim()) return rawClientIp.trim();
443
+ const rawIp = typeof req?.ip === "string" ? req.ip : "";
444
+ return rawIp.trim() || "unknown";
445
+ };
446
+ const maybeCleanupStates = (now) => {
447
+ if (now - lastCleanupMs < 6e4) return;
448
+ lastCleanupMs = now;
449
+ if (clientRateStates.size < 2e3) return;
450
+ for (const [key, state] of clientRateStates) {
451
+ if (now - state.lastSeenMs > STALE_CLIENT_MS) {
452
+ clientRateStates.delete(key);
453
+ }
454
+ }
455
+ };
456
+ const getClientRateState = (key, rateBytesPerSecond, now) => {
457
+ maybeCleanupStates(now);
458
+ const capacity = rateBytesPerSecond * MAX_BURST_SECONDS;
459
+ const existing = clientRateStates.get(key);
460
+ if (existing) {
461
+ existing.lastSeenMs = now;
462
+ existing.tokens = Math.min(capacity, existing.tokens);
463
+ return existing;
464
+ }
465
+ const next = {
466
+ tokens: capacity,
467
+ lastRefillMs: now,
468
+ lastSeenMs: now
469
+ };
470
+ clientRateStates.set(key, next);
471
+ return next;
472
+ };
473
+ const consumeRateBudget = (state, bytes, rateBytesPerSecond, now) => {
474
+ const capacity = rateBytesPerSecond * MAX_BURST_SECONDS;
475
+ const elapsedMs = Math.max(0, now - state.lastRefillMs);
476
+ if (elapsedMs > 0) {
477
+ state.tokens = Math.min(capacity, state.tokens + elapsedMs * rateBytesPerSecond / 1e3);
478
+ state.lastRefillMs = now;
479
+ }
480
+ state.tokens -= bytes;
481
+ if (state.tokens >= 0) return 0;
482
+ return Math.ceil(-state.tokens / rateBytesPerSecond * 1e3);
483
+ };
484
+ const handler = (api) => {
485
+ const chunkSizeBytes = getChunkSizeBytes();
486
+ api.use(
487
+ InitRoute,
488
+ rawBodyParser({
489
+ limitBytes: getRawBodyLimitBytes(chunkSizeBytes),
490
+ maxClientBytesPerSecond: getMaxClientUploadBytesPerSecond()
491
+ })
492
+ );
493
+ api.post(InitRoute, initUpload);
494
+ api.put(ChunkRoute, uploadChunk);
495
+ api.get(StatusRoute, getStatus);
496
+ api.post(CompleteRoute, completeUpload);
497
+ };
498
+ export {
499
+ handler as default
500
+ };
@@ -0,0 +1,122 @@
1
+ import { getTenantFilesystemDb } from "@rpcbase/db";
2
+ import { ObjectId, GridFSBucket } from "mongodb";
3
+ import { g as getTenantId, c as getBucketName } from "./shared-8XhCreJo.js";
4
+ const deleteFile = async (_payload, ctx) => {
5
+ const tenantId = getTenantId(ctx);
6
+ if (!tenantId) {
7
+ ctx.res.status(400);
8
+ return { ok: false, error: "tenant_missing" };
9
+ }
10
+ const fileIdRaw = String(ctx.req.params?.fileId ?? "").trim();
11
+ let fileObjectId;
12
+ try {
13
+ fileObjectId = new ObjectId(fileIdRaw);
14
+ } catch {
15
+ ctx.res.status(400);
16
+ return { ok: false, error: "invalid_file_id" };
17
+ }
18
+ const fsDb = await getTenantFilesystemDb(tenantId);
19
+ const nativeDb = fsDb.db;
20
+ if (!nativeDb) {
21
+ ctx.res.status(500);
22
+ return { ok: false, error: "filesystem_db_unavailable" };
23
+ }
24
+ const bucketName = getBucketName();
25
+ const bucket = new GridFSBucket(nativeDb, { bucketName });
26
+ try {
27
+ await bucket.delete(fileObjectId);
28
+ } catch (error) {
29
+ const message = error instanceof Error ? error.message : String(error);
30
+ if (!message.includes("FileNotFound")) {
31
+ ctx.res.status(500);
32
+ return { ok: false, error: "delete_failed" };
33
+ }
34
+ }
35
+ ctx.res.status(204);
36
+ return { ok: true };
37
+ };
38
+ const resolveHeaderString = (value) => {
39
+ if (typeof value !== "string") return null;
40
+ const normalized = value.trim();
41
+ return normalized ? normalized : null;
42
+ };
43
+ const escapeHeaderFilename = (filename) => filename.replace(/[\\"]/g, "_");
44
+ const getFile = async (_payload, ctx) => {
45
+ const tenantId = getTenantId(ctx);
46
+ if (!tenantId) {
47
+ ctx.res.status(400).end();
48
+ return {};
49
+ }
50
+ const fileIdRaw = String(ctx.req.params?.fileId ?? "").trim();
51
+ let fileObjectId;
52
+ try {
53
+ fileObjectId = new ObjectId(fileIdRaw);
54
+ } catch {
55
+ ctx.res.status(400).end();
56
+ return {};
57
+ }
58
+ const fsDb = await getTenantFilesystemDb(tenantId);
59
+ const nativeDb = fsDb.db;
60
+ if (!nativeDb) {
61
+ ctx.res.status(500).end();
62
+ return {};
63
+ }
64
+ const bucketName = getBucketName();
65
+ const bucket = new GridFSBucket(nativeDb, { bucketName });
66
+ const [file] = await bucket.find({ _id: fileObjectId }).limit(1).toArray();
67
+ if (!file) {
68
+ ctx.res.status(404).end();
69
+ return {};
70
+ }
71
+ const mimeTypeFromMetadata = resolveHeaderString(file?.metadata?.mimeType);
72
+ const mimeType = mimeTypeFromMetadata ?? "application/octet-stream";
73
+ const filenameFromDb = resolveHeaderString(file?.filename);
74
+ const filename = filenameFromDb ?? fileIdRaw;
75
+ const filenameSafe = escapeHeaderFilename(filename);
76
+ const cacheControl = "private, max-age=0, must-revalidate";
77
+ const md5 = resolveHeaderString(file?.md5);
78
+ const uploadDate = file?.uploadDate instanceof Date ? file.uploadDate : null;
79
+ const etagValue = md5 ?? `${fileObjectId.toHexString()}-${String(file?.length ?? 0)}-${String(uploadDate?.getTime() ?? 0)}`;
80
+ const etag = md5 ? `"${etagValue}"` : `W/"${etagValue}"`;
81
+ const ifNoneMatch = resolveHeaderString(ctx.req.headers?.["if-none-match"]);
82
+ if (ifNoneMatch) {
83
+ const candidates = ifNoneMatch.split(",").map((value) => value.trim()).filter(Boolean);
84
+ if (candidates.includes("*") || candidates.includes(etag)) {
85
+ ctx.res.status(304);
86
+ ctx.res.setHeader("Cache-Control", cacheControl);
87
+ ctx.res.setHeader("ETag", etag);
88
+ ctx.res.end();
89
+ return {};
90
+ }
91
+ }
92
+ ctx.res.status(200);
93
+ ctx.res.setHeader("Content-Type", mimeType);
94
+ ctx.res.setHeader("Content-Length", String(file?.length ?? 0));
95
+ ctx.res.setHeader("Content-Disposition", `inline; filename="${filenameSafe}"`);
96
+ ctx.res.setHeader("Cache-Control", cacheControl);
97
+ ctx.res.setHeader("ETag", etag);
98
+ ctx.res.flushHeaders();
99
+ if (ctx.req.method === "HEAD") {
100
+ ctx.res.end();
101
+ return {};
102
+ }
103
+ const stream = bucket.openDownloadStream(fileObjectId);
104
+ stream.on("error", () => {
105
+ try {
106
+ ctx.res.destroy();
107
+ } catch {
108
+ }
109
+ });
110
+ stream.pipe(ctx.res);
111
+ return {};
112
+ };
113
+ const Route = "/api/rb/files/:fileId";
114
+ const GetRoute = Route;
115
+ const DeleteRoute = Route;
116
+ const handler = (api) => {
117
+ api.get(GetRoute, getFile);
118
+ api.delete(DeleteRoute, deleteFile);
119
+ };
120
+ export {
121
+ handler as default
122
+ };
@@ -0,0 +1,109 @@
1
+ import { loadModel, ZRBRtsChangeOp } from "@rpcbase/db";
2
+ import { o as object, a as array, s as string, n as number, b as boolean, _ as _enum } from "./schemas-CyxqObur.js";
3
+ const Route = "/api/rb/rts/changes";
4
+ const requestSchema = object({
5
+ sinceSeq: number().int().min(0).default(0),
6
+ limit: number().int().min(1).max(5e3).default(2e3),
7
+ modelNames: array(string().min(1)).optional()
8
+ });
9
+ object({
10
+ ok: boolean(),
11
+ needsFullResync: boolean().optional(),
12
+ earliestSeq: number().int().min(0).optional(),
13
+ latestSeq: number().int().min(0),
14
+ changes: array(object({
15
+ seq: number().int().min(1),
16
+ modelName: string().min(1),
17
+ op: _enum(["delete", "reset_model"]),
18
+ docId: string().optional()
19
+ }))
20
+ });
21
+ const getTenantId = (ctx) => {
22
+ const raw = ctx.req.query?.["rb-tenant-id"];
23
+ const queryTenantId = Array.isArray(raw) ? raw[0] : raw;
24
+ if (typeof queryTenantId === "string" && queryTenantId.trim()) return queryTenantId.trim();
25
+ const sessionTenantId = ctx.req.session?.user?.current_tenant_id;
26
+ if (typeof sessionTenantId === "string" && sessionTenantId.trim()) return sessionTenantId.trim();
27
+ return null;
28
+ };
29
+ const ensureAuthorized = (ctx, tenantId) => {
30
+ const userId = ctx.req.session?.user?.id;
31
+ if (!userId) return null;
32
+ const signedInTenants = ctx.req.session?.user?.signed_in_tenants;
33
+ const currentTenantId = ctx.req.session?.user?.current_tenant_id;
34
+ const hasTenantAccessFromList = Array.isArray(signedInTenants) && signedInTenants.includes(tenantId);
35
+ const normalizedCurrentTenantId = typeof currentTenantId === "string" ? currentTenantId.trim() : "";
36
+ const hasTenantAccessFromCurrent = Boolean(normalizedCurrentTenantId) && normalizedCurrentTenantId === tenantId;
37
+ if (!hasTenantAccessFromList && !hasTenantAccessFromCurrent) return null;
38
+ return userId;
39
+ };
40
+ const getModelCtx = (_ctx, tenantId) => ({
41
+ req: {
42
+ session: {
43
+ user: {
44
+ current_tenant_id: tenantId
45
+ }
46
+ }
47
+ }
48
+ });
49
+ const isRtsChangeRecord = (value) => {
50
+ if (!value || typeof value !== "object") return false;
51
+ const obj = value;
52
+ const isOp = ZRBRtsChangeOp.safeParse(obj.op).success;
53
+ return typeof obj.seq === "number" && typeof obj.modelName === "string" && isOp;
54
+ };
55
+ const changesHandler = async (payload, ctx) => {
56
+ const parsed = requestSchema.safeParse(payload ?? {});
57
+ if (!parsed.success) {
58
+ ctx.res.status(400);
59
+ return { ok: false, latestSeq: 0, changes: [] };
60
+ }
61
+ const tenantId = getTenantId(ctx);
62
+ if (!tenantId) {
63
+ ctx.res.status(400);
64
+ return { ok: false, latestSeq: 0, changes: [] };
65
+ }
66
+ const userId = ensureAuthorized(ctx, tenantId);
67
+ if (!userId) {
68
+ ctx.res.status(401);
69
+ return { ok: false, latestSeq: 0, changes: [] };
70
+ }
71
+ const modelCtx = getModelCtx(ctx, tenantId);
72
+ const [RtsChange, RtsCounter] = await Promise.all([
73
+ loadModel("RBRtsChange", modelCtx),
74
+ loadModel("RBRtsCounter", modelCtx)
75
+ ]);
76
+ const counter = await RtsCounter.findOne({ _id: "rts" }, { seq: 1 }).lean();
77
+ const latestSeq = Number(counter?.seq ?? 0) || 0;
78
+ const { sinceSeq, limit, modelNames } = parsed.data;
79
+ const earliestSelector = {};
80
+ if (Array.isArray(modelNames) && modelNames.length) {
81
+ earliestSelector.modelName = { $in: modelNames };
82
+ }
83
+ const earliest = await RtsChange.findOne(earliestSelector, { seq: 1 }).sort({ seq: 1 }).lean();
84
+ const earliestSeq = earliest?.seq ? Number(earliest.seq) : void 0;
85
+ const needsFullResync = typeof earliestSeq === "number" && sinceSeq < earliestSeq - 1;
86
+ const selector = { seq: { $gt: sinceSeq } };
87
+ if (Array.isArray(modelNames) && modelNames.length) {
88
+ selector.modelName = { $in: modelNames };
89
+ }
90
+ const changes = await RtsChange.find(selector, { _id: 0, seq: 1, modelName: 1, op: 1, docId: 1 }).sort({ seq: 1 }).limit(limit).lean();
91
+ return {
92
+ ok: true,
93
+ needsFullResync: needsFullResync || void 0,
94
+ earliestSeq,
95
+ latestSeq,
96
+ changes: Array.isArray(changes) ? changes.filter(isRtsChangeRecord).map((c) => ({
97
+ seq: Number(c.seq),
98
+ modelName: String(c.modelName),
99
+ op: c.op,
100
+ docId: c.docId ? String(c.docId) : void 0
101
+ })) : []
102
+ };
103
+ };
104
+ const handler = (api) => {
105
+ api.post(Route, changesHandler);
106
+ };
107
+ export {
108
+ handler as default
109
+ };
@@ -5,7 +5,7 @@ const TENANT_ID_QUERY_PARAM = "rb-tenant-id";
5
5
  const USER_ID_HEADER = "rb-user-id";
6
6
  const QUERY_KEY_MAX_LEN = 4096;
7
7
  const QUERY_MAX_LIMIT = 4096;
8
- const INTERNAL_MODEL_NAMES = /* @__PURE__ */ new Set(["RtsChange", "RtsCounter"]);
8
+ const INTERNAL_MODEL_NAMES = /* @__PURE__ */ new Set(["RBRtsChange", "RBRtsCounter"]);
9
9
  const DEFAULT_MAX_PAYLOAD_BYTES = 1024 * 1024;
10
10
  const DEFAULT_MAX_SUBSCRIPTIONS_PER_SOCKET = 256;
11
11
  const DEFAULT_DISPATCH_DEBOUNCE_MS = 25;
package/dist/index.js CHANGED
@@ -18,7 +18,7 @@ import { StrictMode, createElement } from "react";
18
18
  import { renderToPipeableStream, renderToStaticMarkup } from "react-dom/server";
19
19
  import { jsx } from "react/jsx-runtime";
20
20
  import { createPath, matchRoutes, parsePath, createStaticRouter, StaticRouterProvider } from "@rpcbase/router";
21
- import { i, n, r } from "./index-BSIupjlE.js";
21
+ import { i, n, r } from "./index-dlSIqvl2.js";
22
22
  function getDefaultExportFromCjs(x) {
23
23
  return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, "default") ? x["default"] : x;
24
24
  }
@@ -1 +1 @@
1
- {"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/rts/api/changes/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAmB,MAAM,cAAc,CAAA;AAOnD,KAAK,WAAW,GAAG;IACjB,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;CAC7B,CAAA;yBAqIe,KAAK,GAAG,CAAC,WAAW,CAAC;AAArC,wBAEC"}
1
+ {"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/rts/api/changes/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAmB,MAAM,cAAc,CAAA;AAOnD,KAAK,WAAW,GAAG;IACjB,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;CAC7B,CAAA;yBA0He,KAAK,GAAG,CAAC,WAAW,CAAC;AAArC,wBAEC"}
package/dist/rts.js CHANGED
@@ -1,6 +1,6 @@
1
- import { i, n, r } from "./index-BSIupjlE.js";
1
+ import { i, n, r } from "./index-dlSIqvl2.js";
2
2
  const routes = Object.entries({
3
- .../* @__PURE__ */ Object.assign({ "./api/changes/handler.ts": () => import("./handler-DHunTqwt.js") })
3
+ .../* @__PURE__ */ Object.assign({ "./api/changes/handler.ts": () => import("./handler-ybYk2VTq.js") })
4
4
  }).reduce((acc, [path, mod]) => {
5
5
  acc[path.replace("./api/", "@rpcbase/server/rts/api/")] = mod;
6
6
  return acc;
@@ -1,4 +1,3 @@
1
- import { loadModel } from "@rpcbase/db";
2
1
  function $constructor(name, initializer2, params) {
3
2
  function init(inst, def) {
4
3
  if (!inst._zod) {
@@ -3275,110 +3274,11 @@ function refine(fn, _params = {}) {
3275
3274
  function superRefine(fn) {
3276
3275
  return _superRefine(fn);
3277
3276
  }
3278
- const Route = "/api/rb/rts/changes";
3279
- const requestSchema = object({
3280
- sinceSeq: number().int().min(0).default(0),
3281
- limit: number().int().min(1).max(5e3).default(2e3),
3282
- modelNames: array(string().min(1)).optional()
3283
- });
3284
- object({
3285
- ok: boolean(),
3286
- needsFullResync: boolean().optional(),
3287
- earliestSeq: number().int().min(0).optional(),
3288
- latestSeq: number().int().min(0),
3289
- changes: array(object({
3290
- seq: number().int().min(1),
3291
- modelName: string().min(1),
3292
- op: _enum(["delete", "reset_model"]),
3293
- docId: string().optional()
3294
- }))
3295
- });
3296
- const getTenantId = (ctx) => {
3297
- const raw = ctx.req.query?.["rb-tenant-id"];
3298
- const queryTenantId = Array.isArray(raw) ? raw[0] : raw;
3299
- if (typeof queryTenantId === "string" && queryTenantId.trim()) return queryTenantId.trim();
3300
- const sessionTenantId = ctx.req.session?.user?.current_tenant_id;
3301
- if (typeof sessionTenantId === "string" && sessionTenantId.trim()) return sessionTenantId.trim();
3302
- return null;
3303
- };
3304
- const ensureAuthorized = (ctx, tenantId) => {
3305
- const userId = ctx.req.session?.user?.id;
3306
- if (!userId) return null;
3307
- const signedInTenants = ctx.req.session?.user?.signed_in_tenants;
3308
- const currentTenantId = ctx.req.session?.user?.current_tenant_id;
3309
- const hasTenantAccessFromList = Array.isArray(signedInTenants) && signedInTenants.includes(tenantId);
3310
- const normalizedCurrentTenantId = typeof currentTenantId === "string" ? currentTenantId.trim() : "";
3311
- const hasTenantAccessFromCurrent = Boolean(normalizedCurrentTenantId) && normalizedCurrentTenantId === tenantId;
3312
- if (!hasTenantAccessFromList && !hasTenantAccessFromCurrent) return null;
3313
- return userId;
3314
- };
3315
- const getModelCtx = (_ctx, tenantId) => ({
3316
- req: {
3317
- session: {
3318
- user: {
3319
- current_tenant_id: tenantId
3320
- }
3321
- }
3322
- }
3323
- });
3324
- const isRtsChangeRecord = (value) => {
3325
- if (!value || typeof value !== "object") return false;
3326
- const obj = value;
3327
- const isOp = obj.op === "delete" || obj.op === "reset_model";
3328
- return typeof obj.seq === "number" && typeof obj.modelName === "string" && isOp;
3329
- };
3330
- const changesHandler = async (payload, ctx) => {
3331
- const parsed = requestSchema.safeParse(payload ?? {});
3332
- if (!parsed.success) {
3333
- ctx.res.status(400);
3334
- return { ok: false, latestSeq: 0, changes: [] };
3335
- }
3336
- const tenantId = getTenantId(ctx);
3337
- if (!tenantId) {
3338
- ctx.res.status(400);
3339
- return { ok: false, latestSeq: 0, changes: [] };
3340
- }
3341
- const userId = ensureAuthorized(ctx, tenantId);
3342
- if (!userId) {
3343
- ctx.res.status(401);
3344
- return { ok: false, latestSeq: 0, changes: [] };
3345
- }
3346
- const modelCtx = getModelCtx(ctx, tenantId);
3347
- const [RtsChange, RtsCounter] = await Promise.all([
3348
- loadModel("RtsChange", modelCtx),
3349
- loadModel("RtsCounter", modelCtx)
3350
- ]);
3351
- const counter = await RtsCounter.findOne({ _id: "rts" }, { seq: 1 }).lean();
3352
- const latestSeq = Number(counter?.seq ?? 0) || 0;
3353
- const { sinceSeq, limit, modelNames } = parsed.data;
3354
- const earliestSelector = {};
3355
- if (Array.isArray(modelNames) && modelNames.length) {
3356
- earliestSelector.modelName = { $in: modelNames };
3357
- }
3358
- const earliest = await RtsChange.findOne(earliestSelector, { seq: 1 }).sort({ seq: 1 }).lean();
3359
- const earliestSeq = earliest?.seq ? Number(earliest.seq) : void 0;
3360
- const needsFullResync = typeof earliestSeq === "number" && sinceSeq < earliestSeq - 1;
3361
- const selector = { seq: { $gt: sinceSeq } };
3362
- if (Array.isArray(modelNames) && modelNames.length) {
3363
- selector.modelName = { $in: modelNames };
3364
- }
3365
- const changes = await RtsChange.find(selector, { _id: 0, seq: 1, modelName: 1, op: 1, docId: 1 }).sort({ seq: 1 }).limit(limit).lean();
3366
- return {
3367
- ok: true,
3368
- needsFullResync: needsFullResync || void 0,
3369
- earliestSeq,
3370
- latestSeq,
3371
- changes: Array.isArray(changes) ? changes.filter(isRtsChangeRecord).map((c) => ({
3372
- seq: Number(c.seq),
3373
- modelName: String(c.modelName),
3374
- op: c.op,
3375
- docId: c.docId ? String(c.docId) : void 0
3376
- })) : []
3377
- };
3378
- };
3379
- const handler = (api) => {
3380
- api.post(Route, changesHandler);
3381
- };
3382
3277
  export {
3383
- handler as default
3278
+ _enum as _,
3279
+ array as a,
3280
+ boolean as b,
3281
+ number as n,
3282
+ object as o,
3283
+ string as s
3384
3284
  };
@@ -0,0 +1,119 @@
1
+ import { createHash, timingSafeEqual } from "node:crypto";
2
+ const DEFAULT_CHUNK_SIZE_BYTES = 5 * 1024 * 1024;
3
+ const MAX_CHUNK_SIZE_BYTES = 15 * 1024 * 1024;
4
+ const DEFAULT_MAX_CLIENT_BYTES_PER_SECOND = 10 * 1024 * 1024;
5
+ const DEFAULT_SESSION_TTL_S = 60 * 60 * 24;
6
+ const ensuredIndexDbNames = /* @__PURE__ */ new Set();
7
+ const parseOptionalPositiveInt = (rawValue) => {
8
+ if (typeof rawValue !== "string") return null;
9
+ const normalized = rawValue.trim();
10
+ if (!normalized) return null;
11
+ const parsed = Number(normalized);
12
+ if (!Number.isFinite(parsed) || parsed <= 0) return null;
13
+ return Math.floor(parsed);
14
+ };
15
+ const getChunkSizeBytes = () => {
16
+ const configured = parseOptionalPositiveInt(process.env.RB_UPLOAD_CHUNK_SIZE_BYTES);
17
+ const resolved = configured ?? DEFAULT_CHUNK_SIZE_BYTES;
18
+ return Math.min(MAX_CHUNK_SIZE_BYTES, resolved);
19
+ };
20
+ const getMaxClientUploadBytesPerSecond = () => {
21
+ const configured = parseOptionalPositiveInt(process.env.RB_UPLOAD_MAX_CLIENT_BYTES_PER_SECOND);
22
+ return configured ?? DEFAULT_MAX_CLIENT_BYTES_PER_SECOND;
23
+ };
24
+ const getSessionTtlMs = () => {
25
+ const ttlSeconds = parseOptionalPositiveInt(process.env.RB_UPLOAD_SESSION_TTL_S) ?? DEFAULT_SESSION_TTL_S;
26
+ return ttlSeconds * 1e3;
27
+ };
28
+ const getRawBodyLimitBytes = (chunkSizeBytes) => chunkSizeBytes + 1024 * 1024;
29
+ const getBucketName = () => (process.env.RB_FILESYSTEM_BUCKET_NAME ?? "").trim() || "fs";
30
+ const getUserId = (ctx) => {
31
+ const raw = ctx.req.session?.user?.id;
32
+ if (typeof raw !== "string") return null;
33
+ const normalized = raw.trim();
34
+ return normalized ? normalized : null;
35
+ };
36
+ const getTenantId = (ctx) => {
37
+ const rawSession = ctx.req.session?.user?.current_tenant_id;
38
+ const sessionTenantId = typeof rawSession === "string" ? rawSession.trim() : "";
39
+ const userId = getUserId(ctx);
40
+ const rawQuery = ctx.req.query?.["rb-tenant-id"];
41
+ const queryTenantId = Array.isArray(rawQuery) ? rawQuery[0] : rawQuery;
42
+ const queryValue = typeof queryTenantId === "string" && queryTenantId.trim() ? queryTenantId.trim() : null;
43
+ if (!userId && queryValue) return queryValue;
44
+ if (userId) return sessionTenantId || null;
45
+ if (sessionTenantId) return sessionTenantId;
46
+ return queryValue;
47
+ };
48
+ const computeSha256Hex = (data) => createHash("sha256").update(data).digest("hex");
49
+ const normalizeSha256Hex = (value) => value.trim().toLowerCase();
50
+ const getModelCtx = (_ctx, tenantId) => ({
51
+ req: {
52
+ session: {
53
+ user: {
54
+ current_tenant_id: tenantId
55
+ }
56
+ }
57
+ }
58
+ });
59
+ const toBufferPayload = (payload) => {
60
+ if (Buffer.isBuffer(payload)) return payload;
61
+ if (payload instanceof Uint8Array) return Buffer.from(payload);
62
+ return null;
63
+ };
64
+ const ensureUploadIndexes = async (UploadSession, UploadChunk) => {
65
+ const dbName = String(UploadSession?.db?.name ?? "");
66
+ if (dbName && ensuredIndexDbNames.has(dbName)) return;
67
+ await Promise.all([
68
+ UploadSession.createIndexes(),
69
+ UploadChunk.createIndexes()
70
+ ]);
71
+ if (dbName) ensuredIndexDbNames.add(dbName);
72
+ };
73
+ const normalizeUploadKey = (raw) => {
74
+ if (typeof raw !== "string") return null;
75
+ const normalized = raw.trim();
76
+ return normalized ? normalized : null;
77
+ };
78
+ const getUploadKeyHash = (ctx) => {
79
+ const uploadKey = normalizeUploadKey(ctx.req.get("X-Upload-Key"));
80
+ if (!uploadKey) return null;
81
+ return computeSha256Hex(Buffer.from(uploadKey));
82
+ };
83
+ const timingSafeEqualHex = (left, right) => {
84
+ if (left.length !== right.length) return false;
85
+ try {
86
+ return timingSafeEqual(Buffer.from(left, "hex"), Buffer.from(right, "hex"));
87
+ } catch {
88
+ return false;
89
+ }
90
+ };
91
+ const getOwnershipSelector = (ctx, session) => {
92
+ if (session.userId) {
93
+ const userId = getUserId(ctx);
94
+ if (!userId || userId !== session.userId) return null;
95
+ return { userId: session.userId };
96
+ }
97
+ if (session.ownerKeyHash) {
98
+ const uploadKeyHash = getUploadKeyHash(ctx);
99
+ if (!uploadKeyHash) return null;
100
+ if (!timingSafeEqualHex(session.ownerKeyHash, uploadKeyHash)) return null;
101
+ return { ownerKeyHash: session.ownerKeyHash };
102
+ }
103
+ return null;
104
+ };
105
+ export {
106
+ getModelCtx as a,
107
+ getOwnershipSelector as b,
108
+ getBucketName as c,
109
+ getUserId as d,
110
+ ensureUploadIndexes as e,
111
+ getChunkSizeBytes as f,
112
+ getTenantId as g,
113
+ getSessionTtlMs as h,
114
+ computeSha256Hex as i,
115
+ getMaxClientUploadBytesPerSecond as j,
116
+ getRawBodyLimitBytes as k,
117
+ normalizeSha256Hex as n,
118
+ toBufferPayload as t
119
+ };
@@ -0,0 +1,5 @@
1
+ import { Api } from '../../../../../api/src';
2
+ import { SessionUser } from './shared';
3
+ declare const _default: (api: Api<SessionUser>) => void;
4
+ export default _default;
5
+ //# sourceMappingURL=handler.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAOlC,OAAO,EAA6E,KAAK,WAAW,EAAE,MAAM,UAAU,CAAA;yBAKtG,KAAK,GAAG,CAAC,WAAW,CAAC;AAArC,wBAcC"}
@@ -0,0 +1,5 @@
1
+ import { ApiHandler } from '../../../../../../api/src';
2
+ import { SessionUser } from '../shared';
3
+ import * as Uploads from "../index";
4
+ export declare const completeUpload: ApiHandler<Record<string, never>, Uploads.CompleteResponsePayload, SessionUser>;
5
+ //# sourceMappingURL=completeUpload.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"completeUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/completeUpload.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAQjB,MAAM,WAAW,CAAA;AAiDlB,eAAO,MAAM,cAAc,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,uBAAuB,EAAE,WAAW,CAmJ1G,CAAA"}
@@ -0,0 +1,5 @@
1
+ import { ApiHandler } from '../../../../../../api/src';
2
+ import { SessionUser } from '../shared';
3
+ import * as Uploads from "../index";
4
+ export declare const getStatus: ApiHandler<Record<string, never>, Uploads.StatusResponsePayload, SessionUser>;
5
+ //# sourceMappingURL=getStatus.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getStatus.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/getStatus.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAMjB,MAAM,WAAW,CAAA;AAGlB,eAAO,MAAM,SAAS,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,qBAAqB,EAAE,WAAW,CAkDnG,CAAA"}
@@ -0,0 +1,5 @@
1
+ import { ApiHandler } from '../../../../../../api/src';
2
+ import { SessionUser } from '../shared';
3
+ import * as Uploads from "../index";
4
+ export declare const initUpload: ApiHandler<Uploads.InitRequestPayload, Uploads.InitResponsePayload, SessionUser>;
5
+ //# sourceMappingURL=initUpload.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"initUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/initUpload.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAUjB,MAAM,WAAW,CAAA;AAGlB,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,OAAO,CAAC,kBAAkB,EAAE,OAAO,CAAC,mBAAmB,EAAE,WAAW,CA2DvG,CAAA"}
@@ -0,0 +1,9 @@
1
+ import { ApiHandler } from '../../../../../../api/src';
2
+ import { SessionUser } from '../shared';
3
+ type ChunkResponsePayload = {
4
+ ok: boolean;
5
+ error?: string;
6
+ };
7
+ export declare const uploadChunk: ApiHandler<Buffer, ChunkResponsePayload, SessionUser>;
8
+ export {};
9
+ //# sourceMappingURL=uploadChunk.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/uploadChunk.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,EACL,KAAK,WAAW,EAUjB,MAAM,WAAW,CAAA;AAGlB,KAAK,oBAAoB,GAAG;IAC1B,EAAE,EAAE,OAAO,CAAA;IACX,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,eAAO,MAAM,WAAW,EAAE,UAAU,CAAC,MAAM,EAAE,oBAAoB,EAAE,WAAW,CAqG7E,CAAA"}
@@ -0,0 +1,42 @@
1
+ import { z } from '../../../../../vite/node_modules/zod';
2
+ export declare const InitRoute = "/api/rb/file-uploads";
3
+ export declare const ChunkRoute = "/api/rb/file-uploads/:uploadId/chunks/:index";
4
+ export declare const StatusRoute = "/api/rb/file-uploads/:uploadId/status";
5
+ export declare const CompleteRoute = "/api/rb/file-uploads/:uploadId/complete";
6
+ export declare const initRequestSchema: z.ZodObject<{
7
+ filename: z.ZodString;
8
+ mimeType: z.ZodString;
9
+ totalSize: z.ZodNumber;
10
+ }, z.core.$strip>;
11
+ export type InitRequestPayload = z.infer<typeof initRequestSchema>;
12
+ export declare const initResponseSchema: z.ZodObject<{
13
+ ok: z.ZodBoolean;
14
+ error: z.ZodOptional<z.ZodString>;
15
+ uploadId: z.ZodOptional<z.ZodString>;
16
+ uploadKey: z.ZodOptional<z.ZodString>;
17
+ chunkSize: z.ZodOptional<z.ZodNumber>;
18
+ chunksTotal: z.ZodOptional<z.ZodNumber>;
19
+ }, z.core.$strip>;
20
+ export type InitResponsePayload = z.infer<typeof initResponseSchema>;
21
+ export declare const statusResponseSchema: z.ZodObject<{
22
+ ok: z.ZodBoolean;
23
+ error: z.ZodOptional<z.ZodString>;
24
+ status: z.ZodOptional<z.ZodEnum<{
25
+ error: "error";
26
+ done: "done";
27
+ uploading: "uploading";
28
+ assembling: "assembling";
29
+ }>>;
30
+ chunkSize: z.ZodOptional<z.ZodNumber>;
31
+ chunksTotal: z.ZodOptional<z.ZodNumber>;
32
+ received: z.ZodOptional<z.ZodArray<z.ZodNumber>>;
33
+ fileId: z.ZodOptional<z.ZodString>;
34
+ }, z.core.$strip>;
35
+ export type StatusResponsePayload = z.infer<typeof statusResponseSchema>;
36
+ export declare const completeResponseSchema: z.ZodObject<{
37
+ ok: z.ZodBoolean;
38
+ error: z.ZodOptional<z.ZodString>;
39
+ fileId: z.ZodOptional<z.ZodString>;
40
+ }, z.core.$strip>;
41
+ export type CompleteResponsePayload = z.infer<typeof completeResponseSchema>;
42
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AAGvB,eAAO,MAAM,SAAS,yBAAyB,CAAA;AAC/C,eAAO,MAAM,UAAU,iDAAiD,CAAA;AACxE,eAAO,MAAM,WAAW,0CAA0C,CAAA;AAClE,eAAO,MAAM,aAAa,4CAA4C,CAAA;AAEtE,eAAO,MAAM,iBAAiB;;;;iBAI5B,CAAA;AAEF,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,iBAAiB,CAAC,CAAA;AAElE,eAAO,MAAM,kBAAkB;;;;;;;iBAO7B,CAAA;AAEF,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB,CAAC,CAAA;AAEpE,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;iBAQ/B,CAAA;AAEF,MAAM,MAAM,qBAAqB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAA;AAExE,eAAO,MAAM,sBAAsB;;;;iBAIjC,CAAA;AAEF,MAAM,MAAM,uBAAuB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,sBAAsB,CAAC,CAAA"}
@@ -0,0 +1,5 @@
1
+ export declare const rawBodyParser: ({ limitBytes, maxClientBytesPerSecond, }: {
2
+ limitBytes: number;
3
+ maxClientBytesPerSecond?: number | null;
4
+ }) => (req: any, res: any, next: any) => void;
5
+ //# sourceMappingURL=rawBodyParser.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"rawBodyParser.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/middleware/rawBodyParser.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,aAAa,GAAI,0CAG3B;IACD,UAAU,EAAE,MAAM,CAAA;IAClB,uBAAuB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;CACxC,MACS,KAAK,GAAG,EAAE,KAAK,GAAG,EAAE,MAAM,GAAG,SA6FtC,CAAA"}
@@ -0,0 +1,29 @@
1
+ import { Ctx } from '../../../../../api/src';
2
+ import { IRBUploadChunk, IRBUploadSession, LoadModelCtx } from '../../../../../db/src';
3
+ import { Model } from '../../../../../vite/node_modules/mongoose';
4
+ export type SessionUser = {
5
+ id?: string;
6
+ current_tenant_id?: string;
7
+ };
8
+ export type UploadSessionDoc = IRBUploadSession;
9
+ export type UploadChunkDoc = Omit<IRBUploadChunk, "data"> & {
10
+ data: Buffer;
11
+ };
12
+ export declare const getChunkSizeBytes: () => number;
13
+ export declare const getMaxClientUploadBytesPerSecond: () => number | null;
14
+ export declare const getSessionTtlMs: () => number;
15
+ export declare const getRawBodyLimitBytes: (chunkSizeBytes: number) => number;
16
+ export declare const getBucketName: () => string;
17
+ export declare const getUserId: (ctx: Ctx<SessionUser>) => string | null;
18
+ export declare const getTenantId: (ctx: Ctx<SessionUser>) => string | null;
19
+ export declare const computeSha256Hex: (data: Buffer) => string;
20
+ export declare const normalizeSha256Hex: (value: string) => string;
21
+ export declare const getModelCtx: (_ctx: Ctx<SessionUser>, tenantId: string) => LoadModelCtx;
22
+ export declare const toBufferPayload: (payload: unknown) => Buffer | null;
23
+ export declare const ensureUploadIndexes: (UploadSession: Model<UploadSessionDoc>, UploadChunk: Model<UploadChunkDoc>) => Promise<void>;
24
+ export declare const getUploadKeyHash: (ctx: Ctx<SessionUser>) => string | null;
25
+ export declare const getOwnershipSelector: (ctx: Ctx<SessionUser>, session: Pick<UploadSessionDoc, "userId" | "ownerKeyHash">) => {
26
+ userId?: string;
27
+ ownerKeyHash?: string;
28
+ } | null;
29
+ //# sourceMappingURL=shared.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"shared.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/shared.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAClC,OAAO,EACL,KAAK,cAAc,EACnB,KAAK,gBAAgB,EACrB,KAAK,YAAY,EAClB,MAAM,aAAa,CAAA;AACpB,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,UAAU,CAAA;AAGrC,MAAM,MAAM,WAAW,GAAG;IACxB,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,iBAAiB,CAAC,EAAE,MAAM,CAAA;CAC3B,CAAA;AAED,MAAM,MAAM,gBAAgB,GAAG,gBAAgB,CAAA;AAC/C,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AAoB5E,eAAO,MAAM,iBAAiB,QAAO,MAIpC,CAAA;AAED,eAAO,MAAM,gCAAgC,QAAO,MAAM,GAAG,IAG5D,CAAA;AAED,eAAO,MAAM,eAAe,QAAO,MAGlC,CAAA;AAED,eAAO,MAAM,oBAAoB,GAAI,gBAAgB,MAAM,KAAG,MAAsC,CAAA;AAEpG,eAAO,MAAM,aAAa,QAAO,MAAsE,CAAA;AAEvG,eAAO,MAAM,SAAS,GAAI,KAAK,GAAG,CAAC,WAAW,CAAC,KAAG,MAAM,GAAG,IAK1D,CAAA;AAED,eAAO,MAAM,WAAW,GAAI,KAAK,GAAG,CAAC,WAAW,CAAC,KAAG,MAAM,GAAG,IAgB5D,CAAA;AAED,eAAO,MAAM,gBAAgB,GAAI,MAAM,MAAM,KAAG,MAAyD,CAAA;AAEzG,eAAO,MAAM,kBAAkB,GAAI,OAAO,MAAM,KAAG,MAAoC,CAAA;AAEvF,eAAO,MAAM,WAAW,GAAI,MAAM,GAAG,CAAC,WAAW,CAAC,EAAE,UAAU,MAAM,KAAG,YAQrE,CAAA;AAEF,eAAO,MAAM,eAAe,GAAI,SAAS,OAAO,KAAG,MAAM,GAAG,IAI3D,CAAA;AAED,eAAO,MAAM,mBAAmB,GAC9B,eAAe,KAAK,CAAC,gBAAgB,CAAC,EACtC,aAAa,KAAK,CAAC,cAAc,CAAC,KACjC,OAAO,CAAC,IAAI,CAUd,CAAA;AAQD,eAAO,MAAM,gBAAgB,GAAI,KAAK,GAAG,CAAC,WAAW,CAAC,KAAG,MAAM,GAAG,IAIjE,CAAA;AAWD,eAAO,MAAM,oBAAoB,GAC/B,KAAK,GAAG,CAAC,WAAW,CAAC,EACrB,SAAS,IAAI,CAAC,gBAAgB,EAAE,QAAQ,GAAG,cAAc,CAAC,KACzD;IAAE,MAAM,CAAC,EAAE,MAAM,CAAC;IAAC,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,IAe/C,CAAA"}
@@ -0,0 +1,4 @@
1
+ import { Api } from '../../../../../api/src';
2
+ declare const _default: (api: Api) => void;
3
+ export default _default;
4
+ //# sourceMappingURL=handler.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/files/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;yBAQlB,KAAK,GAAG;AAAxB,wBAGC"}
@@ -0,0 +1,9 @@
1
+ import { ApiHandler } from '../../../../../../api/src';
2
+ import { SessionUser } from '../../file-uploads/shared';
3
+ type DeleteResponsePayload = {
4
+ ok: boolean;
5
+ error?: string;
6
+ };
7
+ export declare const deleteFile: ApiHandler<Record<string, never>, DeleteResponsePayload, SessionUser>;
8
+ export {};
9
+ //# sourceMappingURL=deleteFile.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"deleteFile.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/deleteFile.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,EAA8B,KAAK,WAAW,EAAE,MAAM,2BAA2B,CAAA;AAGxF,KAAK,qBAAqB,GAAG;IAC3B,EAAE,EAAE,OAAO,CAAA;IACX,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,qBAAqB,EAAE,WAAW,CAyC5F,CAAA"}
@@ -0,0 +1,3 @@
1
+ import { ApiHandler } from '../../../../../../api/src';
2
+ export declare const getFile: ApiHandler<Record<string, never>, Record<string, never>>;
3
+ //# sourceMappingURL=getFile.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getFile.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/getFile.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAezC,eAAO,MAAM,OAAO,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAmF5E,CAAA"}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=getFile.test.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getFile.test.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/getFile.test.ts"],"names":[],"mappings":""}
@@ -0,0 +1,4 @@
1
+ export declare const Route = "/api/rb/files/:fileId";
2
+ export declare const GetRoute = "/api/rb/files/:fileId";
3
+ export declare const DeleteRoute = "/api/rb/files/:fileId";
4
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/files/index.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,KAAK,0BAA0B,CAAA;AAE5C,eAAO,MAAM,QAAQ,0BAAQ,CAAA;AAC7B,eAAO,MAAM,WAAW,0BAAQ,CAAA"}
@@ -0,0 +1,2 @@
1
+ export declare const routes: Record<string, unknown>;
2
+ //# sourceMappingURL=routes.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"routes.d.ts","sourceRoot":"","sources":["../../src/uploads/routes.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,MAAM,yBAKb,CAAA"}
@@ -0,0 +1,2 @@
1
+ export * from './uploads/routes';
2
+ //# sourceMappingURL=uploads.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"uploads.d.ts","sourceRoot":"","sources":["../src/uploads.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAA"}
@@ -0,0 +1,9 @@
1
+ const routes = Object.entries({
2
+ .../* @__PURE__ */ Object.assign({ "./api/file-uploads/handler.ts": () => import("./handler-CGko2pJM.js"), "./api/files/handler.ts": () => import("./handler-D6oN38vE.js") })
3
+ }).reduce((acc, [path, mod]) => {
4
+ acc[path.replace("./api/", "@rpcbase/server/uploads/api/")] = mod;
5
+ return acc;
6
+ }, {});
7
+ export {
8
+ routes
9
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@rpcbase/server",
3
- "version": "0.472.0",
3
+ "version": "0.474.0",
4
4
  "type": "module",
5
5
  "files": [
6
6
  "dist"
@@ -17,6 +17,11 @@
17
17
  "types": "./dist/rts.d.ts",
18
18
  "import": "./dist/rts.js",
19
19
  "default": "./dist/rts.js"
20
+ },
21
+ "./uploads": {
22
+ "types": "./dist/uploads.d.ts",
23
+ "import": "./dist/uploads.js",
24
+ "default": "./dist/uploads.js"
20
25
  }
21
26
  },
22
27
  "scripts": {