@rpcbase/server 0.472.0 → 0.473.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/handler-CTRE1McR.js +517 -0
- package/dist/handler-ybYk2VTq.js +109 -0
- package/dist/{index-BSIupjlE.js → index-dlSIqvl2.js} +1 -1
- package/dist/index.js +1 -1
- package/dist/rts/api/changes/handler.d.ts.map +1 -1
- package/dist/rts.js +2 -2
- package/dist/{handler-DHunTqwt.js → schemas-CyxqObur.js} +6 -106
- package/dist/uploads/api/file-uploads/handler.d.ts +5 -0
- package/dist/uploads/api/file-uploads/handler.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/handlers/completeUpload.d.ts +5 -0
- package/dist/uploads/api/file-uploads/handlers/completeUpload.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/handlers/getStatus.d.ts +5 -0
- package/dist/uploads/api/file-uploads/handlers/getStatus.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/handlers/initUpload.d.ts +5 -0
- package/dist/uploads/api/file-uploads/handlers/initUpload.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/handlers/uploadChunk.d.ts +9 -0
- package/dist/uploads/api/file-uploads/handlers/uploadChunk.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/index.d.ts +42 -0
- package/dist/uploads/api/file-uploads/index.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/middleware/rawBodyParser.d.ts +4 -0
- package/dist/uploads/api/file-uploads/middleware/rawBodyParser.d.ts.map +1 -0
- package/dist/uploads/api/file-uploads/shared.d.ts +28 -0
- package/dist/uploads/api/file-uploads/shared.d.ts.map +1 -0
- package/dist/uploads/routes.d.ts +2 -0
- package/dist/uploads/routes.d.ts.map +1 -0
- package/dist/uploads.d.ts +2 -0
- package/dist/uploads.d.ts.map +1 -0
- package/dist/uploads.js +9 -0
- package/package.json +6 -1
package/dist/handler-CTRE1McR.js
ADDED
@@ -0,0 +1,517 @@
import { loadModel, getTenantFilesystemDb } from "@rpcbase/db";
import { GridFSBucket, ObjectId } from "mongodb";
import { createHash, timingSafeEqual, randomBytes } from "node:crypto";
import { o as object, n as number, s as string, b as boolean, a as array, _ as _enum } from "./schemas-CyxqObur.js";
const DEFAULT_CHUNK_SIZE_BYTES = 5 * 1024 * 1024;
const MAX_CHUNK_SIZE_BYTES = 15 * 1024 * 1024;
const DEFAULT_SESSION_TTL_S = 60 * 60 * 24;
const ensuredIndexDbNames = /* @__PURE__ */ new Set();
const parseOptionalPositiveInt = (rawValue) => {
  if (typeof rawValue !== "string") return null;
  const normalized = rawValue.trim();
  if (!normalized) return null;
  const parsed = Number(normalized);
  if (!Number.isFinite(parsed) || parsed <= 0) return null;
  return Math.floor(parsed);
};
const getChunkSizeBytes = () => {
  const configured = parseOptionalPositiveInt(process.env.RB_UPLOAD_CHUNK_SIZE_BYTES);
  const resolved = configured ?? DEFAULT_CHUNK_SIZE_BYTES;
  return Math.min(MAX_CHUNK_SIZE_BYTES, resolved);
};
const getSessionTtlMs = () => {
  const ttlSeconds = parseOptionalPositiveInt(process.env.RB_UPLOAD_SESSION_TTL_S) ?? DEFAULT_SESSION_TTL_S;
  return ttlSeconds * 1e3;
};
const getRawBodyLimitBytes = (chunkSizeBytes) => chunkSizeBytes + 1024 * 1024;
const getBucketName = () => (process.env.RB_FILESYSTEM_BUCKET_NAME ?? "").trim() || "fs";
const getUserId = (ctx) => {
  const raw = ctx.req.session?.user?.id;
  if (typeof raw !== "string") return null;
  const normalized = raw.trim();
  return normalized ? normalized : null;
};
const getTenantId = (ctx) => {
  const rawSession = ctx.req.session?.user?.current_tenant_id;
  const sessionTenantId = typeof rawSession === "string" ? rawSession.trim() : "";
  const userId = getUserId(ctx);
  if (userId) return sessionTenantId || null;
  if (sessionTenantId) return sessionTenantId;
  const rawQuery = ctx.req.query?.["rb-tenant-id"];
  const queryTenantId = Array.isArray(rawQuery) ? rawQuery[0] : rawQuery;
  if (typeof queryTenantId !== "string") return null;
  const normalized = queryTenantId.trim();
  return normalized ? normalized : null;
};
const computeSha256Hex = (data) => createHash("sha256").update(data).digest("hex");
const normalizeSha256Hex = (value) => value.trim().toLowerCase();
const getModelCtx = (_ctx, tenantId) => ({
  req: {
    session: {
      user: {
        current_tenant_id: tenantId
      }
    }
  }
});
const toBufferPayload = (payload) => {
  if (Buffer.isBuffer(payload)) return payload;
  if (payload instanceof Uint8Array) return Buffer.from(payload);
  return null;
};
const ensureUploadIndexes = async (UploadSession, UploadChunk) => {
  const dbName = String(UploadSession?.db?.name ?? "");
  if (dbName && ensuredIndexDbNames.has(dbName)) return;
  await Promise.all([
    UploadSession.createIndexes(),
    UploadChunk.createIndexes()
  ]);
  if (dbName) ensuredIndexDbNames.add(dbName);
};
const normalizeUploadKey = (raw) => {
  if (typeof raw !== "string") return null;
  const normalized = raw.trim();
  return normalized ? normalized : null;
};
const getUploadKeyHash = (ctx) => {
  const uploadKey = normalizeUploadKey(ctx.req.get("X-Upload-Key"));
  if (!uploadKey) return null;
  return computeSha256Hex(Buffer.from(uploadKey));
};
const timingSafeEqualHex = (left, right) => {
  if (left.length !== right.length) return false;
  try {
    return timingSafeEqual(Buffer.from(left, "hex"), Buffer.from(right, "hex"));
  } catch {
    return false;
  }
};
const getOwnershipSelector = (ctx, session) => {
  if (session.userId) {
    const userId = getUserId(ctx);
    if (!userId || userId !== session.userId) return null;
    return { userId: session.userId };
  }
  if (session.ownerKeyHash) {
    const uploadKeyHash = getUploadKeyHash(ctx);
    if (!uploadKeyHash) return null;
    if (!timingSafeEqualHex(session.ownerKeyHash, uploadKeyHash)) return null;
    return { ownerKeyHash: session.ownerKeyHash };
  }
  return null;
};
const waitForStreamFinished = async (stream) => new Promise((resolve, reject) => {
  stream.once("finish", resolve);
  stream.once("error", reject);
});
const writeToStream = async (stream, chunk) => {
  const ok = stream.write(chunk);
  if (ok) return;
  await new Promise((resolve, reject) => {
    const onDrain = () => {
      cleanup();
      resolve();
    };
    const onError = (error) => {
      cleanup();
      reject(error);
    };
    const cleanup = () => {
      stream.off("drain", onDrain);
      stream.off("error", onError);
    };
    stream.on("drain", onDrain);
    stream.on("error", onError);
  });
};
const abortUploadStream = async (stream) => {
  if (!stream) return;
  if (typeof stream.abort === "function") {
    try {
      await stream.abort();
      return;
    } catch {
    }
  }
  try {
    stream.destroy();
  } catch {
  }
};
const completeUpload = async (_payload, ctx) => {
  const tenantId = getTenantId(ctx);
  if (!tenantId) {
    ctx.res.status(400);
    return { ok: false, error: "tenant_missing" };
  }
  const uploadId = String(ctx.req.params?.uploadId ?? "").trim();
  if (!uploadId) {
    ctx.res.status(400);
    return { ok: false, error: "invalid_upload_id" };
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
    loadModel("RBUploadSession", modelCtx),
    loadModel("RBUploadChunk", modelCtx)
  ]);
  const existing = await UploadSession.findOne({ _id: uploadId }).lean();
  if (!existing) {
    ctx.res.status(404);
    return { ok: false, error: "not_found" };
  }
  const ownershipSelector = getOwnershipSelector(ctx, existing);
  if (!ownershipSelector) {
    ctx.res.status(401);
    return { ok: false, error: "unauthorized" };
  }
  if (existing.status === "done" && existing.fileId) {
    return { ok: true, fileId: existing.fileId };
  }
  const locked = await UploadSession.findOneAndUpdate(
    { _id: uploadId, ...ownershipSelector, status: "uploading" },
    { $set: { status: "assembling" }, $unset: { error: "" } },
    { new: true }
  ).lean();
  if (!locked) {
    ctx.res.status(409);
    return { ok: false, error: "not_uploading" };
  }
  await ensureUploadIndexes(UploadSession, UploadChunk);
  const fsDb = await getTenantFilesystemDb(tenantId);
  const nativeDb = fsDb.db;
  if (!nativeDb) {
    await UploadSession.updateOne(
      { _id: uploadId, ...ownershipSelector },
      { $set: { status: "error", error: "filesystem_db_unavailable" } }
    );
    ctx.res.status(500);
    return { ok: false, error: "assembly_failed" };
  }
  const bucketName = getBucketName();
  const bucket = new GridFSBucket(nativeDb, { bucketName });
  const lockedUserId = typeof locked.userId === "string" ? locked.userId : void 0;
  const uploadStream = bucket.openUploadStream(locked.filename, {
    metadata: {
      uploadId,
      tenantId,
      mimeType: locked.mimeType,
      totalSize: locked.totalSize,
      ...lockedUserId ? { userId: lockedUserId } : {}
    }
  });
  try {
    const cursor = UploadChunk.find({ uploadId }).sort({ index: 1 }).cursor();
    let expectedIndex = 0;
    try {
      for await (const doc of cursor) {
        const chunkDoc = doc;
        if (chunkDoc.index !== expectedIndex) {
          throw new Error("missing_chunks");
        }
        await writeToStream(uploadStream, chunkDoc.data);
        expectedIndex += 1;
      }
    } finally {
      try {
        await cursor.close();
      } catch {
      }
    }
    if (expectedIndex !== locked.chunksTotal) {
      throw new Error("missing_chunks");
    }
    const finished = waitForStreamFinished(uploadStream);
    uploadStream.end();
    await finished;
    const fileId = String(uploadStream.id ?? "");
    if (!fileId) {
      throw new Error("missing_file_id");
    }
    await UploadSession.updateOne(
      { _id: uploadId, ...ownershipSelector },
      { $set: { status: "done", fileId }, $unset: { error: "" } }
    );
    try {
      await UploadChunk.deleteMany({ uploadId });
    } catch {
    }
    return { ok: true, fileId };
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    await abortUploadStream(uploadStream);
    if (message === "missing_chunks") {
      await UploadSession.updateOne(
        { _id: uploadId, ...ownershipSelector },
        { $set: { status: "uploading" } }
      );
      ctx.res.status(409);
      return { ok: false, error: "missing_chunks" };
    }
    await UploadSession.updateOne(
      { _id: uploadId, ...ownershipSelector },
      { $set: { status: "error", error: message } }
    );
    ctx.res.status(500);
    return { ok: false, error: "assembly_failed" };
  }
};
const getStatus = async (_payload, ctx) => {
  const tenantId = getTenantId(ctx);
  if (!tenantId) {
    ctx.res.status(400);
    return { ok: false, error: "tenant_missing" };
  }
  const uploadId = String(ctx.req.params?.uploadId ?? "").trim();
  if (!uploadId) {
    ctx.res.status(400);
    return { ok: false, error: "invalid_upload_id" };
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
    loadModel("RBUploadSession", modelCtx),
    loadModel("RBUploadChunk", modelCtx)
  ]);
  const session = await UploadSession.findOne({ _id: uploadId }).lean();
  if (!session) {
    ctx.res.status(404);
    return { ok: false, error: "not_found" };
  }
  const ownershipSelector = getOwnershipSelector(ctx, session);
  if (!ownershipSelector) {
    ctx.res.status(401);
    return { ok: false, error: "unauthorized" };
  }
  const receivedDocs = await UploadChunk.find(
    { uploadId },
    { index: 1, _id: 0 }
  ).sort({ index: 1 }).lean();
  const received = receivedDocs.map((d) => Number(d?.index ?? -1)).filter((n) => Number.isInteger(n) && n >= 0);
  return {
    ok: true,
    status: session.status,
    chunkSize: session.chunkSize,
    chunksTotal: session.chunksTotal,
    received,
    ...session.fileId ? { fileId: session.fileId } : {}
  };
};
const InitRoute = "/api/rb/file-uploads";
const ChunkRoute = "/api/rb/file-uploads/:uploadId/chunks/:index";
const StatusRoute = "/api/rb/file-uploads/:uploadId/status";
const CompleteRoute = "/api/rb/file-uploads/:uploadId/complete";
const initRequestSchema = object({
  filename: string().min(1),
  mimeType: string().min(1),
  totalSize: number().int().min(1)
});
object({
  ok: boolean(),
  error: string().optional(),
  uploadId: string().optional(),
  uploadKey: string().optional(),
  chunkSize: number().int().optional(),
  chunksTotal: number().int().optional()
});
object({
  ok: boolean(),
  error: string().optional(),
  status: _enum(["uploading", "assembling", "done", "error"]).optional(),
  chunkSize: number().int().optional(),
  chunksTotal: number().int().optional(),
  received: array(number().int().min(0)).optional(),
  fileId: string().optional()
});
object({
  ok: boolean(),
  error: string().optional(),
  fileId: string().optional()
});
const initUpload = async (payload, ctx) => {
  const tenantId = getTenantId(ctx);
  if (!tenantId) {
    ctx.res.status(400);
    return { ok: false, error: "tenant_missing" };
  }
  const userId = getUserId(ctx);
  const parsed = initRequestSchema.safeParse(payload ?? {});
  if (!parsed.success) {
    ctx.res.status(400);
    return { ok: false, error: "invalid_payload" };
  }
  const chunkSize = getChunkSizeBytes();
  const { filename, mimeType, totalSize } = parsed.data;
  const chunksTotal = Math.ceil(totalSize / chunkSize);
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
    loadModel("RBUploadSession", modelCtx),
    loadModel("RBUploadChunk", modelCtx)
  ]);
  await ensureUploadIndexes(UploadSession, UploadChunk);
  const uploadId = new ObjectId().toString();
  const now = Date.now();
  const expiresAt = new Date(now + getSessionTtlMs());
  const uploadKey = userId ? null : randomBytes(32).toString("base64url");
  const ownerKeyHash = uploadKey ? computeSha256Hex(Buffer.from(uploadKey)) : void 0;
  await UploadSession.create({
    _id: uploadId,
    ...userId ? { userId } : {},
    ...ownerKeyHash ? { ownerKeyHash } : {},
    filename,
    mimeType,
    totalSize,
    chunkSize,
    chunksTotal,
    status: "uploading",
    createdAt: new Date(now),
    expiresAt
  });
  return {
    ok: true,
    uploadId,
    chunkSize,
    chunksTotal,
    ...uploadKey ? { uploadKey } : {}
  };
};
const uploadChunk = async (payload, ctx) => {
  const tenantId = getTenantId(ctx);
  if (!tenantId) {
    ctx.res.status(400);
    return { ok: false, error: "tenant_missing" };
  }
  const uploadId = String(ctx.req.params?.uploadId ?? "").trim();
  const indexRaw = String(ctx.req.params?.index ?? "").trim();
  const index = Number(indexRaw);
  if (!uploadId || !Number.isInteger(index) || index < 0) {
    ctx.res.status(400);
    return { ok: false, error: "invalid_chunk_ref" };
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
    loadModel("RBUploadSession", modelCtx),
    loadModel("RBUploadChunk", modelCtx)
  ]);
  const session = await UploadSession.findOne({ _id: uploadId }).lean();
  if (!session) {
    ctx.res.status(404);
    return { ok: false, error: "not_found" };
  }
  const ownershipSelector = getOwnershipSelector(ctx, session);
  if (!ownershipSelector) {
    ctx.res.status(401);
    return { ok: false, error: "unauthorized" };
  }
  if (session.status !== "uploading") {
    ctx.res.status(409);
    return { ok: false, error: "not_uploading" };
  }
  if (index >= session.chunksTotal) {
    ctx.res.status(400);
    return { ok: false, error: "index_out_of_range" };
  }
  const data = toBufferPayload(payload);
  if (!data) {
    ctx.res.status(400);
    return { ok: false, error: "invalid_body" };
  }
  const expectedSize = index === session.chunksTotal - 1 ? session.totalSize - session.chunkSize * (session.chunksTotal - 1) : session.chunkSize;
  if (data.length > expectedSize) {
    ctx.res.status(413);
    return { ok: false, error: "chunk_too_large" };
  }
  if (data.length !== expectedSize) {
    ctx.res.status(400);
    return { ok: false, error: "invalid_chunk_size" };
  }
  const checksumHeader = ctx.req.get("X-Chunk-SHA256");
  const sha256 = checksumHeader ? computeSha256Hex(data) : void 0;
  if (checksumHeader) {
    const expectedSha256 = normalizeSha256Hex(checksumHeader);
    if (sha256 !== expectedSha256) {
      ctx.res.status(400);
      return { ok: false, error: "checksum_mismatch" };
    }
  }
  await ensureUploadIndexes(UploadSession, UploadChunk);
  await UploadChunk.updateOne(
    { uploadId, index },
    {
      $set: {
        uploadId,
        index,
        data,
        size: data.length,
        sha256,
        expiresAt: session.expiresAt
      },
      $setOnInsert: {
        createdAt: /* @__PURE__ */ new Date()
      }
    },
    { upsert: true }
  );
  ctx.res.status(204);
  return { ok: true };
};
const rawBodyParser = ({
  limitBytes
}) => {
  return (req, res, next) => {
    const contentType = typeof req?.headers?.["content-type"] === "string" ? String(req.headers["content-type"]) : "";
    if (!contentType.includes("application/octet-stream")) {
      next();
      return;
    }
    let total = 0;
    const chunks = [];
    let done = false;
    const cleanup = () => {
      req.off("data", onData);
      req.off("end", onEnd);
      req.off("error", onError);
      req.off("aborted", onAborted);
    };
    const finish = (error) => {
      if (done) return;
      done = true;
      cleanup();
      if (error) {
        next(error);
        return;
      }
      req.body = Buffer.concat(chunks, total);
      next();
    };
    const onData = (chunk) => {
      if (done) return;
      const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
      total += buffer.length;
      if (total > limitBytes) {
        done = true;
        cleanup();
        req.destroy();
        res.status(413).json({ ok: false, error: "chunk_too_large" });
        return;
      }
      chunks.push(buffer);
    };
    const onEnd = () => finish();
    const onError = (err) => finish(err);
    const onAborted = () => finish(new Error("request_aborted"));
    req.on("data", onData);
    req.on("end", onEnd);
    req.on("error", onError);
    req.on("aborted", onAborted);
  };
};
const handler = (api) => {
  const chunkSizeBytes = getChunkSizeBytes();
  api.use(InitRoute, rawBodyParser({ limitBytes: getRawBodyLimitBytes(chunkSizeBytes) }));
  api.post(InitRoute, initUpload);
  api.put(ChunkRoute, uploadChunk);
  api.get(StatusRoute, getStatus);
  api.post(CompleteRoute, completeUpload);
};
export {
  handler as default
};
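The handler above defines the whole chunked-upload flow: POST /api/rb/file-uploads opens a session (chunk size defaults to 5 MiB and is capped at 15 MiB, so a 12 MiB file yields chunksTotal = 3 with a final 2 MiB chunk), PUT /api/rb/file-uploads/:uploadId/chunks/:index stores one exact-size application/octet-stream chunk (optionally verified against an X-Chunk-SHA256 header), and POST .../complete assembles the chunks into GridFS. A minimal client-side sketch of that flow follows; the routes, headers, and response fields come from the handler above, but the fetch wrapper itself is illustrative and not part of the package.

// Illustrative TypeScript client for the endpoints registered above (an assumption, not shipped code).
const uploadFile = async (file: File): Promise<string> => {
  const init = await fetch("/api/rb/file-uploads", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ filename: file.name, mimeType: file.type, totalSize: file.size })
  }).then((r) => r.json());
  if (!init.ok) throw new Error(init.error ?? "init_failed");
  // uploadKey is only issued for anonymous sessions; it must be echoed on every later call.
  const ownerHeaders: Record<string, string> = init.uploadKey ? { "X-Upload-Key": init.uploadKey } : {};
  for (let index = 0; index < init.chunksTotal; index += 1) {
    // Every chunk must be exactly chunkSize bytes except the last, which is the remainder.
    const chunk = file.slice(index * init.chunkSize, Math.min((index + 1) * init.chunkSize, file.size));
    const res = await fetch(`/api/rb/file-uploads/${init.uploadId}/chunks/${index}`, {
      method: "PUT",
      headers: { "Content-Type": "application/octet-stream", ...ownerHeaders },
      body: chunk
    });
    if (!res.ok) throw new Error(`chunk ${index} failed with ${res.status}`);
  }
  const complete = await fetch(`/api/rb/file-uploads/${init.uploadId}/complete`, {
    method: "POST",
    headers: ownerHeaders
  }).then((r) => r.json());
  if (!complete.ok) throw new Error(complete.error ?? "assembly_failed");
  return complete.fileId; // GridFS file id recorded on the upload session
};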
package/dist/handler-ybYk2VTq.js
ADDED
@@ -0,0 +1,109 @@
import { loadModel, ZRBRtsChangeOp } from "@rpcbase/db";
import { o as object, a as array, s as string, n as number, b as boolean, _ as _enum } from "./schemas-CyxqObur.js";
const Route = "/api/rb/rts/changes";
const requestSchema = object({
  sinceSeq: number().int().min(0).default(0),
  limit: number().int().min(1).max(5e3).default(2e3),
  modelNames: array(string().min(1)).optional()
});
object({
  ok: boolean(),
  needsFullResync: boolean().optional(),
  earliestSeq: number().int().min(0).optional(),
  latestSeq: number().int().min(0),
  changes: array(object({
    seq: number().int().min(1),
    modelName: string().min(1),
    op: _enum(["delete", "reset_model"]),
    docId: string().optional()
  }))
});
const getTenantId = (ctx) => {
  const raw = ctx.req.query?.["rb-tenant-id"];
  const queryTenantId = Array.isArray(raw) ? raw[0] : raw;
  if (typeof queryTenantId === "string" && queryTenantId.trim()) return queryTenantId.trim();
  const sessionTenantId = ctx.req.session?.user?.current_tenant_id;
  if (typeof sessionTenantId === "string" && sessionTenantId.trim()) return sessionTenantId.trim();
  return null;
};
const ensureAuthorized = (ctx, tenantId) => {
  const userId = ctx.req.session?.user?.id;
  if (!userId) return null;
  const signedInTenants = ctx.req.session?.user?.signed_in_tenants;
  const currentTenantId = ctx.req.session?.user?.current_tenant_id;
  const hasTenantAccessFromList = Array.isArray(signedInTenants) && signedInTenants.includes(tenantId);
  const normalizedCurrentTenantId = typeof currentTenantId === "string" ? currentTenantId.trim() : "";
  const hasTenantAccessFromCurrent = Boolean(normalizedCurrentTenantId) && normalizedCurrentTenantId === tenantId;
  if (!hasTenantAccessFromList && !hasTenantAccessFromCurrent) return null;
  return userId;
};
const getModelCtx = (_ctx, tenantId) => ({
  req: {
    session: {
      user: {
        current_tenant_id: tenantId
      }
    }
  }
});
const isRtsChangeRecord = (value) => {
  if (!value || typeof value !== "object") return false;
  const obj = value;
  const isOp = ZRBRtsChangeOp.safeParse(obj.op).success;
  return typeof obj.seq === "number" && typeof obj.modelName === "string" && isOp;
};
const changesHandler = async (payload, ctx) => {
  const parsed = requestSchema.safeParse(payload ?? {});
  if (!parsed.success) {
    ctx.res.status(400);
    return { ok: false, latestSeq: 0, changes: [] };
  }
  const tenantId = getTenantId(ctx);
  if (!tenantId) {
    ctx.res.status(400);
    return { ok: false, latestSeq: 0, changes: [] };
  }
  const userId = ensureAuthorized(ctx, tenantId);
  if (!userId) {
    ctx.res.status(401);
    return { ok: false, latestSeq: 0, changes: [] };
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [RtsChange, RtsCounter] = await Promise.all([
    loadModel("RBRtsChange", modelCtx),
    loadModel("RBRtsCounter", modelCtx)
  ]);
  const counter = await RtsCounter.findOne({ _id: "rts" }, { seq: 1 }).lean();
  const latestSeq = Number(counter?.seq ?? 0) || 0;
  const { sinceSeq, limit, modelNames } = parsed.data;
  const earliestSelector = {};
  if (Array.isArray(modelNames) && modelNames.length) {
    earliestSelector.modelName = { $in: modelNames };
  }
  const earliest = await RtsChange.findOne(earliestSelector, { seq: 1 }).sort({ seq: 1 }).lean();
  const earliestSeq = earliest?.seq ? Number(earliest.seq) : void 0;
  const needsFullResync = typeof earliestSeq === "number" && sinceSeq < earliestSeq - 1;
  const selector = { seq: { $gt: sinceSeq } };
  if (Array.isArray(modelNames) && modelNames.length) {
    selector.modelName = { $in: modelNames };
  }
  const changes = await RtsChange.find(selector, { _id: 0, seq: 1, modelName: 1, op: 1, docId: 1 }).sort({ seq: 1 }).limit(limit).lean();
  return {
    ok: true,
    needsFullResync: needsFullResync || void 0,
    earliestSeq,
    latestSeq,
    changes: Array.isArray(changes) ? changes.filter(isRtsChangeRecord).map((c) => ({
      seq: Number(c.seq),
      modelName: String(c.modelName),
      op: c.op,
      docId: c.docId ? String(c.docId) : void 0
    })) : []
  };
};
const handler = (api) => {
  api.post(Route, changesHandler);
};
export {
  handler as default
};
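This standalone chunk replaces the changes handler that was previously bundled into the shared schema chunk (removed further below); it now loads the RBRtsChange/RBRtsCounter models and validates the op field with ZRBRtsChangeOp. A client would poll POST /api/rb/rts/changes with its last applied sequence number. A hedged sketch, assuming a signed-in session cookie and the rb-tenant-id query parameter used above; the surrounding call is illustrative only:

// Illustrative polling call; request/response fields mirror requestSchema and the handler's return shape.
const fetchChanges = async (tenantId: string, sinceSeq: number, modelNames?: string[]) => {
  const res = await fetch(`/api/rb/rts/changes?rb-tenant-id=${encodeURIComponent(tenantId)}`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    credentials: "include", // the handler authorizes via the session user
    body: JSON.stringify({ sinceSeq, limit: 2000, modelNames })
  });
  const body = await res.json();
  if (!body.ok) throw new Error(`changes request failed (${res.status})`);
  if (body.needsFullResync) {
    // sinceSeq is older than the earliest retained change; resync, then resume from latestSeq.
    return { resync: true, nextSeq: body.latestSeq, changes: [] };
  }
  const last = body.changes[body.changes.length - 1];
  return { resync: false, nextSeq: last ? last.seq : sinceSeq, changes: body.changes };
};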
package/dist/index-BSIupjlE.js → index-dlSIqvl2.js
RENAMED
@@ -5,7 +5,7 @@ const TENANT_ID_QUERY_PARAM = "rb-tenant-id";
  const USER_ID_HEADER = "rb-user-id";
  const QUERY_KEY_MAX_LEN = 4096;
  const QUERY_MAX_LIMIT = 4096;
- const INTERNAL_MODEL_NAMES = /* @__PURE__ */ new Set(["
+ const INTERNAL_MODEL_NAMES = /* @__PURE__ */ new Set(["RBRtsChange", "RBRtsCounter"]);
  const DEFAULT_MAX_PAYLOAD_BYTES = 1024 * 1024;
  const DEFAULT_MAX_SUBSCRIPTIONS_PER_SOCKET = 256;
  const DEFAULT_DISPATCH_DEBOUNCE_MS = 25;
package/dist/index.js
CHANGED
@@ -18,7 +18,7 @@ import { StrictMode, createElement } from "react";
  import { renderToPipeableStream, renderToStaticMarkup } from "react-dom/server";
  import { jsx } from "react/jsx-runtime";
  import { createPath, matchRoutes, parsePath, createStaticRouter, StaticRouterProvider } from "@rpcbase/router";
- import { i, n, r } from "./index-
+ import { i, n, r } from "./index-dlSIqvl2.js";
  function getDefaultExportFromCjs(x) {
    return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, "default") ? x["default"] : x;
  }
package/dist/rts/api/changes/handler.d.ts.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/rts/api/changes/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAmB,MAAM,cAAc,CAAA;AAOnD,KAAK,WAAW,GAAG;IACjB,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;CAC7B,CAAA;
+ {"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/rts/api/changes/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAmB,MAAM,cAAc,CAAA;AAOnD,KAAK,WAAW,GAAG;IACjB,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAA;CAC7B,CAAA;yBA0He,KAAK,GAAG,CAAC,WAAW,CAAC;AAArC,wBAEC"}
package/dist/rts.js
CHANGED
@@ -1,6 +1,6 @@
- import { i, n, r } from "./index-
+ import { i, n, r } from "./index-dlSIqvl2.js";
  const routes = Object.entries({
-   .../* @__PURE__ */ Object.assign({ "./api/changes/handler.ts": () => import("./handler-
+   .../* @__PURE__ */ Object.assign({ "./api/changes/handler.ts": () => import("./handler-ybYk2VTq.js") })
  }).reduce((acc, [path, mod]) => {
    acc[path.replace("./api/", "@rpcbase/server/rts/api/")] = mod;
    return acc;
package/dist/handler-DHunTqwt.js → schemas-CyxqObur.js
RENAMED
@@ -1,4 +1,3 @@
- import { loadModel } from "@rpcbase/db";
  function $constructor(name, initializer2, params) {
    function init(inst, def) {
      if (!inst._zod) {
@@ -3275,110 +3274,11 @@ function refine(fn, _params = {}) {
  function superRefine(fn) {
    return _superRefine(fn);
  }
- const Route = "/api/rb/rts/changes";
- const requestSchema = object({
-   sinceSeq: number().int().min(0).default(0),
-   limit: number().int().min(1).max(5e3).default(2e3),
-   modelNames: array(string().min(1)).optional()
- });
- object({
-   ok: boolean(),
-   needsFullResync: boolean().optional(),
-   earliestSeq: number().int().min(0).optional(),
-   latestSeq: number().int().min(0),
-   changes: array(object({
-     seq: number().int().min(1),
-     modelName: string().min(1),
-     op: _enum(["delete", "reset_model"]),
-     docId: string().optional()
-   }))
- });
- const getTenantId = (ctx) => {
-   const raw = ctx.req.query?.["rb-tenant-id"];
-   const queryTenantId = Array.isArray(raw) ? raw[0] : raw;
-   if (typeof queryTenantId === "string" && queryTenantId.trim()) return queryTenantId.trim();
-   const sessionTenantId = ctx.req.session?.user?.current_tenant_id;
-   if (typeof sessionTenantId === "string" && sessionTenantId.trim()) return sessionTenantId.trim();
-   return null;
- };
- const ensureAuthorized = (ctx, tenantId) => {
-   const userId = ctx.req.session?.user?.id;
-   if (!userId) return null;
-   const signedInTenants = ctx.req.session?.user?.signed_in_tenants;
-   const currentTenantId = ctx.req.session?.user?.current_tenant_id;
-   const hasTenantAccessFromList = Array.isArray(signedInTenants) && signedInTenants.includes(tenantId);
-   const normalizedCurrentTenantId = typeof currentTenantId === "string" ? currentTenantId.trim() : "";
-   const hasTenantAccessFromCurrent = Boolean(normalizedCurrentTenantId) && normalizedCurrentTenantId === tenantId;
-   if (!hasTenantAccessFromList && !hasTenantAccessFromCurrent) return null;
-   return userId;
- };
- const getModelCtx = (_ctx, tenantId) => ({
-   req: {
-     session: {
-       user: {
-         current_tenant_id: tenantId
-       }
-     }
-   }
- });
- const isRtsChangeRecord = (value) => {
-   if (!value || typeof value !== "object") return false;
-   const obj = value;
-   const isOp = obj.op === "delete" || obj.op === "reset_model";
-   return typeof obj.seq === "number" && typeof obj.modelName === "string" && isOp;
- };
- const changesHandler = async (payload, ctx) => {
-   const parsed = requestSchema.safeParse(payload ?? {});
-   if (!parsed.success) {
-     ctx.res.status(400);
-     return { ok: false, latestSeq: 0, changes: [] };
-   }
-   const tenantId = getTenantId(ctx);
-   if (!tenantId) {
-     ctx.res.status(400);
-     return { ok: false, latestSeq: 0, changes: [] };
-   }
-   const userId = ensureAuthorized(ctx, tenantId);
-   if (!userId) {
-     ctx.res.status(401);
-     return { ok: false, latestSeq: 0, changes: [] };
-   }
-   const modelCtx = getModelCtx(ctx, tenantId);
-   const [RtsChange, RtsCounter] = await Promise.all([
-     loadModel("RtsChange", modelCtx),
-     loadModel("RtsCounter", modelCtx)
-   ]);
-   const counter = await RtsCounter.findOne({ _id: "rts" }, { seq: 1 }).lean();
-   const latestSeq = Number(counter?.seq ?? 0) || 0;
-   const { sinceSeq, limit, modelNames } = parsed.data;
-   const earliestSelector = {};
-   if (Array.isArray(modelNames) && modelNames.length) {
-     earliestSelector.modelName = { $in: modelNames };
-   }
-   const earliest = await RtsChange.findOne(earliestSelector, { seq: 1 }).sort({ seq: 1 }).lean();
-   const earliestSeq = earliest?.seq ? Number(earliest.seq) : void 0;
-   const needsFullResync = typeof earliestSeq === "number" && sinceSeq < earliestSeq - 1;
-   const selector = { seq: { $gt: sinceSeq } };
-   if (Array.isArray(modelNames) && modelNames.length) {
-     selector.modelName = { $in: modelNames };
-   }
-   const changes = await RtsChange.find(selector, { _id: 0, seq: 1, modelName: 1, op: 1, docId: 1 }).sort({ seq: 1 }).limit(limit).lean();
-   return {
-     ok: true,
-     needsFullResync: needsFullResync || void 0,
-     earliestSeq,
-     latestSeq,
-     changes: Array.isArray(changes) ? changes.filter(isRtsChangeRecord).map((c) => ({
-       seq: Number(c.seq),
-       modelName: String(c.modelName),
-       op: c.op,
-       docId: c.docId ? String(c.docId) : void 0
-     })) : []
-   };
- };
- const handler = (api) => {
-   api.post(Route, changesHandler);
- };
  export {
-
+   _enum as _,
+   array as a,
+   boolean as b,
+   number as n,
+   object as o,
+   string as s
  };
package/dist/uploads/api/file-uploads/handler.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"handler.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/handler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAOlC,OAAO,EAA2C,KAAK,WAAW,EAAE,MAAM,UAAU,CAAA;yBAKpE,KAAK,GAAG,CAAC,WAAW,CAAC;AAArC,wBAQC"}
package/dist/uploads/api/file-uploads/handlers/completeUpload.d.ts
ADDED
@@ -0,0 +1,5 @@
import { ApiHandler } from '../../../../../../api/src';
import { SessionUser } from '../shared';
import * as Uploads from "../index";
export declare const completeUpload: ApiHandler<Record<string, never>, Uploads.CompleteResponsePayload, SessionUser>;
//# sourceMappingURL=completeUpload.d.ts.map
package/dist/uploads/api/file-uploads/handlers/completeUpload.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"completeUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/completeUpload.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAQjB,MAAM,WAAW,CAAA;AAiDlB,eAAO,MAAM,cAAc,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,uBAAuB,EAAE,WAAW,CAmJ1G,CAAA"}
package/dist/uploads/api/file-uploads/handlers/getStatus.d.ts
ADDED
@@ -0,0 +1,5 @@
import { ApiHandler } from '../../../../../../api/src';
import { SessionUser } from '../shared';
import * as Uploads from "../index";
export declare const getStatus: ApiHandler<Record<string, never>, Uploads.StatusResponsePayload, SessionUser>;
//# sourceMappingURL=getStatus.d.ts.map
package/dist/uploads/api/file-uploads/handlers/getStatus.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"getStatus.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/getStatus.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAMjB,MAAM,WAAW,CAAA;AAGlB,eAAO,MAAM,SAAS,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,qBAAqB,EAAE,WAAW,CAkDnG,CAAA"}
package/dist/uploads/api/file-uploads/handlers/initUpload.d.ts
ADDED
@@ -0,0 +1,5 @@
import { ApiHandler } from '../../../../../../api/src';
import { SessionUser } from '../shared';
import * as Uploads from "../index";
export declare const initUpload: ApiHandler<Uploads.InitRequestPayload, Uploads.InitResponsePayload, SessionUser>;
//# sourceMappingURL=initUpload.d.ts.map
package/dist/uploads/api/file-uploads/handlers/initUpload.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"initUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/initUpload.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAUjB,MAAM,WAAW,CAAA;AAGlB,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,OAAO,CAAC,kBAAkB,EAAE,OAAO,CAAC,mBAAmB,EAAE,WAAW,CA2DvG,CAAA"}
package/dist/uploads/api/file-uploads/handlers/uploadChunk.d.ts
ADDED
@@ -0,0 +1,9 @@
import { ApiHandler } from '../../../../../../api/src';
import { SessionUser } from '../shared';
type ChunkResponsePayload = {
  ok: boolean;
  error?: string;
};
export declare const uploadChunk: ApiHandler<Buffer, ChunkResponsePayload, SessionUser>;
export {};
//# sourceMappingURL=uploadChunk.d.ts.map
package/dist/uploads/api/file-uploads/handlers/uploadChunk.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"uploadChunk.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/uploadChunk.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,EACL,KAAK,WAAW,EAUjB,MAAM,WAAW,CAAA;AAGlB,KAAK,oBAAoB,GAAG;IAC1B,EAAE,EAAE,OAAO,CAAA;IACX,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,eAAO,MAAM,WAAW,EAAE,UAAU,CAAC,MAAM,EAAE,oBAAoB,EAAE,WAAW,CAqG7E,CAAA"}
package/dist/uploads/api/file-uploads/index.d.ts
ADDED
@@ -0,0 +1,42 @@
import { z } from '../../../../../vite/node_modules/zod';
export declare const InitRoute = "/api/rb/file-uploads";
export declare const ChunkRoute = "/api/rb/file-uploads/:uploadId/chunks/:index";
export declare const StatusRoute = "/api/rb/file-uploads/:uploadId/status";
export declare const CompleteRoute = "/api/rb/file-uploads/:uploadId/complete";
export declare const initRequestSchema: z.ZodObject<{
  filename: z.ZodString;
  mimeType: z.ZodString;
  totalSize: z.ZodNumber;
}, z.core.$strip>;
export type InitRequestPayload = z.infer<typeof initRequestSchema>;
export declare const initResponseSchema: z.ZodObject<{
  ok: z.ZodBoolean;
  error: z.ZodOptional<z.ZodString>;
  uploadId: z.ZodOptional<z.ZodString>;
  uploadKey: z.ZodOptional<z.ZodString>;
  chunkSize: z.ZodOptional<z.ZodNumber>;
  chunksTotal: z.ZodOptional<z.ZodNumber>;
}, z.core.$strip>;
export type InitResponsePayload = z.infer<typeof initResponseSchema>;
export declare const statusResponseSchema: z.ZodObject<{
  ok: z.ZodBoolean;
  error: z.ZodOptional<z.ZodString>;
  status: z.ZodOptional<z.ZodEnum<{
    error: "error";
    done: "done";
    uploading: "uploading";
    assembling: "assembling";
  }>>;
  chunkSize: z.ZodOptional<z.ZodNumber>;
  chunksTotal: z.ZodOptional<z.ZodNumber>;
  received: z.ZodOptional<z.ZodArray<z.ZodNumber>>;
  fileId: z.ZodOptional<z.ZodString>;
}, z.core.$strip>;
export type StatusResponsePayload = z.infer<typeof statusResponseSchema>;
export declare const completeResponseSchema: z.ZodObject<{
  ok: z.ZodBoolean;
  error: z.ZodOptional<z.ZodString>;
  fileId: z.ZodOptional<z.ZodString>;
}, z.core.$strip>;
export type CompleteResponsePayload = z.infer<typeof completeResponseSchema>;
//# sourceMappingURL=index.d.ts.map
package/dist/uploads/api/file-uploads/index.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AAGvB,eAAO,MAAM,SAAS,yBAAyB,CAAA;AAC/C,eAAO,MAAM,UAAU,iDAAiD,CAAA;AACxE,eAAO,MAAM,WAAW,0CAA0C,CAAA;AAClE,eAAO,MAAM,aAAa,4CAA4C,CAAA;AAEtE,eAAO,MAAM,iBAAiB;;;;iBAI5B,CAAA;AAEF,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,iBAAiB,CAAC,CAAA;AAElE,eAAO,MAAM,kBAAkB;;;;;;;iBAO7B,CAAA;AAEF,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB,CAAC,CAAA;AAEpE,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;iBAQ/B,CAAA;AAEF,MAAM,MAAM,qBAAqB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAA;AAExE,eAAO,MAAM,sBAAsB;;;;iBAIjC,CAAA;AAEF,MAAM,MAAM,uBAAuB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,sBAAsB,CAAC,CAAA"}
package/dist/uploads/api/file-uploads/middleware/rawBodyParser.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"rawBodyParser.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/middleware/rawBodyParser.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,aAAa,GAAI,iBAE3B;IACD,UAAU,EAAE,MAAM,CAAA;CACnB,MACS,KAAK,GAAG,EAAE,KAAK,GAAG,EAAE,MAAM,GAAG,SA6DtC,CAAA"}
package/dist/uploads/api/file-uploads/shared.d.ts
ADDED
@@ -0,0 +1,28 @@
import { Ctx } from '../../../../../api/src';
import { IRBUploadChunk, IRBUploadSession, LoadModelCtx } from '../../../../../db/src';
import { Model } from '../../../../../vite/node_modules/mongoose';
export type SessionUser = {
  id?: string;
  current_tenant_id?: string;
};
export type UploadSessionDoc = IRBUploadSession;
export type UploadChunkDoc = Omit<IRBUploadChunk, "data"> & {
  data: Buffer;
};
export declare const getChunkSizeBytes: () => number;
export declare const getSessionTtlMs: () => number;
export declare const getRawBodyLimitBytes: (chunkSizeBytes: number) => number;
export declare const getBucketName: () => string;
export declare const getUserId: (ctx: Ctx<SessionUser>) => string | null;
export declare const getTenantId: (ctx: Ctx<SessionUser>) => string | null;
export declare const computeSha256Hex: (data: Buffer) => string;
export declare const normalizeSha256Hex: (value: string) => string;
export declare const getModelCtx: (_ctx: Ctx<SessionUser>, tenantId: string) => LoadModelCtx;
export declare const toBufferPayload: (payload: unknown) => Buffer | null;
export declare const ensureUploadIndexes: (UploadSession: Model<UploadSessionDoc>, UploadChunk: Model<UploadChunkDoc>) => Promise<void>;
export declare const getUploadKeyHash: (ctx: Ctx<SessionUser>) => string | null;
export declare const getOwnershipSelector: (ctx: Ctx<SessionUser>, session: Pick<UploadSessionDoc, "userId" | "ownerKeyHash">) => {
  userId?: string;
  ownerKeyHash?: string;
} | null;
//# sourceMappingURL=shared.d.ts.map
package/dist/uploads/api/file-uploads/shared.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"shared.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/shared.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,GAAG,EAAE,MAAM,cAAc,CAAA;AAClC,OAAO,EACL,KAAK,cAAc,EACnB,KAAK,gBAAgB,EACrB,KAAK,YAAY,EAClB,MAAM,aAAa,CAAA;AACpB,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,UAAU,CAAA;AAGrC,MAAM,MAAM,WAAW,GAAG;IACxB,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,iBAAiB,CAAC,EAAE,MAAM,CAAA;CAC3B,CAAA;AAED,MAAM,MAAM,gBAAgB,GAAG,gBAAgB,CAAA;AAC/C,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG;IAAE,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AAkB5E,eAAO,MAAM,iBAAiB,QAAO,MAIpC,CAAA;AAED,eAAO,MAAM,eAAe,QAAO,MAGlC,CAAA;AAED,eAAO,MAAM,oBAAoB,GAAI,gBAAgB,MAAM,KAAG,MAAsC,CAAA;AAEpG,eAAO,MAAM,aAAa,QAAO,MAAsE,CAAA;AAEvG,eAAO,MAAM,SAAS,GAAI,KAAK,GAAG,CAAC,WAAW,CAAC,KAAG,MAAM,GAAG,IAK1D,CAAA;AAED,eAAO,MAAM,WAAW,GAAI,KAAK,GAAG,CAAC,WAAW,CAAC,KAAG,MAAM,GAAG,IAc5D,CAAA;AAED,eAAO,MAAM,gBAAgB,GAAI,MAAM,MAAM,KAAG,MAAyD,CAAA;AAEzG,eAAO,MAAM,kBAAkB,GAAI,OAAO,MAAM,KAAG,MAAoC,CAAA;AAEvF,eAAO,MAAM,WAAW,GAAI,MAAM,GAAG,CAAC,WAAW,CAAC,EAAE,UAAU,MAAM,KAAG,YAQrE,CAAA;AAEF,eAAO,MAAM,eAAe,GAAI,SAAS,OAAO,KAAG,MAAM,GAAG,IAI3D,CAAA;AAED,eAAO,MAAM,mBAAmB,GAC9B,eAAe,KAAK,CAAC,gBAAgB,CAAC,EACtC,aAAa,KAAK,CAAC,cAAc,CAAC,KACjC,OAAO,CAAC,IAAI,CAUd,CAAA;AAQD,eAAO,MAAM,gBAAgB,GAAI,KAAK,GAAG,CAAC,WAAW,CAAC,KAAG,MAAM,GAAG,IAIjE,CAAA;AAWD,eAAO,MAAM,oBAAoB,GAC/B,KAAK,GAAG,CAAC,WAAW,CAAC,EACrB,SAAS,IAAI,CAAC,gBAAgB,EAAE,QAAQ,GAAG,cAAc,CAAC,KACzD;IAAE,MAAM,CAAC,EAAE,MAAM,CAAC;IAAC,YAAY,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,IAe/C,CAAA"}
package/dist/uploads/routes.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"routes.d.ts","sourceRoot":"","sources":["../../src/uploads/routes.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,MAAM,yBAKb,CAAA"}
package/dist/uploads.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"uploads.d.ts","sourceRoot":"","sources":["../src/uploads.ts"],"names":[],"mappings":"AAAA,cAAc,kBAAkB,CAAA"}
package/dist/uploads.js
ADDED
@@ -0,0 +1,9 @@
const routes = Object.entries({
  .../* @__PURE__ */ Object.assign({ "./api/file-uploads/handler.ts": () => import("./handler-CTRE1McR.js") })
}).reduce((acc, [path, mod]) => {
  acc[path.replace("./api/", "@rpcbase/server/uploads/api/")] = mod;
  return acc;
}, {});
export {
  routes
};
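dist/uploads.js mirrors the existing rts.js pattern: a lazily imported route map keyed by virtual module path. Combined with the "./uploads" subpath export added to package.json below, a host application could register the upload handlers roughly as sketched here; the registration loop and the api object are assumptions, not part of this diff.

// Minimal sketch, assuming the host passes the same `api` object that handler modules expect.
import { routes } from "@rpcbase/server/uploads";

export const registerUploadRoutes = async (api: unknown): Promise<void> => {
  for (const load of Object.values(routes)) {
    const mod = (await load()) as { default: (api: unknown) => void };
    mod.default(api); // wires up the init/chunk/status/complete routes
  }
};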
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@rpcbase/server",
-   "version": "0.
+   "version": "0.473.0",
    "type": "module",
    "files": [
      "dist"
@@ -17,6 +17,11 @@
      "types": "./dist/rts.d.ts",
      "import": "./dist/rts.js",
      "default": "./dist/rts.js"
+   },
+   "./uploads": {
+     "types": "./dist/uploads.d.ts",
+     "import": "./dist/uploads.js",
+     "default": "./dist/uploads.js"
    }
  },
  "scripts": {