@rpcbase/server 0.480.0 → 0.482.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
- {"version":3,"file":"applyRouteLoaders.d.ts","sourceRoot":"","sources":["../src/applyRouteLoaders.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,OAAO,EAAC,MAAM,SAAS,CAAA;AAC/B,OAAO,EACL,oBAAoB,EAMrB,MAAM,iBAAiB,CAAA;AAuFxB,MAAM,MAAM,yBAAyB,GAAG,oBAAoB,GAAG;IAC7D,gBAAgB,CAAC,EAAE,QAAQ,CAAA;IAC3B,eAAe,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;IAC/B,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;CAClC,CAAA;AAED,wBAAsB,iBAAiB,CACrC,GAAG,EAAE,OAAO,EACZ,UAAU,EAAE,GAAG,EAAE,GAChB,OAAO,CAAC,yBAAyB,CAAC,CA6KpC"}
+ {"version":3,"file":"applyRouteLoaders.d.ts","sourceRoot":"","sources":["../src/applyRouteLoaders.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,OAAO,EAAC,MAAM,SAAS,CAAA;AAC/B,OAAO,EACL,oBAAoB,EAMrB,MAAM,iBAAiB,CAAA;AA0GxB,MAAM,MAAM,yBAAyB,GAAG,oBAAoB,GAAG;IAC7D,gBAAgB,CAAC,EAAE,QAAQ,CAAA;IAC3B,eAAe,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;IAC/B,iBAAiB,CAAC,EAAE,MAAM,GAAG,IAAI,CAAA;CAClC,CAAA;AAED,wBAAsB,iBAAiB,CACrC,GAAG,EAAE,OAAO,EACZ,UAAU,EAAE,GAAG,EAAE,GAChB,OAAO,CAAC,yBAAyB,CAAC,CAwLpC"}
@@ -1,8 +1,77 @@
- import { loadModel, getTenantFilesystemDb } from "@rpcbase/db";
+ import { models, getTenantFilesystemDb } from "@rpcbase/db";
  import { GridFSBucket, ObjectId } from "mongodb";
- import { g as getTenantId, a as getModelCtx, b as buildUploadsAbility, c as getUploadSessionAccessQuery, e as ensureUploadIndexes, d as getBucketName, f as getUserId, h as getChunkSizeBytes, i as getSessionTtlMs, j as computeSha256Hex, t as toBufferPayload, n as normalizeSha256Hex, k as getMaxClientUploadBytesPerSecond, l as getRawBodyLimitBytes } from "./shared-UGuDRAKK.js";
+ import { JSDOM } from "jsdom";
+ import createDOMPurify from "dompurify";
+ import { g as getTenantId, a as getModelCtx, b as buildUploadsAbility, c as getUploadSessionAccessQuery, e as ensureUploadIndexes, d as getBucketName, f as getUserId, h as getChunkSizeBytes, i as getSessionTtlMs, j as computeSha256Hex, t as toBufferPayload, n as normalizeSha256Hex, k as getMaxClientUploadBytesPerSecond, l as getRawBodyLimitBytes } from "./shared-BJomDDWK.js";
  import { randomBytes } from "node:crypto";
- import { o as object, n as number, s as string, b as boolean, a as array, _ as _enum } from "./schemas-D5T9tDtI.js";
+ import { o as object, n as number, b as boolean, s as string, a as array, _ as _enum } from "./schemas-D5T9tDtI.js";
+ const MAX_SVG_BYTES = 128 * 1024;
+ const window = new JSDOM("").window;
+ const DOMPurify = createDOMPurify(window);
+ const normalizeForSniff = (raw) => raw.replace(/^\uFEFF/, "").trimStart();
+ const looksLikeSvgText = (text) => {
+ const normalized = normalizeForSniff(text);
+ if (!normalized.startsWith("<")) return false;
+ return /<svg(?:\s|>)/i.test(normalized);
+ };
+ const looksLikeSvg = (sniff) => looksLikeSvgText(sniff.toString("utf8"));
+ const sanitizeSvg = (svg) => DOMPurify.sanitize(svg, {
+ USE_PROFILES: { svg: true, svgFilters: true }
+ });
+ const sanitizeSvgProcessor = {
+ id: "sanitize-svg",
+ maxBytes: MAX_SVG_BYTES,
+ match: ({ sniff }) => looksLikeSvg(sniff),
+ process: (data) => {
+ if (data.length > MAX_SVG_BYTES) {
+ throw new Error("svg_too_large");
+ }
+ const svgText = data.toString("utf8");
+ if (!looksLikeSvgText(svgText)) {
+ throw new Error("svg_invalid");
+ }
+ const sanitized = sanitizeSvg(svgText);
+ if (!sanitized.trim() || !looksLikeSvgText(sanitized)) {
+ throw new Error("svg_sanitize_failed");
+ }
+ const sanitizedBuffer = Buffer.from(sanitized, "utf8");
+ if (sanitizedBuffer.length > MAX_SVG_BYTES) {
+ throw new Error("svg_too_large");
+ }
+ return { data: sanitizedBuffer, mimeType: "image/svg+xml" };
+ }
+ };
+ const uploadProcessors = Object.freeze([sanitizeSvgProcessor]);
+ const getMaxUploadProcessorBytes = () => uploadProcessors.reduce((max, processor) => Math.max(max, processor.maxBytes), 0);
+ const selectUploadProcessors = (ctx) => uploadProcessors.filter((processor) => processor.match(ctx));
+ const applyUploadProcessors = async (data, ctx) => {
+ let currentData = data;
+ let currentMimeType = ctx.clientMimeType;
+ const applied = [];
+ for (const processor of uploadProcessors) {
+ const processorCtx = {
+ filename: ctx.filename,
+ clientMimeType: currentMimeType,
+ totalSize: currentData.length,
+ sniff: currentData
+ };
+ if (!processor.match(processorCtx)) continue;
+ if (currentData.length > processor.maxBytes) {
+ throw new Error("processor_input_too_large");
+ }
+ const result = await processor.process(currentData, processorCtx);
+ currentData = result.data;
+ if (typeof result.mimeType === "string" && result.mimeType.trim()) {
+ currentMimeType = result.mimeType.trim();
+ }
+ applied.push(processor.id);
+ }
+ return {
+ data: currentData,
+ mimeType: currentMimeType,
+ applied
+ };
+ };
  const waitForStreamFinished = async (stream) => new Promise((resolve, reject) => {
  stream.once("finish", resolve);
  stream.once("error", reject);
@@ -55,8 +124,8 @@ const completeUpload = async (_payload, ctx) => {
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
- loadModel("RBUploadSession", modelCtx),
- loadModel("RBUploadChunk", modelCtx)
+ models.get("RBUploadSession", modelCtx),
+ models.get("RBUploadChunk", modelCtx)
  ]);
  const ability = buildUploadsAbility(ctx, tenantId);
  if (!ability.can("update", "RBUploadSession")) {
@@ -94,24 +163,70 @@ const completeUpload = async (_payload, ctx) => {
  const bucketName = getBucketName();
  const bucket = new GridFSBucket(nativeDb, { bucketName });
  const lockedUserId = typeof locked.userId === "string" ? locked.userId : void 0;
- const uploadStream = bucket.openUploadStream(locked.filename, {
- metadata: {
- uploadId,
- tenantId,
- mimeType: locked.mimeType,
- totalSize: locked.totalSize,
- ...lockedUserId ? { userId: lockedUserId } : {}
- }
- });
+ const maxProcessorBytes = getMaxUploadProcessorBytes();
+ const shouldBufferForProcessing = locked.totalSize <= maxProcessorBytes;
+ const declaredMimeType = locked.mimeType.trim().toLowerCase();
+ const declaredSvg = declaredMimeType === "image/svg+xml" || locked.filename.trim().toLowerCase().endsWith(".svg");
+ let uploadStream = null;
  try {
+ if (!shouldBufferForProcessing && declaredSvg) {
+ throw new Error("svg_too_large");
+ }
  const cursor = UploadChunk.find({ uploadId }).sort({ index: 1 }).cursor();
  let expectedIndex = 0;
+ const chunks = [];
+ let bufferedBytes = 0;
+ const pendingChunks = [];
+ const sniffParts = [];
+ let sniffBytes = 0;
  try {
  for await (const chunkDoc of cursor) {
  if (chunkDoc.index !== expectedIndex) {
  throw new Error("missing_chunks");
  }
- await writeToStream(uploadStream, chunkDoc.data);
+ const chunk = chunkDoc.data;
+ if (shouldBufferForProcessing) {
+ chunks.push(chunk);
+ bufferedBytes += chunk.length;
+ } else if (!uploadStream) {
+ pendingChunks.push(chunk);
+ if (sniffBytes < maxProcessorBytes) {
+ const slice = chunk.subarray(0, Math.min(chunk.length, maxProcessorBytes - sniffBytes));
+ if (slice.length) {
+ sniffParts.push(slice);
+ sniffBytes += slice.length;
+ }
+ }
+ if (sniffBytes >= maxProcessorBytes) {
+ const sniff = Buffer.concat(sniffParts, sniffBytes);
+ const processors = selectUploadProcessors({
+ filename: locked.filename,
+ clientMimeType: locked.mimeType,
+ totalSize: locked.totalSize,
+ sniff
+ });
+ if (processors.length) {
+ throw new Error("svg_too_large");
+ }
+ uploadStream = bucket.openUploadStream(locked.filename, {
+ metadata: {
+ uploadId,
+ tenantId,
+ mimeType: locked.mimeType,
+ totalSize: locked.totalSize,
+ ...typeof locked.isPublic === "boolean" ? { isPublic: locked.isPublic } : {},
+ ...typeof locked.ownerKeyHash === "string" ? { ownerKeyHash: locked.ownerKeyHash } : {},
+ ...lockedUserId ? { userId: lockedUserId } : {}
+ }
+ });
+ for (const pending of pendingChunks) {
+ await writeToStream(uploadStream, pending);
+ }
+ pendingChunks.length = 0;
+ }
+ } else {
+ await writeToStream(uploadStream, chunk);
+ }
  expectedIndex += 1;
  }
  } finally {
@@ -123,9 +238,59 @@ const completeUpload = async (_payload, ctx) => {
  if (expectedIndex !== locked.chunksTotal) {
  throw new Error("missing_chunks");
  }
- const finished = waitForStreamFinished(uploadStream);
- uploadStream.end();
- await finished;
+ if (shouldBufferForProcessing) {
+ const assembled = Buffer.concat(chunks, bufferedBytes);
+ const { data: processed, mimeType: processedMimeType, applied } = await applyUploadProcessors(assembled, {
+ filename: locked.filename,
+ clientMimeType: locked.mimeType
+ });
+ uploadStream = bucket.openUploadStream(locked.filename, {
+ metadata: {
+ uploadId,
+ tenantId,
+ mimeType: processedMimeType,
+ totalSize: locked.totalSize,
+ ...applied.length ? { processors: applied, processedSize: processed.length } : {},
+ ...typeof locked.isPublic === "boolean" ? { isPublic: locked.isPublic } : {},
+ ...typeof locked.ownerKeyHash === "string" ? { ownerKeyHash: locked.ownerKeyHash } : {},
+ ...lockedUserId ? { userId: lockedUserId } : {}
+ }
+ });
+ const finished = waitForStreamFinished(uploadStream);
+ uploadStream.end(processed);
+ await finished;
+ } else {
+ if (!uploadStream) {
+ const sniff = Buffer.concat(sniffParts, sniffBytes);
+ const processors = selectUploadProcessors({
+ filename: locked.filename,
+ clientMimeType: locked.mimeType,
+ totalSize: locked.totalSize,
+ sniff
+ });
+ if (processors.length) {
+ throw new Error("svg_too_large");
+ }
+ uploadStream = bucket.openUploadStream(locked.filename, {
+ metadata: {
+ uploadId,
+ tenantId,
+ mimeType: locked.mimeType,
+ totalSize: locked.totalSize,
+ ...typeof locked.isPublic === "boolean" ? { isPublic: locked.isPublic } : {},
+ ...typeof locked.ownerKeyHash === "string" ? { ownerKeyHash: locked.ownerKeyHash } : {},
+ ...lockedUserId ? { userId: lockedUserId } : {}
+ }
+ });
+ for (const pending of pendingChunks) {
+ await writeToStream(uploadStream, pending);
+ }
+ pendingChunks.length = 0;
+ }
+ const finished = waitForStreamFinished(uploadStream);
+ uploadStream.end();
+ await finished;
+ }
  const fileId = String(uploadStream.id ?? "");
  if (!fileId) {
  throw new Error("missing_file_id");
@@ -150,6 +315,22 @@ const completeUpload = async (_payload, ctx) => {
  ctx.res.status(409);
  return { ok: false, error: "missing_chunks" };
  }
+ if (message === "svg_too_large") {
+ await UploadSession.updateOne(
+ { $and: [{ _id: uploadId }, getUploadSessionAccessQuery(ability, "update")] },
+ { $set: { status: "error", error: message } }
+ );
+ ctx.res.status(413);
+ return { ok: false, error: message };
+ }
+ if (message === "svg_invalid" || message === "svg_sanitize_failed") {
+ await UploadSession.updateOne(
+ { $and: [{ _id: uploadId }, getUploadSessionAccessQuery(ability, "update")] },
+ { $set: { status: "error", error: message } }
+ );
+ ctx.res.status(400);
+ return { ok: false, error: message };
+ }
  await UploadSession.updateOne(
  { $and: [{ _id: uploadId }, getUploadSessionAccessQuery(ability, "update")] },
  { $set: { status: "error", error: message } }
@@ -171,8 +352,8 @@ const getStatus = async (_payload, ctx) => {
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
- loadModel("RBUploadSession", modelCtx),
- loadModel("RBUploadChunk", modelCtx)
+ models.get("RBUploadSession", modelCtx),
+ models.get("RBUploadChunk", modelCtx)
  ]);
  const ability = buildUploadsAbility(ctx, tenantId);
  if (!ability.can("read", "RBUploadSession")) {
@@ -205,6 +386,7 @@ const CompleteRoute = "/api/rb/file-uploads/:uploadId/complete";
  const initRequestSchema = object({
  filename: string().min(1),
  mimeType: string().min(1),
+ isPublic: boolean().optional(),
  totalSize: number().int().min(1)
  });
  object({
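
initRequestSchema gains an optional isPublic flag. When a client sends it, initUpload persists it on the upload session (next hunks), completeUpload copies it into the GridFS file metadata, and getFile later reads it to decide whether unauthenticated downloads are allowed. An example body the updated schema accepts (values illustrative):

const initPayload = {
  filename: "logo.svg",
  mimeType: "image/svg+xml",
  totalSize: 5 * 1024,
  isPublic: true // optional; when omitted the file stays owner-only
};
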
@@ -242,12 +424,12 @@ const initUpload = async (payload, ctx) => {
  return { ok: false, error: "invalid_payload" };
  }
  const chunkSize = getChunkSizeBytes();
- const { filename, mimeType, totalSize } = parsed.data;
+ const { filename, mimeType, totalSize, isPublic } = parsed.data;
  const chunksTotal = Math.ceil(totalSize / chunkSize);
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
- loadModel("RBUploadSession", modelCtx),
- loadModel("RBUploadChunk", modelCtx)
+ models.get("RBUploadSession", modelCtx),
+ models.get("RBUploadChunk", modelCtx)
  ]);
  await ensureUploadIndexes(UploadSession, UploadChunk);
  const uploadId = new ObjectId().toString();
@@ -261,6 +443,7 @@ const initUpload = async (payload, ctx) => {
  ...ownerKeyHash ? { ownerKeyHash } : {},
  filename,
  mimeType,
+ ...typeof isPublic === "boolean" ? { isPublic } : {},
  totalSize,
  chunkSize,
  chunksTotal,
@@ -291,8 +474,8 @@ const uploadChunk = async (payload, ctx) => {
  }
  const modelCtx = getModelCtx(ctx, tenantId);
  const [UploadSession, UploadChunk] = await Promise.all([
- loadModel("RBUploadSession", modelCtx),
- loadModel("RBUploadChunk", modelCtx)
+ models.get("RBUploadSession", modelCtx),
+ models.get("RBUploadChunk", modelCtx)
  ]);
  const ability = buildUploadsAbility(ctx, tenantId);
  if (!ability.can("update", "RBUploadSession")) {
@@ -1,4 +1,4 @@
- import { loadModel } from "@rpcbase/db";
+ import { models } from "@rpcbase/db";
  import { buildAbilityFromSession, getAccessibleByQuery } from "@rpcbase/db/acl";
  import { createNotification, sendNotificationsDigestForUser } from "./notifications.js";
  import { o as object, b as boolean, n as number, a as array, s as string, r as record, u as unknown, _ as _enum } from "./schemas-D5T9tDtI.js";
@@ -139,10 +139,10 @@ const listNotifications = async (payload, ctx) => {
  const unreadOnly = parsed.data.unreadOnly === true;
  const limit = parsed.data.limit ?? 50;
  const markSeen = parsed.data.markSeen === true;
- const SettingsModel = await loadModel("RBNotificationSettings", ctx);
+ const SettingsModel = await models.get("RBNotificationSettings", ctx);
  const settings = await SettingsModel.findOne({ userId }).lean();
  const disabledTopics = buildDisabledTopics(settings, "inApp");
- const NotificationModel = await loadModel("RBNotification", ctx);
+ const NotificationModel = await models.get("RBNotification", ctx);
  const queryFilters = [
  { userId },
  getAccessibleByQuery(ability, "read", "RBNotification")
@@ -234,7 +234,7 @@ const markRead = async (_payload, ctx) => {
  ctx.res.status(400);
  return { ok: false, error: "missing_notification_id" };
  }
- const NotificationModel = await loadModel("RBNotification", ctx);
+ const NotificationModel = await models.get("RBNotification", ctx);
  const now = /* @__PURE__ */ new Date();
  try {
  await NotificationModel.updateOne(
@@ -257,10 +257,10 @@ const markAllRead = async (_payload, ctx) => {
  ctx.res.status(403);
  return { ok: false, error: "forbidden" };
  }
- const SettingsModel = await loadModel("RBNotificationSettings", ctx);
+ const SettingsModel = await models.get("RBNotificationSettings", ctx);
  const settings = await SettingsModel.findOne({ userId: session.userId }).lean();
  const disabledTopics = buildDisabledTopics(settings, "inApp");
- const NotificationModel = await loadModel("RBNotification", ctx);
+ const NotificationModel = await models.get("RBNotification", ctx);
  const queryFilters = [
  { userId: session.userId },
  { archivedAt: { $exists: false } },
@@ -288,7 +288,7 @@ const archiveNotification = async (_payload, ctx) => {
  ctx.res.status(400);
  return { ok: false, error: "missing_notification_id" };
  }
- const NotificationModel = await loadModel("RBNotification", ctx);
+ const NotificationModel = await models.get("RBNotification", ctx);
  try {
  await NotificationModel.updateOne(
  { $and: [{ _id: notificationId }, { archivedAt: { $exists: false } }, getAccessibleByQuery(ability, "update", "RBNotification")] },
@@ -310,7 +310,7 @@ const getSettings = async (_payload, ctx) => {
  ctx.res.status(403);
  return { ok: false, error: "forbidden" };
  }
- const SettingsModel = await loadModel("RBNotificationSettings", ctx);
+ const SettingsModel = await models.get("RBNotificationSettings", ctx);
  const settings = await SettingsModel.findOne(
  { $and: [{ userId: session.userId }, getAccessibleByQuery(ability, "read", "RBNotificationSettings")] }
  ).lean();
@@ -346,7 +346,7 @@ const updateSettings = async (payload, ctx) => {
  ctx.res.status(400);
  return { ok: false, error: "invalid_payload" };
  }
- const SettingsModel = await loadModel("RBNotificationSettings", ctx);
+ const SettingsModel = await models.get("RBNotificationSettings", ctx);
  const nextValues = {};
  if (parsed.data.digestFrequency) {
  nextValues.digestFrequency = parsed.data.digestFrequency;
@@ -1,6 +1,11 @@
  import { getTenantFilesystemDb } from "@rpcbase/db";
  import { ObjectId, GridFSBucket } from "mongodb";
- import { g as getTenantId, d as getBucketName } from "./shared-UGuDRAKK.js";
+ import { g as getTenantId, d as getBucketName, f as getUserId, m as getUploadKeyHash } from "./shared-BJomDDWK.js";
+ const resolveHeaderString$1 = (value) => {
+ if (typeof value !== "string") return null;
+ const normalized = value.trim();
+ return normalized ? normalized : null;
+ };
  const deleteFile = async (_payload, ctx) => {
  const tenantId = getTenantId(ctx);
  if (!tenantId) {
@@ -23,6 +28,25 @@ const deleteFile = async (_payload, ctx) => {
  }
  const bucketName = getBucketName();
  const bucket = new GridFSBucket(nativeDb, { bucketName });
+ const userId = getUserId(ctx);
+ const uploadKeyHash = getUploadKeyHash(ctx);
+ if (!userId && !uploadKeyHash) {
+ ctx.res.status(401);
+ return { ok: false, error: "unauthorized" };
+ }
+ const [file] = await bucket.find({ _id: fileObjectId }).limit(1).toArray();
+ if (!file) {
+ ctx.res.status(204);
+ return { ok: true };
+ }
+ const metadataUserId = resolveHeaderString$1(file?.metadata?.userId);
+ const ownerKeyHash = resolveHeaderString$1(file?.metadata?.ownerKeyHash);
+ const authorizedByUser = Boolean(userId && metadataUserId && userId === metadataUserId);
+ const authorizedByKey = Boolean(uploadKeyHash && ownerKeyHash && uploadKeyHash === ownerKeyHash);
+ if (!authorizedByUser && !authorizedByKey) {
+ ctx.res.status(401);
+ return { ok: false, error: "unauthorized" };
+ }
  try {
  await bucket.delete(fileObjectId);
  } catch (error) {
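
deleteFile previously deleted by id alone; it now requires proof of ownership via either a matching metadata.userId or a matching ownerKeyHash, and getFile (below) applies the same test to files not marked isPublic. Anonymous callers get 401, and deleting a nonexistent file short-circuits to 204. The ownership predicate extracted as a standalone sketch (canDelete is a name invented here, not an export):

type OwnerMeta = { userId?: string; ownerKeyHash?: string };
type Caller = { userId?: string; uploadKeyHash?: string };

// Authorized if the session user owns the file, or the caller presents the
// same upload-key hash that was recorded on the file at upload time.
const canDelete = (meta: OwnerMeta, caller: Caller): boolean =>
  Boolean(caller.userId && meta.userId && caller.userId === meta.userId) ||
  Boolean(caller.uploadKeyHash && meta.ownerKeyHash && caller.uploadKeyHash === meta.ownerKeyHash);
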
@@ -40,6 +64,15 @@ const resolveHeaderString = (value) => {
  const normalized = value.trim();
  return normalized ? normalized : null;
  };
+ const resolveHeaderBoolean = (value) => {
+ if (typeof value === "boolean") return value;
+ if (typeof value !== "string") return null;
+ const normalized = value.trim().toLowerCase();
+ if (!normalized) return null;
+ if (normalized === "true") return true;
+ if (normalized === "false") return false;
+ return null;
+ };
  const escapeHeaderFilename = (filename) => filename.replace(/[\\"]/g, "_");
  const getFile = async (_payload, ctx) => {
  const tenantId = getTenantId(ctx);
@@ -68,6 +101,19 @@ const getFile = async (_payload, ctx) => {
  ctx.res.status(404).end();
  return {};
  }
+ const isPublic = resolveHeaderBoolean(file?.metadata?.isPublic) ?? false;
+ if (!isPublic) {
+ const userId = getUserId(ctx);
+ const uploadKeyHash = getUploadKeyHash(ctx);
+ const metadataUserId = resolveHeaderString(file?.metadata?.userId);
+ const ownerKeyHash = resolveHeaderString(file?.metadata?.ownerKeyHash);
+ const authorizedByUser = Boolean(userId && metadataUserId && userId === metadataUserId);
+ const authorizedByKey = Boolean(uploadKeyHash && ownerKeyHash && uploadKeyHash === ownerKeyHash);
+ if (!authorizedByUser && !authorizedByKey) {
+ ctx.res.status(401).end();
+ return {};
+ }
+ }
  const mimeTypeFromMetadata = resolveHeaderString(file?.metadata?.mimeType);
  const mimeType = mimeTypeFromMetadata ?? "application/octet-stream";
  const filenameFromDb = resolveHeaderString(file?.filename);
@@ -95,6 +141,13 @@ const getFile = async (_payload, ctx) => {
  ctx.res.setHeader("Content-Disposition", `inline; filename="${filenameSafe}"`);
  ctx.res.setHeader("Cache-Control", cacheControl);
  ctx.res.setHeader("ETag", etag);
+ ctx.res.setHeader("X-Content-Type-Options", "nosniff");
+ if (mimeType === "image/svg+xml") {
+ ctx.res.setHeader(
+ "Content-Security-Policy",
+ "default-src 'none'; style-src 'unsafe-inline'; sandbox; base-uri 'none'; form-action 'none'"
+ );
+ }
  ctx.res.flushHeaders();
  if (ctx.req.method === "HEAD") {
  ctx.res.end();
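
Even after upload-time sanitization, SVGs are hardened again at serve time: every file response now carries X-Content-Type-Options: nosniff, and image/svg+xml responses additionally get a CSP that blocks script execution and subresource loads and sandboxes the document when it is navigated to directly. The same headers in an isolated sketch (any Node ServerResponse-compatible res):

import type { ServerResponse } from "node:http";

const hardenSvgResponse = (res: ServerResponse): void => {
  // Stop browsers from second-guessing the declared MIME type.
  res.setHeader("X-Content-Type-Options", "nosniff");
  // No scripts, no network fetches, inline styles only, sandboxed document.
  res.setHeader(
    "Content-Security-Policy",
    "default-src 'none'; style-src 'unsafe-inline'; sandbox; base-uri 'none'; form-action 'none'"
  );
};
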
@@ -1,4 +1,4 @@
- import { loadModel, ZRBRtsChangeOp } from "@rpcbase/db";
+ import { models, ZRBRtsChangeOp } from "@rpcbase/db";
  import { buildAbilityFromSession } from "@rpcbase/db/acl";
  import { o as object, a as array, s as string, n as number, b as boolean, _ as _enum } from "./schemas-D5T9tDtI.js";
  const Route = "/api/rb/rts/changes";
@@ -72,8 +72,8 @@ const changesHandler = async (payload, ctx) => {
  const ability = buildAbilityFromSession({ tenantId, session: ctx.req.session });
  const modelCtx = getModelCtx(ctx, tenantId);
  const [RtsChange, RtsCounter] = await Promise.all([
- loadModel("RBRtsChange", modelCtx),
- loadModel("RBRtsCounter", modelCtx)
+ models.get("RBRtsChange", modelCtx),
+ models.get("RBRtsCounter", modelCtx)
  ]);
  const counter = await RtsCounter.findOne({ _id: "rts" }, { seq: 1 }).lean();
  const latestSeq = Number(counter?.seq ?? 0) || 0;
package/dist/index.js CHANGED
@@ -3429,8 +3429,11 @@ const getMongoUrl = (serverEnv) => {
  }
  if (serverEnv.DB_PORT) {
  const host = serverEnv.DB_HOST ?? "localhost";
- const dbName = serverEnv.APP_NAME ?? "rb";
- return `mongodb://${host}:${serverEnv.DB_PORT}/${dbName}-sessions`;
+ const appName = serverEnv.APP_NAME?.trim();
+ if (!appName) {
+ throw new Error("Missing APP_NAME (required to build MongoDB session store DB name)");
+ }
+ return `mongodb://${host}:${serverEnv.DB_PORT}/${appName}-sessions`;
  }
  return void 0;
  };
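
Previously a missing APP_NAME silently produced a shared rb-sessions database; now startup fails fast. For example, with DB_HOST=db.internal, DB_PORT=27017 and APP_NAME=myapp the session store URL becomes mongodb://db.internal:27017/myapp-sessions, while a blank APP_NAME throws. The new behavior as a self-contained sketch:

const sessionStoreUrl = (env: { DB_HOST?: string; DB_PORT?: string; APP_NAME?: string }): string | undefined => {
  if (!env.DB_PORT) return undefined;
  const appName = env.APP_NAME?.trim();
  if (!appName) throw new Error("Missing APP_NAME (required to build MongoDB session store DB name)");
  return `mongodb://${env.DB_HOST ?? "localhost"}:${env.DB_PORT}/${appName}-sessions`;
};
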
@@ -3781,6 +3784,22 @@ const isRedirectResponse = (value) => {
  if (!isResponseLike(value)) return false;
  return value.status >= 300 && value.status < 400 && Boolean(value.headers.get("Location"));
  };
+ const getRouteHandleStatusCode = (route) => {
+ if (!route) return void 0;
+ const handle = route.handle;
+ if (!handle) return void 0;
+ const candidate = handle.statusCode ?? handle.status ?? handle.httpStatus;
+ if (typeof candidate === "number") return candidate;
+ return void 0;
+ };
+ const isNotFoundFallbackRoute = (route) => {
+ if (!route) return false;
+ if (route.path !== "*") return false;
+ if (route.loader) return false;
+ if (route.action) return false;
+ if (route.children?.length) return false;
+ return true;
+ };
  async function applyRouteLoaders(req, dataRoutes) {
  const baseUrl = `${req.protocol}://${req.get("host")}`;
  const url = new URL(req.originalUrl, baseUrl);
@@ -3792,8 +3811,8 @@ async function applyRouteLoaders(req, dataRoutes) {
  loaderHeaders: {},
  actionHeaders: {}
  };
- const matches = matchRoutes(dataRoutes, location) || [];
- if (!matches) {
+ const matches = matchRoutes(dataRoutes, location);
+ if (!matches || matches.length === 0) {
  const error = {
  status: 404,
  message: `No route matches URL: ${req.originalUrl}`
@@ -3920,6 +3939,15 @@ async function applyRouteLoaders(req, dataRoutes) {
  statusCode = 500;
  }
  }
+ if (!errors && statusCode === 200) {
+ const leafRoute = matches.at(-1)?.route;
+ const handleStatusCode = getRouteHandleStatusCode(leafRoute);
+ if (typeof handleStatusCode === "number") {
+ statusCode = handleStatusCode;
+ } else if (isNotFoundFallbackRoute(leafRoute)) {
+ statusCode = NOT_FOUND_STATUS;
+ }
+ }
  return {
  ...baseContext,
  matches,
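
With this change the SSR loader pipeline no longer answers 200 for pages that are semantically errors: when no loader error occurred and the status is still 200, a matched leaf route may declare its status via handle (statusCode, status, or httpStatus are all read), and a bare path: "*" catch-all with no loader, action, or children is treated as a 404 fallback. Route objects that would exercise both paths (shape inferred from the helpers above; not an exported @rpcbase/server API):

declare const Home: () => null;
declare const Gone: () => null;
declare const NotFound: () => null;

const dataRoutes = [
  { path: "/", Component: Home },
  // Explicit status picked up by getRouteHandleStatusCode:
  { path: "/gone", Component: Gone, handle: { statusCode: 410 } },
  // Bare catch-all: isNotFoundFallbackRoute() makes this render with a 404.
  { path: "*", Component: NotFound }
];
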
@@ -1 +1 @@
- {"version":3,"file":"initServer.d.ts","sourceRoot":"","sources":["../src/initServer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAyBrC,KAAK,SAAS,GAAG;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAA;CAAE,CAAA;AAuFtD,eAAO,MAAM,UAAU,GAAU,KAAK,WAAW,EAAE,WAAW,SAAS,kBAsEtE,CAAA"}
+ {"version":3,"file":"initServer.d.ts","sourceRoot":"","sources":["../src/initServer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAyBrC,KAAK,SAAS,GAAG;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAA;CAAE,CAAA;AA0FtD,eAAO,MAAM,UAAU,GAAU,KAAK,WAAW,EAAE,WAAW,SAAS,kBAsEtE,CAAA"}
@@ -1,7 +1,7 @@
- import { loadModel, loadRbModel } from "@rpcbase/db";
+ import { models } from "@rpcbase/db";
  import { s as sendEmail } from "./email-DEw8keax.js";
  const routes = Object.entries({
- .../* @__PURE__ */ Object.assign({ "./api/notifications/handler.ts": () => import("./handler-B_mMDLBO.js") })
+ .../* @__PURE__ */ Object.assign({ "./api/notifications/handler.ts": () => import("./handler-BvRk-c8E.js") })
  }).reduce((acc, [path, mod]) => {
  acc[path.replace("./api/", "@rpcbase/server/notifications/api/")] = mod;
  return acc;
@@ -18,7 +18,7 @@ const createNotification = async (ctx, input) => {
  const topic = typeof input.topic === "string" ? input.topic.trim() : "";
  const body = typeof input.body === "string" ? input.body.trim() : "";
  const url = typeof input.url === "string" ? input.url.trim() : "";
- const NotificationModel = await loadModel("RBNotification", ctx);
+ const NotificationModel = await models.get("RBNotification", ctx);
  const doc = await NotificationModel.create({
  userId,
  ...topic ? { topic } : {},
@@ -57,8 +57,8 @@ const sendNotificationsDigestForUser = async (ctx, {
  userId,
  force = false
  }) => {
- const SettingsModel = await loadModel("RBNotificationSettings", ctx);
- const NotificationModel = await loadModel("RBNotification", ctx);
+ const SettingsModel = await models.get("RBNotificationSettings", ctx);
+ const NotificationModel = await models.get("RBNotification", ctx);
  const settings = await SettingsModel.findOne({ userId }).lean();
  const digestFrequencyRaw = typeof settings?.digestFrequency === "string" ? settings.digestFrequency : "weekly";
  const digestFrequency = digestFrequencyRaw === "daily" || digestFrequencyRaw === "weekly" || digestFrequencyRaw === "off" ? digestFrequencyRaw : "weekly";
@@ -92,7 +92,7 @@ const sendNotificationsDigestForUser = async (ctx, {
  );
  return { ok: true, sent: false, skippedReason: "empty" };
  }
- const UserModel = await loadRbModel("RBUser", ctx);
+ const UserModel = await models.getGlobal("RBUser", ctx);
  const user = await UserModel.findById(userId, { email: 1 }).lean();
  const email = typeof user?.email === "string" ? user.email.trim() : "";
  if (!email) {
package/dist/rts/index.js CHANGED
@@ -1,9 +1,9 @@
  import { randomUUID } from "node:crypto";
- import { loadRbModel, loadModel } from "@rpcbase/db";
+ import { models } from "@rpcbase/db";
  import { buildAbilityFromSession, getTenantRolesFromSessionUser, buildAbility, getAccessibleByQuery } from "@rpcbase/db/acl";
  import { WebSocketServer } from "ws";
  const routes = Object.entries({
- .../* @__PURE__ */ Object.assign({ "./api/changes/handler.ts": () => import("../handler-Dd20DHyz.js") })
+ .../* @__PURE__ */ Object.assign({ "./api/changes/handler.ts": () => import("../handler-lOVgWqyF.js") })
  }).reduce((acc, [path, mod]) => {
  acc[path.replace("./api/", "@rpcbase/server/rts/api/")] = mod;
  return acc;
@@ -166,7 +166,7 @@ const parseUpgradeMeta = async ({
  throw new Error("Missing rb-user-id header (reverse-proxy) and no session middleware configured");
  }
  const rbCtx = { req: { session: null } };
- const User = await loadRbModel("RBUser", rbCtx);
+ const User = await models.getGlobal("RBUser", rbCtx);
  const user = await User.findById(headerUserId, { tenants: 1, tenantRoles: 1 }).lean();
  const tenantsRaw = user?.tenants;
  const tenants = Array.isArray(tenantsRaw) ? tenantsRaw.map((t) => String(t)) : [];
@@ -187,7 +187,7 @@ const getTenantModel = async (tenantId, modelName) => {
  }
  }
  };
- return loadModel(modelName, ctx);
+ return models.get(modelName, ctx);
  };
  const normalizeLimit = (limit) => {
  if (typeof limit !== "number") return QUERY_MAX_LIMIT;
@@ -100,6 +100,7 @@ export {
  computeSha256Hex as j,
  getMaxClientUploadBytesPerSecond as k,
  getRawBodyLimitBytes as l,
+ getUploadKeyHash as m,
  normalizeSha256Hex as n,
  toBufferPayload as t
  };
@@ -1 +1 @@
- {"version":3,"file":"completeUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/completeUpload.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EASjB,MAAM,WAAW,CAAA;AAiDlB,eAAO,MAAM,cAAc,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,uBAAuB,EAAE,WAAW,CAoJ1G,CAAA"}
+ {"version":3,"file":"completeUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/completeUpload.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AAEnC,OAAO,EACL,KAAK,WAAW,EASjB,MAAM,WAAW,CAAA;AAiDlB,eAAO,MAAM,cAAc,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,OAAO,CAAC,uBAAuB,EAAE,WAAW,CAqR1G,CAAA"}
@@ -1 +1 @@
- {"version":3,"file":"initUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/initUpload.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAUjB,MAAM,WAAW,CAAA;AAGlB,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,OAAO,CAAC,kBAAkB,EAAE,OAAO,CAAC,mBAAmB,EAAE,WAAW,CA2DvG,CAAA"}
+ {"version":3,"file":"initUpload.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/handlers/initUpload.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAKzC,OAAO,KAAK,OAAO,MAAM,UAAU,CAAA;AACnC,OAAO,EACL,KAAK,WAAW,EAUjB,MAAM,WAAW,CAAA;AAGlB,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,OAAO,CAAC,kBAAkB,EAAE,OAAO,CAAC,mBAAmB,EAAE,WAAW,CA4DvG,CAAA"}
@@ -6,6 +6,7 @@ export declare const CompleteRoute = "/api/rb/file-uploads/:uploadId/complete";
  export declare const initRequestSchema: z.ZodObject<{
  filename: z.ZodString;
  mimeType: z.ZodString;
+ isPublic: z.ZodOptional<z.ZodBoolean>;
  totalSize: z.ZodNumber;
  }, z.core.$strip>;
  export type InitRequestPayload = z.infer<typeof initRequestSchema>;
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AAGvB,eAAO,MAAM,SAAS,yBAAyB,CAAA;AAC/C,eAAO,MAAM,UAAU,iDAAiD,CAAA;AACxE,eAAO,MAAM,WAAW,0CAA0C,CAAA;AAClE,eAAO,MAAM,aAAa,4CAA4C,CAAA;AAEtE,eAAO,MAAM,iBAAiB;;;;iBAI5B,CAAA;AAEF,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,iBAAiB,CAAC,CAAA;AAElE,eAAO,MAAM,kBAAkB;;;;;;;iBAO7B,CAAA;AAEF,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB,CAAC,CAAA;AAEpE,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;iBAQ/B,CAAA;AAEF,MAAM,MAAM,qBAAqB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAA;AAExE,eAAO,MAAM,sBAAsB;;;;iBAIjC,CAAA;AAEF,MAAM,MAAM,uBAAuB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,sBAAsB,CAAC,CAAA"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/uploads/api/file-uploads/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AAGvB,eAAO,MAAM,SAAS,yBAAyB,CAAA;AAC/C,eAAO,MAAM,UAAU,iDAAiD,CAAA;AACxE,eAAO,MAAM,WAAW,0CAA0C,CAAA;AAClE,eAAO,MAAM,aAAa,4CAA4C,CAAA;AAEtE,eAAO,MAAM,iBAAiB;;;;;iBAK5B,CAAA;AAEF,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,iBAAiB,CAAC,CAAA;AAElE,eAAO,MAAM,kBAAkB;;;;;;;iBAO7B,CAAA;AAEF,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB,CAAC,CAAA;AAEpE,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;iBAQ/B,CAAA;AAEF,MAAM,MAAM,qBAAqB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,oBAAoB,CAAC,CAAA;AAExE,eAAO,MAAM,sBAAsB;;;;iBAIjC,CAAA;AAEF,MAAM,MAAM,uBAAuB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,sBAAsB,CAAC,CAAA"}
@@ -0,0 +1,25 @@
+ export type UploadFileProcessorContext = {
+ filename: string;
+ clientMimeType: string;
+ totalSize: number;
+ sniff: Buffer;
+ };
+ export type UploadFileProcessorResult = {
+ data: Buffer;
+ mimeType?: string;
+ };
+ export type UploadFileProcessor = {
+ id: string;
+ maxBytes: number;
+ match: (ctx: UploadFileProcessorContext) => boolean;
+ process: (data: Buffer, ctx: UploadFileProcessorContext) => Promise<UploadFileProcessorResult> | UploadFileProcessorResult;
+ };
+ export declare const uploadProcessors: readonly UploadFileProcessor[];
+ export declare const getMaxUploadProcessorBytes: () => number;
+ export declare const selectUploadProcessors: (ctx: UploadFileProcessorContext) => UploadFileProcessor[];
+ export declare const applyUploadProcessors: (data: Buffer, ctx: Omit<UploadFileProcessorContext, "sniff" | "totalSize">) => Promise<{
+ data: Buffer;
+ mimeType: string;
+ applied: string[];
+ }>;
+ //# sourceMappingURL=index.d.ts.map
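
The processor contract is now part of the published type surface. Note that uploadProcessors is frozen with only the SVG sanitizer in it, so this diff does not show a registration hook; the example below merely demonstrates a hypothetical value satisfying the declared UploadFileProcessor shape (import path as in the sibling sanitizeSvg.d.ts):

import type { UploadFileProcessor } from "./index";

// Hypothetical processor: strip a UTF-8 BOM from small text uploads.
const stripBomProcessor: UploadFileProcessor = {
  id: "strip-utf8-bom",
  maxBytes: 1024 * 1024,
  // match() sees only filename, declared MIME type, size, and a sniff prefix.
  match: ({ sniff }) => sniff.length >= 3 && sniff[0] === 0xef && sniff[1] === 0xbb && sniff[2] === 0xbf,
  // process() returns replacement bytes and may optionally override the MIME type.
  process: (data) => ({ data: data.subarray(3) })
};
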
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/processors/index.ts"],"names":[],"mappings":"AAGA,MAAM,MAAM,0BAA0B,GAAG;IACvC,QAAQ,EAAE,MAAM,CAAA;IAChB,cAAc,EAAE,MAAM,CAAA;IACtB,SAAS,EAAE,MAAM,CAAA;IACjB,KAAK,EAAE,MAAM,CAAA;CACd,CAAA;AAED,MAAM,MAAM,yBAAyB,GAAG;IACtC,IAAI,EAAE,MAAM,CAAA;IACZ,QAAQ,CAAC,EAAE,MAAM,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,mBAAmB,GAAG;IAChC,EAAE,EAAE,MAAM,CAAA;IACV,QAAQ,EAAE,MAAM,CAAA;IAChB,KAAK,EAAE,CAAC,GAAG,EAAE,0BAA0B,KAAK,OAAO,CAAA;IACnD,OAAO,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,0BAA0B,KAAK,OAAO,CAAC,yBAAyB,CAAC,GAAG,yBAAyB,CAAA;CAC3H,CAAA;AAED,eAAO,MAAM,gBAAgB,gCAAwE,CAAA;AAErG,eAAO,MAAM,0BAA0B,QAAO,MACqC,CAAA;AAEnF,eAAO,MAAM,sBAAsB,GAAI,KAAK,0BAA0B,KAAG,mBAAmB,EAC9B,CAAA;AAE9D,eAAO,MAAM,qBAAqB,GAChC,MAAM,MAAM,EACZ,KAAK,IAAI,CAAC,0BAA0B,EAAE,OAAO,GAAG,WAAW,CAAC,KAC3D,OAAO,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,EAAE,CAAA;CAAE,CAgC/D,CAAA"}
@@ -0,0 +1,5 @@
+ import { UploadFileProcessor } from './index';
+ export declare const looksLikeSvg: (sniff: Buffer) => boolean;
+ export declare const sanitizeSvg: (svg: string) => string;
+ export declare const sanitizeSvgProcessor: UploadFileProcessor;
+ //# sourceMappingURL=sanitizeSvg.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"sanitizeSvg.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/file-uploads/processors/sanitizeSvg.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,SAAS,CAAA;AAgBlD,eAAO,MAAM,YAAY,GAAI,OAAO,MAAM,KAAG,OAAmD,CAAA;AAEhG,eAAO,MAAM,WAAW,GAAI,KAAK,MAAM,KAAG,MAGtC,CAAA;AAEJ,eAAO,MAAM,oBAAoB,EAAE,mBA0BlC,CAAA"}
@@ -1 +1 @@
- {"version":3,"file":"deleteFile.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/deleteFile.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,EAA8B,KAAK,WAAW,EAAE,MAAM,2BAA2B,CAAA;AAGxF,KAAK,qBAAqB,GAAG;IAC3B,EAAE,EAAE,OAAO,CAAA;IACX,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,qBAAqB,EAAE,WAAW,CAyC5F,CAAA"}
+ {"version":3,"file":"deleteFile.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/deleteFile.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,EAA2D,KAAK,WAAW,EAAE,MAAM,2BAA2B,CAAA;AAGrH,KAAK,qBAAqB,GAAG;IAC3B,EAAE,EAAE,OAAO,CAAA;IACX,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAQD,eAAO,MAAM,UAAU,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,qBAAqB,EAAE,WAAW,CAiE5F,CAAA"}
@@ -1,3 +1,4 @@
  import { ApiHandler } from '../../../../../../api/src';
- export declare const getFile: ApiHandler<Record<string, never>, Record<string, never>>;
+ import { SessionUser } from '../../file-uploads/shared';
+ export declare const getFile: ApiHandler<Record<string, never>, Record<string, never>, SessionUser>;
  //# sourceMappingURL=getFile.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"getFile.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/getFile.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAezC,eAAO,MAAM,OAAO,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,CAmF5E,CAAA"}
+ {"version":3,"file":"getFile.d.ts","sourceRoot":"","sources":["../../../../../src/uploads/api/files/handlers/getFile.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAIzC,OAAO,EAA2D,KAAK,WAAW,EAAE,MAAM,2BAA2B,CAAA;AAqBrH,eAAO,MAAM,OAAO,EAAE,UAAU,CAAC,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC,EAAE,WAAW,CA0GzF,CAAA"}
package/dist/uploads.js CHANGED
@@ -1,5 +1,5 @@
  const routes = Object.entries({
- .../* @__PURE__ */ Object.assign({ "./api/file-uploads/handler.ts": () => import("./handler-BOTZftAB.js"), "./api/files/handler.ts": () => import("./handler-Cl-0-832.js") })
+ .../* @__PURE__ */ Object.assign({ "./api/file-uploads/handler.ts": () => import("./handler-B45bHxic.js"), "./api/files/handler.ts": () => import("./handler-Cohj3cz3.js") })
  }).reduce((acc, [path, mod]) => {
  acc[path.replace("./api/", "@rpcbase/server/uploads/api/")] = mod;
  return acc;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@rpcbase/server",
- "version": "0.480.0",
+ "version": "0.482.0",
  "type": "module",
  "files": [
  "dist"
@@ -85,13 +85,16 @@
  "dependencies": {
  "connect-mongo": "6.0.0",
  "connect-redis": "9.0.0",
+ "dompurify": "3.3.1",
  "express-session": "1.18.2",
- "mongodb": "7.0.0",
  "http-proxy-3": "1.23.2",
+ "jsdom": "27.4.0",
+ "mongodb": "7.0.0",
  "redis": "5.10.0",
  "ws": "8.18.3"
  },
  "devDependencies": {
+ "@types/jsdom": "27.0.0",
  "@types/ws": "8.18.1",
  "request-ip": "3.3.0",
  "resend": "6.6.0"