@bepalo/router 1.11.32 → 1.12.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/framework.d.ts +2 -4
- package/dist/cjs/framework.d.ts.map +1 -1
- package/dist/cjs/framework.js +4 -6
- package/dist/cjs/framework.js.map +1 -1
- package/dist/cjs/helpers.d.ts +2 -2
- package/dist/cjs/helpers.d.ts.map +1 -1
- package/dist/cjs/helpers.js +1 -1
- package/dist/cjs/helpers.js.map +1 -1
- package/dist/cjs/index.d.ts +5 -5
- package/dist/cjs/index.d.ts.map +1 -1
- package/dist/cjs/index.js +5 -5
- package/dist/cjs/index.js.map +1 -1
- package/dist/cjs/middlewares.d.ts +2 -2
- package/dist/cjs/middlewares.d.ts.map +1 -1
- package/dist/cjs/middlewares.js +24 -24
- package/dist/cjs/middlewares.js.map +1 -1
- package/dist/cjs/router.d.ts +2 -2
- package/dist/cjs/router.d.ts.map +1 -1
- package/dist/cjs/router.js +8 -8
- package/dist/cjs/router.js.map +1 -1
- package/dist/cjs/types.d.ts +1 -1
- package/dist/cjs/types.d.ts.map +1 -1
- package/dist/cjs/upload-stream.d.ts +1 -1
- package/dist/cjs/upload-stream.d.ts.map +1 -1
- package/dist/cjs/upload-stream.js +7 -7
- package/dist/cjs/upload-stream.js.map +1 -1
- package/dist/framework.d.ts +2 -4
- package/dist/framework.d.ts.map +1 -1
- package/dist/framework.js +4 -6
- package/dist/framework.js.map +1 -1
- package/dist/helpers.d.ts +2 -2
- package/dist/helpers.d.ts.map +1 -1
- package/dist/helpers.js +1 -1
- package/dist/helpers.js.map +1 -1
- package/dist/index.d.ts +5 -5
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -5
- package/dist/index.js.map +1 -1
- package/dist/middlewares.d.ts +2 -2
- package/dist/middlewares.d.ts.map +1 -1
- package/dist/middlewares.js +24 -24
- package/dist/middlewares.js.map +1 -1
- package/dist/router.d.ts +2 -2
- package/dist/router.d.ts.map +1 -1
- package/dist/router.js +8 -8
- package/dist/router.js.map +1 -1
- package/dist/types.d.ts +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/upload-stream.d.ts +1 -1
- package/dist/upload-stream.d.ts.map +1 -1
- package/dist/upload-stream.js +7 -7
- package/dist/upload-stream.js.map +1 -1
- package/package.json +8 -1
- package/src/framework.deno.ts +194 -0
- package/src/framework.ts +197 -0
- package/src/helpers.ts +829 -0
- package/src/index.ts +5 -0
- package/src/list.ts +462 -0
- package/src/middlewares.deno.ts +855 -0
- package/src/middlewares.ts +851 -0
- package/src/router.ts +993 -0
- package/src/tree.ts +139 -0
- package/src/types.ts +197 -0
- package/src/upload-stream.ts +661 -0
- package/dist/cjs/framework.deno.d.ts +0 -31
- package/dist/cjs/framework.deno.d.ts.map +0 -1
- package/dist/cjs/framework.deno.js +0 -245
- package/dist/cjs/framework.deno.js.map +0 -1
- package/dist/framework.deno.d.ts +0 -31
- package/dist/framework.deno.d.ts.map +0 -1
- package/dist/framework.deno.js +0 -245
- package/dist/framework.deno.js.map +0 -1
|
@@ -0,0 +1,661 @@
|
|
|
1
|
+
import { status } from "./helpers.ts";
|
|
2
|
+
import { RouterContext } from "./router.ts";
|
|
3
|
+
import { FreeHandler } from "./types.ts";
|
|
4
|
+
|
|
5
|
+
/**
 * Context object containing parsed upload data from multipart/form-data requests.
 * Populated on the router context by `parseUploadStreaming`.
 * @typedef {Object} CTXUpload
 * @property {Map<string, StreamingUploadedFile>} files - Map of field names to uploaded file information
 * @property {Map<string, string>} fields - Map of field names to string values
 * @property {string} [uploadId] - Optional identifier for the upload session
 */
export type CTXUpload = {
  files: Map<string, StreamingUploadedFile>; // Field name -> file info. NOTE(review): one entry per field name — a repeated field name overwrites the previous file, despite the original "array of files" comment; confirm intended.
  fields: Map<string, string>; // Field name -> value (a repeated field name overwrites the previous value)
  uploadId?: string; // Optional identifier for the upload
};
|
|
17
|
+
|
|
18
|
+
/**
 * Represents a file uploaded via streaming multipart/form-data.
 * Only metadata is kept here — the file content itself is never buffered;
 * it is delivered incrementally through the `onFileChunk` handler.
 * @typedef {Object} StreamingUploadedFile
 * @property {string} name - Original filename from the upload
 * @property {string} type - MIME type of the file
 * @property {number} size - Size of the file in bytes (updated as chunks arrive)
 * @property {string} [customFilename] - Custom filename if provided by onFileStart handler
 * @property {Record<string, any>} [metadata] - Additional metadata for the file
 */

export type StreamingUploadedFile = {
  name: string; // Original filename
  type: string; // MIME type (falls back to application/octet-stream when the part has no content-type header)
  size: number; // Size in bytes
  customFilename?: string; // Custom filename if provided by onFileStart
  metadata?: Record<string, any>; // Additional metadata
};
|
|
35
|
+
|
|
36
|
+
/**
 * Configuration options for streaming multipart upload parsing.
 * @typedef {Object} StreamingUploadOptions
 * @property {number} [maxTotalSize=100*1024*1024] - Maximum total request size in bytes (default: 100MB)
 * @property {number} [maxFileSize=20*1024*1024] - Maximum size per file in bytes (default: 20MB)
 * @property {number} [maxFiles=50] - Maximum number of files allowed
 * @property {number} [maxFields=1000] - Maximum number of text fields allowed
 * @property {string[]} [allowedTypes] - Array of allowed MIME types (if not provided, all types are allowed)
 * @property {Function} [uploadIdGenerator] - Function that generates a unique upload ID (may be async)
 *
 * @property {Function} [onUploadStart] - Called once when parsing begins, with the Content-Length (0 if absent)
 * @property {Function} [onUploadComplete] - Called when upload completes (success flag indicates whether parsing finished without error)
 *
 * @property {Function} [onFileStart] - Called when a file upload starts; may return a custom filename / metadata
 * @property {Function} [onFileChunk] - Called for each chunk of file data; `isLast` is true for the final chunk of a file
 * @property {Function} [onFileComplete] - Called when a file upload completes
 * @property {Function} [onFileError] - Called when onFileStart throws for a file (that part is then skipped)
 *
 * @property {Function} [onField] - Called when a text field is parsed
 * @property {Function} [onError] - Called when the overall upload encounters an error
 */
export type StreamingUploadOptions = {
  maxTotalSize?: number; // Maximum total request size
  maxFileSize?: number; // Maximum size per file
  maxFiles?: number; // Maximum number of files
  maxFields?: number; // Maximum number of text fields
  allowedTypes?: string[]; // Allowed MIME types
  uploadIdGenerator?: () => string | Promise<string>; // Generate upload ID

  // Event handlers
  onUploadStart?: (uploadId: string, totalSize: number) => Promise<void>;
  onUploadComplete?: (uploadId: string, success: boolean) => Promise<void>;

  // File event handlers
  onFileStart?: (
    uploadId: string,
    fieldName: string,
    fileName: string,
    contentType: string,
  ) => Promise<{
    customFilename?: string;
    metadata?: Record<string, any>;
  } | void>;

  onFileChunk?: (
    uploadId: string,
    fieldName: string,
    fileName: string,
    chunk: Uint8Array,
    offset: number, // Byte offset of this chunk within the file
    isLast: boolean, // True only for the file's final chunk
  ) => Promise<void>;

  onFileComplete?: (
    uploadId: string,
    fieldName: string,
    fileName: string,
    fileSize: number,
    customFilename?: string,
    metadata?: Record<string, any>,
  ) => Promise<void>;

  onFileError?: (
    uploadId: string,
    fieldName: string,
    fileName: string,
    error: Error,
  ) => Promise<void>;

  // Field event handlers
  onField?: (
    uploadId: string,
    fieldName: string,
    value: string,
  ) => Promise<void>;

  // Error handler
  onError?: (uploadId: string, error: Error) => Promise<void>;
};
|
|
115
|
+
|
|
116
|
+
/**
 * Creates a middleware function for streaming multipart/form-data upload parsing.
 * This function processes uploads in chunks as they arrive, allowing for handling
 * of large files without buffering the entire request in memory.
 *
 * @param {StreamingUploadOptions} [options] - Configuration options for upload parsing
 * @returns {Function} A middleware function that adds uploaded files and fields to context
 * @throws {Response} Returns a 415 response if content-type is not multipart/form-data
 * @throws {Response} Returns a 400 response if boundary is missing in Content-Type
 * @throws {Response} Returns a 413 response if request or file exceeds size limits
 * @throws {Response} Returns a 415 response if file type is not allowed
 * @throws {Response} Returns a 400 response if maximum file/field count is exceeded
 *
 * @example
 * const uploadHandler = parseUploadStreaming({
 *   maxFileSize: 10 * 1024 * 1024, // 10MB
 *   allowedTypes: ['image/jpeg', 'image/png'],
 *   onFileStart: async (uploadId, fieldName, fileName, contentType) => {
 *     console.log(`Starting upload: ${fileName}`);
 *     return { customFilename: `custom_${Date.now()}_${fileName}` };
 *   },
 *   onFileChunk: async (uploadId, fieldName, fileName, chunk, offset, isLast) => {
 *     console.log(`Received chunk for ${fileName}: ${chunk.length} bytes at offset ${offset}`);
 *   }
 * });
 *
 * // Use in respondWith:
 * const handler = respondWith({}, uploadHandler, (req, ctx) => {
 *   return json({
 *     uploadId: ctx.uploadId,
 *     files: Array.from(ctx.files.entries()),
 *     fields: Array.from(ctx.fields.entries())
 *   });
 * });
 */
export const parseUploadStreaming = <XContext = {}>(
  options?: StreamingUploadOptions,
): FreeHandler<XContext & CTXUpload> => {
  const {
    maxTotalSize = 100 * 1024 * 1024, // 100MB default
    maxFileSize = 20 * 1024 * 1024, // 20MB per file
    maxFiles = 50,
    maxFields = 1000,
    allowedTypes,
    uploadIdGenerator = () =>
      `upload_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`,
    onUploadStart,
    onUploadComplete,
    onFileStart,
    onFileChunk,
    onFileComplete,
    onFileError,
    onField,
    onError,
  } = options || {};

  return async (req: Request, ctx: RouterContext<XContext & CTXUpload>) => {
    const contentType = req.headers.get("content-type");

    // Check if it's multipart/form-data
    if (!contentType?.startsWith("multipart/form-data")) {
      return status(415, "Expected multipart/form-data");
    }

    // Get boundary from content-type (quoted or bare form)
    const boundaryMatch = contentType.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
    if (!boundaryMatch) {
      return status(400, "Missing boundary in Content-Type");
    }
    const boundary = boundaryMatch[1] || boundaryMatch[2];

    // Generate upload ID and initialize the context maps the handler fills in.
    const uploadId = await uploadIdGenerator();
    ctx.uploadId = uploadId;
    ctx.files = new Map();
    ctx.fields = new Map();

    try {
      const reader = req.body?.getReader();
      if (!reader) {
        throw new Error("No request body");
      }

      // Initialize tracking.
      // `currentPart` is the parser state machine: null = scanning for a
      // boundary; non-null + !isProcessing = headers parsed, limits not yet
      // checked; non-null + isProcessing = streaming part body.
      let buffer = new Uint8Array();
      let totalRead = 0;
      let fileCount = 0;
      let fieldCount = 0;
      let currentPart: {
        headers: Map<string, string>;
        fieldName?: string;
        fileName?: string;
        contentType?: string;
        customFilename?: string;
        metadata?: Record<string, any>;
        bytesRead: number;
        isProcessing: boolean;
      } | null = null;

      const boundaryBytes = new TextEncoder().encode(`--${boundary}`);
      const boundaryEndBytes = new TextEncoder().encode(`--${boundary}--`);
      const headerEndBytes = new TextEncoder().encode("\r\n\r\n");

      // Notify upload start with the declared size (0 when Content-Length absent)
      if (onUploadStart) {
        const contentLength = req.headers.get("content-length");
        await onUploadStart(
          uploadId,
          contentLength ? parseInt(contentLength) : 0,
        );
      }

      // Process streaming data
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;

        totalRead += value.length;
        if (totalRead > maxTotalSize) {
          await reader.cancel();
          throw new Error(
            `Request exceeds maximum size of ${maxTotalSize} bytes`,
          );
        }

        // Add new data to buffer.
        // NOTE(review): copies the whole accumulated buffer on every network
        // chunk — O(n^2) total work in the worst case for large buffered spans.
        const newBuffer = new Uint8Array(buffer.length + value.length);
        newBuffer.set(buffer);
        newBuffer.set(value, buffer.length);
        buffer = newBuffer;

        // Process while we have data; each pass may advance the state machine
        // one step and sets `processed` when it consumed anything.
        let processed = false;
        do {
          processed = false;

          if (!currentPart) {
            // Look for boundary to start new part
            const boundaryIndex = findSequence(buffer, boundaryBytes);
            if (boundaryIndex !== -1) {
              // Check if this is the closing boundary ("--boundary--")
              const isClosingBoundary =
                buffer.length >= boundaryIndex + boundaryBytes.length + 2 &&
                buffer[boundaryIndex + boundaryBytes.length] === 45 && // '-'
                buffer[boundaryIndex + boundaryBytes.length + 1] === 45; // '-'

              if (isClosingBoundary) {
                // End of multipart data.
                // NOTE(review): this `break` exits only the inner do/while;
                // the outer read loop keeps draining the request body.
                buffer = buffer.slice(boundaryIndex + boundaryEndBytes.length);
                break;
              }

              // Remove boundary and look for headers
              buffer = buffer.slice(boundaryIndex + boundaryBytes.length);

              // Look for header end (blank line: CRLF CRLF)
              const headerEndIndex = findSequence(buffer, headerEndBytes);
              if (headerEndIndex !== -1) {
                const headerBytes = buffer.slice(0, headerEndIndex);
                const headers = parseHeaders(
                  new TextDecoder().decode(headerBytes),
                );

                // Parse content-disposition
                const contentDisposition = headers.get("content-disposition");
                if (contentDisposition) {
                  const dispositionParams =
                    parseDisposition(contentDisposition);
                  const fieldName = dispositionParams.get("name");
                  const fileName = dispositionParams.get("filename");
                  const contentType =
                    headers.get("content-type") || "application/octet-stream";
                  if (fieldName) {
                    currentPart = {
                      headers,
                      fieldName,
                      fileName,
                      contentType,
                      bytesRead: 0,
                      isProcessing: false,
                    };
                    // Remove headers from buffer
                    buffer = buffer.slice(
                      headerEndIndex + headerEndBytes.length,
                    );
                    // Start processing this part
                    processed = true;
                  }
                }
              }
            }
          }

          if (currentPart && !currentPart.isProcessing) {
            // Initialize part processing: enforce limits, fire start hooks.
            currentPart.isProcessing = true;

            if (currentPart.fileName) {
              // This is a file
              fileCount++;
              if (fileCount > maxFiles) {
                throw new Error(
                  `Maximum number of files (${maxFiles}) exceeded`,
                );
              }
              if (
                allowedTypes &&
                !allowedTypes.includes(currentPart.contentType!)
              ) {
                throw new Error(
                  `File type ${currentPart.contentType} not allowed`,
                );
              }
              // Call onFileStart; a throw here skips the part via onFileError.
              if (onFileStart) {
                try {
                  const result = await onFileStart(
                    uploadId,
                    currentPart.fieldName!,
                    currentPart.fileName,
                    currentPart.contentType!,
                  );
                  if (result) {
                    currentPart.customFilename = result.customFilename;
                    currentPart.metadata = result.metadata;
                  }
                } catch (error) {
                  if (onFileError) {
                    await onFileError(
                      uploadId,
                      currentPart.fieldName!,
                      currentPart.fileName,
                      error as Error,
                    );
                  }
                  // NOTE(review): `continue` re-runs the do/while with
                  // `processed` still false from the top of this pass, so the
                  // loop exits unless new data arrives — the skipped part's
                  // body bytes stay in the buffer until the next boundary scan.
                  currentPart = null;
                  continue;
                }
              }

              // Add to ctx.files.
              // NOTE(review): keyed by field name — a second file with the
              // same field name replaces this entry.
              ctx.files.set(currentPart.fieldName!, {
                name: currentPart.fileName,
                type: currentPart.contentType!,
                size: 0, // Will be updated as we read
                customFilename: currentPart.customFilename,
                metadata: currentPart.metadata,
              });
            } else {
              // This is a text field
              fieldCount++;
              if (fieldCount > maxFields) {
                throw new Error(
                  `Maximum number of fields (${maxFields}) exceeded`,
                );
              }
            }
          }

          if (currentPart && currentPart.isProcessing) {
            // Process part data until we hit a boundary
            const boundaryIndex = findSequence(buffer, boundaryBytes);

            if (boundaryIndex !== -1) {
              // We found the next boundary
              // Check for CRLF before boundary (it should be there)
              let dataEnd = boundaryIndex;
              if (boundaryIndex >= 2) {
                // Check if there's a CRLF before the boundary
                if (
                  buffer[boundaryIndex - 2] === 13 &&
                  buffer[boundaryIndex - 1] === 10
                ) {
                  dataEnd = boundaryIndex - 2; // Exclude the CRLF
                }
              }

              const partData = buffer.slice(0, dataEnd);

              if (currentPart.fileName) {
                // Process file chunk
                if (partData.length > 0) {
                  currentPart.bytesRead += partData.length;

                  // Check file size limit
                  if (currentPart.bytesRead > maxFileSize) {
                    throw new Error(
                      `File ${currentPart.fileName} exceeds maximum size of ${maxFileSize} bytes`,
                    );
                  }

                  // Update file size in context
                  const file = ctx.files.get(currentPart.fieldName!)!;
                  file.size = currentPart.bytesRead;

                  // Call onFileChunk
                  if (onFileChunk) {
                    await onFileChunk(
                      uploadId,
                      currentPart.fieldName!,
                      currentPart.fileName,
                      partData,
                      currentPart.bytesRead - partData.length,
                      true, // This is the last chunk for this file
                    );
                  }

                  // Call onFileComplete
                  if (onFileComplete) {
                    await onFileComplete(
                      uploadId,
                      currentPart.fieldName!,
                      currentPart.fileName,
                      currentPart.bytesRead,
                      currentPart.customFilename,
                      currentPart.metadata,
                    );
                  }
                }
              } else {
                // Process text field
                if (partData.length > 0) {
                  const value = new TextDecoder().decode(partData);

                  // Add to context
                  ctx.fields.set(currentPart.fieldName!, value);

                  // Call onField
                  if (onField) {
                    await onField(uploadId, currentPart.fieldName!, value);
                  }
                }
              }

              // Move buffer past processed data and boundary.
              // The boundary bytes themselves are kept so the next pass's
              // boundary scan (currentPart === null branch) re-finds them.
              buffer = buffer.slice(boundaryIndex);
              currentPart = null;
              processed = true;
            } else {
              // No boundary found yet
              // Process what we have in buffer (but leave enough bytes for a potential boundary)
              const minBoundaryLength = boundaryBytes.length;

              // Only process if we have significantly more data than boundary length
              if (buffer.length > minBoundaryLength * 2) {
                // Process chunks, but keep enough in buffer to detect boundary
                const chunkSize = buffer.length - minBoundaryLength;

                // NOTE(review): this early-flush path only exists for files;
                // a large text field accumulates in the buffer until its
                // boundary arrives.
                if (currentPart.fileName && chunkSize > 0) {
                  const partData = buffer.slice(0, chunkSize);
                  currentPart.bytesRead += partData.length;

                  // Check file size limit
                  if (currentPart.bytesRead > maxFileSize) {
                    throw new Error(
                      `File ${currentPart.fileName} exceeds maximum size of ${maxFileSize} bytes`,
                    );
                  }

                  // Update file size in context
                  const file = ctx.files.get(currentPart.fieldName!)!;
                  file.size = currentPart.bytesRead;

                  // Call onFileChunk
                  if (onFileChunk) {
                    await onFileChunk(
                      uploadId,
                      currentPart.fieldName!,
                      currentPart.fileName,
                      partData,
                      currentPart.bytesRead - partData.length,
                      false, // Not the last chunk
                    );
                  }

                  // Move buffer past processed data
                  buffer = buffer.slice(chunkSize);
                  processed = true;
                }
              }
            }
          }
        } while (processed);
      }

      // Process any remaining data in buffer (for the last part).
      // Reached when the stream ended while a part was still open (e.g. body
      // truncated before the closing boundary).
      if (currentPart && buffer.length > 0) {
        if (currentPart.fileName) {
          // Process remaining file data
          currentPart.bytesRead += buffer.length;

          // Check file size limit
          if (currentPart.bytesRead > maxFileSize) {
            throw new Error(
              `File ${currentPart.fileName} exceeds maximum size of ${maxFileSize} bytes`,
            );
          }

          // Update file size in context
          const file = ctx.files.get(currentPart.fieldName!)!;
          file.size = currentPart.bytesRead;

          // Call onFileChunk for last chunk
          if (onFileChunk) {
            await onFileChunk(
              uploadId,
              currentPart.fieldName!,
              currentPart.fileName,
              buffer,
              currentPart.bytesRead - buffer.length,
              true,
            );
          }

          // Call onFileComplete
          if (onFileComplete) {
            await onFileComplete(
              uploadId,
              currentPart.fieldName!,
              currentPart.fileName,
              currentPart.bytesRead,
              currentPart.customFilename,
              currentPart.metadata,
            );
          }
        } else {
          // Process remaining text field data
          const value = new TextDecoder().decode(buffer);

          // Add to context
          ctx.fields.set(currentPart.fieldName!, value);

          // Call onField
          if (onField) {
            await onField(uploadId, currentPart.fieldName!, value);
          }
        }
      }

      // Notify upload completion
      if (onUploadComplete) {
        await onUploadComplete(uploadId, true);
      }
    } catch (error: any) {
      // Notify upload completion with failure (hook errors deliberately ignored)
      if (onUploadComplete) {
        await onUploadComplete(uploadId, false).catch(() => {});
      }

      // Call error handler
      if (onError) {
        await onError(uploadId, error).catch(() => {});
      }

      console.error("Upload parsing error:", error);

      // Map known error messages to status codes; order matters because
      // "exceeds maximum" must be matched before the generic "exceeded".
      if (error.message.includes("exceeds maximum")) {
        return status(413, error.message);
      }
      if (error.message.includes("not allowed")) {
        return status(415, error.message);
      }
      if (error.message.includes("exceeded")) {
        return status(400, error.message);
      }

      return status(400, "Failed to parse upload");
    }
  };
};
|
|
586
|
+
|
|
587
|
+
/**
|
|
588
|
+
* Helper function to find a byte sequence within a buffer.
|
|
589
|
+
* Performs a linear search for the sequence in the buffer.
|
|
590
|
+
*
|
|
591
|
+
* @private
|
|
592
|
+
* @param {Uint8Array} buffer - The buffer to search within
|
|
593
|
+
* @param {Uint8Array} sequence - The byte sequence to find
|
|
594
|
+
* @returns {number} The index of the sequence, or -1 if not found
|
|
595
|
+
*/
|
|
596
|
+
function findSequence(buffer: Uint8Array, sequence: Uint8Array): number {
|
|
597
|
+
if (buffer.length < sequence.length) return -1;
|
|
598
|
+
|
|
599
|
+
// Use a more efficient search algorithm for large buffers
|
|
600
|
+
for (let i = 0; i <= buffer.length - sequence.length; i++) {
|
|
601
|
+
let found = true;
|
|
602
|
+
for (let j = 0; j < sequence.length; j++) {
|
|
603
|
+
if (buffer[i + j] !== sequence[j]) {
|
|
604
|
+
found = false;
|
|
605
|
+
break;
|
|
606
|
+
}
|
|
607
|
+
}
|
|
608
|
+
if (found) return i;
|
|
609
|
+
}
|
|
610
|
+
return -1;
|
|
611
|
+
}
|
|
612
|
+
|
|
613
|
+
/**
|
|
614
|
+
* Helper function to parse HTTP headers from text.
|
|
615
|
+
*
|
|
616
|
+
* @private
|
|
617
|
+
* @param {string} headerText - Raw header text with CRLF separators
|
|
618
|
+
* @returns {Map<string, string>} Map of lowercase header names to values
|
|
619
|
+
*/
|
|
620
|
+
function parseHeaders(headerText: string): Map<string, string> {
|
|
621
|
+
const headers = new Map<string, string>();
|
|
622
|
+
const lines = headerText.split("\r\n");
|
|
623
|
+
for (const line of lines) {
|
|
624
|
+
const colonIndex = line.indexOf(":");
|
|
625
|
+
if (colonIndex !== -1) {
|
|
626
|
+
const key = line.slice(0, colonIndex).trim().toLowerCase();
|
|
627
|
+
const value = line.slice(colonIndex + 1).trim();
|
|
628
|
+
headers.set(key, value);
|
|
629
|
+
}
|
|
630
|
+
}
|
|
631
|
+
return headers;
|
|
632
|
+
}
|
|
633
|
+
|
|
634
|
+
/**
|
|
635
|
+
* Helper function to parse content-disposition header parameters.
|
|
636
|
+
*
|
|
637
|
+
* @private
|
|
638
|
+
* @param {string} disposition - Content-disposition header value
|
|
639
|
+
* @returns {Map<string, string>} Map of parameter names to values
|
|
640
|
+
*/
|
|
641
|
+
function parseDisposition(disposition: string): Map<string, string> {
|
|
642
|
+
const params = new Map<string, string>();
|
|
643
|
+
const parts = disposition.split(";").map((p) => p.trim());
|
|
644
|
+
|
|
645
|
+
for (let i = 1; i < parts.length; i++) {
|
|
646
|
+
const part = parts[i];
|
|
647
|
+
const eqIndex = part.indexOf("=");
|
|
648
|
+
if (eqIndex !== -1) {
|
|
649
|
+
let key = part.slice(0, eqIndex).trim();
|
|
650
|
+
let value = part.slice(eqIndex + 1).trim();
|
|
651
|
+
|
|
652
|
+
if (value.startsWith('"') && value.endsWith('"')) {
|
|
653
|
+
value = value.slice(1, -1);
|
|
654
|
+
}
|
|
655
|
+
|
|
656
|
+
params.set(key, value);
|
|
657
|
+
}
|
|
658
|
+
}
|
|
659
|
+
|
|
660
|
+
return params;
|
|
661
|
+
}
|
|
@@ -1,31 +0,0 @@
|
|
|
1
|
-
import Router, { RouterConfig, RouterContext } from "./router";
|
|
2
|
-
import type { Handler, HttpMethod, Pipeline, CTXError } from "./types";
|
|
3
|
-
export type UCHandlerType = "FILTER" | "HOOK" | "HANDLER" | "FALLBACK" | "CATCHER" | "AFTER";
|
|
4
|
-
export type RouterHandlers<CommonXContext = {}, MethodContexts extends Partial<Record<HttpMethod | "ALL" | "CRUD", Record<string, any>>> = {}> = {
|
|
5
|
-
[K in HttpMethod | "ALL" | "CRUD" as K]?: {
|
|
6
|
-
[H in UCHandlerType as H]?: H extends "CATCHER" ? Handler<CommonXContext & CTXError & MethodContexts[K]> | Pipeline<CommonXContext & CTXError & MethodContexts[K]> : Handler<CommonXContext & MethodContexts[K]> | Pipeline<CommonXContext & MethodContexts[K]>;
|
|
7
|
-
};
|
|
8
|
-
};
|
|
9
|
-
export interface RouterFrameworkConfig<Context extends RouterContext = RouterContext> extends RouterConfig<Context> {
|
|
10
|
-
rootPath?: string;
|
|
11
|
-
filterNode?: (node: DirWalkNode) => boolean;
|
|
12
|
-
processNode?: (node: DirWalkNode) => void;
|
|
13
|
-
onDir?: (node: DirWalkNode) => void;
|
|
14
|
-
}
|
|
15
|
-
export declare class RouterFramework<EXTContext = {}, Context extends RouterContext<EXTContext> = RouterContext<EXTContext>> extends Router<Context> {
|
|
16
|
-
#private;
|
|
17
|
-
get loading(): boolean;
|
|
18
|
-
get loaded(): boolean;
|
|
19
|
-
constructor(config?: RouterFrameworkConfig<Context>);
|
|
20
|
-
load(): Promise<RouterFramework<Context>>;
|
|
21
|
-
}
|
|
22
|
-
export interface DirWalkNode {
|
|
23
|
-
type: string;
|
|
24
|
-
name: string;
|
|
25
|
-
path: string;
|
|
26
|
-
parent: string;
|
|
27
|
-
fullPath: string;
|
|
28
|
-
relativePath: string;
|
|
29
|
-
}
|
|
30
|
-
export declare function walk(dir: string, rootPath?: string): AsyncGenerator<DirWalkNode>;
|
|
31
|
-
//# sourceMappingURL=framework.deno.d.ts.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"framework.deno.d.ts","sourceRoot":"","sources":["../../src/framework.deno.ts"],"names":[],"mappings":"AAAA,OAAO,MAAM,EAAE,EAEb,YAAY,EACZ,aAAa,EACd,MAAM,UAAU,CAAC;AAClB,OAAO,KAAK,EACV,OAAO,EAEP,UAAU,EAEV,QAAQ,EACR,QAAQ,EACT,MAAM,SAAS,CAAC;AAUjB,MAAM,MAAM,aAAa,GACrB,QAAQ,GACR,MAAM,GACN,SAAS,GACT,UAAU,GACV,SAAS,GACT,OAAO,CAAC;AAEZ,MAAM,MAAM,cAAc,CACxB,cAAc,GAAG,EAAE,EACnB,cAAc,SAAS,OAAO,CAC5B,MAAM,CAAC,UAAU,GAAG,KAAK,GAAG,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CACzD,GAAG,EAAE,IACJ;KACD,CAAC,IAAI,UAAU,GAAG,KAAK,GAAG,MAAM,IAAI,CAAC,CAAC,CAAC,EAAE;SACvC,CAAC,IAAI,aAAa,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,SAAS,SAAS,GAEvC,OAAO,CAAC,cAAc,GAAG,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC,GACtD,QAAQ,CAAC,cAAc,GAAG,QAAQ,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC,GAEvD,OAAO,CAAC,cAAc,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC,GAC3C,QAAQ,CAAC,cAAc,GAAG,cAAc,CAAC,CAAC,CAAC,CAAC;KACrD;CACF,CAAC;AAEF,MAAM,WAAW,qBAAqB,CACpC,OAAO,SAAS,aAAa,GAAG,aAAa,CAC7C,SAAQ,YAAY,CAAC,OAAO,CAAC;IAC7B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,UAAU,CAAC,EAAE,CAAC,IAAI,EAAE,WAAW,KAAK,OAAO,CAAC;IAC5C,WAAW,CAAC,EAAE,CAAC,IAAI,EAAE,WAAW,KAAK,IAAI,CAAC;IAC1C,KAAK,CAAC,EAAE,CAAC,IAAI,EAAE,WAAW,KAAK,IAAI,CAAC;CACrC;AAED,qBAAa,eAAe,CAC1B,UAAU,GAAG,EAAE,EACf,OAAO,SAAS,aAAa,CAAC,UAAU,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,CACrE,SAAQ,MAAM,CAAC,OAAO,CAAC;;IAQvB,IAAI,OAAO,IAAI,OAAO,CAErB;IAED,IAAI,MAAM,IAAI,OAAO,CAEpB;gBAEW,MAAM,CAAC,EAAE,qBAAqB,CAAC,OAAO,CAAC;IAU7C,IAAI,IAAI,OAAO,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC;CA8EhD;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,YAAY,EAAE,MAAM,CAAC;CACtB;AAED,wBAAuB,IAAI,CACzB,GAAG,EAAE,MAAM,EACX,QAAQ,CAAC,EAAE,MAAM,GAChB,cAAc,CAAC,WAAW,CAAC,CAgB7B"}
|