@syncular/server-hono 0.0.6-158 → 0.0.6-165

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/blobs.d.ts +10 -4
  2. package/dist/blobs.d.ts.map +1 -1
  3. package/dist/blobs.js +260 -26
  4. package/dist/blobs.js.map +1 -1
  5. package/dist/console/gateway.d.ts +4 -0
  6. package/dist/console/gateway.d.ts.map +1 -1
  7. package/dist/console/gateway.js +97 -60
  8. package/dist/console/gateway.js.map +1 -1
  9. package/dist/console/route-descriptor.d.ts +6 -0
  10. package/dist/console/route-descriptor.d.ts.map +1 -0
  11. package/dist/console/route-descriptor.js +16 -0
  12. package/dist/console/route-descriptor.js.map +1 -0
  13. package/dist/console/routes.d.ts.map +1 -1
  14. package/dist/console/routes.js +153 -108
  15. package/dist/console/routes.js.map +1 -1
  16. package/dist/console/schema-errors.d.ts +2 -0
  17. package/dist/console/schema-errors.d.ts.map +1 -0
  18. package/dist/console/schema-errors.js +17 -0
  19. package/dist/console/schema-errors.js.map +1 -0
  20. package/dist/console/schemas.js +1 -1
  21. package/dist/console/schemas.js.map +1 -1
  22. package/dist/console/types.d.ts +32 -0
  23. package/dist/console/types.d.ts.map +1 -1
  24. package/dist/create-server.d.ts.map +1 -1
  25. package/dist/create-server.js +13 -10
  26. package/dist/create-server.js.map +1 -1
  27. package/dist/proxy/routes.d.ts +10 -0
  28. package/dist/proxy/routes.d.ts.map +1 -1
  29. package/dist/proxy/routes.js +57 -6
  30. package/dist/proxy/routes.js.map +1 -1
  31. package/dist/routes.d.ts +21 -0
  32. package/dist/routes.d.ts.map +1 -1
  33. package/dist/routes.js +338 -352
  34. package/dist/routes.js.map +1 -1
  35. package/package.json +7 -6
  36. package/src/__tests__/blob-routes.test.ts +286 -18
  37. package/src/__tests__/console-gateway-live-routes.test.ts +61 -1
  38. package/src/__tests__/console-routes.test.ts +30 -1
  39. package/src/__tests__/create-server.test.ts +237 -1
  40. package/src/__tests__/pull-chunk-storage.test.ts +98 -0
  41. package/src/blobs.ts +360 -34
  42. package/src/console/gateway.ts +335 -288
  43. package/src/console/route-descriptor.ts +22 -0
  44. package/src/console/routes.ts +327 -248
  45. package/src/console/schema-errors.ts +23 -0
  46. package/src/console/schemas.ts +1 -1
  47. package/src/console/types.ts +32 -0
  48. package/src/create-server.ts +13 -10
  49. package/src/proxy/routes.ts +73 -9
  50. package/src/routes.ts +449 -396
package/src/blobs.ts CHANGED
@@ -9,6 +9,8 @@
  * - GET /blobs/:hash/download - Direct download (for database adapter)
  */
 
+import { sha256 } from '@noble/hashes/sha2.js';
+import { bytesToHex } from '@noble/hashes/utils.js';
 import {
   BlobUploadCompleteResponseSchema,
   BlobUploadInitRequestSchema,
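
The two new imports bring @noble/hashes' incremental SHA-256 into this module; the streaming upload path added further down uses it to hash the request body chunk by chunk instead of buffering it first. As a standalone illustration of that API (not part of the diff):

import { sha256 } from '@noble/hashes/sha2.js';
import { bytesToHex } from '@noble/hashes/utils.js';

// Feed chunks as they arrive, then read the digest once at the end.
const hasher = sha256.create();
hasher.update(new TextEncoder().encode('chunk-1'));
hasher.update(new TextEncoder().encode('chunk-2'));
const hex = bytesToHex(hasher.digest()); // 64-char lowercase hex digest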
@@ -35,6 +37,7 @@ import { z } from 'zod';
 
 interface BlobAuthResult {
   actorId: string;
+  partitionId?: string;
 }
 
 export interface CreateBlobRoutesOptions<DB extends SyncBlobsDb = SyncBlobsDb> {
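
partitionId on the auth result is optional; the route handlers below fall back to 'default' when the authenticate callback does not supply one. A minimal sketch of a partition-aware callback, where verifySessionToken and its claim names are hypothetical:

// Sketch only: verifySessionToken and its claims are placeholder names.
const authenticate = async (
  token: string
): Promise<{ actorId: string; partitionId?: string } | null> => {
  const claims = await verifySessionToken(token);
  if (!claims) return null;
  return {
    actorId: claims.userId,
    partitionId: claims.tenantId, // omit to fall back to 'default'
  };
};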
@@ -53,11 +56,14 @@ export interface CreateBlobRoutesOptions<DB extends SyncBlobsDb = SyncBlobsDb> {
    */
   db?: Kysely<DB>;
   /**
-   * Optional: Check if actor can access a blob.
-   * By default, any authenticated actor can access any completed blob.
-   * Provide this to implement scope-based access control.
+   * Check whether an authenticated actor can access a blob hash.
+   * This must enforce your tenant or ownership model.
    */
-  canAccessBlob?: (args: { actorId: string; hash: string }) => Promise<boolean>;
+  canAccessBlob: (args: {
+    actorId: string;
+    hash: string;
+    partitionId: string;
+  }) => Promise<boolean>;
   /**
    * Maximum upload size in bytes.
    * Default: 100MB (104857600)
@@ -86,6 +92,11 @@ const tokenQuerySchema = z.object({
  *     const user = await verifyToken(token);
  *     return user ? { actorId: user.id } : null;
  *   },
+ *   canAccessBlob: async ({ actorId, hash, partitionId }) => {
+ *     // Enforce tenant/ownership permissions here.
+ *     // partitionId defaults to "default" when not provided by auth.
+ *     return true;
+ *   },
  * });
  *
  * app.route('/api/sync', blobRoutes);
@@ -158,11 +169,13 @@ export function createBlobRoutes<DB extends SyncBlobsDb>(
     }
 
     try {
+      const partitionId = auth.partitionId ?? 'default';
       const result = await blobManager.initiateUpload({
         hash: body.hash,
         size: body.size,
         mimeType: body.mimeType,
         actorId: auth.actorId,
+        partitionId,
       });
 
       return c.json(result, 200);
@@ -227,9 +240,16 @@
       );
     }
 
-    const result = await blobManager.completeUpload(hash);
+    const partitionId = auth.partitionId ?? 'default';
+    const result = await blobManager.completeUpload(hash, {
+      actorId: auth.actorId,
+      partitionId,
+    });
 
     if (!result.ok) {
+      if (result.error === 'FORBIDDEN') {
+        return c.json({ error: 'FORBIDDEN' }, 403);
+      }
       return c.json({ error: 'UPLOAD_FAILED', message: result.error }, 400);
     }
 
@@ -300,18 +320,21 @@ export function createBlobRoutes<DB extends SyncBlobsDb>(
       return c.json({ error: 'NOT_FOUND' }, 404);
     }
 
-    // Check access if canAccessBlob is provided
-    if (canAccessBlob) {
-      const canAccess = await canAccessBlob({ actorId: auth.actorId, hash });
-      if (!canAccess) {
-        return c.json({ error: 'FORBIDDEN' }, 403);
-      }
+    const partitionId = auth.partitionId ?? 'default';
+    const canAccess = await canAccessBlob({
+      actorId: auth.actorId,
+      hash,
+      partitionId,
+    });
+    if (!canAccess) {
+      return c.json({ error: 'FORBIDDEN' }, 403);
     }
 
     try {
       const result = await blobManager.getDownloadUrl({
         hash,
         actorId: auth.actorId,
+        partitionId,
       });
       return c.json(result, 200);
     } catch (err) {
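
The download-URL route now always resolves a partitionId and consults the required canAccessBlob callback, so a stub that returns true unconditionally re-opens cross-tenant reads. A minimal sketch of a partition-scoped check using Kysely, assuming a hypothetical blob_grants table keyed by (partition_id, blob_hash, actor_id):

// Sketch only: the blob_grants table is an assumption, not part of this package.
const canAccessBlob = async (args: {
  actorId: string;
  hash: string;
  partitionId: string;
}): Promise<boolean> => {
  const row = await db
    .selectFrom('blob_grants')
    .select('blob_hash')
    .where('partition_id', '=', args.partitionId)
    .where('blob_hash', '=', args.hash)
    .where('actor_id', '=', args.actorId)
    .executeTakeFirst();
  return row !== undefined;
};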
@@ -365,29 +388,134 @@ export function createBlobRoutes<DB extends SyncBlobsDb>(
       return c.json({ error: 'INVALID_TOKEN' }, 401);
     }
 
-    // Get upload metadata
-    const metadata = await blobManager.getMetadata(hash);
-
-    // Read body
-    const body = await c.req.arrayBuffer();
-    const bodyBytes = new Uint8Array(body);
-
-    // Verify size
-    const expectedSize = metadata?.size;
-    if (expectedSize !== undefined && bodyBytes.length !== expectedSize) {
+    const uploadRecord = await blobManager.getUploadRecord(hash, {
+      partitionId: payload.partitionId,
+    });
+    if (!uploadRecord) {
+      return c.json({ error: 'UPLOAD_NOT_FOUND' }, 404);
+    }
+    if (uploadRecord.status !== 'pending') {
+      return c.json({ error: 'UPLOAD_NOT_PENDING' }, 409);
+    }
+    if (payload.size !== uploadRecord.size) {
+      return c.json({ error: 'INVALID_TOKEN' }, 401);
+    }
+    if (uploadRecord.size > maxUploadSize) {
       return c.json(
         {
-          error: 'SIZE_MISMATCH',
-          message: `Expected ${expectedSize} bytes, got ${bodyBytes.length}`,
+          error: 'BLOB_TOO_LARGE',
+          message: `Maximum upload size is ${maxUploadSize} bytes`,
         },
         400
       );
     }
 
+    const contentLengthHeader = c.req.header('Content-Length');
+    if (contentLengthHeader) {
+      const contentLength = Number(contentLengthHeader);
+      if (!Number.isFinite(contentLength) || contentLength < 0) {
+        return c.json(
+          { error: 'INVALID_REQUEST', message: 'Invalid Content-Length' },
+          400
+        );
+      }
+      if (contentLength > maxUploadSize) {
+        return c.json(
+          {
+            error: 'BLOB_TOO_LARGE',
+            message: `Maximum upload size is ${maxUploadSize} bytes`,
+          },
+          400
+        );
+      }
+      if (contentLength !== uploadRecord.size) {
+        return c.json(
+          {
+            error: 'SIZE_MISMATCH',
+            message: `Expected ${uploadRecord.size} bytes, got ${contentLength}`,
+          },
+          400
+        );
+      }
+    }
+
+    const mimeType =
+      c.req.header('Content-Type') ??
+      uploadRecord.mimeType ??
+      'application/octet-stream';
+    const storagePartitionOptions = { partitionId: payload.partitionId };
+
+    const streamingUpload = blobManager.adapter.putStream
+      ? createValidatedUploadStream(c.req.raw, {
+          expectedSize: uploadRecord.size,
+          maxSize: maxUploadSize,
+        })
+      : null;
+
+    if (streamingUpload && blobManager.adapter.putStream) {
+      try {
+        await blobManager.adapter.putStream(
+          hash,
+          streamingUpload.stream,
+          { mimeType },
+          storagePartitionOptions
+        );
+      } catch (err) {
+        if (isBlobUploadBodyError(err)) {
+          void streamingUpload.hashHex.catch(() => {});
+          return c.json(
+            {
+              error: err.code,
+              message: err.message,
+            },
+            400
+          );
+        }
+        void streamingUpload.hashHex.catch(() => {});
+        throw err;
+      }
+
+      const computedHash = await streamingUpload.hashHex;
+      const expectedHex = parseBlobHash(hash);
+      if (!expectedHex || computedHash !== expectedHex) {
+        await deleteUploadedBlobBestEffort(blobManager, hash, {
+          partitionId: payload.partitionId,
+        });
+        return c.json(
+          {
+            error: 'HASH_MISMATCH',
+            message: 'Content hash does not match',
+          },
+          400
+        );
+      }
+
+      return c.text('OK', 200);
+    }
+
+    let bodyBytes: Uint8Array;
+    try {
+      bodyBytes = await readRequestBodyWithLimit(c.req.raw, {
+        expectedSize: uploadRecord.size,
+        maxSize: maxUploadSize,
+      });
+    } catch (err) {
+      if (isBlobUploadBodyError(err)) {
+        return c.json(
+          {
+            error: err.code,
+            message: err.message,
+          },
+          400
+        );
+      }
+      throw err;
+    }
+
     // Verify hash
     const computedHash = await computeSha256Hash(bodyBytes);
     const expectedHex = parseBlobHash(hash);
-    if (computedHash !== expectedHex) {
+    if (!expectedHex || computedHash !== expectedHex) {
       return c.json(
         {
           error: 'HASH_MISMATCH',
@@ -397,16 +525,16 @@ export function createBlobRoutes<DB extends SyncBlobsDb>(
       );
     }
 
-    // Store via the blob adapter (R2, database, etc.)
-    const mimeType =
-      c.req.header('Content-Type') ??
-      metadata?.mimeType ??
-      'application/octet-stream';
-
     if (blobManager.adapter.put) {
-      await blobManager.adapter.put(hash, bodyBytes, { mimeType });
+      await blobManager.adapter.put(
+        hash,
+        bodyBytes,
+        { mimeType },
+        storagePartitionOptions
+      );
     } else {
       await storeBlobInDatabase(db, {
+        partitionId: payload.partitionId,
         hash,
         size: bodyBytes.length,
         mimeType,
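
Uploads now take one of two paths: when the storage adapter implements putStream, the body is size-checked and hashed while it streams to storage; otherwise it is buffered through readRequestBodyWithLimit and written with adapter.put. A toy in-memory putStream sketch, with its signature inferred from how this diff calls it:

// Sketch only: a minimal putStream consumer; the adapter contract is inferred.
const store = new Map<string, { bytes: Uint8Array; mimeType: string }>();

async function putStream(
  hash: string,
  stream: ReadableStream<Uint8Array>,
  meta: { mimeType: string },
  options: { partitionId: string }
): Promise<void> {
  // Draining the stream drives the route's validation; size or limit
  // violations surface here as rejected reads (BlobUploadBodyError).
  const chunks: Uint8Array[] = [];
  let total = 0;
  const reader = stream.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    chunks.push(value);
    total += value.length;
  }
  const bytes = new Uint8Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    bytes.set(chunk, offset);
    offset += chunk.length;
  }
  store.set(`${options.partitionId}/${hash}`, {
    bytes,
    mimeType: meta.mimeType,
  });
}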
@@ -465,12 +593,16 @@ export function createBlobRoutes<DB extends SyncBlobsDb>(
 
     // Read via the blob adapter (R2, database, etc.)
     if (blobManager.adapter.get) {
-      const data = await blobManager.adapter.get(hash);
+      const data = await blobManager.adapter.get(hash, {
+        partitionId: payload.partitionId,
+      });
       if (!data) {
         return c.json({ error: 'NOT_FOUND' }, 404);
       }
       const meta = blobManager.adapter.getMetadata
-        ? await blobManager.adapter.getMetadata(hash)
+        ? await blobManager.adapter.getMetadata(hash, {
+            partitionId: payload.partitionId,
+          })
         : null;
       return new Response(data as BodyInit, {
         status: 200,
@@ -483,7 +615,9 @@ export function createBlobRoutes<DB extends SyncBlobsDb>(
     }
 
     // Fallback: read from database directly
-    const blob = await readBlobFromDatabase(db, hash);
+    const blob = await readBlobFromDatabase(db, hash, {
+      partitionId: payload.partitionId,
+    });
     if (!blob) {
       return c.json({ error: 'NOT_FOUND' }, 404);
     }
@@ -523,6 +657,198 @@ function isBlobNotFoundError(err: unknown): err is BlobNotFoundError {
   );
 }
 
+class BlobUploadBodyError extends Error {
+  constructor(
+    public readonly code: 'BLOB_TOO_LARGE' | 'SIZE_MISMATCH',
+    message: string
+  ) {
+    super(message);
+    this.name = 'BlobUploadBodyError';
+  }
+}
+
+function isBlobUploadBodyError(err: unknown): err is BlobUploadBodyError {
+  return (
+    typeof err === 'object' &&
+    err !== null &&
+    (err as { name?: string }).name === 'BlobUploadBodyError'
+  );
+}
+
+async function deleteUploadedBlobBestEffort(
+  blobManager: BlobManager,
+  hash: string,
+  options: { partitionId: string }
+): Promise<void> {
+  try {
+    await blobManager.adapter.delete(hash, options);
+  } catch {
+    // Best-effort cleanup only.
+  }
+}
+
+interface IncrementalSha256 {
+  update(chunk: Uint8Array): void;
+  digestHex(): string;
+}
+
+function createIncrementalSha256(): IncrementalSha256 {
+  const hasher = sha256.create();
+  return {
+    update(chunk) {
+      hasher.update(chunk);
+    },
+    digestHex() {
+      return bytesToHex(hasher.digest());
+    },
+  };
+}
+
+interface ValidatedUploadStream {
+  stream: ReadableStream<Uint8Array>;
+  hashHex: Promise<string>;
+}
+
+function createValidatedUploadStream(
+  request: Request,
+  args: { expectedSize: number; maxSize: number }
+): ValidatedUploadStream | null {
+  const body = request.body;
+  if (!body) return null;
+
+  const hasher = createIncrementalSha256();
+  const reader = body.getReader();
+
+  let resolveHash: ((hashHex: string) => void) | null = null;
+  let rejectHash: ((reason: Error) => void) | null = null;
+  const hashHex = new Promise<string>((resolve, reject) => {
+    resolveHash = resolve;
+    rejectHash = reject;
+  });
+
+  let totalSize = 0;
+  let finalized = false;
+
+  const fail = (error: Error): void => {
+    if (finalized) return;
+    finalized = true;
+    rejectHash?.(error);
+  };
+
+  const complete = (): void => {
+    if (finalized) return;
+    finalized = true;
+    resolveHash?.(hasher.digestHex());
+  };
+
+  const stream = new ReadableStream<Uint8Array>({
+    async pull(controller) {
+      try {
+        const { done, value } = await reader.read();
+        if (done) {
+          if (totalSize !== args.expectedSize) {
+            const sizeError = new BlobUploadBodyError(
+              'SIZE_MISMATCH',
+              `Expected ${args.expectedSize} bytes, got ${totalSize}`
+            );
+            fail(sizeError);
+            controller.error(sizeError);
+            return;
+          }
+          complete();
+          controller.close();
+          return;
+        }
+
+        if (!value || value.length === 0) {
+          return;
+        }
+
+        totalSize += value.length;
+        if (totalSize > args.maxSize) {
+          const limitError = new BlobUploadBodyError(
+            'BLOB_TOO_LARGE',
+            `Maximum upload size is ${args.maxSize} bytes`
+          );
+          fail(limitError);
+          controller.error(limitError);
+          return;
+        }
+        if (totalSize > args.expectedSize) {
+          const mismatchError = new BlobUploadBodyError(
+            'SIZE_MISMATCH',
+            `Expected ${args.expectedSize} bytes, got more than expected`
+          );
+          fail(mismatchError);
+          controller.error(mismatchError);
+          return;
+        }
+
+        hasher.update(value);
+        controller.enqueue(value);
+      } catch (err) {
+        const streamError =
+          err instanceof Error ? err : new Error('Failed to read upload body');
+        fail(streamError);
+        controller.error(streamError);
+      }
+    },
+    cancel() {
+      reader.cancel().catch(() => {});
+    },
+  });
+
+  return { stream, hashHex };
+}
+
+async function readRequestBodyWithLimit(
+  request: Request,
+  args: { expectedSize: number; maxSize: number }
+): Promise<Uint8Array> {
+  const body = request.body;
+  if (!body) {
+    if (args.expectedSize === 0) return new Uint8Array();
+    throw new BlobUploadBodyError(
+      'SIZE_MISMATCH',
+      `Expected ${args.expectedSize} bytes, got 0`
+    );
+  }
+
+  const reader = body.getReader();
+  const merged = new Uint8Array(args.expectedSize);
+  let totalSize = 0;
+
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) break;
+    if (!value || value.length === 0) continue;
+
+    totalSize += value.length;
+    if (totalSize > args.maxSize) {
+      throw new BlobUploadBodyError(
+        'BLOB_TOO_LARGE',
+        `Maximum upload size is ${args.maxSize} bytes`
+      );
+    }
+    if (totalSize > args.expectedSize) {
+      throw new BlobUploadBodyError(
+        'SIZE_MISMATCH',
+        `Expected ${args.expectedSize} bytes, got more than expected`
+      );
+    }
+    merged.set(value, totalSize - value.length);
+  }
+
+  if (totalSize !== args.expectedSize) {
+    throw new BlobUploadBodyError(
+      'SIZE_MISMATCH',
+      `Expected ${args.expectedSize} bytes, got ${totalSize}`
+    );
+  }
+
+  return merged;
+}
+
 async function computeSha256Hash(data: Uint8Array): Promise<string> {
   // Create a new ArrayBuffer copy to satisfy TypeScript's strict typing
   const buffer = new Uint8Array(data).buffer as ArrayBuffer;
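
createValidatedUploadStream ties the storage write to validation: hashHex settles only after the wrapped stream is fully drained, resolving with the SHA-256 hex digest on success and rejecting with a BlobUploadBodyError when a size or limit check fails. A small consumer sketch of that contract, where destination is a placeholder WritableStream:

// Sketch only: demonstrates the ValidatedUploadStream contract.
const upload = createValidatedUploadStream(request, {
  expectedSize: 1024,
  maxSize: 104857600,
});
if (upload) {
  // pipeTo rejects if the body overruns maxSize or misses expectedSize.
  await upload.stream.pipeTo(destination);
  const hex = await upload.hashHex; // resolves once the full body matched
}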