@syncular/server-hono 0.0.6-158 → 0.0.6-165

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/dist/blobs.d.ts +10 -4
  2. package/dist/blobs.d.ts.map +1 -1
  3. package/dist/blobs.js +260 -26
  4. package/dist/blobs.js.map +1 -1
  5. package/dist/console/gateway.d.ts +4 -0
  6. package/dist/console/gateway.d.ts.map +1 -1
  7. package/dist/console/gateway.js +97 -60
  8. package/dist/console/gateway.js.map +1 -1
  9. package/dist/console/route-descriptor.d.ts +6 -0
  10. package/dist/console/route-descriptor.d.ts.map +1 -0
  11. package/dist/console/route-descriptor.js +16 -0
  12. package/dist/console/route-descriptor.js.map +1 -0
  13. package/dist/console/routes.d.ts.map +1 -1
  14. package/dist/console/routes.js +153 -108
  15. package/dist/console/routes.js.map +1 -1
  16. package/dist/console/schema-errors.d.ts +2 -0
  17. package/dist/console/schema-errors.d.ts.map +1 -0
  18. package/dist/console/schema-errors.js +17 -0
  19. package/dist/console/schema-errors.js.map +1 -0
  20. package/dist/console/schemas.js +1 -1
  21. package/dist/console/schemas.js.map +1 -1
  22. package/dist/console/types.d.ts +32 -0
  23. package/dist/console/types.d.ts.map +1 -1
  24. package/dist/create-server.d.ts.map +1 -1
  25. package/dist/create-server.js +13 -10
  26. package/dist/create-server.js.map +1 -1
  27. package/dist/proxy/routes.d.ts +10 -0
  28. package/dist/proxy/routes.d.ts.map +1 -1
  29. package/dist/proxy/routes.js +57 -6
  30. package/dist/proxy/routes.js.map +1 -1
  31. package/dist/routes.d.ts +21 -0
  32. package/dist/routes.d.ts.map +1 -1
  33. package/dist/routes.js +338 -352
  34. package/dist/routes.js.map +1 -1
  35. package/package.json +7 -6
  36. package/src/__tests__/blob-routes.test.ts +286 -18
  37. package/src/__tests__/console-gateway-live-routes.test.ts +61 -1
  38. package/src/__tests__/console-routes.test.ts +30 -1
  39. package/src/__tests__/create-server.test.ts +237 -1
  40. package/src/__tests__/pull-chunk-storage.test.ts +98 -0
  41. package/src/blobs.ts +360 -34
  42. package/src/console/gateway.ts +335 -288
  43. package/src/console/route-descriptor.ts +22 -0
  44. package/src/console/routes.ts +327 -248
  45. package/src/console/schema-errors.ts +23 -0
  46. package/src/console/schemas.ts +1 -1
  47. package/src/console/types.ts +32 -0
  48. package/src/create-server.ts +13 -10
  49. package/src/proxy/routes.ts +73 -9
  50. package/src/routes.ts +449 -396

package/src/__tests__/blob-routes.test.ts

@@ -30,6 +30,7 @@ interface CompleteResponse {
 }
 
 const ACTOR_HEADER = 'x-user-id';
+const PARTITION_HEADER = 'x-partition-id';
 const ACTOR_ID = 'user-1';
 const INVALID_HASH = 'invalid-hash';
 
@@ -48,11 +49,31 @@ async function signBlobToken(args: {
   signer: BlobTokenSigner;
   hash: string;
   action: 'upload' | 'download';
+  size?: number;
+  partitionId?: string;
 }): Promise<string> {
+  const partitionId = args.partitionId ?? 'default';
+  if (args.action === 'upload') {
+    if (typeof args.size !== 'number') {
+      throw new Error('size is required for upload tokens');
+    }
+    return args.signer.sign(
+      {
+        hash: args.hash,
+        partitionId,
+        action: 'upload',
+        size: args.size,
+        expiresAt: Date.now() + 60_000,
+      },
+      60
+    );
+  }
+
   return args.signer.sign(
     {
       hash: args.hash,
-      action: args.action,
+      partitionId,
+      action: 'download',
       expiresAt: Date.now() + 60_000,
     },
     60
@@ -85,6 +106,78 @@ function createFallbackAdapter(
   };
 }
 
+async function readStreamBytes(
+  stream: ReadableStream<Uint8Array>
+): Promise<Uint8Array> {
+  const reader = stream.getReader();
+  const chunks: Uint8Array[] = [];
+  let total = 0;
+
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) break;
+    if (!value || value.length === 0) continue;
+    chunks.push(value);
+    total += value.length;
+  }
+
+  const merged = new Uint8Array(total);
+  let offset = 0;
+  for (const chunk of chunks) {
+    merged.set(chunk, offset);
+    offset += chunk.length;
+  }
+  return merged;
+}
+
+function createStreamCapableAdapter(
+  db: Kysely<SyncBlobDb>,
+  tokenSigner: BlobTokenSigner
+): {
+  adapter: BlobStorageAdapter;
+  getCounts: () => {
+    putCalls: number;
+    putStreamCalls: number;
+    deleteCalls: number;
+  };
+} {
+  const baseAdapter = createDefaultAdapter(db, tokenSigner);
+  let putCalls = 0;
+  let putStreamCalls = 0;
+  let deleteCalls = 0;
+
+  const adapter: BlobStorageAdapter = {
+    name: 'database-stream-capable',
+    signUpload: baseAdapter.signUpload,
+    signDownload: baseAdapter.signDownload,
+    exists: baseAdapter.exists,
+    getMetadata: baseAdapter.getMetadata,
+    get: baseAdapter.get,
+    put: async (hash, data, metadata, options) => {
+      putCalls += 1;
+      await baseAdapter.put?.(hash, data, metadata, options);
+    },
+    putStream: async (hash, stream, metadata, options) => {
+      putStreamCalls += 1;
+      const bytes = await readStreamBytes(stream);
+      await baseAdapter.put?.(hash, bytes, metadata, options);
+    },
+    delete: async (hash, options) => {
+      deleteCalls += 1;
+      await baseAdapter.delete(hash, options);
+    },
+  };
+
+  return {
+    adapter,
+    getCounts: () => ({
+      putCalls,
+      putStreamCalls,
+      deleteCalls,
+    }),
+  };
+}
+
 function buildApp(args: {
   db: Kysely<SyncBlobDb>;
   tokenSigner: BlobTokenSigner;
@@ -113,7 +206,7 @@ function buildApp(args: {
       },
       tokenSigner: args.tokenSigner,
       db: args.db,
-      canAccessBlob: args.canAccessBlob,
+      canAccessBlob: args.canAccessBlob ?? (async () => true),
     })
   );
   return app;
@@ -124,6 +217,7 @@ async function initiateUpload(args: {
   hash: string;
   size: number;
   mimeType?: string;
+  partitionId?: string;
 }): Promise<UploadInitResponse> {
   const response = await args.app.request(
     'http://localhost/sync/blobs/upload',
@@ -132,6 +226,7 @@ async function initiateUpload(args: {
      headers: {
        'content-type': 'application/json',
        [ACTOR_HEADER]: ACTOR_ID,
+        [PARTITION_HEADER]: args.partitionId ?? 'default',
      },
      body: JSON.stringify({
        hash: args.hash,
@@ -213,31 +308,18 @@ describe('createBlobRoutes', () => {
 
    const content = new Uint8Array([1, 2, 3, 4]);
    const hash = await createHash(content);
-    const init = await initiateUpload({
+    await initiateUpload({
      app,
      hash,
      size: content.length,
    });
 
-    const firstUpload = await app.request(init.uploadUrl!, {
-      method: init.uploadMethod ?? 'PUT',
-      body: content,
-    });
-    expect(firstUpload.status).toBe(200);
-
-    const complete = await app.request(
-      `http://localhost/sync/blobs/${encodeURIComponent(hash)}/complete`,
-      {
-        method: 'POST',
-        headers: { [ACTOR_HEADER]: ACTOR_ID },
-      }
-    );
-    expect(complete.status).toBe(200);
-
    const token = await signBlobToken({
      signer: tokenSigner,
      hash,
      action: 'upload',
+      size: content.length,
+      partitionId: 'default',
    });
 
    const response = await app.request(
@@ -272,6 +354,8 @@ describe('createBlobRoutes', () => {
      signer: tokenSigner,
      hash,
      action: 'upload',
+      size: expected.length,
+      partitionId: 'default',
    });
 
    const response = await app.request(
@@ -315,6 +399,41 @@ describe('createBlobRoutes', () => {
    expect(await forbiddenResponse.json()).toEqual({ error: 'FORBIDDEN' });
  });
 
+  it('rejects upload completion from a different actor', async () => {
+    const app = buildApp({
+      db,
+      tokenSigner,
+      adapter: createDefaultAdapter(db, tokenSigner),
+    });
+
+    const content = new TextEncoder().encode('actor-ownership-check');
+    const hash = await createHash(content);
+    const init = await initiateUpload({
+      app,
+      hash,
+      size: content.length,
+      mimeType: 'text/plain',
+      partitionId: 'default',
+    });
+    const uploadResponse = await app.request(init.uploadUrl!, {
+      method: init.uploadMethod ?? 'PUT',
+      headers: { 'content-type': 'text/plain' },
+      body: content,
+    });
+    expect(uploadResponse.status).toBe(200);
+
+    const completeResponse = await app.request(
+      `http://localhost/sync/blobs/${encodeURIComponent(hash)}/complete`,
+      {
+        method: 'POST',
+        headers: { [ACTOR_HEADER]: 'user-2' },
+      }
+    );
+
+    expect(completeResponse.status).toBe(403);
+    expect(await completeResponse.json()).toEqual({ error: 'FORBIDDEN' });
+  });
+
  it('uploads and downloads blobs through adapter put/get branches', async () => {
    const app = buildApp({
      db,
@@ -329,6 +448,7 @@ describe('createBlobRoutes', () => {
      hash,
      size: content.length,
      mimeType: 'text/plain',
+      partitionId: 'default',
    });
 
    expect(init.exists).toBe(false);
@@ -370,6 +490,86 @@ describe('createBlobRoutes', () => {
    );
  });
 
+  it('prefers streaming direct upload when adapter exposes putStream', async () => {
+    const streamHarness = createStreamCapableAdapter(db, tokenSigner);
+    const app = buildApp({
+      db,
+      tokenSigner,
+      adapter: streamHarness.adapter,
+    });
+
+    const content = new TextEncoder().encode('streaming-upload-content');
+    const hash = await createHash(content);
+    const init = await initiateUpload({
+      app,
+      hash,
+      size: content.length,
+      mimeType: 'text/plain',
+      partitionId: 'default',
+    });
+
+    const uploadResponse = await app.request(init.uploadUrl!, {
+      method: init.uploadMethod ?? 'PUT',
+      headers: { 'content-type': 'text/plain' },
+      body: content,
+    });
+    expect(uploadResponse.status).toBe(200);
+
+    expect(streamHarness.getCounts()).toEqual({
+      putCalls: 0,
+      putStreamCalls: 1,
+      deleteCalls: 0,
+    });
+  });
+
+  it('deletes streamed upload on hash mismatch', async () => {
+    const streamHarness = createStreamCapableAdapter(db, tokenSigner);
+    const app = buildApp({
+      db,
+      tokenSigner,
+      adapter: streamHarness.adapter,
+    });
+
+    const expected = new Uint8Array([1, 2, 3, 4, 5]);
+    const hash = await createHash(expected);
+    await initiateUpload({
+      app,
+      hash,
+      size: expected.length,
+      partitionId: 'default',
+    });
+
+    const uploadResponse = await app.request(
+      `http://localhost/sync/blobs/${encodeURIComponent(hash)}/upload?token=${encodeURIComponent(
+        await signBlobToken({
+          signer: tokenSigner,
+          hash,
+          action: 'upload',
+          size: expected.length,
+          partitionId: 'default',
+        })
+      )}`,
+      {
+        method: 'PUT',
+        body: new Uint8Array([9, 9, 9, 9, 9]),
+      }
+    );
+    expect(uploadResponse.status).toBe(400);
+    expect(await uploadResponse.json()).toEqual({
+      error: 'HASH_MISMATCH',
+      message: 'Content hash does not match',
+    });
+
+    expect(
+      await streamHarness.adapter.exists(hash, { partitionId: 'default' })
+    ).toBe(false);
+    expect(streamHarness.getCounts()).toEqual({
+      putCalls: 0,
+      putStreamCalls: 1,
+      deleteCalls: 1,
+    });
+  });
+
  it('uploads and downloads blobs through DB fallback branches when adapter lacks put/get', async () => {
    const app = buildApp({
      db,
@@ -384,6 +584,7 @@ describe('createBlobRoutes', () => {
      hash,
      size: content.length,
      mimeType: 'text/plain',
+      partitionId: 'default',
    });
 
    const uploadResponse = await app.request(init.uploadUrl!, {
@@ -421,4 +622,71 @@ describe('createBlobRoutes', () => {
      content
    );
  });
+
+  it('isolates blob lookup by partition', async () => {
+    const app = buildApp({
+      db,
+      tokenSigner,
+      adapter: createDefaultAdapter(db, tokenSigner),
+      authenticate: async (c) => {
+        const actorId = c.req.header(ACTOR_HEADER);
+        if (!actorId) return null;
+        return {
+          actorId,
+          partitionId: c.req.header(PARTITION_HEADER) ?? 'default',
+        };
+      },
+      canAccessBlob: async () => true,
+    });
+
+    const content = new TextEncoder().encode('partition-isolated-blob');
+    const hash = await createHash(content);
+    const init = await initiateUpload({
+      app,
+      hash,
+      size: content.length,
+      partitionId: 'tenant-a',
+    });
+    const uploadResponse = await app.request(init.uploadUrl!, {
+      method: init.uploadMethod ?? 'PUT',
+      headers: { 'content-type': 'application/octet-stream' },
+      body: content,
+    });
+    expect(uploadResponse.status).toBe(200);
+
+    const completeResponse = await app.request(
+      `http://localhost/sync/blobs/${encodeURIComponent(hash)}/complete`,
+      {
+        method: 'POST',
+        headers: {
+          [ACTOR_HEADER]: ACTOR_ID,
+          [PARTITION_HEADER]: 'tenant-a',
+        },
+      }
+    );
+    expect(completeResponse.status).toBe(200);
+
+    const samePartitionUrl = await app.request(
+      `http://localhost/sync/blobs/${encodeURIComponent(hash)}/url`,
+      {
+        headers: {
+          [ACTOR_HEADER]: ACTOR_ID,
+          [PARTITION_HEADER]: 'tenant-a',
+        },
+      }
+    );
+    expect(samePartitionUrl.status).toBe(200);
+
+    const otherPartitionUrl = await app.request(
+      `http://localhost/sync/blobs/${encodeURIComponent(hash)}/url`,
+      {
+        headers: {
+          [ACTOR_HEADER]: ACTOR_ID,
+          [PARTITION_HEADER]: 'tenant-b',
+        },
+      }
+    );
+    expect(otherPartitionUrl.status).toBe(404);
+    expect(await otherPartitionUrl.json()).toEqual({ error: 'NOT_FOUND' });
+  });
});
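
Editor's note (illustration, not part of the diff): the blob-route changes above center on upload tokens that are partition-scoped and size-bound. The sketch below infers the token payload shape from the tests' `signBlobToken` helper; `validateUploadToken` and its error strings are hypothetical names for illustration, not this package's exported API.

```ts
// Assumed payload shape, mirroring what signBlobToken signs in the tests above.
interface BlobTokenPayload {
  hash: string;
  partitionId: string;
  action: 'upload' | 'download';
  size?: number; // required for uploads, absent for downloads
  expiresAt: number; // epoch milliseconds
}

// Hypothetical server-side check consistent with the tests' expectations:
// wrong partition behaves like a missing blob, wrong size/hash is rejected.
function validateUploadToken(
  payload: BlobTokenPayload,
  request: { hash: string; partitionId: string; contentLength: number }
): { ok: true } | { ok: false; error: string } {
  if (payload.expiresAt <= Date.now()) return { ok: false, error: 'TOKEN_EXPIRED' };
  if (payload.action !== 'upload') return { ok: false, error: 'WRONG_ACTION' };
  if (payload.hash !== request.hash) return { ok: false, error: 'HASH_MISMATCH' };
  // Partition scoping: a token minted for tenant-a must not unlock tenant-b blobs.
  if (payload.partitionId !== request.partitionId) return { ok: false, error: 'NOT_FOUND' };
  if (payload.size !== request.contentLength) return { ok: false, error: 'SIZE_MISMATCH' };
  return { ok: true };
}
```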

package/src/__tests__/console-gateway-live-routes.test.ts

@@ -44,7 +44,14 @@ class MockDownstreamSocket {
   }
 }
 
-function createGatewayLiveHarness() {
+function createGatewayLiveHarness(options?: {
+  websocket?: {
+    allowedOrigins?: string[] | '*';
+    maxMessageBytes?: number;
+    maxMessagesPerWindow?: number;
+    messageRateWindowMs?: number;
+  };
+}) {
   const downstreamSockets: MockDownstreamSocket[] = [];
   let capturedEvents: WSEvents | null = null;
 
@@ -85,6 +92,7 @@ function createGatewayLiveHarness() {
        downstreamSockets.push(socket);
        return socket;
      },
+      ...options?.websocket,
    },
  })
);
@@ -294,4 +302,56 @@ describe('createConsoleGatewayRoutes live fan-in', () => {
    ]);
    expect(downstreamSockets).toHaveLength(0);
  });
+
+  it('rejects websocket upgrades from disallowed origins', async () => {
+    const { app } = createGatewayLiveHarness({
+      websocket: {
+        allowedOrigins: ['https://allowed.syncular.test'],
+      },
+    });
+
+    const response = await app.request('http://localhost/console/events/live', {
+      headers: { Authorization: `Bearer ${CONSOLE_TOKEN}` },
+    });
+
+    expect(response.status).toBe(403);
+    expect(await response.json()).toEqual({
+      error: 'FORBIDDEN_ORIGIN',
+    });
+  });
+
+  it('enforces inbound websocket message rate limits per upstream connection', async () => {
+    const { app, getEvents } = createGatewayLiveHarness({
+      websocket: {
+        maxMessagesPerWindow: 1,
+        messageRateWindowMs: 60000,
+      },
+    });
+
+    const response = await app.request('http://localhost/console/events/live', {
+      headers: { Authorization: `Bearer ${CONSOLE_TOKEN}` },
+    });
+    expect(response.status).toBe(200);
+
+    const events = getEvents();
+    if (!events?.onOpen || !events.onMessage) {
+      throw new Error('Expected websocket handlers to be captured.');
+    }
+
+    const upstream = createUpstreamSocketHarness();
+    events.onOpen(new Event('open'), upstream.ws);
+
+    await events.onMessage(
+      new MessageEvent('message', { data: '{}' }),
+      upstream.ws
+    );
+    await events.onMessage(
+      new MessageEvent('message', { data: '{}' }),
+      upstream.ws
+    );
+
+    const latestClose = upstream.closes[upstream.closes.length - 1];
+    expect(latestClose?.code).toBe(1008);
+    expect(latestClose?.reason).toBe('message rate exceeded');
+  });
});
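
Editor's note (illustration, not part of the diff): the new gateway tests configure `allowedOrigins`, `maxMessagesPerWindow`, and `messageRateWindowMs`, and expect an offending socket to be closed with code 1008 ("policy violation") and reason `'message rate exceeded'`. A minimal fixed-window limiter consistent with that behavior might look like the following; `createMessageRateLimiter` is a hypothetical helper, not the package's implementation.

```ts
interface RateLimitOptions {
  maxMessagesPerWindow: number;
  messageRateWindowMs: number;
}

// Counts messages per connection within a fixed window; the caller closes the
// socket when allowMessage() returns false.
function createMessageRateLimiter(options: RateLimitOptions) {
  let windowStart = Date.now();
  let count = 0;

  return function allowMessage(now: number = Date.now()): boolean {
    if (now - windowStart >= options.messageRateWindowMs) {
      // Window elapsed: reset the counter and start a new window.
      windowStart = now;
      count = 0;
    }
    count += 1;
    return count <= options.maxMessagesPerWindow;
  };
}

// Usage against a websocket-like object, matching the tests' expectation:
// const allow = createMessageRateLimiter({ maxMessagesPerWindow: 1, messageRateWindowMs: 60_000 });
// if (!allow()) ws.close(1008, 'message rate exceeded');
```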

package/src/__tests__/console-routes.test.ts

@@ -157,6 +157,7 @@ describe('console timeline route filters', () => {
  async function requestTimeline(args: {
    query?: Record<string, string | number | undefined>;
    authenticated?: boolean;
+    targetApp?: Hono;
  }): Promise<Response> {
    const params = new URLSearchParams({ limit: '50', offset: '0' });
    for (const [key, value] of Object.entries(args.query ?? {})) {
@@ -164,7 +165,7 @@ describe('console timeline route filters', () => {
      params.set(key, String(value));
    }
 
-    return app.request(
+    return (args.targetApp ?? app).request(
      `http://localhost/console/timeline?${params.toString()}`,
      {
        headers:
@@ -735,6 +736,34 @@ describe('console timeline route filters', () => {
    expect(pageOneKeys.some((key) => pageTwoKeys.includes(key))).toBe(false);
  });
 
+  it('caps timeline source scans to avoid unbounded in-memory merges', async () => {
+    const cappedTimelineApp = createTestApp({
+      maintenance: {
+        timelineScanMaxRows: 1,
+      },
+    });
+
+    const response = await requestTimeline({
+      targetApp: cappedTimelineApp,
+    });
+
+    expect(response.status).toBe(200);
+    expect(response.headers.get('x-timeline-truncated')).toBe('true');
+    expect(response.headers.get('x-timeline-scan-limit')).toBe('1');
+
+    const payload = (await response.json()) as TimelineResponse;
+    expect(payload.total).toBeLessThanOrEqual(2);
+    expect(payload.items.length).toBeLessThanOrEqual(2);
+  });
+
+  it('rejects oversized timeline offsets to prevent deep pagination scans', async () => {
+    const response = await requestTimeline({
+      query: { offset: 10001 },
+    });
+
+    expect(response.status).toBe(400);
+  });
+
  it('lists operation audit events and filters by operation type', async () => {
    const allOps = await readOperations();
    expect(allOps.total).toBe(2);
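
Editor's note (illustration, not part of the diff): the timeline tests assert two guardrails: a per-request scan cap surfaced through the `x-timeline-truncated` and `x-timeline-scan-limit` headers, and rejection of offsets past some ceiling (the test probes 10001, so a ceiling of 10 000 is assumed here). A sketch under those assumptions; the names below are hypothetical:

```ts
// Assumed pagination ceiling, inferred from the test's probe at offset 10001.
const MAX_TIMELINE_OFFSET = 10_000;

// Deep offsets force the server to scan and discard many rows, so the route
// rejects them outright with 400 rather than paying that cost.
function checkTimelineOffset(offset: number): { status: 200 | 400 } {
  return offset > MAX_TIMELINE_OFFSET ? { status: 400 } : { status: 200 };
}

// Clamp each source's contribution to the in-memory merge and report whether
// anything was dropped; a real handler would then set the two response headers.
function scanWithCap<T>(
  rows: T[],
  timelineScanMaxRows: number
): { items: T[]; truncated: boolean } {
  const items = rows.slice(0, timelineScanMaxRows);
  return { items, truncated: rows.length > items.length };
}
```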