@syncular/client 0.0.1-60

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176)
  1. package/dist/blobs/index.d.ts +7 -0
  2. package/dist/blobs/index.d.ts.map +1 -0
  3. package/dist/blobs/index.js +7 -0
  4. package/dist/blobs/index.js.map +1 -0
  5. package/dist/blobs/manager.d.ts +345 -0
  6. package/dist/blobs/manager.d.ts.map +1 -0
  7. package/dist/blobs/manager.js +749 -0
  8. package/dist/blobs/manager.js.map +1 -0
  9. package/dist/blobs/migrate.d.ts +14 -0
  10. package/dist/blobs/migrate.d.ts.map +1 -0
  11. package/dist/blobs/migrate.js +59 -0
  12. package/dist/blobs/migrate.js.map +1 -0
  13. package/dist/blobs/types.d.ts +62 -0
  14. package/dist/blobs/types.d.ts.map +1 -0
  15. package/dist/blobs/types.js +5 -0
  16. package/dist/blobs/types.js.map +1 -0
  17. package/dist/client.d.ts +338 -0
  18. package/dist/client.d.ts.map +1 -0
  19. package/dist/client.js +834 -0
  20. package/dist/client.js.map +1 -0
  21. package/dist/conflicts.d.ts +31 -0
  22. package/dist/conflicts.d.ts.map +1 -0
  23. package/dist/conflicts.js +118 -0
  24. package/dist/conflicts.js.map +1 -0
  25. package/dist/create-client.d.ts +115 -0
  26. package/dist/create-client.d.ts.map +1 -0
  27. package/dist/create-client.js +162 -0
  28. package/dist/create-client.js.map +1 -0
  29. package/dist/engine/SyncEngine.d.ts +215 -0
  30. package/dist/engine/SyncEngine.d.ts.map +1 -0
  31. package/dist/engine/SyncEngine.js +1066 -0
  32. package/dist/engine/SyncEngine.js.map +1 -0
  33. package/dist/engine/index.d.ts +6 -0
  34. package/dist/engine/index.d.ts.map +1 -0
  35. package/dist/engine/index.js +6 -0
  36. package/dist/engine/index.js.map +1 -0
  37. package/dist/engine/types.d.ts +230 -0
  38. package/dist/engine/types.d.ts.map +1 -0
  39. package/dist/engine/types.js +7 -0
  40. package/dist/engine/types.js.map +1 -0
  41. package/dist/handlers/create-handler.d.ts +110 -0
  42. package/dist/handlers/create-handler.d.ts.map +1 -0
  43. package/dist/handlers/create-handler.js +140 -0
  44. package/dist/handlers/create-handler.js.map +1 -0
  45. package/dist/handlers/registry.d.ts +15 -0
  46. package/dist/handlers/registry.d.ts.map +1 -0
  47. package/dist/handlers/registry.js +29 -0
  48. package/dist/handlers/registry.js.map +1 -0
  49. package/dist/handlers/types.d.ts +83 -0
  50. package/dist/handlers/types.d.ts.map +1 -0
  51. package/dist/handlers/types.js +5 -0
  52. package/dist/handlers/types.js.map +1 -0
  53. package/dist/index.d.ts +24 -0
  54. package/dist/index.d.ts.map +1 -0
  55. package/dist/index.js +24 -0
  56. package/dist/index.js.map +1 -0
  57. package/dist/migrate.d.ts +19 -0
  58. package/dist/migrate.d.ts.map +1 -0
  59. package/dist/migrate.js +106 -0
  60. package/dist/migrate.js.map +1 -0
  61. package/dist/mutations.d.ts +138 -0
  62. package/dist/mutations.d.ts.map +1 -0
  63. package/dist/mutations.js +611 -0
  64. package/dist/mutations.js.map +1 -0
  65. package/dist/outbox.d.ts +112 -0
  66. package/dist/outbox.d.ts.map +1 -0
  67. package/dist/outbox.js +304 -0
  68. package/dist/outbox.js.map +1 -0
  69. package/dist/plugins/incrementing-version.d.ts +34 -0
  70. package/dist/plugins/incrementing-version.d.ts.map +1 -0
  71. package/dist/plugins/incrementing-version.js +83 -0
  72. package/dist/plugins/incrementing-version.js.map +1 -0
  73. package/dist/plugins/index.d.ts +3 -0
  74. package/dist/plugins/index.d.ts.map +1 -0
  75. package/dist/plugins/index.js +3 -0
  76. package/dist/plugins/index.js.map +1 -0
  77. package/dist/plugins/types.d.ts +49 -0
  78. package/dist/plugins/types.d.ts.map +1 -0
  79. package/dist/plugins/types.js +15 -0
  80. package/dist/plugins/types.js.map +1 -0
  81. package/dist/proxy/connection.d.ts +33 -0
  82. package/dist/proxy/connection.d.ts.map +1 -0
  83. package/dist/proxy/connection.js +153 -0
  84. package/dist/proxy/connection.js.map +1 -0
  85. package/dist/proxy/dialect.d.ts +46 -0
  86. package/dist/proxy/dialect.d.ts.map +1 -0
  87. package/dist/proxy/dialect.js +58 -0
  88. package/dist/proxy/dialect.js.map +1 -0
  89. package/dist/proxy/driver.d.ts +42 -0
  90. package/dist/proxy/driver.d.ts.map +1 -0
  91. package/dist/proxy/driver.js +78 -0
  92. package/dist/proxy/driver.js.map +1 -0
  93. package/dist/proxy/index.d.ts +10 -0
  94. package/dist/proxy/index.d.ts.map +1 -0
  95. package/dist/proxy/index.js +10 -0
  96. package/dist/proxy/index.js.map +1 -0
  97. package/dist/proxy/mutations.d.ts +9 -0
  98. package/dist/proxy/mutations.d.ts.map +1 -0
  99. package/dist/proxy/mutations.js +11 -0
  100. package/dist/proxy/mutations.js.map +1 -0
  101. package/dist/pull-engine.d.ts +45 -0
  102. package/dist/pull-engine.d.ts.map +1 -0
  103. package/dist/pull-engine.js +391 -0
  104. package/dist/pull-engine.js.map +1 -0
  105. package/dist/push-engine.d.ts +18 -0
  106. package/dist/push-engine.d.ts.map +1 -0
  107. package/dist/push-engine.js +155 -0
  108. package/dist/push-engine.js.map +1 -0
  109. package/dist/query/FingerprintCollector.d.ts +18 -0
  110. package/dist/query/FingerprintCollector.d.ts.map +1 -0
  111. package/dist/query/FingerprintCollector.js +28 -0
  112. package/dist/query/FingerprintCollector.js.map +1 -0
  113. package/dist/query/QueryContext.d.ts +33 -0
  114. package/dist/query/QueryContext.d.ts.map +1 -0
  115. package/dist/query/QueryContext.js +16 -0
  116. package/dist/query/QueryContext.js.map +1 -0
  117. package/dist/query/fingerprint.d.ts +61 -0
  118. package/dist/query/fingerprint.d.ts.map +1 -0
  119. package/dist/query/fingerprint.js +91 -0
  120. package/dist/query/fingerprint.js.map +1 -0
  121. package/dist/query/index.d.ts +7 -0
  122. package/dist/query/index.d.ts.map +1 -0
  123. package/dist/query/index.js +7 -0
  124. package/dist/query/index.js.map +1 -0
  125. package/dist/query/tracked-select.d.ts +18 -0
  126. package/dist/query/tracked-select.d.ts.map +1 -0
  127. package/dist/query/tracked-select.js +90 -0
  128. package/dist/query/tracked-select.js.map +1 -0
  129. package/dist/schema.d.ts +83 -0
  130. package/dist/schema.d.ts.map +1 -0
  131. package/dist/schema.js +7 -0
  132. package/dist/schema.js.map +1 -0
  133. package/dist/sync-loop.d.ts +32 -0
  134. package/dist/sync-loop.d.ts.map +1 -0
  135. package/dist/sync-loop.js +249 -0
  136. package/dist/sync-loop.js.map +1 -0
  137. package/dist/utils/id.d.ts +8 -0
  138. package/dist/utils/id.d.ts.map +1 -0
  139. package/dist/utils/id.js +19 -0
  140. package/dist/utils/id.js.map +1 -0
  141. package/package.json +58 -0
  142. package/src/blobs/index.ts +7 -0
  143. package/src/blobs/manager.ts +1027 -0
  144. package/src/blobs/migrate.ts +67 -0
  145. package/src/blobs/types.ts +84 -0
  146. package/src/client.ts +1222 -0
  147. package/src/conflicts.ts +180 -0
  148. package/src/create-client.ts +297 -0
  149. package/src/engine/SyncEngine.ts +1337 -0
  150. package/src/engine/index.ts +6 -0
  151. package/src/engine/types.ts +268 -0
  152. package/src/handlers/create-handler.ts +287 -0
  153. package/src/handlers/registry.ts +36 -0
  154. package/src/handlers/types.ts +102 -0
  155. package/src/index.ts +25 -0
  156. package/src/migrate.ts +122 -0
  157. package/src/mutations.ts +926 -0
  158. package/src/outbox.ts +397 -0
  159. package/src/plugins/incrementing-version.ts +133 -0
  160. package/src/plugins/index.ts +2 -0
  161. package/src/plugins/types.ts +63 -0
  162. package/src/proxy/connection.ts +191 -0
  163. package/src/proxy/dialect.ts +76 -0
  164. package/src/proxy/driver.ts +126 -0
  165. package/src/proxy/index.ts +10 -0
  166. package/src/proxy/mutations.ts +18 -0
  167. package/src/pull-engine.ts +518 -0
  168. package/src/push-engine.ts +201 -0
  169. package/src/query/FingerprintCollector.ts +29 -0
  170. package/src/query/QueryContext.ts +54 -0
  171. package/src/query/fingerprint.ts +109 -0
  172. package/src/query/index.ts +10 -0
  173. package/src/query/tracked-select.ts +139 -0
  174. package/src/schema.ts +94 -0
  175. package/src/sync-loop.ts +368 -0
  176. package/src/utils/id.ts +20 -0
@@ -0,0 +1,1027 @@
1
+ /**
2
+ * @syncular/client - Client-side blob manager
3
+ *
4
+ * Handles blob upload/download with:
5
+ * - Local caching for offline access
6
+ * - Upload queue for offline uploads
7
+ * - SHA-256 hash computation
8
+ * - Optional client-side encryption
9
+ */
10
+
11
+ import type { BlobRef, BlobTransport } from '@syncular/core';
12
+ import { createBlobHash, createBlobRef } from '@syncular/core';
13
+ import type { Kysely } from 'kysely';
14
+ import type { BlobUploadStatus, SyncBlobClientDb } from './types';
15
+
16
+ // Re-export BlobTransport for convenience
17
+ export type { BlobTransport } from '@syncular/core';
18
+
19
+ // ============================================================================
20
+ // Types
21
+ // ============================================================================
22
+
23
/**
 * Pluggable hooks for client-side blob encryption.
 *
 * Supplied via `ClientBlobManagerOptions.encryption`; invoked by the blob
 * manager before upload (when `UploadOptions.encrypt` is set) and after
 * download/cache reads for blobs marked encrypted.
 */
interface BlobEncryption {
  /**
   * Encrypt blob content.
   * Returns encrypted bytes and the key ID used.
   *
   * @param data - Plaintext bytes to encrypt.
   * @param options - `keyId` requests a specific key; when omitted, the
   *   implementation chooses one and reports it in the result.
   */
  encrypt(
    data: Uint8Array,
    options?: { keyId?: string }
  ): Promise<{ encrypted: Uint8Array; keyId: string }>;

  /**
   * Decrypt blob content previously produced by `encrypt`.
   *
   * @param data - Ciphertext bytes.
   * @param keyId - The key ID reported by `encrypt` (stored alongside the blob).
   */
  decrypt(data: Uint8Array, keyId: string): Promise<Uint8Array>;
}
38
+
39
/** Configuration for `createClientBlobManager`. */
export interface ClientBlobManagerOptions {
  /** Kysely database instance holding the sync_blob_cache / sync_blob_outbox tables */
  db: Kysely<SyncBlobClientDb>;
  /** Blob transport for server communication (initiate/complete uploads, download URLs) */
  transport: BlobTransport;
  /** Optional encryption handler; without it, `encrypt` upload requests are silently ignored */
  encryption?: BlobEncryption;
  /** Maximum cache size in bytes. Default: 100MB */
  maxCacheSize?: number;
  /** Maximum retry attempts for uploads. Default: 3 */
  maxUploadRetries?: number;
  /** Custom fetch function for blob uploads/downloads. Default: globalThis.fetch */
  fetch?: (input: RequestInfo | URL, init?: RequestInit) => Promise<Response>;
}
53
+
54
/** Per-call options for the blob manager's `upload()`. */
export interface UploadOptions {
  /** Encrypt the blob before uploading (requires an encryption handler to be configured) */
  encrypt?: boolean;
  /** Specific encryption key ID to use; otherwise the encryption handler picks one */
  keyId?: string;
  /** Skip queuing and upload immediately (blocks until the server confirms completion) */
  immediate?: boolean;
}
62
+
63
/** Per-call options for the blob manager's `download()`. */
export interface DownloadOptions {
  /** Skip cache lookup and always fetch from server */
  skipCache?: boolean;
  /** Update last_accessed_at in cache on a cache hit. Default: true (only `false` disables it) */
  updateAccessTime?: boolean;
}
69
+
70
+ // ============================================================================
71
+ // Client Blob Manager
72
+ // ============================================================================
73
+
74
+ /**
75
+ * Create a client-side blob manager.
76
+ *
77
+ * @example
78
+ * ```typescript
79
+ * const blobManager = createClientBlobManager({
80
+ * db,
81
+ * transport: {
82
+ * async initiateUpload(args) {
83
+ * const res = await fetch('/api/sync/blobs/upload', {
84
+ * method: 'POST',
85
+ * body: JSON.stringify(args),
86
+ * });
87
+ * return res.json();
88
+ * },
89
+ * async completeUpload(hash) {
90
+ * const res = await fetch(`/api/sync/blobs/${hash}/complete`, { method: 'POST' });
91
+ * return res.json();
92
+ * },
93
+ * async getDownloadUrl(hash) {
94
+ * const res = await fetch(`/api/sync/blobs/${hash}/url`);
95
+ * return res.json();
96
+ * },
97
+ * },
98
+ * });
99
+ *
100
+ * // Upload a file
101
+ * const blobRef = await blobManager.upload(file);
102
+ *
103
+ * // Download a blob
104
+ * const blob = await blobManager.download(blobRef);
105
+ * ```
106
+ */
107
+ export function createClientBlobManager(options: ClientBlobManagerOptions) {
108
+ const {
109
+ db,
110
+ transport,
111
+ encryption,
112
+ maxCacheSize = 100 * 1024 * 1024,
113
+ maxUploadRetries = 3,
114
+ fetch: customFetch = globalThis.fetch,
115
+ } = options;
116
+
117
+ return {
118
+ /**
119
+ * Upload a blob to the server.
120
+ *
121
+ * If `immediate` is false (default), the blob is queued for background upload.
122
+ * If `immediate` is true, the upload blocks until complete.
123
+ */
124
+ async upload(
125
+ data: Blob | File | Uint8Array,
126
+ opts?: UploadOptions
127
+ ): Promise<BlobRef> {
128
+ const bytes = await toUint8Array(data);
129
+ const mimeType =
130
+ data instanceof Blob ? data.type : 'application/octet-stream';
131
+
132
+ let finalBytes = bytes;
133
+ let encrypted = false;
134
+ let keyId: string | undefined;
135
+
136
+ // Encrypt if requested
137
+ if (opts?.encrypt && encryption) {
138
+ const result = await encryption.encrypt(bytes, { keyId: opts.keyId });
139
+ finalBytes = result.encrypted;
140
+ encrypted = true;
141
+ keyId = result.keyId;
142
+ }
143
+
144
+ // Compute hash of final (possibly encrypted) bytes
145
+ const hash = await computeSha256(finalBytes);
146
+
147
+ // Create blob ref
148
+ const blobRef = createBlobRef({
149
+ hash,
150
+ size: finalBytes.length,
151
+ mimeType,
152
+ encrypted,
153
+ keyId,
154
+ });
155
+
156
+ // Check if already in cache (dedup locally)
157
+ const cached = await db
158
+ .selectFrom('sync_blob_cache')
159
+ .select('hash')
160
+ .where('hash', '=', hash)
161
+ .executeTakeFirst();
162
+
163
+ if (cached) {
164
+ return blobRef;
165
+ }
166
+
167
+ // Check if already in outbox
168
+ const queued = await db
169
+ .selectFrom('sync_blob_outbox')
170
+ .select('hash')
171
+ .where('hash', '=', hash)
172
+ .where('status', '!=', 'failed')
173
+ .executeTakeFirst();
174
+
175
+ if (queued) {
176
+ return blobRef;
177
+ }
178
+
179
+ if (opts?.immediate) {
180
+ // Upload immediately
181
+ await uploadBlob(finalBytes, hash, mimeType);
182
+
183
+ // Complete the upload (mark as done on server)
184
+ const completeResult = await transport.completeUpload(hash);
185
+ if (!completeResult.ok) {
186
+ throw new BlobUploadError(
187
+ `Failed to complete upload: ${completeResult.error}`
188
+ );
189
+ }
190
+
191
+ // Cache the blob
192
+ await cacheBlob(hash, finalBytes, mimeType, encrypted, keyId);
193
+ } else {
194
+ // Queue for background upload
195
+ const now = Date.now();
196
+ await db
197
+ .insertInto('sync_blob_outbox')
198
+ .values({
199
+ hash,
200
+ size: finalBytes.length,
201
+ mime_type: mimeType,
202
+ body: finalBytes,
203
+ encrypted: encrypted ? 1 : 0,
204
+ key_id: keyId ?? null,
205
+ status: 'pending',
206
+ attempt_count: 0,
207
+ error: null,
208
+ created_at: now,
209
+ updated_at: now,
210
+ })
211
+ .onConflict((oc) => oc.column('hash').doNothing())
212
+ .execute();
213
+ }
214
+
215
+ return blobRef;
216
+ },
217
+
218
+ /**
219
+ * Download a blob.
220
+ *
221
+ * First checks the local cache, then fetches from server if needed.
222
+ * Automatically decrypts if the blob was encrypted.
223
+ */
224
+ async download(ref: BlobRef, opts?: DownloadOptions): Promise<Uint8Array> {
225
+ const hash = ref.hash;
226
+
227
+ // Check cache first (unless skipCache)
228
+ if (!opts?.skipCache) {
229
+ const cached = await db
230
+ .selectFrom('sync_blob_cache')
231
+ .select(['body', 'encrypted', 'key_id'])
232
+ .where('hash', '=', hash)
233
+ .executeTakeFirst();
234
+
235
+ if (cached) {
236
+ // Update access time if requested
237
+ if (opts?.updateAccessTime !== false) {
238
+ await db
239
+ .updateTable('sync_blob_cache')
240
+ .set({ last_accessed_at: Date.now() })
241
+ .where('hash', '=', hash)
242
+ .execute();
243
+ }
244
+
245
+ let data = cached.body;
246
+
247
+ // Decrypt if needed
248
+ if (cached.encrypted && cached.key_id && encryption) {
249
+ data = await encryption.decrypt(data, cached.key_id);
250
+ }
251
+
252
+ return data;
253
+ }
254
+ }
255
+
256
+ // Check if blob is in upload outbox (not yet on server)
257
+ const outbox = await db
258
+ .selectFrom('sync_blob_outbox')
259
+ .select(['body', 'encrypted', 'key_id'])
260
+ .where('hash', '=', hash)
261
+ .executeTakeFirst();
262
+
263
+ if (outbox) {
264
+ let data = outbox.body;
265
+ if (outbox.encrypted && outbox.key_id && encryption) {
266
+ data = await encryption.decrypt(data, outbox.key_id);
267
+ }
268
+ return data;
269
+ }
270
+
271
+ // Fetch from server
272
+ const { url } = await transport.getDownloadUrl(hash);
273
+ const response = await customFetch(url);
274
+
275
+ if (!response.ok) {
276
+ throw new BlobDownloadError(
277
+ `Failed to download blob: ${response.status}`
278
+ );
279
+ }
280
+
281
+ const buffer = await response.arrayBuffer();
282
+ const data = new Uint8Array(buffer);
283
+
284
+ // Verify hash
285
+ const computedHash = await computeSha256(data);
286
+ if (computedHash !== hash) {
287
+ throw new BlobDownloadError('Downloaded blob hash mismatch');
288
+ }
289
+
290
+ // Cache the blob
291
+ await cacheBlob(
292
+ hash,
293
+ data,
294
+ ref.mimeType,
295
+ ref.encrypted ?? false,
296
+ ref.keyId
297
+ );
298
+
299
+ // Decrypt if needed
300
+ if (ref.encrypted && ref.keyId && encryption) {
301
+ return encryption.decrypt(data, ref.keyId);
302
+ }
303
+
304
+ return data;
305
+ },
306
+
307
+ /**
308
+ * Check if a blob is cached locally.
309
+ */
310
+ async isCached(hash: string): Promise<boolean> {
311
+ const row = await db
312
+ .selectFrom('sync_blob_cache')
313
+ .select('hash')
314
+ .where('hash', '=', hash)
315
+ .executeTakeFirst();
316
+ return !!row;
317
+ },
318
+
319
+ /**
320
+ * Get a blob URL for display.
321
+ *
322
+ * Returns a blob: URL if cached locally, or fetches and creates one.
323
+ */
324
+ async getBlobUrl(ref: BlobRef): Promise<string> {
325
+ const data = await this.download(ref);
326
+ const blob = new Blob([data.buffer as ArrayBuffer], {
327
+ type: ref.mimeType,
328
+ });
329
+ return URL.createObjectURL(blob);
330
+ },
331
+
332
+ /**
333
+ * Preload blobs into the cache.
334
+ */
335
+ async preload(refs: BlobRef[]): Promise<void> {
336
+ await Promise.all(refs.map((ref) => this.download(ref)));
337
+ },
338
+
339
+ /**
340
+ * Process pending uploads in the outbox.
341
+ *
342
+ * Call this periodically or when online to sync pending uploads.
343
+ * Returns the number of blobs processed.
344
+ */
345
+ async processUploadQueue(): Promise<{
346
+ uploaded: number;
347
+ failed: number;
348
+ errors: Array<{ hash: string; error: string }>;
349
+ }> {
350
+ let uploaded = 0;
351
+ let failed = 0;
352
+ const errors: Array<{ hash: string; error: string }> = [];
353
+
354
+ // Get pending uploads
355
+ const pending = await db
356
+ .selectFrom('sync_blob_outbox')
357
+ .selectAll()
358
+ .where('status', 'in', ['pending', 'uploading', 'uploaded'])
359
+ .where('attempt_count', '<', maxUploadRetries)
360
+ .orderBy('created_at', 'asc')
361
+ .execute();
362
+
363
+ for (const row of pending) {
364
+ try {
365
+ // Process based on current status
366
+ if (row.status === 'pending' || row.status === 'uploading') {
367
+ // Mark as uploading
368
+ await db
369
+ .updateTable('sync_blob_outbox')
370
+ .set({
371
+ status: 'uploading',
372
+ attempt_count: row.attempt_count + 1,
373
+ updated_at: Date.now(),
374
+ })
375
+ .where('hash', '=', row.hash)
376
+ .execute();
377
+
378
+ // Upload to server
379
+ await uploadBlob(row.body, row.hash, row.mime_type);
380
+
381
+ // Mark as uploaded (waiting for confirmation)
382
+ await db
383
+ .updateTable('sync_blob_outbox')
384
+ .set({ status: 'uploaded', updated_at: Date.now() })
385
+ .where('hash', '=', row.hash)
386
+ .execute();
387
+ }
388
+
389
+ if (
390
+ row.status === 'uploaded' ||
391
+ row.status === 'confirming' ||
392
+ row.status === 'pending'
393
+ ) {
394
+ // Confirm upload
395
+ await db
396
+ .updateTable('sync_blob_outbox')
397
+ .set({ status: 'confirming', updated_at: Date.now() })
398
+ .where('hash', '=', row.hash)
399
+ .execute();
400
+
401
+ const result = await transport.completeUpload(row.hash);
402
+
403
+ if (result.ok) {
404
+ // Cache the blob
405
+ await cacheBlob(
406
+ row.hash,
407
+ row.body,
408
+ row.mime_type,
409
+ row.encrypted === 1,
410
+ row.key_id ?? undefined
411
+ );
412
+
413
+ // Remove from outbox
414
+ await db
415
+ .deleteFrom('sync_blob_outbox')
416
+ .where('hash', '=', row.hash)
417
+ .execute();
418
+
419
+ uploaded++;
420
+ } else {
421
+ throw new Error(result.error ?? 'Upload confirmation failed');
422
+ }
423
+ }
424
+ } catch (err) {
425
+ const errorMessage = err instanceof Error ? err.message : String(err);
426
+
427
+ // Check if max retries exceeded
428
+ if (row.attempt_count + 1 >= maxUploadRetries) {
429
+ await db
430
+ .updateTable('sync_blob_outbox')
431
+ .set({
432
+ status: 'failed',
433
+ error: errorMessage,
434
+ updated_at: Date.now(),
435
+ })
436
+ .where('hash', '=', row.hash)
437
+ .execute();
438
+ failed++;
439
+ } else {
440
+ // Mark as pending for retry
441
+ await db
442
+ .updateTable('sync_blob_outbox')
443
+ .set({
444
+ status: 'pending',
445
+ error: errorMessage,
446
+ updated_at: Date.now(),
447
+ })
448
+ .where('hash', '=', row.hash)
449
+ .execute();
450
+ }
451
+
452
+ errors.push({ hash: row.hash, error: errorMessage });
453
+ }
454
+ }
455
+
456
+ return { uploaded, failed, errors };
457
+ },
458
+
459
+ /**
460
+ * Get the status of a pending upload.
461
+ */
462
+ async getUploadStatus(
463
+ hash: string
464
+ ): Promise<{ status: BlobUploadStatus; error?: string } | null> {
465
+ const row = await db
466
+ .selectFrom('sync_blob_outbox')
467
+ .select(['status', 'error'])
468
+ .where('hash', '=', hash)
469
+ .executeTakeFirst();
470
+
471
+ if (!row) return null;
472
+ return { status: row.status, error: row.error ?? undefined };
473
+ },
474
+
475
+ /**
476
+ * Clear failed uploads from the outbox.
477
+ */
478
+ async clearFailedUploads(): Promise<number> {
479
+ const result = await db
480
+ .deleteFrom('sync_blob_outbox')
481
+ .where('status', '=', 'failed')
482
+ .executeTakeFirst();
483
+ return Number(result.numDeletedRows ?? 0);
484
+ },
485
+
486
+ /**
487
+ * Retry a failed upload.
488
+ */
489
+ async retryUpload(hash: string): Promise<boolean> {
490
+ const result = await db
491
+ .updateTable('sync_blob_outbox')
492
+ .set({
493
+ status: 'pending',
494
+ attempt_count: 0,
495
+ error: null,
496
+ updated_at: Date.now(),
497
+ })
498
+ .where('hash', '=', hash)
499
+ .where('status', '=', 'failed')
500
+ .executeTakeFirst();
501
+ return Number(result.numUpdatedRows ?? 0) > 0;
502
+ },
503
+
504
+ /**
505
+ * Prune the cache to stay under maxCacheSize.
506
+ * Uses LRU (least recently used) eviction.
507
+ */
508
+ async pruneCache(): Promise<{ evicted: number; freedBytes: number }> {
509
+ // Calculate current cache size
510
+ const stats = await db
511
+ .selectFrom('sync_blob_cache')
512
+ .select(({ fn }) => [fn.sum<number>('size').as('total_size')])
513
+ .executeTakeFirst();
514
+
515
+ const currentSize = stats?.total_size ?? 0;
516
+
517
+ if (currentSize <= maxCacheSize) {
518
+ return { evicted: 0, freedBytes: 0 };
519
+ }
520
+
521
+ const targetSize = maxCacheSize * 0.8; // Prune to 80% of max
522
+ let freedBytes = 0;
523
+ let evicted = 0;
524
+
525
+ // Get blobs ordered by last access (LRU)
526
+ const blobs = await db
527
+ .selectFrom('sync_blob_cache')
528
+ .select(['hash', 'size'])
529
+ .orderBy('last_accessed_at', 'asc')
530
+ .execute();
531
+
532
+ for (const blob of blobs) {
533
+ if (currentSize - freedBytes <= targetSize) break;
534
+
535
+ await db
536
+ .deleteFrom('sync_blob_cache')
537
+ .where('hash', '=', blob.hash)
538
+ .execute();
539
+
540
+ freedBytes += blob.size;
541
+ evicted++;
542
+ }
543
+
544
+ return { evicted, freedBytes };
545
+ },
546
+
547
+ /**
548
+ * Clear the entire cache.
549
+ */
550
+ async clearCache(): Promise<number> {
551
+ const result = await db.deleteFrom('sync_blob_cache').executeTakeFirst();
552
+ return Number(result.numDeletedRows ?? 0);
553
+ },
554
+
555
+ /**
556
+ * Get cache statistics.
557
+ */
558
+ async getCacheStats(): Promise<{
559
+ count: number;
560
+ totalSize: number;
561
+ maxSize: number;
562
+ }> {
563
+ const stats = await db
564
+ .selectFrom('sync_blob_cache')
565
+ .select(({ fn }) => [
566
+ fn.count<number>('hash').as('count'),
567
+ fn.sum<number>('size').as('total_size'),
568
+ ])
569
+ .executeTakeFirst();
570
+
571
+ return {
572
+ count: stats?.count ?? 0,
573
+ totalSize: stats?.total_size ?? 0,
574
+ maxSize: maxCacheSize,
575
+ };
576
+ },
577
+
578
+ /**
579
+ * Get upload queue statistics.
580
+ */
581
+ async getUploadQueueStats(): Promise<{
582
+ pending: number;
583
+ uploading: number;
584
+ failed: number;
585
+ total: number;
586
+ }> {
587
+ const rows = await db
588
+ .selectFrom('sync_blob_outbox')
589
+ .select(['status'])
590
+ .execute();
591
+
592
+ const stats = { pending: 0, uploading: 0, failed: 0, total: 0 };
593
+ for (const row of rows) {
594
+ stats.total++;
595
+ if (row.status === 'pending') stats.pending++;
596
+ else if (
597
+ row.status === 'uploading' ||
598
+ row.status === 'uploaded' ||
599
+ row.status === 'confirming'
600
+ )
601
+ stats.uploading++;
602
+ else if (row.status === 'failed') stats.failed++;
603
+ }
604
+ return stats;
605
+ },
606
+ };
607
+
608
+ // -------------------------------------------------------------------------
609
+ // Internal helpers
610
+ // -------------------------------------------------------------------------
611
+
612
+ async function uploadBlob(
613
+ data: Uint8Array,
614
+ hash: string,
615
+ mimeType: string
616
+ ): Promise<void> {
617
+ // Initiate upload
618
+ const initResult = await transport.initiateUpload({
619
+ hash,
620
+ size: data.length,
621
+ mimeType,
622
+ });
623
+
624
+ // If blob already exists, we're done
625
+ if (initResult.exists) {
626
+ return;
627
+ }
628
+
629
+ if (!initResult.uploadUrl) {
630
+ throw new BlobUploadError('No upload URL returned');
631
+ }
632
+
633
+ // Upload to presigned URL
634
+ const response = await customFetch(initResult.uploadUrl, {
635
+ method: initResult.uploadMethod ?? 'PUT',
636
+ headers: {
637
+ ...initResult.uploadHeaders,
638
+ 'Content-Type': mimeType,
639
+ },
640
+ body: new Blob([data.buffer as ArrayBuffer], { type: mimeType }),
641
+ });
642
+
643
+ if (!response.ok) {
644
+ throw new BlobUploadError(`Upload failed: ${response.status}`);
645
+ }
646
+ }
647
+
648
+ async function cacheBlob(
649
+ hash: string,
650
+ data: Uint8Array,
651
+ mimeType: string,
652
+ encrypted: boolean,
653
+ keyId?: string
654
+ ): Promise<void> {
655
+ const now = Date.now();
656
+ await db
657
+ .insertInto('sync_blob_cache')
658
+ .values({
659
+ hash,
660
+ size: data.length,
661
+ mime_type: mimeType,
662
+ body: data,
663
+ encrypted: encrypted ? 1 : 0,
664
+ key_id: keyId ?? null,
665
+ cached_at: now,
666
+ last_accessed_at: now,
667
+ })
668
+ .onConflict((oc) =>
669
+ oc.column('hash').doUpdateSet({
670
+ last_accessed_at: now,
671
+ })
672
+ )
673
+ .execute();
674
+
675
+ // Prune cache if needed (async, don't block)
676
+ void pruneCache().catch(() => {});
677
+ }
678
+
679
+ async function pruneCache(): Promise<void> {
680
+ const stats = await db
681
+ .selectFrom('sync_blob_cache')
682
+ .select(({ fn }) => [fn.sum<number>('size').as('total_size')])
683
+ .executeTakeFirst();
684
+
685
+ const currentSize = stats?.total_size ?? 0;
686
+
687
+ if (currentSize > maxCacheSize) {
688
+ const targetSize = maxCacheSize * 0.8;
689
+ let freedBytes = 0;
690
+
691
+ const blobs = await db
692
+ .selectFrom('sync_blob_cache')
693
+ .select(['hash', 'size'])
694
+ .orderBy('last_accessed_at', 'asc')
695
+ .limit(100) // Limit batch size
696
+ .execute();
697
+
698
+ for (const blob of blobs) {
699
+ if (currentSize - freedBytes <= targetSize) break;
700
+
701
+ await db
702
+ .deleteFrom('sync_blob_cache')
703
+ .where('hash', '=', blob.hash)
704
+ .execute();
705
+
706
+ freedBytes += blob.size;
707
+ }
708
+ }
709
+ }
710
+ }
711
+
712
/** Instance type returned by `createClientBlobManager` (derived, not hand-maintained). */
type ClientBlobManager = ReturnType<typeof createClientBlobManager>;
713
+
714
+ // ============================================================================
715
+ // Cache Pruning Scheduler
716
+ // ============================================================================
717
+
718
/** Configuration for `createBlobCachePruneScheduler`. */
interface BlobCachePruneSchedulerOptions {
  /** Client blob manager instance */
  blobManager: ClientBlobManager;
  /** Interval between prune runs in milliseconds. Default: 300000 (5 minutes) */
  intervalMs?: number;
  /** Optional: Called after each prune run; `error` is set when the run threw */
  onPrune?: (result: {
    evicted: number;
    freedBytes: number;
    error?: Error;
  }) => void;
}
730
+
731
+ /**
732
+ * Create a cache pruning scheduler for the client blob manager.
733
+ *
734
+ * Periodically prunes the cache to stay under maxCacheSize using LRU eviction.
735
+ *
736
+ * @example
737
+ * ```typescript
738
+ * const scheduler = createBlobCachePruneScheduler({
739
+ * blobManager,
740
+ * intervalMs: 5 * 60 * 1000, // 5 minutes
741
+ * onPrune: (result) => {
742
+ * if (result.evicted > 0) {
743
+ * console.log(`Cache pruned: ${result.evicted} blobs, ${result.freedBytes} bytes freed`);
744
+ * }
745
+ * },
746
+ * });
747
+ *
748
+ * // Start the scheduler
749
+ * scheduler.start();
750
+ *
751
+ * // Stop when unmounting/shutting down
752
+ * scheduler.stop();
753
+ * ```
754
+ */
755
+ export function createBlobCachePruneScheduler(
756
+ options: BlobCachePruneSchedulerOptions
757
+ ) {
758
+ const {
759
+ blobManager,
760
+ intervalMs = 300000, // 5 minutes
761
+ onPrune,
762
+ } = options;
763
+
764
+ let intervalId: ReturnType<typeof setInterval> | null = null;
765
+ let isRunning = false;
766
+
767
+ const runPrune = async (): Promise<{
768
+ evicted: number;
769
+ freedBytes: number;
770
+ error?: Error;
771
+ }> => {
772
+ if (isRunning) {
773
+ return { evicted: 0, freedBytes: 0 };
774
+ }
775
+
776
+ isRunning = true;
777
+
778
+ try {
779
+ const result = await blobManager.pruneCache();
780
+ onPrune?.({ evicted: result.evicted, freedBytes: result.freedBytes });
781
+ return result;
782
+ } catch (err) {
783
+ const error = err instanceof Error ? err : new Error(String(err));
784
+ onPrune?.({ evicted: 0, freedBytes: 0, error });
785
+ return { evicted: 0, freedBytes: 0, error };
786
+ } finally {
787
+ isRunning = false;
788
+ }
789
+ };
790
+
791
+ return {
792
+ /**
793
+ * Start the prune scheduler.
794
+ * Optionally runs an immediate prune before starting the interval.
795
+ */
796
+ start(options?: { immediate?: boolean }): void {
797
+ if (intervalId) {
798
+ return; // Already running
799
+ }
800
+
801
+ if (options?.immediate) {
802
+ void runPrune();
803
+ }
804
+
805
+ intervalId = setInterval(() => {
806
+ void runPrune();
807
+ }, intervalMs);
808
+ },
809
+
810
+ /**
811
+ * Stop the prune scheduler.
812
+ */
813
+ stop(): void {
814
+ if (intervalId) {
815
+ clearInterval(intervalId);
816
+ intervalId = null;
817
+ }
818
+ },
819
+
820
+ /**
821
+ * Run a single prune manually.
822
+ */
823
+ async runOnce(): Promise<{
824
+ evicted: number;
825
+ freedBytes: number;
826
+ error?: Error;
827
+ }> {
828
+ return runPrune();
829
+ },
830
+
831
+ /**
832
+ * Check if the scheduler is currently active.
833
+ */
834
+ get active(): boolean {
835
+ return intervalId !== null;
836
+ },
837
+
838
+ /**
839
+ * Check if a prune is currently in progress.
840
+ */
841
+ get running(): boolean {
842
+ return isRunning;
843
+ },
844
+ };
845
+ }
846
+
847
+ // ============================================================================
848
+ // Upload Queue Processor Scheduler
849
+ // ============================================================================
850
+
851
/** Configuration for `createBlobUploadQueueScheduler`. */
interface BlobUploadQueueSchedulerOptions {
  /** Client blob manager instance */
  blobManager: ClientBlobManager;
  /** Interval between processing runs in milliseconds. Default: 30000 (30 seconds) */
  intervalMs?: number;
  /** Optional: Called after each processing run; `error` is set when the run itself threw */
  onProcess?: (result: {
    uploaded: number;
    failed: number;
    errors: Array<{ hash: string; error: string }>;
    error?: Error;
  }) => void;
}
864
+
865
+ /**
866
+ * Create an upload queue processor scheduler for the client blob manager.
867
+ *
868
+ * Periodically processes pending uploads when online.
869
+ *
870
+ * @example
871
+ * ```typescript
872
+ * const scheduler = createBlobUploadQueueScheduler({
873
+ * blobManager,
874
+ * intervalMs: 30 * 1000, // 30 seconds
875
+ * onProcess: (result) => {
876
+ * if (result.uploaded > 0) {
877
+ * console.log(`Uploaded ${result.uploaded} blobs`);
878
+ * }
879
+ * if (result.failed > 0) {
880
+ * console.warn(`Failed to upload ${result.failed} blobs`);
881
+ * }
882
+ * },
883
+ * });
884
+ *
885
+ * // Start when online
886
+ * scheduler.start();
887
+ *
888
+ * // Stop when offline or shutting down
889
+ * scheduler.stop();
890
+ * ```
891
+ */
892
+ export function createBlobUploadQueueScheduler(
893
+ options: BlobUploadQueueSchedulerOptions
894
+ ) {
895
+ const {
896
+ blobManager,
897
+ intervalMs = 30000, // 30 seconds
898
+ onProcess,
899
+ } = options;
900
+
901
+ let intervalId: ReturnType<typeof setInterval> | null = null;
902
+ let isRunning = false;
903
+
904
+ const runProcess = async (): Promise<{
905
+ uploaded: number;
906
+ failed: number;
907
+ errors: Array<{ hash: string; error: string }>;
908
+ error?: Error;
909
+ }> => {
910
+ if (isRunning) {
911
+ return { uploaded: 0, failed: 0, errors: [] };
912
+ }
913
+
914
+ isRunning = true;
915
+
916
+ try {
917
+ const result = await blobManager.processUploadQueue();
918
+ onProcess?.(result);
919
+ return result;
920
+ } catch (err) {
921
+ const error = err instanceof Error ? err : new Error(String(err));
922
+ const result = { uploaded: 0, failed: 0, errors: [], error };
923
+ onProcess?.(result);
924
+ return result;
925
+ } finally {
926
+ isRunning = false;
927
+ }
928
+ };
929
+
930
+ return {
931
+ /**
932
+ * Start the upload queue processor.
933
+ * Optionally runs an immediate processing before starting the interval.
934
+ */
935
+ start(options?: { immediate?: boolean }): void {
936
+ if (intervalId) {
937
+ return; // Already running
938
+ }
939
+
940
+ if (options?.immediate) {
941
+ void runProcess();
942
+ }
943
+
944
+ intervalId = setInterval(() => {
945
+ void runProcess();
946
+ }, intervalMs);
947
+ },
948
+
949
+ /**
950
+ * Stop the upload queue processor.
951
+ */
952
+ stop(): void {
953
+ if (intervalId) {
954
+ clearInterval(intervalId);
955
+ intervalId = null;
956
+ }
957
+ },
958
+
959
+ /**
960
+ * Run a single processing manually.
961
+ */
962
+ async runOnce(): Promise<{
963
+ uploaded: number;
964
+ failed: number;
965
+ errors: Array<{ hash: string; error: string }>;
966
+ error?: Error;
967
+ }> {
968
+ return runProcess();
969
+ },
970
+
971
+ /**
972
+ * Check if the processor is currently active.
973
+ */
974
+ get active(): boolean {
975
+ return intervalId !== null;
976
+ },
977
+
978
+ /**
979
+ * Check if processing is currently in progress.
980
+ */
981
+ get running(): boolean {
982
+ return isRunning;
983
+ },
984
+ };
985
+ }
986
+
987
+ // ============================================================================
988
+ // Utilities
989
+ // ============================================================================
990
+
991
+ async function toUint8Array(
992
+ data: Blob | File | Uint8Array
993
+ ): Promise<Uint8Array> {
994
+ if (data instanceof Uint8Array) {
995
+ return data;
996
+ }
997
+ const buffer = await data.arrayBuffer();
998
+ return new Uint8Array(buffer);
999
+ }
1000
+
1001
+ async function computeSha256(data: Uint8Array): Promise<string> {
1002
+ const buffer = new Uint8Array(data).buffer as ArrayBuffer;
1003
+ const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
1004
+ const hashArray = new Uint8Array(hashBuffer);
1005
+ const hex = Array.from(hashArray)
1006
+ .map((b) => b.toString(16).padStart(2, '0'))
1007
+ .join('');
1008
+ return createBlobHash(hex);
1009
+ }
1010
+
1011
+ // ============================================================================
1012
+ // Errors
1013
+ // ============================================================================
1014
+
1015
+ class BlobUploadError extends Error {
1016
+ constructor(message: string) {
1017
+ super(message);
1018
+ this.name = 'BlobUploadError';
1019
+ }
1020
+ }
1021
+
1022
+ class BlobDownloadError extends Error {
1023
+ constructor(message: string) {
1024
+ super(message);
1025
+ this.name = 'BlobDownloadError';
1026
+ }
1027
+ }