@syncular/client 0.0.1-60

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176) hide show
  1. package/dist/blobs/index.d.ts +7 -0
  2. package/dist/blobs/index.d.ts.map +1 -0
  3. package/dist/blobs/index.js +7 -0
  4. package/dist/blobs/index.js.map +1 -0
  5. package/dist/blobs/manager.d.ts +345 -0
  6. package/dist/blobs/manager.d.ts.map +1 -0
  7. package/dist/blobs/manager.js +749 -0
  8. package/dist/blobs/manager.js.map +1 -0
  9. package/dist/blobs/migrate.d.ts +14 -0
  10. package/dist/blobs/migrate.d.ts.map +1 -0
  11. package/dist/blobs/migrate.js +59 -0
  12. package/dist/blobs/migrate.js.map +1 -0
  13. package/dist/blobs/types.d.ts +62 -0
  14. package/dist/blobs/types.d.ts.map +1 -0
  15. package/dist/blobs/types.js +5 -0
  16. package/dist/blobs/types.js.map +1 -0
  17. package/dist/client.d.ts +338 -0
  18. package/dist/client.d.ts.map +1 -0
  19. package/dist/client.js +834 -0
  20. package/dist/client.js.map +1 -0
  21. package/dist/conflicts.d.ts +31 -0
  22. package/dist/conflicts.d.ts.map +1 -0
  23. package/dist/conflicts.js +118 -0
  24. package/dist/conflicts.js.map +1 -0
  25. package/dist/create-client.d.ts +115 -0
  26. package/dist/create-client.d.ts.map +1 -0
  27. package/dist/create-client.js +162 -0
  28. package/dist/create-client.js.map +1 -0
  29. package/dist/engine/SyncEngine.d.ts +215 -0
  30. package/dist/engine/SyncEngine.d.ts.map +1 -0
  31. package/dist/engine/SyncEngine.js +1066 -0
  32. package/dist/engine/SyncEngine.js.map +1 -0
  33. package/dist/engine/index.d.ts +6 -0
  34. package/dist/engine/index.d.ts.map +1 -0
  35. package/dist/engine/index.js +6 -0
  36. package/dist/engine/index.js.map +1 -0
  37. package/dist/engine/types.d.ts +230 -0
  38. package/dist/engine/types.d.ts.map +1 -0
  39. package/dist/engine/types.js +7 -0
  40. package/dist/engine/types.js.map +1 -0
  41. package/dist/handlers/create-handler.d.ts +110 -0
  42. package/dist/handlers/create-handler.d.ts.map +1 -0
  43. package/dist/handlers/create-handler.js +140 -0
  44. package/dist/handlers/create-handler.js.map +1 -0
  45. package/dist/handlers/registry.d.ts +15 -0
  46. package/dist/handlers/registry.d.ts.map +1 -0
  47. package/dist/handlers/registry.js +29 -0
  48. package/dist/handlers/registry.js.map +1 -0
  49. package/dist/handlers/types.d.ts +83 -0
  50. package/dist/handlers/types.d.ts.map +1 -0
  51. package/dist/handlers/types.js +5 -0
  52. package/dist/handlers/types.js.map +1 -0
  53. package/dist/index.d.ts +24 -0
  54. package/dist/index.d.ts.map +1 -0
  55. package/dist/index.js +24 -0
  56. package/dist/index.js.map +1 -0
  57. package/dist/migrate.d.ts +19 -0
  58. package/dist/migrate.d.ts.map +1 -0
  59. package/dist/migrate.js +106 -0
  60. package/dist/migrate.js.map +1 -0
  61. package/dist/mutations.d.ts +138 -0
  62. package/dist/mutations.d.ts.map +1 -0
  63. package/dist/mutations.js +611 -0
  64. package/dist/mutations.js.map +1 -0
  65. package/dist/outbox.d.ts +112 -0
  66. package/dist/outbox.d.ts.map +1 -0
  67. package/dist/outbox.js +304 -0
  68. package/dist/outbox.js.map +1 -0
  69. package/dist/plugins/incrementing-version.d.ts +34 -0
  70. package/dist/plugins/incrementing-version.d.ts.map +1 -0
  71. package/dist/plugins/incrementing-version.js +83 -0
  72. package/dist/plugins/incrementing-version.js.map +1 -0
  73. package/dist/plugins/index.d.ts +3 -0
  74. package/dist/plugins/index.d.ts.map +1 -0
  75. package/dist/plugins/index.js +3 -0
  76. package/dist/plugins/index.js.map +1 -0
  77. package/dist/plugins/types.d.ts +49 -0
  78. package/dist/plugins/types.d.ts.map +1 -0
  79. package/dist/plugins/types.js +15 -0
  80. package/dist/plugins/types.js.map +1 -0
  81. package/dist/proxy/connection.d.ts +33 -0
  82. package/dist/proxy/connection.d.ts.map +1 -0
  83. package/dist/proxy/connection.js +153 -0
  84. package/dist/proxy/connection.js.map +1 -0
  85. package/dist/proxy/dialect.d.ts +46 -0
  86. package/dist/proxy/dialect.d.ts.map +1 -0
  87. package/dist/proxy/dialect.js +58 -0
  88. package/dist/proxy/dialect.js.map +1 -0
  89. package/dist/proxy/driver.d.ts +42 -0
  90. package/dist/proxy/driver.d.ts.map +1 -0
  91. package/dist/proxy/driver.js +78 -0
  92. package/dist/proxy/driver.js.map +1 -0
  93. package/dist/proxy/index.d.ts +10 -0
  94. package/dist/proxy/index.d.ts.map +1 -0
  95. package/dist/proxy/index.js +10 -0
  96. package/dist/proxy/index.js.map +1 -0
  97. package/dist/proxy/mutations.d.ts +9 -0
  98. package/dist/proxy/mutations.d.ts.map +1 -0
  99. package/dist/proxy/mutations.js +11 -0
  100. package/dist/proxy/mutations.js.map +1 -0
  101. package/dist/pull-engine.d.ts +45 -0
  102. package/dist/pull-engine.d.ts.map +1 -0
  103. package/dist/pull-engine.js +391 -0
  104. package/dist/pull-engine.js.map +1 -0
  105. package/dist/push-engine.d.ts +18 -0
  106. package/dist/push-engine.d.ts.map +1 -0
  107. package/dist/push-engine.js +155 -0
  108. package/dist/push-engine.js.map +1 -0
  109. package/dist/query/FingerprintCollector.d.ts +18 -0
  110. package/dist/query/FingerprintCollector.d.ts.map +1 -0
  111. package/dist/query/FingerprintCollector.js +28 -0
  112. package/dist/query/FingerprintCollector.js.map +1 -0
  113. package/dist/query/QueryContext.d.ts +33 -0
  114. package/dist/query/QueryContext.d.ts.map +1 -0
  115. package/dist/query/QueryContext.js +16 -0
  116. package/dist/query/QueryContext.js.map +1 -0
  117. package/dist/query/fingerprint.d.ts +61 -0
  118. package/dist/query/fingerprint.d.ts.map +1 -0
  119. package/dist/query/fingerprint.js +91 -0
  120. package/dist/query/fingerprint.js.map +1 -0
  121. package/dist/query/index.d.ts +7 -0
  122. package/dist/query/index.d.ts.map +1 -0
  123. package/dist/query/index.js +7 -0
  124. package/dist/query/index.js.map +1 -0
  125. package/dist/query/tracked-select.d.ts +18 -0
  126. package/dist/query/tracked-select.d.ts.map +1 -0
  127. package/dist/query/tracked-select.js +90 -0
  128. package/dist/query/tracked-select.js.map +1 -0
  129. package/dist/schema.d.ts +83 -0
  130. package/dist/schema.d.ts.map +1 -0
  131. package/dist/schema.js +7 -0
  132. package/dist/schema.js.map +1 -0
  133. package/dist/sync-loop.d.ts +32 -0
  134. package/dist/sync-loop.d.ts.map +1 -0
  135. package/dist/sync-loop.js +249 -0
  136. package/dist/sync-loop.js.map +1 -0
  137. package/dist/utils/id.d.ts +8 -0
  138. package/dist/utils/id.d.ts.map +1 -0
  139. package/dist/utils/id.js +19 -0
  140. package/dist/utils/id.js.map +1 -0
  141. package/package.json +58 -0
  142. package/src/blobs/index.ts +7 -0
  143. package/src/blobs/manager.ts +1027 -0
  144. package/src/blobs/migrate.ts +67 -0
  145. package/src/blobs/types.ts +84 -0
  146. package/src/client.ts +1222 -0
  147. package/src/conflicts.ts +180 -0
  148. package/src/create-client.ts +297 -0
  149. package/src/engine/SyncEngine.ts +1337 -0
  150. package/src/engine/index.ts +6 -0
  151. package/src/engine/types.ts +268 -0
  152. package/src/handlers/create-handler.ts +287 -0
  153. package/src/handlers/registry.ts +36 -0
  154. package/src/handlers/types.ts +102 -0
  155. package/src/index.ts +25 -0
  156. package/src/migrate.ts +122 -0
  157. package/src/mutations.ts +926 -0
  158. package/src/outbox.ts +397 -0
  159. package/src/plugins/incrementing-version.ts +133 -0
  160. package/src/plugins/index.ts +2 -0
  161. package/src/plugins/types.ts +63 -0
  162. package/src/proxy/connection.ts +191 -0
  163. package/src/proxy/dialect.ts +76 -0
  164. package/src/proxy/driver.ts +126 -0
  165. package/src/proxy/index.ts +10 -0
  166. package/src/proxy/mutations.ts +18 -0
  167. package/src/pull-engine.ts +518 -0
  168. package/src/push-engine.ts +201 -0
  169. package/src/query/FingerprintCollector.ts +29 -0
  170. package/src/query/QueryContext.ts +54 -0
  171. package/src/query/fingerprint.ts +109 -0
  172. package/src/query/index.ts +10 -0
  173. package/src/query/tracked-select.ts +139 -0
  174. package/src/schema.ts +94 -0
  175. package/src/sync-loop.ts +368 -0
  176. package/src/utils/id.ts +20 -0
@@ -0,0 +1,749 @@
1
+ /**
2
+ * @syncular/client - Client-side blob manager
3
+ *
4
+ * Handles blob upload/download with:
5
+ * - Local caching for offline access
6
+ * - Upload queue for offline uploads
7
+ * - SHA-256 hash computation
8
+ * - Optional client-side encryption
9
+ */
10
+ import { createBlobHash, createBlobRef } from '@syncular/core';
11
// ============================================================================
// Client Blob Manager
// ============================================================================
/**
 * Create a client-side blob manager.
 *
 * Wraps a local database (`sync_blob_cache` / `sync_blob_outbox` tables) and a
 * server transport to provide offline-friendly blob storage: SHA-256 content
 * addressing, optional client-side encryption, an offline upload queue, and an
 * LRU-bounded local cache.
 *
 * @example
 * ```typescript
 * const blobManager = createClientBlobManager({
 *   db,
 *   transport: {
 *     async initiateUpload(args) {
 *       const res = await fetch('/api/sync/blobs/upload', {
 *         method: 'POST',
 *         body: JSON.stringify(args),
 *       });
 *       return res.json();
 *     },
 *     async completeUpload(hash) {
 *       const res = await fetch(`/api/sync/blobs/${hash}/complete`, { method: 'POST' });
 *       return res.json();
 *     },
 *     async getDownloadUrl(hash) {
 *       const res = await fetch(`/api/sync/blobs/${hash}/url`);
 *       return res.json();
 *     },
 *   },
 * });
 *
 * // Upload a file
 * const blobRef = await blobManager.upload(file);
 *
 * // Download a blob
 * const blob = await blobManager.download(blobRef);
 * ```
 */
export function createClientBlobManager(options) {
    const {
        db,
        transport,
        encryption,
        maxCacheSize = 100 * 1024 * 1024, // 100 MiB local cache budget
        maxUploadRetries = 3,
        fetch: customFetch = globalThis.fetch,
    } = options;
    return {
        /**
         * Upload a blob to the server.
         *
         * Content is addressed by the SHA-256 of the final (possibly
         * encrypted) bytes. If `immediate` is false (default), the blob is
         * queued in `sync_blob_outbox` for background upload; if true, the
         * upload blocks until the server confirms it.
         */
        async upload(data, opts) {
            const bytes = await toUint8Array(data);
            const mimeType = data instanceof Blob ? data.type : 'application/octet-stream';
            let finalBytes = bytes;
            let encrypted = false;
            let keyId;
            // Encrypt before hashing so the server only ever sees ciphertext.
            if (opts?.encrypt && encryption) {
                const result = await encryption.encrypt(bytes, { keyId: opts.keyId });
                finalBytes = result.encrypted;
                encrypted = true;
                keyId = result.keyId;
            }
            // Compute hash of final (possibly encrypted) bytes.
            const hash = await computeSha256(finalBytes);
            const blobRef = createBlobRef({
                hash,
                size: finalBytes.length,
                mimeType,
                encrypted,
                keyId,
            });
            // Local dedup: a cached blob was already uploaded at some point.
            const cached = await db
                .selectFrom('sync_blob_cache')
                .select('hash')
                .where('hash', '=', hash)
                .executeTakeFirst();
            if (cached) {
                return blobRef;
            }
            // Already queued (and not failed): nothing more to do.
            const queued = await db
                .selectFrom('sync_blob_outbox')
                .select('hash')
                .where('hash', '=', hash)
                .where('status', '!=', 'failed')
                .executeTakeFirst();
            if (queued) {
                return blobRef;
            }
            if (opts?.immediate) {
                // Upload and confirm synchronously.
                await uploadBlob(finalBytes, hash, mimeType);
                const completeResult = await transport.completeUpload(hash);
                if (!completeResult.ok) {
                    throw new BlobUploadError(`Failed to complete upload: ${completeResult.error}`);
                }
                await cacheBlob(hash, finalBytes, mimeType, encrypted, keyId);
            }
            else {
                // Queue for background upload.
                const now = Date.now();
                await db
                    .insertInto('sync_blob_outbox')
                    .values({
                        hash,
                        size: finalBytes.length,
                        mime_type: mimeType,
                        body: finalBytes,
                        encrypted: encrypted ? 1 : 0,
                        key_id: keyId ?? null,
                        status: 'pending',
                        attempt_count: 0,
                        error: null,
                        created_at: now,
                        updated_at: now,
                    })
                    // A concurrent queue of the same content is a no-op.
                    .onConflict((oc) => oc.column('hash').doNothing())
                    .execute();
            }
            return blobRef;
        },
        /**
         * Download a blob, preferring local sources: cache first, then the
         * upload outbox (content not yet on the server), then the server via
         * a download URL. Server downloads are hash-verified and cached.
         * Decrypts when the source row / ref marks the content encrypted.
         */
        async download(ref, opts) {
            const hash = ref.hash;
            // Check cache first (unless skipCache).
            if (!opts?.skipCache) {
                const cached = await db
                    .selectFrom('sync_blob_cache')
                    .select(['body', 'encrypted', 'key_id'])
                    .where('hash', '=', hash)
                    .executeTakeFirst();
                if (cached) {
                    // Touch for LRU eviction unless explicitly disabled.
                    if (opts?.updateAccessTime !== false) {
                        await db
                            .updateTable('sync_blob_cache')
                            .set({ last_accessed_at: Date.now() })
                            .where('hash', '=', hash)
                            .execute();
                    }
                    let data = cached.body;
                    // NOTE(review): if the row is encrypted but no encryption
                    // provider is configured, ciphertext is returned as-is.
                    if (cached.encrypted && cached.key_id && encryption) {
                        data = await encryption.decrypt(data, cached.key_id);
                    }
                    return data;
                }
            }
            // Blob may still be waiting in the upload outbox (not on server yet).
            const outbox = await db
                .selectFrom('sync_blob_outbox')
                .select(['body', 'encrypted', 'key_id'])
                .where('hash', '=', hash)
                .executeTakeFirst();
            if (outbox) {
                let data = outbox.body;
                if (outbox.encrypted && outbox.key_id && encryption) {
                    data = await encryption.decrypt(data, outbox.key_id);
                }
                return data;
            }
            // Fetch from server.
            const { url } = await transport.getDownloadUrl(hash);
            const response = await customFetch(url);
            if (!response.ok) {
                throw new BlobDownloadError(`Failed to download blob: ${response.status}`);
            }
            const buffer = await response.arrayBuffer();
            const data = new Uint8Array(buffer);
            // Integrity check: content addressing means bytes must hash to `hash`.
            const computedHash = await computeSha256(data);
            if (computedHash !== hash) {
                throw new BlobDownloadError('Downloaded blob hash mismatch');
            }
            await cacheBlob(hash, data, ref.mimeType, ref.encrypted ?? false, ref.keyId);
            // Decrypt if needed.
            if (ref.encrypted && ref.keyId && encryption) {
                return encryption.decrypt(data, ref.keyId);
            }
            return data;
        },
        /**
         * Check if a blob is cached locally.
         */
        async isCached(hash) {
            const row = await db
                .selectFrom('sync_blob_cache')
                .select('hash')
                .where('hash', '=', hash)
                .executeTakeFirst();
            return !!row;
        },
        /**
         * Download a blob and return an object URL for display.
         * The caller is responsible for URL.revokeObjectURL when done.
         */
        async getBlobUrl(ref) {
            const data = await this.download(ref);
            // Pass the Uint8Array view itself, NOT data.buffer: the view may
            // cover only a slice of a larger ArrayBuffer, and constructing a
            // Blob from the raw buffer would copy the whole buffer.
            const blob = new Blob([data], {
                type: ref.mimeType,
            });
            return URL.createObjectURL(blob);
        },
        /**
         * Preload blobs into the cache.
         */
        async preload(refs) {
            await Promise.all(refs.map((ref) => this.download(ref)));
        },
        /**
         * Process pending uploads in the outbox.
         *
         * Call this periodically or when online. Rows advance
         * pending -> uploading -> uploaded -> confirming and are removed once
         * the server confirms. Failures retry up to maxUploadRetries, then
         * the row is marked 'failed'.
         *
         * @returns counts for this pass: { uploaded, failed, errors }.
         */
        async processUploadQueue() {
            let uploaded = 0;
            let failed = 0;
            const errors = [];
            const pending = await db
                .selectFrom('sync_blob_outbox')
                .selectAll()
                // Include 'confirming' so a row interrupted between the server
                // upload and completeUpload() is retried instead of stranded;
                // the loop below already handles that status.
                .where('status', 'in', ['pending', 'uploading', 'uploaded', 'confirming'])
                .where('attempt_count', '<', maxUploadRetries)
                .orderBy('created_at', 'asc')
                .execute();
            for (const row of pending) {
                try {
                    // Phase 1: transfer bytes to the server.
                    if (row.status === 'pending' || row.status === 'uploading') {
                        await db
                            .updateTable('sync_blob_outbox')
                            .set({
                                status: 'uploading',
                                attempt_count: row.attempt_count + 1,
                                updated_at: Date.now(),
                            })
                            .where('hash', '=', row.hash)
                            .execute();
                        await uploadBlob(row.body, row.hash, row.mime_type);
                        await db
                            .updateTable('sync_blob_outbox')
                            .set({ status: 'uploaded', updated_at: Date.now() })
                            .where('hash', '=', row.hash)
                            .execute();
                    }
                    // Phase 2: confirm with the server (chains from phase 1
                    // within the same pass for rows that started as 'pending').
                    if (row.status === 'uploaded' ||
                        row.status === 'confirming' ||
                        row.status === 'pending') {
                        await db
                            .updateTable('sync_blob_outbox')
                            .set({ status: 'confirming', updated_at: Date.now() })
                            .where('hash', '=', row.hash)
                            .execute();
                        const result = await transport.completeUpload(row.hash);
                        if (result.ok) {
                            // Move the blob into the cache and drop the queue row.
                            await cacheBlob(row.hash, row.body, row.mime_type, row.encrypted === 1, row.key_id ?? undefined);
                            await db
                                .deleteFrom('sync_blob_outbox')
                                .where('hash', '=', row.hash)
                                .execute();
                            uploaded++;
                        }
                        else {
                            throw new Error(result.error ?? 'Upload confirmation failed');
                        }
                    }
                }
                catch (err) {
                    const errorMessage = err instanceof Error ? err.message : String(err);
                    if (row.attempt_count + 1 >= maxUploadRetries) {
                        // Out of retries: park the row as failed.
                        await db
                            .updateTable('sync_blob_outbox')
                            .set({
                                status: 'failed',
                                error: errorMessage,
                                updated_at: Date.now(),
                            })
                            .where('hash', '=', row.hash)
                            .execute();
                        failed++;
                    }
                    else {
                        // Reset to pending so a later pass retries from scratch.
                        await db
                            .updateTable('sync_blob_outbox')
                            .set({
                                status: 'pending',
                                error: errorMessage,
                                updated_at: Date.now(),
                            })
                            .where('hash', '=', row.hash)
                            .execute();
                    }
                    errors.push({ hash: row.hash, error: errorMessage });
                }
            }
            return { uploaded, failed, errors };
        },
        /**
         * Get the status of a pending upload, or null when not queued.
         */
        async getUploadStatus(hash) {
            const row = await db
                .selectFrom('sync_blob_outbox')
                .select(['status', 'error'])
                .where('hash', '=', hash)
                .executeTakeFirst();
            if (!row)
                return null;
            return { status: row.status, error: row.error ?? undefined };
        },
        /**
         * Clear failed uploads from the outbox.
         *
         * @returns number of rows deleted.
         */
        async clearFailedUploads() {
            const result = await db
                .deleteFrom('sync_blob_outbox')
                .where('status', '=', 'failed')
                .executeTakeFirst();
            return Number(result.numDeletedRows ?? 0);
        },
        /**
         * Retry a failed upload by resetting its outbox row to 'pending'.
         *
         * @returns true when a failed row existed and was reset.
         */
        async retryUpload(hash) {
            const result = await db
                .updateTable('sync_blob_outbox')
                .set({
                    status: 'pending',
                    attempt_count: 0,
                    error: null,
                    updated_at: Date.now(),
                })
                .where('hash', '=', hash)
                .where('status', '=', 'failed')
                .executeTakeFirst();
            return Number(result.numUpdatedRows ?? 0) > 0;
        },
        /**
         * Prune the cache to stay under maxCacheSize.
         * Uses LRU (least recently used) eviction down to 80% of the budget.
         */
        async pruneCache() {
            const stats = await db
                .selectFrom('sync_blob_cache')
                .select(({ fn }) => [fn.sum('size').as('total_size')])
                .executeTakeFirst();
            // SUM() may surface as a string or bigint depending on the driver;
            // normalize so the arithmetic below is numeric.
            const currentSize = Number(stats?.total_size ?? 0);
            if (currentSize <= maxCacheSize) {
                return { evicted: 0, freedBytes: 0 };
            }
            const targetSize = maxCacheSize * 0.8; // Prune to 80% of max
            let freedBytes = 0;
            let evicted = 0;
            // Evict in order of last access (LRU).
            const blobs = await db
                .selectFrom('sync_blob_cache')
                .select(['hash', 'size'])
                .orderBy('last_accessed_at', 'asc')
                .execute();
            for (const blob of blobs) {
                if (currentSize - freedBytes <= targetSize)
                    break;
                await db
                    .deleteFrom('sync_blob_cache')
                    .where('hash', '=', blob.hash)
                    .execute();
                freedBytes += blob.size;
                evicted++;
            }
            return { evicted, freedBytes };
        },
        /**
         * Clear the entire cache.
         *
         * @returns number of rows deleted.
         */
        async clearCache() {
            const result = await db.deleteFrom('sync_blob_cache').executeTakeFirst();
            return Number(result.numDeletedRows ?? 0);
        },
        /**
         * Get cache statistics: entry count, total bytes, and the budget.
         */
        async getCacheStats() {
            const stats = await db
                .selectFrom('sync_blob_cache')
                .select(({ fn }) => [
                    fn.count('hash').as('count'),
                    fn.sum('size').as('total_size'),
                ])
                .executeTakeFirst();
            // Aggregates may come back as string/bigint; normalize to numbers
            // for consistency with the other counters this manager returns.
            return {
                count: Number(stats?.count ?? 0),
                totalSize: Number(stats?.total_size ?? 0),
                maxSize: maxCacheSize,
            };
        },
        /**
         * Get upload queue statistics grouped into pending/uploading/failed.
         */
        async getUploadQueueStats() {
            const rows = await db
                .selectFrom('sync_blob_outbox')
                .select(['status'])
                .execute();
            const stats = { pending: 0, uploading: 0, failed: 0, total: 0 };
            for (const row of rows) {
                stats.total++;
                if (row.status === 'pending')
                    stats.pending++;
                else if (row.status === 'uploading' ||
                    row.status === 'uploaded' ||
                    row.status === 'confirming')
                    stats.uploading++; // all in-flight states count as uploading
                else if (row.status === 'failed')
                    stats.failed++;
            }
            return stats;
        },
    };
    // -------------------------------------------------------------------------
    // Internal helpers
    // -------------------------------------------------------------------------
    /**
     * Transfer bytes to the server: initiate (server may report the content
     * already exists), then PUT/POST to the returned presigned URL.
     */
    async function uploadBlob(data, hash, mimeType) {
        const initResult = await transport.initiateUpload({
            hash,
            size: data.length,
            mimeType,
        });
        // Content-addressed dedup on the server side.
        if (initResult.exists) {
            return;
        }
        if (!initResult.uploadUrl) {
            throw new BlobUploadError('No upload URL returned');
        }
        const response = await customFetch(initResult.uploadUrl, {
            method: initResult.uploadMethod ?? 'PUT',
            headers: {
                ...initResult.uploadHeaders,
                'Content-Type': mimeType,
            },
            // Pass the view itself, not data.buffer: a subarray view's buffer
            // may contain unrelated bytes that must not be uploaded.
            body: new Blob([data], { type: mimeType }),
        });
        if (!response.ok) {
            throw new BlobUploadError(`Upload failed: ${response.status}`);
        }
    }
    /**
     * Insert (or touch) a cache row, then kick off a best-effort prune.
     */
    async function cacheBlob(hash, data, mimeType, encrypted, keyId) {
        const now = Date.now();
        await db
            .insertInto('sync_blob_cache')
            .values({
                hash,
                size: data.length,
                mime_type: mimeType,
                body: data,
                encrypted: encrypted ? 1 : 0,
                key_id: keyId ?? null,
                cached_at: now,
                last_accessed_at: now,
            })
            .onConflict((oc) => oc.column('hash').doUpdateSet({
                last_accessed_at: now,
            }))
            .execute();
        // Prune asynchronously; caching must not fail because pruning did.
        void pruneCache().catch(() => { });
    }
    /**
     * Background LRU prune (batched to 100 rows per pass, unlike the public
     * pruneCache method which scans the full table).
     */
    async function pruneCache() {
        const stats = await db
            .selectFrom('sync_blob_cache')
            .select(({ fn }) => [fn.sum('size').as('total_size')])
            .executeTakeFirst();
        // Normalize driver-dependent aggregate representation.
        const currentSize = Number(stats?.total_size ?? 0);
        if (currentSize > maxCacheSize) {
            const targetSize = maxCacheSize * 0.8;
            let freedBytes = 0;
            const blobs = await db
                .selectFrom('sync_blob_cache')
                .select(['hash', 'size'])
                .orderBy('last_accessed_at', 'asc')
                .limit(100) // Limit batch size
                .execute();
            for (const blob of blobs) {
                if (currentSize - freedBytes <= targetSize)
                    break;
                await db
                    .deleteFrom('sync_blob_cache')
                    .where('hash', '=', blob.hash)
                    .execute();
                freedBytes += blob.size;
            }
        }
    }
}
525
/**
 * Create a cache pruning scheduler for the client blob manager.
 *
 * Periodically prunes the cache to stay under maxCacheSize using LRU eviction.
 *
 * @example
 * ```typescript
 * const scheduler = createBlobCachePruneScheduler({
 *   blobManager,
 *   intervalMs: 5 * 60 * 1000, // 5 minutes
 *   onPrune: (result) => {
 *     if (result.evicted > 0) {
 *       console.log(`Cache pruned: ${result.evicted} blobs, ${result.freedBytes} bytes freed`);
 *     }
 *   },
 * });
 *
 * // Start the scheduler
 * scheduler.start();
 *
 * // Stop when unmounting/shutting down
 * scheduler.stop();
 * ```
 */
export function createBlobCachePruneScheduler(options) {
    const { blobManager, intervalMs = 300000, onPrune } = options; // default: 5 minutes
    let timer = null;
    let busy = false;
    // Single-flight prune: overlapping invocations report an empty result
    // instead of running a second prune concurrently.
    const executePrune = async () => {
        if (busy) {
            return { evicted: 0, freedBytes: 0 };
        }
        busy = true;
        try {
            const outcome = await blobManager.pruneCache();
            onPrune?.({ evicted: outcome.evicted, freedBytes: outcome.freedBytes });
            return outcome;
        }
        catch (err) {
            const error = err instanceof Error ? err : new Error(String(err));
            const failure = { evicted: 0, freedBytes: 0, error };
            onPrune?.(failure);
            return failure;
        }
        finally {
            busy = false;
        }
    };
    return {
        /**
         * Start the prune scheduler.
         * Pass `{ immediate: true }` to fire one prune before the interval begins.
         */
        start(startOptions) {
            if (timer) {
                return; // already scheduled
            }
            if (startOptions?.immediate) {
                void executePrune();
            }
            timer = setInterval(() => void executePrune(), intervalMs);
        },
        /**
         * Stop the prune scheduler (no-op when not started).
         */
        stop() {
            if (timer) {
                clearInterval(timer);
                timer = null;
            }
        },
        /**
         * Run a single prune manually and return its result.
         */
        async runOnce() {
            return executePrune();
        },
        /**
         * True while the interval is scheduled.
         */
        get active() {
            return timer !== null;
        },
        /**
         * True while a prune is in flight.
         */
        get running() {
            return busy;
        },
    };
}
618
/**
 * Create an upload queue processor scheduler for the client blob manager.
 *
 * Periodically processes pending uploads when online.
 *
 * @example
 * ```typescript
 * const scheduler = createBlobUploadQueueScheduler({
 *   blobManager,
 *   intervalMs: 30 * 1000, // 30 seconds
 *   onProcess: (result) => {
 *     if (result.uploaded > 0) {
 *       console.log(`Uploaded ${result.uploaded} blobs`);
 *     }
 *     if (result.failed > 0) {
 *       console.warn(`Failed to upload ${result.failed} blobs`);
 *     }
 *   },
 * });
 *
 * // Start when online
 * scheduler.start();
 *
 * // Stop when offline or shutting down
 * scheduler.stop();
 * ```
 */
export function createBlobUploadQueueScheduler(options) {
    const { blobManager, intervalMs = 30000, onProcess } = options; // default: 30 seconds
    let timer = null;
    let busy = false;
    // Single-flight drain: overlapping invocations report an empty result
    // instead of processing the queue twice at once.
    const drainQueue = async () => {
        if (busy) {
            return { uploaded: 0, failed: 0, errors: [] };
        }
        busy = true;
        try {
            const outcome = await blobManager.processUploadQueue();
            onProcess?.(outcome);
            return outcome;
        }
        catch (err) {
            const error = err instanceof Error ? err : new Error(String(err));
            const failure = { uploaded: 0, failed: 0, errors: [], error };
            onProcess?.(failure);
            return failure;
        }
        finally {
            busy = false;
        }
    };
    return {
        /**
         * Start the upload queue processor.
         * Pass `{ immediate: true }` to run one pass before the interval begins.
         */
        start(startOptions) {
            if (timer) {
                return; // already scheduled
            }
            if (startOptions?.immediate) {
                void drainQueue();
            }
            timer = setInterval(() => void drainQueue(), intervalMs);
        },
        /**
         * Stop the upload queue processor (no-op when not started).
         */
        stop() {
            if (timer) {
                clearInterval(timer);
                timer = null;
            }
        },
        /**
         * Run a single processing pass manually and return its result.
         */
        async runOnce() {
            return drainQueue();
        },
        /**
         * True while the interval is scheduled.
         */
        get active() {
            return timer !== null;
        },
        /**
         * True while a processing pass is in flight.
         */
        get running() {
            return busy;
        },
    };
}
715
// ============================================================================
// Utilities
// ============================================================================
/**
 * Normalize Blob/File or Uint8Array input to a Uint8Array.
 * Uint8Array inputs are returned as-is (no copy); Blob-like inputs are
 * materialized via arrayBuffer().
 */
async function toUint8Array(data) {
    return data instanceof Uint8Array
        ? data
        : new Uint8Array(await data.arrayBuffer());
}
725
+ async function computeSha256(data) {
726
+ const buffer = new Uint8Array(data).buffer;
727
+ const hashBuffer = await crypto.subtle.digest('SHA-256', buffer);
728
+ const hashArray = new Uint8Array(hashBuffer);
729
+ const hex = Array.from(hashArray)
730
+ .map((b) => b.toString(16).padStart(2, '0'))
731
+ .join('');
732
+ return createBlobHash(hex);
733
+ }
734
// ============================================================================
// Errors
// ============================================================================
/** Thrown when a blob upload cannot be initiated, transferred, or confirmed. */
class BlobUploadError extends Error {
    // Class field initializes after super(), producing the same own `name`
    // property the previous explicit constructor assignment did.
    name = 'BlobUploadError';
}
743
/** Thrown when a blob download fails or its content fails hash verification. */
class BlobDownloadError extends Error {
    // Class field initializes after super(), producing the same own `name`
    // property the previous explicit constructor assignment did.
    name = 'BlobDownloadError';
}
749
+ //# sourceMappingURL=manager.js.map