@powersync/common 0.0.0-dev-20260120150240 → 0.0.0-dev-20260128165909

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/README.md +5 -1
  2. package/dist/bundle.cjs +1433 -386
  3. package/dist/bundle.cjs.map +1 -1
  4. package/dist/bundle.mjs +1425 -387
  5. package/dist/bundle.mjs.map +1 -1
  6. package/dist/bundle.node.cjs +1433 -386
  7. package/dist/bundle.node.cjs.map +1 -1
  8. package/dist/bundle.node.mjs +1425 -387
  9. package/dist/bundle.node.mjs.map +1 -1
  10. package/dist/index.d.cts +617 -44
  11. package/lib/attachments/AttachmentContext.d.ts +86 -0
  12. package/lib/attachments/AttachmentContext.js +229 -0
  13. package/lib/attachments/AttachmentContext.js.map +1 -0
  14. package/lib/attachments/AttachmentErrorHandler.d.ts +31 -0
  15. package/lib/attachments/AttachmentErrorHandler.js +2 -0
  16. package/lib/attachments/AttachmentErrorHandler.js.map +1 -0
  17. package/lib/attachments/AttachmentQueue.d.ts +149 -0
  18. package/lib/attachments/AttachmentQueue.js +362 -0
  19. package/lib/attachments/AttachmentQueue.js.map +1 -0
  20. package/lib/attachments/AttachmentService.d.ts +29 -0
  21. package/lib/attachments/AttachmentService.js +56 -0
  22. package/lib/attachments/AttachmentService.js.map +1 -0
  23. package/lib/attachments/LocalStorageAdapter.d.ts +62 -0
  24. package/lib/attachments/LocalStorageAdapter.js +6 -0
  25. package/lib/attachments/LocalStorageAdapter.js.map +1 -0
  26. package/lib/attachments/RemoteStorageAdapter.d.ts +27 -0
  27. package/lib/attachments/RemoteStorageAdapter.js +2 -0
  28. package/lib/attachments/RemoteStorageAdapter.js.map +1 -0
  29. package/lib/attachments/Schema.d.ts +50 -0
  30. package/lib/attachments/Schema.js +62 -0
  31. package/lib/attachments/Schema.js.map +1 -0
  32. package/lib/attachments/SyncingService.d.ts +62 -0
  33. package/lib/attachments/SyncingService.js +168 -0
  34. package/lib/attachments/SyncingService.js.map +1 -0
  35. package/lib/attachments/WatchedAttachmentItem.d.ts +17 -0
  36. package/lib/attachments/WatchedAttachmentItem.js +2 -0
  37. package/lib/attachments/WatchedAttachmentItem.js.map +1 -0
  38. package/lib/client/AbstractPowerSyncDatabase.d.ts +8 -1
  39. package/lib/client/AbstractPowerSyncDatabase.js +16 -3
  40. package/lib/client/AbstractPowerSyncDatabase.js.map +1 -1
  41. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +7 -12
  42. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +10 -12
  43. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
  44. package/lib/client/triggers/MemoryTriggerClaimManager.d.ts +6 -0
  45. package/lib/client/triggers/MemoryTriggerClaimManager.js +21 -0
  46. package/lib/client/triggers/MemoryTriggerClaimManager.js.map +1 -0
  47. package/lib/client/triggers/TriggerManager.d.ts +37 -0
  48. package/lib/client/triggers/TriggerManagerImpl.d.ts +24 -3
  49. package/lib/client/triggers/TriggerManagerImpl.js +133 -11
  50. package/lib/client/triggers/TriggerManagerImpl.js.map +1 -1
  51. package/lib/index.d.ts +12 -1
  52. package/lib/index.js +12 -1
  53. package/lib/index.js.map +1 -1
  54. package/package.json +1 -1
  55. package/src/attachments/AttachmentContext.ts +279 -0
  56. package/src/attachments/AttachmentErrorHandler.ts +34 -0
  57. package/src/attachments/AttachmentQueue.ts +472 -0
  58. package/src/attachments/AttachmentService.ts +62 -0
  59. package/src/attachments/LocalStorageAdapter.ts +72 -0
  60. package/src/attachments/README.md +718 -0
  61. package/src/attachments/RemoteStorageAdapter.ts +30 -0
  62. package/src/attachments/Schema.ts +87 -0
  63. package/src/attachments/SyncingService.ts +193 -0
  64. package/src/attachments/WatchedAttachmentItem.ts +19 -0
  65. package/src/client/AbstractPowerSyncDatabase.ts +19 -4
  66. package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +10 -12
  67. package/src/client/triggers/MemoryTriggerClaimManager.ts +25 -0
  68. package/src/client/triggers/TriggerManager.ts +50 -6
  69. package/src/client/triggers/TriggerManagerImpl.ts +177 -13
  70. package/src/index.ts +13 -1
package/dist/bundle.mjs CHANGED
@@ -1,5 +1,1210 @@
1
1
  import { Mutex } from 'async-mutex';
2
2
 
3
// SQLite cast expression types: https://www.sqlite.org/lang_expr.html#castexpr
var ColumnType;
(function (ColumnType) {
  // String enum: each member maps to its own name.
  for (const member of ['TEXT', 'INTEGER', 'REAL']) {
    ColumnType[member] = member;
  }
})(ColumnType || (ColumnType = {}));

/** Shorthand column descriptors used by the mapped-column (v2) table syntax. */
const text = { type: ColumnType.TEXT };
const integer = { type: ColumnType.INTEGER };
const real = { type: ColumnType.REAL };

// powersync-sqlite-core limits the number of columns per table to 1999, due to internal SQLite limits.
// In earlier versions this was limited to 63.
const MAX_AMOUNT_OF_COLUMNS = 1999;

/** Convenience namespace bundling the column type shorthands. */
const column = { text, integer, real };
27
/**
 * A single typed column in a table schema.
 * Wraps a `{ name, type }` options object and exposes it via getters.
 */
class Column {
  options;

  /**
   * @param options - Object with the column `name` and SQLite `type`.
   */
  constructor(options) {
    this.options = options;
  }

  get name() {
    return this.options.name;
  }

  get type() {
    return this.options.type;
  }

  /** Serializes the column to the `{ name, type }` shape used by the schema JSON. */
  toJSON() {
    const { name, type } = this;
    return { name, type };
  }
}
45
+
46
/** Default options applied to every indexed column. */
const DEFAULT_INDEX_COLUMN_OPTIONS = {
  ascending: true
};

/**
 * A reference to a table column inside an index definition,
 * with an ascending/descending sort direction.
 */
class IndexedColumn {
  options;

  /** Builds an ascending indexed-column reference for the given column name. */
  static createAscending(column) {
    return new IndexedColumn({
      name: column,
      ascending: true
    });
  }

  constructor(options) {
    // Merge over defaults so `ascending` falls back to true when omitted.
    this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
  }

  get name() {
    return this.options.name;
  }

  get ascending() {
    return this.options.ascending;
  }

  /**
   * Serializes this indexed column, resolving its type from the owning
   * table's columns (falling back to TEXT when the column is not found).
   */
  toJSON(table) {
    const matching = table.columns.find((col) => col.name === this.name);
    return {
      name: this.name,
      ascending: this.ascending,
      type: matching?.type ?? ColumnType.TEXT
    };
  }
}
74
+
75
/** Default options applied to every index. */
const DEFAULT_INDEX_OPTIONS = {
  columns: []
};

/**
 * A named index over one or more table columns.
 */
class Index {
  options;

  /**
   * Creates an index where every listed column is indexed in ascending order.
   *
   * @param options - Index options (e.g. `name`), excluding `columns`.
   * @param columnNames - Names of the columns to index.
   */
  static createAscending(options, columnNames) {
    return new Index({
      ...options,
      columns: columnNames.map((name) => IndexedColumn.createAscending(name))
    });
  }

  constructor(options) {
    // Merge over defaults. The original assigned `this.options = options`
    // and then immediately overwrote it with the merged object; the first
    // assignment was dead code and has been removed.
    this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
  }

  get name() {
    return this.options.name;
  }

  get columns() {
    return this.options.columns ?? [];
  }

  /** Serializes the index and its columns for the schema JSON. */
  toJSON(table) {
    return {
      name: this.name,
      columns: this.columns.map((c) => c.toJSON(table))
    };
  }
}
103
+
104
/** Defaults applied to any table option the caller leaves unset. */
const DEFAULT_TABLE_OPTIONS = {
  indexes: [],
  insertOnly: false,
  localOnly: false,
  trackPrevious: false,
  trackMetadata: false,
  ignoreEmptyUpdates: false
};
// Characters not allowed in table/view/column/index names (quotes, %, comma, dot, #, whitespace, brackets).
const InvalidSQLCharacters = /["'%,.#\s[\]]/;
/**
 * A synced (or local-only) table definition. Supports two construction styles:
 * a "v1" options object containing a `columns` array, or a "v2" mapped-column
 * object (`{ colName: { type } }`) plus a separate options argument.
 */
class Table {
  // Normalized table options (name, columns, indexes, flags).
  options;
  // Original v2 mapped-column object, kept so `columnMap` can return it verbatim.
  _mappedColumns;
  /** Creates a table that is never synced to the backend. */
  static createLocalOnly(options) {
    return new Table({ ...options, localOnly: true, insertOnly: false });
  }
  /** Creates a table where local changes are uploaded but rows are not kept locally after upload. */
  static createInsertOnly(options) {
    return new Table({ ...options, localOnly: false, insertOnly: true });
  }
  /**
   * Create a table.
   * @deprecated This was only included for TableV2 and is no longer necessary.
   * Prefer to use new Table() directly.
   *
   * TODO remove in the next major release.
   */
  static createTable(name, table) {
    return new Table({
      name,
      columns: table.columns,
      indexes: table.indexes,
      localOnly: table.options.localOnly,
      insertOnly: table.options.insertOnly,
      viewName: table.options.viewName
    });
  }
  /**
   * @param optionsOrColumns - Either a v1 options object (with a `columns`
   *   array) or a v2 mapped-column object.
   * @param v2Options - Options for the v2 form; ignored for v1.
   */
  constructor(optionsOrColumns, v2Options) {
    if (this.isTableV1(optionsOrColumns)) {
      this.initTableV1(optionsOrColumns);
    }
    else {
      this.initTableV2(optionsOrColumns, v2Options);
    }
  }
  /** Returns a copy of this table under a different name; other options are shared. */
  copyWithName(name) {
    return new Table({
      ...this.options,
      name
    });
  }
  // v1 is detected by the presence of a `columns` array; a v2 mapped-column
  // object has column descriptors as values, not an array.
  isTableV1(arg) {
    return 'columns' in arg && Array.isArray(arg.columns);
  }
  initTableV1(options) {
    this.options = {
      ...options,
      indexes: options.indexes || []
    };
    this.applyDefaultOptions();
  }
  initTableV2(columns, options) {
    // Convert the mapped-column object into Column instances.
    const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
    // Index columns prefixed with '-' are descending; the prefix is stripped from the name.
    const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
      name,
      columns: columnNames.map((name) => new IndexedColumn({
        name: name.replace(/^-/, ''),
        ascending: !name.startsWith('-')
      }))
    }));
    this.options = {
      name: '',
      columns: convertedColumns,
      indexes: convertedIndexes,
      viewName: options?.viewName,
      insertOnly: options?.insertOnly,
      localOnly: options?.localOnly,
      trackPrevious: options?.trackPrevious,
      trackMetadata: options?.trackMetadata,
      ignoreEmptyUpdates: options?.ignoreEmptyUpdates
    };
    this.applyDefaultOptions();
    this._mappedColumns = columns;
  }
  // Fills in any flag left null/undefined with its default; `??=` keeps explicit `false`.
  applyDefaultOptions() {
    this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
    this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
    this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
    this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
    this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
  }
  get name() {
    return this.options.name;
  }
  get viewNameOverride() {
    return this.options.viewName;
  }
  /** The SQL view name: the override when set, otherwise the table name. */
  get viewName() {
    return this.viewNameOverride ?? this.name;
  }
  get columns() {
    return this.options.columns;
  }
  /** Mapped-column view of the schema; derived from `columns` for v1 tables. */
  get columnMap() {
    return (this._mappedColumns ??
      this.columns.reduce((hash, column) => {
        hash[column.name] = { type: column.type ?? ColumnType.TEXT };
        return hash;
      }, {}));
  }
  get indexes() {
    return this.options.indexes ?? [];
  }
  get localOnly() {
    return this.options.localOnly;
  }
  get insertOnly() {
    return this.options.insertOnly;
  }
  get trackPrevious() {
    return this.options.trackPrevious;
  }
  get trackMetadata() {
    return this.options.trackMetadata;
  }
  get ignoreEmptyUpdates() {
    return this.options.ignoreEmptyUpdates;
  }
  /** The internal SQLite table name backing this table's view. */
  get internalName() {
    if (this.options.localOnly) {
      return `ps_data_local__${this.name}`;
    }
    return `ps_data__${this.name}`;
  }
  /** True when neither the table name nor the view override contains invalid SQL characters. */
  get validName() {
    if (InvalidSQLCharacters.test(this.name)) {
      return false;
    }
    if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
      return false;
    }
    return true;
  }
  /**
   * Validates names, column count, flag combinations, and uniqueness of
   * columns and indexes. Throws an Error describing the first violation found.
   */
  validate() {
    if (InvalidSQLCharacters.test(this.name)) {
      throw new Error(`Invalid characters in table name: ${this.name}`);
    }
    if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
      throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
    }
    if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
      throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
    }
    if (this.trackMetadata && this.localOnly) {
      throw new Error(`Can't include metadata for local-only tables.`);
    }
    if (this.trackPrevious != false && this.localOnly) {
      throw new Error(`Can't include old values for local-only tables.`);
    }
    // 'id' is reserved: it is added automatically, so user columns may not use it.
    const columnNames = new Set();
    columnNames.add('id');
    for (const column of this.columns) {
      const { name: columnName } = column;
      if (column.name === 'id') {
        throw new Error(`An id column is automatically added, custom id columns are not supported`);
      }
      if (columnNames.has(columnName)) {
        throw new Error(`Duplicate column ${columnName}`);
      }
      if (InvalidSQLCharacters.test(columnName)) {
        throw new Error(`Invalid characters in column name: ${column.name}`);
      }
      columnNames.add(columnName);
    }
    const indexNames = new Set();
    for (const index of this.indexes) {
      if (indexNames.has(index.name)) {
        throw new Error(`Duplicate index ${index.name}`);
      }
      if (InvalidSQLCharacters.test(index.name)) {
        throw new Error(`Invalid characters in index name: ${index.name}`);
      }
      // Every indexed column must exist on the table (or be the implicit 'id').
      for (const column of index.columns) {
        if (!columnNames.has(column.name)) {
          throw new Error(`Column ${column.name} not found for index ${index.name}`);
        }
      }
      indexNames.add(index.name);
    }
  }
  /** Serializes this table into the JSON schema format consumed by the sync core. */
  toJSON() {
    const trackPrevious = this.trackPrevious;
    return {
      name: this.name,
      view_name: this.viewName,
      local_only: this.localOnly,
      insert_only: this.insertOnly,
      // trackPrevious may be a boolean or an object with a `columns` filter;
      // `true` means "include old values for all columns".
      include_old: trackPrevious && (trackPrevious.columns ?? true),
      include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
      include_metadata: this.trackMetadata,
      ignore_empty_update: this.ignoreEmptyUpdates,
      columns: this.columns.map((c) => c.toJSON()),
      indexes: this.indexes.map((e) => e.toJSON(this))
    };
  }
}
308
+
309
/** Default name of the attachment queue table/view. */
const ATTACHMENT_TABLE = 'attachments';

/**
 * Maps a database row to an AttachmentRecord.
 *
 * Converts snake_case column names to camelCase fields and the integer
 * `has_synced` flag to a boolean.
 *
 * @param row - The database row object
 * @returns The corresponding AttachmentRecord
 *
 * @experimental
 */
function attachmentFromSql(row) {
  const {
    id,
    filename,
    local_uri: localUri,
    size,
    media_type: mediaType,
    timestamp,
    meta_data: metaData,
    has_synced: hasSynced,
    state
  } = row;
  return {
    id,
    filename,
    localUri,
    size,
    mediaType,
    timestamp,
    metaData,
    hasSynced: hasSynced === 1,
    state
  };
}
331
/**
 * AttachmentState represents the current synchronization state of an attachment.
 *
 * @experimental
 */
var AttachmentState;
(function (AttachmentState) {
  // Numeric enum with both forward (name -> value) and reverse
  // (value -> name) mappings, matching compiled TypeScript output.
  // ARCHIVED: attachment has been orphaned, i.e. the associated record has been deleted.
  const members = ['QUEUED_UPLOAD', 'QUEUED_DOWNLOAD', 'QUEUED_DELETE', 'SYNCED', 'ARCHIVED'];
  members.forEach((name, value) => {
    AttachmentState[name] = value;
    AttachmentState[value] = name;
  });
})(AttachmentState || (AttachmentState = {}));
344
/**
 * AttachmentTable defines the schema for the attachment queue table.
 *
 * Always local-only and never insert-only; the view name defaults to
 * ATTACHMENT_TABLE ('attachments') unless overridden.
 *
 * @internal
 */
class AttachmentTable extends Table {
  constructor(options) {
    // Column order matters for the generated schema, so it is kept stable.
    const columns = {
      filename: column.text,
      local_uri: column.text,
      timestamp: column.integer,
      size: column.integer,
      media_type: column.text,
      state: column.integer, // Corresponds to AttachmentState
      has_synced: column.integer,
      meta_data: column.text
    };
    super(columns, {
      ...options,
      viewName: options?.viewName ?? ATTACHMENT_TABLE,
      localOnly: true,
      insertOnly: false
    });
  }
}
368
+
369
/**
 * AttachmentContext provides database operations for managing attachment records.
 *
 * Provides methods to query, insert, update, and delete attachment records with
 * proper transaction management through PowerSync.
 *
 * @internal
 */
class AttachmentContext {
  /** PowerSync database instance for executing queries */
  db;
  /** Name of the database table storing attachment records */
  tableName;
  /** Logger instance for diagnostic information */
  logger;
  /** Maximum number of archived attachments to keep before cleanup */
  archivedCacheLimit = 100;
  /**
   * Creates a new AttachmentContext instance.
   *
   * @param db - PowerSync database instance
   * @param tableName - Name of the table storing attachment records. Default: 'attachments'
   * @param logger - Logger instance for diagnostic output
   * @param archivedCacheLimit - Maximum archived attachments kept before cleanup. Default: 100
   */
  constructor(db, tableName = 'attachments', logger, archivedCacheLimit) {
    this.db = db;
    this.tableName = tableName;
    this.logger = logger;
    // Fix: the original assigned the parameter unconditionally, so callers
    // omitting `archivedCacheLimit` clobbered the class-field default (100)
    // with `undefined`, which would break the `OFFSET ?` clause in
    // deleteArchivedAttachments(). Fall back to 100 when not provided.
    this.archivedCacheLimit = archivedCacheLimit ?? 100;
  }
  /**
   * Retrieves all active attachments that require synchronization.
   * Active attachments include those queued for upload, download, or delete.
   * Results are ordered by timestamp in ascending order.
   *
   * @returns Promise resolving to an array of active attachment records
   */
  async getActiveAttachments() {
    const attachments = await this.db.getAll(
      /* sql */
      `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
        OR state = ?
        OR state = ?
      ORDER BY
        timestamp ASC
      `,
      [AttachmentState.QUEUED_UPLOAD, AttachmentState.QUEUED_DOWNLOAD, AttachmentState.QUEUED_DELETE]
    );
    return attachments.map(attachmentFromSql);
  }
  /**
   * Retrieves all archived attachments.
   *
   * Archived attachments are no longer referenced but haven't been permanently deleted.
   * These are candidates for cleanup operations to free up storage space.
   *
   * @returns Promise resolving to an array of archived attachment records
   */
  async getArchivedAttachments() {
    const attachments = await this.db.getAll(
      /* sql */
      `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
      ORDER BY
        timestamp ASC
      `,
      [AttachmentState.ARCHIVED]
    );
    return attachments.map(attachmentFromSql);
  }
  /**
   * Retrieves all attachment records regardless of state.
   * Results are ordered by timestamp in ascending order.
   *
   * @returns Promise resolving to an array of all attachment records
   */
  async getAttachments() {
    const attachments = await this.db.getAll(
      /* sql */
      `
      SELECT
        *
      FROM
        ${this.tableName}
      ORDER BY
        timestamp ASC
      `,
      []
    );
    return attachments.map(attachmentFromSql);
  }
  /**
   * Inserts or updates an attachment record within an existing transaction.
   *
   * Performs an upsert operation (INSERT OR REPLACE). Must be called within
   * an active database transaction context.
   *
   * @param attachment - The attachment record to upsert
   * @param context - Active database transaction context
   */
  async upsertAttachment(attachment, context) {
    await context.execute(
      /* sql */
      `
      INSERT
      OR REPLACE INTO ${this.tableName} (
        id,
        filename,
        local_uri,
        size,
        media_type,
        timestamp,
        state,
        has_synced,
        meta_data
      )
      VALUES
        (?, ?, ?, ?, ?, ?, ?, ?, ?)
      `,
      [
        attachment.id,
        attachment.filename,
        attachment.localUri || null,
        attachment.size || null,
        attachment.mediaType || null,
        attachment.timestamp,
        attachment.state,
        attachment.hasSynced ? 1 : 0,
        attachment.metaData || null
      ]
    );
  }
  /**
   * Retrieves a single attachment record by id.
   *
   * @param id - Unique identifier of the attachment
   * @returns Promise resolving to the attachment record, or undefined when
   *   the query returns a falsy result.
   */
  async getAttachment(id) {
    // NOTE(review): relies on `db.get` returning a falsy value (rather than
    // throwing) when no row matches — confirm against the db adapter.
    const attachment = await this.db.get(
      /* sql */
      `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        id = ?
      `,
      [id]
    );
    return attachment ? attachmentFromSql(attachment) : undefined;
  }
  /**
   * Permanently deletes an attachment record from the database.
   *
   * This operation removes the attachment record but does not delete
   * the associated local or remote files. File deletion should be handled
   * separately through the appropriate storage adapters.
   *
   * @param attachmentId - Unique identifier of the attachment to delete
   */
  async deleteAttachment(attachmentId) {
    await this.db.writeTransaction((tx) =>
      tx.execute(
        /* sql */
        `
        DELETE FROM ${this.tableName}
        WHERE
          id = ?
        `,
        [attachmentId]
      )
    );
  }
  /** Removes every record from the attachment queue table. */
  async clearQueue() {
    await this.db.writeTransaction((tx) => tx.execute(/* sql */ ` DELETE FROM ${this.tableName} `));
  }
  /**
   * Deletes archived attachments beyond the configured cache limit, newest
   * first being retained.
   *
   * Fetches up to 1000 archived records past `archivedCacheLimit`, invokes the
   * optional callback with them (e.g. to delete local files), then removes the
   * records in one statement via `json_each`.
   *
   * @param callback - Optional async callback invoked with the records about to be deleted
   * @returns `true` when fewer than the batch limit were deleted (i.e. no more
   *   batches are pending), `false` when nothing was deleted this call.
   */
  async deleteArchivedAttachments(callback) {
    // Process at most 1000 records per invocation to bound memory/statement size.
    const limit = 1000;
    const results = await this.db.getAll(
      /* sql */
      `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
      ORDER BY
        timestamp DESC
      LIMIT
        ?
      OFFSET
        ?
      `,
      [AttachmentState.ARCHIVED, limit, this.archivedCacheLimit]
    );
    const archivedAttachments = results.map(attachmentFromSql);
    if (archivedAttachments.length === 0)
      return false;
    await callback?.(archivedAttachments);
    this.logger.info(
      `Deleting ${archivedAttachments.length} archived attachments. Archived attachment exceeds cache archiveCacheLimit of ${this.archivedCacheLimit}.`
    );
    const ids = archivedAttachments.map((attachment) => attachment.id);
    await this.db.execute(
      /* sql */
      `
      DELETE FROM ${this.tableName}
      WHERE
        id IN (
          SELECT
            json_each.value
          FROM
            json_each (?)
        );
      `,
      [JSON.stringify(ids)]
    );
    this.logger.info(`Deleted ${archivedAttachments.length} archived attachments`);
    return archivedAttachments.length < limit;
  }
  /**
   * Saves multiple attachment records in a single transaction.
   *
   * All updates are saved in a single batch after processing.
   * If the attachments array is empty, no database operations are performed.
   *
   * @param attachments - Array of attachment records to save
   */
  async saveAttachments(attachments) {
    if (attachments.length === 0) {
      return;
    }
    await this.db.writeTransaction(async (tx) => {
      for (const attachment of attachments) {
        await this.upsertAttachment(attachment, tx);
      }
    });
  }
}
596
+
597
/** Event names emitted by watched queries. */
var WatchedQueryListenerEvent;
(function (WatchedQueryListenerEvent) {
  Object.assign(WatchedQueryListenerEvent, {
    ON_DATA: 'onData',
    ON_ERROR: 'onError',
    ON_STATE_CHANGE: 'onStateChange',
    SETTINGS_WILL_UPDATE: 'settingsWillUpdate',
    CLOSED: 'closed'
  });
})(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));

/** Default throttle window (ms) between watched-query refreshes. */
const DEFAULT_WATCH_THROTTLE_MS = 30;

/** Default options applied to watched queries. */
const DEFAULT_WATCH_QUERY_OPTIONS = {
  throttleMs: DEFAULT_WATCH_THROTTLE_MS,
  reportFetching: true
};
610
+
611
/**
 * Orchestrates attachment synchronization between local and remote storage.
 * Handles uploads, downloads, deletions, and state transitions.
 *
 * @internal
 */
class SyncingService {
  attachmentService;
  localStorage;
  remoteStorage;
  logger;
  errorHandler;

  constructor(attachmentService, localStorage, remoteStorage, logger, errorHandler) {
    this.attachmentService = attachmentService;
    this.localStorage = localStorage;
    this.remoteStorage = remoteStorage;
    this.logger = logger;
    this.errorHandler = errorHandler;
  }

  /**
   * Processes attachments based on their state (upload, download, or delete).
   * All updates are saved in a single batch after processing.
   *
   * @param attachments - Array of attachment records to process
   * @param context - Attachment context for database operations
   * @returns Promise that resolves when all attachments have been processed and saved
   */
  async processAttachments(attachments, context) {
    const results = [];
    for (const record of attachments) {
      // Records in any other state are left untouched.
      if (record.state === AttachmentState.QUEUED_UPLOAD) {
        results.push(await this.uploadAttachment(record));
      } else if (record.state === AttachmentState.QUEUED_DOWNLOAD) {
        results.push(await this.downloadAttachment(record));
      } else if (record.state === AttachmentState.QUEUED_DELETE) {
        results.push(await this.deleteAttachment(record));
      }
    }
    await context.saveAttachments(results);
  }

  /**
   * Uploads an attachment from local storage to remote storage.
   * On success, marks as SYNCED. On failure, defers to error handler or archives.
   *
   * @param attachment - The attachment record to upload
   * @returns Updated attachment record with new state
   * @throws Error if the attachment has no localUri
   */
  async uploadAttachment(attachment) {
    this.logger.info(`Uploading attachment ${attachment.filename}`);
    try {
      if (attachment.localUri == null) {
        throw new Error(`No localUri for attachment ${attachment.id}`);
      }
      const fileBlob = await this.localStorage.readFile(attachment.localUri);
      await this.remoteStorage.uploadFile(fileBlob, attachment);
      return { ...attachment, state: AttachmentState.SYNCED, hasSynced: true };
    } catch (err) {
      // Without an error handler, the default is to retry (record unchanged).
      const retry = (await this.errorHandler?.onUploadError(attachment, err)) ?? true;
      return retry ? attachment : { ...attachment, state: AttachmentState.ARCHIVED };
    }
  }

  /**
   * Downloads an attachment from remote storage to local storage.
   * On success, marks as SYNCED with its new local URI. On failure, defers to
   * error handler or archives.
   *
   * @param attachment - The attachment record to download
   * @returns Updated attachment record with local URI and new state
   */
  async downloadAttachment(attachment) {
    this.logger.info(`Downloading attachment ${attachment.filename}`);
    try {
      const fileData = await this.remoteStorage.downloadFile(attachment);
      const targetUri = this.localStorage.getLocalUri(attachment.filename);
      await this.localStorage.saveFile(targetUri, fileData);
      return {
        ...attachment,
        state: AttachmentState.SYNCED,
        localUri: targetUri,
        hasSynced: true
      };
    } catch (err) {
      const retry = (await this.errorHandler?.onDownloadError(attachment, err)) ?? true;
      return retry ? attachment : { ...attachment, state: AttachmentState.ARCHIVED };
    }
  }

  /**
   * Deletes an attachment from both remote and local storage.
   * Removes the remote file, local file (if exists), and the attachment record.
   * On failure, defers to error handler or archives.
   *
   * @param attachment - The attachment record to delete
   * @returns Updated attachment record
   */
  async deleteAttachment(attachment) {
    try {
      await this.remoteStorage.deleteFile(attachment);
      if (attachment.localUri) {
        await this.localStorage.deleteFile(attachment.localUri);
      }
      await this.attachmentService.withContext((ctx) => ctx.deleteAttachment(attachment.id));
      return { ...attachment, state: AttachmentState.ARCHIVED };
    } catch (err) {
      const retry = (await this.errorHandler?.onDeleteError(attachment, err)) ?? true;
      return retry ? attachment : { ...attachment, state: AttachmentState.ARCHIVED };
    }
  }

  /**
   * Performs cleanup of archived attachments by removing their local files and records.
   * Errors during local file deletion are logged but do not prevent record deletion.
   */
  async deleteArchivedAttachments(context) {
    return context.deleteArchivedAttachments(async (archived) => {
      for (const attachment of archived) {
        if (!attachment.localUri) {
          continue;
        }
        try {
          await this.localStorage.deleteFile(attachment.localUri);
        } catch (err) {
          this.logger.error('Error deleting local file for archived attachment', err);
        }
      }
    });
  }
}
775
+
776
/**
 * Runs `callback` while holding an exclusive lock on `mutex`.
 *
 * Resolves with the callback's result and propagates its rejection.
 *
 * The original implementation wrapped `mutex.runExclusive` in a manually
 * constructed Promise (the explicit-construction anti-pattern);
 * `runExclusive` already returns a promise that settles with the callback's
 * outcome, so the wrapper was redundant and has been removed.
 *
 * NOTE(review): `options` (historically intended to carry a per-lock timeout,
 * per the original doc comment) was never implemented and is accepted only
 * for backward compatibility — no timeout is applied. TODO: implement via
 * async-mutex's `withTimeout`, or drop the parameter.
 *
 * @param mutex - An async-mutex Mutex (or compatible object with `runExclusive`)
 * @param callback - Async function to run under the lock
 * @param options - Unused; reserved for a future timeout option
 * @returns Promise resolving to the callback's return value
 */
async function mutexRunExclusive(mutex, callback, options) {
  return mutex.runExclusive(callback);
}
791
+
792
/**
 * Service for querying and watching attachment records in the database.
 *
 * @internal
 */
class AttachmentService {
  db;
  logger;
  tableName;
  // Serializes access to the attachment context across concurrent callers.
  mutex = new Mutex();
  context;

  constructor(db, logger, tableName = 'attachments', archivedCacheLimit = 100) {
    this.db = db;
    this.logger = logger;
    this.tableName = tableName;
    this.context = new AttachmentContext(db, tableName, logger, archivedCacheLimit);
  }

  /**
   * Creates a differential watch query for active attachments requiring synchronization.
   * @returns Watch query that emits changes for queued uploads, downloads, and deletes
   */
  watchActiveAttachments({ throttleMs } = {}) {
    this.logger.info('Watching active attachments...');
    const sql = /* sql */ `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
        OR state = ?
        OR state = ?
      ORDER BY
        timestamp ASC
    `;
    const parameters = [
      AttachmentState.QUEUED_UPLOAD,
      AttachmentState.QUEUED_DOWNLOAD,
      AttachmentState.QUEUED_DELETE
    ];
    return this.db.query({ sql, parameters }).differentialWatch({ throttleMs });
  }

  /**
   * Executes a callback with exclusive access to the attachment context.
   */
  async withContext(callback) {
    return mutexRunExclusive(this.mutex, () => callback(this.context));
  }
}
843
+
844
/**
 * AttachmentQueue manages the lifecycle and synchronization of attachments
 * between local and remote storage.
 * Provides automatic synchronization, upload/download queuing, attachment monitoring,
 * verification and repair of local files, and cleanup of archived attachments.
 *
 * @experimental
 * @alpha This is currently experimental and may change without a major version bump.
 */
class AttachmentQueue {
    /** Timer for periodic synchronization operations */
    periodicSyncTimer;
    /** Service for synchronizing attachments between local and remote storage */
    syncingService;
    /** Adapter for local file storage operations */
    localStorage;
    /** Adapter for remote file storage operations */
    remoteStorage;
    /**
     * Callback function to watch for changes in attachment references in your data model.
     *
     * This should be implemented by the user of AttachmentQueue to monitor changes in your application's
     * data that reference attachments. When attachments are added, removed, or modified,
     * this callback should trigger the onUpdate function with the current set of attachments.
     */
    watchAttachments;
    /** Name of the database table storing attachment records */
    tableName;
    /** Logger instance for diagnostic information */
    logger;
    /** Interval in milliseconds between periodic sync operations. Default: 30000 (30 seconds) */
    syncIntervalMs = 30 * 1000;
    /** Duration in milliseconds to throttle sync operations */
    syncThrottleDuration;
    /** Whether to automatically download remote attachments. Default: true */
    downloadAttachments = true;
    /** Maximum number of archived attachments to keep before cleanup. Default: 100 */
    archivedCacheLimit;
    /** Service for managing attachment-related database operations */
    attachmentService;
    /** PowerSync database instance */
    db;
    /** Cleanup function for status change listener */
    statusListenerDispose;
    /** Differential watch handle over active (queued) attachment records; set while syncing */
    watchActiveAttachments;
    /** Abort controller used to stop processing of watched attachment updates */
    watchAttachmentsAbortController;
    /**
     * Creates a new AttachmentQueue instance.
     *
     * @param options - Configuration options
     * @param options.db - PowerSync database instance
     * @param options.remoteStorage - Remote storage adapter for upload/download operations
     * @param options.localStorage - Local storage adapter for file persistence
     * @param options.watchAttachments - Callback for monitoring attachment changes in your data model
     * @param options.tableName - Name of the table to store attachment records. Default: 'ps_attachment_queue'
     * @param options.logger - Logger instance. Defaults to db.logger
     * @param options.syncIntervalMs - Interval between automatic syncs in milliseconds. Default: 30000
     * @param options.syncThrottleDuration - Throttle duration for sync operations in milliseconds. Default: 1000
     * @param options.downloadAttachments - Whether to automatically download remote attachments. Default: true
     * @param options.archivedCacheLimit - Maximum archived attachments before cleanup. Default: 100
     * @param options.errorHandler - Optional handler consulted when sync operations fail
     */
    constructor({ db, localStorage, remoteStorage, watchAttachments, logger, tableName = ATTACHMENT_TABLE, syncIntervalMs = 30 * 1000, syncThrottleDuration = DEFAULT_WATCH_THROTTLE_MS, downloadAttachments = true, archivedCacheLimit = 100, errorHandler }) {
        this.db = db;
        this.remoteStorage = remoteStorage;
        this.localStorage = localStorage;
        this.watchAttachments = watchAttachments;
        this.tableName = tableName;
        this.syncIntervalMs = syncIntervalMs;
        this.syncThrottleDuration = syncThrottleDuration;
        this.archivedCacheLimit = archivedCacheLimit;
        this.downloadAttachments = downloadAttachments;
        this.logger = logger ?? db.logger;
        this.attachmentService = new AttachmentService(db, this.logger, tableName, archivedCacheLimit);
        this.syncingService = new SyncingService(this.attachmentService, localStorage, remoteStorage, this.logger, errorHandler);
    }
    /**
     * Generates a new attachment ID using a SQLite UUID function.
     *
     * @returns Promise resolving to the new attachment ID
     */
    async generateAttachmentId() {
        return this.db.get('SELECT uuid() as id').then((row) => row.id);
    }
    /**
     * Starts the attachment synchronization process.
     *
     * This method:
     * - Stops any existing sync operations
     * - Sets up periodic synchronization based on syncIntervalMs
     * - Registers listeners for active attachment changes
     * - Processes watched attachments to queue uploads/downloads
     * - Handles state transitions for archived and new attachments
     */
    async startSync() {
        // Ensure a previous sync session is fully torn down before starting a new one.
        await this.stopSync();
        this.watchActiveAttachments = this.attachmentService.watchActiveAttachments({
            throttleMs: this.syncThrottleDuration
        });
        // immediately invoke the sync storage to initialize local storage
        await this.localStorage.initialize();
        await this.verifyAttachments();
        // Sync storage periodically
        this.periodicSyncTimer = setInterval(async () => {
            await this.syncStorage();
        }, this.syncIntervalMs);
        // Sync storage when there is a change in active attachments
        this.watchActiveAttachments.registerListener({
            onDiff: async () => {
                await this.syncStorage();
            }
        });
        this.statusListenerDispose = this.db.registerListener({
            statusChanged: (status) => {
                if (status.connected) {
                    // Device came online, process attachments immediately
                    this.syncStorage().catch((error) => {
                        this.logger.error('Error syncing storage on connection:', error);
                    });
                }
            }
        });
        this.watchAttachmentsAbortController = new AbortController();
        const signal = this.watchAttachmentsAbortController.signal;
        // Process attachments when there is a change in watched attachments
        this.watchAttachments(async (watchedAttachments) => {
            // Skip processing if sync has been stopped
            if (signal.aborted) {
                return;
            }
            await this.attachmentService.withContext(async (ctx) => {
                // Need to get all the attachments which are tracked in the DB.
                // We might need to restore an archived attachment.
                const currentAttachments = await ctx.getAttachments();
                const attachmentUpdates = [];
                for (const watchedAttachment of watchedAttachments) {
                    const existingQueueItem = currentAttachments.find((a) => a.id === watchedAttachment.id);
                    if (!existingQueueItem) {
                        // Item is watched but not in the queue yet. Need to add it.
                        if (!this.downloadAttachments) {
                            continue;
                        }
                        const filename = watchedAttachment.filename ?? `${watchedAttachment.id}.${watchedAttachment.fileExtension}`;
                        attachmentUpdates.push({
                            id: watchedAttachment.id,
                            filename,
                            state: AttachmentState.QUEUED_DOWNLOAD,
                            hasSynced: false,
                            metaData: watchedAttachment.metaData,
                            timestamp: Date.now()
                        });
                        continue;
                    }
                    if (existingQueueItem.state === AttachmentState.ARCHIVED) {
                        // The attachment is present again. Need to queue it for sync.
                        // We might be able to optimize this in future
                        if (existingQueueItem.hasSynced === true) {
                            // No remote action required, we can restore the record (avoids deletion)
                            attachmentUpdates.push({
                                ...existingQueueItem,
                                state: AttachmentState.SYNCED
                            });
                        }
                        else {
                            // The localURI should be set if the record was meant to be uploaded
                            // and hasSynced is false then
                            // it must be an upload operation
                            const newState = existingQueueItem.localUri == null ? AttachmentState.QUEUED_DOWNLOAD : AttachmentState.QUEUED_UPLOAD;
                            attachmentUpdates.push({
                                ...existingQueueItem,
                                state: newState
                            });
                        }
                    }
                }
                for (const attachment of currentAttachments) {
                    const notInWatchedItems = watchedAttachments.find((i) => i.id === attachment.id) == null;
                    if (notInWatchedItems) {
                        switch (attachment.state) {
                            case AttachmentState.QUEUED_DELETE:
                            case AttachmentState.QUEUED_UPLOAD:
                                // Only archive if it has synced
                                if (attachment.hasSynced === true) {
                                    attachmentUpdates.push({
                                        ...attachment,
                                        state: AttachmentState.ARCHIVED
                                    });
                                }
                                break;
                            default:
                                // Archive other states such as QUEUED_DOWNLOAD
                                attachmentUpdates.push({
                                    ...attachment,
                                    state: AttachmentState.ARCHIVED
                                });
                        }
                    }
                }
                if (attachmentUpdates.length > 0) {
                    await ctx.saveAttachments(attachmentUpdates);
                }
            });
        }, signal);
    }
    /**
     * Synchronizes all active attachments between local and remote storage.
     *
     * This is called automatically at regular intervals when sync is started,
     * but can also be called manually to trigger an immediate sync.
     */
    async syncStorage() {
        await this.attachmentService.withContext(async (ctx) => {
            const activeAttachments = await ctx.getActiveAttachments();
            await this.localStorage.initialize();
            await this.syncingService.processAttachments(activeAttachments, ctx);
            await this.syncingService.deleteArchivedAttachments(ctx);
        });
    }
    /**
     * Stops the attachment synchronization process.
     *
     * Clears the periodic sync timer, closes all active attachment watchers,
     * aborts watched-attachment processing, and disposes the status listener.
     * All handles are reset so a subsequent startSync() begins from a clean state.
     */
    async stopSync() {
        if (this.periodicSyncTimer != null) {
            clearInterval(this.periodicSyncTimer);
            this.periodicSyncTimer = undefined;
        }
        if (this.watchActiveAttachments) {
            await this.watchActiveAttachments.close();
            // Clear the stale handle; the watch is closed and must not be reused.
            this.watchActiveAttachments = undefined;
        }
        if (this.watchAttachmentsAbortController) {
            this.watchAttachmentsAbortController.abort();
            this.watchAttachmentsAbortController = undefined;
        }
        if (this.statusListenerDispose) {
            this.statusListenerDispose();
            this.statusListenerDispose = undefined;
        }
    }
    /**
     * Saves a file to local storage and queues it for upload to remote storage.
     *
     * @param options - File save options
     * @param options.data - The file data as ArrayBuffer, Blob, or base64 string
     * @param options.fileExtension - File extension (e.g., 'jpg', 'pdf')
     * @param options.mediaType - MIME type of the file (e.g., 'image/jpeg')
     * @param options.metaData - Optional metadata to associate with the attachment
     * @param options.id - Optional custom ID. If not provided, a UUID will be generated
     * @param options.updateHook - Optional callback to execute additional database operations
     * within the same transaction as the attachment creation
     * @returns Promise resolving to the created attachment record
     */
    async saveFile({ data, fileExtension, mediaType, metaData, id, updateHook }) {
        const resolvedId = id ?? (await this.generateAttachmentId());
        const filename = `${resolvedId}.${fileExtension}`;
        const localUri = this.localStorage.getLocalUri(filename);
        const size = await this.localStorage.saveFile(localUri, data);
        const attachment = {
            id: resolvedId,
            filename,
            mediaType,
            localUri,
            state: AttachmentState.QUEUED_UPLOAD,
            hasSynced: false,
            size,
            timestamp: Date.now(),
            metaData
        };
        await this.attachmentService.withContext(async (ctx) => {
            // The updateHook and the attachment upsert share one transaction so the
            // data-model reference and the queue record are created atomically.
            await ctx.db.writeTransaction(async (tx) => {
                await updateHook?.(tx, attachment);
                await ctx.upsertAttachment(attachment, tx);
            });
        });
        return attachment;
    }
    /**
     * Queues an existing attachment for deletion from remote storage.
     *
     * @param options - Delete options
     * @param options.id - ID of the attachment to delete
     * @param options.updateHook - Optional callback to execute additional database operations
     * within the same transaction as the state change
     * @throws Error if no attachment with the given ID exists
     */
    async deleteFile({ id, updateHook }) {
        await this.attachmentService.withContext(async (ctx) => {
            const attachment = await ctx.getAttachment(id);
            if (!attachment) {
                throw new Error(`Attachment with id ${id} not found`);
            }
            await ctx.db.writeTransaction(async (tx) => {
                await updateHook?.(tx, attachment);
                await ctx.upsertAttachment({
                    ...attachment,
                    state: AttachmentState.QUEUED_DELETE,
                    hasSynced: false
                }, tx);
            });
        });
    }
    /**
     * Deletes archived attachments in batches until the archived cache is
     * within its configured limit. Each batch runs in its own context lock.
     */
    async expireCache() {
        let isDone = false;
        // deleteArchivedAttachments reports true once no further cleanup is required.
        while (!isDone) {
            await this.attachmentService.withContext(async (ctx) => {
                isDone = await this.syncingService.deleteArchivedAttachments(ctx);
            });
        }
    }
    /**
     * Removes all attachment records from the queue table and clears all
     * locally stored attachment files.
     */
    async clearQueue() {
        await this.attachmentService.withContext(async (ctx) => {
            await ctx.clearQueue();
        });
        await this.localStorage.clear();
    }
    /**
     * Verifies the integrity of all attachment records and repairs inconsistencies.
     *
     * This method checks each attachment record against the local filesystem and:
     * - Updates localUri if the file exists at a different path
     * - Archives attachments with missing local files that haven't been uploaded
     * - Requeues synced attachments for download if their local files are missing
     */
    async verifyAttachments() {
        await this.attachmentService.withContext(async (ctx) => {
            const attachments = await ctx.getAttachments();
            const updates = [];
            for (const attachment of attachments) {
                if (attachment.localUri == null) {
                    continue;
                }
                const exists = await this.localStorage.fileExists(attachment.localUri);
                if (exists) {
                    // The file exists, this is correct
                    continue;
                }
                const newLocalUri = this.localStorage.getLocalUri(attachment.filename);
                const newExists = await this.localStorage.fileExists(newLocalUri);
                if (newExists) {
                    // The file exists locally but the localUri is broken, we update it.
                    updates.push({
                        ...attachment,
                        localUri: newLocalUri
                    });
                }
                else {
                    // the file doesn't exist locally.
                    if (attachment.state === AttachmentState.SYNCED) {
                        // the file has been successfully synced to remote storage but is missing
                        // we download it again
                        updates.push({
                            ...attachment,
                            state: AttachmentState.QUEUED_DOWNLOAD,
                            localUri: undefined
                        });
                    }
                    else {
                        // the file wasn't successfully synced to remote storage, we archive it
                        updates.push({
                            ...attachment,
                            state: AttachmentState.ARCHIVED,
                            localUri: undefined // Clears the value
                        });
                    }
                }
            }
            // Skip the write entirely when nothing needs repair.
            if (updates.length > 0) {
                await ctx.saveAttachments(updates);
            }
        });
    }
}
1201
+
1202
/**
 * Supported encodings for attachment file data.
 */
var EncodingType;
(function (EncodingType) {
    // Plain UTF-8 text content
    EncodingType.UTF8 = 'utf8';
    // Base64-encoded binary content
    EncodingType.Base64 = 'base64';
})(EncodingType || (EncodingType = {}));
1207
+
3
1208
/**
 * Unwraps the `default` export from a CommonJS/transpiled-ESM module object.
 * Returns the module itself when it is not an ESM interop namespace or has
 * no own `default` property.
 *
 * @param x - The imported module value
 * @returns The default export, or `x` unchanged
 */
function getDefaultExportFromCjs(x) {
    const hasInteropDefault =
        x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default');
    return hasInteropDefault ? x.default : x;
}
@@ -1316,20 +2521,6 @@ class MetaBaseObserver extends BaseObserver {
1316
2521
  }
1317
2522
  }
1318
2523
 
1319
- var WatchedQueryListenerEvent;
1320
- (function (WatchedQueryListenerEvent) {
1321
- WatchedQueryListenerEvent["ON_DATA"] = "onData";
1322
- WatchedQueryListenerEvent["ON_ERROR"] = "onError";
1323
- WatchedQueryListenerEvent["ON_STATE_CHANGE"] = "onStateChange";
1324
- WatchedQueryListenerEvent["SETTINGS_WILL_UPDATE"] = "settingsWillUpdate";
1325
- WatchedQueryListenerEvent["CLOSED"] = "closed";
1326
- })(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));
1327
- const DEFAULT_WATCH_THROTTLE_MS = 30;
1328
- const DEFAULT_WATCH_QUERY_OPTIONS = {
1329
- throttleMs: DEFAULT_WATCH_THROTTLE_MS,
1330
- reportFetching: true
1331
- };
1332
-
1333
2524
  /**
1334
2525
  * Performs underlying watching and yields a stream of results.
1335
2526
  * @internal
@@ -9242,7 +10433,7 @@ function requireDist () {
9242
10433
 
9243
10434
  var distExports = requireDist();
9244
10435
 
9245
- var version = "1.45.0";
10436
+ var version = "1.46.0";
9246
10437
  var PACKAGE = {
9247
10438
  version: version};
9248
10439
 
@@ -10144,18 +11335,17 @@ var SyncClientImplementation;
10144
11335
  *
10145
11336
  * This is the default option.
10146
11337
  *
10147
- * @deprecated Don't use {@link SyncClientImplementation.JAVASCRIPT} directly. Instead, use
10148
- * {@link DEFAULT_SYNC_CLIENT_IMPLEMENTATION} or omit the option. The explicit choice to use
10149
- * the JavaScript-based sync implementation will be removed from a future version of the SDK.
11338
+ * @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
11339
+ * the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
11340
+ * version of the PowerSync SDK.
10150
11341
  */
10151
11342
  SyncClientImplementation["JAVASCRIPT"] = "js";
10152
11343
  /**
10153
11344
  * This implementation offloads the sync line decoding and handling into the PowerSync
10154
11345
  * core extension.
10155
11346
  *
10156
- * @experimental
10157
- * While this implementation is more performant than {@link SyncClientImplementation.JAVASCRIPT},
10158
- * it has seen less real-world testing and is marked as __experimental__ at the moment.
11347
+ * This option is more performant than the {@link JAVASCRIPT} client, enabled by default and the
11348
+ * recommended client implementation for all apps.
10159
11349
  *
10160
11350
  * ## Compatibility warning
10161
11351
  *
@@ -10173,13 +11363,9 @@ var SyncClientImplementation;
10173
11363
  SyncClientImplementation["RUST"] = "rust";
10174
11364
  })(SyncClientImplementation || (SyncClientImplementation = {}));
10175
11365
  /**
10176
- * The default {@link SyncClientImplementation} to use.
10177
- *
10178
- * Please use this field instead of {@link SyncClientImplementation.JAVASCRIPT} directly. A future version
10179
- * of the PowerSync SDK will enable {@link SyncClientImplementation.RUST} by default and remove the JavaScript
10180
- * option.
11366
+ * The default {@link SyncClientImplementation} to use, {@link SyncClientImplementation.RUST}.
10181
11367
  */
10182
- const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = SyncClientImplementation.JAVASCRIPT;
11368
+ const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = SyncClientImplementation.RUST;
10183
11369
  const DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
10184
11370
  const DEFAULT_RETRY_DELAY_MS = 5000;
10185
11371
  const DEFAULT_STREAMING_SYNC_OPTIONS = {
@@ -10589,6 +11775,9 @@ The next upload iteration will be delayed.`);
10589
11775
  if (rawTables != null && rawTables.length) {
10590
11776
  this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
10591
11777
  }
11778
+ if (this.activeStreams.length) {
11779
+ this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
11780
+ }
10592
11781
  this.logger.debug('Streaming sync iteration started');
10593
11782
  this.options.adapter.startSession();
10594
11783
  let [req, bucketMap] = await this.collectLocalBucketState();
@@ -11104,6 +12293,27 @@ The next upload iteration will be delayed.`);
11104
12293
  }
11105
12294
  }
11106
12295
 
12296
// Maps claim identifiers to their release callbacks.
const CLAIM_STORE = new Map();
/**
 * In-memory trigger claim manager: at most one claim may be held per
 * identifier at a time. Releasing a claim removes it from the store.
 * @internal
 * @experimental
 */
const MEMORY_TRIGGER_CLAIM_MANAGER = {
    async obtainClaim(identifier) {
        if (CLAIM_STORE.has(identifier)) {
            throw new Error(`A claim is already present for ${identifier}`);
        }
        // The release function doubles as the stored claim token.
        async function release() {
            CLAIM_STORE.delete(identifier);
        }
        CLAIM_STORE.set(identifier, release);
        return release;
    },
    async checkClaim(identifier) {
        return CLAIM_STORE.has(identifier);
    }
};
12316
+
11107
12317
  /**
11108
12318
  * SQLite operations to track changes for with {@link TriggerManager}
11109
12319
  * @experimental
@@ -11115,9 +12325,20 @@ var DiffTriggerOperation;
11115
12325
  DiffTriggerOperation["DELETE"] = "DELETE";
11116
12326
  })(DiffTriggerOperation || (DiffTriggerOperation = {}));
11117
12327
 
12328
/** Baseline trigger-manager settings; platforms may override via updateDefaults. */
const DEFAULT_TRIGGER_MANAGER_CONFIGURATION = {
    useStorageByDefault: false
};
/** Interval between periodic trigger-resource cleanup passes. */
const TRIGGER_CLEANUP_INTERVAL_MS = 120_000; // 2 minutes
12332
+ /**
12333
+ * @internal
12334
+ * @experimental
12335
+ */
11118
12336
  class TriggerManagerImpl {
11119
12337
  options;
11120
12338
  schema;
12339
+ defaultConfig;
12340
+ cleanupTimeout;
12341
+ isDisposed;
11121
12342
  constructor(options) {
11122
12343
  this.options = options;
11123
12344
  this.schema = options.schema;
@@ -11126,6 +12347,33 @@ class TriggerManagerImpl {
11126
12347
  this.schema = schema;
11127
12348
  }
11128
12349
  });
12350
+ this.isDisposed = false;
12351
+ /**
12352
+ * Configure a cleanup to run on an interval.
12353
+ * The interval is configured using setTimeout to take the async
12354
+ * execution time of the callback into account.
12355
+ */
12356
+ this.defaultConfig = DEFAULT_TRIGGER_MANAGER_CONFIGURATION;
12357
+ const cleanupCallback = async () => {
12358
+ this.cleanupTimeout = null;
12359
+ if (this.isDisposed) {
12360
+ return;
12361
+ }
12362
+ try {
12363
+ await this.cleanupResources();
12364
+ }
12365
+ catch (ex) {
12366
+ this.db.logger.error(`Caught error while attempting to cleanup triggers`, ex);
12367
+ }
12368
+ finally {
12369
+ // if not closed, set another timeout
12370
+ if (this.isDisposed) {
12371
+ return;
12372
+ }
12373
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
12374
+ }
12375
+ };
12376
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
11129
12377
  }
11130
12378
  get db() {
11131
12379
  return this.options.db;
@@ -11143,13 +12391,95 @@ class TriggerManagerImpl {
11143
12391
  await tx.execute(/* sql */ `DROP TRIGGER IF EXISTS ${triggerId}; `);
11144
12392
  }
11145
12393
  }
12394
+ dispose() {
12395
+ this.isDisposed = true;
12396
+ if (this.cleanupTimeout) {
12397
+ clearTimeout(this.cleanupTimeout);
12398
+ }
12399
+ }
12400
+ /**
12401
+ * Updates default config settings for platform specific use-cases.
12402
+ */
12403
+ updateDefaults(config) {
12404
+ this.defaultConfig = {
12405
+ ...this.defaultConfig,
12406
+ ...config
12407
+ };
12408
+ }
12409
+ generateTriggerName(operation, destinationTable, triggerId) {
12410
+ return `__ps_temp_trigger_${operation.toLowerCase()}__${destinationTable}__${triggerId}`;
12411
+ }
12412
+ /**
12413
+ * Cleanup any SQLite triggers or tables that are no longer in use.
12414
+ */
12415
+ async cleanupResources() {
12416
+ // we use the database here since cleanupResources is called during the PowerSyncDatabase initialization
12417
+ await this.db.database.writeLock(async (ctx) => {
12418
+ /**
12419
+ * Note: We only cleanup persisted triggers. These are tracked in the sqlite_master table.
12420
+ * temporary triggers will not be affected by this.
12421
+ * Query all triggers that match our naming pattern
12422
+ */
12423
+ const triggers = await ctx.getAll(/* sql */ `
12424
+ SELECT
12425
+ name
12426
+ FROM
12427
+ sqlite_master
12428
+ WHERE
12429
+ type = 'trigger'
12430
+ AND name LIKE '__ps_temp_trigger_%'
12431
+ `);
12432
+ /** Use regex to extract table names and IDs from trigger names
12433
+ * Trigger naming convention: __ps_temp_trigger_<operation>__<destination_table>__<id>
12434
+ */
12435
+ const triggerPattern = /^__ps_temp_trigger_(?:insert|update|delete)__(.+)__([a-f0-9_]{36})$/i;
12436
+ const trackedItems = new Map();
12437
+ for (const trigger of triggers) {
12438
+ const match = trigger.name.match(triggerPattern);
12439
+ if (match) {
12440
+ const [, table, id] = match;
12441
+ // Collect all trigger names for each id combo
12442
+ const existing = trackedItems.get(id);
12443
+ if (existing) {
12444
+ existing.triggerNames.push(trigger.name);
12445
+ }
12446
+ else {
12447
+ trackedItems.set(id, { table, id, triggerNames: [trigger.name] });
12448
+ }
12449
+ }
12450
+ }
12451
+ for (const trackedItem of trackedItems.values()) {
12452
+ // check if there is anything holding on to this item
12453
+ const hasClaim = await this.options.claimManager.checkClaim(trackedItem.id);
12454
+ if (hasClaim) {
12455
+ // This does not require cleanup
12456
+ continue;
12457
+ }
12458
+ this.db.logger.debug(`Clearing resources for trigger ${trackedItem.id} with table ${trackedItem.table}`);
12459
+ // We need to delete the triggers and table
12460
+ for (const triggerName of trackedItem.triggerNames) {
12461
+ await ctx.execute(`DROP TRIGGER IF EXISTS ${triggerName}`);
12462
+ }
12463
+ await ctx.execute(`DROP TABLE IF EXISTS ${trackedItem.table}`);
12464
+ }
12465
+ });
12466
+ }
11146
12467
  async createDiffTrigger(options) {
11147
12468
  await this.db.waitForReady();
11148
- const { source, destination, columns, when, hooks } = options;
12469
+ const { source, destination, columns, when, hooks,
12470
+ // Fall back to the provided default if not given on this level
12471
+ useStorage = this.defaultConfig.useStorageByDefault } = options;
11149
12472
  const operations = Object.keys(when);
11150
12473
  if (operations.length == 0) {
11151
12474
  throw new Error('At least one WHEN operation must be specified for the trigger.');
11152
12475
  }
12476
+ /**
12477
+ * The clause to use when executing
12478
+ * CREATE ${tableTriggerTypeClause} TABLE
12479
+ * OR
12480
+ * CREATE ${tableTriggerTypeClause} TRIGGER
12481
+ */
12482
+ const tableTriggerTypeClause = !useStorage ? 'TEMP' : '';
11153
12483
  const whenClauses = Object.fromEntries(Object.entries(when).map(([operation, filter]) => [operation, `WHEN ${filter}`]));
11154
12484
  /**
11155
12485
  * Allow specifying the View name as the source.
@@ -11163,6 +12493,7 @@ class TriggerManagerImpl {
11163
12493
  const internalSource = sourceDefinition.internalName;
11164
12494
  const triggerIds = [];
11165
12495
  const id = await this.getUUID();
12496
+ const releaseStorageClaim = useStorage ? await this.options.claimManager.obtainClaim(id) : null;
11166
12497
  /**
11167
12498
  * We default to replicating all columns if no columns array is provided.
11168
12499
  */
@@ -11195,26 +12526,27 @@ class TriggerManagerImpl {
11195
12526
  return this.db.writeLock(async (tx) => {
11196
12527
  await this.removeTriggers(tx, triggerIds);
11197
12528
  await tx.execute(/* sql */ `DROP TABLE IF EXISTS ${destination};`);
12529
+ await releaseStorageClaim?.();
11198
12530
  });
11199
12531
  };
11200
12532
  const setup = async (tx) => {
11201
12533
  // Allow user code to execute in this lock context before the trigger is created.
11202
12534
  await hooks?.beforeCreate?.(tx);
11203
12535
  await tx.execute(/* sql */ `
11204
- CREATE TEMP TABLE ${destination} (
12536
+ CREATE ${tableTriggerTypeClause} TABLE ${destination} (
11205
12537
  operation_id INTEGER PRIMARY KEY AUTOINCREMENT,
11206
12538
  id TEXT,
11207
12539
  operation TEXT,
11208
12540
  timestamp TEXT,
11209
12541
  value TEXT,
11210
12542
  previous_value TEXT
11211
- );
12543
+ )
11212
12544
  `);
11213
12545
  if (operations.includes(DiffTriggerOperation.INSERT)) {
11214
- const insertTriggerId = `ps_temp_trigger_insert_${id}`;
12546
+ const insertTriggerId = this.generateTriggerName(DiffTriggerOperation.INSERT, destination, id);
11215
12547
  triggerIds.push(insertTriggerId);
11216
12548
  await tx.execute(/* sql */ `
11217
- CREATE TEMP TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[DiffTriggerOperation.INSERT]} BEGIN
12549
+ CREATE ${tableTriggerTypeClause} TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[DiffTriggerOperation.INSERT]} BEGIN
11218
12550
  INSERT INTO
11219
12551
  ${destination} (id, operation, timestamp, value)
11220
12552
  VALUES
@@ -11225,14 +12557,14 @@ class TriggerManagerImpl {
11225
12557
  ${jsonFragment('NEW')}
11226
12558
  );
11227
12559
 
11228
- END;
12560
+ END
11229
12561
  `);
11230
12562
  }
11231
12563
  if (operations.includes(DiffTriggerOperation.UPDATE)) {
11232
- const updateTriggerId = `ps_temp_trigger_update_${id}`;
12564
+ const updateTriggerId = this.generateTriggerName(DiffTriggerOperation.UPDATE, destination, id);
11233
12565
  triggerIds.push(updateTriggerId);
11234
12566
  await tx.execute(/* sql */ `
11235
- CREATE TEMP TRIGGER ${updateTriggerId} AFTER
12567
+ CREATE ${tableTriggerTypeClause} TRIGGER ${updateTriggerId} AFTER
11236
12568
  UPDATE ON ${internalSource} ${whenClauses[DiffTriggerOperation.UPDATE]} BEGIN
11237
12569
  INSERT INTO
11238
12570
  ${destination} (id, operation, timestamp, value, previous_value)
@@ -11249,11 +12581,11 @@ class TriggerManagerImpl {
11249
12581
  `);
11250
12582
  }
11251
12583
  if (operations.includes(DiffTriggerOperation.DELETE)) {
11252
- const deleteTriggerId = `ps_temp_trigger_delete_${id}`;
12584
+ const deleteTriggerId = this.generateTriggerName(DiffTriggerOperation.DELETE, destination, id);
11253
12585
  triggerIds.push(deleteTriggerId);
11254
12586
  // Create delete trigger for basic JSON
11255
12587
  await tx.execute(/* sql */ `
11256
- CREATE TEMP TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[DiffTriggerOperation.DELETE]} BEGIN
12588
+ CREATE ${tableTriggerTypeClause} TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[DiffTriggerOperation.DELETE]} BEGIN
11257
12589
  INSERT INTO
11258
12590
  ${destination} (id, operation, timestamp, value)
11259
12591
  VALUES
@@ -11297,7 +12629,7 @@ class TriggerManagerImpl {
11297
12629
  // If no array is provided, we use all columns from the source table.
11298
12630
  const contextColumns = columns ?? sourceDefinition.columns.map((col) => col.name);
11299
12631
  const id = await this.getUUID();
11300
- const destination = `ps_temp_track_${source}_${id}`;
12632
+ const destination = `__ps_temp_track_${source}_${id}`;
11301
12633
  // register an onChange before the trigger is created
11302
12634
  const abortController = new AbortController();
11303
12635
  const abortOnChange = () => abortController.abort();
@@ -11452,6 +12784,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
11452
12784
  * Allows creating SQLite triggers which can be used to track various operations on SQLite tables.
11453
12785
  */
11454
12786
  triggers;
12787
+ triggersImpl;
11455
12788
  logger;
11456
12789
  constructor(options) {
11457
12790
  super();
@@ -11515,9 +12848,10 @@ class AbstractPowerSyncDatabase extends BaseObserver {
11515
12848
  logger: this.logger
11516
12849
  });
11517
12850
  this._isReadyPromise = this.initialize();
11518
- this.triggers = new TriggerManagerImpl({
12851
+ this.triggers = this.triggersImpl = new TriggerManagerImpl({
11519
12852
  db: this,
11520
- schema: this.schema
12853
+ schema: this.schema,
12854
+ ...this.generateTriggerManagerConfig()
11521
12855
  });
11522
12856
  }
11523
12857
  /**
@@ -11543,6 +12877,15 @@ class AbstractPowerSyncDatabase extends BaseObserver {
11543
12877
  get connecting() {
11544
12878
  return this.currentStatus?.connecting || false;
11545
12879
  }
12880
+ /**
12881
+ * Generates a base configuration for {@link TriggerManagerImpl}.
12882
+ * Implementations should override this if necessary.
12883
+ */
12884
+ generateTriggerManagerConfig() {
12885
+ return {
12886
+ claimManager: MEMORY_TRIGGER_CLAIM_MANAGER
12887
+ };
12888
+ }
11546
12889
  /**
11547
12890
  * @returns A promise which will resolve once initialization is completed.
11548
12891
  */
@@ -11607,6 +12950,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
11607
12950
  await this.updateSchema(this.options.schema);
11608
12951
  await this.resolveOfflineSyncStatus();
11609
12952
  await this.database.execute('PRAGMA RECURSIVE_TRIGGERS=TRUE');
12953
+ await this.triggersImpl.cleanupResources();
11610
12954
  this.ready = true;
11611
12955
  this.iterateListeners((cb) => cb.initialized?.());
11612
12956
  }
@@ -11726,7 +13070,6 @@ class AbstractPowerSyncDatabase extends BaseObserver {
11726
13070
  await this.disconnect();
11727
13071
  await this.waitForReady();
11728
13072
  const { clearLocal } = options;
11729
- // TODO DB name, verify this is necessary with extension
11730
13073
  await this.database.writeTransaction(async (tx) => {
11731
13074
  await tx.execute('SELECT powersync_clear(?)', [clearLocal ? 1 : 0]);
11732
13075
  });
@@ -11758,6 +13101,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
11758
13101
  if (this.closed) {
11759
13102
  return;
11760
13103
  }
13104
+ this.triggersImpl.dispose();
11761
13105
  await this.iterateAsyncListeners(async (cb) => cb.closing?.());
11762
13106
  const { disconnect } = options;
11763
13107
  if (disconnect) {
@@ -12723,151 +14067,50 @@ class SqliteBucketStorage extends BaseObserver {
12723
14067
  ]);
12724
14068
  return r != 0;
12725
14069
  }
12726
- async migrateToFixedSubkeys() {
12727
- await this.writeTransaction(async (tx) => {
12728
- await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
12729
- await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
12730
- SqliteBucketStorage._subkeyMigrationKey,
12731
- '1'
12732
- ]);
12733
- });
12734
- }
12735
- static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
12736
- }
12737
- function hasMatchingPriority(priority, bucket) {
12738
- return bucket.priority != null && bucket.priority <= priority;
12739
- }
12740
-
12741
- // TODO JSON
12742
- class SyncDataBatch {
12743
- buckets;
12744
- static fromJSON(json) {
12745
- return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
12746
- }
12747
- constructor(buckets) {
12748
- this.buckets = buckets;
12749
- }
12750
- }
12751
-
12752
- /**
12753
- * Thrown when an underlying database connection is closed.
12754
- * This is particularly relevant when worker connections are marked as closed while
12755
- * operations are still in progress.
12756
- */
12757
- class ConnectionClosedError extends Error {
12758
- static NAME = 'ConnectionClosedError';
12759
- static MATCHES(input) {
12760
- /**
12761
- * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
12762
- * check might fail. This also performs a failsafe check.
12763
- * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
12764
- */
12765
- return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
12766
- }
12767
- constructor(message) {
12768
- super(message);
12769
- this.name = ConnectionClosedError.NAME;
12770
- }
12771
- }
12772
-
12773
- // https://www.sqlite.org/lang_expr.html#castexpr
12774
- var ColumnType;
12775
- (function (ColumnType) {
12776
- ColumnType["TEXT"] = "TEXT";
12777
- ColumnType["INTEGER"] = "INTEGER";
12778
- ColumnType["REAL"] = "REAL";
12779
- })(ColumnType || (ColumnType = {}));
12780
- const text = {
12781
- type: ColumnType.TEXT
12782
- };
12783
- const integer = {
12784
- type: ColumnType.INTEGER
12785
- };
12786
- const real = {
12787
- type: ColumnType.REAL
12788
- };
12789
- // powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
12790
- // In earlier versions this was limited to 63.
12791
- const MAX_AMOUNT_OF_COLUMNS = 1999;
12792
- const column = {
12793
- text,
12794
- integer,
12795
- real
12796
- };
12797
- class Column {
12798
- options;
12799
- constructor(options) {
12800
- this.options = options;
12801
- }
12802
- get name() {
12803
- return this.options.name;
12804
- }
12805
- get type() {
12806
- return this.options.type;
12807
- }
12808
- toJSON() {
12809
- return {
12810
- name: this.name,
12811
- type: this.type
12812
- };
12813
- }
12814
- }
12815
-
12816
- const DEFAULT_INDEX_COLUMN_OPTIONS = {
12817
- ascending: true
12818
- };
12819
- class IndexedColumn {
12820
- options;
12821
- static createAscending(column) {
12822
- return new IndexedColumn({
12823
- name: column,
12824
- ascending: true
12825
- });
12826
- }
12827
- constructor(options) {
12828
- this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
12829
- }
12830
- get name() {
12831
- return this.options.name;
12832
- }
12833
- get ascending() {
12834
- return this.options.ascending;
12835
- }
12836
- toJSON(table) {
12837
- return {
12838
- name: this.name,
12839
- ascending: this.ascending,
12840
- type: table.columns.find((column) => column.name === this.name)?.type ?? ColumnType.TEXT
12841
- };
12842
- }
12843
- }
12844
-
12845
- const DEFAULT_INDEX_OPTIONS = {
12846
- columns: []
12847
- };
12848
- class Index {
12849
- options;
12850
- static createAscending(options, columnNames) {
12851
- return new Index({
12852
- ...options,
12853
- columns: columnNames.map((name) => IndexedColumn.createAscending(name))
14070
+ async migrateToFixedSubkeys() {
14071
+ await this.writeTransaction(async (tx) => {
14072
+ await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
14073
+ await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
14074
+ SqliteBucketStorage._subkeyMigrationKey,
14075
+ '1'
14076
+ ]);
12854
14077
  });
12855
14078
  }
12856
- constructor(options) {
12857
- this.options = options;
12858
- this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
14079
+ static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
14080
+ }
14081
+ function hasMatchingPriority(priority, bucket) {
14082
+ return bucket.priority != null && bucket.priority <= priority;
14083
+ }
14084
+
14085
+ // TODO JSON
14086
+ class SyncDataBatch {
14087
+ buckets;
14088
+ static fromJSON(json) {
14089
+ return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
12859
14090
  }
12860
- get name() {
12861
- return this.options.name;
14091
+ constructor(buckets) {
14092
+ this.buckets = buckets;
12862
14093
  }
12863
- get columns() {
12864
- return this.options.columns ?? [];
14094
+ }
14095
+
14096
+ /**
14097
+ * Thrown when an underlying database connection is closed.
14098
+ * This is particularly relevant when worker connections are marked as closed while
14099
+ * operations are still in progress.
14100
+ */
14101
+ class ConnectionClosedError extends Error {
14102
+ static NAME = 'ConnectionClosedError';
14103
+ static MATCHES(input) {
14104
+ /**
14105
+ * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
14106
+ * check might fail. This also performs a failsafe check.
14107
+ * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
14108
+ */
14109
+ return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
12865
14110
  }
12866
- toJSON(table) {
12867
- return {
12868
- name: this.name,
12869
- columns: this.columns.map((c) => c.toJSON(table))
12870
- };
14111
+ constructor(message) {
14112
+ super(message);
14113
+ this.name = ConnectionClosedError.NAME;
12871
14114
  }
12872
14115
  }
12873
14116
 
@@ -12964,211 +14207,6 @@ class Schema {
12964
14207
  }
12965
14208
  }
12966
14209
 
12967
- const DEFAULT_TABLE_OPTIONS = {
12968
- indexes: [],
12969
- insertOnly: false,
12970
- localOnly: false,
12971
- trackPrevious: false,
12972
- trackMetadata: false,
12973
- ignoreEmptyUpdates: false
12974
- };
12975
- const InvalidSQLCharacters = /["'%,.#\s[\]]/;
12976
- class Table {
12977
- options;
12978
- _mappedColumns;
12979
- static createLocalOnly(options) {
12980
- return new Table({ ...options, localOnly: true, insertOnly: false });
12981
- }
12982
- static createInsertOnly(options) {
12983
- return new Table({ ...options, localOnly: false, insertOnly: true });
12984
- }
12985
- /**
12986
- * Create a table.
12987
- * @deprecated This was only only included for TableV2 and is no longer necessary.
12988
- * Prefer to use new Table() directly.
12989
- *
12990
- * TODO remove in the next major release.
12991
- */
12992
- static createTable(name, table) {
12993
- return new Table({
12994
- name,
12995
- columns: table.columns,
12996
- indexes: table.indexes,
12997
- localOnly: table.options.localOnly,
12998
- insertOnly: table.options.insertOnly,
12999
- viewName: table.options.viewName
13000
- });
13001
- }
13002
- constructor(optionsOrColumns, v2Options) {
13003
- if (this.isTableV1(optionsOrColumns)) {
13004
- this.initTableV1(optionsOrColumns);
13005
- }
13006
- else {
13007
- this.initTableV2(optionsOrColumns, v2Options);
13008
- }
13009
- }
13010
- copyWithName(name) {
13011
- return new Table({
13012
- ...this.options,
13013
- name
13014
- });
13015
- }
13016
- isTableV1(arg) {
13017
- return 'columns' in arg && Array.isArray(arg.columns);
13018
- }
13019
- initTableV1(options) {
13020
- this.options = {
13021
- ...options,
13022
- indexes: options.indexes || []
13023
- };
13024
- this.applyDefaultOptions();
13025
- }
13026
- initTableV2(columns, options) {
13027
- const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
13028
- const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
13029
- name,
13030
- columns: columnNames.map((name) => new IndexedColumn({
13031
- name: name.replace(/^-/, ''),
13032
- ascending: !name.startsWith('-')
13033
- }))
13034
- }));
13035
- this.options = {
13036
- name: '',
13037
- columns: convertedColumns,
13038
- indexes: convertedIndexes,
13039
- viewName: options?.viewName,
13040
- insertOnly: options?.insertOnly,
13041
- localOnly: options?.localOnly,
13042
- trackPrevious: options?.trackPrevious,
13043
- trackMetadata: options?.trackMetadata,
13044
- ignoreEmptyUpdates: options?.ignoreEmptyUpdates
13045
- };
13046
- this.applyDefaultOptions();
13047
- this._mappedColumns = columns;
13048
- }
13049
- applyDefaultOptions() {
13050
- this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
13051
- this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
13052
- this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
13053
- this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
13054
- this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
13055
- }
13056
- get name() {
13057
- return this.options.name;
13058
- }
13059
- get viewNameOverride() {
13060
- return this.options.viewName;
13061
- }
13062
- get viewName() {
13063
- return this.viewNameOverride ?? this.name;
13064
- }
13065
- get columns() {
13066
- return this.options.columns;
13067
- }
13068
- get columnMap() {
13069
- return (this._mappedColumns ??
13070
- this.columns.reduce((hash, column) => {
13071
- hash[column.name] = { type: column.type ?? ColumnType.TEXT };
13072
- return hash;
13073
- }, {}));
13074
- }
13075
- get indexes() {
13076
- return this.options.indexes ?? [];
13077
- }
13078
- get localOnly() {
13079
- return this.options.localOnly;
13080
- }
13081
- get insertOnly() {
13082
- return this.options.insertOnly;
13083
- }
13084
- get trackPrevious() {
13085
- return this.options.trackPrevious;
13086
- }
13087
- get trackMetadata() {
13088
- return this.options.trackMetadata;
13089
- }
13090
- get ignoreEmptyUpdates() {
13091
- return this.options.ignoreEmptyUpdates;
13092
- }
13093
- get internalName() {
13094
- if (this.options.localOnly) {
13095
- return `ps_data_local__${this.name}`;
13096
- }
13097
- return `ps_data__${this.name}`;
13098
- }
13099
- get validName() {
13100
- if (InvalidSQLCharacters.test(this.name)) {
13101
- return false;
13102
- }
13103
- if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
13104
- return false;
13105
- }
13106
- return true;
13107
- }
13108
- validate() {
13109
- if (InvalidSQLCharacters.test(this.name)) {
13110
- throw new Error(`Invalid characters in table name: ${this.name}`);
13111
- }
13112
- if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
13113
- throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
13114
- }
13115
- if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
13116
- throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
13117
- }
13118
- if (this.trackMetadata && this.localOnly) {
13119
- throw new Error(`Can't include metadata for local-only tables.`);
13120
- }
13121
- if (this.trackPrevious != false && this.localOnly) {
13122
- throw new Error(`Can't include old values for local-only tables.`);
13123
- }
13124
- const columnNames = new Set();
13125
- columnNames.add('id');
13126
- for (const column of this.columns) {
13127
- const { name: columnName } = column;
13128
- if (column.name === 'id') {
13129
- throw new Error(`An id column is automatically added, custom id columns are not supported`);
13130
- }
13131
- if (columnNames.has(columnName)) {
13132
- throw new Error(`Duplicate column ${columnName}`);
13133
- }
13134
- if (InvalidSQLCharacters.test(columnName)) {
13135
- throw new Error(`Invalid characters in column name: ${column.name}`);
13136
- }
13137
- columnNames.add(columnName);
13138
- }
13139
- const indexNames = new Set();
13140
- for (const index of this.indexes) {
13141
- if (indexNames.has(index.name)) {
13142
- throw new Error(`Duplicate index ${index.name}`);
13143
- }
13144
- if (InvalidSQLCharacters.test(index.name)) {
13145
- throw new Error(`Invalid characters in index name: ${index.name}`);
13146
- }
13147
- for (const column of index.columns) {
13148
- if (!columnNames.has(column.name)) {
13149
- throw new Error(`Column ${column.name} not found for index ${index.name}`);
13150
- }
13151
- }
13152
- indexNames.add(index.name);
13153
- }
13154
- }
13155
- toJSON() {
13156
- const trackPrevious = this.trackPrevious;
13157
- return {
13158
- name: this.name,
13159
- view_name: this.viewName,
13160
- local_only: this.localOnly,
13161
- insert_only: this.insertOnly,
13162
- include_old: trackPrevious && (trackPrevious.columns ?? true),
13163
- include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
13164
- include_metadata: this.trackMetadata,
13165
- ignore_empty_update: this.ignoreEmptyUpdates,
13166
- columns: this.columns.map((c) => c.toJSON()),
13167
- indexes: this.indexes.map((e) => e.toJSON(this))
13168
- };
13169
- }
13170
- }
13171
-
13172
14210
  /**
13173
14211
  Generate a new table from the columns and indexes
13174
14212
  @deprecated You should use {@link Table} instead as it now allows TableV2 syntax.
@@ -13324,5 +14362,5 @@ const parseQuery = (query, parameters) => {
13324
14362
  return { sqlStatement, parameters: parameters };
13325
14363
  };
13326
14364
 
13327
- export { AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_PRESSURE_LIMITS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DataStream, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand, RawTable, RowUpdateType, Schema, SqliteBucketStorage, SyncClientImplementation, SyncDataBatch, SyncDataBucket, SyncProgress, SyncStatus, SyncStreamConnectionMethod, Table, TableV2, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isContinueCheckpointRequest, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, isStreamingSyncCheckpointPartiallyComplete, isStreamingSyncData, isSyncNewCheckpointRequest, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID };
14365
+ export { ATTACHMENT_TABLE, AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, AttachmentContext, AttachmentQueue, AttachmentService, AttachmentState, AttachmentTable, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_PRESSURE_LIMITS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DataStream, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, EncodingType, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, MEMORY_TRIGGER_CLAIM_MANAGER, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand, RawTable, RowUpdateType, Schema, SqliteBucketStorage, SyncClientImplementation, SyncDataBatch, SyncDataBucket, SyncProgress, SyncStatus, SyncStreamConnectionMethod, SyncingService, Table, TableV2, TriggerManagerImpl, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, attachmentFromSql, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isContinueCheckpointRequest, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, 
isStreamingSyncCheckpointPartiallyComplete, isStreamingSyncData, isSyncNewCheckpointRequest, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID };
13328
14366
  //# sourceMappingURL=bundle.mjs.map