@powersync/common 1.45.0 → 1.47.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +5 -1
  2. package/dist/bundle.cjs +4165 -4914
  3. package/dist/bundle.cjs.map +1 -1
  4. package/dist/bundle.mjs +4155 -4915
  5. package/dist/bundle.mjs.map +1 -1
  6. package/dist/bundle.node.cjs +1502 -422
  7. package/dist/bundle.node.cjs.map +1 -1
  8. package/dist/bundle.node.mjs +1492 -423
  9. package/dist/bundle.node.mjs.map +1 -1
  10. package/dist/index.d.cts +622 -42
  11. package/lib/attachments/AttachmentContext.d.ts +86 -0
  12. package/lib/attachments/AttachmentContext.js +229 -0
  13. package/lib/attachments/AttachmentContext.js.map +1 -0
  14. package/lib/attachments/AttachmentErrorHandler.d.ts +31 -0
  15. package/lib/attachments/AttachmentErrorHandler.js +2 -0
  16. package/lib/attachments/AttachmentErrorHandler.js.map +1 -0
  17. package/lib/attachments/AttachmentQueue.d.ts +149 -0
  18. package/lib/attachments/AttachmentQueue.js +362 -0
  19. package/lib/attachments/AttachmentQueue.js.map +1 -0
  20. package/lib/attachments/AttachmentService.d.ts +29 -0
  21. package/lib/attachments/AttachmentService.js +56 -0
  22. package/lib/attachments/AttachmentService.js.map +1 -0
  23. package/lib/attachments/LocalStorageAdapter.d.ts +62 -0
  24. package/lib/attachments/LocalStorageAdapter.js +6 -0
  25. package/lib/attachments/LocalStorageAdapter.js.map +1 -0
  26. package/lib/attachments/RemoteStorageAdapter.d.ts +27 -0
  27. package/lib/attachments/RemoteStorageAdapter.js +2 -0
  28. package/lib/attachments/RemoteStorageAdapter.js.map +1 -0
  29. package/lib/attachments/Schema.d.ts +50 -0
  30. package/lib/attachments/Schema.js +62 -0
  31. package/lib/attachments/Schema.js.map +1 -0
  32. package/lib/attachments/SyncingService.d.ts +62 -0
  33. package/lib/attachments/SyncingService.js +168 -0
  34. package/lib/attachments/SyncingService.js.map +1 -0
  35. package/lib/attachments/WatchedAttachmentItem.d.ts +17 -0
  36. package/lib/attachments/WatchedAttachmentItem.js +2 -0
  37. package/lib/attachments/WatchedAttachmentItem.js.map +1 -0
  38. package/lib/client/AbstractPowerSyncDatabase.d.ts +9 -2
  39. package/lib/client/AbstractPowerSyncDatabase.js +18 -5
  40. package/lib/client/AbstractPowerSyncDatabase.js.map +1 -1
  41. package/lib/client/sync/stream/AbstractRemote.js +41 -32
  42. package/lib/client/sync/stream/AbstractRemote.js.map +1 -1
  43. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +7 -12
  44. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +10 -12
  45. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
  46. package/lib/client/triggers/MemoryTriggerClaimManager.d.ts +6 -0
  47. package/lib/client/triggers/MemoryTriggerClaimManager.js +21 -0
  48. package/lib/client/triggers/MemoryTriggerClaimManager.js.map +1 -0
  49. package/lib/client/triggers/TriggerManager.d.ts +37 -0
  50. package/lib/client/triggers/TriggerManagerImpl.d.ts +24 -3
  51. package/lib/client/triggers/TriggerManagerImpl.js +133 -11
  52. package/lib/client/triggers/TriggerManagerImpl.js.map +1 -1
  53. package/lib/index.d.ts +13 -0
  54. package/lib/index.js +13 -0
  55. package/lib/index.js.map +1 -1
  56. package/lib/utils/DataStream.js +11 -2
  57. package/lib/utils/DataStream.js.map +1 -1
  58. package/lib/utils/mutex.d.ts +1 -1
  59. package/lib/utils/mutex.js.map +1 -1
  60. package/package.json +4 -3
  61. package/src/attachments/AttachmentContext.ts +279 -0
  62. package/src/attachments/AttachmentErrorHandler.ts +34 -0
  63. package/src/attachments/AttachmentQueue.ts +472 -0
  64. package/src/attachments/AttachmentService.ts +62 -0
  65. package/src/attachments/LocalStorageAdapter.ts +72 -0
  66. package/src/attachments/README.md +718 -0
  67. package/src/attachments/RemoteStorageAdapter.ts +30 -0
  68. package/src/attachments/Schema.ts +87 -0
  69. package/src/attachments/SyncingService.ts +193 -0
  70. package/src/attachments/WatchedAttachmentItem.ts +19 -0
  71. package/src/client/AbstractPowerSyncDatabase.ts +21 -6
  72. package/src/client/sync/stream/AbstractRemote.ts +47 -35
  73. package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +11 -14
  74. package/src/client/triggers/MemoryTriggerClaimManager.ts +25 -0
  75. package/src/client/triggers/TriggerManager.ts +50 -6
  76. package/src/client/triggers/TriggerManagerImpl.ts +177 -13
  77. package/src/index.ts +14 -0
  78. package/src/utils/DataStream.ts +13 -2
  79. package/src/utils/mutex.ts +1 -1
@@ -4,6 +4,1224 @@ var asyncMutex = require('async-mutex');
4
4
  var eventIterator = require('event-iterator');
5
5
  var node_buffer = require('node:buffer');
6
6
 
7
// https://www.sqlite.org/lang_expr.html#castexpr
// Compiled TypeScript enum of the supported SQLite column type affinities.
exports.ColumnType = void 0;
(function (ColumnType) {
    ColumnType["TEXT"] = "TEXT";
    ColumnType["INTEGER"] = "INTEGER";
    ColumnType["REAL"] = "REAL";
})(exports.ColumnType || (exports.ColumnType = {}));
// Reusable column definitions, grouped under the `column` shorthand below.
const text = {
    type: exports.ColumnType.TEXT
};
const integer = {
    type: exports.ColumnType.INTEGER
};
const real = {
    type: exports.ColumnType.REAL
};
// powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
// In earlier versions this was limited to 63.
const MAX_AMOUNT_OF_COLUMNS = 1999;
// Shorthand builders used when declaring schemas, e.g. `{ filename: column.text }`.
const column = {
    text,
    integer,
    real
};
31
/**
 * A single named, typed column in a table schema.
 */
class Column {
    options;

    /**
     * @param options - `{ name, type }` describing the column.
     */
    constructor(options) {
        this.options = options;
    }

    /** The column name. */
    get name() {
        return this.options.name;
    }

    /** The SQLite column type affinity. */
    get type() {
        return this.options.type;
    }

    /** Plain-object form used when serializing the schema. */
    toJSON() {
        const { name, type } = this;
        return { name, type };
    }
}
49
+
50
const DEFAULT_INDEX_COLUMN_OPTIONS = {
    ascending: true
};

/**
 * A single column participating in an index, with its sort direction.
 */
class IndexedColumn {
    options;

    /** Convenience factory for an ascending index column. */
    static createAscending(column) {
        return new IndexedColumn({ name: column, ascending: true });
    }

    /**
     * @param options - `{ name, ascending? }`; `ascending` defaults to true.
     */
    constructor(options) {
        this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
    }

    /** Name of the indexed column. */
    get name() {
        return this.options.name;
    }

    /** Sort direction for this column in the index. */
    get ascending() {
        return this.options.ascending;
    }

    /**
     * Serializes the column, resolving its type from the owning table's
     * column list (TEXT when the column is not found there).
     */
    toJSON(table) {
        const match = table.columns.find((col) => col.name === this.name);
        return {
            name: this.name,
            ascending: this.ascending,
            type: match?.type ?? exports.ColumnType.TEXT
        };
    }
}
78
+
79
const DEFAULT_INDEX_OPTIONS = {
    columns: []
};
/**
 * A named table index composed of {@link IndexedColumn} entries.
 */
class Index {
    options;
    /**
     * Creates an index where every listed column is sorted ascending.
     *
     * @param options - Index options (name etc.); `columns` is replaced
     * @param columnNames - Names of the columns to index, in order
     */
    static createAscending(options, columnNames) {
        return new Index({
            ...options,
            columns: columnNames.map((name) => IndexedColumn.createAscending(name))
        });
    }
    /**
     * @param options - `{ name, columns? }`; `columns` defaults to `[]`.
     */
    constructor(options) {
        // Merge defaults in a single assignment (the previous extra
        // `this.options = options` assignment was dead code).
        this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
    }
    /** The index name. */
    get name() {
        return this.options.name;
    }
    /** The indexed columns (never null). */
    get columns() {
        return this.options.columns ?? [];
    }
    /** Serializes the index, resolving column types via the owning table. */
    toJSON(table) {
        return {
            name: this.name,
            columns: this.columns.map((c) => c.toJSON(table))
        };
    }
}
107
+
108
// Defaults applied to any table whose options omit these flags.
const DEFAULT_TABLE_OPTIONS = {
    indexes: [],
    insertOnly: false,
    localOnly: false,
    trackPrevious: false,
    trackMetadata: false,
    ignoreEmptyUpdates: false
};
// Characters rejected in table, view, column and index names.
const InvalidSQLCharacters = /["'%,.#\s[\]]/;
/**
 * Schema definition for a single table. Supports two construction forms:
 * a V1 options object (`{ name, columns: Column[], ... }`) or a V2 column
 * map (`{ colName: column.text, ... }`, with options as a second argument).
 */
class Table {
    // Normalized options shared by both construction forms.
    options;
    // Original V2 column map, kept so `columnMap` can return it directly.
    _mappedColumns;
    /** Creates a table marked local-only (and not insert-only). */
    static createLocalOnly(options) {
        return new Table({ ...options, localOnly: true, insertOnly: false });
    }
    /** Creates a table marked insert-only (and not local-only). */
    static createInsertOnly(options) {
        return new Table({ ...options, localOnly: false, insertOnly: true });
    }
    /**
     * Create a table.
     * @deprecated This was only included for TableV2 and is no longer necessary.
     * Prefer to use new Table() directly.
     *
     * TODO remove in the next major release.
     */
    static createTable(name, table) {
        return new Table({
            name,
            columns: table.columns,
            indexes: table.indexes,
            localOnly: table.options.localOnly,
            insertOnly: table.options.insertOnly,
            viewName: table.options.viewName
        });
    }
    /**
     * @param optionsOrColumns - Either a V1 options object (detected by an
     *   array-valued `columns` property) or a V2 column map.
     * @param v2Options - Extra options, only used with the V2 column-map form.
     */
    constructor(optionsOrColumns, v2Options) {
        if (this.isTableV1(optionsOrColumns)) {
            this.initTableV1(optionsOrColumns);
        }
        else {
            this.initTableV2(optionsOrColumns, v2Options);
        }
    }
    /** Returns a copy of this table with a different name. */
    copyWithName(name) {
        return new Table({
            ...this.options,
            name
        });
    }
    // V1 options carry `columns` as an array; V2 passes a plain column map.
    isTableV1(arg) {
        return 'columns' in arg && Array.isArray(arg.columns);
    }
    // Normalizes V1 options: ensures `indexes` is an array, then fills defaults.
    initTableV1(options) {
        this.options = {
            ...options,
            indexes: options.indexes || []
        };
        this.applyDefaultOptions();
    }
    // Converts a V2 column map (and `-prefixed` descending index columns)
    // into Column/Index/IndexedColumn instances, then fills defaults.
    initTableV2(columns, options) {
        const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
        const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
            name,
            columns: columnNames.map((name) => new IndexedColumn({
                // A leading '-' marks a descending column; strip it for the name.
                name: name.replace(/^-/, ''),
                ascending: !name.startsWith('-')
            }))
        }));
        this.options = {
            name: '',
            columns: convertedColumns,
            indexes: convertedIndexes,
            viewName: options?.viewName,
            insertOnly: options?.insertOnly,
            localOnly: options?.localOnly,
            trackPrevious: options?.trackPrevious,
            trackMetadata: options?.trackMetadata,
            ignoreEmptyUpdates: options?.ignoreEmptyUpdates
        };
        this.applyDefaultOptions();
        this._mappedColumns = columns;
    }
    // Fills any still-nullish flags from DEFAULT_TABLE_OPTIONS.
    applyDefaultOptions() {
        this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
        this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
        this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
        this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
        this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
    }
    /** The table name. */
    get name() {
        return this.options.name;
    }
    /** Explicit view-name override, if one was provided. */
    get viewNameOverride() {
        return this.options.viewName;
    }
    /** Effective view name: the override when set, otherwise the table name. */
    get viewName() {
        return this.viewNameOverride ?? this.name;
    }
    /** The table's columns. */
    get columns() {
        return this.options.columns;
    }
    /**
     * Column-name → `{ type }` map. Returns the original V2 map when this
     * table was built from one, otherwise derives it from `columns`
     * (missing types default to TEXT).
     */
    get columnMap() {
        return (this._mappedColumns ??
            this.columns.reduce((hash, column) => {
                hash[column.name] = { type: column.type ?? exports.ColumnType.TEXT };
                return hash;
            }, {}));
    }
    /** The table's indexes (never null). */
    get indexes() {
        return this.options.indexes ?? [];
    }
    get localOnly() {
        return this.options.localOnly;
    }
    get insertOnly() {
        return this.options.insertOnly;
    }
    get trackPrevious() {
        return this.options.trackPrevious;
    }
    get trackMetadata() {
        return this.options.trackMetadata;
    }
    get ignoreEmptyUpdates() {
        return this.options.ignoreEmptyUpdates;
    }
    /** Underlying storage-table name; local-only tables use a distinct prefix. */
    get internalName() {
        if (this.options.localOnly) {
            return `ps_data_local__${this.name}`;
        }
        return `ps_data__${this.name}`;
    }
    /** True when neither the name nor the view-name override contains invalid characters. */
    get validName() {
        if (InvalidSQLCharacters.test(this.name)) {
            return false;
        }
        if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
            return false;
        }
        return true;
    }
    /**
     * Validates names, column count, flag combinations, and uniqueness of
     * columns and indexes. Throws an Error describing the first violation.
     */
    validate() {
        if (InvalidSQLCharacters.test(this.name)) {
            throw new Error(`Invalid characters in table name: ${this.name}`);
        }
        if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
            throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
        }
        if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
            throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
        }
        if (this.trackMetadata && this.localOnly) {
            throw new Error(`Can't include metadata for local-only tables.`);
        }
        if (this.trackPrevious != false && this.localOnly) {
            throw new Error(`Can't include old values for local-only tables.`);
        }
        const columnNames = new Set();
        // 'id' is reserved: it is added implicitly and may not be declared.
        columnNames.add('id');
        for (const column of this.columns) {
            const { name: columnName } = column;
            if (column.name === 'id') {
                throw new Error(`An id column is automatically added, custom id columns are not supported`);
            }
            if (columnNames.has(columnName)) {
                throw new Error(`Duplicate column ${columnName}`);
            }
            if (InvalidSQLCharacters.test(columnName)) {
                throw new Error(`Invalid characters in column name: ${column.name}`);
            }
            columnNames.add(columnName);
        }
        const indexNames = new Set();
        for (const index of this.indexes) {
            if (indexNames.has(index.name)) {
                throw new Error(`Duplicate index ${index.name}`);
            }
            if (InvalidSQLCharacters.test(index.name)) {
                throw new Error(`Invalid characters in index name: ${index.name}`);
            }
            // Every indexed column must exist on the table.
            for (const column of index.columns) {
                if (!columnNames.has(column.name)) {
                    throw new Error(`Column ${column.name} not found for index ${index.name}`);
                }
            }
            indexNames.add(index.name);
        }
    }
    /** Serializes the table definition to the wire/schema JSON format. */
    toJSON() {
        const trackPrevious = this.trackPrevious;
        return {
            name: this.name,
            view_name: this.viewName,
            local_only: this.localOnly,
            insert_only: this.insertOnly,
            // trackPrevious may be a boolean or an options object with `columns`.
            include_old: trackPrevious && (trackPrevious.columns ?? true),
            include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
            include_metadata: this.trackMetadata,
            ignore_empty_update: this.ignoreEmptyUpdates,
            columns: this.columns.map((c) => c.toJSON()),
            indexes: this.indexes.map((e) => e.toJSON(this))
        };
    }
}
312
+
313
// Default table/view name for attachment bookkeeping records.
const ATTACHMENT_TABLE = 'attachments';
/**
 * Converts a raw SQL row into an AttachmentRecord, mapping snake_case
 * columns to camelCase fields and coercing `has_synced` (0/1) to a boolean.
 *
 * @param row - The database row object
 * @returns The corresponding AttachmentRecord
 *
 * @experimental
 */
function attachmentFromSql(row) {
    const { id, filename, size, timestamp, state } = row;
    return {
        id,
        filename,
        localUri: row.local_uri,
        size,
        mediaType: row.media_type,
        timestamp,
        metaData: row.meta_data,
        hasSynced: row.has_synced === 1,
        state
    };
}
335
/**
 * AttachmentState represents the current synchronization state of an attachment.
 *
 * @experimental
 */
exports.AttachmentState = void 0;
(function (AttachmentState) {
    // Local attachment waiting to be uploaded to remote storage
    AttachmentState[AttachmentState["QUEUED_UPLOAD"] = 0] = "QUEUED_UPLOAD";
    // Remote attachment waiting to be downloaded to local storage
    AttachmentState[AttachmentState["QUEUED_DOWNLOAD"] = 1] = "QUEUED_DOWNLOAD";
    // Attachment queued for deletion from both remote and local storage
    AttachmentState[AttachmentState["QUEUED_DELETE"] = 2] = "QUEUED_DELETE";
    // Attachment is in sync between local and remote storage
    AttachmentState[AttachmentState["SYNCED"] = 3] = "SYNCED";
    AttachmentState[AttachmentState["ARCHIVED"] = 4] = "ARCHIVED"; // Attachment has been orphaned, i.e. the associated record has been deleted
})(exports.AttachmentState || (exports.AttachmentState = {}));
348
/**
 * AttachmentTable defines the schema for the attachment queue table.
 *
 * Always created as local-only (never insert-only); the view name defaults
 * to 'attachments' unless overridden via options.
 *
 * @internal
 */
class AttachmentTable extends Table {
    constructor(options) {
        super({
            filename: column.text,
            local_uri: column.text,
            timestamp: column.integer,
            size: column.integer,
            media_type: column.text,
            state: column.integer, // Corresponds to AttachmentState
            has_synced: column.integer, // 0/1 flag, mapped to a boolean by attachmentFromSql
            meta_data: column.text
        }, {
            ...options,
            viewName: options?.viewName ?? ATTACHMENT_TABLE,
            // Force these flags regardless of caller-supplied options.
            localOnly: true,
            insertOnly: false
        });
    }
}
372
+
373
/**
 * AttachmentContext provides database operations for managing attachment records.
 *
 * Provides methods to query, insert, update, and delete attachment records with
 * proper transaction management through PowerSync.
 *
 * @internal
 */
class AttachmentContext {
    /** PowerSync database instance for executing queries */
    db;
    /** Name of the database table storing attachment records */
    tableName;
    /** Logger instance for diagnostic information */
    logger;
    /** Maximum number of archived attachments to keep before cleanup */
    archivedCacheLimit = 100;
    /**
     * Creates a new AttachmentContext instance.
     *
     * @param db - PowerSync database instance
     * @param tableName - Name of the table storing attachment records. Default: 'attachments'
     * @param logger - Logger instance for diagnostic output
     * @param archivedCacheLimit - Maximum archived records retained before cleanup. Default: 100
     */
    constructor(db, tableName = 'attachments', logger, archivedCacheLimit) {
        this.db = db;
        this.tableName = tableName;
        this.logger = logger;
        // Fall back to the declared default instead of clobbering it with
        // `undefined` when the caller omits the argument.
        this.archivedCacheLimit = archivedCacheLimit ?? 100;
    }
    /**
     * Retrieves all active attachments that require synchronization.
     * Active attachments include those queued for upload, download, or delete.
     * Results are ordered by timestamp in ascending order.
     *
     * @returns Promise resolving to an array of active attachment records
     */
    async getActiveAttachments() {
        const attachments = await this.db.getAll(
        /* sql */
        `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
        OR state = ?
        OR state = ?
      ORDER BY
        timestamp ASC
    `, [exports.AttachmentState.QUEUED_UPLOAD, exports.AttachmentState.QUEUED_DOWNLOAD, exports.AttachmentState.QUEUED_DELETE]);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Retrieves all archived attachments.
     *
     * Archived attachments are no longer referenced but haven't been permanently deleted.
     * These are candidates for cleanup operations to free up storage space.
     *
     * @returns Promise resolving to an array of archived attachment records
     */
    async getArchivedAttachments() {
        const attachments = await this.db.getAll(
        /* sql */
        `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
      ORDER BY
        timestamp ASC
    `, [exports.AttachmentState.ARCHIVED]);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Retrieves all attachment records regardless of state.
     * Results are ordered by timestamp in ascending order.
     *
     * @returns Promise resolving to an array of all attachment records
     */
    async getAttachments() {
        const attachments = await this.db.getAll(
        /* sql */
        `
      SELECT
        *
      FROM
        ${this.tableName}
      ORDER BY
        timestamp ASC
    `, []);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Inserts or updates an attachment record within an existing transaction.
     *
     * Performs an upsert operation (INSERT OR REPLACE). Must be called within
     * an active database transaction context.
     *
     * @param attachment - The attachment record to upsert
     * @param context - Active database transaction context
     */
    async upsertAttachment(attachment, context) {
        await context.execute(
        /* sql */
        `
      INSERT
      OR REPLACE INTO ${this.tableName} (
        id,
        filename,
        local_uri,
        size,
        media_type,
        timestamp,
        state,
        has_synced,
        meta_data
      )
      VALUES
        (?, ?, ?, ?, ?, ?, ?, ?, ?)
    `, [
            attachment.id,
            attachment.filename,
            attachment.localUri || null,
            attachment.size || null,
            attachment.mediaType || null,
            attachment.timestamp,
            attachment.state,
            attachment.hasSynced ? 1 : 0,
            attachment.metaData || null
        ]);
    }
    /**
     * Retrieves a single attachment record by its id.
     *
     * @param id - Unique identifier of the attachment
     * @returns Promise resolving to the record, or undefined when not found
     */
    async getAttachment(id) {
        const attachment = await this.db.get(
        /* sql */
        `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        id = ?
    `, [id]);
        return attachment ? attachmentFromSql(attachment) : undefined;
    }
    /**
     * Permanently deletes an attachment record from the database.
     *
     * This operation removes the attachment record but does not delete
     * the associated local or remote files. File deletion should be handled
     * separately through the appropriate storage adapters.
     *
     * @param attachmentId - Unique identifier of the attachment to delete
     */
    async deleteAttachment(attachmentId) {
        await this.db.writeTransaction((tx) => tx.execute(
        /* sql */
        `
      DELETE FROM ${this.tableName}
      WHERE
        id = ?
    `, [attachmentId]));
    }
    /**
     * Deletes every attachment record from the table, regardless of state.
     */
    async clearQueue() {
        await this.db.writeTransaction((tx) => tx.execute(/* sql */ ` DELETE FROM ${this.tableName} `));
    }
    /**
     * Deletes archived attachments beyond the configured cache limit.
     *
     * Selects up to 1000 archived records (newest first, skipping the first
     * `archivedCacheLimit` which are retained), passes them to the optional
     * callback (e.g. to remove local files first), then deletes their rows.
     *
     * @param callback - Invoked with the records about to be deleted
     * @returns Promise resolving to true when fewer than a full batch (1000)
     *   was deleted; false when nothing was deleted or more records may remain
     */
    async deleteArchivedAttachments(callback) {
        const limit = 1000;
        const results = await this.db.getAll(
        /* sql */
        `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
      ORDER BY
        timestamp DESC
      LIMIT
        ?
      OFFSET
        ?
    `, [exports.AttachmentState.ARCHIVED, limit, this.archivedCacheLimit]);
        const archivedAttachments = results.map(attachmentFromSql);
        if (archivedAttachments.length === 0)
            return false;
        await callback?.(archivedAttachments);
        this.logger.info(`Deleting ${archivedAttachments.length} archived attachments. Archived attachment exceeds cache archiveCacheLimit of ${this.archivedCacheLimit}.`);
        const ids = archivedAttachments.map((attachment) => attachment.id);
        // Pass the ids as a JSON array and expand with json_each to avoid
        // building a variable-length IN (...) placeholder list.
        await this.db.execute(
        /* sql */
        `
      DELETE FROM ${this.tableName}
      WHERE
        id IN (
          SELECT
            json_each.value
          FROM
            json_each (?)
        );
    `, [JSON.stringify(ids)]);
        this.logger.info(`Deleted ${archivedAttachments.length} archived attachments`);
        return archivedAttachments.length < limit;
    }
    /**
     * Saves multiple attachment records in a single transaction.
     *
     * All updates are saved in a single batch after processing.
     * If the attachments array is empty, no database operations are performed.
     *
     * @param attachments - Array of attachment records to save
     */
    async saveAttachments(attachments) {
        if (attachments.length === 0) {
            return;
        }
        await this.db.writeTransaction(async (tx) => {
            for (const attachment of attachments) {
                await this.upsertAttachment(attachment, tx);
            }
        });
    }
}
600
+
601
// Compiled TypeScript enum: listener event names emitted by watched queries.
exports.WatchedQueryListenerEvent = void 0;
(function (WatchedQueryListenerEvent) {
    WatchedQueryListenerEvent["ON_DATA"] = "onData";
    WatchedQueryListenerEvent["ON_ERROR"] = "onError";
    WatchedQueryListenerEvent["ON_STATE_CHANGE"] = "onStateChange";
    WatchedQueryListenerEvent["SETTINGS_WILL_UPDATE"] = "settingsWillUpdate";
    WatchedQueryListenerEvent["CLOSED"] = "closed";
})(exports.WatchedQueryListenerEvent || (exports.WatchedQueryListenerEvent = {}));
// Default throttle window (ms) between watched-query re-evaluations.
const DEFAULT_WATCH_THROTTLE_MS = 30;
// Baseline options applied when the caller does not supply their own.
const DEFAULT_WATCH_QUERY_OPTIONS = {
    throttleMs: DEFAULT_WATCH_THROTTLE_MS,
    reportFetching: true
};
614
+
615
/**
 * Orchestrates attachment synchronization between local and remote storage.
 * Handles uploads, downloads, deletions, and state transitions.
 *
 * @internal
 */
class SyncingService {
    attachmentService;
    localStorage;
    remoteStorage;
    logger;
    errorHandler;

    /**
     * @param attachmentService - Provides exclusive access to attachment CRUD
     * @param localStorage - Adapter for local file operations
     * @param remoteStorage - Adapter for remote file operations
     * @param logger - Logger for diagnostic output
     * @param errorHandler - Optional handler deciding whether failures retry
     */
    constructor(attachmentService, localStorage, remoteStorage, logger, errorHandler) {
        this.attachmentService = attachmentService;
        this.localStorage = localStorage;
        this.remoteStorage = remoteStorage;
        this.logger = logger;
        this.errorHandler = errorHandler;
    }

    /**
     * Processes attachments based on their state (upload, download, or delete).
     * All updates are saved in a single batch after processing.
     *
     * @param attachments - Array of attachment records to process
     * @param context - Attachment context for database operations
     * @returns Promise that resolves when all attachments have been processed and saved
     */
    async processAttachments(attachments, context) {
        const processed = [];
        for (const record of attachments) {
            if (record.state === exports.AttachmentState.QUEUED_UPLOAD) {
                processed.push(await this.uploadAttachment(record));
            }
            else if (record.state === exports.AttachmentState.QUEUED_DOWNLOAD) {
                processed.push(await this.downloadAttachment(record));
            }
            else if (record.state === exports.AttachmentState.QUEUED_DELETE) {
                processed.push(await this.deleteAttachment(record));
            }
            // Records in any other state are left untouched.
        }
        await context.saveAttachments(processed);
    }

    /**
     * Applies the error handler's retry decision for a failed operation:
     * retrying (the default) keeps the record unchanged so it is picked up
     * again; otherwise the record is archived.
     */
    resolveFailure(attachment, shouldRetry) {
        if (shouldRetry ?? true) {
            return attachment;
        }
        return {
            ...attachment,
            state: exports.AttachmentState.ARCHIVED
        };
    }

    /**
     * Uploads an attachment from local storage to remote storage.
     * On success, marks as SYNCED. On failure, defers to error handler or archives.
     *
     * @param attachment - The attachment record to upload
     * @returns Updated attachment record with new state
     */
    async uploadAttachment(attachment) {
        this.logger.info(`Uploading attachment ${attachment.filename}`);
        try {
            if (attachment.localUri == null) {
                throw new Error(`No localUri for attachment ${attachment.id}`);
            }
            const payload = await this.localStorage.readFile(attachment.localUri);
            await this.remoteStorage.uploadFile(payload, attachment);
            return {
                ...attachment,
                state: exports.AttachmentState.SYNCED,
                hasSynced: true
            };
        }
        catch (error) {
            return this.resolveFailure(attachment, await this.errorHandler?.onUploadError(attachment, error));
        }
    }

    /**
     * Downloads an attachment from remote storage and saves it locally.
     * On success, marks as SYNCED with the new local URI. On failure,
     * defers to error handler or archives.
     *
     * @param attachment - The attachment record to download
     * @returns Updated attachment record with local URI and new state
     */
    async downloadAttachment(attachment) {
        this.logger.info(`Downloading attachment ${attachment.filename}`);
        try {
            const fileData = await this.remoteStorage.downloadFile(attachment);
            const targetUri = this.localStorage.getLocalUri(attachment.filename);
            await this.localStorage.saveFile(targetUri, fileData);
            return {
                ...attachment,
                state: exports.AttachmentState.SYNCED,
                localUri: targetUri,
                hasSynced: true
            };
        }
        catch (error) {
            return this.resolveFailure(attachment, await this.errorHandler?.onDownloadError(attachment, error));
        }
    }

    /**
     * Deletes an attachment from remote storage, local storage (when a local
     * file exists), and the attachment table. On failure, defers to error
     * handler or archives.
     *
     * @param attachment - The attachment record to delete
     * @returns Updated attachment record (archived in all terminal cases)
     */
    async deleteAttachment(attachment) {
        try {
            await this.remoteStorage.deleteFile(attachment);
            if (attachment.localUri) {
                await this.localStorage.deleteFile(attachment.localUri);
            }
            await this.attachmentService.withContext(async (ctx) => {
                await ctx.deleteAttachment(attachment.id);
            });
            return {
                ...attachment,
                state: exports.AttachmentState.ARCHIVED
            };
        }
        catch (error) {
            return this.resolveFailure(attachment, await this.errorHandler?.onDeleteError(attachment, error));
        }
    }

    /**
     * Performs cleanup of archived attachments by removing their local files and records.
     * Errors during local file deletion are logged but do not prevent record deletion.
     */
    async deleteArchivedAttachments(context) {
        return await context.deleteArchivedAttachments(async (archivedAttachments) => {
            for (const attachment of archivedAttachments) {
                if (!attachment.localUri) {
                    continue;
                }
                try {
                    await this.localStorage.deleteFile(attachment.localUri);
                }
                catch (error) {
                    this.logger.error('Error deleting local file for archived attachment', error);
                }
            }
        });
    }
}
779
+
780
/**
 * Wrapper for async-mutex runExclusive, which allows for a timeout on each exclusive lock.
 *
 * @param mutex - async-mutex Mutex (or compatible object exposing runExclusive)
 * @param callback - Work to run while holding the lock
 * @param options - Optional `{ timeoutMs }`; if the lock is not acquired within
 *   that many milliseconds, the returned promise rejects with a timeout error
 * @returns The callback's resolved value
 */
async function mutexRunExclusive(mutex, callback, options) {
    return new Promise((resolve, reject) => {
        const timeout = options?.timeoutMs;
        let timedOut = false;
        const timeoutId = timeout
            ? setTimeout(() => {
                timedOut = true;
                reject(new Error('Timeout waiting for lock'));
            }, timeout)
            : undefined;
        mutex
            .runExclusive(async () => {
            if (timeoutId) {
                clearTimeout(timeoutId);
            }
            // The lock was only acquired after the timeout fired; the promise
            // has already rejected, so skip the callback entirely.
            if (timedOut)
                return;
            try {
                resolve(await callback());
            }
            catch (ex) {
                reject(ex);
            }
        })
            // Propagate lock-acquisition failures (e.g. mutex.cancel()) instead
            // of leaving a floating promise and an unhandled rejection. This is
            // a no-op when the promise has already settled.
            .catch(reject);
    });
}
808
+
809
/**
 * Service for querying and watching attachment records in the database.
 *
 * @internal
 */
class AttachmentService {
    /** PowerSync database instance used for queries and watches */
    db;
    /** Logger instance for diagnostic information */
    logger;
    /** Name of the database table storing attachment records */
    tableName;
    /** Serializes access to the shared context (see withContext) */
    mutex = new asyncMutex.Mutex();
    /** Shared AttachmentContext performing the actual record operations */
    context;
    /**
     * @param db - PowerSync database instance
     * @param logger - Logger for diagnostic output
     * @param tableName - Attachment table name. Default: 'attachments'
     * @param archivedCacheLimit - Max archived records kept before cleanup. Default: 100
     */
    constructor(db, logger, tableName = 'attachments', archivedCacheLimit = 100) {
        this.db = db;
        this.logger = logger;
        this.tableName = tableName;
        this.context = new AttachmentContext(db, tableName, logger, archivedCacheLimit);
    }
    /**
     * Creates a differential watch query for active attachments requiring synchronization.
     * @returns Watch query that emits changes for queued uploads, downloads, and deletes
     */
    watchActiveAttachments({ throttleMs } = {}) {
        this.logger.info('Watching active attachments...');
        const watch = this.db
            .query({
            sql: /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
          OR state = ?
          OR state = ?
        ORDER BY
          timestamp ASC
      `,
            parameters: [exports.AttachmentState.QUEUED_UPLOAD, exports.AttachmentState.QUEUED_DOWNLOAD, exports.AttachmentState.QUEUED_DELETE]
        })
            .differentialWatch({ throttleMs });
        return watch;
    }
    /**
     * Executes a callback with exclusive access to the attachment context.
     */
    async withContext(callback) {
        return mutexRunExclusive(this.mutex, async () => {
            return callback(this.context);
        });
    }
}
860
+
861
+ /**
862
+ * AttachmentQueue manages the lifecycle and synchronization of attachments
863
+ * between local and remote storage.
864
+ * Provides automatic synchronization, upload/download queuing, attachment monitoring,
865
+ * verification and repair of local files, and cleanup of archived attachments.
866
+ *
867
+ * @experimental
868
+ * @alpha This is currently experimental and may change without a major version bump.
869
+ */
870
+ class AttachmentQueue {
871
+ /** Timer for periodic synchronization operations */
872
+ periodicSyncTimer;
873
+ /** Service for synchronizing attachments between local and remote storage */
874
+ syncingService;
875
+ /** Adapter for local file storage operations */
876
+ localStorage;
877
+ /** Adapter for remote file storage operations */
878
+ remoteStorage;
879
+ /**
880
+ * Callback function to watch for changes in attachment references in your data model.
881
+ *
882
+ * This should be implemented by the user of AttachmentQueue to monitor changes in your application's
883
+ * data that reference attachments. When attachments are added, removed, or modified,
884
+ * this callback should trigger the onUpdate function with the current set of attachments.
885
+ */
886
+ watchAttachments;
887
+ /** Name of the database table storing attachment records */
888
+ tableName;
889
+ /** Logger instance for diagnostic information */
890
+ logger;
891
+ /** Interval in milliseconds between periodic sync operations. Default: 30000 (30 seconds) */
892
+ syncIntervalMs = 30 * 1000;
893
+ /** Duration in milliseconds to throttle sync operations */
894
+ syncThrottleDuration;
895
+ /** Whether to automatically download remote attachments. Default: true */
896
+ downloadAttachments = true;
897
+ /** Maximum number of archived attachments to keep before cleanup. Default: 100 */
898
+ archivedCacheLimit;
899
+ /** Service for managing attachment-related database operations */
900
+ attachmentService;
901
+ /** PowerSync database instance */
902
+ db;
903
+ /** Cleanup function for status change listener */
904
+ statusListenerDispose;
905
+ watchActiveAttachments;
906
+ watchAttachmentsAbortController;
907
+ /**
908
+ * Creates a new AttachmentQueue instance.
909
+ *
910
+ * @param options - Configuration options
911
+ * @param options.db - PowerSync database instance
912
+ * @param options.remoteStorage - Remote storage adapter for upload/download operations
913
+ * @param options.localStorage - Local storage adapter for file persistence
914
+ * @param options.watchAttachments - Callback for monitoring attachment changes in your data model
915
+ * @param options.tableName - Name of the table to store attachment records. Default: 'ps_attachment_queue'
916
+ * @param options.logger - Logger instance. Defaults to db.logger
917
+ * @param options.syncIntervalMs - Interval between automatic syncs in milliseconds. Default: 30000
918
+ * @param options.syncThrottleDuration - Throttle duration for sync operations in milliseconds. Default: 1000
919
+ * @param options.downloadAttachments - Whether to automatically download remote attachments. Default: true
920
+ * @param options.archivedCacheLimit - Maximum archived attachments before cleanup. Default: 100
921
+ */
922
+ constructor({ db, localStorage, remoteStorage, watchAttachments, logger, tableName = ATTACHMENT_TABLE, syncIntervalMs = 30 * 1000, syncThrottleDuration = DEFAULT_WATCH_THROTTLE_MS, downloadAttachments = true, archivedCacheLimit = 100, errorHandler }) {
923
+ this.db = db;
924
+ this.remoteStorage = remoteStorage;
925
+ this.localStorage = localStorage;
926
+ this.watchAttachments = watchAttachments;
927
+ this.tableName = tableName;
928
+ this.syncIntervalMs = syncIntervalMs;
929
+ this.syncThrottleDuration = syncThrottleDuration;
930
+ this.archivedCacheLimit = archivedCacheLimit;
931
+ this.downloadAttachments = downloadAttachments;
932
+ this.logger = logger ?? db.logger;
933
+ this.attachmentService = new AttachmentService(db, this.logger, tableName, archivedCacheLimit);
934
+ this.syncingService = new SyncingService(this.attachmentService, localStorage, remoteStorage, this.logger, errorHandler);
935
+ }
936
+ /**
937
+ * Generates a new attachment ID using a SQLite UUID function.
938
+ *
939
+ * @returns Promise resolving to the new attachment ID
940
+ */
941
+ async generateAttachmentId() {
942
+ return this.db.get('SELECT uuid() as id').then((row) => row.id);
943
+ }
944
+ /**
945
+ * Starts the attachment synchronization process.
946
+ *
947
+ * This method:
948
+ * - Stops any existing sync operations
949
+ * - Sets up periodic synchronization based on syncIntervalMs
950
+ * - Registers listeners for active attachment changes
951
+ * - Processes watched attachments to queue uploads/downloads
952
+ * - Handles state transitions for archived and new attachments
953
+ */
954
+ async startSync() {
955
+ await this.stopSync();
956
+ this.watchActiveAttachments = this.attachmentService.watchActiveAttachments({
957
+ throttleMs: this.syncThrottleDuration
958
+ });
959
+ // immediately invoke the sync storage to initialize local storage
960
+ await this.localStorage.initialize();
961
+ await this.verifyAttachments();
962
+ // Sync storage periodically
963
+ this.periodicSyncTimer = setInterval(async () => {
964
+ await this.syncStorage();
965
+ }, this.syncIntervalMs);
966
+ // Sync storage when there is a change in active attachments
967
+ this.watchActiveAttachments.registerListener({
968
+ onDiff: async () => {
969
+ await this.syncStorage();
970
+ }
971
+ });
972
+ this.statusListenerDispose = this.db.registerListener({
973
+ statusChanged: (status) => {
974
+ if (status.connected) {
975
+ // Device came online, process attachments immediately
976
+ this.syncStorage().catch((error) => {
977
+ this.logger.error('Error syncing storage on connection:', error);
978
+ });
979
+ }
980
+ }
981
+ });
982
+ this.watchAttachmentsAbortController = new AbortController();
983
+ const signal = this.watchAttachmentsAbortController.signal;
984
+ // Process attachments when there is a change in watched attachments
985
+ this.watchAttachments(async (watchedAttachments) => {
986
+ // Skip processing if sync has been stopped
987
+ if (signal.aborted) {
988
+ return;
989
+ }
990
+ await this.attachmentService.withContext(async (ctx) => {
991
+ // Need to get all the attachments which are tracked in the DB.
992
+ // We might need to restore an archived attachment.
993
+ const currentAttachments = await ctx.getAttachments();
994
+ const attachmentUpdates = [];
995
+ for (const watchedAttachment of watchedAttachments) {
996
+ const existingQueueItem = currentAttachments.find((a) => a.id === watchedAttachment.id);
997
+ if (!existingQueueItem) {
998
+ // Item is watched but not in the queue yet. Need to add it.
999
+ if (!this.downloadAttachments) {
1000
+ continue;
1001
+ }
1002
+ const filename = watchedAttachment.filename ?? `${watchedAttachment.id}.${watchedAttachment.fileExtension}`;
1003
+ attachmentUpdates.push({
1004
+ id: watchedAttachment.id,
1005
+ filename,
1006
+ state: exports.AttachmentState.QUEUED_DOWNLOAD,
1007
+ hasSynced: false,
1008
+ metaData: watchedAttachment.metaData,
1009
+ timestamp: new Date().getTime()
1010
+ });
1011
+ continue;
1012
+ }
1013
+ if (existingQueueItem.state === exports.AttachmentState.ARCHIVED) {
1014
+ // The attachment is present again. Need to queue it for sync.
1015
+ // We might be able to optimize this in future
1016
+ if (existingQueueItem.hasSynced === true) {
1017
+ // No remote action required, we can restore the record (avoids deletion)
1018
+ attachmentUpdates.push({
1019
+ ...existingQueueItem,
1020
+ state: exports.AttachmentState.SYNCED
1021
+ });
1022
+ }
1023
+ else {
1024
+ // The localURI should be set if the record was meant to be uploaded
1025
+ // and hasSynced is false then
1026
+ // it must be an upload operation
1027
+ const newState = existingQueueItem.localUri == null ? exports.AttachmentState.QUEUED_DOWNLOAD : exports.AttachmentState.QUEUED_UPLOAD;
1028
+ attachmentUpdates.push({
1029
+ ...existingQueueItem,
1030
+ state: newState
1031
+ });
1032
+ }
1033
+ }
1034
+ }
1035
+ for (const attachment of currentAttachments) {
1036
+ const notInWatchedItems = watchedAttachments.find((i) => i.id === attachment.id) == null;
1037
+ if (notInWatchedItems) {
1038
+ switch (attachment.state) {
1039
+ case exports.AttachmentState.QUEUED_DELETE:
1040
+ case exports.AttachmentState.QUEUED_UPLOAD:
1041
+ // Only archive if it has synced
1042
+ if (attachment.hasSynced === true) {
1043
+ attachmentUpdates.push({
1044
+ ...attachment,
1045
+ state: exports.AttachmentState.ARCHIVED
1046
+ });
1047
+ }
1048
+ break;
1049
+ default:
1050
+ // Archive other states such as QUEUED_DOWNLOAD
1051
+ attachmentUpdates.push({
1052
+ ...attachment,
1053
+ state: exports.AttachmentState.ARCHIVED
1054
+ });
1055
+ }
1056
+ }
1057
+ }
1058
+ if (attachmentUpdates.length > 0) {
1059
+ await ctx.saveAttachments(attachmentUpdates);
1060
+ }
1061
+ });
1062
+ }, signal);
1063
+ }
1064
+ /**
1065
+ * Synchronizes all active attachments between local and remote storage.
1066
+ *
1067
+ * This is called automatically at regular intervals when sync is started,
1068
+ * but can also be called manually to trigger an immediate sync.
1069
+ */
1070
+ async syncStorage() {
1071
+ await this.attachmentService.withContext(async (ctx) => {
1072
+ const activeAttachments = await ctx.getActiveAttachments();
1073
+ await this.localStorage.initialize();
1074
+ await this.syncingService.processAttachments(activeAttachments, ctx);
1075
+ await this.syncingService.deleteArchivedAttachments(ctx);
1076
+ });
1077
+ }
1078
+ /**
1079
+ * Stops the attachment synchronization process.
1080
+ *
1081
+ * Clears the periodic sync timer and closes all active attachment watchers.
1082
+ */
1083
+ async stopSync() {
1084
+ clearInterval(this.periodicSyncTimer);
1085
+ this.periodicSyncTimer = undefined;
1086
+ if (this.watchActiveAttachments)
1087
+ await this.watchActiveAttachments.close();
1088
+ if (this.watchAttachmentsAbortController) {
1089
+ this.watchAttachmentsAbortController.abort();
1090
+ }
1091
+ if (this.statusListenerDispose) {
1092
+ this.statusListenerDispose();
1093
+ this.statusListenerDispose = undefined;
1094
+ }
1095
+ }
1096
+ /**
1097
+ * Saves a file to local storage and queues it for upload to remote storage.
1098
+ *
1099
+ * @param options - File save options
1100
+ * @param options.data - The file data as ArrayBuffer, Blob, or base64 string
1101
+ * @param options.fileExtension - File extension (e.g., 'jpg', 'pdf')
1102
+ * @param options.mediaType - MIME type of the file (e.g., 'image/jpeg')
1103
+ * @param options.metaData - Optional metadata to associate with the attachment
1104
+ * @param options.id - Optional custom ID. If not provided, a UUID will be generated
1105
+ * @param options.updateHook - Optional callback to execute additional database operations
1106
+ * within the same transaction as the attachment creation
1107
+ * @returns Promise resolving to the created attachment record
1108
+ */
1109
+ async saveFile({ data, fileExtension, mediaType, metaData, id, updateHook }) {
1110
+ const resolvedId = id ?? (await this.generateAttachmentId());
1111
+ const filename = `${resolvedId}.${fileExtension}`;
1112
+ const localUri = this.localStorage.getLocalUri(filename);
1113
+ const size = await this.localStorage.saveFile(localUri, data);
1114
+ const attachment = {
1115
+ id: resolvedId,
1116
+ filename,
1117
+ mediaType,
1118
+ localUri,
1119
+ state: exports.AttachmentState.QUEUED_UPLOAD,
1120
+ hasSynced: false,
1121
+ size,
1122
+ timestamp: new Date().getTime(),
1123
+ metaData
1124
+ };
1125
+ await this.attachmentService.withContext(async (ctx) => {
1126
+ await ctx.db.writeTransaction(async (tx) => {
1127
+ await updateHook?.(tx, attachment);
1128
+ await ctx.upsertAttachment(attachment, tx);
1129
+ });
1130
+ });
1131
+ return attachment;
1132
+ }
1133
+ async deleteFile({ id, updateHook }) {
1134
+ await this.attachmentService.withContext(async (ctx) => {
1135
+ const attachment = await ctx.getAttachment(id);
1136
+ if (!attachment) {
1137
+ throw new Error(`Attachment with id ${id} not found`);
1138
+ }
1139
+ await ctx.db.writeTransaction(async (tx) => {
1140
+ await updateHook?.(tx, attachment);
1141
+ await ctx.upsertAttachment({
1142
+ ...attachment,
1143
+ state: exports.AttachmentState.QUEUED_DELETE,
1144
+ hasSynced: false
1145
+ }, tx);
1146
+ });
1147
+ });
1148
+ }
1149
+ async expireCache() {
1150
+ let isDone = false;
1151
+ while (!isDone) {
1152
+ await this.attachmentService.withContext(async (ctx) => {
1153
+ isDone = await this.syncingService.deleteArchivedAttachments(ctx);
1154
+ });
1155
+ }
1156
+ }
1157
+ async clearQueue() {
1158
+ await this.attachmentService.withContext(async (ctx) => {
1159
+ await ctx.clearQueue();
1160
+ });
1161
+ await this.localStorage.clear();
1162
+ }
1163
+ /**
1164
+ * Verifies the integrity of all attachment records and repairs inconsistencies.
1165
+ *
1166
+ * This method checks each attachment record against the local filesystem and:
1167
+ * - Updates localUri if the file exists at a different path
1168
+ * - Archives attachments with missing local files that haven't been uploaded
1169
+ * - Requeues synced attachments for download if their local files are missing
1170
+ */
1171
+ async verifyAttachments() {
1172
+ await this.attachmentService.withContext(async (ctx) => {
1173
+ const attachments = await ctx.getAttachments();
1174
+ const updates = [];
1175
+ for (const attachment of attachments) {
1176
+ if (attachment.localUri == null) {
1177
+ continue;
1178
+ }
1179
+ const exists = await this.localStorage.fileExists(attachment.localUri);
1180
+ if (exists) {
1181
+ // The file exists, this is correct
1182
+ continue;
1183
+ }
1184
+ const newLocalUri = this.localStorage.getLocalUri(attachment.filename);
1185
+ const newExists = await this.localStorage.fileExists(newLocalUri);
1186
+ if (newExists) {
1187
+ // The file exists locally but the localUri is broken, we update it.
1188
+ updates.push({
1189
+ ...attachment,
1190
+ localUri: newLocalUri
1191
+ });
1192
+ }
1193
+ else {
1194
+ // the file doesn't exist locally.
1195
+ if (attachment.state === exports.AttachmentState.SYNCED) {
1196
+ // the file has been successfully synced to remote storage but is missing
1197
+ // we download it again
1198
+ updates.push({
1199
+ ...attachment,
1200
+ state: exports.AttachmentState.QUEUED_DOWNLOAD,
1201
+ localUri: undefined
1202
+ });
1203
+ }
1204
+ else {
1205
+ // the file wasn't successfully synced to remote storage, we archive it
1206
+ updates.push({
1207
+ ...attachment,
1208
+ state: exports.AttachmentState.ARCHIVED,
1209
+ localUri: undefined // Clears the value
1210
+ });
1211
+ }
1212
+ }
1213
+ }
1214
+ await ctx.saveAttachments(updates);
1215
+ });
1216
+ }
1217
+ }
1218
+
1219
+ exports.EncodingType = void 0;
1220
+ (function (EncodingType) {
1221
+ EncodingType["UTF8"] = "utf8";
1222
+ EncodingType["Base64"] = "base64";
1223
+ })(exports.EncodingType || (exports.EncodingType = {}));
1224
+
7
1225
  function getDefaultExportFromCjs (x) {
8
1226
  return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
9
1227
  }
@@ -1167,20 +2385,6 @@ class MetaBaseObserver extends BaseObserver {
1167
2385
  }
1168
2386
  }
1169
2387
 
1170
- exports.WatchedQueryListenerEvent = void 0;
1171
- (function (WatchedQueryListenerEvent) {
1172
- WatchedQueryListenerEvent["ON_DATA"] = "onData";
1173
- WatchedQueryListenerEvent["ON_ERROR"] = "onError";
1174
- WatchedQueryListenerEvent["ON_STATE_CHANGE"] = "onStateChange";
1175
- WatchedQueryListenerEvent["SETTINGS_WILL_UPDATE"] = "settingsWillUpdate";
1176
- WatchedQueryListenerEvent["CLOSED"] = "closed";
1177
- })(exports.WatchedQueryListenerEvent || (exports.WatchedQueryListenerEvent = {}));
1178
- const DEFAULT_WATCH_THROTTLE_MS = 30;
1179
- const DEFAULT_WATCH_QUERY_OPTIONS = {
1180
- throttleMs: DEFAULT_WATCH_THROTTLE_MS,
1181
- reportFetching: true
1182
- };
1183
-
1184
2388
  /**
1185
2389
  * Performs underlying watching and yields a stream of results.
1186
2390
  * @internal
@@ -6722,7 +7926,7 @@ function requireDist () {
6722
7926
 
6723
7927
  var distExports = requireDist();
6724
7928
 
6725
- var version = "1.45.0";
7929
+ var version = "1.47.0";
6726
7930
  var PACKAGE = {
6727
7931
  version: version};
6728
7932
 
@@ -6793,6 +7997,16 @@ class DataStream extends BaseObserver {
6793
7997
  * @returns a Data payload or Null if the stream closed.
6794
7998
  */
6795
7999
  async read() {
8000
+ if (this.closed) {
8001
+ return null;
8002
+ }
8003
+ // Wait for any pending processing to complete first.
8004
+ // This ensures we register our listener before calling processQueue(),
8005
+ // avoiding a race where processQueue() sees no reader and returns early.
8006
+ if (this.processingPromise) {
8007
+ await this.processingPromise;
8008
+ }
8009
+ // Re-check after await - stream may have closed while we were waiting
6796
8010
  if (this.closed) {
6797
8011
  return null;
6798
8012
  }
@@ -6832,7 +8046,7 @@ class DataStream extends BaseObserver {
6832
8046
  }
6833
8047
  const promise = (this.processingPromise = this._processQueue());
6834
8048
  promise.finally(() => {
6835
- return (this.processingPromise = null);
8049
+ this.processingPromise = null;
6836
8050
  });
6837
8051
  return promise;
6838
8052
  }
@@ -6864,7 +8078,6 @@ class DataStream extends BaseObserver {
6864
8078
  this.notifyDataAdded = null;
6865
8079
  }
6866
8080
  if (this.dataQueue.length > 0) {
6867
- // Next tick
6868
8081
  setTimeout(() => this.processQueue());
6869
8082
  }
6870
8083
  }
@@ -7484,7 +8697,11 @@ class AbstractRemote {
7484
8697
  };
7485
8698
  const stream = new DataStream({
7486
8699
  logger: this.logger,
7487
- mapLine: mapLine
8700
+ mapLine: mapLine,
8701
+ pressure: {
8702
+ highWaterMark: 20,
8703
+ lowWaterMark: 10
8704
+ }
7488
8705
  });
7489
8706
  abortSignal?.addEventListener('abort', () => {
7490
8707
  closeReader();
@@ -7492,42 +8709,47 @@ class AbstractRemote {
7492
8709
  });
7493
8710
  const decoder = this.createTextDecoder();
7494
8711
  let buffer = '';
7495
- const l = stream.registerListener({
7496
- lowWater: async () => {
7497
- if (stream.closed || abortSignal?.aborted || readerReleased) {
8712
+ const consumeStream = async () => {
8713
+ while (!stream.closed && !abortSignal?.aborted && !readerReleased) {
8714
+ const { done, value } = await reader.read();
8715
+ if (done) {
8716
+ const remaining = buffer.trim();
8717
+ if (remaining.length != 0) {
8718
+ stream.enqueueData(remaining);
8719
+ }
8720
+ stream.close();
8721
+ await closeReader();
7498
8722
  return;
7499
8723
  }
7500
- try {
7501
- let didCompleteLine = false;
7502
- while (!didCompleteLine) {
7503
- const { done, value } = await reader.read();
7504
- if (done) {
7505
- const remaining = buffer.trim();
7506
- if (remaining.length != 0) {
7507
- stream.enqueueData(remaining);
7508
- }
7509
- stream.close();
7510
- await closeReader();
7511
- return;
7512
- }
7513
- const data = decoder.decode(value, { stream: true });
7514
- buffer += data;
7515
- const lines = buffer.split('\n');
7516
- for (var i = 0; i < lines.length - 1; i++) {
7517
- var l = lines[i].trim();
7518
- if (l.length > 0) {
7519
- stream.enqueueData(l);
7520
- didCompleteLine = true;
7521
- }
7522
- }
7523
- buffer = lines[lines.length - 1];
8724
+ const data = decoder.decode(value, { stream: true });
8725
+ buffer += data;
8726
+ const lines = buffer.split('\n');
8727
+ for (var i = 0; i < lines.length - 1; i++) {
8728
+ var l = lines[i].trim();
8729
+ if (l.length > 0) {
8730
+ stream.enqueueData(l);
7524
8731
  }
7525
8732
  }
7526
- catch (ex) {
7527
- stream.close();
7528
- throw ex;
8733
+ buffer = lines[lines.length - 1];
8734
+ // Implement backpressure by waiting for the low water mark to be reached
8735
+ if (stream.dataQueue.length > stream.highWatermark) {
8736
+ await new Promise((resolve) => {
8737
+ const dispose = stream.registerListener({
8738
+ lowWater: async () => {
8739
+ resolve();
8740
+ dispose();
8741
+ },
8742
+ closed: () => {
8743
+ resolve();
8744
+ dispose();
8745
+ }
8746
+ });
8747
+ });
7529
8748
  }
7530
- },
8749
+ }
8750
+ };
8751
+ consumeStream().catch(ex => this.logger.error('Error consuming stream', ex));
8752
+ const l = stream.registerListener({
7531
8753
  closed: () => {
7532
8754
  closeReader();
7533
8755
  l?.();
@@ -7606,18 +8828,17 @@ exports.SyncClientImplementation = void 0;
7606
8828
  *
7607
8829
  * This is the default option.
7608
8830
  *
7609
- * @deprecated Don't use {@link SyncClientImplementation.JAVASCRIPT} directly. Instead, use
7610
- * {@link DEFAULT_SYNC_CLIENT_IMPLEMENTATION} or omit the option. The explicit choice to use
7611
- * the JavaScript-based sync implementation will be removed from a future version of the SDK.
8831
+ * @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
8832
+ * the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
8833
+ * version of the PowerSync SDK.
7612
8834
  */
7613
8835
  SyncClientImplementation["JAVASCRIPT"] = "js";
7614
8836
  /**
7615
8837
  * This implementation offloads the sync line decoding and handling into the PowerSync
7616
8838
  * core extension.
7617
8839
  *
7618
- * @experimental
7619
- * While this implementation is more performant than {@link SyncClientImplementation.JAVASCRIPT},
7620
- * it has seen less real-world testing and is marked as __experimental__ at the moment.
8840
+ * This option is more performant than the {@link JAVASCRIPT} client, enabled by default and the
8841
+ * recommended client implementation for all apps.
7621
8842
  *
7622
8843
  * ## Compatibility warning
7623
8844
  *
@@ -7635,13 +8856,9 @@ exports.SyncClientImplementation = void 0;
7635
8856
  SyncClientImplementation["RUST"] = "rust";
7636
8857
  })(exports.SyncClientImplementation || (exports.SyncClientImplementation = {}));
7637
8858
  /**
7638
- * The default {@link SyncClientImplementation} to use.
7639
- *
7640
- * Please use this field instead of {@link SyncClientImplementation.JAVASCRIPT} directly. A future version
7641
- * of the PowerSync SDK will enable {@link SyncClientImplementation.RUST} by default and remove the JavaScript
7642
- * option.
8859
+ * The default {@link SyncClientImplementation} to use, {@link SyncClientImplementation.RUST}.
7643
8860
  */
7644
- const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = exports.SyncClientImplementation.JAVASCRIPT;
8861
+ const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = exports.SyncClientImplementation.RUST;
7645
8862
  const DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
7646
8863
  const DEFAULT_RETRY_DELAY_MS = 5000;
7647
8864
  const DEFAULT_STREAMING_SYNC_OPTIONS = {
@@ -8051,6 +9268,9 @@ The next upload iteration will be delayed.`);
8051
9268
  if (rawTables != null && rawTables.length) {
8052
9269
  this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
8053
9270
  }
9271
+ if (this.activeStreams.length) {
9272
+ this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
9273
+ }
8054
9274
  this.logger.debug('Streaming sync iteration started');
8055
9275
  this.options.adapter.startSession();
8056
9276
  let [req, bucketMap] = await this.collectLocalBucketState();
@@ -8566,6 +9786,27 @@ The next upload iteration will be delayed.`);
8566
9786
  }
8567
9787
  }
8568
9788
 
9789
+ const CLAIM_STORE = new Map();
9790
+ /**
9791
+ * @internal
9792
+ * @experimental
9793
+ */
9794
+ const MEMORY_TRIGGER_CLAIM_MANAGER = {
9795
+ async obtainClaim(identifier) {
9796
+ if (CLAIM_STORE.has(identifier)) {
9797
+ throw new Error(`A claim is already present for ${identifier}`);
9798
+ }
9799
+ const release = async () => {
9800
+ CLAIM_STORE.delete(identifier);
9801
+ };
9802
+ CLAIM_STORE.set(identifier, release);
9803
+ return release;
9804
+ },
9805
+ async checkClaim(identifier) {
9806
+ return CLAIM_STORE.has(identifier);
9807
+ }
9808
+ };
9809
+
8569
9810
  /**
8570
9811
  * SQLite operations to track changes for with {@link TriggerManager}
8571
9812
  * @experimental
@@ -8577,9 +9818,20 @@ exports.DiffTriggerOperation = void 0;
8577
9818
  DiffTriggerOperation["DELETE"] = "DELETE";
8578
9819
  })(exports.DiffTriggerOperation || (exports.DiffTriggerOperation = {}));
8579
9820
 
9821
+ const DEFAULT_TRIGGER_MANAGER_CONFIGURATION = {
9822
+ useStorageByDefault: false
9823
+ };
9824
+ const TRIGGER_CLEANUP_INTERVAL_MS = 120_000; // 2 minutes
9825
+ /**
9826
+ * @internal
9827
+ * @experimental
9828
+ */
8580
9829
  class TriggerManagerImpl {
8581
9830
  options;
8582
9831
  schema;
9832
+ defaultConfig;
9833
+ cleanupTimeout;
9834
+ isDisposed;
8583
9835
  constructor(options) {
8584
9836
  this.options = options;
8585
9837
  this.schema = options.schema;
@@ -8588,6 +9840,33 @@ class TriggerManagerImpl {
8588
9840
  this.schema = schema;
8589
9841
  }
8590
9842
  });
9843
+ this.isDisposed = false;
9844
+ /**
9845
+ * Configure a cleanup to run on an interval.
9846
+ * The interval is configured using setTimeout to take the async
9847
+ * execution time of the callback into account.
9848
+ */
9849
+ this.defaultConfig = DEFAULT_TRIGGER_MANAGER_CONFIGURATION;
9850
+ const cleanupCallback = async () => {
9851
+ this.cleanupTimeout = null;
9852
+ if (this.isDisposed) {
9853
+ return;
9854
+ }
9855
+ try {
9856
+ await this.cleanupResources();
9857
+ }
9858
+ catch (ex) {
9859
+ this.db.logger.error(`Caught error while attempting to cleanup triggers`, ex);
9860
+ }
9861
+ finally {
9862
+ // if not closed, set another timeout
9863
+ if (this.isDisposed) {
9864
+ return;
9865
+ }
9866
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
9867
+ }
9868
+ };
9869
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
8591
9870
  }
8592
9871
  get db() {
8593
9872
  return this.options.db;
@@ -8605,13 +9884,95 @@ class TriggerManagerImpl {
8605
9884
  await tx.execute(/* sql */ `DROP TRIGGER IF EXISTS ${triggerId}; `);
8606
9885
  }
8607
9886
  }
9887
+ dispose() {
9888
+ this.isDisposed = true;
9889
+ if (this.cleanupTimeout) {
9890
+ clearTimeout(this.cleanupTimeout);
9891
+ }
9892
+ }
9893
+ /**
9894
+ * Updates default config settings for platform specific use-cases.
9895
+ */
9896
+ updateDefaults(config) {
9897
+ this.defaultConfig = {
9898
+ ...this.defaultConfig,
9899
+ ...config
9900
+ };
9901
+ }
9902
+ generateTriggerName(operation, destinationTable, triggerId) {
9903
+ return `__ps_temp_trigger_${operation.toLowerCase()}__${destinationTable}__${triggerId}`;
9904
+ }
9905
+ /**
9906
+ * Cleanup any SQLite triggers or tables that are no longer in use.
9907
+ */
9908
+ async cleanupResources() {
9909
+ // we use the database here since cleanupResources is called during the PowerSyncDatabase initialization
9910
+ await this.db.database.writeLock(async (ctx) => {
9911
+ /**
9912
+ * Note: We only cleanup persisted triggers. These are tracked in the sqlite_master table.
9913
+ * temporary triggers will not be affected by this.
9914
+ * Query all triggers that match our naming pattern
9915
+ */
9916
+ const triggers = await ctx.getAll(/* sql */ `
9917
+ SELECT
9918
+ name
9919
+ FROM
9920
+ sqlite_master
9921
+ WHERE
9922
+ type = 'trigger'
9923
+ AND name LIKE '__ps_temp_trigger_%'
9924
+ `);
9925
+ /** Use regex to extract table names and IDs from trigger names
9926
+ * Trigger naming convention: __ps_temp_trigger_<operation>__<destination_table>__<id>
9927
+ */
9928
+ const triggerPattern = /^__ps_temp_trigger_(?:insert|update|delete)__(.+)__([a-f0-9_]{36})$/i;
9929
+ const trackedItems = new Map();
9930
+ for (const trigger of triggers) {
9931
+ const match = trigger.name.match(triggerPattern);
9932
+ if (match) {
9933
+ const [, table, id] = match;
9934
+ // Collect all trigger names for each id combo
9935
+ const existing = trackedItems.get(id);
9936
+ if (existing) {
9937
+ existing.triggerNames.push(trigger.name);
9938
+ }
9939
+ else {
9940
+ trackedItems.set(id, { table, id, triggerNames: [trigger.name] });
9941
+ }
9942
+ }
9943
+ }
9944
+ for (const trackedItem of trackedItems.values()) {
9945
+ // check if there is anything holding on to this item
9946
+ const hasClaim = await this.options.claimManager.checkClaim(trackedItem.id);
9947
+ if (hasClaim) {
9948
+ // This does not require cleanup
9949
+ continue;
9950
+ }
9951
+ this.db.logger.debug(`Clearing resources for trigger ${trackedItem.id} with table ${trackedItem.table}`);
9952
+ // We need to delete the triggers and table
9953
+ for (const triggerName of trackedItem.triggerNames) {
9954
+ await ctx.execute(`DROP TRIGGER IF EXISTS ${triggerName}`);
9955
+ }
9956
+ await ctx.execute(`DROP TABLE IF EXISTS ${trackedItem.table}`);
9957
+ }
9958
+ });
9959
+ }
8608
9960
  async createDiffTrigger(options) {
8609
9961
  await this.db.waitForReady();
8610
- const { source, destination, columns, when, hooks } = options;
9962
+ const { source, destination, columns, when, hooks,
9963
+ // Fall back to the provided default if not given on this level
9964
+ useStorage = this.defaultConfig.useStorageByDefault } = options;
8611
9965
  const operations = Object.keys(when);
8612
9966
  if (operations.length == 0) {
8613
9967
  throw new Error('At least one WHEN operation must be specified for the trigger.');
8614
9968
  }
9969
+ /**
9970
+ * The clause to use when executing
9971
+ * CREATE ${tableTriggerTypeClause} TABLE
9972
+ * OR
9973
+ * CREATE ${tableTriggerTypeClause} TRIGGER
9974
+ */
9975
+ const tableTriggerTypeClause = !useStorage ? 'TEMP' : '';
8615
9976
  const whenClauses = Object.fromEntries(Object.entries(when).map(([operation, filter]) => [operation, `WHEN ${filter}`]));
8616
9977
  /**
8617
9978
  * Allow specifying the View name as the source.
@@ -8625,6 +9986,7 @@ class TriggerManagerImpl {
8625
9986
  const internalSource = sourceDefinition.internalName;
8626
9987
  const triggerIds = [];
8627
9988
  const id = await this.getUUID();
9989
+ const releaseStorageClaim = useStorage ? await this.options.claimManager.obtainClaim(id) : null;
8628
9990
  /**
8629
9991
  * We default to replicating all columns if no columns array is provided.
8630
9992
  */
@@ -8657,26 +10019,27 @@ class TriggerManagerImpl {
8657
10019
  return this.db.writeLock(async (tx) => {
8658
10020
  await this.removeTriggers(tx, triggerIds);
8659
10021
  await tx.execute(/* sql */ `DROP TABLE IF EXISTS ${destination};`);
10022
+ await releaseStorageClaim?.();
8660
10023
  });
8661
10024
  };
8662
10025
  const setup = async (tx) => {
8663
10026
  // Allow user code to execute in this lock context before the trigger is created.
8664
10027
  await hooks?.beforeCreate?.(tx);
8665
10028
  await tx.execute(/* sql */ `
8666
- CREATE TEMP TABLE ${destination} (
10029
+ CREATE ${tableTriggerTypeClause} TABLE ${destination} (
8667
10030
  operation_id INTEGER PRIMARY KEY AUTOINCREMENT,
8668
10031
  id TEXT,
8669
10032
  operation TEXT,
8670
10033
  timestamp TEXT,
8671
10034
  value TEXT,
8672
10035
  previous_value TEXT
8673
- );
10036
+ )
8674
10037
  `);
8675
10038
  if (operations.includes(exports.DiffTriggerOperation.INSERT)) {
8676
- const insertTriggerId = `ps_temp_trigger_insert_${id}`;
10039
+ const insertTriggerId = this.generateTriggerName(exports.DiffTriggerOperation.INSERT, destination, id);
8677
10040
  triggerIds.push(insertTriggerId);
8678
10041
  await tx.execute(/* sql */ `
8679
- CREATE TEMP TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[exports.DiffTriggerOperation.INSERT]} BEGIN
10042
+ CREATE ${tableTriggerTypeClause} TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[exports.DiffTriggerOperation.INSERT]} BEGIN
8680
10043
  INSERT INTO
8681
10044
  ${destination} (id, operation, timestamp, value)
8682
10045
  VALUES
@@ -8687,14 +10050,14 @@ class TriggerManagerImpl {
8687
10050
  ${jsonFragment('NEW')}
8688
10051
  );
8689
10052
 
8690
- END;
10053
+ END
8691
10054
  `);
8692
10055
  }
8693
10056
  if (operations.includes(exports.DiffTriggerOperation.UPDATE)) {
8694
- const updateTriggerId = `ps_temp_trigger_update_${id}`;
10057
+ const updateTriggerId = this.generateTriggerName(exports.DiffTriggerOperation.UPDATE, destination, id);
8695
10058
  triggerIds.push(updateTriggerId);
8696
10059
  await tx.execute(/* sql */ `
8697
- CREATE TEMP TRIGGER ${updateTriggerId} AFTER
10060
+ CREATE ${tableTriggerTypeClause} TRIGGER ${updateTriggerId} AFTER
8698
10061
  UPDATE ON ${internalSource} ${whenClauses[exports.DiffTriggerOperation.UPDATE]} BEGIN
8699
10062
  INSERT INTO
8700
10063
  ${destination} (id, operation, timestamp, value, previous_value)
@@ -8711,11 +10074,11 @@ class TriggerManagerImpl {
8711
10074
  `);
8712
10075
  }
8713
10076
  if (operations.includes(exports.DiffTriggerOperation.DELETE)) {
8714
- const deleteTriggerId = `ps_temp_trigger_delete_${id}`;
10077
+ const deleteTriggerId = this.generateTriggerName(exports.DiffTriggerOperation.DELETE, destination, id);
8715
10078
  triggerIds.push(deleteTriggerId);
8716
10079
  // Create delete trigger for basic JSON
8717
10080
  await tx.execute(/* sql */ `
8718
- CREATE TEMP TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[exports.DiffTriggerOperation.DELETE]} BEGIN
10081
+ CREATE ${tableTriggerTypeClause} TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[exports.DiffTriggerOperation.DELETE]} BEGIN
8719
10082
  INSERT INTO
8720
10083
  ${destination} (id, operation, timestamp, value)
8721
10084
  VALUES
@@ -8759,7 +10122,7 @@ class TriggerManagerImpl {
8759
10122
  // If no array is provided, we use all columns from the source table.
8760
10123
  const contextColumns = columns ?? sourceDefinition.columns.map((col) => col.name);
8761
10124
  const id = await this.getUUID();
8762
- const destination = `ps_temp_track_${source}_${id}`;
10125
+ const destination = `__ps_temp_track_${source}_${id}`;
8763
10126
  // register an onChange before the trigger is created
8764
10127
  const abortController = new AbortController();
8765
10128
  const abortOnChange = () => abortController.abort();
@@ -8914,6 +10277,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
8914
10277
  * Allows creating SQLite triggers which can be used to track various operations on SQLite tables.
8915
10278
  */
8916
10279
  triggers;
10280
+ triggersImpl;
8917
10281
  logger;
8918
10282
  constructor(options) {
8919
10283
  super();
@@ -8977,9 +10341,10 @@ class AbstractPowerSyncDatabase extends BaseObserver {
8977
10341
  logger: this.logger
8978
10342
  });
8979
10343
  this._isReadyPromise = this.initialize();
8980
- this.triggers = new TriggerManagerImpl({
10344
+ this.triggers = this.triggersImpl = new TriggerManagerImpl({
8981
10345
  db: this,
8982
- schema: this.schema
10346
+ schema: this.schema,
10347
+ ...this.generateTriggerManagerConfig()
8983
10348
  });
8984
10349
  }
8985
10350
  /**
@@ -9005,6 +10370,15 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9005
10370
  get connecting() {
9006
10371
  return this.currentStatus?.connecting || false;
9007
10372
  }
10373
+ /**
10374
+ * Generates a base configuration for {@link TriggerManagerImpl}.
10375
+ * Implementations should override this if necessary.
10376
+ */
10377
+ generateTriggerManagerConfig() {
10378
+ return {
10379
+ claimManager: MEMORY_TRIGGER_CLAIM_MANAGER
10380
+ };
10381
+ }
9008
10382
  /**
9009
10383
  * @returns A promise which will resolve once initialization is completed.
9010
10384
  */
@@ -9065,14 +10439,15 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9065
10439
  async initialize() {
9066
10440
  await this._initialize();
9067
10441
  await this.bucketStorageAdapter.init();
9068
- await this._loadVersion();
10442
+ await this.loadVersion();
9069
10443
  await this.updateSchema(this.options.schema);
9070
10444
  await this.resolveOfflineSyncStatus();
9071
10445
  await this.database.execute('PRAGMA RECURSIVE_TRIGGERS=TRUE');
10446
+ await this.triggersImpl.cleanupResources();
9072
10447
  this.ready = true;
9073
10448
  this.iterateListeners((cb) => cb.initialized?.());
9074
10449
  }
9075
- async _loadVersion() {
10450
+ async loadVersion() {
9076
10451
  try {
9077
10452
  const { version } = await this.database.get('SELECT powersync_rs_version() as version');
9078
10453
  this.sdkVersion = version;
@@ -9188,7 +10563,6 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9188
10563
  await this.disconnect();
9189
10564
  await this.waitForReady();
9190
10565
  const { clearLocal } = options;
9191
- // TODO DB name, verify this is necessary with extension
9192
10566
  await this.database.writeTransaction(async (tx) => {
9193
10567
  await tx.execute('SELECT powersync_clear(?)', [clearLocal ? 1 : 0]);
9194
10568
  });
@@ -9220,6 +10594,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9220
10594
  if (this.closed) {
9221
10595
  return;
9222
10596
  }
10597
+ this.triggersImpl.dispose();
9223
10598
  await this.iterateAsyncListeners(async (cb) => cb.closing?.());
9224
10599
  const { disconnect } = options;
9225
10600
  if (disconnect) {
@@ -10185,151 +11560,50 @@ class SqliteBucketStorage extends BaseObserver {
10185
11560
  ]);
10186
11561
  return r != 0;
10187
11562
  }
10188
- async migrateToFixedSubkeys() {
10189
- await this.writeTransaction(async (tx) => {
10190
- await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
10191
- await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
10192
- SqliteBucketStorage._subkeyMigrationKey,
10193
- '1'
10194
- ]);
10195
- });
10196
- }
10197
- static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
10198
- }
10199
- function hasMatchingPriority(priority, bucket) {
10200
- return bucket.priority != null && bucket.priority <= priority;
10201
- }
10202
-
10203
- // TODO JSON
10204
- class SyncDataBatch {
10205
- buckets;
10206
- static fromJSON(json) {
10207
- return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
10208
- }
10209
- constructor(buckets) {
10210
- this.buckets = buckets;
10211
- }
10212
- }
10213
-
10214
- /**
10215
- * Thrown when an underlying database connection is closed.
10216
- * This is particularly relevant when worker connections are marked as closed while
10217
- * operations are still in progress.
10218
- */
10219
- class ConnectionClosedError extends Error {
10220
- static NAME = 'ConnectionClosedError';
10221
- static MATCHES(input) {
10222
- /**
10223
- * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
10224
- * check might fail. This also performs a failsafe check.
10225
- * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
10226
- */
10227
- return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
10228
- }
10229
- constructor(message) {
10230
- super(message);
10231
- this.name = ConnectionClosedError.NAME;
10232
- }
10233
- }
10234
-
10235
- // https://www.sqlite.org/lang_expr.html#castexpr
10236
- exports.ColumnType = void 0;
10237
- (function (ColumnType) {
10238
- ColumnType["TEXT"] = "TEXT";
10239
- ColumnType["INTEGER"] = "INTEGER";
10240
- ColumnType["REAL"] = "REAL";
10241
- })(exports.ColumnType || (exports.ColumnType = {}));
10242
- const text = {
10243
- type: exports.ColumnType.TEXT
10244
- };
10245
- const integer = {
10246
- type: exports.ColumnType.INTEGER
10247
- };
10248
- const real = {
10249
- type: exports.ColumnType.REAL
10250
- };
10251
- // powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
10252
- // In earlier versions this was limited to 63.
10253
- const MAX_AMOUNT_OF_COLUMNS = 1999;
10254
- const column = {
10255
- text,
10256
- integer,
10257
- real
10258
- };
10259
- class Column {
10260
- options;
10261
- constructor(options) {
10262
- this.options = options;
10263
- }
10264
- get name() {
10265
- return this.options.name;
10266
- }
10267
- get type() {
10268
- return this.options.type;
10269
- }
10270
- toJSON() {
10271
- return {
10272
- name: this.name,
10273
- type: this.type
10274
- };
10275
- }
10276
- }
10277
-
10278
- const DEFAULT_INDEX_COLUMN_OPTIONS = {
10279
- ascending: true
10280
- };
10281
- class IndexedColumn {
10282
- options;
10283
- static createAscending(column) {
10284
- return new IndexedColumn({
10285
- name: column,
10286
- ascending: true
10287
- });
10288
- }
10289
- constructor(options) {
10290
- this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
10291
- }
10292
- get name() {
10293
- return this.options.name;
10294
- }
10295
- get ascending() {
10296
- return this.options.ascending;
10297
- }
10298
- toJSON(table) {
10299
- return {
10300
- name: this.name,
10301
- ascending: this.ascending,
10302
- type: table.columns.find((column) => column.name === this.name)?.type ?? exports.ColumnType.TEXT
10303
- };
10304
- }
10305
- }
10306
-
10307
- const DEFAULT_INDEX_OPTIONS = {
10308
- columns: []
10309
- };
10310
- class Index {
10311
- options;
10312
- static createAscending(options, columnNames) {
10313
- return new Index({
10314
- ...options,
10315
- columns: columnNames.map((name) => IndexedColumn.createAscending(name))
11563
+ async migrateToFixedSubkeys() {
11564
+ await this.writeTransaction(async (tx) => {
11565
+ await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
11566
+ await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
11567
+ SqliteBucketStorage._subkeyMigrationKey,
11568
+ '1'
11569
+ ]);
10316
11570
  });
10317
11571
  }
10318
- constructor(options) {
10319
- this.options = options;
10320
- this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
11572
+ static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
11573
+ }
11574
+ function hasMatchingPriority(priority, bucket) {
11575
+ return bucket.priority != null && bucket.priority <= priority;
11576
+ }
11577
+
11578
+ // TODO JSON
11579
+ class SyncDataBatch {
11580
+ buckets;
11581
+ static fromJSON(json) {
11582
+ return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
10321
11583
  }
10322
- get name() {
10323
- return this.options.name;
11584
+ constructor(buckets) {
11585
+ this.buckets = buckets;
10324
11586
  }
10325
- get columns() {
10326
- return this.options.columns ?? [];
11587
+ }
11588
+
11589
+ /**
11590
+ * Thrown when an underlying database connection is closed.
11591
+ * This is particularly relevant when worker connections are marked as closed while
11592
+ * operations are still in progress.
11593
+ */
11594
+ class ConnectionClosedError extends Error {
11595
+ static NAME = 'ConnectionClosedError';
11596
+ static MATCHES(input) {
11597
+ /**
11598
+ * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
11599
+ * check might fail. This also performs a failsafe check.
11600
+ * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
11601
+ */
11602
+ return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
10327
11603
  }
10328
- toJSON(table) {
10329
- return {
10330
- name: this.name,
10331
- columns: this.columns.map((c) => c.toJSON(table))
10332
- };
11604
+ constructor(message) {
11605
+ super(message);
11606
+ this.name = ConnectionClosedError.NAME;
10333
11607
  }
10334
11608
  }
10335
11609
 
@@ -10426,211 +11700,6 @@ class Schema {
10426
11700
  }
10427
11701
  }
10428
11702
 
10429
- const DEFAULT_TABLE_OPTIONS = {
10430
- indexes: [],
10431
- insertOnly: false,
10432
- localOnly: false,
10433
- trackPrevious: false,
10434
- trackMetadata: false,
10435
- ignoreEmptyUpdates: false
10436
- };
10437
- const InvalidSQLCharacters = /["'%,.#\s[\]]/;
10438
- class Table {
10439
- options;
10440
- _mappedColumns;
10441
- static createLocalOnly(options) {
10442
- return new Table({ ...options, localOnly: true, insertOnly: false });
10443
- }
10444
- static createInsertOnly(options) {
10445
- return new Table({ ...options, localOnly: false, insertOnly: true });
10446
- }
10447
- /**
10448
- * Create a table.
10449
- * @deprecated This was only only included for TableV2 and is no longer necessary.
10450
- * Prefer to use new Table() directly.
10451
- *
10452
- * TODO remove in the next major release.
10453
- */
10454
- static createTable(name, table) {
10455
- return new Table({
10456
- name,
10457
- columns: table.columns,
10458
- indexes: table.indexes,
10459
- localOnly: table.options.localOnly,
10460
- insertOnly: table.options.insertOnly,
10461
- viewName: table.options.viewName
10462
- });
10463
- }
10464
- constructor(optionsOrColumns, v2Options) {
10465
- if (this.isTableV1(optionsOrColumns)) {
10466
- this.initTableV1(optionsOrColumns);
10467
- }
10468
- else {
10469
- this.initTableV2(optionsOrColumns, v2Options);
10470
- }
10471
- }
10472
- copyWithName(name) {
10473
- return new Table({
10474
- ...this.options,
10475
- name
10476
- });
10477
- }
10478
- isTableV1(arg) {
10479
- return 'columns' in arg && Array.isArray(arg.columns);
10480
- }
10481
- initTableV1(options) {
10482
- this.options = {
10483
- ...options,
10484
- indexes: options.indexes || []
10485
- };
10486
- this.applyDefaultOptions();
10487
- }
10488
- initTableV2(columns, options) {
10489
- const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
10490
- const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
10491
- name,
10492
- columns: columnNames.map((name) => new IndexedColumn({
10493
- name: name.replace(/^-/, ''),
10494
- ascending: !name.startsWith('-')
10495
- }))
10496
- }));
10497
- this.options = {
10498
- name: '',
10499
- columns: convertedColumns,
10500
- indexes: convertedIndexes,
10501
- viewName: options?.viewName,
10502
- insertOnly: options?.insertOnly,
10503
- localOnly: options?.localOnly,
10504
- trackPrevious: options?.trackPrevious,
10505
- trackMetadata: options?.trackMetadata,
10506
- ignoreEmptyUpdates: options?.ignoreEmptyUpdates
10507
- };
10508
- this.applyDefaultOptions();
10509
- this._mappedColumns = columns;
10510
- }
10511
- applyDefaultOptions() {
10512
- this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
10513
- this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
10514
- this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
10515
- this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
10516
- this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
10517
- }
10518
- get name() {
10519
- return this.options.name;
10520
- }
10521
- get viewNameOverride() {
10522
- return this.options.viewName;
10523
- }
10524
- get viewName() {
10525
- return this.viewNameOverride ?? this.name;
10526
- }
10527
- get columns() {
10528
- return this.options.columns;
10529
- }
10530
- get columnMap() {
10531
- return (this._mappedColumns ??
10532
- this.columns.reduce((hash, column) => {
10533
- hash[column.name] = { type: column.type ?? exports.ColumnType.TEXT };
10534
- return hash;
10535
- }, {}));
10536
- }
10537
- get indexes() {
10538
- return this.options.indexes ?? [];
10539
- }
10540
- get localOnly() {
10541
- return this.options.localOnly;
10542
- }
10543
- get insertOnly() {
10544
- return this.options.insertOnly;
10545
- }
10546
- get trackPrevious() {
10547
- return this.options.trackPrevious;
10548
- }
10549
- get trackMetadata() {
10550
- return this.options.trackMetadata;
10551
- }
10552
- get ignoreEmptyUpdates() {
10553
- return this.options.ignoreEmptyUpdates;
10554
- }
10555
- get internalName() {
10556
- if (this.options.localOnly) {
10557
- return `ps_data_local__${this.name}`;
10558
- }
10559
- return `ps_data__${this.name}`;
10560
- }
10561
- get validName() {
10562
- if (InvalidSQLCharacters.test(this.name)) {
10563
- return false;
10564
- }
10565
- if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
10566
- return false;
10567
- }
10568
- return true;
10569
- }
10570
- validate() {
10571
- if (InvalidSQLCharacters.test(this.name)) {
10572
- throw new Error(`Invalid characters in table name: ${this.name}`);
10573
- }
10574
- if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
10575
- throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
10576
- }
10577
- if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
10578
- throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
10579
- }
10580
- if (this.trackMetadata && this.localOnly) {
10581
- throw new Error(`Can't include metadata for local-only tables.`);
10582
- }
10583
- if (this.trackPrevious != false && this.localOnly) {
10584
- throw new Error(`Can't include old values for local-only tables.`);
10585
- }
10586
- const columnNames = new Set();
10587
- columnNames.add('id');
10588
- for (const column of this.columns) {
10589
- const { name: columnName } = column;
10590
- if (column.name === 'id') {
10591
- throw new Error(`An id column is automatically added, custom id columns are not supported`);
10592
- }
10593
- if (columnNames.has(columnName)) {
10594
- throw new Error(`Duplicate column ${columnName}`);
10595
- }
10596
- if (InvalidSQLCharacters.test(columnName)) {
10597
- throw new Error(`Invalid characters in column name: ${column.name}`);
10598
- }
10599
- columnNames.add(columnName);
10600
- }
10601
- const indexNames = new Set();
10602
- for (const index of this.indexes) {
10603
- if (indexNames.has(index.name)) {
10604
- throw new Error(`Duplicate index ${index.name}`);
10605
- }
10606
- if (InvalidSQLCharacters.test(index.name)) {
10607
- throw new Error(`Invalid characters in index name: ${index.name}`);
10608
- }
10609
- for (const column of index.columns) {
10610
- if (!columnNames.has(column.name)) {
10611
- throw new Error(`Column ${column.name} not found for index ${index.name}`);
10612
- }
10613
- }
10614
- indexNames.add(index.name);
10615
- }
10616
- }
10617
- toJSON() {
10618
- const trackPrevious = this.trackPrevious;
10619
- return {
10620
- name: this.name,
10621
- view_name: this.viewName,
10622
- local_only: this.localOnly,
10623
- insert_only: this.insertOnly,
10624
- include_old: trackPrevious && (trackPrevious.columns ?? true),
10625
- include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
10626
- include_metadata: this.trackMetadata,
10627
- ignore_empty_update: this.ignoreEmptyUpdates,
10628
- columns: this.columns.map((c) => c.toJSON()),
10629
- indexes: this.indexes.map((e) => e.toJSON(this))
10630
- };
10631
- }
10632
- }
10633
-
10634
11703
  /**
10635
11704
  Generate a new table from the columns and indexes
10636
11705
  @deprecated You should use {@link Table} instead as it now allows TableV2 syntax.
@@ -10786,6 +11855,7 @@ const parseQuery = (query, parameters) => {
10786
11855
  return { sqlStatement, parameters: parameters };
10787
11856
  };
10788
11857
 
11858
+ exports.ATTACHMENT_TABLE = ATTACHMENT_TABLE;
10789
11859
  exports.AbortOperation = AbortOperation;
10790
11860
  exports.AbstractPowerSyncDatabase = AbstractPowerSyncDatabase;
10791
11861
  exports.AbstractPowerSyncDatabaseOpenFactory = AbstractPowerSyncDatabaseOpenFactory;
@@ -10793,6 +11863,10 @@ exports.AbstractQueryProcessor = AbstractQueryProcessor;
10793
11863
  exports.AbstractRemote = AbstractRemote;
10794
11864
  exports.AbstractStreamingSyncImplementation = AbstractStreamingSyncImplementation;
10795
11865
  exports.ArrayComparator = ArrayComparator;
11866
+ exports.AttachmentContext = AttachmentContext;
11867
+ exports.AttachmentQueue = AttachmentQueue;
11868
+ exports.AttachmentService = AttachmentService;
11869
+ exports.AttachmentTable = AttachmentTable;
10796
11870
  exports.BaseObserver = BaseObserver;
10797
11871
  exports.Column = Column;
10798
11872
  exports.ConnectionClosedError = ConnectionClosedError;
@@ -10831,18 +11905,23 @@ exports.InvalidSQLCharacters = InvalidSQLCharacters;
10831
11905
  exports.LogLevel = LogLevel;
10832
11906
  exports.MAX_AMOUNT_OF_COLUMNS = MAX_AMOUNT_OF_COLUMNS;
10833
11907
  exports.MAX_OP_ID = MAX_OP_ID;
11908
+ exports.MEMORY_TRIGGER_CLAIM_MANAGER = MEMORY_TRIGGER_CLAIM_MANAGER;
10834
11909
  exports.OnChangeQueryProcessor = OnChangeQueryProcessor;
10835
11910
  exports.OpType = OpType;
10836
11911
  exports.OplogEntry = OplogEntry;
11912
+ exports.RawTable = RawTable;
10837
11913
  exports.Schema = Schema;
10838
11914
  exports.SqliteBucketStorage = SqliteBucketStorage;
10839
11915
  exports.SyncDataBatch = SyncDataBatch;
10840
11916
  exports.SyncDataBucket = SyncDataBucket;
10841
11917
  exports.SyncProgress = SyncProgress;
10842
11918
  exports.SyncStatus = SyncStatus;
11919
+ exports.SyncingService = SyncingService;
10843
11920
  exports.Table = Table;
10844
11921
  exports.TableV2 = TableV2;
11922
+ exports.TriggerManagerImpl = TriggerManagerImpl;
10845
11923
  exports.UploadQueueStats = UploadQueueStats;
11924
+ exports.attachmentFromSql = attachmentFromSql;
10846
11925
  exports.column = column;
10847
11926
  exports.compilableQueryWatch = compilableQueryWatch;
10848
11927
  exports.createBaseLogger = createBaseLogger;
@@ -10861,6 +11940,7 @@ exports.isStreamingSyncCheckpointDiff = isStreamingSyncCheckpointDiff;
10861
11940
  exports.isStreamingSyncCheckpointPartiallyComplete = isStreamingSyncCheckpointPartiallyComplete;
10862
11941
  exports.isStreamingSyncData = isStreamingSyncData;
10863
11942
  exports.isSyncNewCheckpointRequest = isSyncNewCheckpointRequest;
11943
+ exports.mutexRunExclusive = mutexRunExclusive;
10864
11944
  exports.parseQuery = parseQuery;
10865
11945
  exports.runOnSchemaChange = runOnSchemaChange;
10866
11946
  exports.sanitizeSQL = sanitizeSQL;