@powersync/common 1.45.0 → 1.47.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/README.md +5 -1
  2. package/dist/bundle.cjs +4165 -4914
  3. package/dist/bundle.cjs.map +1 -1
  4. package/dist/bundle.mjs +4155 -4915
  5. package/dist/bundle.mjs.map +1 -1
  6. package/dist/bundle.node.cjs +1502 -422
  7. package/dist/bundle.node.cjs.map +1 -1
  8. package/dist/bundle.node.mjs +1492 -423
  9. package/dist/bundle.node.mjs.map +1 -1
  10. package/dist/index.d.cts +622 -42
  11. package/lib/attachments/AttachmentContext.d.ts +86 -0
  12. package/lib/attachments/AttachmentContext.js +229 -0
  13. package/lib/attachments/AttachmentContext.js.map +1 -0
  14. package/lib/attachments/AttachmentErrorHandler.d.ts +31 -0
  15. package/lib/attachments/AttachmentErrorHandler.js +2 -0
  16. package/lib/attachments/AttachmentErrorHandler.js.map +1 -0
  17. package/lib/attachments/AttachmentQueue.d.ts +149 -0
  18. package/lib/attachments/AttachmentQueue.js +362 -0
  19. package/lib/attachments/AttachmentQueue.js.map +1 -0
  20. package/lib/attachments/AttachmentService.d.ts +29 -0
  21. package/lib/attachments/AttachmentService.js +56 -0
  22. package/lib/attachments/AttachmentService.js.map +1 -0
  23. package/lib/attachments/LocalStorageAdapter.d.ts +62 -0
  24. package/lib/attachments/LocalStorageAdapter.js +6 -0
  25. package/lib/attachments/LocalStorageAdapter.js.map +1 -0
  26. package/lib/attachments/RemoteStorageAdapter.d.ts +27 -0
  27. package/lib/attachments/RemoteStorageAdapter.js +2 -0
  28. package/lib/attachments/RemoteStorageAdapter.js.map +1 -0
  29. package/lib/attachments/Schema.d.ts +50 -0
  30. package/lib/attachments/Schema.js +62 -0
  31. package/lib/attachments/Schema.js.map +1 -0
  32. package/lib/attachments/SyncingService.d.ts +62 -0
  33. package/lib/attachments/SyncingService.js +168 -0
  34. package/lib/attachments/SyncingService.js.map +1 -0
  35. package/lib/attachments/WatchedAttachmentItem.d.ts +17 -0
  36. package/lib/attachments/WatchedAttachmentItem.js +2 -0
  37. package/lib/attachments/WatchedAttachmentItem.js.map +1 -0
  38. package/lib/client/AbstractPowerSyncDatabase.d.ts +9 -2
  39. package/lib/client/AbstractPowerSyncDatabase.js +18 -5
  40. package/lib/client/AbstractPowerSyncDatabase.js.map +1 -1
  41. package/lib/client/sync/stream/AbstractRemote.js +41 -32
  42. package/lib/client/sync/stream/AbstractRemote.js.map +1 -1
  43. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.d.ts +7 -12
  44. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js +10 -12
  45. package/lib/client/sync/stream/AbstractStreamingSyncImplementation.js.map +1 -1
  46. package/lib/client/triggers/MemoryTriggerClaimManager.d.ts +6 -0
  47. package/lib/client/triggers/MemoryTriggerClaimManager.js +21 -0
  48. package/lib/client/triggers/MemoryTriggerClaimManager.js.map +1 -0
  49. package/lib/client/triggers/TriggerManager.d.ts +37 -0
  50. package/lib/client/triggers/TriggerManagerImpl.d.ts +24 -3
  51. package/lib/client/triggers/TriggerManagerImpl.js +133 -11
  52. package/lib/client/triggers/TriggerManagerImpl.js.map +1 -1
  53. package/lib/index.d.ts +13 -0
  54. package/lib/index.js +13 -0
  55. package/lib/index.js.map +1 -1
  56. package/lib/utils/DataStream.js +11 -2
  57. package/lib/utils/DataStream.js.map +1 -1
  58. package/lib/utils/mutex.d.ts +1 -1
  59. package/lib/utils/mutex.js.map +1 -1
  60. package/package.json +4 -3
  61. package/src/attachments/AttachmentContext.ts +279 -0
  62. package/src/attachments/AttachmentErrorHandler.ts +34 -0
  63. package/src/attachments/AttachmentQueue.ts +472 -0
  64. package/src/attachments/AttachmentService.ts +62 -0
  65. package/src/attachments/LocalStorageAdapter.ts +72 -0
  66. package/src/attachments/README.md +718 -0
  67. package/src/attachments/RemoteStorageAdapter.ts +30 -0
  68. package/src/attachments/Schema.ts +87 -0
  69. package/src/attachments/SyncingService.ts +193 -0
  70. package/src/attachments/WatchedAttachmentItem.ts +19 -0
  71. package/src/client/AbstractPowerSyncDatabase.ts +21 -6
  72. package/src/client/sync/stream/AbstractRemote.ts +47 -35
  73. package/src/client/sync/stream/AbstractStreamingSyncImplementation.ts +11 -14
  74. package/src/client/triggers/MemoryTriggerClaimManager.ts +25 -0
  75. package/src/client/triggers/TriggerManager.ts +50 -6
  76. package/src/client/triggers/TriggerManagerImpl.ts +177 -13
  77. package/src/index.ts +14 -0
  78. package/src/utils/DataStream.ts +13 -2
  79. package/src/utils/mutex.ts +1 -1
@@ -2,6 +2,1224 @@ import { Mutex } from 'async-mutex';
2
2
  import { EventIterator } from 'event-iterator';
3
3
  import { Buffer } from 'node:buffer';
4
4
 
5
// https://www.sqlite.org/lang_expr.html#castexpr
var ColumnType = {};
ColumnType.TEXT = 'TEXT';
ColumnType.INTEGER = 'INTEGER';
ColumnType.REAL = 'REAL';

/** Shorthand column definitions, one per supported SQLite storage class. */
const text = { type: ColumnType.TEXT };
const integer = { type: ColumnType.INTEGER };
const real = { type: ColumnType.REAL };

// powersync-sqlite-core limits the number of columns per table to 1999, due to
// internal SQLite limits. In earlier versions this was limited to 63.
const MAX_AMOUNT_OF_COLUMNS = 1999;

/** Convenience namespace: `column.text`, `column.integer`, `column.real`. */
const column = { text, integer, real };
29
/**
 * A single column definition within a PowerSync schema table.
 */
class Column {
  options;

  constructor(options) {
    this.options = options;
  }

  /** Column name as declared in the schema. */
  get name() {
    return this.options.name;
  }

  /** SQLite storage type of the column (TEXT / INTEGER / REAL). */
  get type() {
    return this.options.type;
  }

  /** Serializes the column into the JSON shape used by the sync schema. */
  toJSON() {
    const { name, type } = this;
    return { name, type };
  }
}
47
+
48
// Index columns sort ascending unless explicitly overridden.
const DEFAULT_INDEX_COLUMN_OPTIONS = { ascending: true };

/**
 * A column participating in an index, together with its sort direction.
 */
class IndexedColumn {
  options;

  /** Creates an ascending indexed column for the given column name. */
  static createAscending(column) {
    return new IndexedColumn({ name: column, ascending: true });
  }

  constructor(options) {
    this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
  }

  get name() {
    return this.options.name;
  }

  get ascending() {
    return this.options.ascending;
  }

  /**
   * Serializes the indexed column, resolving its type from the owning table.
   * Falls back to TEXT when the referenced column cannot be found.
   */
  toJSON(table) {
    const match = table.columns.find((col) => col.name === this.name);
    return {
      name: this.name,
      ascending: this.ascending,
      type: match?.type ?? ColumnType.TEXT
    };
  }
}
76
+
77
// Defaults applied to every index unless overridden by the caller.
const DEFAULT_INDEX_OPTIONS = {
  columns: []
};

/**
 * A named index over one or more columns of a schema table.
 */
class Index {
  options;

  /**
   * Creates an index in which every listed column is sorted ascending.
   *
   * @param options - Index options (everything except `columns`)
   * @param columnNames - Names of the columns to index, in order
   * @returns The constructed Index
   */
  static createAscending(options, columnNames) {
    return new Index({
      ...options,
      columns: columnNames.map((name) => IndexedColumn.createAscending(name))
    });
  }

  /**
   * @param options - Index options; missing fields fall back to defaults.
   */
  constructor(options) {
    // Fix: the original assigned `this.options` twice (first the raw options,
    // then the merged defaults). The first assignment was dead code.
    this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
  }

  get name() {
    return this.options.name;
  }

  get columns() {
    return this.options.columns ?? [];
  }

  /** Serializes the index and its columns for the sync schema. */
  toJSON(table) {
    return {
      name: this.name,
      columns: this.columns.map((c) => c.toJSON(table))
    };
  }
}
105
+
106
// Defaults applied to every table unless the caller overrides them.
const DEFAULT_TABLE_OPTIONS = {
  indexes: [],
  insertOnly: false,
  localOnly: false,
  trackPrevious: false,
  trackMetadata: false,
  ignoreEmptyUpdates: false
};
// Characters not permitted in table, view, column, or index names.
const InvalidSQLCharacters = /["'%,.#\s[\]]/;
/**
 * A table definition within a PowerSync schema.
 *
 * Supports two construction forms:
 *  - V1: `new Table({ name, columns, indexes, ... })`, where `columns` is an
 *    array of Column instances.
 *  - V2: `new Table({ colName: column.text, ... }, options)`, where the first
 *    argument is a column map keyed by column name.
 */
class Table {
  // Normalized table options; V2 input is converted into this shape.
  options;
  // Original V2 column map, kept so columnMap can return it unchanged.
  _mappedColumns;
  /** Creates a table flagged localOnly (and not insertOnly). */
  static createLocalOnly(options) {
    return new Table({ ...options, localOnly: true, insertOnly: false });
  }
  /** Creates a table flagged insertOnly (and not localOnly). */
  static createInsertOnly(options) {
    return new Table({ ...options, localOnly: false, insertOnly: true });
  }
  /**
   * Create a table.
   * @deprecated This was only included for TableV2 and is no longer necessary.
   * Prefer to use new Table() directly.
   *
   * TODO remove in the next major release.
   */
  static createTable(name, table) {
    return new Table({
      name,
      columns: table.columns,
      indexes: table.indexes,
      localOnly: table.options.localOnly,
      insertOnly: table.options.insertOnly,
      viewName: table.options.viewName
    });
  }
  /**
   * @param optionsOrColumns - Either V1 options (detected by a `columns` array)
   *   or a V2 column map of name -> column definition.
   * @param v2Options - Extra options; only used with the V2 column-map form.
   */
  constructor(optionsOrColumns, v2Options) {
    if (this.isTableV1(optionsOrColumns)) {
      this.initTableV1(optionsOrColumns);
    }
    else {
      this.initTableV2(optionsOrColumns, v2Options);
    }
  }
  /** Returns a copy of this table with a different name; other options kept. */
  copyWithName(name) {
    return new Table({
      ...this.options,
      name
    });
  }
  // V1 input is detected by the presence of a `columns` array.
  isTableV1(arg) {
    return 'columns' in arg && Array.isArray(arg.columns);
  }
  // Stores V1 options as-is (normalizing `indexes`) and fills defaults.
  initTableV1(options) {
    this.options = {
      ...options,
      indexes: options.indexes || []
    };
    this.applyDefaultOptions();
  }
  // Converts a V2 column map + options into the normalized V1 option shape.
  initTableV2(columns, options) {
    const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
    // An index column name prefixed with '-' means descending order.
    const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
      name,
      columns: columnNames.map((name) => new IndexedColumn({
        name: name.replace(/^-/, ''),
        ascending: !name.startsWith('-')
      }))
    }));
    this.options = {
      // V2 tables get their name assigned later (e.g. by the schema); empty here.
      name: '',
      columns: convertedColumns,
      indexes: convertedIndexes,
      viewName: options?.viewName,
      insertOnly: options?.insertOnly,
      localOnly: options?.localOnly,
      trackPrevious: options?.trackPrevious,
      trackMetadata: options?.trackMetadata,
      ignoreEmptyUpdates: options?.ignoreEmptyUpdates
    };
    this.applyDefaultOptions();
    this._mappedColumns = columns;
  }
  // Fills any option left null/undefined with the module-level defaults.
  applyDefaultOptions() {
    this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
    this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
    this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
    this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
    this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
  }
  get name() {
    return this.options.name;
  }
  get viewNameOverride() {
    return this.options.viewName;
  }
  /** View name exposed to queries: the override if set, else the table name. */
  get viewName() {
    return this.viewNameOverride ?? this.name;
  }
  get columns() {
    return this.options.columns;
  }
  /** Column map keyed by name; returns the original V2 map when available. */
  get columnMap() {
    return (this._mappedColumns ??
      this.columns.reduce((hash, column) => {
        hash[column.name] = { type: column.type ?? ColumnType.TEXT };
        return hash;
      }, {}));
  }
  get indexes() {
    return this.options.indexes ?? [];
  }
  get localOnly() {
    return this.options.localOnly;
  }
  get insertOnly() {
    return this.options.insertOnly;
  }
  get trackPrevious() {
    return this.options.trackPrevious;
  }
  get trackMetadata() {
    return this.options.trackMetadata;
  }
  get ignoreEmptyUpdates() {
    return this.options.ignoreEmptyUpdates;
  }
  /** Internal (SQLite-level) table name backing this schema table. */
  get internalName() {
    if (this.options.localOnly) {
      return `ps_data_local__${this.name}`;
    }
    return `ps_data__${this.name}`;
  }
  /** True when neither the table name nor the view override contains invalid characters. */
  get validName() {
    if (InvalidSQLCharacters.test(this.name)) {
      return false;
    }
    if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
      return false;
    }
    return true;
  }
  /**
   * Validates the table definition.
   *
   * @throws Error on invalid table/view/column/index names, too many columns,
   *   a custom `id` column, duplicate column or index names, indexes over
   *   unknown columns, or metadata/previous-value tracking on local-only tables.
   */
  validate() {
    if (InvalidSQLCharacters.test(this.name)) {
      throw new Error(`Invalid characters in table name: ${this.name}`);
    }
    if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
      throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
    }
    if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
      throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
    }
    if (this.trackMetadata && this.localOnly) {
      throw new Error(`Can't include metadata for local-only tables.`);
    }
    if (this.trackPrevious != false && this.localOnly) {
      throw new Error(`Can't include old values for local-only tables.`);
    }
    // 'id' is reserved: it is added automatically, so a user-defined 'id' is rejected.
    const columnNames = new Set();
    columnNames.add('id');
    for (const column of this.columns) {
      const { name: columnName } = column;
      if (column.name === 'id') {
        throw new Error(`An id column is automatically added, custom id columns are not supported`);
      }
      if (columnNames.has(columnName)) {
        throw new Error(`Duplicate column ${columnName}`);
      }
      if (InvalidSQLCharacters.test(columnName)) {
        throw new Error(`Invalid characters in column name: ${column.name}`);
      }
      columnNames.add(columnName);
    }
    const indexNames = new Set();
    for (const index of this.indexes) {
      if (indexNames.has(index.name)) {
        throw new Error(`Duplicate index ${index.name}`);
      }
      if (InvalidSQLCharacters.test(index.name)) {
        throw new Error(`Invalid characters in index name: ${index.name}`);
      }
      // Every indexed column must exist on the table (or be the implicit 'id').
      for (const column of index.columns) {
        if (!columnNames.has(column.name)) {
          throw new Error(`Column ${column.name} not found for index ${index.name}`);
        }
      }
      indexNames.add(index.name);
    }
  }
  /** Serializes the table definition to the JSON shape used by the sync schema. */
  toJSON() {
    const trackPrevious = this.trackPrevious;
    return {
      name: this.name,
      view_name: this.viewName,
      local_only: this.localOnly,
      insert_only: this.insertOnly,
      // trackPrevious may be a boolean or an options object; when an object,
      // its `columns` value (or true when columns is unset) is forwarded.
      include_old: trackPrevious && (trackPrevious.columns ?? true),
      include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
      include_metadata: this.trackMetadata,
      ignore_empty_update: this.ignoreEmptyUpdates,
      columns: this.columns.map((c) => c.toJSON()),
      indexes: this.indexes.map((e) => e.toJSON(this))
    };
  }
}
310
+
311
const ATTACHMENT_TABLE = 'attachments';
/**
 * Maps a database row to an AttachmentRecord.
 *
 * @param row - The database row object
 * @returns The corresponding AttachmentRecord
 *
 * @experimental
 */
function attachmentFromSql(row) {
  const {
    id,
    filename,
    local_uri: localUri,
    size,
    media_type: mediaType,
    timestamp,
    meta_data: metaData,
    has_synced,
    state
  } = row;
  return {
    id,
    filename,
    localUri,
    size,
    mediaType,
    timestamp,
    metaData,
    // has_synced is stored as 0/1 in SQLite; expose it as a boolean.
    hasSynced: has_synced === 1,
    state
  };
}
333
/**
 * AttachmentState represents the current synchronization state of an attachment.
 *
 * @experimental
 */
var AttachmentState = {};
[
  ['QUEUED_UPLOAD', 0],
  ['QUEUED_DOWNLOAD', 1],
  ['QUEUED_DELETE', 2],
  ['SYNCED', 3],
  // Attachment has been orphaned, i.e. the associated record has been deleted
  ['ARCHIVED', 4]
].forEach(([label, value]) => {
  AttachmentState[label] = value;
  // Reverse mapping (value -> label), matching TypeScript numeric enum output.
  AttachmentState[value] = label;
});
346
/**
 * AttachmentTable defines the schema for the attachment queue table.
 *
 * @internal
 */
class AttachmentTable extends Table {
  constructor(options) {
    const columns = {
      filename: column.text,
      local_uri: column.text,
      timestamp: column.integer,
      size: column.integer,
      media_type: column.text,
      // Corresponds to AttachmentState
      state: column.integer,
      has_synced: column.integer,
      meta_data: column.text
    };
    super(columns, {
      ...options,
      viewName: options?.viewName ?? ATTACHMENT_TABLE,
      // The attachment queue is always local-only and never insert-only.
      localOnly: true,
      insertOnly: false
    });
  }
}
370
+
371
/**
 * AttachmentContext provides database operations for managing attachment records.
 *
 * Provides methods to query, insert, update, and delete attachment records with
 * proper transaction management through PowerSync.
 *
 * @internal
 */
class AttachmentContext {
  /** PowerSync database instance for executing queries */
  db;
  /** Name of the database table storing attachment records */
  tableName;
  /** Logger instance for diagnostic information */
  logger;
  /** Maximum number of archived attachments to keep before cleanup */
  archivedCacheLimit = 100;
  /**
   * Creates a new AttachmentContext instance.
   *
   * @param db - PowerSync database instance
   * @param tableName - Name of the table storing attachment records. Default: 'attachments'
   * @param logger - Logger instance for diagnostic output
   * @param archivedCacheLimit - Maximum archived records to retain. Default: 100
   */
  constructor(db, tableName = 'attachments', logger, archivedCacheLimit) {
    this.db = db;
    this.tableName = tableName;
    this.logger = logger;
    // Fix: previously an omitted archivedCacheLimit overwrote the field's
    // default of 100 with `undefined`, which then fed `undefined` into the
    // `OFFSET ?` of deleteArchivedAttachments. Fall back to the default.
    this.archivedCacheLimit = archivedCacheLimit ?? 100;
  }
  /**
   * Retrieves all active attachments that require synchronization.
   * Active attachments include those queued for upload, download, or delete.
   * Results are ordered by timestamp in ascending order.
   *
   * @returns Promise resolving to an array of active attachment records
   */
  async getActiveAttachments() {
    const attachments = await this.db.getAll(
      /* sql */
      `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
          OR state = ?
          OR state = ?
        ORDER BY
          timestamp ASC
      `,
      [AttachmentState.QUEUED_UPLOAD, AttachmentState.QUEUED_DOWNLOAD, AttachmentState.QUEUED_DELETE]
    );
    return attachments.map(attachmentFromSql);
  }
  /**
   * Retrieves all archived attachments.
   *
   * Archived attachments are no longer referenced but haven't been permanently deleted.
   * These are candidates for cleanup operations to free up storage space.
   *
   * @returns Promise resolving to an array of archived attachment records
   */
  async getArchivedAttachments() {
    const attachments = await this.db.getAll(
      /* sql */
      `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
        ORDER BY
          timestamp ASC
      `,
      [AttachmentState.ARCHIVED]
    );
    return attachments.map(attachmentFromSql);
  }
  /**
   * Retrieves all attachment records regardless of state.
   * Results are ordered by timestamp in ascending order.
   *
   * @returns Promise resolving to an array of all attachment records
   */
  async getAttachments() {
    const attachments = await this.db.getAll(
      /* sql */
      `
        SELECT
          *
        FROM
          ${this.tableName}
        ORDER BY
          timestamp ASC
      `,
      []
    );
    return attachments.map(attachmentFromSql);
  }
  /**
   * Inserts or updates an attachment record within an existing transaction.
   *
   * Performs an upsert operation (INSERT OR REPLACE). Must be called within
   * an active database transaction context.
   *
   * @param attachment - The attachment record to upsert
   * @param context - Active database transaction context
   */
  async upsertAttachment(attachment, context) {
    await context.execute(
      /* sql */
      `
        INSERT
        OR REPLACE INTO ${this.tableName} (
          id,
          filename,
          local_uri,
          size,
          media_type,
          timestamp,
          state,
          has_synced,
          meta_data
        )
        VALUES
          (?, ?, ?, ?, ?, ?, ?, ?, ?)
      `,
      [
        attachment.id,
        attachment.filename,
        attachment.localUri || null,
        attachment.size || null,
        attachment.mediaType || null,
        attachment.timestamp,
        attachment.state,
        // Booleans are persisted as 0/1 integers.
        attachment.hasSynced ? 1 : 0,
        attachment.metaData || null
      ]
    );
  }
  /**
   * Retrieves a single attachment record by id.
   *
   * @param id - Unique identifier of the attachment
   * @returns Promise resolving to the record, or undefined when not found
   */
  async getAttachment(id) {
    const attachment = await this.db.get(
      /* sql */
      `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          id = ?
      `,
      [id]
    );
    return attachment ? attachmentFromSql(attachment) : undefined;
  }
  /**
   * Permanently deletes an attachment record from the database.
   *
   * This operation removes the attachment record but does not delete
   * the associated local or remote files. File deletion should be handled
   * separately through the appropriate storage adapters.
   *
   * @param attachmentId - Unique identifier of the attachment to delete
   */
  async deleteAttachment(attachmentId) {
    await this.db.writeTransaction((tx) =>
      tx.execute(
        /* sql */
        `
          DELETE FROM ${this.tableName}
          WHERE
            id = ?
        `,
        [attachmentId]
      )
    );
  }
  /** Removes every record from the attachment queue table. */
  async clearQueue() {
    await this.db.writeTransaction((tx) => tx.execute(/* sql */ ` DELETE FROM ${this.tableName} `));
  }
  /**
   * Deletes archived attachments beyond the configured cache limit, in batches.
   *
   * The newest `archivedCacheLimit` archived records are retained; older ones
   * (up to 1000 per call) are passed to the optional callback and then deleted.
   *
   * @param callback - Invoked with the batch of records about to be deleted
   * @returns Promise resolving to true when no further batches remain
   *   (fewer than the batch limit were deleted), false when nothing was deleted
   *   is not possible — false is returned only when no records matched.
   */
  async deleteArchivedAttachments(callback) {
    const limit = 1000;
    const results = await this.db.getAll(
      /* sql */
      `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
        ORDER BY
          timestamp DESC
        LIMIT
          ?
        OFFSET
          ?
      `,
      [AttachmentState.ARCHIVED, limit, this.archivedCacheLimit]
    );
    const archivedAttachments = results.map(attachmentFromSql);
    if (archivedAttachments.length === 0) return false;
    await callback?.(archivedAttachments);
    this.logger.info(
      `Deleting ${archivedAttachments.length} archived attachments. Archived attachment exceeds cache archiveCacheLimit of ${this.archivedCacheLimit}.`
    );
    const ids = archivedAttachments.map((attachment) => attachment.id);
    // json_each lets us bind the whole id list as a single JSON parameter.
    await this.db.execute(
      /* sql */
      `
        DELETE FROM ${this.tableName}
        WHERE
          id IN (
            SELECT
              json_each.value
            FROM
              json_each (?)
          );
      `,
      [JSON.stringify(ids)]
    );
    this.logger.info(`Deleted ${archivedAttachments.length} archived attachments`);
    return archivedAttachments.length < limit;
  }
  /**
   * Saves multiple attachment records in a single transaction.
   *
   * All updates are saved in a single batch after processing.
   * If the attachments array is empty, no database operations are performed.
   *
   * @param attachments - Array of attachment records to save
   */
  async saveAttachments(attachments) {
    if (attachments.length === 0) {
      return;
    }
    await this.db.writeTransaction(async (tx) => {
      for (const attachment of attachments) {
        await this.upsertAttachment(attachment, tx);
      }
    });
  }
}
598
+
599
/** Listener event names emitted by watched queries. */
var WatchedQueryListenerEvent = {};
WatchedQueryListenerEvent.ON_DATA = 'onData';
WatchedQueryListenerEvent.ON_ERROR = 'onError';
WatchedQueryListenerEvent.ON_STATE_CHANGE = 'onStateChange';
WatchedQueryListenerEvent.SETTINGS_WILL_UPDATE = 'settingsWillUpdate';
WatchedQueryListenerEvent.CLOSED = 'closed';

/** Default throttle window (milliseconds) applied to watched queries. */
const DEFAULT_WATCH_THROTTLE_MS = 30;
const DEFAULT_WATCH_QUERY_OPTIONS = {
  throttleMs: DEFAULT_WATCH_THROTTLE_MS,
  reportFetching: true
};
612
+
613
/**
 * Orchestrates attachment synchronization between local and remote storage.
 * Handles uploads, downloads, deletions, and state transitions.
 *
 * @internal
 */
class SyncingService {
  attachmentService;
  localStorage;
  remoteStorage;
  logger;
  errorHandler;

  constructor(attachmentService, localStorage, remoteStorage, logger, errorHandler) {
    this.attachmentService = attachmentService;
    this.localStorage = localStorage;
    this.remoteStorage = remoteStorage;
    this.logger = logger;
    this.errorHandler = errorHandler;
  }

  /**
   * Processes attachments based on their state (upload, download, or delete).
   * All updates are saved in a single batch after processing.
   *
   * @param attachments - Array of attachment records to process
   * @param context - Attachment context for database operations
   * @returns Promise that resolves when all attachments have been processed and saved
   */
  async processAttachments(attachments, context) {
    const updates = [];
    for (const record of attachments) {
      if (record.state === AttachmentState.QUEUED_UPLOAD) {
        updates.push(await this.uploadAttachment(record));
      } else if (record.state === AttachmentState.QUEUED_DOWNLOAD) {
        updates.push(await this.downloadAttachment(record));
      } else if (record.state === AttachmentState.QUEUED_DELETE) {
        updates.push(await this.deleteAttachment(record));
      }
    }
    await context.saveAttachments(updates);
  }

  /**
   * Applies the error handler's retry decision after a failed operation.
   * Retries (by default, or when the handler says so) return the record
   * unchanged; declined retries archive the record.
   */
  async resolveFailure(retryDecision, attachment) {
    const shouldRetry = (await retryDecision) ?? true;
    if (shouldRetry) {
      return attachment;
    }
    return { ...attachment, state: AttachmentState.ARCHIVED };
  }

  /**
   * Uploads an attachment from local storage to remote storage.
   * On success, marks as SYNCED. On failure, defers to error handler or archives.
   *
   * @param attachment - The attachment record to upload
   * @returns Updated attachment record with new state
   * @throws Error if the attachment has no localUri
   */
  async uploadAttachment(attachment) {
    this.logger.info(`Uploading attachment ${attachment.filename}`);
    try {
      if (attachment.localUri == null) {
        throw new Error(`No localUri for attachment ${attachment.id}`);
      }
      const data = await this.localStorage.readFile(attachment.localUri);
      await this.remoteStorage.uploadFile(data, attachment);
      return { ...attachment, state: AttachmentState.SYNCED, hasSynced: true };
    } catch (error) {
      return this.resolveFailure(this.errorHandler?.onUploadError(attachment, error), attachment);
    }
  }

  /**
   * Downloads an attachment from remote storage to local storage.
   * On success, marks as SYNCED. On failure, defers to error handler or archives.
   *
   * @param attachment - The attachment record to download
   * @returns Updated attachment record with local URI and new state
   */
  async downloadAttachment(attachment) {
    this.logger.info(`Downloading attachment ${attachment.filename}`);
    try {
      const fileData = await this.remoteStorage.downloadFile(attachment);
      const localUri = this.localStorage.getLocalUri(attachment.filename);
      await this.localStorage.saveFile(localUri, fileData);
      return { ...attachment, state: AttachmentState.SYNCED, localUri: localUri, hasSynced: true };
    } catch (error) {
      return this.resolveFailure(this.errorHandler?.onDownloadError(attachment, error), attachment);
    }
  }

  /**
   * Deletes an attachment from both remote and local storage.
   * Removes the remote file, local file (if exists), and the attachment record.
   * On failure, defers to error handler or archives.
   *
   * @param attachment - The attachment record to delete
   * @returns Updated attachment record
   */
  async deleteAttachment(attachment) {
    try {
      await this.remoteStorage.deleteFile(attachment);
      if (attachment.localUri) {
        await this.localStorage.deleteFile(attachment.localUri);
      }
      await this.attachmentService.withContext(async (ctx) => {
        await ctx.deleteAttachment(attachment.id);
      });
      return { ...attachment, state: AttachmentState.ARCHIVED };
    } catch (error) {
      return this.resolveFailure(this.errorHandler?.onDeleteError(attachment, error), attachment);
    }
  }

  /**
   * Performs cleanup of archived attachments by removing their local files and records.
   * Errors during local file deletion are logged but do not prevent record deletion.
   */
  async deleteArchivedAttachments(context) {
    return await context.deleteArchivedAttachments(async (archived) => {
      for (const record of archived) {
        if (!record.localUri) {
          continue;
        }
        try {
          await this.localStorage.deleteFile(record.localUri);
        } catch (error) {
          this.logger.error('Error deleting local file for archived attachment', error);
        }
      }
    });
  }
}
777
+
778
/**
 * Wrapper for async-mutex runExclusive, which allows for a timeout on each exclusive lock.
 *
 * @param mutex - Mutex-like object exposing `runExclusive(fn)`
 * @param callback - Work to run while holding the lock
 * @param options - Optional `{ timeoutMs }` bounding the wait for the lock
 * @returns Resolves with the callback's result; rejects on timeout, callback
 *   error, or failure to acquire the lock
 */
async function mutexRunExclusive(mutex, callback, options) {
  return new Promise((resolve, reject) => {
    const timeout = options?.timeoutMs;
    let timedOut = false;
    const timeoutId = timeout
      ? setTimeout(() => {
          timedOut = true;
          reject(new Error('Timeout waiting for lock'));
        }, timeout)
      : undefined;
    mutex
      .runExclusive(async () => {
        if (timeoutId) {
          clearTimeout(timeoutId);
        }
        // Lock acquired after the caller already gave up; skip the work.
        if (timedOut) return;
        try {
          resolve(await callback());
        } catch (ex) {
          reject(ex);
        }
      })
      // Fix: propagate failures of the lock acquisition itself (e.g. a
      // cancelled mutex). The original left these as an unhandled rejection
      // while the returned promise hung forever.
      .catch((ex) => {
        if (timeoutId) {
          clearTimeout(timeoutId);
        }
        reject(ex);
      });
  });
}
806
+
807
/**
 * Service for querying and watching attachment records in the database.
 *
 * @internal
 */
class AttachmentService {
  db;
  logger;
  tableName;
  // Serializes all context access through withContext.
  mutex = new Mutex();
  context;

  constructor(db, logger, tableName = 'attachments', archivedCacheLimit = 100) {
    this.db = db;
    this.logger = logger;
    this.tableName = tableName;
    this.context = new AttachmentContext(db, tableName, logger, archivedCacheLimit);
  }

  /**
   * Creates a differential watch query for active attachments requiring synchronization.
   * @returns Watch query that emits changes for queued uploads, downloads, and deletes
   */
  watchActiveAttachments({ throttleMs } = {}) {
    this.logger.info('Watching active attachments...');
    const sql = /* sql */ `
      SELECT
        *
      FROM
        ${this.tableName}
      WHERE
        state = ?
        OR state = ?
        OR state = ?
      ORDER BY
        timestamp ASC
    `;
    const parameters = [
      AttachmentState.QUEUED_UPLOAD,
      AttachmentState.QUEUED_DOWNLOAD,
      AttachmentState.QUEUED_DELETE
    ];
    return this.db.query({ sql, parameters }).differentialWatch({ throttleMs });
  }

  /**
   * Executes a callback with exclusive access to the attachment context.
   */
  async withContext(callback) {
    return mutexRunExclusive(this.mutex, async () => callback(this.context));
  }
}
858
+
859
+ /**
860
+ * AttachmentQueue manages the lifecycle and synchronization of attachments
861
+ * between local and remote storage.
862
+ * Provides automatic synchronization, upload/download queuing, attachment monitoring,
863
+ * verification and repair of local files, and cleanup of archived attachments.
864
+ *
865
+ * @experimental
866
+ * @alpha This is currently experimental and may change without a major version bump.
867
+ */
868
+ class AttachmentQueue {
869
+ /** Timer for periodic synchronization operations */
870
+ periodicSyncTimer;
871
+ /** Service for synchronizing attachments between local and remote storage */
872
+ syncingService;
873
+ /** Adapter for local file storage operations */
874
+ localStorage;
875
+ /** Adapter for remote file storage operations */
876
+ remoteStorage;
877
+ /**
878
+ * Callback function to watch for changes in attachment references in your data model.
879
+ *
880
+ * This should be implemented by the user of AttachmentQueue to monitor changes in your application's
881
+ * data that reference attachments. When attachments are added, removed, or modified,
882
+ * this callback should trigger the onUpdate function with the current set of attachments.
883
+ */
884
+ watchAttachments;
885
+ /** Name of the database table storing attachment records */
886
+ tableName;
887
+ /** Logger instance for diagnostic information */
888
+ logger;
889
+ /** Interval in milliseconds between periodic sync operations. Default: 30000 (30 seconds) */
890
+ syncIntervalMs = 30 * 1000;
891
+ /** Duration in milliseconds to throttle sync operations */
892
+ syncThrottleDuration;
893
+ /** Whether to automatically download remote attachments. Default: true */
894
+ downloadAttachments = true;
895
+ /** Maximum number of archived attachments to keep before cleanup. Default: 100 */
896
+ archivedCacheLimit;
897
+ /** Service for managing attachment-related database operations */
898
+ attachmentService;
899
+ /** PowerSync database instance */
900
+ db;
901
+ /** Cleanup function for status change listener */
902
+ statusListenerDispose;
903
+ watchActiveAttachments;
904
+ watchAttachmentsAbortController;
905
+ /**
906
+ * Creates a new AttachmentQueue instance.
907
+ *
908
+ * @param options - Configuration options
909
+ * @param options.db - PowerSync database instance
910
+ * @param options.remoteStorage - Remote storage adapter for upload/download operations
911
+ * @param options.localStorage - Local storage adapter for file persistence
912
+ * @param options.watchAttachments - Callback for monitoring attachment changes in your data model
913
+ * @param options.tableName - Name of the table to store attachment records. Default: 'ps_attachment_queue'
914
+ * @param options.logger - Logger instance. Defaults to db.logger
915
+ * @param options.syncIntervalMs - Interval between automatic syncs in milliseconds. Default: 30000
916
+ * @param options.syncThrottleDuration - Throttle duration for sync operations in milliseconds. Default: 1000
917
+ * @param options.downloadAttachments - Whether to automatically download remote attachments. Default: true
918
+ * @param options.archivedCacheLimit - Maximum archived attachments before cleanup. Default: 100
919
+ */
920
+ constructor({ db, localStorage, remoteStorage, watchAttachments, logger, tableName = ATTACHMENT_TABLE, syncIntervalMs = 30 * 1000, syncThrottleDuration = DEFAULT_WATCH_THROTTLE_MS, downloadAttachments = true, archivedCacheLimit = 100, errorHandler }) {
921
+ this.db = db;
922
+ this.remoteStorage = remoteStorage;
923
+ this.localStorage = localStorage;
924
+ this.watchAttachments = watchAttachments;
925
+ this.tableName = tableName;
926
+ this.syncIntervalMs = syncIntervalMs;
927
+ this.syncThrottleDuration = syncThrottleDuration;
928
+ this.archivedCacheLimit = archivedCacheLimit;
929
+ this.downloadAttachments = downloadAttachments;
930
+ this.logger = logger ?? db.logger;
931
+ this.attachmentService = new AttachmentService(db, this.logger, tableName, archivedCacheLimit);
932
+ this.syncingService = new SyncingService(this.attachmentService, localStorage, remoteStorage, this.logger, errorHandler);
933
+ }
934
+ /**
935
+ * Generates a new attachment ID using a SQLite UUID function.
936
+ *
937
+ * @returns Promise resolving to the new attachment ID
938
+ */
939
+ async generateAttachmentId() {
940
+ return this.db.get('SELECT uuid() as id').then((row) => row.id);
941
+ }
942
+ /**
943
+ * Starts the attachment synchronization process.
944
+ *
945
+ * This method:
946
+ * - Stops any existing sync operations
947
+ * - Sets up periodic synchronization based on syncIntervalMs
948
+ * - Registers listeners for active attachment changes
949
+ * - Processes watched attachments to queue uploads/downloads
950
+ * - Handles state transitions for archived and new attachments
951
+ */
952
+ async startSync() {
953
+ await this.stopSync();
954
+ this.watchActiveAttachments = this.attachmentService.watchActiveAttachments({
955
+ throttleMs: this.syncThrottleDuration
956
+ });
957
+ // immediately invoke the sync storage to initialize local storage
958
+ await this.localStorage.initialize();
959
+ await this.verifyAttachments();
960
+ // Sync storage periodically
961
+ this.periodicSyncTimer = setInterval(async () => {
962
+ await this.syncStorage();
963
+ }, this.syncIntervalMs);
964
+ // Sync storage when there is a change in active attachments
965
+ this.watchActiveAttachments.registerListener({
966
+ onDiff: async () => {
967
+ await this.syncStorage();
968
+ }
969
+ });
970
+ this.statusListenerDispose = this.db.registerListener({
971
+ statusChanged: (status) => {
972
+ if (status.connected) {
973
+ // Device came online, process attachments immediately
974
+ this.syncStorage().catch((error) => {
975
+ this.logger.error('Error syncing storage on connection:', error);
976
+ });
977
+ }
978
+ }
979
+ });
980
+ this.watchAttachmentsAbortController = new AbortController();
981
+ const signal = this.watchAttachmentsAbortController.signal;
982
+ // Process attachments when there is a change in watched attachments
983
+ this.watchAttachments(async (watchedAttachments) => {
984
+ // Skip processing if sync has been stopped
985
+ if (signal.aborted) {
986
+ return;
987
+ }
988
+ await this.attachmentService.withContext(async (ctx) => {
989
+ // Need to get all the attachments which are tracked in the DB.
990
+ // We might need to restore an archived attachment.
991
+ const currentAttachments = await ctx.getAttachments();
992
+ const attachmentUpdates = [];
993
+ for (const watchedAttachment of watchedAttachments) {
994
+ const existingQueueItem = currentAttachments.find((a) => a.id === watchedAttachment.id);
995
+ if (!existingQueueItem) {
996
+ // Item is watched but not in the queue yet. Need to add it.
997
+ if (!this.downloadAttachments) {
998
+ continue;
999
+ }
1000
+ const filename = watchedAttachment.filename ?? `${watchedAttachment.id}.${watchedAttachment.fileExtension}`;
1001
+ attachmentUpdates.push({
1002
+ id: watchedAttachment.id,
1003
+ filename,
1004
+ state: AttachmentState.QUEUED_DOWNLOAD,
1005
+ hasSynced: false,
1006
+ metaData: watchedAttachment.metaData,
1007
+ timestamp: new Date().getTime()
1008
+ });
1009
+ continue;
1010
+ }
1011
+ if (existingQueueItem.state === AttachmentState.ARCHIVED) {
1012
+ // The attachment is present again. Need to queue it for sync.
1013
+ // We might be able to optimize this in future
1014
+ if (existingQueueItem.hasSynced === true) {
1015
+ // No remote action required, we can restore the record (avoids deletion)
1016
+ attachmentUpdates.push({
1017
+ ...existingQueueItem,
1018
+ state: AttachmentState.SYNCED
1019
+ });
1020
+ }
1021
+ else {
1022
+ // The localURI should be set if the record was meant to be uploaded
1023
+ // and hasSynced is false then
1024
+ // it must be an upload operation
1025
+ const newState = existingQueueItem.localUri == null ? AttachmentState.QUEUED_DOWNLOAD : AttachmentState.QUEUED_UPLOAD;
1026
+ attachmentUpdates.push({
1027
+ ...existingQueueItem,
1028
+ state: newState
1029
+ });
1030
+ }
1031
+ }
1032
+ }
1033
+ for (const attachment of currentAttachments) {
1034
+ const notInWatchedItems = watchedAttachments.find((i) => i.id === attachment.id) == null;
1035
+ if (notInWatchedItems) {
1036
+ switch (attachment.state) {
1037
+ case AttachmentState.QUEUED_DELETE:
1038
+ case AttachmentState.QUEUED_UPLOAD:
1039
+ // Only archive if it has synced
1040
+ if (attachment.hasSynced === true) {
1041
+ attachmentUpdates.push({
1042
+ ...attachment,
1043
+ state: AttachmentState.ARCHIVED
1044
+ });
1045
+ }
1046
+ break;
1047
+ default:
1048
+ // Archive other states such as QUEUED_DOWNLOAD
1049
+ attachmentUpdates.push({
1050
+ ...attachment,
1051
+ state: AttachmentState.ARCHIVED
1052
+ });
1053
+ }
1054
+ }
1055
+ }
1056
+ if (attachmentUpdates.length > 0) {
1057
+ await ctx.saveAttachments(attachmentUpdates);
1058
+ }
1059
+ });
1060
+ }, signal);
1061
+ }
1062
+ /**
1063
+ * Synchronizes all active attachments between local and remote storage.
1064
+ *
1065
+ * This is called automatically at regular intervals when sync is started,
1066
+ * but can also be called manually to trigger an immediate sync.
1067
+ */
1068
+ async syncStorage() {
1069
+ await this.attachmentService.withContext(async (ctx) => {
1070
+ const activeAttachments = await ctx.getActiveAttachments();
1071
+ await this.localStorage.initialize();
1072
+ await this.syncingService.processAttachments(activeAttachments, ctx);
1073
+ await this.syncingService.deleteArchivedAttachments(ctx);
1074
+ });
1075
+ }
1076
+ /**
1077
+ * Stops the attachment synchronization process.
1078
+ *
1079
+ * Clears the periodic sync timer and closes all active attachment watchers.
1080
+ */
1081
+ async stopSync() {
1082
+ clearInterval(this.periodicSyncTimer);
1083
+ this.periodicSyncTimer = undefined;
1084
+ if (this.watchActiveAttachments)
1085
+ await this.watchActiveAttachments.close();
1086
+ if (this.watchAttachmentsAbortController) {
1087
+ this.watchAttachmentsAbortController.abort();
1088
+ }
1089
+ if (this.statusListenerDispose) {
1090
+ this.statusListenerDispose();
1091
+ this.statusListenerDispose = undefined;
1092
+ }
1093
+ }
1094
+ /**
1095
+ * Saves a file to local storage and queues it for upload to remote storage.
1096
+ *
1097
+ * @param options - File save options
1098
+ * @param options.data - The file data as ArrayBuffer, Blob, or base64 string
1099
+ * @param options.fileExtension - File extension (e.g., 'jpg', 'pdf')
1100
+ * @param options.mediaType - MIME type of the file (e.g., 'image/jpeg')
1101
+ * @param options.metaData - Optional metadata to associate with the attachment
1102
+ * @param options.id - Optional custom ID. If not provided, a UUID will be generated
1103
+ * @param options.updateHook - Optional callback to execute additional database operations
1104
+ * within the same transaction as the attachment creation
1105
+ * @returns Promise resolving to the created attachment record
1106
+ */
1107
+ async saveFile({ data, fileExtension, mediaType, metaData, id, updateHook }) {
1108
+ const resolvedId = id ?? (await this.generateAttachmentId());
1109
+ const filename = `${resolvedId}.${fileExtension}`;
1110
+ const localUri = this.localStorage.getLocalUri(filename);
1111
+ const size = await this.localStorage.saveFile(localUri, data);
1112
+ const attachment = {
1113
+ id: resolvedId,
1114
+ filename,
1115
+ mediaType,
1116
+ localUri,
1117
+ state: AttachmentState.QUEUED_UPLOAD,
1118
+ hasSynced: false,
1119
+ size,
1120
+ timestamp: new Date().getTime(),
1121
+ metaData
1122
+ };
1123
+ await this.attachmentService.withContext(async (ctx) => {
1124
+ await ctx.db.writeTransaction(async (tx) => {
1125
+ await updateHook?.(tx, attachment);
1126
+ await ctx.upsertAttachment(attachment, tx);
1127
+ });
1128
+ });
1129
+ return attachment;
1130
+ }
1131
+ async deleteFile({ id, updateHook }) {
1132
+ await this.attachmentService.withContext(async (ctx) => {
1133
+ const attachment = await ctx.getAttachment(id);
1134
+ if (!attachment) {
1135
+ throw new Error(`Attachment with id ${id} not found`);
1136
+ }
1137
+ await ctx.db.writeTransaction(async (tx) => {
1138
+ await updateHook?.(tx, attachment);
1139
+ await ctx.upsertAttachment({
1140
+ ...attachment,
1141
+ state: AttachmentState.QUEUED_DELETE,
1142
+ hasSynced: false
1143
+ }, tx);
1144
+ });
1145
+ });
1146
+ }
1147
+ async expireCache() {
1148
+ let isDone = false;
1149
+ while (!isDone) {
1150
+ await this.attachmentService.withContext(async (ctx) => {
1151
+ isDone = await this.syncingService.deleteArchivedAttachments(ctx);
1152
+ });
1153
+ }
1154
+ }
1155
+ async clearQueue() {
1156
+ await this.attachmentService.withContext(async (ctx) => {
1157
+ await ctx.clearQueue();
1158
+ });
1159
+ await this.localStorage.clear();
1160
+ }
1161
+ /**
1162
+ * Verifies the integrity of all attachment records and repairs inconsistencies.
1163
+ *
1164
+ * This method checks each attachment record against the local filesystem and:
1165
+ * - Updates localUri if the file exists at a different path
1166
+ * - Archives attachments with missing local files that haven't been uploaded
1167
+ * - Requeues synced attachments for download if their local files are missing
1168
+ */
1169
+ async verifyAttachments() {
1170
+ await this.attachmentService.withContext(async (ctx) => {
1171
+ const attachments = await ctx.getAttachments();
1172
+ const updates = [];
1173
+ for (const attachment of attachments) {
1174
+ if (attachment.localUri == null) {
1175
+ continue;
1176
+ }
1177
+ const exists = await this.localStorage.fileExists(attachment.localUri);
1178
+ if (exists) {
1179
+ // The file exists, this is correct
1180
+ continue;
1181
+ }
1182
+ const newLocalUri = this.localStorage.getLocalUri(attachment.filename);
1183
+ const newExists = await this.localStorage.fileExists(newLocalUri);
1184
+ if (newExists) {
1185
+ // The file exists locally but the localUri is broken, we update it.
1186
+ updates.push({
1187
+ ...attachment,
1188
+ localUri: newLocalUri
1189
+ });
1190
+ }
1191
+ else {
1192
+ // the file doesn't exist locally.
1193
+ if (attachment.state === AttachmentState.SYNCED) {
1194
+ // the file has been successfully synced to remote storage but is missing
1195
+ // we download it again
1196
+ updates.push({
1197
+ ...attachment,
1198
+ state: AttachmentState.QUEUED_DOWNLOAD,
1199
+ localUri: undefined
1200
+ });
1201
+ }
1202
+ else {
1203
+ // the file wasn't successfully synced to remote storage, we archive it
1204
+ updates.push({
1205
+ ...attachment,
1206
+ state: AttachmentState.ARCHIVED,
1207
+ localUri: undefined // Clears the value
1208
+ });
1209
+ }
1210
+ }
1211
+ }
1212
+ await ctx.saveAttachments(updates);
1213
+ });
1214
+ }
1215
+ }
1216
+
1217
+ var EncodingType;
1218
+ (function (EncodingType) {
1219
+ EncodingType["UTF8"] = "utf8";
1220
+ EncodingType["Base64"] = "base64";
1221
+ })(EncodingType || (EncodingType = {}));
1222
+
5
1223
  function getDefaultExportFromCjs (x) {
6
1224
  return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
7
1225
  }
@@ -1165,20 +2383,6 @@ class MetaBaseObserver extends BaseObserver {
1165
2383
  }
1166
2384
  }
1167
2385
 
1168
- var WatchedQueryListenerEvent;
1169
- (function (WatchedQueryListenerEvent) {
1170
- WatchedQueryListenerEvent["ON_DATA"] = "onData";
1171
- WatchedQueryListenerEvent["ON_ERROR"] = "onError";
1172
- WatchedQueryListenerEvent["ON_STATE_CHANGE"] = "onStateChange";
1173
- WatchedQueryListenerEvent["SETTINGS_WILL_UPDATE"] = "settingsWillUpdate";
1174
- WatchedQueryListenerEvent["CLOSED"] = "closed";
1175
- })(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));
1176
- const DEFAULT_WATCH_THROTTLE_MS = 30;
1177
- const DEFAULT_WATCH_QUERY_OPTIONS = {
1178
- throttleMs: DEFAULT_WATCH_THROTTLE_MS,
1179
- reportFetching: true
1180
- };
1181
-
1182
2386
  /**
1183
2387
  * Performs underlying watching and yields a stream of results.
1184
2388
  * @internal
@@ -6720,7 +7924,7 @@ function requireDist () {
6720
7924
 
6721
7925
  var distExports = requireDist();
6722
7926
 
6723
- var version = "1.45.0";
7927
+ var version = "1.47.0";
6724
7928
  var PACKAGE = {
6725
7929
  version: version};
6726
7930
 
@@ -6791,6 +7995,16 @@ class DataStream extends BaseObserver {
6791
7995
  * @returns a Data payload or Null if the stream closed.
6792
7996
  */
6793
7997
  async read() {
7998
+ if (this.closed) {
7999
+ return null;
8000
+ }
8001
+ // Wait for any pending processing to complete first.
8002
+ // This ensures we register our listener before calling processQueue(),
8003
+ // avoiding a race where processQueue() sees no reader and returns early.
8004
+ if (this.processingPromise) {
8005
+ await this.processingPromise;
8006
+ }
8007
+ // Re-check after await - stream may have closed while we were waiting
6794
8008
  if (this.closed) {
6795
8009
  return null;
6796
8010
  }
@@ -6830,7 +8044,7 @@ class DataStream extends BaseObserver {
6830
8044
  }
6831
8045
  const promise = (this.processingPromise = this._processQueue());
6832
8046
  promise.finally(() => {
6833
- return (this.processingPromise = null);
8047
+ this.processingPromise = null;
6834
8048
  });
6835
8049
  return promise;
6836
8050
  }
@@ -6862,7 +8076,6 @@ class DataStream extends BaseObserver {
6862
8076
  this.notifyDataAdded = null;
6863
8077
  }
6864
8078
  if (this.dataQueue.length > 0) {
6865
- // Next tick
6866
8079
  setTimeout(() => this.processQueue());
6867
8080
  }
6868
8081
  }
@@ -7482,7 +8695,11 @@ class AbstractRemote {
7482
8695
  };
7483
8696
  const stream = new DataStream({
7484
8697
  logger: this.logger,
7485
- mapLine: mapLine
8698
+ mapLine: mapLine,
8699
+ pressure: {
8700
+ highWaterMark: 20,
8701
+ lowWaterMark: 10
8702
+ }
7486
8703
  });
7487
8704
  abortSignal?.addEventListener('abort', () => {
7488
8705
  closeReader();
@@ -7490,42 +8707,47 @@ class AbstractRemote {
7490
8707
  });
7491
8708
  const decoder = this.createTextDecoder();
7492
8709
  let buffer = '';
7493
- const l = stream.registerListener({
7494
- lowWater: async () => {
7495
- if (stream.closed || abortSignal?.aborted || readerReleased) {
8710
+ const consumeStream = async () => {
8711
+ while (!stream.closed && !abortSignal?.aborted && !readerReleased) {
8712
+ const { done, value } = await reader.read();
8713
+ if (done) {
8714
+ const remaining = buffer.trim();
8715
+ if (remaining.length != 0) {
8716
+ stream.enqueueData(remaining);
8717
+ }
8718
+ stream.close();
8719
+ await closeReader();
7496
8720
  return;
7497
8721
  }
7498
- try {
7499
- let didCompleteLine = false;
7500
- while (!didCompleteLine) {
7501
- const { done, value } = await reader.read();
7502
- if (done) {
7503
- const remaining = buffer.trim();
7504
- if (remaining.length != 0) {
7505
- stream.enqueueData(remaining);
7506
- }
7507
- stream.close();
7508
- await closeReader();
7509
- return;
7510
- }
7511
- const data = decoder.decode(value, { stream: true });
7512
- buffer += data;
7513
- const lines = buffer.split('\n');
7514
- for (var i = 0; i < lines.length - 1; i++) {
7515
- var l = lines[i].trim();
7516
- if (l.length > 0) {
7517
- stream.enqueueData(l);
7518
- didCompleteLine = true;
7519
- }
7520
- }
7521
- buffer = lines[lines.length - 1];
8722
+ const data = decoder.decode(value, { stream: true });
8723
+ buffer += data;
8724
+ const lines = buffer.split('\n');
8725
+ for (var i = 0; i < lines.length - 1; i++) {
8726
+ var l = lines[i].trim();
8727
+ if (l.length > 0) {
8728
+ stream.enqueueData(l);
7522
8729
  }
7523
8730
  }
7524
- catch (ex) {
7525
- stream.close();
7526
- throw ex;
8731
+ buffer = lines[lines.length - 1];
8732
+ // Implement backpressure by waiting for the low water mark to be reached
8733
+ if (stream.dataQueue.length > stream.highWatermark) {
8734
+ await new Promise((resolve) => {
8735
+ const dispose = stream.registerListener({
8736
+ lowWater: async () => {
8737
+ resolve();
8738
+ dispose();
8739
+ },
8740
+ closed: () => {
8741
+ resolve();
8742
+ dispose();
8743
+ }
8744
+ });
8745
+ });
7527
8746
  }
7528
- },
8747
+ }
8748
+ };
8749
+ consumeStream().catch(ex => this.logger.error('Error consuming stream', ex));
8750
+ const l = stream.registerListener({
7529
8751
  closed: () => {
7530
8752
  closeReader();
7531
8753
  l?.();
@@ -7604,18 +8826,17 @@ var SyncClientImplementation;
7604
8826
  *
7605
8827
  * This is the default option.
7606
8828
  *
7607
- * @deprecated Don't use {@link SyncClientImplementation.JAVASCRIPT} directly. Instead, use
7608
- * {@link DEFAULT_SYNC_CLIENT_IMPLEMENTATION} or omit the option. The explicit choice to use
7609
- * the JavaScript-based sync implementation will be removed from a future version of the SDK.
8829
+ * @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
8830
+ * the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
8831
+ * version of the PowerSync SDK.
7610
8832
  */
7611
8833
  SyncClientImplementation["JAVASCRIPT"] = "js";
7612
8834
  /**
7613
8835
  * This implementation offloads the sync line decoding and handling into the PowerSync
7614
8836
  * core extension.
7615
8837
  *
7616
- * @experimental
7617
- * While this implementation is more performant than {@link SyncClientImplementation.JAVASCRIPT},
7618
- * it has seen less real-world testing and is marked as __experimental__ at the moment.
8838
+ * This option is more performant than the {@link JAVASCRIPT} client, enabled by default and the
8839
+ * recommended client implementation for all apps.
7619
8840
  *
7620
8841
  * ## Compatibility warning
7621
8842
  *
@@ -7633,13 +8854,9 @@ var SyncClientImplementation;
7633
8854
  SyncClientImplementation["RUST"] = "rust";
7634
8855
  })(SyncClientImplementation || (SyncClientImplementation = {}));
7635
8856
  /**
7636
- * The default {@link SyncClientImplementation} to use.
7637
- *
7638
- * Please use this field instead of {@link SyncClientImplementation.JAVASCRIPT} directly. A future version
7639
- * of the PowerSync SDK will enable {@link SyncClientImplementation.RUST} by default and remove the JavaScript
7640
- * option.
8857
+ * The default {@link SyncClientImplementation} to use, {@link SyncClientImplementation.RUST}.
7641
8858
  */
7642
- const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = SyncClientImplementation.JAVASCRIPT;
8859
+ const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = SyncClientImplementation.RUST;
7643
8860
  const DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
7644
8861
  const DEFAULT_RETRY_DELAY_MS = 5000;
7645
8862
  const DEFAULT_STREAMING_SYNC_OPTIONS = {
@@ -8049,6 +9266,9 @@ The next upload iteration will be delayed.`);
8049
9266
  if (rawTables != null && rawTables.length) {
8050
9267
  this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
8051
9268
  }
9269
+ if (this.activeStreams.length) {
9270
+ this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
9271
+ }
8052
9272
  this.logger.debug('Streaming sync iteration started');
8053
9273
  this.options.adapter.startSession();
8054
9274
  let [req, bucketMap] = await this.collectLocalBucketState();
@@ -8564,6 +9784,27 @@ The next upload iteration will be delayed.`);
8564
9784
  }
8565
9785
  }
8566
9786
 
9787
+ const CLAIM_STORE = new Map();
9788
+ /**
9789
+ * @internal
9790
+ * @experimental
9791
+ */
9792
+ const MEMORY_TRIGGER_CLAIM_MANAGER = {
9793
+ async obtainClaim(identifier) {
9794
+ if (CLAIM_STORE.has(identifier)) {
9795
+ throw new Error(`A claim is already present for ${identifier}`);
9796
+ }
9797
+ const release = async () => {
9798
+ CLAIM_STORE.delete(identifier);
9799
+ };
9800
+ CLAIM_STORE.set(identifier, release);
9801
+ return release;
9802
+ },
9803
+ async checkClaim(identifier) {
9804
+ return CLAIM_STORE.has(identifier);
9805
+ }
9806
+ };
9807
+
8567
9808
  /**
8568
9809
  * SQLite operations to track changes for with {@link TriggerManager}
8569
9810
  * @experimental
@@ -8575,9 +9816,20 @@ var DiffTriggerOperation;
8575
9816
  DiffTriggerOperation["DELETE"] = "DELETE";
8576
9817
  })(DiffTriggerOperation || (DiffTriggerOperation = {}));
8577
9818
 
9819
+ const DEFAULT_TRIGGER_MANAGER_CONFIGURATION = {
9820
+ useStorageByDefault: false
9821
+ };
9822
+ const TRIGGER_CLEANUP_INTERVAL_MS = 120_000; // 2 minutes
9823
+ /**
9824
+ * @internal
9825
+ * @experimental
9826
+ */
8578
9827
  class TriggerManagerImpl {
8579
9828
  options;
8580
9829
  schema;
9830
+ defaultConfig;
9831
+ cleanupTimeout;
9832
+ isDisposed;
8581
9833
  constructor(options) {
8582
9834
  this.options = options;
8583
9835
  this.schema = options.schema;
@@ -8586,6 +9838,33 @@ class TriggerManagerImpl {
8586
9838
  this.schema = schema;
8587
9839
  }
8588
9840
  });
9841
+ this.isDisposed = false;
9842
+ /**
9843
+ * Configure a cleanup to run on an interval.
9844
+ * The interval is configured using setTimeout to take the async
9845
+ * execution time of the callback into account.
9846
+ */
9847
+ this.defaultConfig = DEFAULT_TRIGGER_MANAGER_CONFIGURATION;
9848
+ const cleanupCallback = async () => {
9849
+ this.cleanupTimeout = null;
9850
+ if (this.isDisposed) {
9851
+ return;
9852
+ }
9853
+ try {
9854
+ await this.cleanupResources();
9855
+ }
9856
+ catch (ex) {
9857
+ this.db.logger.error(`Caught error while attempting to cleanup triggers`, ex);
9858
+ }
9859
+ finally {
9860
+ // if not closed, set another timeout
9861
+ if (this.isDisposed) {
9862
+ return;
9863
+ }
9864
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
9865
+ }
9866
+ };
9867
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
8589
9868
  }
8590
9869
  get db() {
8591
9870
  return this.options.db;
@@ -8603,13 +9882,95 @@ class TriggerManagerImpl {
8603
9882
  await tx.execute(/* sql */ `DROP TRIGGER IF EXISTS ${triggerId}; `);
8604
9883
  }
8605
9884
  }
9885
+ dispose() {
9886
+ this.isDisposed = true;
9887
+ if (this.cleanupTimeout) {
9888
+ clearTimeout(this.cleanupTimeout);
9889
+ }
9890
+ }
9891
+ /**
9892
+ * Updates default config settings for platform specific use-cases.
9893
+ */
9894
+ updateDefaults(config) {
9895
+ this.defaultConfig = {
9896
+ ...this.defaultConfig,
9897
+ ...config
9898
+ };
9899
+ }
9900
+ generateTriggerName(operation, destinationTable, triggerId) {
9901
+ return `__ps_temp_trigger_${operation.toLowerCase()}__${destinationTable}__${triggerId}`;
9902
+ }
9903
+ /**
9904
+ * Cleanup any SQLite triggers or tables that are no longer in use.
9905
+ */
9906
+ async cleanupResources() {
9907
+ // we use the database here since cleanupResources is called during the PowerSyncDatabase initialization
9908
+ await this.db.database.writeLock(async (ctx) => {
9909
+ /**
9910
+ * Note: We only cleanup persisted triggers. These are tracked in the sqlite_master table.
9911
+ * temporary triggers will not be affected by this.
9912
+ * Query all triggers that match our naming pattern
9913
+ */
9914
+ const triggers = await ctx.getAll(/* sql */ `
9915
+ SELECT
9916
+ name
9917
+ FROM
9918
+ sqlite_master
9919
+ WHERE
9920
+ type = 'trigger'
9921
+ AND name LIKE '__ps_temp_trigger_%'
9922
+ `);
9923
+ /** Use regex to extract table names and IDs from trigger names
9924
+ * Trigger naming convention: __ps_temp_trigger_<operation>__<destination_table>__<id>
9925
+ */
9926
+ const triggerPattern = /^__ps_temp_trigger_(?:insert|update|delete)__(.+)__([a-f0-9_]{36})$/i;
9927
+ const trackedItems = new Map();
9928
+ for (const trigger of triggers) {
9929
+ const match = trigger.name.match(triggerPattern);
9930
+ if (match) {
9931
+ const [, table, id] = match;
9932
+ // Collect all trigger names for each id combo
9933
+ const existing = trackedItems.get(id);
9934
+ if (existing) {
9935
+ existing.triggerNames.push(trigger.name);
9936
+ }
9937
+ else {
9938
+ trackedItems.set(id, { table, id, triggerNames: [trigger.name] });
9939
+ }
9940
+ }
9941
+ }
9942
+ for (const trackedItem of trackedItems.values()) {
9943
+ // check if there is anything holding on to this item
9944
+ const hasClaim = await this.options.claimManager.checkClaim(trackedItem.id);
9945
+ if (hasClaim) {
9946
+ // This does not require cleanup
9947
+ continue;
9948
+ }
9949
+ this.db.logger.debug(`Clearing resources for trigger ${trackedItem.id} with table ${trackedItem.table}`);
9950
+ // We need to delete the triggers and table
9951
+ for (const triggerName of trackedItem.triggerNames) {
9952
+ await ctx.execute(`DROP TRIGGER IF EXISTS ${triggerName}`);
9953
+ }
9954
+ await ctx.execute(`DROP TABLE IF EXISTS ${trackedItem.table}`);
9955
+ }
9956
+ });
9957
+ }
8606
9958
  async createDiffTrigger(options) {
8607
9959
  await this.db.waitForReady();
8608
- const { source, destination, columns, when, hooks } = options;
9960
+ const { source, destination, columns, when, hooks,
9961
+ // Fall back to the provided default if not given on this level
9962
+ useStorage = this.defaultConfig.useStorageByDefault } = options;
8609
9963
  const operations = Object.keys(when);
8610
9964
  if (operations.length == 0) {
8611
9965
  throw new Error('At least one WHEN operation must be specified for the trigger.');
8612
9966
  }
9967
+ /**
9968
+ * The clause to use when executing
9969
+ * CREATE ${tableTriggerTypeClause} TABLE
9970
+ * OR
9971
+ * CREATE ${tableTriggerTypeClause} TRIGGER
9972
+ */
9973
+ const tableTriggerTypeClause = !useStorage ? 'TEMP' : '';
8613
9974
  const whenClauses = Object.fromEntries(Object.entries(when).map(([operation, filter]) => [operation, `WHEN ${filter}`]));
8614
9975
  /**
8615
9976
  * Allow specifying the View name as the source.
@@ -8623,6 +9984,7 @@ class TriggerManagerImpl {
8623
9984
  const internalSource = sourceDefinition.internalName;
8624
9985
  const triggerIds = [];
8625
9986
  const id = await this.getUUID();
9987
+ const releaseStorageClaim = useStorage ? await this.options.claimManager.obtainClaim(id) : null;
8626
9988
  /**
8627
9989
  * We default to replicating all columns if no columns array is provided.
8628
9990
  */
@@ -8655,26 +10017,27 @@ class TriggerManagerImpl {
8655
10017
  return this.db.writeLock(async (tx) => {
8656
10018
  await this.removeTriggers(tx, triggerIds);
8657
10019
  await tx.execute(/* sql */ `DROP TABLE IF EXISTS ${destination};`);
10020
+ await releaseStorageClaim?.();
8658
10021
  });
8659
10022
  };
8660
10023
  const setup = async (tx) => {
8661
10024
  // Allow user code to execute in this lock context before the trigger is created.
8662
10025
  await hooks?.beforeCreate?.(tx);
8663
10026
  await tx.execute(/* sql */ `
8664
- CREATE TEMP TABLE ${destination} (
10027
+ CREATE ${tableTriggerTypeClause} TABLE ${destination} (
8665
10028
  operation_id INTEGER PRIMARY KEY AUTOINCREMENT,
8666
10029
  id TEXT,
8667
10030
  operation TEXT,
8668
10031
  timestamp TEXT,
8669
10032
  value TEXT,
8670
10033
  previous_value TEXT
8671
- );
10034
+ )
8672
10035
  `);
8673
10036
  if (operations.includes(DiffTriggerOperation.INSERT)) {
8674
- const insertTriggerId = `ps_temp_trigger_insert_${id}`;
10037
+ const insertTriggerId = this.generateTriggerName(DiffTriggerOperation.INSERT, destination, id);
8675
10038
  triggerIds.push(insertTriggerId);
8676
10039
  await tx.execute(/* sql */ `
8677
- CREATE TEMP TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[DiffTriggerOperation.INSERT]} BEGIN
10040
+ CREATE ${tableTriggerTypeClause} TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[DiffTriggerOperation.INSERT]} BEGIN
8678
10041
  INSERT INTO
8679
10042
  ${destination} (id, operation, timestamp, value)
8680
10043
  VALUES
@@ -8685,14 +10048,14 @@ class TriggerManagerImpl {
8685
10048
  ${jsonFragment('NEW')}
8686
10049
  );
8687
10050
 
8688
- END;
10051
+ END
8689
10052
  `);
8690
10053
  }
8691
10054
  if (operations.includes(DiffTriggerOperation.UPDATE)) {
8692
- const updateTriggerId = `ps_temp_trigger_update_${id}`;
10055
+ const updateTriggerId = this.generateTriggerName(DiffTriggerOperation.UPDATE, destination, id);
8693
10056
  triggerIds.push(updateTriggerId);
8694
10057
  await tx.execute(/* sql */ `
8695
- CREATE TEMP TRIGGER ${updateTriggerId} AFTER
10058
+ CREATE ${tableTriggerTypeClause} TRIGGER ${updateTriggerId} AFTER
8696
10059
  UPDATE ON ${internalSource} ${whenClauses[DiffTriggerOperation.UPDATE]} BEGIN
8697
10060
  INSERT INTO
8698
10061
  ${destination} (id, operation, timestamp, value, previous_value)
@@ -8709,11 +10072,11 @@ class TriggerManagerImpl {
8709
10072
  `);
8710
10073
  }
8711
10074
  if (operations.includes(DiffTriggerOperation.DELETE)) {
8712
- const deleteTriggerId = `ps_temp_trigger_delete_${id}`;
10075
+ const deleteTriggerId = this.generateTriggerName(DiffTriggerOperation.DELETE, destination, id);
8713
10076
  triggerIds.push(deleteTriggerId);
8714
10077
  // Create delete trigger for basic JSON
8715
10078
  await tx.execute(/* sql */ `
8716
- CREATE TEMP TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[DiffTriggerOperation.DELETE]} BEGIN
10079
+ CREATE ${tableTriggerTypeClause} TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[DiffTriggerOperation.DELETE]} BEGIN
8717
10080
  INSERT INTO
8718
10081
  ${destination} (id, operation, timestamp, value)
8719
10082
  VALUES
@@ -8757,7 +10120,7 @@ class TriggerManagerImpl {
8757
10120
  // If no array is provided, we use all columns from the source table.
8758
10121
  const contextColumns = columns ?? sourceDefinition.columns.map((col) => col.name);
8759
10122
  const id = await this.getUUID();
8760
- const destination = `ps_temp_track_${source}_${id}`;
10123
+ const destination = `__ps_temp_track_${source}_${id}`;
8761
10124
  // register an onChange before the trigger is created
8762
10125
  const abortController = new AbortController();
8763
10126
  const abortOnChange = () => abortController.abort();
@@ -8912,6 +10275,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
8912
10275
  * Allows creating SQLite triggers which can be used to track various operations on SQLite tables.
8913
10276
  */
8914
10277
  triggers;
10278
+ triggersImpl;
8915
10279
  logger;
8916
10280
  constructor(options) {
8917
10281
  super();
@@ -8975,9 +10339,10 @@ class AbstractPowerSyncDatabase extends BaseObserver {
8975
10339
  logger: this.logger
8976
10340
  });
8977
10341
  this._isReadyPromise = this.initialize();
8978
- this.triggers = new TriggerManagerImpl({
10342
+ this.triggers = this.triggersImpl = new TriggerManagerImpl({
8979
10343
  db: this,
8980
- schema: this.schema
10344
+ schema: this.schema,
10345
+ ...this.generateTriggerManagerConfig()
8981
10346
  });
8982
10347
  }
8983
10348
  /**
@@ -9003,6 +10368,15 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9003
10368
  get connecting() {
9004
10369
  return this.currentStatus?.connecting || false;
9005
10370
  }
10371
+ /**
10372
+ * Generates a base configuration for {@link TriggerManagerImpl}.
10373
+ * Implementations should override this if necessary.
10374
+ */
10375
+ generateTriggerManagerConfig() {
10376
+ return {
10377
+ claimManager: MEMORY_TRIGGER_CLAIM_MANAGER
10378
+ };
10379
+ }
9006
10380
  /**
9007
10381
  * @returns A promise which will resolve once initialization is completed.
9008
10382
  */
@@ -9063,14 +10437,15 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9063
10437
  async initialize() {
9064
10438
  await this._initialize();
9065
10439
  await this.bucketStorageAdapter.init();
9066
- await this._loadVersion();
10440
+ await this.loadVersion();
9067
10441
  await this.updateSchema(this.options.schema);
9068
10442
  await this.resolveOfflineSyncStatus();
9069
10443
  await this.database.execute('PRAGMA RECURSIVE_TRIGGERS=TRUE');
10444
+ await this.triggersImpl.cleanupResources();
9070
10445
  this.ready = true;
9071
10446
  this.iterateListeners((cb) => cb.initialized?.());
9072
10447
  }
9073
- async _loadVersion() {
10448
+ async loadVersion() {
9074
10449
  try {
9075
10450
  const { version } = await this.database.get('SELECT powersync_rs_version() as version');
9076
10451
  this.sdkVersion = version;
@@ -9186,7 +10561,6 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9186
10561
  await this.disconnect();
9187
10562
  await this.waitForReady();
9188
10563
  const { clearLocal } = options;
9189
- // TODO DB name, verify this is necessary with extension
9190
10564
  await this.database.writeTransaction(async (tx) => {
9191
10565
  await tx.execute('SELECT powersync_clear(?)', [clearLocal ? 1 : 0]);
9192
10566
  });
@@ -9218,6 +10592,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
9218
10592
  if (this.closed) {
9219
10593
  return;
9220
10594
  }
10595
+ this.triggersImpl.dispose();
9221
10596
  await this.iterateAsyncListeners(async (cb) => cb.closing?.());
9222
10597
  const { disconnect } = options;
9223
10598
  if (disconnect) {
@@ -10183,151 +11558,50 @@ class SqliteBucketStorage extends BaseObserver {
10183
11558
  ]);
10184
11559
  return r != 0;
10185
11560
  }
10186
- async migrateToFixedSubkeys() {
10187
- await this.writeTransaction(async (tx) => {
10188
- await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
10189
- await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
10190
- SqliteBucketStorage._subkeyMigrationKey,
10191
- '1'
10192
- ]);
10193
- });
10194
- }
10195
- static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
10196
- }
10197
- function hasMatchingPriority(priority, bucket) {
10198
- return bucket.priority != null && bucket.priority <= priority;
10199
- }
10200
-
10201
- // TODO JSON
10202
- class SyncDataBatch {
10203
- buckets;
10204
- static fromJSON(json) {
10205
- return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
10206
- }
10207
- constructor(buckets) {
10208
- this.buckets = buckets;
10209
- }
10210
- }
10211
-
10212
- /**
10213
- * Thrown when an underlying database connection is closed.
10214
- * This is particularly relevant when worker connections are marked as closed while
10215
- * operations are still in progress.
10216
- */
10217
- class ConnectionClosedError extends Error {
10218
- static NAME = 'ConnectionClosedError';
10219
- static MATCHES(input) {
10220
- /**
10221
- * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
10222
- * check might fail. This also performs a failsafe check.
10223
- * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
10224
- */
10225
- return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
10226
- }
10227
- constructor(message) {
10228
- super(message);
10229
- this.name = ConnectionClosedError.NAME;
10230
- }
10231
- }
10232
-
10233
- // https://www.sqlite.org/lang_expr.html#castexpr
10234
- var ColumnType;
10235
- (function (ColumnType) {
10236
- ColumnType["TEXT"] = "TEXT";
10237
- ColumnType["INTEGER"] = "INTEGER";
10238
- ColumnType["REAL"] = "REAL";
10239
- })(ColumnType || (ColumnType = {}));
10240
- const text = {
10241
- type: ColumnType.TEXT
10242
- };
10243
- const integer = {
10244
- type: ColumnType.INTEGER
10245
- };
10246
- const real = {
10247
- type: ColumnType.REAL
10248
- };
10249
- // powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
10250
- // In earlier versions this was limited to 63.
10251
- const MAX_AMOUNT_OF_COLUMNS = 1999;
10252
- const column = {
10253
- text,
10254
- integer,
10255
- real
10256
- };
10257
- class Column {
10258
- options;
10259
- constructor(options) {
10260
- this.options = options;
10261
- }
10262
- get name() {
10263
- return this.options.name;
10264
- }
10265
- get type() {
10266
- return this.options.type;
10267
- }
10268
- toJSON() {
10269
- return {
10270
- name: this.name,
10271
- type: this.type
10272
- };
10273
- }
10274
- }
10275
-
10276
- const DEFAULT_INDEX_COLUMN_OPTIONS = {
10277
- ascending: true
10278
- };
10279
- class IndexedColumn {
10280
- options;
10281
- static createAscending(column) {
10282
- return new IndexedColumn({
10283
- name: column,
10284
- ascending: true
10285
- });
10286
- }
10287
- constructor(options) {
10288
- this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
10289
- }
10290
- get name() {
10291
- return this.options.name;
10292
- }
10293
- get ascending() {
10294
- return this.options.ascending;
10295
- }
10296
- toJSON(table) {
10297
- return {
10298
- name: this.name,
10299
- ascending: this.ascending,
10300
- type: table.columns.find((column) => column.name === this.name)?.type ?? ColumnType.TEXT
10301
- };
10302
- }
10303
- }
10304
-
10305
- const DEFAULT_INDEX_OPTIONS = {
10306
- columns: []
10307
- };
10308
- class Index {
10309
- options;
10310
- static createAscending(options, columnNames) {
10311
- return new Index({
10312
- ...options,
10313
- columns: columnNames.map((name) => IndexedColumn.createAscending(name))
11561
+ async migrateToFixedSubkeys() {
11562
+ await this.writeTransaction(async (tx) => {
11563
+ await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
11564
+ await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
11565
+ SqliteBucketStorage._subkeyMigrationKey,
11566
+ '1'
11567
+ ]);
10314
11568
  });
10315
11569
  }
10316
- constructor(options) {
10317
- this.options = options;
10318
- this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
11570
+ static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
11571
+ }
11572
+ function hasMatchingPriority(priority, bucket) {
11573
+ return bucket.priority != null && bucket.priority <= priority;
11574
+ }
11575
+
11576
+ // TODO JSON
11577
+ class SyncDataBatch {
11578
+ buckets;
11579
+ static fromJSON(json) {
11580
+ return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
10319
11581
  }
10320
- get name() {
10321
- return this.options.name;
11582
+ constructor(buckets) {
11583
+ this.buckets = buckets;
10322
11584
  }
10323
- get columns() {
10324
- return this.options.columns ?? [];
11585
+ }
11586
+
11587
+ /**
11588
+ * Thrown when an underlying database connection is closed.
11589
+ * This is particularly relevant when worker connections are marked as closed while
11590
+ * operations are still in progress.
11591
+ */
11592
+ class ConnectionClosedError extends Error {
11593
+ static NAME = 'ConnectionClosedError';
11594
+ static MATCHES(input) {
11595
+ /**
11596
+ * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
11597
+ * check might fail. This also performs a failsafe check.
11598
+ * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
11599
+ */
11600
+ return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
10325
11601
  }
10326
- toJSON(table) {
10327
- return {
10328
- name: this.name,
10329
- columns: this.columns.map((c) => c.toJSON(table))
10330
- };
11602
+ constructor(message) {
11603
+ super(message);
11604
+ this.name = ConnectionClosedError.NAME;
10331
11605
  }
10332
11606
  }
10333
11607
 
@@ -10424,211 +11698,6 @@ class Schema {
10424
11698
  }
10425
11699
  }
10426
11700
 
10427
- const DEFAULT_TABLE_OPTIONS = {
10428
- indexes: [],
10429
- insertOnly: false,
10430
- localOnly: false,
10431
- trackPrevious: false,
10432
- trackMetadata: false,
10433
- ignoreEmptyUpdates: false
10434
- };
10435
- const InvalidSQLCharacters = /["'%,.#\s[\]]/;
10436
- class Table {
10437
- options;
10438
- _mappedColumns;
10439
- static createLocalOnly(options) {
10440
- return new Table({ ...options, localOnly: true, insertOnly: false });
10441
- }
10442
- static createInsertOnly(options) {
10443
- return new Table({ ...options, localOnly: false, insertOnly: true });
10444
- }
10445
- /**
10446
- * Create a table.
10447
- * @deprecated This was only only included for TableV2 and is no longer necessary.
10448
- * Prefer to use new Table() directly.
10449
- *
10450
- * TODO remove in the next major release.
10451
- */
10452
- static createTable(name, table) {
10453
- return new Table({
10454
- name,
10455
- columns: table.columns,
10456
- indexes: table.indexes,
10457
- localOnly: table.options.localOnly,
10458
- insertOnly: table.options.insertOnly,
10459
- viewName: table.options.viewName
10460
- });
10461
- }
10462
- constructor(optionsOrColumns, v2Options) {
10463
- if (this.isTableV1(optionsOrColumns)) {
10464
- this.initTableV1(optionsOrColumns);
10465
- }
10466
- else {
10467
- this.initTableV2(optionsOrColumns, v2Options);
10468
- }
10469
- }
10470
- copyWithName(name) {
10471
- return new Table({
10472
- ...this.options,
10473
- name
10474
- });
10475
- }
10476
- isTableV1(arg) {
10477
- return 'columns' in arg && Array.isArray(arg.columns);
10478
- }
10479
- initTableV1(options) {
10480
- this.options = {
10481
- ...options,
10482
- indexes: options.indexes || []
10483
- };
10484
- this.applyDefaultOptions();
10485
- }
10486
- initTableV2(columns, options) {
10487
- const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
10488
- const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
10489
- name,
10490
- columns: columnNames.map((name) => new IndexedColumn({
10491
- name: name.replace(/^-/, ''),
10492
- ascending: !name.startsWith('-')
10493
- }))
10494
- }));
10495
- this.options = {
10496
- name: '',
10497
- columns: convertedColumns,
10498
- indexes: convertedIndexes,
10499
- viewName: options?.viewName,
10500
- insertOnly: options?.insertOnly,
10501
- localOnly: options?.localOnly,
10502
- trackPrevious: options?.trackPrevious,
10503
- trackMetadata: options?.trackMetadata,
10504
- ignoreEmptyUpdates: options?.ignoreEmptyUpdates
10505
- };
10506
- this.applyDefaultOptions();
10507
- this._mappedColumns = columns;
10508
- }
10509
- applyDefaultOptions() {
10510
- this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
10511
- this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
10512
- this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
10513
- this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
10514
- this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
10515
- }
10516
- get name() {
10517
- return this.options.name;
10518
- }
10519
- get viewNameOverride() {
10520
- return this.options.viewName;
10521
- }
10522
- get viewName() {
10523
- return this.viewNameOverride ?? this.name;
10524
- }
10525
- get columns() {
10526
- return this.options.columns;
10527
- }
10528
- get columnMap() {
10529
- return (this._mappedColumns ??
10530
- this.columns.reduce((hash, column) => {
10531
- hash[column.name] = { type: column.type ?? ColumnType.TEXT };
10532
- return hash;
10533
- }, {}));
10534
- }
10535
- get indexes() {
10536
- return this.options.indexes ?? [];
10537
- }
10538
- get localOnly() {
10539
- return this.options.localOnly;
10540
- }
10541
- get insertOnly() {
10542
- return this.options.insertOnly;
10543
- }
10544
- get trackPrevious() {
10545
- return this.options.trackPrevious;
10546
- }
10547
- get trackMetadata() {
10548
- return this.options.trackMetadata;
10549
- }
10550
- get ignoreEmptyUpdates() {
10551
- return this.options.ignoreEmptyUpdates;
10552
- }
10553
- get internalName() {
10554
- if (this.options.localOnly) {
10555
- return `ps_data_local__${this.name}`;
10556
- }
10557
- return `ps_data__${this.name}`;
10558
- }
10559
- get validName() {
10560
- if (InvalidSQLCharacters.test(this.name)) {
10561
- return false;
10562
- }
10563
- if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
10564
- return false;
10565
- }
10566
- return true;
10567
- }
10568
- validate() {
10569
- if (InvalidSQLCharacters.test(this.name)) {
10570
- throw new Error(`Invalid characters in table name: ${this.name}`);
10571
- }
10572
- if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
10573
- throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
10574
- }
10575
- if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
10576
- throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
10577
- }
10578
- if (this.trackMetadata && this.localOnly) {
10579
- throw new Error(`Can't include metadata for local-only tables.`);
10580
- }
10581
- if (this.trackPrevious != false && this.localOnly) {
10582
- throw new Error(`Can't include old values for local-only tables.`);
10583
- }
10584
- const columnNames = new Set();
10585
- columnNames.add('id');
10586
- for (const column of this.columns) {
10587
- const { name: columnName } = column;
10588
- if (column.name === 'id') {
10589
- throw new Error(`An id column is automatically added, custom id columns are not supported`);
10590
- }
10591
- if (columnNames.has(columnName)) {
10592
- throw new Error(`Duplicate column ${columnName}`);
10593
- }
10594
- if (InvalidSQLCharacters.test(columnName)) {
10595
- throw new Error(`Invalid characters in column name: ${column.name}`);
10596
- }
10597
- columnNames.add(columnName);
10598
- }
10599
- const indexNames = new Set();
10600
- for (const index of this.indexes) {
10601
- if (indexNames.has(index.name)) {
10602
- throw new Error(`Duplicate index ${index.name}`);
10603
- }
10604
- if (InvalidSQLCharacters.test(index.name)) {
10605
- throw new Error(`Invalid characters in index name: ${index.name}`);
10606
- }
10607
- for (const column of index.columns) {
10608
- if (!columnNames.has(column.name)) {
10609
- throw new Error(`Column ${column.name} not found for index ${index.name}`);
10610
- }
10611
- }
10612
- indexNames.add(index.name);
10613
- }
10614
- }
10615
- toJSON() {
10616
- const trackPrevious = this.trackPrevious;
10617
- return {
10618
- name: this.name,
10619
- view_name: this.viewName,
10620
- local_only: this.localOnly,
10621
- insert_only: this.insertOnly,
10622
- include_old: trackPrevious && (trackPrevious.columns ?? true),
10623
- include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
10624
- include_metadata: this.trackMetadata,
10625
- ignore_empty_update: this.ignoreEmptyUpdates,
10626
- columns: this.columns.map((c) => c.toJSON()),
10627
- indexes: this.indexes.map((e) => e.toJSON(this))
10628
- };
10629
- }
10630
- }
10631
-
10632
11701
  /**
10633
11702
  Generate a new table from the columns and indexes
10634
11703
  @deprecated You should use {@link Table} instead as it now allows TableV2 syntax.
@@ -10784,5 +11853,5 @@ const parseQuery = (query, parameters) => {
10784
11853
  return { sqlStatement, parameters: parameters };
10785
11854
  };
10786
11855
 
10787
- export { AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_PRESSURE_LIMITS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DataStream, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand, RowUpdateType, Schema, SqliteBucketStorage, SyncClientImplementation, SyncDataBatch, SyncDataBucket, SyncProgress, SyncStatus, SyncStreamConnectionMethod, Table, TableV2, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isContinueCheckpointRequest, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, isStreamingSyncCheckpointPartiallyComplete, isStreamingSyncData, isSyncNewCheckpointRequest, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID };
11856
+ export { ATTACHMENT_TABLE, AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, AttachmentContext, AttachmentQueue, AttachmentService, AttachmentState, AttachmentTable, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_PRESSURE_LIMITS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DataStream, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, EncodingType, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, MEMORY_TRIGGER_CLAIM_MANAGER, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand, RawTable, RowUpdateType, Schema, SqliteBucketStorage, SyncClientImplementation, SyncDataBatch, SyncDataBucket, SyncProgress, SyncStatus, SyncStreamConnectionMethod, SyncingService, Table, TableV2, TriggerManagerImpl, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, attachmentFromSql, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isContinueCheckpointRequest, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, 
isStreamingSyncCheckpointPartiallyComplete, isStreamingSyncData, isSyncNewCheckpointRequest, mutexRunExclusive, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID };
10788
11857
  //# sourceMappingURL=bundle.node.mjs.map