@powersync/web 0.0.0-dev-20260120150240 → 0.0.0-dev-20260128165909

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/_journeyapps_wa-sqlite-_journeyapps_wa-sqlite_src_examples_AccessHandlePoolVFS_js-_journeyapp-89f0ba.index.umd.js +5 -1
  2. package/dist/_journeyapps_wa-sqlite-_journeyapps_wa-sqlite_src_examples_AccessHandlePoolVFS_js-_journeyapp-89f0ba.index.umd.js.map +1 -1
  3. package/dist/index.umd.js +288 -65
  4. package/dist/index.umd.js.map +1 -1
  5. package/dist/worker/SharedSyncImplementation.umd.js +1438 -385
  6. package/dist/worker/SharedSyncImplementation.umd.js.map +1 -1
  7. package/dist/worker/WASQLiteDB.umd.js +1435 -386
  8. package/dist/worker/WASQLiteDB.umd.js.map +1 -1
  9. package/lib/package.json +2 -4
  10. package/lib/src/attachments/IndexDBFileSystemAdapter.d.ts +25 -0
  11. package/lib/src/attachments/IndexDBFileSystemAdapter.js +104 -0
  12. package/lib/src/db/NavigatorTriggerClaimManager.d.ts +6 -0
  13. package/lib/src/db/NavigatorTriggerClaimManager.js +20 -0
  14. package/lib/src/db/PowerSyncDatabase.d.ts +2 -1
  15. package/lib/src/db/PowerSyncDatabase.js +27 -1
  16. package/lib/src/db/adapters/LockedAsyncDatabaseAdapter.d.ts +2 -2
  17. package/lib/src/db/adapters/LockedAsyncDatabaseAdapter.js +5 -1
  18. package/lib/src/db/adapters/WebDBAdapter.d.ts +4 -1
  19. package/lib/src/db/adapters/wa-sqlite/InternalWASQLiteDBAdapter.d.ts +12 -0
  20. package/lib/src/db/adapters/wa-sqlite/InternalWASQLiteDBAdapter.js +19 -0
  21. package/lib/src/db/adapters/wa-sqlite/WASQLiteDBAdapter.d.ts +2 -2
  22. package/lib/src/db/adapters/wa-sqlite/WASQLiteDBAdapter.js +2 -2
  23. package/lib/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.js +2 -2
  24. package/lib/src/index.d.ts +1 -0
  25. package/lib/src/index.js +1 -0
  26. package/lib/tsconfig.tsbuildinfo +1 -1
  27. package/package.json +3 -5
  28. package/src/attachments/IndexDBFileSystemAdapter.ts +117 -0
  29. package/src/db/NavigatorTriggerClaimManager.ts +23 -0
  30. package/src/db/PowerSyncDatabase.ts +30 -1
  31. package/src/db/adapters/LockedAsyncDatabaseAdapter.ts +7 -3
  32. package/src/db/adapters/WebDBAdapter.ts +5 -1
  33. package/src/db/adapters/wa-sqlite/InternalWASQLiteDBAdapter.ts +23 -0
  34. package/src/db/adapters/wa-sqlite/WASQLiteDBAdapter.ts +2 -2
  35. package/src/db/adapters/wa-sqlite/WASQLiteOpenFactory.ts +2 -2
  36. package/src/index.ts +1 -0
@@ -2417,6 +2417,7 @@ function generateUUID() {
2417
2417
 
2418
2418
  __webpack_require__.r(__webpack_exports__);
2419
2419
  /* harmony export */ __webpack_require__.d(__webpack_exports__, {
2420
+ /* harmony export */ ATTACHMENT_TABLE: () => (/* binding */ ATTACHMENT_TABLE),
2420
2421
  /* harmony export */ AbortOperation: () => (/* binding */ AbortOperation),
2421
2422
  /* harmony export */ AbstractPowerSyncDatabase: () => (/* binding */ AbstractPowerSyncDatabase),
2422
2423
  /* harmony export */ AbstractPowerSyncDatabaseOpenFactory: () => (/* binding */ AbstractPowerSyncDatabaseOpenFactory),
@@ -2424,6 +2425,11 @@ __webpack_require__.r(__webpack_exports__);
2424
2425
  /* harmony export */ AbstractRemote: () => (/* binding */ AbstractRemote),
2425
2426
  /* harmony export */ AbstractStreamingSyncImplementation: () => (/* binding */ AbstractStreamingSyncImplementation),
2426
2427
  /* harmony export */ ArrayComparator: () => (/* binding */ ArrayComparator),
2428
+ /* harmony export */ AttachmentContext: () => (/* binding */ AttachmentContext),
2429
+ /* harmony export */ AttachmentQueue: () => (/* binding */ AttachmentQueue),
2430
+ /* harmony export */ AttachmentService: () => (/* binding */ AttachmentService),
2431
+ /* harmony export */ AttachmentState: () => (/* binding */ AttachmentState),
2432
+ /* harmony export */ AttachmentTable: () => (/* binding */ AttachmentTable),
2427
2433
  /* harmony export */ BaseObserver: () => (/* binding */ BaseObserver),
2428
2434
  /* harmony export */ Column: () => (/* binding */ Column),
2429
2435
  /* harmony export */ ColumnType: () => (/* binding */ ColumnType),
@@ -2455,6 +2461,7 @@ __webpack_require__.r(__webpack_exports__);
2455
2461
  /* harmony export */ DiffTriggerOperation: () => (/* binding */ DiffTriggerOperation),
2456
2462
  /* harmony export */ DifferentialQueryProcessor: () => (/* binding */ DifferentialQueryProcessor),
2457
2463
  /* harmony export */ EMPTY_DIFFERENTIAL: () => (/* binding */ EMPTY_DIFFERENTIAL),
2464
+ /* harmony export */ EncodingType: () => (/* binding */ EncodingType),
2458
2465
  /* harmony export */ FalsyComparator: () => (/* binding */ FalsyComparator),
2459
2466
  /* harmony export */ FetchImplementationProvider: () => (/* binding */ FetchImplementationProvider),
2460
2467
  /* harmony export */ FetchStrategy: () => (/* binding */ FetchStrategy),
@@ -2466,6 +2473,7 @@ __webpack_require__.r(__webpack_exports__);
2466
2473
  /* harmony export */ LogLevel: () => (/* binding */ LogLevel),
2467
2474
  /* harmony export */ MAX_AMOUNT_OF_COLUMNS: () => (/* binding */ MAX_AMOUNT_OF_COLUMNS),
2468
2475
  /* harmony export */ MAX_OP_ID: () => (/* binding */ MAX_OP_ID),
2476
+ /* harmony export */ MEMORY_TRIGGER_CLAIM_MANAGER: () => (/* binding */ MEMORY_TRIGGER_CLAIM_MANAGER),
2469
2477
  /* harmony export */ OnChangeQueryProcessor: () => (/* binding */ OnChangeQueryProcessor),
2470
2478
  /* harmony export */ OpType: () => (/* binding */ OpType),
2471
2479
  /* harmony export */ OpTypeEnum: () => (/* binding */ OpTypeEnum),
@@ -2482,11 +2490,14 @@ __webpack_require__.r(__webpack_exports__);
2482
2490
  /* harmony export */ SyncProgress: () => (/* binding */ SyncProgress),
2483
2491
  /* harmony export */ SyncStatus: () => (/* binding */ SyncStatus),
2484
2492
  /* harmony export */ SyncStreamConnectionMethod: () => (/* binding */ SyncStreamConnectionMethod),
2493
+ /* harmony export */ SyncingService: () => (/* binding */ SyncingService),
2485
2494
  /* harmony export */ Table: () => (/* binding */ Table),
2486
2495
  /* harmony export */ TableV2: () => (/* binding */ TableV2),
2496
+ /* harmony export */ TriggerManagerImpl: () => (/* binding */ TriggerManagerImpl),
2487
2497
  /* harmony export */ UpdateType: () => (/* binding */ UpdateType),
2488
2498
  /* harmony export */ UploadQueueStats: () => (/* binding */ UploadQueueStats),
2489
2499
  /* harmony export */ WatchedQueryListenerEvent: () => (/* binding */ WatchedQueryListenerEvent),
2500
+ /* harmony export */ attachmentFromSql: () => (/* binding */ attachmentFromSql),
2490
2501
  /* harmony export */ column: () => (/* binding */ column),
2491
2502
  /* harmony export */ compilableQueryWatch: () => (/* binding */ compilableQueryWatch),
2492
2503
  /* harmony export */ createBaseLogger: () => (/* binding */ createBaseLogger),
@@ -2513,6 +2524,1211 @@ __webpack_require__.r(__webpack_exports__);
2513
2524
  /* harmony import */ var async_mutex__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! async-mutex */ "../../node_modules/.pnpm/async-mutex@0.5.0/node_modules/async-mutex/index.mjs");
2514
2525
 
2515
2526
 
2527
// https://www.sqlite.org/lang_expr.html#castexpr
var ColumnType;
(function (types) {
    // String enum: each member maps to its own name.
    for (const typeName of ['TEXT', 'INTEGER', 'REAL']) {
        types[typeName] = typeName;
    }
})(ColumnType || (ColumnType = {}));
/** Column descriptor for a TEXT column. */
const text = {
    type: ColumnType.TEXT
};
/** Column descriptor for an INTEGER column. */
const integer = {
    type: ColumnType.INTEGER
};
/** Column descriptor for a REAL column. */
const real = {
    type: ColumnType.REAL
};
// powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
// In earlier versions this was limited to 63.
const MAX_AMOUNT_OF_COLUMNS = 1999;
/** Convenience map of the three column descriptors, e.g. `column.text`. */
const column = {
    text,
    integer,
    real
};
2551
/**
 * A single column in a table schema: a name plus a SQLite column type.
 */
class Column {
    options;
    /**
     * @param options - `{ name, type }` describing the column.
     */
    constructor(options) {
        this.options = options;
    }
    /** The column's name. */
    get name() {
        return this.options.name;
    }
    /** The column's SQLite type (ColumnType). */
    get type() {
        return this.options.type;
    }
    /** Serializes the column to the `{ name, type }` shape used by the schema JSON. */
    toJSON() {
        const { name, type } = this;
        return { name, type };
    }
}
2569
+
2570
// Index columns sort ascending unless explicitly overridden.
const DEFAULT_INDEX_COLUMN_OPTIONS = {
    ascending: true
};
/**
 * A single column participating in an index, with its sort direction.
 */
class IndexedColumn {
    options;
    /** Builds an ascending indexed column for the given column name. */
    static createAscending(column) {
        return new IndexedColumn({
            name: column,
            ascending: true
        });
    }
    /**
     * @param options - `{ name, ascending? }`; `ascending` defaults to true.
     */
    constructor(options) {
        this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
    }
    /** Name of the indexed column. */
    get name() {
        return this.options.name;
    }
    /** Sort direction of this index column. */
    get ascending() {
        return this.options.ascending;
    }
    /**
     * Serializes for schema JSON. The type is looked up from the owning
     * table's column list; TEXT is used if the column cannot be found.
     */
    toJSON(table) {
        const matched = table.columns.find((tableColumn) => tableColumn.name === this.name);
        return {
            name: this.name,
            ascending: this.ascending,
            type: matched?.type ?? ColumnType.TEXT
        };
    }
}
2598
+
2599
// By default an index has no columns until they are provided.
const DEFAULT_INDEX_OPTIONS = {
    columns: []
};
/**
 * A named index over one or more columns of a table.
 */
class Index {
    options;
    /**
     * Builds an index where every listed column sorts ascending.
     *
     * @param options - Index options (e.g. the index name).
     * @param columnNames - Names of the columns to index, in order.
     */
    static createAscending(options, columnNames) {
        return new Index({
            ...options,
            columns: columnNames.map((name) => IndexedColumn.createAscending(name))
        });
    }
    /**
     * @param options - `{ name, columns? }`; `columns` defaults to an empty list.
     */
    constructor(options) {
        // Fix: the original assigned `this.options = options` and then immediately
        // overwrote it with the defaults-merge below — a dead store. One merge
        // assignment is sufficient and has identical observable behavior.
        this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
    }
    /** The index name. */
    get name() {
        return this.options.name;
    }
    /** The indexed columns (never null). */
    get columns() {
        return this.options.columns ?? [];
    }
    /** Serializes the index (and its columns) for schema JSON. */
    toJSON(table) {
        return {
            name: this.name,
            columns: this.columns.map((c) => c.toJSON(table))
        };
    }
}
2627
+
2628
// Defaults applied to every table definition; see applyDefaultOptions().
const DEFAULT_TABLE_OPTIONS = {
    indexes: [],
    insertOnly: false,
    localOnly: false,
    trackPrevious: false,
    trackMetadata: false,
    ignoreEmptyUpdates: false
};
// Characters not permitted in table, view, column or index names.
const InvalidSQLCharacters = /["'%,.#\s[\]]/;
/**
 * Schema definition for a single synced table.
 *
 * Supports two construction forms:
 * - v1: `new Table({ name, columns: Column[], indexes?, ... })`
 * - v2: `new Table({ colName: column.text, ... }, options?)` where the first
 *   argument is a column map (name -> descriptor).
 * The two forms are distinguished in the constructor via isTableV1().
 */
class Table {
    options;
    // Only set for v2-constructed tables; backs columnMap (see getter).
    _mappedColumns;
    // Creates a table whose data is never synced to the backend.
    static createLocalOnly(options) {
        return new Table({ ...options, localOnly: true, insertOnly: false });
    }
    // Creates a table where local rows are uploaded then removed (e.g. logs).
    static createInsertOnly(options) {
        return new Table({ ...options, localOnly: false, insertOnly: true });
    }
    /**
     * Create a table.
     * @deprecated This was only included for TableV2 and is no longer necessary.
     * Prefer to use new Table() directly.
     *
     * TODO remove in the next major release.
     */
    static createTable(name, table) {
        return new Table({
            name,
            columns: table.columns,
            indexes: table.indexes,
            localOnly: table.options.localOnly,
            insertOnly: table.options.insertOnly,
            viewName: table.options.viewName
        });
    }
    /**
     * @param optionsOrColumns - v1 options object, or a v2 column map.
     * @param v2Options - options when using the v2 column-map form.
     */
    constructor(optionsOrColumns, v2Options) {
        if (this.isTableV1(optionsOrColumns)) {
            this.initTableV1(optionsOrColumns);
        }
        else {
            this.initTableV2(optionsOrColumns, v2Options);
        }
    }
    // Returns a copy of this table definition under a different name.
    copyWithName(name) {
        return new Table({
            ...this.options,
            name
        });
    }
    // v1 form is identified by `columns` being an array (v2 passes a column map).
    isTableV1(arg) {
        return 'columns' in arg && Array.isArray(arg.columns);
    }
    initTableV1(options) {
        this.options = {
            ...options,
            indexes: options.indexes || []
        };
        this.applyDefaultOptions();
    }
    initTableV2(columns, options) {
        // Convert the column map into Column instances.
        const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
        // v2 indexes are given as { indexName: ['col', '-descCol'] }; a leading
        // '-' on a column name marks a descending sort. Note: the inner `name`
        // parameter shadows the index name from the outer destructuring.
        const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
            name,
            columns: columnNames.map((name) => new IndexedColumn({
                name: name.replace(/^-/, ''),
                ascending: !name.startsWith('-')
            }))
        }));
        this.options = {
            // v2 tables are named by the schema that holds them; '' is a placeholder.
            name: '',
            columns: convertedColumns,
            indexes: convertedIndexes,
            viewName: options?.viewName,
            insertOnly: options?.insertOnly,
            localOnly: options?.localOnly,
            trackPrevious: options?.trackPrevious,
            trackMetadata: options?.trackMetadata,
            ignoreEmptyUpdates: options?.ignoreEmptyUpdates
        };
        this.applyDefaultOptions();
        this._mappedColumns = columns;
    }
    // Fills in any flags the caller left nullish with the documented defaults.
    applyDefaultOptions() {
        this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
        this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
        this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
        this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
        this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
    }
    get name() {
        return this.options.name;
    }
    // Explicit view-name override, if one was configured.
    get viewNameOverride() {
        return this.options.viewName;
    }
    // Name of the SQL view exposed to queries; falls back to the table name.
    get viewName() {
        return this.viewNameOverride ?? this.name;
    }
    get columns() {
        return this.options.columns;
    }
    // Column map (name -> { type }). For v2 tables this is the original map;
    // for v1 tables it is derived from the Column list (missing types -> TEXT).
    get columnMap() {
        return (this._mappedColumns ??
            this.columns.reduce((hash, column) => {
                hash[column.name] = { type: column.type ?? ColumnType.TEXT };
                return hash;
            }, {}));
    }
    get indexes() {
        return this.options.indexes ?? [];
    }
    get localOnly() {
        return this.options.localOnly;
    }
    get insertOnly() {
        return this.options.insertOnly;
    }
    get trackPrevious() {
        return this.options.trackPrevious;
    }
    get trackMetadata() {
        return this.options.trackMetadata;
    }
    get ignoreEmptyUpdates() {
        return this.options.ignoreEmptyUpdates;
    }
    // Name of the underlying internal storage table (distinct per localOnly).
    get internalName() {
        if (this.options.localOnly) {
            return `ps_data_local__${this.name}`;
        }
        return `ps_data__${this.name}`;
    }
    // True when the table and view names contain no disallowed characters.
    get validName() {
        if (InvalidSQLCharacters.test(this.name)) {
            return false;
        }
        if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
            return false;
        }
        return true;
    }
    /**
     * Validates the full table definition: names, flag combinations, column
     * count and uniqueness, and index/column consistency. Throws on the first
     * violation found.
     */
    validate() {
        if (InvalidSQLCharacters.test(this.name)) {
            throw new Error(`Invalid characters in table name: ${this.name}`);
        }
        if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
            throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
        }
        if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
            throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
        }
        if (this.trackMetadata && this.localOnly) {
            throw new Error(`Can't include metadata for local-only tables.`);
        }
        if (this.trackPrevious != false && this.localOnly) {
            throw new Error(`Can't include old values for local-only tables.`);
        }
        const columnNames = new Set();
        // 'id' is reserved: it is added automatically to every table.
        columnNames.add('id');
        for (const column of this.columns) {
            const { name: columnName } = column;
            if (column.name === 'id') {
                throw new Error(`An id column is automatically added, custom id columns are not supported`);
            }
            if (columnNames.has(columnName)) {
                throw new Error(`Duplicate column ${columnName}`);
            }
            if (InvalidSQLCharacters.test(columnName)) {
                throw new Error(`Invalid characters in column name: ${column.name}`);
            }
            columnNames.add(columnName);
        }
        const indexNames = new Set();
        for (const index of this.indexes) {
            if (indexNames.has(index.name)) {
                throw new Error(`Duplicate index ${index.name}`);
            }
            if (InvalidSQLCharacters.test(index.name)) {
                throw new Error(`Invalid characters in index name: ${index.name}`);
            }
            // Every index column must reference an existing table column (or 'id').
            for (const column of index.columns) {
                if (!columnNames.has(column.name)) {
                    throw new Error(`Column ${column.name} not found for index ${index.name}`);
                }
            }
            indexNames.add(index.name);
        }
    }
    // Serializes the table definition into the schema JSON consumed by the core.
    toJSON() {
        const trackPrevious = this.trackPrevious;
        return {
            name: this.name,
            view_name: this.viewName,
            local_only: this.localOnly,
            insert_only: this.insertOnly,
            // trackPrevious may be a boolean or an options object; when an object,
            // `columns` narrows which columns' previous values are captured.
            include_old: trackPrevious && (trackPrevious.columns ?? true),
            include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
            include_metadata: this.trackMetadata,
            ignore_empty_update: this.ignoreEmptyUpdates,
            columns: this.columns.map((c) => c.toJSON()),
            indexes: this.indexes.map((e) => e.toJSON(this))
        };
    }
}
2832
+
2833
const ATTACHMENT_TABLE = 'attachments';
/**
 * Maps a database row to an AttachmentRecord.
 *
 * Snake_case SQL columns are translated to camelCase fields, and the
 * integer `has_synced` flag becomes a boolean.
 *
 * @param row - The database row object
 * @returns The corresponding AttachmentRecord
 *
 * @experimental
 */
function attachmentFromSql(row) {
    const { id, filename, local_uri, size, media_type, timestamp, meta_data, has_synced, state } = row;
    return {
        id,
        filename,
        localUri: local_uri,
        size,
        mediaType: media_type,
        timestamp,
        metaData: meta_data,
        hasSynced: has_synced === 1,
        state
    };
}
2855
/**
 * AttachmentState represents the current synchronization state of an attachment.
 *
 * Numeric enum with a reverse mapping (code -> name), matching the
 * TypeScript compiled-enum shape.
 *
 * @experimental
 */
var AttachmentState;
(function (states) {
    const ordered = [
        'QUEUED_UPLOAD', // 0: waiting to be uploaded to remote storage
        'QUEUED_DOWNLOAD', // 1: waiting to be downloaded from remote storage
        'QUEUED_DELETE', // 2: waiting to be deleted locally and remotely
        'SYNCED', // 3: local and remote copies are in sync
        'ARCHIVED' // 4: attachment has been orphaned, i.e. the associated record has been deleted
    ];
    ordered.forEach((label, code) => {
        states[states[label] = code] = label;
    });
})(AttachmentState || (AttachmentState = {}));
2868
/**
 * AttachmentTable defines the schema for the attachment queue table.
 *
 * Always local-only (the queue itself is not synced) and exposed under the
 * `attachments` view name unless overridden.
 *
 * @internal
 */
class AttachmentTable extends Table {
    constructor(options) {
        // Column layout mirrors the fields of AttachmentRecord (snake_case).
        const schema = {
            filename: column.text,
            local_uri: column.text,
            timestamp: column.integer,
            size: column.integer,
            media_type: column.text,
            state: column.integer, // Corresponds to AttachmentState
            has_synced: column.integer,
            meta_data: column.text
        };
        super(schema, {
            ...options,
            viewName: options?.viewName ?? ATTACHMENT_TABLE,
            localOnly: true,
            insertOnly: false
        });
    }
}
2892
+
2893
/**
 * AttachmentContext provides database operations for managing attachment records.
 *
 * Provides methods to query, insert, update, and delete attachment records with
 * proper transaction management through PowerSync.
 *
 * @internal
 */
class AttachmentContext {
    /** PowerSync database instance for executing queries */
    db;
    /** Name of the database table storing attachment records */
    tableName;
    /** Logger instance for diagnostic information */
    logger;
    /** Maximum number of archived attachments to keep before cleanup */
    archivedCacheLimit = 100;
    /**
     * Creates a new AttachmentContext instance.
     *
     * @param db - PowerSync database instance
     * @param tableName - Name of the table storing attachment records. Default: 'attachments'
     * @param logger - Logger instance for diagnostic output
     * @param archivedCacheLimit - Archived-record cap used as the OFFSET in
     *   deleteArchivedAttachments. Default: 100.
     */
    constructor(db, tableName = 'attachments', logger, archivedCacheLimit = 100) {
        this.db = db;
        this.tableName = tableName;
        this.logger = logger;
        // Fix: the original assigned the raw parameter unconditionally, so omitting
        // it clobbered the field's 100 default with `undefined` and produced
        // `OFFSET undefined` in deleteArchivedAttachments. A default parameter
        // preserves the documented default while remaining backward-compatible.
        this.archivedCacheLimit = archivedCacheLimit;
    }
    /**
     * Retrieves all active attachments that require synchronization.
     * Active attachments include those queued for upload, download, or delete.
     * Results are ordered by timestamp in ascending order.
     *
     * @returns Promise resolving to an array of active attachment records
     */
    async getActiveAttachments() {
        const attachments = await this.db.getAll(
        /* sql */
        `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
          OR state = ?
          OR state = ?
        ORDER BY
          timestamp ASC
      `, [AttachmentState.QUEUED_UPLOAD, AttachmentState.QUEUED_DOWNLOAD, AttachmentState.QUEUED_DELETE]);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Retrieves all archived attachments.
     *
     * Archived attachments are no longer referenced but haven't been permanently deleted.
     * These are candidates for cleanup operations to free up storage space.
     *
     * @returns Promise resolving to an array of archived attachment records
     */
    async getArchivedAttachments() {
        const attachments = await this.db.getAll(
        /* sql */
        `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
        ORDER BY
          timestamp ASC
      `, [AttachmentState.ARCHIVED]);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Retrieves all attachment records regardless of state.
     * Results are ordered by timestamp in ascending order.
     *
     * @returns Promise resolving to an array of all attachment records
     */
    async getAttachments() {
        const attachments = await this.db.getAll(
        /* sql */
        `
        SELECT
          *
        FROM
          ${this.tableName}
        ORDER BY
          timestamp ASC
      `, []);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Inserts or updates an attachment record within an existing transaction.
     *
     * Performs an upsert operation (INSERT OR REPLACE). Must be called within
     * an active database transaction context.
     *
     * @param attachment - The attachment record to upsert
     * @param context - Active database transaction context
     */
    async upsertAttachment(attachment, context) {
        await context.execute(
        /* sql */
        `
        INSERT
        OR REPLACE INTO ${this.tableName} (
          id,
          filename,
          local_uri,
          size,
          media_type,
          timestamp,
          state,
          has_synced,
          meta_data
        )
        VALUES
          (?, ?, ?, ?, ?, ?, ?, ?, ?)
      `, [
            attachment.id,
            attachment.filename,
            attachment.localUri || null,
            attachment.size || null,
            attachment.mediaType || null,
            attachment.timestamp,
            attachment.state,
            attachment.hasSynced ? 1 : 0,
            attachment.metaData || null
        ]);
    }
    /**
     * Fetches a single attachment by id.
     *
     * @param id - Unique identifier of the attachment
     * @returns Promise resolving to the attachment record, or undefined if not found
     */
    async getAttachment(id) {
        const attachment = await this.db.get(
        /* sql */
        `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          id = ?
      `, [id]);
        return attachment ? attachmentFromSql(attachment) : undefined;
    }
    /**
     * Permanently deletes an attachment record from the database.
     *
     * This operation removes the attachment record but does not delete
     * the associated local or remote files. File deletion should be handled
     * separately through the appropriate storage adapters.
     *
     * @param attachmentId - Unique identifier of the attachment to delete
     */
    async deleteAttachment(attachmentId) {
        await this.db.writeTransaction((tx) => tx.execute(
        /* sql */
        `
        DELETE FROM ${this.tableName}
        WHERE
          id = ?
      `, [attachmentId]));
    }
    /**
     * Removes every record from the attachment queue table.
     */
    async clearQueue() {
        await this.db.writeTransaction((tx) => tx.execute(/* sql */ ` DELETE FROM ${this.tableName} `));
    }
    /**
     * Deletes archived attachments beyond the configured cache limit.
     *
     * Selects up to 1000 archived records past the `archivedCacheLimit` most
     * recent ones (newest kept), invokes the optional callback with the batch
     * (e.g. so callers can remove local files first), then deletes the records.
     *
     * @param callback - Optional async callback invoked with the batch before deletion
     * @returns Promise resolving to true when all eligible records were handled
     *   in this pass (batch smaller than the 1000 limit), false otherwise
     */
    async deleteArchivedAttachments(callback) {
        // Process at most 1000 records per invocation to bound memory/IO.
        const limit = 1000;
        const results = await this.db.getAll(
        /* sql */
        `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
        ORDER BY
          timestamp DESC
        LIMIT
          ?
        OFFSET
          ?
      `, [AttachmentState.ARCHIVED, limit, this.archivedCacheLimit]);
        const archivedAttachments = results.map(attachmentFromSql);
        if (archivedAttachments.length === 0)
            return false;
        await callback?.(archivedAttachments);
        // NOTE(review): logger has no default; this assumes a logger was supplied.
        this.logger.info(`Deleting ${archivedAttachments.length} archived attachments. Archived attachment exceeds cache archiveCacheLimit of ${this.archivedCacheLimit}.`);
        const ids = archivedAttachments.map((attachment) => attachment.id);
        // json_each lets us bind the whole id list as a single JSON parameter.
        await this.db.execute(
        /* sql */
        `
        DELETE FROM ${this.tableName}
        WHERE
          id IN (
            SELECT
              json_each.value
            FROM
              json_each (?)
          );
      `, [JSON.stringify(ids)]);
        this.logger.info(`Deleted ${archivedAttachments.length} archived attachments`);
        return archivedAttachments.length < limit;
    }
    /**
     * Saves multiple attachment records in a single transaction.
     *
     * All updates are saved in a single batch after processing.
     * If the attachments array is empty, no database operations are performed.
     *
     * @param attachments - Array of attachment records to save
     */
    async saveAttachments(attachments) {
        if (attachments.length === 0) {
            return;
        }
        await this.db.writeTransaction(async (tx) => {
            for (const attachment of attachments) {
                await this.upsertAttachment(attachment, tx);
            }
        });
    }
}
3120
+
3121
/** Events emitted to watched-query listeners. */
var WatchedQueryListenerEvent;
(function (events) {
    const mapping = {
        ON_DATA: 'onData',
        ON_ERROR: 'onError',
        ON_STATE_CHANGE: 'onStateChange',
        SETTINGS_WILL_UPDATE: 'settingsWillUpdate',
        CLOSED: 'closed'
    };
    for (const [key, value] of Object.entries(mapping)) {
        events[key] = value;
    }
})(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));
/** Default throttle interval (ms) between watched-query re-fetches. */
const DEFAULT_WATCH_THROTTLE_MS = 30;
/** Baseline options applied to watched queries unless overridden. */
const DEFAULT_WATCH_QUERY_OPTIONS = {
    throttleMs: DEFAULT_WATCH_THROTTLE_MS,
    reportFetching: true
};
3134
+
3135
/**
 * Orchestrates attachment synchronization between local and remote storage.
 * Handles uploads, downloads, deletions, and state transitions.
 *
 * @internal
 */
class SyncingService {
    attachmentService;
    localStorage;
    remoteStorage;
    logger;
    errorHandler;
    constructor(attachmentService, localStorage, remoteStorage, logger, errorHandler) {
        this.attachmentService = attachmentService;
        this.localStorage = localStorage;
        this.remoteStorage = remoteStorage;
        this.logger = logger;
        this.errorHandler = errorHandler;
    }
    /**
     * Processes attachments based on their state (upload, download, or delete).
     * All updates are saved in a single batch after processing.
     *
     * @param attachments - Array of attachment records to process
     * @param context - Attachment context for database operations
     * @returns Promise that resolves when all attachments have been processed and saved
     */
    async processAttachments(attachments, context) {
        const processed = [];
        // Records are handled sequentially, preserving input order.
        for (const record of attachments) {
            if (record.state === AttachmentState.QUEUED_UPLOAD) {
                processed.push(await this.uploadAttachment(record));
            }
            else if (record.state === AttachmentState.QUEUED_DOWNLOAD) {
                processed.push(await this.downloadAttachment(record));
            }
            else if (record.state === AttachmentState.QUEUED_DELETE) {
                processed.push(await this.deleteAttachment(record));
            }
            // Records in any other state are left untouched.
        }
        await context.saveAttachments(processed);
    }
    /**
     * Uploads an attachment from local storage to remote storage.
     * On success, marks as SYNCED. On failure, defers to error handler or archives.
     *
     * @param attachment - The attachment record to upload
     * @returns Updated attachment record with new state
     * @throws Error if the attachment has no localUri
     */
    async uploadAttachment(attachment) {
        this.logger.info(`Uploading attachment ${attachment.filename}`);
        try {
            if (attachment.localUri == null) {
                throw new Error(`No localUri for attachment ${attachment.id}`);
            }
            const payload = await this.localStorage.readFile(attachment.localUri);
            await this.remoteStorage.uploadFile(payload, attachment);
            return { ...attachment, state: AttachmentState.SYNCED, hasSynced: true };
        }
        catch (err) {
            // The error handler decides whether to retry (default: retry).
            const retry = (await this.errorHandler?.onUploadError(attachment, err)) ?? true;
            return retry ? attachment : { ...attachment, state: AttachmentState.ARCHIVED };
        }
    }
    /**
     * Downloads an attachment from remote storage to local storage.
     * On success, marks as SYNCED with the new local URI. On failure, defers
     * to error handler or archives.
     *
     * @param attachment - The attachment record to download
     * @returns Updated attachment record with local URI and new state
     */
    async downloadAttachment(attachment) {
        this.logger.info(`Downloading attachment ${attachment.filename}`);
        try {
            const remoteData = await this.remoteStorage.downloadFile(attachment);
            const targetUri = this.localStorage.getLocalUri(attachment.filename);
            await this.localStorage.saveFile(targetUri, remoteData);
            return {
                ...attachment,
                state: AttachmentState.SYNCED,
                localUri: targetUri,
                hasSynced: true
            };
        }
        catch (err) {
            const retry = (await this.errorHandler?.onDownloadError(attachment, err)) ?? true;
            return retry ? attachment : { ...attachment, state: AttachmentState.ARCHIVED };
        }
    }
    /**
     * Deletes an attachment from both remote and local storage.
     * Removes the remote file, local file (if exists), and the attachment record.
     * On failure, defers to error handler or archives.
     *
     * @param attachment - The attachment record to delete
     * @returns Updated attachment record
     */
    async deleteAttachment(attachment) {
        try {
            // Remote first, then local file, then the database record.
            await this.remoteStorage.deleteFile(attachment);
            if (attachment.localUri) {
                await this.localStorage.deleteFile(attachment.localUri);
            }
            await this.attachmentService.withContext(async (ctx) => {
                await ctx.deleteAttachment(attachment.id);
            });
            return { ...attachment, state: AttachmentState.ARCHIVED };
        }
        catch (err) {
            const retry = (await this.errorHandler?.onDeleteError(attachment, err)) ?? true;
            return retry ? attachment : { ...attachment, state: AttachmentState.ARCHIVED };
        }
    }
    /**
     * Performs cleanup of archived attachments by removing their local files and records.
     * Errors during local file deletion are logged but do not prevent record deletion.
     */
    async deleteArchivedAttachments(context) {
        return await context.deleteArchivedAttachments(async (batch) => {
            for (const record of batch) {
                if (!record.localUri) {
                    continue;
                }
                try {
                    await this.localStorage.deleteFile(record.localUri);
                }
                catch (err) {
                    this.logger.error('Error deleting local file for archived attachment', err);
                }
            }
        });
    }
}
3299
+
3300
/**
 * Wrapper for async-mutex runExclusive, which allows for a timeout on each exclusive lock.
 *
 * NOTE(review): the `options` parameter is accepted but not used in this body —
 * no timeout is applied by the wrapper itself; confirm whether an override
 * elsewhere consumes it.
 *
 * @param mutex - Mutex providing `runExclusive`
 * @param callback - Async work to run while holding the lock
 * @param options - Reserved for lock options (currently unused here)
 * @returns Resolves/rejects with the callback's result
 */
async function mutexRunExclusive(mutex, callback, options) {
    return new Promise((resolve, reject) => {
        mutex.runExclusive(async () => {
            try {
                const result = await callback();
                resolve(result);
            }
            catch (error) {
                reject(error);
            }
        });
    });
}
3315
+
3316
/**
 * Service for querying and watching attachment records in the database.
 *
 * @internal
 */
class AttachmentService {
    db;
    logger;
    tableName;
    // Serializes all context access via withContext().
    mutex = new async_mutex__WEBPACK_IMPORTED_MODULE_0__.Mutex();
    context;
    /**
     * @param db - PowerSync database instance
     * @param logger - Logger used for diagnostics
     * @param tableName - Table holding attachment records
     * @param archivedCacheLimit - Maximum archived records retained before cleanup
     */
    constructor(db, logger, tableName = 'attachments', archivedCacheLimit = 100) {
        this.db = db;
        this.logger = logger;
        this.tableName = tableName;
        this.context = new AttachmentContext(db, tableName, logger, archivedCacheLimit);
    }
    /**
     * Creates a differential watch query for active attachments requiring synchronization.
     * @returns Watch query that emits changes for queued uploads, downloads, and deletes
     */
    watchActiveAttachments({ throttleMs } = {}) {
        this.logger.info('Watching active attachments...');
        const queuedStates = [
            AttachmentState.QUEUED_UPLOAD,
            AttachmentState.QUEUED_DOWNLOAD,
            AttachmentState.QUEUED_DELETE
        ];
        const query = this.db.query({
            sql: /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
          OR state = ?
          OR state = ?
        ORDER BY
          timestamp ASC
      `,
            parameters: queuedStates
        });
        return query.differentialWatch({ throttleMs });
    }
    /**
     * Executes a callback with exclusive access to the attachment context.
     */
    async withContext(callback) {
        return mutexRunExclusive(this.mutex, () => callback(this.context));
    }
}
3367
+
3368
/**
 * AttachmentQueue manages the lifecycle and synchronization of attachments
 * between local and remote storage.
 * Provides automatic synchronization, upload/download queuing, attachment monitoring,
 * verification and repair of local files, and cleanup of archived attachments.
 *
 * @experimental
 * @alpha This is currently experimental and may change without a major version bump.
 */
class AttachmentQueue {
    /** Timer for periodic synchronization operations */
    periodicSyncTimer;
    /** Service for synchronizing attachments between local and remote storage */
    syncingService;
    /** Adapter for local file storage operations */
    localStorage;
    /** Adapter for remote file storage operations */
    remoteStorage;
    /**
     * Callback function to watch for changes in attachment references in your data model.
     *
     * This should be implemented by the user of AttachmentQueue to monitor changes in your application's
     * data that reference attachments. When attachments are added, removed, or modified,
     * this callback should trigger the onUpdate function with the current set of attachments.
     */
    watchAttachments;
    /** Name of the database table storing attachment records */
    tableName;
    /** Logger instance for diagnostic information */
    logger;
    /** Interval in milliseconds between periodic sync operations. Default: 30000 (30 seconds) */
    syncIntervalMs = 30 * 1000;
    /** Duration in milliseconds to throttle sync operations */
    syncThrottleDuration;
    /** Whether to automatically download remote attachments. Default: true */
    downloadAttachments = true;
    /** Maximum number of archived attachments to keep before cleanup. Default: 100 */
    archivedCacheLimit;
    /** Service for managing attachment-related database operations */
    attachmentService;
    /** PowerSync database instance */
    db;
    /** Cleanup function for status change listener */
    statusListenerDispose;
    /** Differential watch over queued attachment records; set by startSync, closed by stopSync */
    watchActiveAttachments;
    /** Aborts processing of the user-provided watchAttachments callback when sync stops */
    watchAttachmentsAbortController;
    /**
     * Creates a new AttachmentQueue instance.
     *
     * @param options - Configuration options
     * @param options.db - PowerSync database instance
     * @param options.remoteStorage - Remote storage adapter for upload/download operations
     * @param options.localStorage - Local storage adapter for file persistence
     * @param options.watchAttachments - Callback for monitoring attachment changes in your data model
     * @param options.tableName - Name of the table to store attachment records. Default: 'ps_attachment_queue'
     * @param options.logger - Logger instance. Defaults to db.logger
     * @param options.syncIntervalMs - Interval between automatic syncs in milliseconds. Default: 30000
     * @param options.syncThrottleDuration - Throttle duration for sync operations in milliseconds. Default: 1000
     * @param options.downloadAttachments - Whether to automatically download remote attachments. Default: true
     * @param options.archivedCacheLimit - Maximum archived attachments before cleanup. Default: 100
     */
    constructor({ db, localStorage, remoteStorage, watchAttachments, logger, tableName = ATTACHMENT_TABLE, syncIntervalMs = 30 * 1000, syncThrottleDuration = DEFAULT_WATCH_THROTTLE_MS, downloadAttachments = true, archivedCacheLimit = 100, errorHandler }) {
        this.db = db;
        this.remoteStorage = remoteStorage;
        this.localStorage = localStorage;
        this.watchAttachments = watchAttachments;
        this.tableName = tableName;
        this.syncIntervalMs = syncIntervalMs;
        this.syncThrottleDuration = syncThrottleDuration;
        this.archivedCacheLimit = archivedCacheLimit;
        this.downloadAttachments = downloadAttachments;
        this.logger = logger ?? db.logger;
        this.attachmentService = new AttachmentService(db, this.logger, tableName, archivedCacheLimit);
        this.syncingService = new SyncingService(this.attachmentService, localStorage, remoteStorage, this.logger, errorHandler);
    }
    /**
     * Generates a new attachment ID using a SQLite UUID function.
     *
     * @returns Promise resolving to the new attachment ID
     */
    async generateAttachmentId() {
        return this.db.get('SELECT uuid() as id').then((row) => row.id);
    }
    /**
     * Starts the attachment synchronization process.
     *
     * This method:
     * - Stops any existing sync operations
     * - Sets up periodic synchronization based on syncIntervalMs
     * - Registers listeners for active attachment changes
     * - Processes watched attachments to queue uploads/downloads
     * - Handles state transitions for archived and new attachments
     */
    async startSync() {
        await this.stopSync();
        this.watchActiveAttachments = this.attachmentService.watchActiveAttachments({
            throttleMs: this.syncThrottleDuration
        });
        // immediately invoke the sync storage to initialize local storage
        await this.localStorage.initialize();
        await this.verifyAttachments();
        // Sync storage periodically
        this.periodicSyncTimer = setInterval(async () => {
            await this.syncStorage();
        }, this.syncIntervalMs);
        // Sync storage when there is a change in active attachments
        this.watchActiveAttachments.registerListener({
            onDiff: async () => {
                await this.syncStorage();
            }
        });
        this.statusListenerDispose = this.db.registerListener({
            statusChanged: (status) => {
                if (status.connected) {
                    // Device came online, process attachments immediately
                    this.syncStorage().catch((error) => {
                        this.logger.error('Error syncing storage on connection:', error);
                    });
                }
            }
        });
        this.watchAttachmentsAbortController = new AbortController();
        const signal = this.watchAttachmentsAbortController.signal;
        // Process attachments when there is a change in watched attachments
        this.watchAttachments(async (watchedAttachments) => {
            // Skip processing if sync has been stopped
            if (signal.aborted) {
                return;
            }
            await this.attachmentService.withContext(async (ctx) => {
                // Need to get all the attachments which are tracked in the DB.
                // We might need to restore an archived attachment.
                const currentAttachments = await ctx.getAttachments();
                const attachmentUpdates = [];
                for (const watchedAttachment of watchedAttachments) {
                    const existingQueueItem = currentAttachments.find((a) => a.id === watchedAttachment.id);
                    if (!existingQueueItem) {
                        // Item is watched but not in the queue yet. Need to add it.
                        if (!this.downloadAttachments) {
                            continue;
                        }
                        const filename = watchedAttachment.filename ?? `${watchedAttachment.id}.${watchedAttachment.fileExtension}`;
                        attachmentUpdates.push({
                            id: watchedAttachment.id,
                            filename,
                            state: AttachmentState.QUEUED_DOWNLOAD,
                            hasSynced: false,
                            metaData: watchedAttachment.metaData,
                            timestamp: new Date().getTime()
                        });
                        continue;
                    }
                    if (existingQueueItem.state === AttachmentState.ARCHIVED) {
                        // The attachment is present again. Need to queue it for sync.
                        // We might be able to optimize this in future
                        if (existingQueueItem.hasSynced === true) {
                            // No remote action required, we can restore the record (avoids deletion)
                            attachmentUpdates.push({
                                ...existingQueueItem,
                                state: AttachmentState.SYNCED
                            });
                        }
                        else {
                            // The localURI should be set if the record was meant to be uploaded
                            // and hasSynced is false then
                            // it must be an upload operation
                            const newState = existingQueueItem.localUri == null ? AttachmentState.QUEUED_DOWNLOAD : AttachmentState.QUEUED_UPLOAD;
                            attachmentUpdates.push({
                                ...existingQueueItem,
                                state: newState
                            });
                        }
                    }
                }
                // Reverse pass: anything tracked in the queue but no longer watched
                // should be archived (uploads/deletes only once they have synced).
                for (const attachment of currentAttachments) {
                    const notInWatchedItems = watchedAttachments.find((i) => i.id === attachment.id) == null;
                    if (notInWatchedItems) {
                        switch (attachment.state) {
                            case AttachmentState.QUEUED_DELETE:
                            case AttachmentState.QUEUED_UPLOAD:
                                // Only archive if it has synced
                                if (attachment.hasSynced === true) {
                                    attachmentUpdates.push({
                                        ...attachment,
                                        state: AttachmentState.ARCHIVED
                                    });
                                }
                                break;
                            default:
                                // Archive other states such as QUEUED_DOWNLOAD
                                attachmentUpdates.push({
                                    ...attachment,
                                    state: AttachmentState.ARCHIVED
                                });
                        }
                    }
                }
                if (attachmentUpdates.length > 0) {
                    await ctx.saveAttachments(attachmentUpdates);
                }
            });
        }, signal);
    }
    /**
     * Synchronizes all active attachments between local and remote storage.
     *
     * This is called automatically at regular intervals when sync is started,
     * but can also be called manually to trigger an immediate sync.
     */
    async syncStorage() {
        await this.attachmentService.withContext(async (ctx) => {
            const activeAttachments = await ctx.getActiveAttachments();
            await this.localStorage.initialize();
            await this.syncingService.processAttachments(activeAttachments, ctx);
            await this.syncingService.deleteArchivedAttachments(ctx);
        });
    }
    /**
     * Stops the attachment synchronization process.
     *
     * Clears the periodic sync timer and closes all active attachment watchers.
     */
    async stopSync() {
        clearInterval(this.periodicSyncTimer);
        this.periodicSyncTimer = undefined;
        if (this.watchActiveAttachments)
            await this.watchActiveAttachments.close();
        if (this.watchAttachmentsAbortController) {
            this.watchAttachmentsAbortController.abort();
        }
        if (this.statusListenerDispose) {
            this.statusListenerDispose();
            this.statusListenerDispose = undefined;
        }
    }
    /**
     * Saves a file to local storage and queues it for upload to remote storage.
     *
     * @param options - File save options
     * @param options.data - The file data as ArrayBuffer, Blob, or base64 string
     * @param options.fileExtension - File extension (e.g., 'jpg', 'pdf')
     * @param options.mediaType - MIME type of the file (e.g., 'image/jpeg')
     * @param options.metaData - Optional metadata to associate with the attachment
     * @param options.id - Optional custom ID. If not provided, a UUID will be generated
     * @param options.updateHook - Optional callback to execute additional database operations
     * within the same transaction as the attachment creation
     * @returns Promise resolving to the created attachment record
     */
    async saveFile({ data, fileExtension, mediaType, metaData, id, updateHook }) {
        const resolvedId = id ?? (await this.generateAttachmentId());
        const filename = `${resolvedId}.${fileExtension}`;
        const localUri = this.localStorage.getLocalUri(filename);
        const size = await this.localStorage.saveFile(localUri, data);
        const attachment = {
            id: resolvedId,
            filename,
            mediaType,
            localUri,
            state: AttachmentState.QUEUED_UPLOAD,
            hasSynced: false,
            size,
            timestamp: new Date().getTime(),
            metaData
        };
        await this.attachmentService.withContext(async (ctx) => {
            // The updateHook runs in the same write transaction as the record upsert.
            await ctx.db.writeTransaction(async (tx) => {
                await updateHook?.(tx, attachment);
                await ctx.upsertAttachment(attachment, tx);
            });
        });
        return attachment;
    }
    /**
     * Queues an existing attachment for deletion from remote and local storage.
     *
     * @param options.id - ID of the attachment record to delete
     * @param options.updateHook - Optional callback to execute additional database operations
     * within the same transaction as the state change
     * @throws Error if no attachment with the given id exists
     */
    async deleteFile({ id, updateHook }) {
        await this.attachmentService.withContext(async (ctx) => {
            const attachment = await ctx.getAttachment(id);
            if (!attachment) {
                throw new Error(`Attachment with id ${id} not found`);
            }
            await ctx.db.writeTransaction(async (tx) => {
                await updateHook?.(tx, attachment);
                await ctx.upsertAttachment({
                    ...attachment,
                    state: AttachmentState.QUEUED_DELETE,
                    hasSynced: false
                }, tx);
            });
        });
    }
    /**
     * Removes archived attachments until the syncing service reports the
     * cleanup is complete (deleteArchivedAttachments returns a done flag).
     */
    async expireCache() {
        let isDone = false;
        while (!isDone) {
            await this.attachmentService.withContext(async (ctx) => {
                isDone = await this.syncingService.deleteArchivedAttachments(ctx);
            });
        }
    }
    /**
     * Clears all attachment records from the queue and wipes local attachment storage.
     */
    async clearQueue() {
        await this.attachmentService.withContext(async (ctx) => {
            await ctx.clearQueue();
        });
        await this.localStorage.clear();
    }
    /**
     * Verifies the integrity of all attachment records and repairs inconsistencies.
     *
     * This method checks each attachment record against the local filesystem and:
     * - Updates localUri if the file exists at a different path
     * - Archives attachments with missing local files that haven't been uploaded
     * - Requeues synced attachments for download if their local files are missing
     */
    async verifyAttachments() {
        await this.attachmentService.withContext(async (ctx) => {
            const attachments = await ctx.getAttachments();
            const updates = [];
            for (const attachment of attachments) {
                if (attachment.localUri == null) {
                    continue;
                }
                const exists = await this.localStorage.fileExists(attachment.localUri);
                if (exists) {
                    // The file exists, this is correct
                    continue;
                }
                const newLocalUri = this.localStorage.getLocalUri(attachment.filename);
                const newExists = await this.localStorage.fileExists(newLocalUri);
                if (newExists) {
                    // The file exists locally but the localUri is broken, we update it.
                    updates.push({
                        ...attachment,
                        localUri: newLocalUri
                    });
                }
                else {
                    // the file doesn't exist locally.
                    if (attachment.state === AttachmentState.SYNCED) {
                        // the file has been successfully synced to remote storage but is missing
                        // we download it again
                        updates.push({
                            ...attachment,
                            state: AttachmentState.QUEUED_DOWNLOAD,
                            localUri: undefined
                        });
                    }
                    else {
                        // the file wasn't successfully synced to remote storage, we archive it
                        updates.push({
                            ...attachment,
                            state: AttachmentState.ARCHIVED,
                            localUri: undefined // Clears the value
                        });
                    }
                }
            }
            await ctx.saveAttachments(updates);
        });
    }
}
3725
+
3726
/** Text encodings supported for attachment file data. */
var EncodingType;
EncodingType || (EncodingType = {});
Object.assign(EncodingType, {
    UTF8: 'utf8',
    Base64: 'base64'
});
3731
+
2516
3732
/**
 * Unwraps a CommonJS interop module object: returns its `default` export when
 * the object is marked `__esModule` and has an own `default` property,
 * otherwise returns the input unchanged (including falsy inputs).
 */
function getDefaultExportFromCjs (x) {
    if (!x) {
        return x;
    }
    const hasDefaultExport = x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default');
    return hasDefaultExport ? x['default'] : x;
}
@@ -3829,20 +5045,6 @@ class MetaBaseObserver extends BaseObserver {
3829
5045
  }
3830
5046
  }
3831
5047
 
3832
- var WatchedQueryListenerEvent;
3833
- (function (WatchedQueryListenerEvent) {
3834
- WatchedQueryListenerEvent["ON_DATA"] = "onData";
3835
- WatchedQueryListenerEvent["ON_ERROR"] = "onError";
3836
- WatchedQueryListenerEvent["ON_STATE_CHANGE"] = "onStateChange";
3837
- WatchedQueryListenerEvent["SETTINGS_WILL_UPDATE"] = "settingsWillUpdate";
3838
- WatchedQueryListenerEvent["CLOSED"] = "closed";
3839
- })(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));
3840
- const DEFAULT_WATCH_THROTTLE_MS = 30;
3841
- const DEFAULT_WATCH_QUERY_OPTIONS = {
3842
- throttleMs: DEFAULT_WATCH_THROTTLE_MS,
3843
- reportFetching: true
3844
- };
3845
-
3846
5048
  /**
3847
5049
  * Performs underlying watching and yields a stream of results.
3848
5050
  * @internal
@@ -11755,7 +12957,7 @@ function requireDist () {
11755
12957
 
11756
12958
  var distExports = requireDist();
11757
12959
 
11758
- var version = "1.45.0";
12960
+ var version = "1.46.0";
11759
12961
  var PACKAGE = {
11760
12962
  version: version};
11761
12963
 
@@ -12657,18 +13859,17 @@ var SyncClientImplementation;
12657
13859
  *
12658
13860
  * This is the default option.
12659
13861
  *
12660
- * @deprecated Don't use {@link SyncClientImplementation.JAVASCRIPT} directly. Instead, use
12661
- * {@link DEFAULT_SYNC_CLIENT_IMPLEMENTATION} or omit the option. The explicit choice to use
12662
- * the JavaScript-based sync implementation will be removed from a future version of the SDK.
13862
+ * @deprecated We recommend the {@link RUST} client implementation for all apps. If you have issues with
13863
+ * the Rust client, please file an issue or reach out to us. The JavaScript client will be removed in a future
13864
+ * version of the PowerSync SDK.
12663
13865
  */
12664
13866
  SyncClientImplementation["JAVASCRIPT"] = "js";
12665
13867
  /**
12666
13868
  * This implementation offloads the sync line decoding and handling into the PowerSync
12667
13869
  * core extension.
12668
13870
  *
12669
- * @experimental
12670
- * While this implementation is more performant than {@link SyncClientImplementation.JAVASCRIPT},
12671
- * it has seen less real-world testing and is marked as __experimental__ at the moment.
13871
+ * This option is more performant than the {@link JAVASCRIPT} client, enabled by default and the
13872
+ * recommended client implementation for all apps.
12672
13873
  *
12673
13874
  * ## Compatibility warning
12674
13875
  *
@@ -12686,13 +13887,9 @@ var SyncClientImplementation;
12686
13887
  SyncClientImplementation["RUST"] = "rust";
12687
13888
  })(SyncClientImplementation || (SyncClientImplementation = {}));
12688
13889
  /**
12689
- * The default {@link SyncClientImplementation} to use.
12690
- *
12691
- * Please use this field instead of {@link SyncClientImplementation.JAVASCRIPT} directly. A future version
12692
- * of the PowerSync SDK will enable {@link SyncClientImplementation.RUST} by default and remove the JavaScript
12693
- * option.
13890
+ * The default {@link SyncClientImplementation} to use, {@link SyncClientImplementation.RUST}.
12694
13891
  */
12695
- const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = SyncClientImplementation.JAVASCRIPT;
13892
+ const DEFAULT_SYNC_CLIENT_IMPLEMENTATION = SyncClientImplementation.RUST;
12696
13893
  const DEFAULT_CRUD_UPLOAD_THROTTLE_MS = 1000;
12697
13894
  const DEFAULT_RETRY_DELAY_MS = 5000;
12698
13895
  const DEFAULT_STREAMING_SYNC_OPTIONS = {
@@ -13102,6 +14299,9 @@ The next upload iteration will be delayed.`);
13102
14299
  if (rawTables != null && rawTables.length) {
13103
14300
  this.logger.warn('Raw tables require the Rust-based sync client. The JS client will ignore them.');
13104
14301
  }
14302
+ if (this.activeStreams.length) {
14303
+ this.logger.error('Sync streams require `clientImplementation: SyncClientImplementation.RUST` when connecting.');
14304
+ }
13105
14305
  this.logger.debug('Streaming sync iteration started');
13106
14306
  this.options.adapter.startSession();
13107
14307
  let [req, bucketMap] = await this.collectLocalBucketState();
@@ -13617,6 +14817,27 @@ The next upload iteration will be delayed.`);
13617
14817
  }
13618
14818
  }
13619
14819
 
14820
// Maps claim identifier -> release function for that claim.
const CLAIM_STORE = new Map();
/**
 * Trigger claim manager backed by in-process memory. Claims exist only for the
 * lifetime of this JavaScript context and are released via the function
 * returned from obtainClaim.
 * @internal
 * @experimental
 */
const MEMORY_TRIGGER_CLAIM_MANAGER = {
    async obtainClaim(identifier) {
        if (CLAIM_STORE.has(identifier)) {
            throw new Error(`A claim is already present for ${identifier}`);
        }
        // The stored value doubles as the release callback for this claim.
        const releaseClaim = async () => {
            CLAIM_STORE.delete(identifier);
        };
        CLAIM_STORE.set(identifier, releaseClaim);
        return releaseClaim;
    },
    async checkClaim(identifier) {
        return CLAIM_STORE.has(identifier);
    }
};
14840
+
13620
14841
  /**
13621
14842
  * SQLite operations to track changes for with {@link TriggerManager}
13622
14843
  * @experimental
@@ -13628,9 +14849,20 @@ var DiffTriggerOperation;
13628
14849
  DiffTriggerOperation["DELETE"] = "DELETE";
13629
14850
  })(DiffTriggerOperation || (DiffTriggerOperation = {}));
13630
14851
 
14852
/** Baseline configuration applied to TriggerManagerImpl instances. */
const DEFAULT_TRIGGER_MANAGER_CONFIGURATION = {
    useStorageByDefault: false
};
/** Interval between periodic trigger cleanup passes: 2 minutes. */
const TRIGGER_CLEANUP_INTERVAL_MS = 2 * 60 * 1000;
14856
+ /**
14857
+ * @internal
14858
+ * @experimental
14859
+ */
13631
14860
  class TriggerManagerImpl {
13632
14861
  options;
13633
14862
  schema;
14863
+ defaultConfig;
14864
+ cleanupTimeout;
14865
+ isDisposed;
13634
14866
  constructor(options) {
13635
14867
  this.options = options;
13636
14868
  this.schema = options.schema;
@@ -13639,6 +14871,33 @@ class TriggerManagerImpl {
13639
14871
  this.schema = schema;
13640
14872
  }
13641
14873
  });
14874
+ this.isDisposed = false;
14875
+ /**
14876
+ * Configure a cleanup to run on an interval.
14877
+ * The interval is configured using setTimeout to take the async
14878
+ * execution time of the callback into account.
14879
+ */
14880
+ this.defaultConfig = DEFAULT_TRIGGER_MANAGER_CONFIGURATION;
14881
+ const cleanupCallback = async () => {
14882
+ this.cleanupTimeout = null;
14883
+ if (this.isDisposed) {
14884
+ return;
14885
+ }
14886
+ try {
14887
+ await this.cleanupResources();
14888
+ }
14889
+ catch (ex) {
14890
+ this.db.logger.error(`Caught error while attempting to cleanup triggers`, ex);
14891
+ }
14892
+ finally {
14893
+ // if not closed, set another timeout
14894
+ if (this.isDisposed) {
14895
+ return;
14896
+ }
14897
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
14898
+ }
14899
+ };
14900
+ this.cleanupTimeout = setTimeout(cleanupCallback, TRIGGER_CLEANUP_INTERVAL_MS);
13642
14901
  }
13643
14902
  get db() {
13644
14903
  return this.options.db;
@@ -13656,13 +14915,95 @@ class TriggerManagerImpl {
13656
14915
  await tx.execute(/* sql */ `DROP TRIGGER IF EXISTS ${triggerId}; `);
13657
14916
  }
13658
14917
  }
14918
+ dispose() {
14919
+ this.isDisposed = true;
14920
+ if (this.cleanupTimeout) {
14921
+ clearTimeout(this.cleanupTimeout);
14922
+ }
14923
+ }
14924
+ /**
14925
+ * Updates default config settings for platform specific use-cases.
14926
+ */
14927
+ updateDefaults(config) {
14928
+ this.defaultConfig = {
14929
+ ...this.defaultConfig,
14930
+ ...config
14931
+ };
14932
+ }
14933
+ generateTriggerName(operation, destinationTable, triggerId) {
14934
+ return `__ps_temp_trigger_${operation.toLowerCase()}__${destinationTable}__${triggerId}`;
14935
+ }
14936
+ /**
14937
+ * Cleanup any SQLite triggers or tables that are no longer in use.
14938
+ */
14939
+ async cleanupResources() {
14940
+ // we use the database here since cleanupResources is called during the PowerSyncDatabase initialization
14941
+ await this.db.database.writeLock(async (ctx) => {
14942
+ /**
14943
+ * Note: We only cleanup persisted triggers. These are tracked in the sqlite_master table.
14944
+ * temporary triggers will not be affected by this.
14945
+ * Query all triggers that match our naming pattern
14946
+ */
14947
+ const triggers = await ctx.getAll(/* sql */ `
14948
+ SELECT
14949
+ name
14950
+ FROM
14951
+ sqlite_master
14952
+ WHERE
14953
+ type = 'trigger'
14954
+ AND name LIKE '__ps_temp_trigger_%'
14955
+ `);
14956
+ /** Use regex to extract table names and IDs from trigger names
14957
+ * Trigger naming convention: __ps_temp_trigger_<operation>__<destination_table>__<id>
14958
+ */
14959
+ const triggerPattern = /^__ps_temp_trigger_(?:insert|update|delete)__(.+)__([a-f0-9_]{36})$/i;
14960
+ const trackedItems = new Map();
14961
+ for (const trigger of triggers) {
14962
+ const match = trigger.name.match(triggerPattern);
14963
+ if (match) {
14964
+ const [, table, id] = match;
14965
+ // Collect all trigger names for each id combo
14966
+ const existing = trackedItems.get(id);
14967
+ if (existing) {
14968
+ existing.triggerNames.push(trigger.name);
14969
+ }
14970
+ else {
14971
+ trackedItems.set(id, { table, id, triggerNames: [trigger.name] });
14972
+ }
14973
+ }
14974
+ }
14975
+ for (const trackedItem of trackedItems.values()) {
14976
+ // check if there is anything holding on to this item
14977
+ const hasClaim = await this.options.claimManager.checkClaim(trackedItem.id);
14978
+ if (hasClaim) {
14979
+ // This does not require cleanup
14980
+ continue;
14981
+ }
14982
+ this.db.logger.debug(`Clearing resources for trigger ${trackedItem.id} with table ${trackedItem.table}`);
14983
+ // We need to delete the triggers and table
14984
+ for (const triggerName of trackedItem.triggerNames) {
14985
+ await ctx.execute(`DROP TRIGGER IF EXISTS ${triggerName}`);
14986
+ }
14987
+ await ctx.execute(`DROP TABLE IF EXISTS ${trackedItem.table}`);
14988
+ }
14989
+ });
14990
+ }
13659
14991
  async createDiffTrigger(options) {
13660
14992
  await this.db.waitForReady();
13661
- const { source, destination, columns, when, hooks } = options;
14993
+ const { source, destination, columns, when, hooks,
14994
+ // Fall back to the provided default if not given on this level
14995
+ useStorage = this.defaultConfig.useStorageByDefault } = options;
13662
14996
  const operations = Object.keys(when);
13663
14997
  if (operations.length == 0) {
13664
14998
  throw new Error('At least one WHEN operation must be specified for the trigger.');
13665
14999
  }
15000
+ /**
15001
+ * The clause to use when executing
15002
+ * CREATE ${tableTriggerTypeClause} TABLE
15003
+ * OR
15004
+ * CREATE ${tableTriggerTypeClause} TRIGGER
15005
+ */
15006
+ const tableTriggerTypeClause = !useStorage ? 'TEMP' : '';
13666
15007
  const whenClauses = Object.fromEntries(Object.entries(when).map(([operation, filter]) => [operation, `WHEN ${filter}`]));
13667
15008
  /**
13668
15009
  * Allow specifying the View name as the source.
@@ -13676,6 +15017,7 @@ class TriggerManagerImpl {
13676
15017
  const internalSource = sourceDefinition.internalName;
13677
15018
  const triggerIds = [];
13678
15019
  const id = await this.getUUID();
15020
+ const releaseStorageClaim = useStorage ? await this.options.claimManager.obtainClaim(id) : null;
13679
15021
  /**
13680
15022
  * We default to replicating all columns if no columns array is provided.
13681
15023
  */
@@ -13708,26 +15050,27 @@ class TriggerManagerImpl {
13708
15050
  return this.db.writeLock(async (tx) => {
13709
15051
  await this.removeTriggers(tx, triggerIds);
13710
15052
  await tx.execute(/* sql */ `DROP TABLE IF EXISTS ${destination};`);
15053
+ await releaseStorageClaim?.();
13711
15054
  });
13712
15055
  };
13713
15056
  const setup = async (tx) => {
13714
15057
  // Allow user code to execute in this lock context before the trigger is created.
13715
15058
  await hooks?.beforeCreate?.(tx);
13716
15059
  await tx.execute(/* sql */ `
13717
- CREATE TEMP TABLE ${destination} (
15060
+ CREATE ${tableTriggerTypeClause} TABLE ${destination} (
13718
15061
  operation_id INTEGER PRIMARY KEY AUTOINCREMENT,
13719
15062
  id TEXT,
13720
15063
  operation TEXT,
13721
15064
  timestamp TEXT,
13722
15065
  value TEXT,
13723
15066
  previous_value TEXT
13724
- );
15067
+ )
13725
15068
  `);
13726
15069
  if (operations.includes(DiffTriggerOperation.INSERT)) {
13727
- const insertTriggerId = `ps_temp_trigger_insert_${id}`;
15070
+ const insertTriggerId = this.generateTriggerName(DiffTriggerOperation.INSERT, destination, id);
13728
15071
  triggerIds.push(insertTriggerId);
13729
15072
  await tx.execute(/* sql */ `
13730
- CREATE TEMP TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[DiffTriggerOperation.INSERT]} BEGIN
15073
+ CREATE ${tableTriggerTypeClause} TRIGGER ${insertTriggerId} AFTER INSERT ON ${internalSource} ${whenClauses[DiffTriggerOperation.INSERT]} BEGIN
13731
15074
  INSERT INTO
13732
15075
  ${destination} (id, operation, timestamp, value)
13733
15076
  VALUES
@@ -13738,14 +15081,14 @@ class TriggerManagerImpl {
13738
15081
  ${jsonFragment('NEW')}
13739
15082
  );
13740
15083
 
13741
- END;
15084
+ END
13742
15085
  `);
13743
15086
  }
13744
15087
  if (operations.includes(DiffTriggerOperation.UPDATE)) {
13745
- const updateTriggerId = `ps_temp_trigger_update_${id}`;
15088
+ const updateTriggerId = this.generateTriggerName(DiffTriggerOperation.UPDATE, destination, id);
13746
15089
  triggerIds.push(updateTriggerId);
13747
15090
  await tx.execute(/* sql */ `
13748
- CREATE TEMP TRIGGER ${updateTriggerId} AFTER
15091
+ CREATE ${tableTriggerTypeClause} TRIGGER ${updateTriggerId} AFTER
13749
15092
  UPDATE ON ${internalSource} ${whenClauses[DiffTriggerOperation.UPDATE]} BEGIN
13750
15093
  INSERT INTO
13751
15094
  ${destination} (id, operation, timestamp, value, previous_value)
@@ -13762,11 +15105,11 @@ class TriggerManagerImpl {
13762
15105
  `);
13763
15106
  }
13764
15107
  if (operations.includes(DiffTriggerOperation.DELETE)) {
13765
- const deleteTriggerId = `ps_temp_trigger_delete_${id}`;
15108
+ const deleteTriggerId = this.generateTriggerName(DiffTriggerOperation.DELETE, destination, id);
13766
15109
  triggerIds.push(deleteTriggerId);
13767
15110
  // Create delete trigger for basic JSON
13768
15111
  await tx.execute(/* sql */ `
13769
- CREATE TEMP TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[DiffTriggerOperation.DELETE]} BEGIN
15112
+ CREATE ${tableTriggerTypeClause} TRIGGER ${deleteTriggerId} AFTER DELETE ON ${internalSource} ${whenClauses[DiffTriggerOperation.DELETE]} BEGIN
13770
15113
  INSERT INTO
13771
15114
  ${destination} (id, operation, timestamp, value)
13772
15115
  VALUES
@@ -13810,7 +15153,7 @@ class TriggerManagerImpl {
13810
15153
  // If no array is provided, we use all columns from the source table.
13811
15154
  const contextColumns = columns ?? sourceDefinition.columns.map((col) => col.name);
13812
15155
  const id = await this.getUUID();
13813
- const destination = `ps_temp_track_${source}_${id}`;
15156
+ const destination = `__ps_temp_track_${source}_${id}`;
13814
15157
  // register an onChange before the trigger is created
13815
15158
  const abortController = new AbortController();
13816
15159
  const abortOnChange = () => abortController.abort();
@@ -13965,6 +15308,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
13965
15308
  * Allows creating SQLite triggers which can be used to track various operations on SQLite tables.
13966
15309
  */
13967
15310
  triggers;
15311
+ triggersImpl;
13968
15312
  logger;
13969
15313
  constructor(options) {
13970
15314
  super();
@@ -14028,9 +15372,10 @@ class AbstractPowerSyncDatabase extends BaseObserver {
14028
15372
  logger: this.logger
14029
15373
  });
14030
15374
  this._isReadyPromise = this.initialize();
14031
- this.triggers = new TriggerManagerImpl({
15375
+ this.triggers = this.triggersImpl = new TriggerManagerImpl({
14032
15376
  db: this,
14033
- schema: this.schema
15377
+ schema: this.schema,
15378
+ ...this.generateTriggerManagerConfig()
14034
15379
  });
14035
15380
  }
14036
15381
  /**
@@ -14056,6 +15401,15 @@ class AbstractPowerSyncDatabase extends BaseObserver {
14056
15401
  get connecting() {
14057
15402
  return this.currentStatus?.connecting || false;
14058
15403
  }
15404
+ /**
15405
+ * Generates a base configuration for {@link TriggerManagerImpl}.
15406
+ * Implementations should override this if necessary.
15407
+ */
15408
+ generateTriggerManagerConfig() {
15409
+ return {
15410
+ claimManager: MEMORY_TRIGGER_CLAIM_MANAGER
15411
+ };
15412
+ }
14059
15413
  /**
14060
15414
  * @returns A promise which will resolve once initialization is completed.
14061
15415
  */
@@ -14120,6 +15474,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
14120
15474
  await this.updateSchema(this.options.schema);
14121
15475
  await this.resolveOfflineSyncStatus();
14122
15476
  await this.database.execute('PRAGMA RECURSIVE_TRIGGERS=TRUE');
15477
+ await this.triggersImpl.cleanupResources();
14123
15478
  this.ready = true;
14124
15479
  this.iterateListeners((cb) => cb.initialized?.());
14125
15480
  }
@@ -14239,7 +15594,6 @@ class AbstractPowerSyncDatabase extends BaseObserver {
14239
15594
  await this.disconnect();
14240
15595
  await this.waitForReady();
14241
15596
  const { clearLocal } = options;
14242
- // TODO DB name, verify this is necessary with extension
14243
15597
  await this.database.writeTransaction(async (tx) => {
14244
15598
  await tx.execute('SELECT powersync_clear(?)', [clearLocal ? 1 : 0]);
14245
15599
  });
@@ -14271,6 +15625,7 @@ class AbstractPowerSyncDatabase extends BaseObserver {
14271
15625
  if (this.closed) {
14272
15626
  return;
14273
15627
  }
15628
+ this.triggersImpl.dispose();
14274
15629
  await this.iterateAsyncListeners(async (cb) => cb.closing?.());
14275
15630
  const { disconnect } = options;
14276
15631
  if (disconnect) {
@@ -15236,151 +16591,50 @@ class SqliteBucketStorage extends BaseObserver {
15236
16591
  ]);
15237
16592
  return r != 0;
15238
16593
  }
15239
- async migrateToFixedSubkeys() {
15240
- await this.writeTransaction(async (tx) => {
15241
- await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
15242
- await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
15243
- SqliteBucketStorage._subkeyMigrationKey,
15244
- '1'
15245
- ]);
15246
- });
15247
- }
15248
- static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
15249
- }
15250
- function hasMatchingPriority(priority, bucket) {
15251
- return bucket.priority != null && bucket.priority <= priority;
15252
- }
15253
-
15254
- // TODO JSON
15255
- class SyncDataBatch {
15256
- buckets;
15257
- static fromJSON(json) {
15258
- return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
15259
- }
15260
- constructor(buckets) {
15261
- this.buckets = buckets;
15262
- }
15263
- }
15264
-
15265
- /**
15266
- * Thrown when an underlying database connection is closed.
15267
- * This is particularly relevant when worker connections are marked as closed while
15268
- * operations are still in progress.
15269
- */
15270
- class ConnectionClosedError extends Error {
15271
- static NAME = 'ConnectionClosedError';
15272
- static MATCHES(input) {
15273
- /**
15274
- * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
15275
- * check might fail. This also performs a failsafe check.
15276
- * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
15277
- */
15278
- return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
15279
- }
15280
- constructor(message) {
15281
- super(message);
15282
- this.name = ConnectionClosedError.NAME;
15283
- }
15284
- }
15285
-
15286
- // https://www.sqlite.org/lang_expr.html#castexpr
15287
- var ColumnType;
15288
- (function (ColumnType) {
15289
- ColumnType["TEXT"] = "TEXT";
15290
- ColumnType["INTEGER"] = "INTEGER";
15291
- ColumnType["REAL"] = "REAL";
15292
- })(ColumnType || (ColumnType = {}));
15293
- const text = {
15294
- type: ColumnType.TEXT
15295
- };
15296
- const integer = {
15297
- type: ColumnType.INTEGER
15298
- };
15299
- const real = {
15300
- type: ColumnType.REAL
15301
- };
15302
- // powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
15303
- // In earlier versions this was limited to 63.
15304
- const MAX_AMOUNT_OF_COLUMNS = 1999;
15305
- const column = {
15306
- text,
15307
- integer,
15308
- real
15309
- };
15310
- class Column {
15311
- options;
15312
- constructor(options) {
15313
- this.options = options;
15314
- }
15315
- get name() {
15316
- return this.options.name;
15317
- }
15318
- get type() {
15319
- return this.options.type;
15320
- }
15321
- toJSON() {
15322
- return {
15323
- name: this.name,
15324
- type: this.type
15325
- };
15326
- }
15327
- }
15328
-
15329
- const DEFAULT_INDEX_COLUMN_OPTIONS = {
15330
- ascending: true
15331
- };
15332
- class IndexedColumn {
15333
- options;
15334
- static createAscending(column) {
15335
- return new IndexedColumn({
15336
- name: column,
15337
- ascending: true
15338
- });
15339
- }
15340
- constructor(options) {
15341
- this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
15342
- }
15343
- get name() {
15344
- return this.options.name;
15345
- }
15346
- get ascending() {
15347
- return this.options.ascending;
15348
- }
15349
- toJSON(table) {
15350
- return {
15351
- name: this.name,
15352
- ascending: this.ascending,
15353
- type: table.columns.find((column) => column.name === this.name)?.type ?? ColumnType.TEXT
15354
- };
15355
- }
15356
- }
15357
-
15358
- const DEFAULT_INDEX_OPTIONS = {
15359
- columns: []
15360
- };
15361
- class Index {
15362
- options;
15363
- static createAscending(options, columnNames) {
15364
- return new Index({
15365
- ...options,
15366
- columns: columnNames.map((name) => IndexedColumn.createAscending(name))
16594
+ async migrateToFixedSubkeys() {
16595
+ await this.writeTransaction(async (tx) => {
16596
+ await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
16597
+ await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
16598
+ SqliteBucketStorage._subkeyMigrationKey,
16599
+ '1'
16600
+ ]);
15367
16601
  });
15368
16602
  }
15369
- constructor(options) {
15370
- this.options = options;
15371
- this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
16603
+ static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
16604
+ }
16605
+ function hasMatchingPriority(priority, bucket) {
16606
+ return bucket.priority != null && bucket.priority <= priority;
16607
+ }
16608
+
16609
+ // TODO JSON
16610
+ class SyncDataBatch {
16611
+ buckets;
16612
+ static fromJSON(json) {
16613
+ return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
15372
16614
  }
15373
- get name() {
15374
- return this.options.name;
16615
+ constructor(buckets) {
16616
+ this.buckets = buckets;
15375
16617
  }
15376
- get columns() {
15377
- return this.options.columns ?? [];
16618
+ }
16619
+
16620
+ /**
16621
+ * Thrown when an underlying database connection is closed.
16622
+ * This is particularly relevant when worker connections are marked as closed while
16623
+ * operations are still in progress.
16624
+ */
16625
+ class ConnectionClosedError extends Error {
16626
+ static NAME = 'ConnectionClosedError';
16627
+ static MATCHES(input) {
16628
+ /**
16629
+ * If there are weird package issues which cause multiple versions of classes to be present, the instanceof
16630
+ * check might fail. This also performs a failsafe check.
16631
+ * This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
16632
+ */
16633
+ return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
15378
16634
  }
15379
- toJSON(table) {
15380
- return {
15381
- name: this.name,
15382
- columns: this.columns.map((c) => c.toJSON(table))
15383
- };
16635
+ constructor(message) {
16636
+ super(message);
16637
+ this.name = ConnectionClosedError.NAME;
15384
16638
  }
15385
16639
  }
15386
16640
 
@@ -15477,211 +16731,6 @@ class Schema {
15477
16731
  }
15478
16732
  }
15479
16733
 
15480
- const DEFAULT_TABLE_OPTIONS = {
15481
- indexes: [],
15482
- insertOnly: false,
15483
- localOnly: false,
15484
- trackPrevious: false,
15485
- trackMetadata: false,
15486
- ignoreEmptyUpdates: false
15487
- };
15488
- const InvalidSQLCharacters = /["'%,.#\s[\]]/;
15489
- class Table {
15490
- options;
15491
- _mappedColumns;
15492
- static createLocalOnly(options) {
15493
- return new Table({ ...options, localOnly: true, insertOnly: false });
15494
- }
15495
- static createInsertOnly(options) {
15496
- return new Table({ ...options, localOnly: false, insertOnly: true });
15497
- }
15498
- /**
15499
- * Create a table.
15500
- * @deprecated This was only only included for TableV2 and is no longer necessary.
15501
- * Prefer to use new Table() directly.
15502
- *
15503
- * TODO remove in the next major release.
15504
- */
15505
- static createTable(name, table) {
15506
- return new Table({
15507
- name,
15508
- columns: table.columns,
15509
- indexes: table.indexes,
15510
- localOnly: table.options.localOnly,
15511
- insertOnly: table.options.insertOnly,
15512
- viewName: table.options.viewName
15513
- });
15514
- }
15515
- constructor(optionsOrColumns, v2Options) {
15516
- if (this.isTableV1(optionsOrColumns)) {
15517
- this.initTableV1(optionsOrColumns);
15518
- }
15519
- else {
15520
- this.initTableV2(optionsOrColumns, v2Options);
15521
- }
15522
- }
15523
- copyWithName(name) {
15524
- return new Table({
15525
- ...this.options,
15526
- name
15527
- });
15528
- }
15529
- isTableV1(arg) {
15530
- return 'columns' in arg && Array.isArray(arg.columns);
15531
- }
15532
- initTableV1(options) {
15533
- this.options = {
15534
- ...options,
15535
- indexes: options.indexes || []
15536
- };
15537
- this.applyDefaultOptions();
15538
- }
15539
- initTableV2(columns, options) {
15540
- const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
15541
- const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
15542
- name,
15543
- columns: columnNames.map((name) => new IndexedColumn({
15544
- name: name.replace(/^-/, ''),
15545
- ascending: !name.startsWith('-')
15546
- }))
15547
- }));
15548
- this.options = {
15549
- name: '',
15550
- columns: convertedColumns,
15551
- indexes: convertedIndexes,
15552
- viewName: options?.viewName,
15553
- insertOnly: options?.insertOnly,
15554
- localOnly: options?.localOnly,
15555
- trackPrevious: options?.trackPrevious,
15556
- trackMetadata: options?.trackMetadata,
15557
- ignoreEmptyUpdates: options?.ignoreEmptyUpdates
15558
- };
15559
- this.applyDefaultOptions();
15560
- this._mappedColumns = columns;
15561
- }
15562
- applyDefaultOptions() {
15563
- this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
15564
- this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
15565
- this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
15566
- this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
15567
- this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
15568
- }
15569
- get name() {
15570
- return this.options.name;
15571
- }
15572
- get viewNameOverride() {
15573
- return this.options.viewName;
15574
- }
15575
- get viewName() {
15576
- return this.viewNameOverride ?? this.name;
15577
- }
15578
- get columns() {
15579
- return this.options.columns;
15580
- }
15581
- get columnMap() {
15582
- return (this._mappedColumns ??
15583
- this.columns.reduce((hash, column) => {
15584
- hash[column.name] = { type: column.type ?? ColumnType.TEXT };
15585
- return hash;
15586
- }, {}));
15587
- }
15588
- get indexes() {
15589
- return this.options.indexes ?? [];
15590
- }
15591
- get localOnly() {
15592
- return this.options.localOnly;
15593
- }
15594
- get insertOnly() {
15595
- return this.options.insertOnly;
15596
- }
15597
- get trackPrevious() {
15598
- return this.options.trackPrevious;
15599
- }
15600
- get trackMetadata() {
15601
- return this.options.trackMetadata;
15602
- }
15603
- get ignoreEmptyUpdates() {
15604
- return this.options.ignoreEmptyUpdates;
15605
- }
15606
- get internalName() {
15607
- if (this.options.localOnly) {
15608
- return `ps_data_local__${this.name}`;
15609
- }
15610
- return `ps_data__${this.name}`;
15611
- }
15612
- get validName() {
15613
- if (InvalidSQLCharacters.test(this.name)) {
15614
- return false;
15615
- }
15616
- if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
15617
- return false;
15618
- }
15619
- return true;
15620
- }
15621
- validate() {
15622
- if (InvalidSQLCharacters.test(this.name)) {
15623
- throw new Error(`Invalid characters in table name: ${this.name}`);
15624
- }
15625
- if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
15626
- throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
15627
- }
15628
- if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
15629
- throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
15630
- }
15631
- if (this.trackMetadata && this.localOnly) {
15632
- throw new Error(`Can't include metadata for local-only tables.`);
15633
- }
15634
- if (this.trackPrevious != false && this.localOnly) {
15635
- throw new Error(`Can't include old values for local-only tables.`);
15636
- }
15637
- const columnNames = new Set();
15638
- columnNames.add('id');
15639
- for (const column of this.columns) {
15640
- const { name: columnName } = column;
15641
- if (column.name === 'id') {
15642
- throw new Error(`An id column is automatically added, custom id columns are not supported`);
15643
- }
15644
- if (columnNames.has(columnName)) {
15645
- throw new Error(`Duplicate column ${columnName}`);
15646
- }
15647
- if (InvalidSQLCharacters.test(columnName)) {
15648
- throw new Error(`Invalid characters in column name: ${column.name}`);
15649
- }
15650
- columnNames.add(columnName);
15651
- }
15652
- const indexNames = new Set();
15653
- for (const index of this.indexes) {
15654
- if (indexNames.has(index.name)) {
15655
- throw new Error(`Duplicate index ${index.name}`);
15656
- }
15657
- if (InvalidSQLCharacters.test(index.name)) {
15658
- throw new Error(`Invalid characters in index name: ${index.name}`);
15659
- }
15660
- for (const column of index.columns) {
15661
- if (!columnNames.has(column.name)) {
15662
- throw new Error(`Column ${column.name} not found for index ${index.name}`);
15663
- }
15664
- }
15665
- indexNames.add(index.name);
15666
- }
15667
- }
15668
- toJSON() {
15669
- const trackPrevious = this.trackPrevious;
15670
- return {
15671
- name: this.name,
15672
- view_name: this.viewName,
15673
- local_only: this.localOnly,
15674
- insert_only: this.insertOnly,
15675
- include_old: trackPrevious && (trackPrevious.columns ?? true),
15676
- include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
15677
- include_metadata: this.trackMetadata,
15678
- ignore_empty_update: this.ignoreEmptyUpdates,
15679
- columns: this.columns.map((c) => c.toJSON()),
15680
- indexes: this.indexes.map((e) => e.toJSON(this))
15681
- };
15682
- }
15683
- }
15684
-
15685
16734
  /**
15686
16735
  Generate a new table from the columns and indexes
15687
16736
  @deprecated You should use {@link Table} instead as it now allows TableV2 syntax.