@powersync/common 1.46.0 → 1.48.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/dist/bundle.cjs +1298 -395
- package/dist/bundle.cjs.map +1 -1
- package/dist/bundle.mjs +1291 -395
- package/dist/bundle.mjs.map +1 -1
- package/dist/bundle.node.cjs +1298 -395
- package/dist/bundle.node.cjs.map +1 -1
- package/dist/bundle.node.mjs +1291 -395
- package/dist/bundle.node.mjs.map +1 -1
- package/dist/index.d.cts +652 -106
- package/lib/attachments/AttachmentContext.d.ts +86 -0
- package/lib/attachments/AttachmentContext.js +229 -0
- package/lib/attachments/AttachmentContext.js.map +1 -0
- package/lib/attachments/AttachmentErrorHandler.d.ts +31 -0
- package/lib/attachments/AttachmentErrorHandler.js +2 -0
- package/lib/attachments/AttachmentErrorHandler.js.map +1 -0
- package/lib/attachments/AttachmentQueue.d.ts +149 -0
- package/lib/attachments/AttachmentQueue.js +362 -0
- package/lib/attachments/AttachmentQueue.js.map +1 -0
- package/lib/attachments/AttachmentService.d.ts +29 -0
- package/lib/attachments/AttachmentService.js +56 -0
- package/lib/attachments/AttachmentService.js.map +1 -0
- package/lib/attachments/LocalStorageAdapter.d.ts +62 -0
- package/lib/attachments/LocalStorageAdapter.js +6 -0
- package/lib/attachments/LocalStorageAdapter.js.map +1 -0
- package/lib/attachments/RemoteStorageAdapter.d.ts +27 -0
- package/lib/attachments/RemoteStorageAdapter.js +2 -0
- package/lib/attachments/RemoteStorageAdapter.js.map +1 -0
- package/lib/attachments/Schema.d.ts +50 -0
- package/lib/attachments/Schema.js +62 -0
- package/lib/attachments/Schema.js.map +1 -0
- package/lib/attachments/SyncingService.d.ts +62 -0
- package/lib/attachments/SyncingService.js +168 -0
- package/lib/attachments/SyncingService.js.map +1 -0
- package/lib/attachments/WatchedAttachmentItem.d.ts +17 -0
- package/lib/attachments/WatchedAttachmentItem.js +2 -0
- package/lib/attachments/WatchedAttachmentItem.js.map +1 -0
- package/lib/db/schema/RawTable.d.ts +61 -26
- package/lib/db/schema/RawTable.js +1 -32
- package/lib/db/schema/RawTable.js.map +1 -1
- package/lib/db/schema/Schema.d.ts +14 -7
- package/lib/db/schema/Schema.js +25 -3
- package/lib/db/schema/Schema.js.map +1 -1
- package/lib/db/schema/Table.d.ts +13 -8
- package/lib/db/schema/Table.js +3 -8
- package/lib/db/schema/Table.js.map +1 -1
- package/lib/db/schema/internal.d.ts +12 -0
- package/lib/db/schema/internal.js +15 -0
- package/lib/db/schema/internal.js.map +1 -0
- package/lib/index.d.ts +11 -1
- package/lib/index.js +10 -1
- package/lib/index.js.map +1 -1
- package/lib/utils/mutex.d.ts +1 -1
- package/lib/utils/mutex.js.map +1 -1
- package/package.json +1 -1
- package/src/attachments/AttachmentContext.ts +279 -0
- package/src/attachments/AttachmentErrorHandler.ts +34 -0
- package/src/attachments/AttachmentQueue.ts +472 -0
- package/src/attachments/AttachmentService.ts +62 -0
- package/src/attachments/LocalStorageAdapter.ts +72 -0
- package/src/attachments/README.md +718 -0
- package/src/attachments/RemoteStorageAdapter.ts +30 -0
- package/src/attachments/Schema.ts +87 -0
- package/src/attachments/SyncingService.ts +193 -0
- package/src/attachments/WatchedAttachmentItem.ts +19 -0
- package/src/db/schema/RawTable.ts +66 -31
- package/src/db/schema/Schema.ts +27 -2
- package/src/db/schema/Table.ts +11 -11
- package/src/db/schema/internal.ts +17 -0
- package/src/index.ts +12 -1
- package/src/utils/mutex.ts +1 -1
package/dist/bundle.mjs
CHANGED
|
@@ -1,5 +1,1232 @@
|
|
|
1
1
|
import { Mutex } from 'async-mutex';
|
|
2
2
|
|
|
3
|
+
// https://www.sqlite.org/lang_expr.html#castexpr
var ColumnType;
(function (target) {
    target['TEXT'] = 'TEXT';
    target['INTEGER'] = 'INTEGER';
    target['REAL'] = 'REAL';
})(ColumnType || (ColumnType = {}));
/** Shorthand column descriptors, one per supported SQLite type. */
const text = { type: ColumnType.TEXT };
const integer = { type: ColumnType.INTEGER };
const real = { type: ColumnType.REAL };
// powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
// In earlier versions this was limited to 63.
const MAX_AMOUNT_OF_COLUMNS = 1999;
/** Convenience bundle of the per-type column shorthands. */
const column = { text, integer, real };
|
|
27
|
+
/**
 * A single named column in a table schema, carrying its SQLite type.
 */
class Column {
    options;
    /**
     * @param options - holds the column's `name` and `type`.
     */
    constructor(options) {
        this.options = options;
    }
    /** The column's name. */
    get name() {
        return this.options.name;
    }
    /** The column's SQLite type. */
    get type() {
        return this.options.type;
    }
    /** Serializes the column into its plain schema representation. */
    toJSON() {
        const { name, type } = this;
        return { name, type };
    }
}
|
|
45
|
+
|
|
46
|
+
// Indexed columns sort ascending unless the caller says otherwise.
const DEFAULT_INDEX_COLUMN_OPTIONS = {
    ascending: true
};
/**
 * A column participating in an index, with its sort direction.
 */
class IndexedColumn {
    options;
    /** Builds an ascending indexed column for the given column name. */
    static createAscending(column) {
        return new IndexedColumn({
            name: column,
            ascending: true
        });
    }
    constructor(options) {
        this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
    }
    /** Name of the underlying table column. */
    get name() {
        return this.options.name;
    }
    /** Whether the index sorts this column ascending. */
    get ascending() {
        return this.options.ascending;
    }
    /**
     * Serializes this indexed column, resolving its type from the owning table.
     * Falls back to TEXT when the column is not found on the table.
     */
    toJSON(table) {
        const match = table.columns.find((col) => col.name === this.name);
        return {
            name: this.name,
            ascending: this.ascending,
            type: match?.type ?? ColumnType.TEXT
        };
    }
}
|
|
74
|
+
|
|
75
|
+
// An index has no columns unless the caller provides some.
const DEFAULT_INDEX_OPTIONS = {
    columns: []
};
/**
 * Describes a SQLite index over one or more columns of a table.
 */
class Index {
    options;
    /**
     * Creates an index where every named column sorts ascending.
     *
     * @param options - index options (e.g. `name`), minus `columns`
     * @param columnNames - the column names to index, in order
     */
    static createAscending(options, columnNames) {
        return new Index({
            ...options,
            columns: columnNames.map((name) => IndexedColumn.createAscending(name))
        });
    }
    constructor(options) {
        // Fix: the original assigned `this.options = options` and then immediately
        // overwrote it with the merged object; the first assignment was dead code.
        this.options = { ...DEFAULT_INDEX_OPTIONS, ...options };
    }
    /** The index's name. */
    get name() {
        return this.options.name;
    }
    /** The indexed columns (never null). */
    get columns() {
        return this.options.columns ?? [];
    }
    /**
     * Serializes the index, resolving each column's type from the owning table.
     */
    toJSON(table) {
        return {
            name: this.name,
            columns: this.columns.map((c) => c.toJSON(table))
        };
    }
}
|
|
103
|
+
|
|
104
|
+
/**
 * Converts the user-facing table options into the snake_case flags expected by
 * the PowerSync core schema format.
 *
 * @internal Not exported from `index.ts`.
 */
function encodeTableOptions(options) {
    const { trackPrevious, localOnly, insertOnly, trackMetadata, ignoreEmptyUpdates } = options;
    // `trackPrevious` may be a boolean or an object with `columns`/`onlyWhenChanged`.
    const onlyWhenChanged = typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true;
    return {
        local_only: localOnly,
        insert_only: insertOnly,
        include_old: trackPrevious && (trackPrevious.columns ?? true),
        include_old_only_when_changed: onlyWhenChanged,
        include_metadata: trackMetadata,
        ignore_empty_update: ignoreEmptyUpdates
    };
}
|
|
118
|
+
|
|
119
|
+
// Defaults applied to Table options the caller leaves unset.
const DEFAULT_TABLE_OPTIONS = {
    indexes: [],
    ignoreEmptyUpdates: false,
    insertOnly: false,
    localOnly: false,
    trackMetadata: false,
    trackPrevious: false
};
// Characters not permitted in table, view, column, or index identifiers.
const InvalidSQLCharacters = /["'%,.#\s[\]]/;
|
|
128
|
+
/**
 * Describes a synced SQLite table: its columns, indexes, and sync behaviour
 * flags (local-only, insert-only, previous-value tracking, metadata tracking).
 *
 * Supports two construction styles: the "v1" options-object form
 * (`new Table({ name, columns, ... })`) and the "v2" column-map form
 * (`new Table({ colName: column.text, ... }, options)`).
 */
class Table {
    // Normalized table options; mutated in place by applyDefaultOptions().
    options;
    // For v2 construction only: the original column map, returned by `columnMap`.
    _mappedColumns;
    /** Creates a table whose rows stay on-device and are never synced. */
    static createLocalOnly(options) {
        return new Table({ ...options, localOnly: true, insertOnly: false });
    }
    /** Creates a table that records local writes without storing synced data. */
    static createInsertOnly(options) {
        return new Table({ ...options, localOnly: false, insertOnly: true });
    }
    /**
     * Create a table.
     * @deprecated This was only included for TableV2 and is no longer necessary.
     * Prefer to use new Table() directly.
     *
     * TODO remove in the next major release.
     */
    static createTable(name, table) {
        return new Table({
            name,
            columns: table.columns,
            indexes: table.indexes,
            localOnly: table.options.localOnly,
            insertOnly: table.options.insertOnly,
            viewName: table.options.viewName
        });
    }
    /**
     * @param optionsOrColumns - v1 options object (with a `columns` array) or a
     *   v2 column map keyed by column name.
     * @param v2Options - extra options, only used with the v2 column-map form.
     */
    constructor(optionsOrColumns, v2Options) {
        if (this.isTableV1(optionsOrColumns)) {
            this.initTableV1(optionsOrColumns);
        }
        else {
            this.initTableV2(optionsOrColumns, v2Options);
        }
    }
    /** Returns a copy of this table definition under a different name. */
    copyWithName(name) {
        return new Table({
            ...this.options,
            name
        });
    }
    // A v1 argument is distinguished by `columns` being an array (v2 passes a map).
    isTableV1(arg) {
        return 'columns' in arg && Array.isArray(arg.columns);
    }
    initTableV1(options) {
        this.options = {
            ...options,
            indexes: options.indexes || []
        };
        this.applyDefaultOptions();
    }
    initTableV2(columns, options) {
        const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
        // v2 index entries map an index name to column names; a leading '-'
        // on a column name requests descending order.
        const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
            name,
            columns: columnNames.map((name) => new IndexedColumn({
                name: name.replace(/^-/, ''),
                ascending: !name.startsWith('-')
            }))
        }));
        this.options = {
            name: '',
            columns: convertedColumns,
            indexes: convertedIndexes,
            viewName: options?.viewName,
            insertOnly: options?.insertOnly,
            localOnly: options?.localOnly,
            trackPrevious: options?.trackPrevious,
            trackMetadata: options?.trackMetadata,
            ignoreEmptyUpdates: options?.ignoreEmptyUpdates
        };
        this.applyDefaultOptions();
        this._mappedColumns = columns;
    }
    // Fills any unset behaviour flags with DEFAULT_TABLE_OPTIONS values (in place).
    applyDefaultOptions() {
        this.options.insertOnly ??= DEFAULT_TABLE_OPTIONS.insertOnly;
        this.options.localOnly ??= DEFAULT_TABLE_OPTIONS.localOnly;
        this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
        this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
        this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
    }
    get name() {
        return this.options.name;
    }
    /** Explicit view-name override, if one was configured. */
    get viewNameOverride() {
        return this.options.viewName;
    }
    /** Effective view name: the override when set, otherwise the table name. */
    get viewName() {
        return this.viewNameOverride ?? this.name;
    }
    get columns() {
        return this.options.columns;
    }
    /** Column map keyed by name; built lazily for v1 tables (type defaults to TEXT). */
    get columnMap() {
        return (this._mappedColumns ??
            this.columns.reduce((hash, column) => {
                hash[column.name] = { type: column.type ?? ColumnType.TEXT };
                return hash;
            }, {}));
    }
    get indexes() {
        return this.options.indexes ?? [];
    }
    get localOnly() {
        return this.options.localOnly;
    }
    get insertOnly() {
        return this.options.insertOnly;
    }
    get trackPrevious() {
        return this.options.trackPrevious;
    }
    get trackMetadata() {
        return this.options.trackMetadata;
    }
    get ignoreEmptyUpdates() {
        return this.options.ignoreEmptyUpdates;
    }
    /** Physical SQLite table name backing this logical table. */
    get internalName() {
        if (this.options.localOnly) {
            return `ps_data_local__${this.name}`;
        }
        return `ps_data__${this.name}`;
    }
    /** True when both the table name and any view override are free of invalid characters. */
    get validName() {
        if (InvalidSQLCharacters.test(this.name)) {
            return false;
        }
        if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
            return false;
        }
        return true;
    }
    /**
     * Validates names, column count, flag combinations, and index references.
     * @throws Error describing the first violation found.
     */
    validate() {
        if (InvalidSQLCharacters.test(this.name)) {
            throw new Error(`Invalid characters in table name: ${this.name}`);
        }
        if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
            throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
        }
        if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
            throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
        }
        if (this.trackMetadata && this.localOnly) {
            throw new Error(`Can't include metadata for local-only tables.`);
        }
        if (this.trackPrevious != false && this.localOnly) {
            throw new Error(`Can't include old values for local-only tables.`);
        }
        // 'id' is reserved: it is added implicitly and may not be declared.
        const columnNames = new Set();
        columnNames.add('id');
        for (const column of this.columns) {
            const { name: columnName } = column;
            if (column.name === 'id') {
                throw new Error(`An id column is automatically added, custom id columns are not supported`);
            }
            if (columnNames.has(columnName)) {
                throw new Error(`Duplicate column ${columnName}`);
            }
            if (InvalidSQLCharacters.test(columnName)) {
                throw new Error(`Invalid characters in column name: ${column.name}`);
            }
            columnNames.add(columnName);
        }
        const indexNames = new Set();
        for (const index of this.indexes) {
            if (indexNames.has(index.name)) {
                throw new Error(`Duplicate index ${index.name}`);
            }
            if (InvalidSQLCharacters.test(index.name)) {
                throw new Error(`Invalid characters in index name: ${index.name}`);
            }
            // Every indexed column must exist on the table (or be the implicit id).
            for (const column of index.columns) {
                if (!columnNames.has(column.name)) {
                    throw new Error(`Column ${column.name} not found for index ${index.name}`);
                }
            }
            indexNames.add(index.name);
        }
    }
    /** Serializes the table into the schema JSON consumed by the sync core. */
    toJSON() {
        return {
            name: this.name,
            view_name: this.viewName,
            columns: this.columns.map((c) => c.toJSON()),
            indexes: this.indexes.map((e) => e.toJSON(this)),
            ...encodeTableOptions(this)
        };
    }
}
|
|
317
|
+
|
|
318
|
+
const ATTACHMENT_TABLE = 'attachments';
/**
 * Maps a database row to an AttachmentRecord.
 *
 * snake_case row fields become camelCase record fields, and the integer
 * `has_synced` flag becomes a boolean.
 *
 * @param row - The database row object
 * @returns The corresponding AttachmentRecord
 *
 * @experimental
 */
function attachmentFromSql(row) {
    const { id, filename, local_uri, size, media_type, timestamp, meta_data, has_synced, state } = row;
    return {
        id,
        filename,
        localUri: local_uri,
        size,
        mediaType: media_type,
        timestamp,
        metaData: meta_data,
        hasSynced: has_synced === 1,
        state
    };
}
|
|
340
|
+
/**
 * AttachmentState represents the current synchronization state of an attachment.
 *
 * @experimental
 */
var AttachmentState;
(function (target) {
    const members = [
        ['QUEUED_UPLOAD', 0],
        ['QUEUED_DOWNLOAD', 1],
        ['QUEUED_DELETE', 2],
        ['SYNCED', 3],
        // Attachment has been orphaned, i.e. the associated record has been deleted
        ['ARCHIVED', 4]
    ];
    for (const [key, value] of members) {
        target[key] = value;
        target[value] = key; // numeric-enum reverse mapping, as TypeScript emits
    }
})(AttachmentState || (AttachmentState = {}));
|
|
353
|
+
/**
 * AttachmentTable defines the schema for the attachment queue table.
 *
 * @internal
 */
class AttachmentTable extends Table {
    /**
     * @param options - optional overrides; `viewName` defaults to the shared
     *   attachments view. The table is always local-only and never insert-only.
     */
    constructor(options) {
        const attachmentColumns = {
            filename: column.text,
            local_uri: column.text,
            timestamp: column.integer,
            size: column.integer,
            media_type: column.text,
            state: column.integer, // Corresponds to AttachmentState
            has_synced: column.integer,
            meta_data: column.text
        };
        super(attachmentColumns, {
            ...options,
            viewName: options?.viewName ?? ATTACHMENT_TABLE,
            localOnly: true,
            insertOnly: false
        });
    }
}
|
|
377
|
+
|
|
378
|
+
/**
 * AttachmentContext provides database operations for managing attachment records.
 *
 * Provides methods to query, insert, update, and delete attachment records with
 * proper transaction management through PowerSync.
 *
 * @internal
 */
class AttachmentContext {
    /** PowerSync database instance for executing queries */
    db;
    /** Name of the database table storing attachment records */
    tableName;
    /** Logger instance for diagnostic information */
    logger;
    /** Maximum number of archived attachments to keep before cleanup */
    archivedCacheLimit = 100;
    /**
     * Creates a new AttachmentContext instance.
     *
     * @param db - PowerSync database instance
     * @param tableName - Name of the table storing attachment records. Default: 'attachments'
     * @param logger - Logger instance for diagnostic output
     * @param archivedCacheLimit - Number of archived records to retain. Default: 100
     */
    constructor(db, tableName = 'attachments', logger, archivedCacheLimit) {
        this.db = db;
        this.tableName = tableName;
        this.logger = logger;
        // Fix: only override the field default when a limit was actually provided.
        // Previously an omitted argument clobbered the default with `undefined`,
        // which then leaked into the SQL OFFSET in deleteArchivedAttachments.
        this.archivedCacheLimit = archivedCacheLimit ?? 100;
    }
    /**
     * Retrieves all active attachments that require synchronization.
     * Active attachments include those queued for upload, download, or delete.
     * Results are ordered by timestamp in ascending order.
     *
     * @returns Promise resolving to an array of active attachment records
     */
    async getActiveAttachments() {
        const attachments = await this.db.getAll(
        /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
          OR state = ?
          OR state = ?
        ORDER BY
          timestamp ASC
      `, [AttachmentState.QUEUED_UPLOAD, AttachmentState.QUEUED_DOWNLOAD, AttachmentState.QUEUED_DELETE]);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Retrieves all archived attachments.
     *
     * Archived attachments are no longer referenced but haven't been permanently deleted.
     * These are candidates for cleanup operations to free up storage space.
     *
     * @returns Promise resolving to an array of archived attachment records
     */
    async getArchivedAttachments() {
        const attachments = await this.db.getAll(
        /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
        ORDER BY
          timestamp ASC
      `, [AttachmentState.ARCHIVED]);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Retrieves all attachment records regardless of state.
     * Results are ordered by timestamp in ascending order.
     *
     * @returns Promise resolving to an array of all attachment records
     */
    async getAttachments() {
        const attachments = await this.db.getAll(
        /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        ORDER BY
          timestamp ASC
      `, []);
        return attachments.map(attachmentFromSql);
    }
    /**
     * Inserts or updates an attachment record within an existing transaction.
     *
     * Performs an upsert operation (INSERT OR REPLACE). Must be called within
     * an active database transaction context.
     *
     * @param attachment - The attachment record to upsert
     * @param context - Active database transaction context
     */
    async upsertAttachment(attachment, context) {
        await context.execute(
        /* sql */ `
        INSERT
        OR REPLACE INTO ${this.tableName} (
          id,
          filename,
          local_uri,
          size,
          media_type,
          timestamp,
          state,
          has_synced,
          meta_data
        )
        VALUES
          (?, ?, ?, ?, ?, ?, ?, ?, ?)
      `, [
            attachment.id,
            attachment.filename,
            attachment.localUri || null,
            attachment.size || null,
            attachment.mediaType || null,
            attachment.timestamp,
            attachment.state,
            attachment.hasSynced ? 1 : 0,
            attachment.metaData || null
        ]);
    }
    /**
     * Retrieves a single attachment record by id.
     *
     * @param id - Unique identifier of the attachment
     * @returns Promise resolving to the record, or undefined when not found
     */
    async getAttachment(id) {
        const attachment = await this.db.get(
        /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          id = ?
      `, [id]);
        return attachment ? attachmentFromSql(attachment) : undefined;
    }
    /**
     * Permanently deletes an attachment record from the database.
     *
     * This operation removes the attachment record but does not delete
     * the associated local or remote files. File deletion should be handled
     * separately through the appropriate storage adapters.
     *
     * @param attachmentId - Unique identifier of the attachment to delete
     */
    async deleteAttachment(attachmentId) {
        await this.db.writeTransaction((tx) => tx.execute(
        /* sql */ `
          DELETE FROM ${this.tableName}
          WHERE
            id = ?
        `, [attachmentId]));
    }
    /**
     * Removes every attachment record from the queue table.
     * Local and remote files are not touched.
     */
    async clearQueue() {
        await this.db.writeTransaction((tx) => tx.execute(/* sql */ ` DELETE FROM ${this.tableName} `));
    }
    /**
     * Deletes archived attachment records beyond the archived cache limit,
     * in pages of up to 1000 rows per invocation.
     *
     * @param callback - Invoked with the records about to be deleted, so the
     *   caller can clean up local files before the records disappear
     * @returns Promise resolving to true when no further pages remain
     */
    async deleteArchivedAttachments(callback) {
        // Page size per cleanup pass; a full page means another pass is needed.
        const limit = 1000;
        const results = await this.db.getAll(
        /* sql */ `
        SELECT
          *
        FROM
          ${this.tableName}
        WHERE
          state = ?
        ORDER BY
          timestamp DESC
        LIMIT
          ?
        OFFSET
          ?
      `, [AttachmentState.ARCHIVED, limit, this.archivedCacheLimit]);
        const archivedAttachments = results.map(attachmentFromSql);
        if (archivedAttachments.length === 0)
            return false;
        await callback?.(archivedAttachments);
        this.logger.info(`Deleting ${archivedAttachments.length} archived attachments. Archived attachment exceeds cache archiveCacheLimit of ${this.archivedCacheLimit}.`);
        const ids = archivedAttachments.map((attachment) => attachment.id);
        // Delete by id list via json_each to avoid building a dynamic IN (...) clause.
        await this.db.execute(
        /* sql */ `
        DELETE FROM ${this.tableName}
        WHERE
          id IN (
            SELECT
              json_each.value
            FROM
              json_each (?)
          );
      `, [JSON.stringify(ids)]);
        this.logger.info(`Deleted ${archivedAttachments.length} archived attachments`);
        return archivedAttachments.length < limit;
    }
    /**
     * Saves multiple attachment records in a single transaction.
     *
     * All updates are saved in a single batch after processing.
     * If the attachments array is empty, no database operations are performed.
     *
     * @param attachments - Array of attachment records to save
     */
    async saveAttachments(attachments) {
        if (attachments.length === 0) {
            return;
        }
        await this.db.writeTransaction(async (tx) => {
            for (const attachment of attachments) {
                await this.upsertAttachment(attachment, tx);
            }
        });
    }
}
|
|
605
|
+
|
|
606
|
+
/** Events emitted to watched-query listeners. */
var WatchedQueryListenerEvent;
(function (target) {
    Object.assign(target, {
        ON_DATA: 'onData',
        ON_ERROR: 'onError',
        ON_STATE_CHANGE: 'onStateChange',
        SETTINGS_WILL_UPDATE: 'settingsWillUpdate',
        CLOSED: 'closed'
    });
})(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));
/** Default throttle window for watched queries, in milliseconds. */
const DEFAULT_WATCH_THROTTLE_MS = 30;
const DEFAULT_WATCH_QUERY_OPTIONS = {
    throttleMs: DEFAULT_WATCH_THROTTLE_MS,
    reportFetching: true
};
|
|
619
|
+
|
|
620
|
+
/**
 * Orchestrates attachment synchronization between local and remote storage.
 * Handles uploads, downloads, deletions, and state transitions.
 *
 * @internal
 */
class SyncingService {
    // Provides AttachmentContext instances for record persistence.
    attachmentService;
    // Adapter for the on-device file store.
    localStorage;
    // Adapter for the remote file store.
    remoteStorage;
    // Logger for diagnostic output.
    logger;
    // Optional handler consulted on upload/download/delete failures;
    // its return value decides whether the operation is retried.
    errorHandler;
    /**
     * @param attachmentService - provides contexts for attachment record persistence
     * @param localStorage - local file storage adapter
     * @param remoteStorage - remote file storage adapter
     * @param logger - logger for diagnostic output
     * @param errorHandler - optional retry-decision handler for sync failures
     */
    constructor(attachmentService, localStorage, remoteStorage, logger, errorHandler) {
        this.attachmentService = attachmentService;
        this.localStorage = localStorage;
        this.remoteStorage = remoteStorage;
        this.logger = logger;
        this.errorHandler = errorHandler;
    }
    /**
     * Processes attachments based on their state (upload, download, or delete).
     * All updates are saved in a single batch after processing.
     *
     * @param attachments - Array of attachment records to process
     * @param context - Attachment context for database operations
     * @returns Promise that resolves when all attachments have been processed and saved
     */
    async processAttachments(attachments, context) {
        const updatedAttachments = [];
        for (const attachment of attachments) {
            // Attachments in other states (SYNCED, ARCHIVED) are left untouched.
            switch (attachment.state) {
                case AttachmentState.QUEUED_UPLOAD:
                    const uploaded = await this.uploadAttachment(attachment);
                    updatedAttachments.push(uploaded);
                    break;
                case AttachmentState.QUEUED_DOWNLOAD:
                    const downloaded = await this.downloadAttachment(attachment);
                    updatedAttachments.push(downloaded);
                    break;
                case AttachmentState.QUEUED_DELETE:
                    const deleted = await this.deleteAttachment(attachment);
                    updatedAttachments.push(deleted);
                    break;
            }
        }
        await context.saveAttachments(updatedAttachments);
    }
    /**
     * Uploads an attachment from local storage to remote storage.
     * On success, marks as SYNCED. On failure, defers to error handler or archives.
     *
     * @param attachment - The attachment record to upload
     * @returns Updated attachment record with new state
     * @throws Error if the attachment has no localUri
     */
    async uploadAttachment(attachment) {
        this.logger.info(`Uploading attachment ${attachment.filename}`);
        try {
            if (attachment.localUri == null) {
                throw new Error(`No localUri for attachment ${attachment.id}`);
            }
            const fileBlob = await this.localStorage.readFile(attachment.localUri);
            await this.remoteStorage.uploadFile(fileBlob, attachment);
            return {
                ...attachment,
                state: AttachmentState.SYNCED,
                hasSynced: true
            };
        }
        catch (error) {
            // Default is to retry (record returned unchanged); the error handler
            // can opt out, in which case the attachment is archived.
            const shouldRetry = (await this.errorHandler?.onUploadError(attachment, error)) ?? true;
            if (!shouldRetry) {
                return {
                    ...attachment,
                    state: AttachmentState.ARCHIVED
                };
            }
            return attachment;
        }
    }
    /**
     * Downloads an attachment from remote storage to local storage.
     * Retrieves the file, converts to base64, and saves locally.
     * On success, marks as SYNCED. On failure, defers to error handler or archives.
     *
     * @param attachment - The attachment record to download
     * @returns Updated attachment record with local URI and new state
     */
    async downloadAttachment(attachment) {
        this.logger.info(`Downloading attachment ${attachment.filename}`);
        try {
            const fileData = await this.remoteStorage.downloadFile(attachment);
            const localUri = this.localStorage.getLocalUri(attachment.filename);
            await this.localStorage.saveFile(localUri, fileData);
            return {
                ...attachment,
                state: AttachmentState.SYNCED,
                localUri: localUri,
                hasSynced: true
            };
        }
        catch (error) {
            // Same retry contract as uploadAttachment.
            const shouldRetry = (await this.errorHandler?.onDownloadError(attachment, error)) ?? true;
            if (!shouldRetry) {
                return {
                    ...attachment,
                    state: AttachmentState.ARCHIVED
                };
            }
            return attachment;
        }
    }
    /**
     * Deletes an attachment from both remote and local storage.
     * Removes the remote file, local file (if exists), and the attachment record.
     * On failure, defers to error handler or archives.
     *
     * @param attachment - The attachment record to delete
     * @returns Updated attachment record
     */
    async deleteAttachment(attachment) {
        try {
            await this.remoteStorage.deleteFile(attachment);
            if (attachment.localUri) {
                await this.localStorage.deleteFile(attachment.localUri);
            }
            await this.attachmentService.withContext(async (ctx) => {
                await ctx.deleteAttachment(attachment.id);
            });
            // NOTE(review): the database record was just deleted; the ARCHIVED
            // state on this returned copy appears to be informational only —
            // confirm callers do not persist it.
            return {
                ...attachment,
                state: AttachmentState.ARCHIVED
            };
        }
        catch (error) {
            const shouldRetry = (await this.errorHandler?.onDeleteError(attachment, error)) ?? true;
            if (!shouldRetry) {
                return {
                    ...attachment,
                    state: AttachmentState.ARCHIVED
                };
            }
            return attachment;
        }
    }
    /**
     * Performs cleanup of archived attachments by removing their local files and records.
     * Errors during local file deletion are logged but do not prevent record deletion.
     */
    async deleteArchivedAttachments(context) {
        return await context.deleteArchivedAttachments(async (archivedAttachments) => {
            for (const attachment of archivedAttachments) {
                if (attachment.localUri) {
                    try {
                        await this.localStorage.deleteFile(attachment.localUri);
                    }
                    catch (error) {
                        // Best-effort: a missing local file must not block record cleanup.
                        this.logger.error('Error deleting local file for archived attachment', error);
                    }
                }
            }
        });
    }
}
|
|
784
|
+
|
|
785
|
+
/**
|
|
786
|
+
* Wrapper for async-mutex runExclusive, which allows for a timeout on each exclusive lock.
|
|
787
|
+
*/
|
|
788
|
+
async function mutexRunExclusive(mutex, callback, options) {
|
|
789
|
+
return new Promise((resolve, reject) => {
|
|
790
|
+
const timeout = options?.timeoutMs;
|
|
791
|
+
let timedOut = false;
|
|
792
|
+
const timeoutId = timeout
|
|
793
|
+
? setTimeout(() => {
|
|
794
|
+
timedOut = true;
|
|
795
|
+
reject(new Error('Timeout waiting for lock'));
|
|
796
|
+
}, timeout)
|
|
797
|
+
: undefined;
|
|
798
|
+
mutex.runExclusive(async () => {
|
|
799
|
+
if (timeoutId) {
|
|
800
|
+
clearTimeout(timeoutId);
|
|
801
|
+
}
|
|
802
|
+
if (timedOut)
|
|
803
|
+
return;
|
|
804
|
+
try {
|
|
805
|
+
resolve(await callback());
|
|
806
|
+
}
|
|
807
|
+
catch (ex) {
|
|
808
|
+
reject(ex);
|
|
809
|
+
}
|
|
810
|
+
});
|
|
811
|
+
});
|
|
812
|
+
}
|
|
813
|
+
|
|
814
|
+
/**
|
|
815
|
+
* Service for querying and watching attachment records in the database.
|
|
816
|
+
*
|
|
817
|
+
* @internal
|
|
818
|
+
*/
|
|
819
|
+
class AttachmentService {
|
|
820
|
+
db;
|
|
821
|
+
logger;
|
|
822
|
+
tableName;
|
|
823
|
+
mutex = new Mutex();
|
|
824
|
+
context;
|
|
825
|
+
constructor(db, logger, tableName = 'attachments', archivedCacheLimit = 100) {
|
|
826
|
+
this.db = db;
|
|
827
|
+
this.logger = logger;
|
|
828
|
+
this.tableName = tableName;
|
|
829
|
+
this.context = new AttachmentContext(db, tableName, logger, archivedCacheLimit);
|
|
830
|
+
}
|
|
831
|
+
/**
|
|
832
|
+
* Creates a differential watch query for active attachments requiring synchronization.
|
|
833
|
+
* @returns Watch query that emits changes for queued uploads, downloads, and deletes
|
|
834
|
+
*/
|
|
835
|
+
watchActiveAttachments({ throttleMs } = {}) {
|
|
836
|
+
this.logger.info('Watching active attachments...');
|
|
837
|
+
const watch = this.db
|
|
838
|
+
.query({
|
|
839
|
+
sql: /* sql */ `
|
|
840
|
+
SELECT
|
|
841
|
+
*
|
|
842
|
+
FROM
|
|
843
|
+
${this.tableName}
|
|
844
|
+
WHERE
|
|
845
|
+
state = ?
|
|
846
|
+
OR state = ?
|
|
847
|
+
OR state = ?
|
|
848
|
+
ORDER BY
|
|
849
|
+
timestamp ASC
|
|
850
|
+
`,
|
|
851
|
+
parameters: [AttachmentState.QUEUED_UPLOAD, AttachmentState.QUEUED_DOWNLOAD, AttachmentState.QUEUED_DELETE]
|
|
852
|
+
})
|
|
853
|
+
.differentialWatch({ throttleMs });
|
|
854
|
+
return watch;
|
|
855
|
+
}
|
|
856
|
+
/**
|
|
857
|
+
* Executes a callback with exclusive access to the attachment context.
|
|
858
|
+
*/
|
|
859
|
+
async withContext(callback) {
|
|
860
|
+
return mutexRunExclusive(this.mutex, async () => {
|
|
861
|
+
return callback(this.context);
|
|
862
|
+
});
|
|
863
|
+
}
|
|
864
|
+
}
|
|
865
|
+
|
|
866
|
+
/**
|
|
867
|
+
* AttachmentQueue manages the lifecycle and synchronization of attachments
|
|
868
|
+
* between local and remote storage.
|
|
869
|
+
* Provides automatic synchronization, upload/download queuing, attachment monitoring,
|
|
870
|
+
* verification and repair of local files, and cleanup of archived attachments.
|
|
871
|
+
*
|
|
872
|
+
* @experimental
|
|
873
|
+
* @alpha This is currently experimental and may change without a major version bump.
|
|
874
|
+
*/
|
|
875
|
+
class AttachmentQueue {
|
|
876
|
+
/** Timer for periodic synchronization operations */
|
|
877
|
+
periodicSyncTimer;
|
|
878
|
+
/** Service for synchronizing attachments between local and remote storage */
|
|
879
|
+
syncingService;
|
|
880
|
+
/** Adapter for local file storage operations */
|
|
881
|
+
localStorage;
|
|
882
|
+
/** Adapter for remote file storage operations */
|
|
883
|
+
remoteStorage;
|
|
884
|
+
/**
|
|
885
|
+
* Callback function to watch for changes in attachment references in your data model.
|
|
886
|
+
*
|
|
887
|
+
* This should be implemented by the user of AttachmentQueue to monitor changes in your application's
|
|
888
|
+
* data that reference attachments. When attachments are added, removed, or modified,
|
|
889
|
+
* this callback should trigger the onUpdate function with the current set of attachments.
|
|
890
|
+
*/
|
|
891
|
+
watchAttachments;
|
|
892
|
+
/** Name of the database table storing attachment records */
|
|
893
|
+
tableName;
|
|
894
|
+
/** Logger instance for diagnostic information */
|
|
895
|
+
logger;
|
|
896
|
+
/** Interval in milliseconds between periodic sync operations. Default: 30000 (30 seconds) */
|
|
897
|
+
syncIntervalMs = 30 * 1000;
|
|
898
|
+
/** Duration in milliseconds to throttle sync operations */
|
|
899
|
+
syncThrottleDuration;
|
|
900
|
+
/** Whether to automatically download remote attachments. Default: true */
|
|
901
|
+
downloadAttachments = true;
|
|
902
|
+
/** Maximum number of archived attachments to keep before cleanup. Default: 100 */
|
|
903
|
+
archivedCacheLimit;
|
|
904
|
+
/** Service for managing attachment-related database operations */
|
|
905
|
+
attachmentService;
|
|
906
|
+
/** PowerSync database instance */
|
|
907
|
+
db;
|
|
908
|
+
/** Cleanup function for status change listener */
|
|
909
|
+
statusListenerDispose;
|
|
910
|
+
watchActiveAttachments;
|
|
911
|
+
watchAttachmentsAbortController;
|
|
912
|
+
/**
|
|
913
|
+
* Creates a new AttachmentQueue instance.
|
|
914
|
+
*
|
|
915
|
+
* @param options - Configuration options
|
|
916
|
+
* @param options.db - PowerSync database instance
|
|
917
|
+
* @param options.remoteStorage - Remote storage adapter for upload/download operations
|
|
918
|
+
* @param options.localStorage - Local storage adapter for file persistence
|
|
919
|
+
* @param options.watchAttachments - Callback for monitoring attachment changes in your data model
|
|
920
|
+
* @param options.tableName - Name of the table to store attachment records. Default: 'ps_attachment_queue'
|
|
921
|
+
* @param options.logger - Logger instance. Defaults to db.logger
|
|
922
|
+
* @param options.syncIntervalMs - Interval between automatic syncs in milliseconds. Default: 30000
|
|
923
|
+
* @param options.syncThrottleDuration - Throttle duration for sync operations in milliseconds. Default: 1000
|
|
924
|
+
* @param options.downloadAttachments - Whether to automatically download remote attachments. Default: true
|
|
925
|
+
* @param options.archivedCacheLimit - Maximum archived attachments before cleanup. Default: 100
|
|
926
|
+
*/
|
|
927
|
+
constructor({ db, localStorage, remoteStorage, watchAttachments, logger, tableName = ATTACHMENT_TABLE, syncIntervalMs = 30 * 1000, syncThrottleDuration = DEFAULT_WATCH_THROTTLE_MS, downloadAttachments = true, archivedCacheLimit = 100, errorHandler }) {
|
|
928
|
+
this.db = db;
|
|
929
|
+
this.remoteStorage = remoteStorage;
|
|
930
|
+
this.localStorage = localStorage;
|
|
931
|
+
this.watchAttachments = watchAttachments;
|
|
932
|
+
this.tableName = tableName;
|
|
933
|
+
this.syncIntervalMs = syncIntervalMs;
|
|
934
|
+
this.syncThrottleDuration = syncThrottleDuration;
|
|
935
|
+
this.archivedCacheLimit = archivedCacheLimit;
|
|
936
|
+
this.downloadAttachments = downloadAttachments;
|
|
937
|
+
this.logger = logger ?? db.logger;
|
|
938
|
+
this.attachmentService = new AttachmentService(db, this.logger, tableName, archivedCacheLimit);
|
|
939
|
+
this.syncingService = new SyncingService(this.attachmentService, localStorage, remoteStorage, this.logger, errorHandler);
|
|
940
|
+
}
|
|
941
|
+
/**
|
|
942
|
+
* Generates a new attachment ID using a SQLite UUID function.
|
|
943
|
+
*
|
|
944
|
+
* @returns Promise resolving to the new attachment ID
|
|
945
|
+
*/
|
|
946
|
+
async generateAttachmentId() {
|
|
947
|
+
return this.db.get('SELECT uuid() as id').then((row) => row.id);
|
|
948
|
+
}
|
|
949
|
+
/**
|
|
950
|
+
* Starts the attachment synchronization process.
|
|
951
|
+
*
|
|
952
|
+
* This method:
|
|
953
|
+
* - Stops any existing sync operations
|
|
954
|
+
* - Sets up periodic synchronization based on syncIntervalMs
|
|
955
|
+
* - Registers listeners for active attachment changes
|
|
956
|
+
* - Processes watched attachments to queue uploads/downloads
|
|
957
|
+
* - Handles state transitions for archived and new attachments
|
|
958
|
+
*/
|
|
959
|
+
async startSync() {
|
|
960
|
+
await this.stopSync();
|
|
961
|
+
this.watchActiveAttachments = this.attachmentService.watchActiveAttachments({
|
|
962
|
+
throttleMs: this.syncThrottleDuration
|
|
963
|
+
});
|
|
964
|
+
// immediately invoke the sync storage to initialize local storage
|
|
965
|
+
await this.localStorage.initialize();
|
|
966
|
+
await this.verifyAttachments();
|
|
967
|
+
// Sync storage periodically
|
|
968
|
+
this.periodicSyncTimer = setInterval(async () => {
|
|
969
|
+
await this.syncStorage();
|
|
970
|
+
}, this.syncIntervalMs);
|
|
971
|
+
// Sync storage when there is a change in active attachments
|
|
972
|
+
this.watchActiveAttachments.registerListener({
|
|
973
|
+
onDiff: async () => {
|
|
974
|
+
await this.syncStorage();
|
|
975
|
+
}
|
|
976
|
+
});
|
|
977
|
+
this.statusListenerDispose = this.db.registerListener({
|
|
978
|
+
statusChanged: (status) => {
|
|
979
|
+
if (status.connected) {
|
|
980
|
+
// Device came online, process attachments immediately
|
|
981
|
+
this.syncStorage().catch((error) => {
|
|
982
|
+
this.logger.error('Error syncing storage on connection:', error);
|
|
983
|
+
});
|
|
984
|
+
}
|
|
985
|
+
}
|
|
986
|
+
});
|
|
987
|
+
this.watchAttachmentsAbortController = new AbortController();
|
|
988
|
+
const signal = this.watchAttachmentsAbortController.signal;
|
|
989
|
+
// Process attachments when there is a change in watched attachments
|
|
990
|
+
this.watchAttachments(async (watchedAttachments) => {
|
|
991
|
+
// Skip processing if sync has been stopped
|
|
992
|
+
if (signal.aborted) {
|
|
993
|
+
return;
|
|
994
|
+
}
|
|
995
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
996
|
+
// Need to get all the attachments which are tracked in the DB.
|
|
997
|
+
// We might need to restore an archived attachment.
|
|
998
|
+
const currentAttachments = await ctx.getAttachments();
|
|
999
|
+
const attachmentUpdates = [];
|
|
1000
|
+
for (const watchedAttachment of watchedAttachments) {
|
|
1001
|
+
const existingQueueItem = currentAttachments.find((a) => a.id === watchedAttachment.id);
|
|
1002
|
+
if (!existingQueueItem) {
|
|
1003
|
+
// Item is watched but not in the queue yet. Need to add it.
|
|
1004
|
+
if (!this.downloadAttachments) {
|
|
1005
|
+
continue;
|
|
1006
|
+
}
|
|
1007
|
+
const filename = watchedAttachment.filename ?? `${watchedAttachment.id}.${watchedAttachment.fileExtension}`;
|
|
1008
|
+
attachmentUpdates.push({
|
|
1009
|
+
id: watchedAttachment.id,
|
|
1010
|
+
filename,
|
|
1011
|
+
state: AttachmentState.QUEUED_DOWNLOAD,
|
|
1012
|
+
hasSynced: false,
|
|
1013
|
+
metaData: watchedAttachment.metaData,
|
|
1014
|
+
timestamp: new Date().getTime()
|
|
1015
|
+
});
|
|
1016
|
+
continue;
|
|
1017
|
+
}
|
|
1018
|
+
if (existingQueueItem.state === AttachmentState.ARCHIVED) {
|
|
1019
|
+
// The attachment is present again. Need to queue it for sync.
|
|
1020
|
+
// We might be able to optimize this in future
|
|
1021
|
+
if (existingQueueItem.hasSynced === true) {
|
|
1022
|
+
// No remote action required, we can restore the record (avoids deletion)
|
|
1023
|
+
attachmentUpdates.push({
|
|
1024
|
+
...existingQueueItem,
|
|
1025
|
+
state: AttachmentState.SYNCED
|
|
1026
|
+
});
|
|
1027
|
+
}
|
|
1028
|
+
else {
|
|
1029
|
+
// The localURI should be set if the record was meant to be uploaded
|
|
1030
|
+
// and hasSynced is false then
|
|
1031
|
+
// it must be an upload operation
|
|
1032
|
+
const newState = existingQueueItem.localUri == null ? AttachmentState.QUEUED_DOWNLOAD : AttachmentState.QUEUED_UPLOAD;
|
|
1033
|
+
attachmentUpdates.push({
|
|
1034
|
+
...existingQueueItem,
|
|
1035
|
+
state: newState
|
|
1036
|
+
});
|
|
1037
|
+
}
|
|
1038
|
+
}
|
|
1039
|
+
}
|
|
1040
|
+
for (const attachment of currentAttachments) {
|
|
1041
|
+
const notInWatchedItems = watchedAttachments.find((i) => i.id === attachment.id) == null;
|
|
1042
|
+
if (notInWatchedItems) {
|
|
1043
|
+
switch (attachment.state) {
|
|
1044
|
+
case AttachmentState.QUEUED_DELETE:
|
|
1045
|
+
case AttachmentState.QUEUED_UPLOAD:
|
|
1046
|
+
// Only archive if it has synced
|
|
1047
|
+
if (attachment.hasSynced === true) {
|
|
1048
|
+
attachmentUpdates.push({
|
|
1049
|
+
...attachment,
|
|
1050
|
+
state: AttachmentState.ARCHIVED
|
|
1051
|
+
});
|
|
1052
|
+
}
|
|
1053
|
+
break;
|
|
1054
|
+
default:
|
|
1055
|
+
// Archive other states such as QUEUED_DOWNLOAD
|
|
1056
|
+
attachmentUpdates.push({
|
|
1057
|
+
...attachment,
|
|
1058
|
+
state: AttachmentState.ARCHIVED
|
|
1059
|
+
});
|
|
1060
|
+
}
|
|
1061
|
+
}
|
|
1062
|
+
}
|
|
1063
|
+
if (attachmentUpdates.length > 0) {
|
|
1064
|
+
await ctx.saveAttachments(attachmentUpdates);
|
|
1065
|
+
}
|
|
1066
|
+
});
|
|
1067
|
+
}, signal);
|
|
1068
|
+
}
|
|
1069
|
+
/**
|
|
1070
|
+
* Synchronizes all active attachments between local and remote storage.
|
|
1071
|
+
*
|
|
1072
|
+
* This is called automatically at regular intervals when sync is started,
|
|
1073
|
+
* but can also be called manually to trigger an immediate sync.
|
|
1074
|
+
*/
|
|
1075
|
+
async syncStorage() {
|
|
1076
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
1077
|
+
const activeAttachments = await ctx.getActiveAttachments();
|
|
1078
|
+
await this.localStorage.initialize();
|
|
1079
|
+
await this.syncingService.processAttachments(activeAttachments, ctx);
|
|
1080
|
+
await this.syncingService.deleteArchivedAttachments(ctx);
|
|
1081
|
+
});
|
|
1082
|
+
}
|
|
1083
|
+
/**
|
|
1084
|
+
* Stops the attachment synchronization process.
|
|
1085
|
+
*
|
|
1086
|
+
* Clears the periodic sync timer and closes all active attachment watchers.
|
|
1087
|
+
*/
|
|
1088
|
+
async stopSync() {
|
|
1089
|
+
clearInterval(this.periodicSyncTimer);
|
|
1090
|
+
this.periodicSyncTimer = undefined;
|
|
1091
|
+
if (this.watchActiveAttachments)
|
|
1092
|
+
await this.watchActiveAttachments.close();
|
|
1093
|
+
if (this.watchAttachmentsAbortController) {
|
|
1094
|
+
this.watchAttachmentsAbortController.abort();
|
|
1095
|
+
}
|
|
1096
|
+
if (this.statusListenerDispose) {
|
|
1097
|
+
this.statusListenerDispose();
|
|
1098
|
+
this.statusListenerDispose = undefined;
|
|
1099
|
+
}
|
|
1100
|
+
}
|
|
1101
|
+
/**
|
|
1102
|
+
* Saves a file to local storage and queues it for upload to remote storage.
|
|
1103
|
+
*
|
|
1104
|
+
* @param options - File save options
|
|
1105
|
+
* @param options.data - The file data as ArrayBuffer, Blob, or base64 string
|
|
1106
|
+
* @param options.fileExtension - File extension (e.g., 'jpg', 'pdf')
|
|
1107
|
+
* @param options.mediaType - MIME type of the file (e.g., 'image/jpeg')
|
|
1108
|
+
* @param options.metaData - Optional metadata to associate with the attachment
|
|
1109
|
+
* @param options.id - Optional custom ID. If not provided, a UUID will be generated
|
|
1110
|
+
* @param options.updateHook - Optional callback to execute additional database operations
|
|
1111
|
+
* within the same transaction as the attachment creation
|
|
1112
|
+
* @returns Promise resolving to the created attachment record
|
|
1113
|
+
*/
|
|
1114
|
+
async saveFile({ data, fileExtension, mediaType, metaData, id, updateHook }) {
|
|
1115
|
+
const resolvedId = id ?? (await this.generateAttachmentId());
|
|
1116
|
+
const filename = `${resolvedId}.${fileExtension}`;
|
|
1117
|
+
const localUri = this.localStorage.getLocalUri(filename);
|
|
1118
|
+
const size = await this.localStorage.saveFile(localUri, data);
|
|
1119
|
+
const attachment = {
|
|
1120
|
+
id: resolvedId,
|
|
1121
|
+
filename,
|
|
1122
|
+
mediaType,
|
|
1123
|
+
localUri,
|
|
1124
|
+
state: AttachmentState.QUEUED_UPLOAD,
|
|
1125
|
+
hasSynced: false,
|
|
1126
|
+
size,
|
|
1127
|
+
timestamp: new Date().getTime(),
|
|
1128
|
+
metaData
|
|
1129
|
+
};
|
|
1130
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
1131
|
+
await ctx.db.writeTransaction(async (tx) => {
|
|
1132
|
+
await updateHook?.(tx, attachment);
|
|
1133
|
+
await ctx.upsertAttachment(attachment, tx);
|
|
1134
|
+
});
|
|
1135
|
+
});
|
|
1136
|
+
return attachment;
|
|
1137
|
+
}
|
|
1138
|
+
async deleteFile({ id, updateHook }) {
|
|
1139
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
1140
|
+
const attachment = await ctx.getAttachment(id);
|
|
1141
|
+
if (!attachment) {
|
|
1142
|
+
throw new Error(`Attachment with id ${id} not found`);
|
|
1143
|
+
}
|
|
1144
|
+
await ctx.db.writeTransaction(async (tx) => {
|
|
1145
|
+
await updateHook?.(tx, attachment);
|
|
1146
|
+
await ctx.upsertAttachment({
|
|
1147
|
+
...attachment,
|
|
1148
|
+
state: AttachmentState.QUEUED_DELETE,
|
|
1149
|
+
hasSynced: false
|
|
1150
|
+
}, tx);
|
|
1151
|
+
});
|
|
1152
|
+
});
|
|
1153
|
+
}
|
|
1154
|
+
async expireCache() {
|
|
1155
|
+
let isDone = false;
|
|
1156
|
+
while (!isDone) {
|
|
1157
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
1158
|
+
isDone = await this.syncingService.deleteArchivedAttachments(ctx);
|
|
1159
|
+
});
|
|
1160
|
+
}
|
|
1161
|
+
}
|
|
1162
|
+
async clearQueue() {
|
|
1163
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
1164
|
+
await ctx.clearQueue();
|
|
1165
|
+
});
|
|
1166
|
+
await this.localStorage.clear();
|
|
1167
|
+
}
|
|
1168
|
+
/**
|
|
1169
|
+
* Verifies the integrity of all attachment records and repairs inconsistencies.
|
|
1170
|
+
*
|
|
1171
|
+
* This method checks each attachment record against the local filesystem and:
|
|
1172
|
+
* - Updates localUri if the file exists at a different path
|
|
1173
|
+
* - Archives attachments with missing local files that haven't been uploaded
|
|
1174
|
+
* - Requeues synced attachments for download if their local files are missing
|
|
1175
|
+
*/
|
|
1176
|
+
async verifyAttachments() {
|
|
1177
|
+
await this.attachmentService.withContext(async (ctx) => {
|
|
1178
|
+
const attachments = await ctx.getAttachments();
|
|
1179
|
+
const updates = [];
|
|
1180
|
+
for (const attachment of attachments) {
|
|
1181
|
+
if (attachment.localUri == null) {
|
|
1182
|
+
continue;
|
|
1183
|
+
}
|
|
1184
|
+
const exists = await this.localStorage.fileExists(attachment.localUri);
|
|
1185
|
+
if (exists) {
|
|
1186
|
+
// The file exists, this is correct
|
|
1187
|
+
continue;
|
|
1188
|
+
}
|
|
1189
|
+
const newLocalUri = this.localStorage.getLocalUri(attachment.filename);
|
|
1190
|
+
const newExists = await this.localStorage.fileExists(newLocalUri);
|
|
1191
|
+
if (newExists) {
|
|
1192
|
+
// The file exists locally but the localUri is broken, we update it.
|
|
1193
|
+
updates.push({
|
|
1194
|
+
...attachment,
|
|
1195
|
+
localUri: newLocalUri
|
|
1196
|
+
});
|
|
1197
|
+
}
|
|
1198
|
+
else {
|
|
1199
|
+
// the file doesn't exist locally.
|
|
1200
|
+
if (attachment.state === AttachmentState.SYNCED) {
|
|
1201
|
+
// the file has been successfully synced to remote storage but is missing
|
|
1202
|
+
// we download it again
|
|
1203
|
+
updates.push({
|
|
1204
|
+
...attachment,
|
|
1205
|
+
state: AttachmentState.QUEUED_DOWNLOAD,
|
|
1206
|
+
localUri: undefined
|
|
1207
|
+
});
|
|
1208
|
+
}
|
|
1209
|
+
else {
|
|
1210
|
+
// the file wasn't successfully synced to remote storage, we archive it
|
|
1211
|
+
updates.push({
|
|
1212
|
+
...attachment,
|
|
1213
|
+
state: AttachmentState.ARCHIVED,
|
|
1214
|
+
localUri: undefined // Clears the value
|
|
1215
|
+
});
|
|
1216
|
+
}
|
|
1217
|
+
}
|
|
1218
|
+
}
|
|
1219
|
+
await ctx.saveAttachments(updates);
|
|
1220
|
+
});
|
|
1221
|
+
}
|
|
1222
|
+
}
|
|
1223
|
+
|
|
1224
|
+
var EncodingType;
|
|
1225
|
+
(function (EncodingType) {
|
|
1226
|
+
EncodingType["UTF8"] = "utf8";
|
|
1227
|
+
EncodingType["Base64"] = "base64";
|
|
1228
|
+
})(EncodingType || (EncodingType = {}));
|
|
1229
|
+
|
|
3
1230
|
function getDefaultExportFromCjs (x) {
|
|
4
1231
|
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
|
|
5
1232
|
}
|
|
@@ -1316,20 +2543,6 @@ class MetaBaseObserver extends BaseObserver {
|
|
|
1316
2543
|
}
|
|
1317
2544
|
}
|
|
1318
2545
|
|
|
1319
|
-
var WatchedQueryListenerEvent;
|
|
1320
|
-
(function (WatchedQueryListenerEvent) {
|
|
1321
|
-
WatchedQueryListenerEvent["ON_DATA"] = "onData";
|
|
1322
|
-
WatchedQueryListenerEvent["ON_ERROR"] = "onError";
|
|
1323
|
-
WatchedQueryListenerEvent["ON_STATE_CHANGE"] = "onStateChange";
|
|
1324
|
-
WatchedQueryListenerEvent["SETTINGS_WILL_UPDATE"] = "settingsWillUpdate";
|
|
1325
|
-
WatchedQueryListenerEvent["CLOSED"] = "closed";
|
|
1326
|
-
})(WatchedQueryListenerEvent || (WatchedQueryListenerEvent = {}));
|
|
1327
|
-
const DEFAULT_WATCH_THROTTLE_MS = 30;
|
|
1328
|
-
const DEFAULT_WATCH_QUERY_OPTIONS = {
|
|
1329
|
-
throttleMs: DEFAULT_WATCH_THROTTLE_MS,
|
|
1330
|
-
reportFetching: true
|
|
1331
|
-
};
|
|
1332
|
-
|
|
1333
2546
|
/**
|
|
1334
2547
|
* Performs underlying watching and yields a stream of results.
|
|
1335
2548
|
* @internal
|
|
@@ -9242,7 +10455,7 @@ function requireDist () {
|
|
|
9242
10455
|
|
|
9243
10456
|
var distExports = requireDist();
|
|
9244
10457
|
|
|
9245
|
-
var version = "1.
|
|
10458
|
+
var version = "1.48.0";
|
|
9246
10459
|
var PACKAGE = {
|
|
9247
10460
|
version: version};
|
|
9248
10461
|
|
|
@@ -12867,193 +14080,59 @@ class SqliteBucketStorage extends BaseObserver {
|
|
|
12867
14080
|
async control(op, payload) {
|
|
12868
14081
|
return await this.writeTransaction(async (tx) => {
|
|
12869
14082
|
const [[raw]] = await tx.executeRaw('SELECT powersync_control(?, ?)', [op, payload]);
|
|
12870
|
-
return raw;
|
|
12871
|
-
});
|
|
12872
|
-
}
|
|
12873
|
-
async hasMigratedSubkeys() {
|
|
12874
|
-
const { r } = await this.db.get('SELECT EXISTS(SELECT * FROM ps_kv WHERE key = ?) as r', [
|
|
12875
|
-
SqliteBucketStorage._subkeyMigrationKey
|
|
12876
|
-
]);
|
|
12877
|
-
return r != 0;
|
|
12878
|
-
}
|
|
12879
|
-
async migrateToFixedSubkeys() {
|
|
12880
|
-
await this.writeTransaction(async (tx) => {
|
|
12881
|
-
await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
|
|
12882
|
-
await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
|
|
12883
|
-
SqliteBucketStorage._subkeyMigrationKey,
|
|
12884
|
-
'1'
|
|
12885
|
-
]);
|
|
12886
|
-
});
|
|
12887
|
-
}
|
|
12888
|
-
static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
|
|
12889
|
-
}
|
|
12890
|
-
function hasMatchingPriority(priority, bucket) {
|
|
12891
|
-
return bucket.priority != null && bucket.priority <= priority;
|
|
12892
|
-
}
|
|
12893
|
-
|
|
12894
|
-
// TODO JSON
|
|
12895
|
-
class SyncDataBatch {
|
|
12896
|
-
buckets;
|
|
12897
|
-
static fromJSON(json) {
|
|
12898
|
-
return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
|
|
12899
|
-
}
|
|
12900
|
-
constructor(buckets) {
|
|
12901
|
-
this.buckets = buckets;
|
|
12902
|
-
}
|
|
12903
|
-
}
|
|
12904
|
-
|
|
12905
|
-
/**
|
|
12906
|
-
* Thrown when an underlying database connection is closed.
|
|
12907
|
-
* This is particularly relevant when worker connections are marked as closed while
|
|
12908
|
-
* operations are still in progress.
|
|
12909
|
-
*/
|
|
12910
|
-
class ConnectionClosedError extends Error {
|
|
12911
|
-
static NAME = 'ConnectionClosedError';
|
|
12912
|
-
static MATCHES(input) {
|
|
12913
|
-
/**
|
|
12914
|
-
* If there are weird package issues which cause multiple versions of classes to be present, the instanceof
|
|
12915
|
-
* check might fail. This also performs a failsafe check.
|
|
12916
|
-
* This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
|
|
12917
|
-
*/
|
|
12918
|
-
return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
|
|
12919
|
-
}
|
|
12920
|
-
constructor(message) {
|
|
12921
|
-
super(message);
|
|
12922
|
-
this.name = ConnectionClosedError.NAME;
|
|
12923
|
-
}
|
|
12924
|
-
}
|
|
12925
|
-
|
|
12926
|
-
// https://www.sqlite.org/lang_expr.html#castexpr
|
|
12927
|
-
var ColumnType;
|
|
12928
|
-
(function (ColumnType) {
|
|
12929
|
-
ColumnType["TEXT"] = "TEXT";
|
|
12930
|
-
ColumnType["INTEGER"] = "INTEGER";
|
|
12931
|
-
ColumnType["REAL"] = "REAL";
|
|
12932
|
-
})(ColumnType || (ColumnType = {}));
|
|
12933
|
-
const text = {
|
|
12934
|
-
type: ColumnType.TEXT
|
|
12935
|
-
};
|
|
12936
|
-
const integer = {
|
|
12937
|
-
type: ColumnType.INTEGER
|
|
12938
|
-
};
|
|
12939
|
-
const real = {
|
|
12940
|
-
type: ColumnType.REAL
|
|
12941
|
-
};
|
|
12942
|
-
// powersync-sqlite-core limits the number of column per table to 1999, due to internal SQLite limits.
|
|
12943
|
-
// In earlier versions this was limited to 63.
|
|
12944
|
-
const MAX_AMOUNT_OF_COLUMNS = 1999;
|
|
12945
|
-
const column = {
|
|
12946
|
-
text,
|
|
12947
|
-
integer,
|
|
12948
|
-
real
|
|
12949
|
-
};
|
|
12950
|
-
class Column {
|
|
12951
|
-
options;
|
|
12952
|
-
constructor(options) {
|
|
12953
|
-
this.options = options;
|
|
12954
|
-
}
|
|
12955
|
-
get name() {
|
|
12956
|
-
return this.options.name;
|
|
12957
|
-
}
|
|
12958
|
-
get type() {
|
|
12959
|
-
return this.options.type;
|
|
12960
|
-
}
|
|
12961
|
-
toJSON() {
|
|
12962
|
-
return {
|
|
12963
|
-
name: this.name,
|
|
12964
|
-
type: this.type
|
|
12965
|
-
};
|
|
12966
|
-
}
|
|
12967
|
-
}
|
|
12968
|
-
|
|
12969
|
-
const DEFAULT_INDEX_COLUMN_OPTIONS = {
|
|
12970
|
-
ascending: true
|
|
12971
|
-
};
|
|
12972
|
-
class IndexedColumn {
|
|
12973
|
-
options;
|
|
12974
|
-
static createAscending(column) {
|
|
12975
|
-
return new IndexedColumn({
|
|
12976
|
-
name: column,
|
|
12977
|
-
ascending: true
|
|
12978
|
-
});
|
|
12979
|
-
}
|
|
12980
|
-
constructor(options) {
|
|
12981
|
-
this.options = { ...DEFAULT_INDEX_COLUMN_OPTIONS, ...options };
|
|
12982
|
-
}
|
|
12983
|
-
get name() {
|
|
12984
|
-
return this.options.name;
|
|
12985
|
-
}
|
|
12986
|
-
get ascending() {
|
|
12987
|
-
return this.options.ascending;
|
|
12988
|
-
}
|
|
12989
|
-
toJSON(table) {
|
|
12990
|
-
return {
|
|
12991
|
-
name: this.name,
|
|
12992
|
-
ascending: this.ascending,
|
|
12993
|
-
type: table.columns.find((column) => column.name === this.name)?.type ?? ColumnType.TEXT
|
|
12994
|
-
};
|
|
12995
|
-
}
|
|
12996
|
-
}
|
|
12997
|
-
|
|
12998
|
-
const DEFAULT_INDEX_OPTIONS = {
|
|
12999
|
-
columns: []
|
|
13000
|
-
};
|
|
13001
|
-
class Index {
|
|
13002
|
-
options;
|
|
13003
|
-
static createAscending(options, columnNames) {
|
|
13004
|
-
return new Index({
|
|
13005
|
-
...options,
|
|
13006
|
-
columns: columnNames.map((name) => IndexedColumn.createAscending(name))
|
|
14083
|
+
return raw;
|
|
13007
14084
|
});
|
|
13008
14085
|
}
|
|
13009
|
-
|
|
13010
|
-
this.
|
|
13011
|
-
|
|
14086
|
+
async hasMigratedSubkeys() {
|
|
14087
|
+
const { r } = await this.db.get('SELECT EXISTS(SELECT * FROM ps_kv WHERE key = ?) as r', [
|
|
14088
|
+
SqliteBucketStorage._subkeyMigrationKey
|
|
14089
|
+
]);
|
|
14090
|
+
return r != 0;
|
|
13012
14091
|
}
|
|
13013
|
-
|
|
13014
|
-
|
|
14092
|
+
async migrateToFixedSubkeys() {
|
|
14093
|
+
await this.writeTransaction(async (tx) => {
|
|
14094
|
+
await tx.execute('UPDATE ps_oplog SET key = powersync_remove_duplicate_key_encoding(key);');
|
|
14095
|
+
await tx.execute('INSERT OR REPLACE INTO ps_kv (key, value) VALUES (?, ?);', [
|
|
14096
|
+
SqliteBucketStorage._subkeyMigrationKey,
|
|
14097
|
+
'1'
|
|
14098
|
+
]);
|
|
14099
|
+
});
|
|
13015
14100
|
}
|
|
13016
|
-
|
|
13017
|
-
|
|
14101
|
+
static _subkeyMigrationKey = 'powersync_js_migrated_subkeys';
|
|
14102
|
+
}
|
|
14103
|
+
function hasMatchingPriority(priority, bucket) {
|
|
14104
|
+
return bucket.priority != null && bucket.priority <= priority;
|
|
14105
|
+
}
|
|
14106
|
+
|
|
14107
|
+
// TODO JSON
|
|
14108
|
+
class SyncDataBatch {
|
|
14109
|
+
buckets;
|
|
14110
|
+
static fromJSON(json) {
|
|
14111
|
+
return new SyncDataBatch(json.buckets.map((bucket) => SyncDataBucket.fromRow(bucket)));
|
|
13018
14112
|
}
|
|
13019
|
-
|
|
13020
|
-
|
|
13021
|
-
name: this.name,
|
|
13022
|
-
columns: this.columns.map((c) => c.toJSON(table))
|
|
13023
|
-
};
|
|
14113
|
+
constructor(buckets) {
|
|
14114
|
+
this.buckets = buckets;
|
|
13024
14115
|
}
|
|
13025
14116
|
}
|
|
13026
14117
|
|
|
13027
14118
|
/**
|
|
13028
|
-
*
|
|
13029
|
-
*
|
|
13030
|
-
*
|
|
13031
|
-
* using client-side table and column constraints.
|
|
13032
|
-
*
|
|
13033
|
-
* To collect local writes to raw tables with PowerSync, custom triggers are required. See
|
|
13034
|
-
* {@link https://docs.powersync.com/usage/use-case-examples/raw-tables the documentation} for details and an example on
|
|
13035
|
-
* using raw tables.
|
|
13036
|
-
*
|
|
13037
|
-
* Note that raw tables are only supported when using the new `SyncClientImplementation.rust` sync client.
|
|
13038
|
-
*
|
|
13039
|
-
* @experimental Please note that this feature is experimental at the moment, and not covered by PowerSync semver or
|
|
13040
|
-
* stability guarantees.
|
|
14119
|
+
* Thrown when an underlying database connection is closed.
|
|
14120
|
+
* This is particularly relevant when worker connections are marked as closed while
|
|
14121
|
+
* operations are still in progress.
|
|
13041
14122
|
*/
|
|
13042
|
-
class
|
|
13043
|
-
|
|
13044
|
-
|
|
13045
|
-
|
|
13046
|
-
|
|
13047
|
-
|
|
13048
|
-
|
|
13049
|
-
|
|
13050
|
-
|
|
13051
|
-
|
|
13052
|
-
|
|
13053
|
-
|
|
13054
|
-
this.name =
|
|
13055
|
-
this.put = type.put;
|
|
13056
|
-
this.delete = type.delete;
|
|
14123
|
+
class ConnectionClosedError extends Error {
|
|
14124
|
+
static NAME = 'ConnectionClosedError';
|
|
14125
|
+
static MATCHES(input) {
|
|
14126
|
+
/**
|
|
14127
|
+
* If there are weird package issues which cause multiple versions of classes to be present, the instanceof
|
|
14128
|
+
* check might fail. This also performs a failsafe check.
|
|
14129
|
+
* This might also happen if the Error is serialized and parsed over a bridging channel like a MessagePort.
|
|
14130
|
+
*/
|
|
14131
|
+
return (input instanceof ConnectionClosedError || (input instanceof Error && input.name == ConnectionClosedError.NAME));
|
|
14132
|
+
}
|
|
14133
|
+
constructor(message) {
|
|
14134
|
+
super(message);
|
|
14135
|
+
this.name = ConnectionClosedError.NAME;
|
|
13057
14136
|
}
|
|
13058
14137
|
}
|
|
13059
14138
|
|
|
@@ -13101,7 +14180,7 @@ class Schema {
|
|
|
13101
14180
|
*/
|
|
13102
14181
|
withRawTables(tables) {
|
|
13103
14182
|
for (const [name, rawTableDefinition] of Object.entries(tables)) {
|
|
13104
|
-
this.rawTables.push(
|
|
14183
|
+
this.rawTables.push({ name, ...rawTableDefinition });
|
|
13105
14184
|
}
|
|
13106
14185
|
}
|
|
13107
14186
|
validate() {
|
|
@@ -13112,213 +14191,30 @@ class Schema {
|
|
|
13112
14191
|
toJSON() {
|
|
13113
14192
|
return {
|
|
13114
14193
|
tables: this.tables.map((t) => t.toJSON()),
|
|
13115
|
-
raw_tables: this.rawTables
|
|
14194
|
+
raw_tables: this.rawTables.map(Schema.rawTableToJson)
|
|
13116
14195
|
};
|
|
13117
14196
|
}
|
|
13118
|
-
}
|
|
13119
|
-
|
|
13120
|
-
const DEFAULT_TABLE_OPTIONS = {
|
|
13121
|
-
indexes: [],
|
|
13122
|
-
insertOnly: false,
|
|
13123
|
-
localOnly: false,
|
|
13124
|
-
trackPrevious: false,
|
|
13125
|
-
trackMetadata: false,
|
|
13126
|
-
ignoreEmptyUpdates: false
|
|
13127
|
-
};
|
|
13128
|
-
const InvalidSQLCharacters = /["'%,.#\s[\]]/;
|
|
13129
|
-
class Table {
|
|
13130
|
-
options;
|
|
13131
|
-
_mappedColumns;
|
|
13132
|
-
static createLocalOnly(options) {
|
|
13133
|
-
return new Table({ ...options, localOnly: true, insertOnly: false });
|
|
13134
|
-
}
|
|
13135
|
-
static createInsertOnly(options) {
|
|
13136
|
-
return new Table({ ...options, localOnly: false, insertOnly: true });
|
|
13137
|
-
}
|
|
13138
14197
|
/**
|
|
13139
|
-
*
|
|
13140
|
-
* @deprecated This was only only included for TableV2 and is no longer necessary.
|
|
13141
|
-
* Prefer to use new Table() directly.
|
|
14198
|
+
* Returns a representation of the raw table that is understood by the PowerSync SQLite core extension.
|
|
13142
14199
|
*
|
|
13143
|
-
*
|
|
13144
|
-
|
|
13145
|
-
|
|
13146
|
-
|
|
13147
|
-
|
|
13148
|
-
|
|
13149
|
-
|
|
13150
|
-
|
|
13151
|
-
|
|
13152
|
-
viewName: table.options.viewName
|
|
13153
|
-
});
|
|
13154
|
-
}
|
|
13155
|
-
constructor(optionsOrColumns, v2Options) {
|
|
13156
|
-
if (this.isTableV1(optionsOrColumns)) {
|
|
13157
|
-
this.initTableV1(optionsOrColumns);
|
|
13158
|
-
}
|
|
13159
|
-
else {
|
|
13160
|
-
this.initTableV2(optionsOrColumns, v2Options);
|
|
13161
|
-
}
|
|
13162
|
-
}
|
|
13163
|
-
copyWithName(name) {
|
|
13164
|
-
return new Table({
|
|
13165
|
-
...this.options,
|
|
13166
|
-
name
|
|
13167
|
-
});
|
|
13168
|
-
}
|
|
13169
|
-
isTableV1(arg) {
|
|
13170
|
-
return 'columns' in arg && Array.isArray(arg.columns);
|
|
13171
|
-
}
|
|
13172
|
-
initTableV1(options) {
|
|
13173
|
-
this.options = {
|
|
13174
|
-
...options,
|
|
13175
|
-
indexes: options.indexes || []
|
|
13176
|
-
};
|
|
13177
|
-
this.applyDefaultOptions();
|
|
13178
|
-
}
|
|
13179
|
-
initTableV2(columns, options) {
|
|
13180
|
-
const convertedColumns = Object.entries(columns).map(([name, columnInfo]) => new Column({ name, type: columnInfo.type }));
|
|
13181
|
-
const convertedIndexes = Object.entries(options?.indexes ?? {}).map(([name, columnNames]) => new Index({
|
|
13182
|
-
name,
|
|
13183
|
-
columns: columnNames.map((name) => new IndexedColumn({
|
|
13184
|
-
name: name.replace(/^-/, ''),
|
|
13185
|
-
ascending: !name.startsWith('-')
|
|
13186
|
-
}))
|
|
13187
|
-
}));
|
|
13188
|
-
this.options = {
|
|
13189
|
-
name: '',
|
|
13190
|
-
columns: convertedColumns,
|
|
13191
|
-
indexes: convertedIndexes,
|
|
13192
|
-
viewName: options?.viewName,
|
|
13193
|
-
insertOnly: options?.insertOnly,
|
|
13194
|
-
localOnly: options?.localOnly,
|
|
13195
|
-
trackPrevious: options?.trackPrevious,
|
|
13196
|
-
trackMetadata: options?.trackMetadata,
|
|
13197
|
-
ignoreEmptyUpdates: options?.ignoreEmptyUpdates
|
|
14200
|
+
* The output of this can be passed through `JSON.serialize` and then used in `powersync_create_raw_table_crud_trigger`
|
|
14201
|
+
* to define triggers for this table.
|
|
14202
|
+
*/
|
|
14203
|
+
static rawTableToJson(table) {
|
|
14204
|
+
const serialized = {
|
|
14205
|
+
name: table.name,
|
|
14206
|
+
put: table.put,
|
|
14207
|
+
delete: table.delete,
|
|
14208
|
+
clear: table.clear
|
|
13198
14209
|
};
|
|
13199
|
-
|
|
13200
|
-
|
|
13201
|
-
|
|
13202
|
-
|
|
13203
|
-
|
|
13204
|
-
|
|
13205
|
-
this.options.trackPrevious ??= DEFAULT_TABLE_OPTIONS.trackPrevious;
|
|
13206
|
-
this.options.trackMetadata ??= DEFAULT_TABLE_OPTIONS.trackMetadata;
|
|
13207
|
-
this.options.ignoreEmptyUpdates ??= DEFAULT_TABLE_OPTIONS.ignoreEmptyUpdates;
|
|
13208
|
-
}
|
|
13209
|
-
get name() {
|
|
13210
|
-
return this.options.name;
|
|
13211
|
-
}
|
|
13212
|
-
get viewNameOverride() {
|
|
13213
|
-
return this.options.viewName;
|
|
13214
|
-
}
|
|
13215
|
-
get viewName() {
|
|
13216
|
-
return this.viewNameOverride ?? this.name;
|
|
13217
|
-
}
|
|
13218
|
-
get columns() {
|
|
13219
|
-
return this.options.columns;
|
|
13220
|
-
}
|
|
13221
|
-
get columnMap() {
|
|
13222
|
-
return (this._mappedColumns ??
|
|
13223
|
-
this.columns.reduce((hash, column) => {
|
|
13224
|
-
hash[column.name] = { type: column.type ?? ColumnType.TEXT };
|
|
13225
|
-
return hash;
|
|
13226
|
-
}, {}));
|
|
13227
|
-
}
|
|
13228
|
-
get indexes() {
|
|
13229
|
-
return this.options.indexes ?? [];
|
|
13230
|
-
}
|
|
13231
|
-
get localOnly() {
|
|
13232
|
-
return this.options.localOnly;
|
|
13233
|
-
}
|
|
13234
|
-
get insertOnly() {
|
|
13235
|
-
return this.options.insertOnly;
|
|
13236
|
-
}
|
|
13237
|
-
get trackPrevious() {
|
|
13238
|
-
return this.options.trackPrevious;
|
|
13239
|
-
}
|
|
13240
|
-
get trackMetadata() {
|
|
13241
|
-
return this.options.trackMetadata;
|
|
13242
|
-
}
|
|
13243
|
-
get ignoreEmptyUpdates() {
|
|
13244
|
-
return this.options.ignoreEmptyUpdates;
|
|
13245
|
-
}
|
|
13246
|
-
get internalName() {
|
|
13247
|
-
if (this.options.localOnly) {
|
|
13248
|
-
return `ps_data_local__${this.name}`;
|
|
14210
|
+
if ('schema' in table) {
|
|
14211
|
+
// We have schema options, those are flattened into the outer JSON object for the core extension.
|
|
14212
|
+
const schema = table.schema;
|
|
14213
|
+
serialized.table_name = schema.tableName ?? table.name;
|
|
14214
|
+
serialized.synced_columns = schema.syncedColumns;
|
|
14215
|
+
Object.assign(serialized, encodeTableOptions(table.schema));
|
|
13249
14216
|
}
|
|
13250
|
-
return
|
|
13251
|
-
}
|
|
13252
|
-
get validName() {
|
|
13253
|
-
if (InvalidSQLCharacters.test(this.name)) {
|
|
13254
|
-
return false;
|
|
13255
|
-
}
|
|
13256
|
-
if (this.viewNameOverride != null && InvalidSQLCharacters.test(this.viewNameOverride)) {
|
|
13257
|
-
return false;
|
|
13258
|
-
}
|
|
13259
|
-
return true;
|
|
13260
|
-
}
|
|
13261
|
-
validate() {
|
|
13262
|
-
if (InvalidSQLCharacters.test(this.name)) {
|
|
13263
|
-
throw new Error(`Invalid characters in table name: ${this.name}`);
|
|
13264
|
-
}
|
|
13265
|
-
if (this.viewNameOverride && InvalidSQLCharacters.test(this.viewNameOverride)) {
|
|
13266
|
-
throw new Error(`Invalid characters in view name: ${this.viewNameOverride}`);
|
|
13267
|
-
}
|
|
13268
|
-
if (this.columns.length > MAX_AMOUNT_OF_COLUMNS) {
|
|
13269
|
-
throw new Error(`Table has too many columns. The maximum number of columns is ${MAX_AMOUNT_OF_COLUMNS}.`);
|
|
13270
|
-
}
|
|
13271
|
-
if (this.trackMetadata && this.localOnly) {
|
|
13272
|
-
throw new Error(`Can't include metadata for local-only tables.`);
|
|
13273
|
-
}
|
|
13274
|
-
if (this.trackPrevious != false && this.localOnly) {
|
|
13275
|
-
throw new Error(`Can't include old values for local-only tables.`);
|
|
13276
|
-
}
|
|
13277
|
-
const columnNames = new Set();
|
|
13278
|
-
columnNames.add('id');
|
|
13279
|
-
for (const column of this.columns) {
|
|
13280
|
-
const { name: columnName } = column;
|
|
13281
|
-
if (column.name === 'id') {
|
|
13282
|
-
throw new Error(`An id column is automatically added, custom id columns are not supported`);
|
|
13283
|
-
}
|
|
13284
|
-
if (columnNames.has(columnName)) {
|
|
13285
|
-
throw new Error(`Duplicate column ${columnName}`);
|
|
13286
|
-
}
|
|
13287
|
-
if (InvalidSQLCharacters.test(columnName)) {
|
|
13288
|
-
throw new Error(`Invalid characters in column name: ${column.name}`);
|
|
13289
|
-
}
|
|
13290
|
-
columnNames.add(columnName);
|
|
13291
|
-
}
|
|
13292
|
-
const indexNames = new Set();
|
|
13293
|
-
for (const index of this.indexes) {
|
|
13294
|
-
if (indexNames.has(index.name)) {
|
|
13295
|
-
throw new Error(`Duplicate index ${index.name}`);
|
|
13296
|
-
}
|
|
13297
|
-
if (InvalidSQLCharacters.test(index.name)) {
|
|
13298
|
-
throw new Error(`Invalid characters in index name: ${index.name}`);
|
|
13299
|
-
}
|
|
13300
|
-
for (const column of index.columns) {
|
|
13301
|
-
if (!columnNames.has(column.name)) {
|
|
13302
|
-
throw new Error(`Column ${column.name} not found for index ${index.name}`);
|
|
13303
|
-
}
|
|
13304
|
-
}
|
|
13305
|
-
indexNames.add(index.name);
|
|
13306
|
-
}
|
|
13307
|
-
}
|
|
13308
|
-
toJSON() {
|
|
13309
|
-
const trackPrevious = this.trackPrevious;
|
|
13310
|
-
return {
|
|
13311
|
-
name: this.name,
|
|
13312
|
-
view_name: this.viewName,
|
|
13313
|
-
local_only: this.localOnly,
|
|
13314
|
-
insert_only: this.insertOnly,
|
|
13315
|
-
include_old: trackPrevious && (trackPrevious.columns ?? true),
|
|
13316
|
-
include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
|
|
13317
|
-
include_metadata: this.trackMetadata,
|
|
13318
|
-
ignore_empty_update: this.ignoreEmptyUpdates,
|
|
13319
|
-
columns: this.columns.map((c) => c.toJSON()),
|
|
13320
|
-
indexes: this.indexes.map((e) => e.toJSON(this))
|
|
13321
|
-
};
|
|
14217
|
+
return serialized;
|
|
13322
14218
|
}
|
|
13323
14219
|
}
|
|
13324
14220
|
|
|
@@ -13477,5 +14373,5 @@ const parseQuery = (query, parameters) => {
|
|
|
13477
14373
|
return { sqlStatement, parameters: parameters };
|
|
13478
14374
|
};
|
|
13479
14375
|
|
|
13480
|
-
export { AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_PRESSURE_LIMITS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DataStream, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, MEMORY_TRIGGER_CLAIM_MANAGER, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand,
|
|
14376
|
+
export { ATTACHMENT_TABLE, AbortOperation, AbstractPowerSyncDatabase, AbstractPowerSyncDatabaseOpenFactory, AbstractQueryProcessor, AbstractRemote, AbstractStreamingSyncImplementation, ArrayComparator, AttachmentContext, AttachmentQueue, AttachmentService, AttachmentState, AttachmentTable, BaseObserver, Column, ColumnType, ConnectionClosedError, ConnectionManager, ControlledExecutor, CrudBatch, CrudEntry, CrudTransaction, DEFAULT_CRUD_BATCH_LIMIT, DEFAULT_CRUD_UPLOAD_THROTTLE_MS, DEFAULT_INDEX_COLUMN_OPTIONS, DEFAULT_INDEX_OPTIONS, DEFAULT_LOCK_TIMEOUT_MS, DEFAULT_POWERSYNC_CLOSE_OPTIONS, DEFAULT_POWERSYNC_DB_OPTIONS, DEFAULT_PRESSURE_LIMITS, DEFAULT_REMOTE_LOGGER, DEFAULT_REMOTE_OPTIONS, DEFAULT_RETRY_DELAY_MS, DEFAULT_ROW_COMPARATOR, DEFAULT_STREAMING_SYNC_OPTIONS, DEFAULT_STREAM_CONNECTION_OPTIONS, DEFAULT_SYNC_CLIENT_IMPLEMENTATION, DEFAULT_TABLE_OPTIONS, DEFAULT_WATCH_QUERY_OPTIONS, DEFAULT_WATCH_THROTTLE_MS, DataStream, DiffTriggerOperation, DifferentialQueryProcessor, EMPTY_DIFFERENTIAL, EncodingType, FalsyComparator, FetchImplementationProvider, FetchStrategy, GetAllQuery, Index, IndexedColumn, InvalidSQLCharacters, LockType, LogLevel, MAX_AMOUNT_OF_COLUMNS, MAX_OP_ID, MEMORY_TRIGGER_CLAIM_MANAGER, OnChangeQueryProcessor, OpType, OpTypeEnum, OplogEntry, PSInternalTable, PowerSyncControlCommand, RowUpdateType, Schema, SqliteBucketStorage, SyncClientImplementation, SyncDataBatch, SyncDataBucket, SyncProgress, SyncStatus, SyncStreamConnectionMethod, SyncingService, Table, TableV2, TriggerManagerImpl, UpdateType, UploadQueueStats, WatchedQueryListenerEvent, attachmentFromSql, column, compilableQueryWatch, createBaseLogger, createLogger, extractTableUpdates, isBatchedUpdateNotification, isContinueCheckpointRequest, isDBAdapter, isPowerSyncDatabaseOptionsWithSettings, isSQLOpenFactory, isSQLOpenOptions, isStreamingKeepalive, isStreamingSyncCheckpoint, isStreamingSyncCheckpointComplete, isStreamingSyncCheckpointDiff, isStreamingSyncCheckpointPartiallyComplete, 
isStreamingSyncData, isSyncNewCheckpointRequest, mutexRunExclusive, parseQuery, runOnSchemaChange, sanitizeSQL, sanitizeUUID };
|
|
13481
14377
|
//# sourceMappingURL=bundle.mjs.map
|