@powersync/common 1.47.0 → 1.49.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. package/dist/bundle.cjs +189 -52
  2. package/dist/bundle.cjs.map +1 -1
  3. package/dist/bundle.mjs +188 -52
  4. package/dist/bundle.mjs.map +1 -1
  5. package/dist/bundle.node.cjs +189 -52
  6. package/dist/bundle.node.cjs.map +1 -1
  7. package/dist/bundle.node.mjs +188 -52
  8. package/dist/bundle.node.mjs.map +1 -1
  9. package/dist/index.d.cts +186 -89
  10. package/lib/client/triggers/TriggerManager.d.ts +13 -1
  11. package/lib/client/triggers/TriggerManagerImpl.d.ts +2 -2
  12. package/lib/client/triggers/TriggerManagerImpl.js +19 -7
  13. package/lib/client/triggers/TriggerManagerImpl.js.map +1 -1
  14. package/lib/db/DBAdapter.d.ts +48 -8
  15. package/lib/db/DBAdapter.js +126 -0
  16. package/lib/db/DBAdapter.js.map +1 -1
  17. package/lib/db/schema/RawTable.d.ts +61 -26
  18. package/lib/db/schema/RawTable.js +1 -32
  19. package/lib/db/schema/RawTable.js.map +1 -1
  20. package/lib/db/schema/Schema.d.ts +14 -7
  21. package/lib/db/schema/Schema.js +25 -3
  22. package/lib/db/schema/Schema.js.map +1 -1
  23. package/lib/db/schema/Table.d.ts +13 -8
  24. package/lib/db/schema/Table.js +3 -8
  25. package/lib/db/schema/Table.js.map +1 -1
  26. package/lib/db/schema/internal.d.ts +12 -0
  27. package/lib/db/schema/internal.js +15 -0
  28. package/lib/db/schema/internal.js.map +1 -0
  29. package/lib/index.d.ts +1 -1
  30. package/lib/index.js +0 -1
  31. package/lib/index.js.map +1 -1
  32. package/package.json +1 -1
  33. package/src/client/triggers/TriggerManager.ts +15 -2
  34. package/src/client/triggers/TriggerManagerImpl.ts +18 -6
  35. package/src/db/DBAdapter.ts +160 -8
  36. package/src/db/schema/RawTable.ts +66 -31
  37. package/src/db/schema/Schema.ts +27 -2
  38. package/src/db/schema/Table.ts +11 -11
  39. package/src/db/schema/internal.ts +17 -0
  40. package/src/index.ts +1 -1
@@ -9,6 +9,7 @@ import {
9
9
  TriggerManager,
10
10
  TriggerManagerConfig,
11
11
  TriggerRemoveCallback,
12
+ TriggerRemoveCallbackOptions,
12
13
  WithDiffOptions
13
14
  } from './TriggerManager.js';
14
15
 
@@ -201,6 +202,7 @@ export class TriggerManagerImpl implements TriggerManager {
201
202
  columns,
202
203
  when,
203
204
  hooks,
205
+ setupContext,
204
206
  // Fall back to the provided default if not given on this level
205
207
  useStorage = this.defaultConfig.useStorageByDefault
206
208
  } = options;
@@ -268,13 +270,19 @@ export class TriggerManagerImpl implements TriggerManager {
268
270
  * we need to ensure we can clean up the created resources.
269
271
  * We unfortunately cannot rely on transaction rollback.
270
272
  */
271
- const cleanup = async () => {
273
+ const cleanup = async (options?: TriggerRemoveCallbackOptions) => {
274
+ const { context } = options ?? {};
272
275
  disposeWarningListener();
273
- return this.db.writeLock(async (tx) => {
276
+ const doCleanup = async (tx: LockContext) => {
274
277
  await this.removeTriggers(tx, triggerIds);
275
- await tx.execute(/* sql */ `DROP TABLE IF EXISTS ${destination};`);
278
+ await tx.execute(`DROP TABLE IF EXISTS ${destination};`);
276
279
  await releaseStorageClaim?.();
277
- });
280
+ };
281
+ if (context) {
282
+ await doCleanup(context);
283
+ } else {
284
+ await this.db.writeLock(doCleanup);
285
+ }
278
286
  };
279
287
 
280
288
  const setup = async (tx: LockContext) => {
@@ -360,11 +368,15 @@ export class TriggerManagerImpl implements TriggerManager {
360
368
  };
361
369
 
362
370
  try {
363
- await this.db.writeLock(setup);
371
+ if (setupContext) {
372
+ await setup(setupContext);
373
+ } else {
374
+ await this.db.writeLock(setup);
375
+ }
364
376
  return cleanup;
365
377
  } catch (error) {
366
378
  try {
367
- await cleanup();
379
+ await cleanup(setupContext ? { context: setupContext } : undefined);
368
380
  } catch (cleanupError) {
369
381
  throw new AggregateError([error, cleanupError], 'Error during operation and cleanup');
370
382
  }
@@ -41,7 +41,7 @@ export interface DBGetUtils {
41
41
  get<T>(sql: string, parameters?: any[]): Promise<T>;
42
42
  }
43
43
 
44
- export interface LockContext extends DBGetUtils {
44
+ export interface SqlExecutor {
45
45
  /** Execute a single write statement. */
46
46
  execute: (query: string, params?: any[] | undefined) => Promise<QueryResult>;
47
47
  /**
@@ -59,6 +59,61 @@ export interface LockContext extends DBGetUtils {
59
59
  * ```[ { id: '33', name: 'list 1', content: 'Post content', list_id: '1' } ]```
60
60
  */
61
61
  executeRaw: (query: string, params?: any[] | undefined) => Promise<any[][]>;
62
+
63
+ executeBatch: (query: string, params?: any[][]) => Promise<QueryResult>;
64
+ }
65
+
66
+ export interface LockContext extends SqlExecutor, DBGetUtils {}
67
+
68
+ /**
69
+ * Implements {@link DBGetUtils} on a {@link SqlExecutor}.
70
+ */
71
+ export function DBGetUtilsDefaultMixin<TBase extends new (...args: any[]) => Omit<SqlExecutor, 'executeBatch'>>(
72
+ Base: TBase
73
+ ) {
74
+ return class extends Base implements DBGetUtils, SqlExecutor {
75
+ async getAll<T>(sql: string, parameters?: any[]): Promise<T[]> {
76
+ const res = await this.execute(sql, parameters);
77
+ return res.rows?._array ?? [];
78
+ }
79
+
80
+ async getOptional<T>(sql: string, parameters?: any[]): Promise<T | null> {
81
+ const res = await this.execute(sql, parameters);
82
+ return res.rows?.item(0) ?? null;
83
+ }
84
+
85
+ async get<T>(sql: string, parameters?: any[]): Promise<T> {
86
+ const res = await this.execute(sql, parameters);
87
+ const first = res.rows?.item(0);
88
+ if (!first) {
89
+ throw new Error('Result set is empty');
90
+ }
91
+ return first;
92
+ }
93
+
94
+ async executeBatch(query: string, params: any[][] = []): Promise<QueryResult> {
95
+ // If this context can run batch statements natively, use that.
96
+ // @ts-ignore
97
+ if (super.executeBatch) {
98
+ // @ts-ignore
99
+ return super.executeBatch(query, params);
100
+ }
101
+
102
+ // Emulate executeBatch by running statements individually.
103
+ let lastInsertId: number | undefined;
104
+ let rowsAffected = 0;
105
+ for (const set of params) {
106
+ const result = await this.execute(query, set);
107
+ lastInsertId = result.insertId;
108
+ rowsAffected += result.rowsAffected;
109
+ }
110
+
111
+ return {
112
+ rowsAffected,
113
+ insertId: lastInsertId
114
+ };
115
+ }
116
+ };
62
117
  }
63
118
 
64
119
  export interface Transaction extends LockContext {
@@ -107,22 +162,119 @@ export interface DBLockOptions {
107
162
  timeoutMs?: number;
108
163
  }
109
164
 
110
- export interface DBAdapter extends BaseObserverInterface<DBAdapterListener>, DBGetUtils {
111
- close: () => void | Promise<void>;
112
- execute: (query: string, params?: any[]) => Promise<QueryResult>;
113
- executeRaw: (query: string, params?: any[]) => Promise<any[][]>;
114
- executeBatch: (query: string, params?: any[][]) => Promise<QueryResult>;
165
+ export interface ConnectionPool extends BaseObserverInterface<DBAdapterListener> {
115
166
  name: string;
167
+ close: () => void | Promise<void>;
116
168
  readLock: <T>(fn: (tx: LockContext) => Promise<T>, options?: DBLockOptions) => Promise<T>;
117
- readTransaction: <T>(fn: (tx: Transaction) => Promise<T>, options?: DBLockOptions) => Promise<T>;
118
169
  writeLock: <T>(fn: (tx: LockContext) => Promise<T>, options?: DBLockOptions) => Promise<T>;
119
- writeTransaction: <T>(fn: (tx: Transaction) => Promise<T>, options?: DBLockOptions) => Promise<T>;
170
+
120
171
  /**
121
172
  * This method refreshes the schema information across all connections. This is for advanced use cases, and should generally not be needed.
122
173
  */
123
174
  refreshSchema: () => Promise<void>;
124
175
  }
125
176
 
177
+ export interface DBAdapter extends ConnectionPool, SqlExecutor, DBGetUtils {
178
+ readTransaction: <T>(fn: (tx: Transaction) => Promise<T>, options?: DBLockOptions) => Promise<T>;
179
+ writeTransaction: <T>(fn: (tx: Transaction) => Promise<T>, options?: DBLockOptions) => Promise<T>;
180
+ }
181
+
182
+ /**
183
+ * A mixin to implement {@link DBAdapter} by delegating to {@link ConnectionPool.readLock} and
184
+ * {@link ConnectionPool.writeLock}.
185
+ */
186
+ export function DBAdapterDefaultMixin<TBase extends new (...args: any[]) => ConnectionPool>(Base: TBase) {
187
+ return class extends Base implements DBAdapter {
188
+ readTransaction<T>(fn: (tx: Transaction) => Promise<T>, options?: DBLockOptions): Promise<T> {
189
+ return this.readLock((ctx) => TransactionImplementation.runWith(ctx, fn), options);
190
+ }
191
+
192
+ writeTransaction<T>(fn: (tx: Transaction) => Promise<T>, options?: DBLockOptions): Promise<T> {
193
+ return this.writeLock((ctx) => TransactionImplementation.runWith(ctx, fn), options);
194
+ }
195
+
196
+ getAll<T>(sql: string, parameters?: any[]): Promise<T[]> {
197
+ return this.readLock((ctx) => ctx.getAll(sql, parameters));
198
+ }
199
+
200
+ getOptional<T>(sql: string, parameters?: any[]): Promise<T | null> {
201
+ return this.readLock((ctx) => ctx.getOptional(sql, parameters));
202
+ }
203
+
204
+ get<T>(sql: string, parameters?: any[]): Promise<T> {
205
+ return this.readLock((ctx) => ctx.get(sql, parameters));
206
+ }
207
+
208
+ execute(query: string, params?: any[]): Promise<QueryResult> {
209
+ return this.writeLock((ctx) => ctx.execute(query, params));
210
+ }
211
+
212
+ executeRaw(query: string, params?: any[]): Promise<any[][]> {
213
+ return this.writeLock((ctx) => ctx.executeRaw(query, params));
214
+ }
215
+
216
+ executeBatch(query: string, params?: any[][]): Promise<QueryResult> {
217
+ return this.writeTransaction((tx) => tx.executeBatch(query, params));
218
+ }
219
+ };
220
+ }
221
+
222
+ class BaseTransaction implements SqlExecutor {
223
+ private finalized = false;
224
+
225
+ constructor(private inner: SqlExecutor) {}
226
+
227
+ async commit(): Promise<QueryResult> {
228
+ if (this.finalized) {
229
+ return { rowsAffected: 0 };
230
+ }
231
+ this.finalized = true;
232
+ return this.inner.execute('COMMIT');
233
+ }
234
+
235
+ async rollback(): Promise<QueryResult> {
236
+ if (this.finalized) {
237
+ return { rowsAffected: 0 };
238
+ }
239
+ this.finalized = true;
240
+ return this.inner.execute('ROLLBACK');
241
+ }
242
+
243
+ execute(query: string, params?: any[] | undefined): Promise<QueryResult> {
244
+ return this.inner.execute(query, params);
245
+ }
246
+
247
+ executeRaw(query: string, params?: any[] | undefined): Promise<any[][]> {
248
+ return this.inner.executeRaw(query, params);
249
+ }
250
+
251
+ executeBatch(query: string, params?: any[][]): Promise<QueryResult> {
252
+ return this.inner.executeBatch(query, params);
253
+ }
254
+ }
255
+
256
+ class TransactionImplementation extends DBGetUtilsDefaultMixin(BaseTransaction) {
257
+ static async runWith<T>(ctx: LockContext, fn: (tx: Transaction) => Promise<T>): Promise<T> {
258
+ let tx = new TransactionImplementation(ctx);
259
+
260
+ try {
261
+ await ctx.execute('BEGIN IMMEDIATE');
262
+
263
+ const result = await fn(tx);
264
+ await tx.commit();
265
+ return result;
266
+ } catch (ex) {
267
+ try {
268
+ await tx.rollback();
269
+ } catch (ex2) {
270
+ // In rare cases, a rollback may fail.
271
+ // Safe to ignore.
272
+ }
273
+ throw ex;
274
+ }
275
+ }
276
+ }
277
+
126
278
  export function isBatchedUpdateNotification(
127
279
  update: BatchedUpdateNotification | UpdateNotification
128
280
  ): update is BatchedUpdateNotification {
@@ -1,8 +1,23 @@
1
+ import { TableOrRawTableOptions } from './Table.js';
2
+
1
3
  /**
2
- * A pending variant of a {@link RawTable} that doesn't have a name (because it would be inferred when creating the
3
- * schema).
4
+ * Instructs PowerSync to sync data into a "raw" table.
5
+ *
6
+ * Since raw tables are not backed by JSON, running complex queries on them may be more efficient. Further, they allow
7
+ * using client-side table and column constraints.
8
+ *
9
+ * To collect local writes to raw tables with PowerSync, custom triggers are required. See
10
+ * {@link https://docs.powersync.com/usage/use-case-examples/raw-tables the documentation} for details and an example on
11
+ * using raw tables.
12
+ *
13
+ * Note that raw tables are only supported when using the new `SyncClientImplementation.rust` sync client.
14
+ *
15
+ * @experimental Please note that this feature is experimental at the moment, and not covered by PowerSync semver or
16
+ * stability guarantees.
4
17
  */
5
- export type RawTableType = {
18
+ export type RawTableType = RawTableTypeWithStatements | InferredRawTableType;
19
+
20
+ interface RawTableTypeWithStatements {
6
21
  /**
7
22
  * The statement to run when PowerSync detects that a row needs to be inserted or updated.
8
23
  */
@@ -11,7 +26,44 @@ export type RawTableType = {
11
26
  * The statement to run when PowerSync detects that a row needs to be deleted.
12
27
  */
13
28
  delete: PendingStatement;
14
- };
29
+
30
+ /**
31
+ * An optional statement to run when `disconnectAndClear()` is called on a PowerSync database.
32
+ */
33
+ clear?: string;
34
+ }
35
+
36
+ /**
37
+ * The schema of a {@link RawTableType} in the local database.
38
+ *
39
+ * This information is optional when declaring raw tables. However, providing it allows the sync client to infer `put`
40
+ * and `delete` statements automatically.
41
+ */
42
+ interface RawTableSchema extends TableOrRawTableOptions {
43
+ /**
44
+ * The actual name of the raw table in the local schema.
45
+ *
46
+ * Unlike {@link RawTable.name}, which describes the name of synced tables to match, this reflects the SQLite table
47
+ * name. This is used to infer {@link RawTableType.put} and {@link RawTableType.delete} statements for the sync
48
+ * client. It can also be used to auto-generate triggers forwarding writes on raw tables into the CRUD upload queue
49
+ * (using the `powersync_create_raw_table_crud_trigger` SQL function).
50
+ *
51
+ * When absent, defaults to {@link RawTable.name}.
52
+ */
53
+ tableName?: string;
54
+
55
+ /**
56
+ * An optional filter of columns that should be synced.
57
+ *
58
+ * By default, all columns in a raw table are considered for sync. If a filter is specified, PowerSync treats
59
+ * unmatched columns as local-only and will not attempt to sync them.
60
+ */
61
+ syncedColumns?: string[];
62
+ }
63
+
64
+ interface InferredRawTableType extends Partial<RawTableTypeWithStatements> {
65
+ schema: RawTableSchema;
66
+ }
15
67
 
16
68
  /**
17
69
  * A parameter to use as part of {@link PendingStatement}.
@@ -21,8 +73,10 @@ export type RawTableType = {
21
73
  *
22
74
  * For insert and replace operations, the values of columns in the table are available as parameters through
23
75
  * `{Column: 'name'}`.
76
+ * The `"Rest"` parameter gets resolved to a JSON object covering all values from the synced row that haven't been
77
+ * covered by a `Column` parameter.
24
78
  */
25
- export type PendingStatementParameter = 'Id' | { Column: string };
79
+ export type PendingStatementParameter = 'Id' | { Column: string } | 'Rest';
26
80
 
27
81
  /**
28
82
  * A statement that the PowerSync client should use to insert or delete data into a table managed by the user.
@@ -33,35 +87,16 @@ export type PendingStatement = {
33
87
  };
34
88
 
35
89
  /**
36
- * Instructs PowerSync to sync data into a "raw" table.
37
- *
38
- * Since raw tables are not backed by JSON, running complex queries on them may be more efficient. Further, they allow
39
- * using client-side table and column constraints.
40
- *
41
- * To collect local writes to raw tables with PowerSync, custom triggers are required. See
42
- * {@link https://docs.powersync.com/usage/use-case-examples/raw-tables the documentation} for details and an example on
43
- * using raw tables.
44
- *
45
- * Note that raw tables are only supported when using the new `SyncClientImplementation.rust` sync client.
46
- *
47
- * @experimental Please note that this feature is experimental at the moment, and not covered by PowerSync semver or
48
- * stability guarantees.
90
+ * @internal
49
91
  */
50
- export class RawTable implements RawTableType {
92
+ export type RawTable<T extends RawTableType = RawTableType> = T & {
51
93
  /**
52
94
  * The name of the table.
53
95
  *
54
- * This does not have to match the actual table name in the schema - {@link put} and {@link delete} are free to use
55
- * another table. Instead, this name is used by the sync client to recognize that operations on this table (as it
56
- * appears in the source / backend database) are to be handled specially.
96
+ * This does not have to match the actual table name in the schema - {@link RawTableType.put} and
97
+ * {@link RawTableType.delete} are free to use another table. Instead, this name is used by the sync client to
98
+ * recognize that operations on this table (as it appears in the source / backend database) are to be handled
99
+ * specially.
57
100
  */
58
101
  name: string;
59
- put: PendingStatement;
60
- delete: PendingStatement;
61
-
62
- constructor(name: string, type: RawTableType) {
63
- this.name = name;
64
- this.put = type.put;
65
- this.delete = type.delete;
66
- }
67
- }
102
+ };
@@ -1,3 +1,4 @@
1
+ import { encodeTableOptions } from './internal.js';
1
2
  import { RawTable, RawTableType } from './RawTable.js';
2
3
  import { RowType, Table } from './Table.js';
3
4
 
@@ -57,7 +58,7 @@ export class Schema<S extends SchemaType = SchemaType> {
57
58
  */
58
59
  withRawTables(tables: Record<string, RawTableType>) {
59
60
  for (const [name, rawTableDefinition] of Object.entries(tables)) {
60
- this.rawTables.push(new RawTable(name, rawTableDefinition));
61
+ this.rawTables.push({ name, ...rawTableDefinition });
61
62
  }
62
63
  }
63
64
 
@@ -70,7 +71,31 @@ export class Schema<S extends SchemaType = SchemaType> {
70
71
  toJSON() {
71
72
  return {
72
73
  tables: this.tables.map((t) => t.toJSON()),
73
- raw_tables: this.rawTables
74
+ raw_tables: this.rawTables.map(Schema.rawTableToJson)
74
75
  };
75
76
  }
77
+
78
+ /**
79
+ * Returns a representation of the raw table that is understood by the PowerSync SQLite core extension.
80
+ *
81
+ * The output of this can be passed through `JSON.stringify` and then used in `powersync_create_raw_table_crud_trigger`
82
+ * to define triggers for this table.
83
+ */
84
+ static rawTableToJson(table: RawTable): unknown {
85
+ const serialized: any = {
86
+ name: table.name,
87
+ put: table.put,
88
+ delete: table.delete,
89
+ clear: table.clear
90
+ };
91
+ if ('schema' in table) {
92
+ // We have schema options, those are flattened into the outer JSON object for the core extension.
93
+ const schema = table.schema;
94
+ serialized.table_name = schema.tableName ?? table.name;
95
+ serialized.synced_columns = schema.syncedColumns;
96
+ Object.assign(serialized, encodeTableOptions(table.schema));
97
+ }
98
+
99
+ return serialized;
100
+ }
76
101
  }
@@ -8,17 +8,24 @@ import {
8
8
  } from './Column.js';
9
9
  import { Index } from './Index.js';
10
10
  import { IndexedColumn } from './IndexedColumn.js';
11
+ import { encodeTableOptions } from './internal.js';
11
12
  import { TableV2 } from './TableV2.js';
12
13
 
13
- interface SharedTableOptions {
14
+ /**
15
+ * Options that apply both to JSON-based tables and raw tables.
16
+ */
17
+ export interface TableOrRawTableOptions {
14
18
  localOnly?: boolean;
15
19
  insertOnly?: boolean;
16
- viewName?: string;
17
20
  trackPrevious?: boolean | TrackPreviousOptions;
18
21
  trackMetadata?: boolean;
19
22
  ignoreEmptyUpdates?: boolean;
20
23
  }
21
24
 
25
+ interface SharedTableOptions extends TableOrRawTableOptions {
26
+ viewName?: string;
27
+ }
28
+
22
29
  /** Whether to include previous column values when PowerSync tracks local changes.
23
30
  *
24
31
  * Including old values may be helpful for some backend connector implementations, which is
@@ -341,19 +348,12 @@ export class Table<Columns extends ColumnsType = ColumnsType> {
341
348
  }
342
349
 
343
350
  toJSON() {
344
- const trackPrevious = this.trackPrevious;
345
-
346
351
  return {
347
352
  name: this.name,
348
353
  view_name: this.viewName,
349
- local_only: this.localOnly,
350
- insert_only: this.insertOnly,
351
- include_old: trackPrevious && ((trackPrevious as any).columns ?? true),
352
- include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
353
- include_metadata: this.trackMetadata,
354
- ignore_empty_update: this.ignoreEmptyUpdates,
355
354
  columns: this.columns.map((c) => c.toJSON()),
356
- indexes: this.indexes.map((e) => e.toJSON(this))
355
+ indexes: this.indexes.map((e) => e.toJSON(this)),
356
+ ...encodeTableOptions(this)
357
357
  };
358
358
  }
359
359
  }
@@ -0,0 +1,17 @@
1
+ import { TableOrRawTableOptions } from './Table.js';
2
+
3
+ /**
4
+ * @internal Not exported from `index.ts`.
5
+ */
6
+ export function encodeTableOptions(options: TableOrRawTableOptions) {
7
+ const trackPrevious = options.trackPrevious;
8
+
9
+ return {
10
+ local_only: options.localOnly,
11
+ insert_only: options.insertOnly,
12
+ include_old: trackPrevious && ((trackPrevious as any).columns ?? true),
13
+ include_old_only_when_changed: typeof trackPrevious == 'object' && trackPrevious.onlyWhenChanged == true,
14
+ include_metadata: options.trackMetadata,
15
+ ignore_empty_update: options.ignoreEmptyUpdates
16
+ };
17
+ }
package/src/index.ts CHANGED
@@ -39,7 +39,7 @@ export * from './db/DBAdapter.js';
39
39
  export * from './db/schema/Column.js';
40
40
  export * from './db/schema/Index.js';
41
41
  export * from './db/schema/IndexedColumn.js';
42
- export * from './db/schema/RawTable.js';
42
+ export { RawTableType, PendingStatementParameter, PendingStatement } from './db/schema/RawTable.js';
43
43
  export * from './db/schema/Schema.js';
44
44
  export * from './db/schema/Table.js';
45
45
  export * from './db/schema/TableV2.js';