@tanstack/powersync-db-collection 0.1.20 → 0.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/PowerSyncTransactor.cjs +61 -14
- package/dist/cjs/PowerSyncTransactor.cjs.map +1 -1
- package/dist/cjs/PowerSyncTransactor.d.cts +8 -0
- package/dist/cjs/definitions.cjs.map +1 -1
- package/dist/cjs/definitions.d.cts +4 -0
- package/dist/cjs/powersync.cjs +2 -1
- package/dist/cjs/powersync.cjs.map +1 -1
- package/dist/esm/PowerSyncTransactor.d.ts +8 -0
- package/dist/esm/PowerSyncTransactor.js +61 -14
- package/dist/esm/PowerSyncTransactor.js.map +1 -1
- package/dist/esm/definitions.d.ts +4 -0
- package/dist/esm/definitions.js.map +1 -1
- package/dist/esm/powersync.js +2 -1
- package/dist/esm/powersync.js.map +1 -1
- package/package.json +5 -5
- package/src/PowerSyncTransactor.ts +85 -21
- package/src/definitions.ts +5 -0
- package/src/powersync.ts +2 -1
package/dist/cjs/PowerSyncTransactor.cjs
CHANGED
@@ -2,8 +2,8 @@
 Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
 const common = require("@powersync/common");
 const DebugModule = require("debug");
-const helpers = require("./helpers.cjs");
 const PendingOperationStore = require("./PendingOperationStore.cjs");
+const helpers = require("./helpers.cjs");
 const debug = DebugModule.debug(`ts/db:powersync`);
 class PowerSyncTransactor {
   constructor(options) {
@@ -79,6 +79,12 @@ class PowerSyncTransactor {
       async (tableName, mutation2, serializeValue) => {
         const values = serializeValue(mutation2.modified);
         const keys = Object.keys(values).map((key) => common.sanitizeSQL`${key}`);
+        const queryParameters = Object.values(values);
+        const metadataValue = this.processMutationMetadata(mutation2);
+        if (metadataValue != null) {
+          keys.push(`_metadata`);
+          queryParameters.push(metadataValue);
+        }
         await context.execute(
           `
           INSERT into ${tableName}
@@ -86,7 +92,7 @@ class PowerSyncTransactor {
           VALUES
           (${keys.map((_) => `?`).join(`, `)})
           `,
-          Object.values(values)
+          queryParameters
         );
       }
     );
@@ -100,13 +106,19 @@ class PowerSyncTransactor {
       async (tableName, mutation2, serializeValue) => {
         const values = serializeValue(mutation2.modified);
         const keys = Object.keys(values).map((key) => common.sanitizeSQL`${key}`);
+        const queryParameters = Object.values(values);
+        const metadataValue = this.processMutationMetadata(mutation2);
+        if (metadataValue != null) {
+          keys.push(`_metadata`);
+          queryParameters.push(metadataValue);
+        }
         await context.execute(
           `
           UPDATE ${tableName}
           SET ${keys.map((key) => `${key} = ?`).join(`, `)}
           WHERE id = ?
           `,
-          [...Object.values(values), helpers.asPowerSyncRecord(mutation2.modified).id]
+          [...queryParameters, helpers.asPowerSyncRecord(mutation2.modified).id]
         );
       }
     );
@@ -118,12 +130,22 @@ class PowerSyncTransactor {
       context,
       waitForCompletion,
       async (tableName, mutation2) => {
-        await context.execute(
-          `
-          DELETE FROM ${tableName} WHERE id = ?
-          `,
-          [helpers.asPowerSyncRecord(mutation2.original).id]
-        );
+        const metadataValue = this.processMutationMetadata(mutation2);
+        if (metadataValue != null) {
+          await context.execute(
+            `
+            UPDATE ${tableName} SET _deleted = TRUE, _metadata = ? WHERE id = ?
+            `,
+            [metadataValue, helpers.asPowerSyncRecord(mutation2.original).id]
+          );
+        } else {
+          await context.execute(
+            `
+            DELETE FROM ${tableName} WHERE id = ?
+            `,
+            [helpers.asPowerSyncRecord(mutation2.original).id]
+          );
+        }
       }
     );
   }
@@ -134,11 +156,7 @@ class PowerSyncTransactor {
    * - Returning the last pending diff operation if required
    */
   async handleOperationWithCompletion(mutation, context, waitForCompletion, handler) {
-    if (typeof mutation.collection.config.utils?.getMeta != `function`) {
-      throw new Error(`Could not get tableName from mutation's collection config.
-      The provided mutation might not have originated from PowerSync.`);
-    }
-    const { tableName, trackedTableName, serializeValue } = mutation.collection.config.utils.getMeta();
+    const { tableName, trackedTableName, serializeValue } = this.getMutationCollectionMeta(mutation);
     await handler(common.sanitizeSQL`${tableName}`, mutation, serializeValue);
     if (!waitForCompletion) {
       return null;
@@ -153,6 +171,35 @@ class PowerSyncTransactor {
       timestamp: diffOperation.timestamp
     };
   }
+  getMutationCollectionMeta(mutation) {
+    if (typeof mutation.collection.config.utils?.getMeta != `function`) {
+      throw new Error(`Collection is not a PowerSync collection.`);
+    }
+    return mutation.collection.config.utils.getMeta();
+  }
+  /**
+   * Processes collection mutation metadata for persistence to the database.
+   * We only support storing string metadata.
+   * @returns null if no metadata should be stored.
+   */
+  processMutationMetadata(mutation) {
+    const { metadataIsTracked } = this.getMutationCollectionMeta(mutation);
+    if (!metadataIsTracked) {
+      if (typeof mutation.metadata != `undefined`) {
+        this.database.logger.warn(
+          `Metadata provided for collection ${mutation.collection.id} but the PowerSync table does not track metadata. The PowerSync table should be configured with trackMetadata: true.`,
+          mutation.metadata
+        );
+      }
+      return null;
+    } else if (typeof mutation.metadata == `undefined`) {
+      return null;
+    } else if (typeof mutation.metadata == `string`) {
+      return mutation.metadata;
+    } else {
+      return JSON.stringify(mutation.metadata);
+    }
+  }
 }
 exports.PowerSyncTransactor = PowerSyncTransactor;
 //# sourceMappingURL=PowerSyncTransactor.cjs.map
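Taken together, the hunks above mean: when the collection's PowerSync table tracks metadata, inserts and updates gain a `_metadata` column, and deletes become soft deletes (`_deleted = TRUE`) so the metadata can accompany the CRUD entry. Below is a minimal sketch of exercising that path from application code; it assumes the hypothetical `db`/`documents` setup shown in the `declare` lines, plus that TanStack DB's per-operation `metadata` option is what surfaces as `mutation.metadata` here. Treat it as an illustration, not the package's documented surface.

```typescript
import { randomUUID } from "node:crypto"
import { createCollection } from "@tanstack/db"
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"
import type { AbstractPowerSyncDatabase, Table } from "@powersync/common"

// Assumed setup, as in the package's own doc examples: a PowerSync database
// and a table declared with trackMetadata: true (see the powersync.cjs hunks below).
declare const db: AbstractPowerSyncDatabase
declare const documents: Table

const collection = createCollection(
  powerSyncCollectionOptions({ database: db, table: documents }),
)

// String metadata is written to the `_metadata` column as-is.
const id = randomUUID()
collection.insert({ id, name: `doc` }, { metadata: `created-offline` })

// Non-string metadata is JSON.stringify-ed (see processMutationMetadata above).
// Because metadata is tracked, this delete runs
//   UPDATE documents SET _deleted = TRUE, _metadata = ? WHERE id = ?
// instead of a plain DELETE.
collection.delete(id, { metadata: { reason: `cleanup` } })
```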
package/dist/cjs/PowerSyncTransactor.cjs.map
CHANGED
@@ -1 +1 @@
- (single-line sourcemap payload omitted)
+ (single-line sourcemap payload omitted; regenerated for the source changes above)
package/dist/cjs/PowerSyncTransactor.d.cts
CHANGED
@@ -1,6 +1,7 @@
 import { PendingOperationStore, PendingOperation } from './PendingOperationStore.cjs';
 import { AbstractPowerSyncDatabase, LockContext } from '@powersync/common';
 import { PendingMutation, Transaction } from '@tanstack/db';
+import { PowerSyncCollectionMeta } from './definitions.cjs';
 export type TransactorOptions = {
     database: AbstractPowerSyncDatabase;
 };
@@ -57,4 +58,11 @@ export declare class PowerSyncTransactor {
      * - Returning the last pending diff operation if required
      */
     protected handleOperationWithCompletion(mutation: PendingMutation<any>, context: LockContext, waitForCompletion: boolean, handler: (tableName: string, mutation: PendingMutation<any>, serializeValue: (value: any) => Record<string, unknown>) => Promise<void>): Promise<PendingOperation | null>;
+    protected getMutationCollectionMeta(mutation: PendingMutation<any>): PowerSyncCollectionMeta<any>;
+    /**
+     * Processes collection mutation metadata for persistence to the database.
+     * We only support storing string metadata.
+     * @returns null if no metadata should be stored.
+     */
+    protected processMutationMetadata(mutation: PendingMutation<any>): string | null;
 }
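Both additions are declared `protected`, which leaves room for specializing the transactor. A hedged sketch of one such extension, assuming you want to wrap the default string/JSON encoding; the `AuditingTransactor` name and the `audit:` prefix are illustrative, not part of the package:

```typescript
import { PowerSyncTransactor } from "@tanstack/powersync-db-collection"
import type { PendingMutation } from "@tanstack/db"

class AuditingTransactor extends PowerSyncTransactor {
  // Illustrative override: tag every stored metadata value so the backend can
  // tell which writes came through this transactor. Falls back to the base
  // behavior (string as-is, otherwise JSON.stringify, null when untracked).
  protected override processMutationMetadata(
    mutation: PendingMutation<any>,
  ): string | null {
    const value = super.processMutationMetadata(mutation)
    return value == null ? null : `audit:${value}`
  }
}
```

It drops in wherever the base class is used, e.g. `new AuditingTransactor({ database: db }).applyTransaction(transaction)`.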
package/dist/cjs/definitions.cjs.map
CHANGED
@@ -1 +1 @@
- (single-line sourcemap payload omitted)
+ (single-line sourcemap payload omitted; regenerated for the source changes above)
package/dist/cjs/definitions.d.cts
CHANGED
@@ -179,6 +179,10 @@ export type PowerSyncCollectionMeta<TTable extends Table = Table> = {
      * Serializes a collection value to the SQLite type
      */
     serializeValue: (value: any) => ExtractedTable<TTable>;
+    /**
+     * Whether the PowerSync table tracks metadata.
+     */
+    metadataIsTracked: boolean;
 };
 /**
  * A CollectionConfig which includes utilities for PowerSync.
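The new flag is reachable through the existing `getMeta()` utility, so application code can check for support before attaching metadata. A minimal sketch, assuming a `collection` created via `powerSyncCollectionOptions` as in the example further above:

```typescript
// metadataIsTracked is the field added in this release; the rest is unchanged.
const { tableName, metadataIsTracked } = collection.config.utils.getMeta()

if (!metadataIsTracked) {
  // Metadata passed with mutations on this collection is dropped with a logged
  // warning; enable it by constructing the PowerSync Table with trackMetadata: true.
  console.warn(`${tableName} does not track metadata`)
}
```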
package/dist/cjs/powersync.cjs
CHANGED
@@ -18,7 +18,7 @@ function powerSyncCollectionOptions(config) {
   const deserializationSchema = `deserializationSchema` in config ? config.deserializationSchema : null;
   const serializer = `serializer` in config ? config.serializer : void 0;
   const onDeserializationError = `onDeserializationError` in config ? config.onDeserializationError : void 0;
-  const { viewName } = table;
+  const { viewName, trackMetadata: metadataIsTracked } = table;
   const deserializeSyncRow = (value) => {
     const validationSchema = deserializationSchema || schema$1;
     const validation = validationSchema[`~standard`].validate(value);
@@ -184,6 +184,7 @@ function powerSyncCollectionOptions(config) {
     getMeta: () => ({
       tableName: viewName,
       trackedTableName,
+      metadataIsTracked,
       serializeValue: (value) => serialization.serializeForSQLite(
         value,
         // This is required by the input generic
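`trackMetadata` here is the PowerSync `Table` option being destructured from the table definition; enabling it adds the `_metadata` column the transactor writes to, and it now surfaces as `metadataIsTracked` in `getMeta()`. A minimal declaration, mirroring the schema from the package's own doc examples:

```typescript
import { column, Schema, Table } from "@powersync/common"

const APP_SCHEMA = new Schema({
  documents: new Table(
    { name: column.text },
    { trackMetadata: true }, // surfaces as metadataIsTracked in getMeta()
  ),
})
```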
@@ -1 +1 @@
(powersync.cjs.map regenerated in step with powersync.cjs; its embedded sourcesContent mirrors the updated src/powersync.ts, which now reads `trackMetadata` off the table as `metadataIsTracked` and returns it from `utils.getMeta()`. Minified mappings omitted.)
@@ -1,6 +1,7 @@
 import { PendingOperationStore, PendingOperation } from './PendingOperationStore.js';
 import { AbstractPowerSyncDatabase, LockContext } from '@powersync/common';
 import { PendingMutation, Transaction } from '@tanstack/db';
+import { PowerSyncCollectionMeta } from './definitions.js';
 export type TransactorOptions = {
     database: AbstractPowerSyncDatabase;
 };
@@ -57,4 +58,11 @@ export declare class PowerSyncTransactor {
      * - Returning the last pending diff operation if required
      */
     protected handleOperationWithCompletion(mutation: PendingMutation<any>, context: LockContext, waitForCompletion: boolean, handler: (tableName: string, mutation: PendingMutation<any>, serializeValue: (value: any) => Record<string, unknown>) => Promise<void>): Promise<PendingOperation | null>;
+    protected getMutationCollectionMeta(mutation: PendingMutation<any>): PowerSyncCollectionMeta<any>;
+    /**
+     * Processes collection mutation metadata for persistence to the database.
+     * We only support storing string metadata.
+     * @returns null if no metadata should be stored.
+     */
+    protected processMutationMetadata(mutation: PendingMutation<any>): string | null;
 }
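With `processMutationMetadata` now on the declaration surface, per-mutation metadata supplied through TanStack DB can be persisted into the table's `_metadata` column. A minimal usage sketch follows; it assumes PowerSync's `Table` options accept the `trackMetadata: true` flag named in this diff's warning message, and that TanStack DB's operation config carries a `metadata` field (the transactor reads `mutation.metadata`):

```typescript
import { Schema, Table, column } from "@powersync/common"
import type { AbstractPowerSyncDatabase } from "@powersync/common"
import { createCollection } from "@tanstack/db"
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"

// Assumption: trackMetadata exposes the _metadata column the transactor writes to.
const APP_SCHEMA = new Schema({
  documents: new Table({ name: column.text }, { trackMetadata: true }),
})

declare const db: AbstractPowerSyncDatabase // construction omitted

const collection = createCollection(
  powerSyncCollectionOptions({
    database: db,
    table: APP_SCHEMA.props.documents,
  }),
)

// String metadata is stored as-is; anything else is JSON.stringify'd.
collection.insert(
  { id: crypto.randomUUID(), name: `hello` },
  { metadata: { source: `import-job` } },
)
```

If the table was created without `trackMetadata`, the transactor drops the metadata and logs a warning rather than failing the mutation.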
@@ -1,7 +1,7 @@
 import { sanitizeSQL } from "@powersync/common";
 import DebugModule from "debug";
-import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers.js";
 import { PendingOperationStore } from "./PendingOperationStore.js";
+import { asPowerSyncRecord, mapOperationToPowerSync } from "./helpers.js";
 const debug = DebugModule.debug(`ts/db:powersync`);
 class PowerSyncTransactor {
   constructor(options) {
@@ -77,6 +77,12 @@ class PowerSyncTransactor {
       async (tableName, mutation2, serializeValue) => {
         const values = serializeValue(mutation2.modified);
         const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`);
+        const queryParameters = Object.values(values);
+        const metadataValue = this.processMutationMetadata(mutation2);
+        if (metadataValue != null) {
+          keys.push(`_metadata`);
+          queryParameters.push(metadataValue);
+        }
         await context.execute(
           `
           INSERT into ${tableName}
@@ -84,7 +90,7 @@ class PowerSyncTransactor {
           VALUES
           (${keys.map((_) => `?`).join(`, `)})
           `,
-          Object.values(values)
+          queryParameters
         );
       }
     );
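To make the new parameter assembly concrete, here is a standalone sketch of how the INSERT above expands once metadata is appended; the `documents` table and its values are hypothetical:

```typescript
// Standalone sketch of the INSERT assembly above; table and values are hypothetical.
const values: Record<string, unknown> = { id: `doc-1`, name: `hello` }
const metadataValue: string | null = `import-job` // what processMutationMetadata would return

const keys = Object.keys(values)
const queryParameters = Object.values(values)
if (metadataValue != null) {
  keys.push(`_metadata`)
  queryParameters.push(metadataValue)
}

const sql = `INSERT into documents (${keys.join(`, `)}) VALUES (${keys.map(() => `?`).join(`, `)})`
console.log(sql)
// INSERT into documents (id, name, _metadata) VALUES (?, ?, ?)
console.log(queryParameters)
// [ 'doc-1', 'hello', 'import-job' ]
```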
@@ -98,13 +104,19 @@ class PowerSyncTransactor {
       async (tableName, mutation2, serializeValue) => {
         const values = serializeValue(mutation2.modified);
         const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`);
+        const queryParameters = Object.values(values);
+        const metadataValue = this.processMutationMetadata(mutation2);
+        if (metadataValue != null) {
+          keys.push(`_metadata`);
+          queryParameters.push(metadataValue);
+        }
         await context.execute(
           `
           UPDATE ${tableName}
           SET ${keys.map((key) => `${key} = ?`).join(`, `)}
           WHERE id = ?
           `,
-          [...Object.values(values), asPowerSyncRecord(mutation2.modified).id]
+          [...queryParameters, asPowerSyncRecord(mutation2.modified).id]
         );
       }
     );
@@ -116,12 +128,22 @@ class PowerSyncTransactor {
       context,
       waitForCompletion,
       async (tableName, mutation2) => {
-        await context.execute(
-          `
-          DELETE FROM ${tableName} WHERE id = ?
-          `,
-          [asPowerSyncRecord(mutation2.original).id]
-        );
+        const metadataValue = this.processMutationMetadata(mutation2);
+        if (metadataValue != null) {
+          await context.execute(
+            `
+            UPDATE ${tableName} SET _deleted = TRUE, _metadata = ? WHERE id = ?
+            `,
+            [metadataValue, asPowerSyncRecord(mutation2.original).id]
+          );
+        } else {
+          await context.execute(
+            `
+            DELETE FROM ${tableName} WHERE id = ?
+            `,
+            [asPowerSyncRecord(mutation2.original).id]
+          );
+        }
       }
     );
   }
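Note the asymmetry in the delete path: when metadata is present, the transactor issues `UPDATE ... SET _deleted = TRUE, _metadata = ?` rather than a plain `DELETE`. The package's TypeScript source (src/PowerSyncTransactor.ts) notes that delete operations with metadata "require a different approach" and that this statement "will delete the record", presumably because tables configured with `trackMetadata` route writes through a PowerSync view that understands the `_deleted` and `_metadata` columns. A sketch of the branching, with a hypothetical table and ids:

```typescript
// Sketch of the two delete paths above; `documents` and the ids are hypothetical.
async function deleteWithOptionalMetadata(
  execute: (sql: string, params: Array<unknown>) => Promise<void>,
  id: string,
  metadataValue: string | null,
) {
  if (metadataValue != null) {
    // Metadata present: the view-level update still deletes the record.
    await execute(
      `UPDATE documents SET _deleted = TRUE, _metadata = ? WHERE id = ?`,
      [metadataValue, id],
    )
  } else {
    // No metadata: a plain delete suffices.
    await execute(`DELETE FROM documents WHERE id = ?`, [id])
  }
}
```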
@@ -132,11 +154,7 @@ class PowerSyncTransactor {
    * - Returning the last pending diff operation if required
    */
   async handleOperationWithCompletion(mutation, context, waitForCompletion, handler) {
-    if (typeof mutation.collection.config.utils?.getMeta != `function`) {
-      throw new Error(`Could not get tableName from mutation's collection config.
-      The provided mutation might not have originated from PowerSync.`);
-    }
-    const { tableName, trackedTableName, serializeValue } = mutation.collection.config.utils.getMeta();
+    const { tableName, trackedTableName, serializeValue } = this.getMutationCollectionMeta(mutation);
     await handler(sanitizeSQL`${tableName}`, mutation, serializeValue);
     if (!waitForCompletion) {
       return null;
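The per-operation handlers now resolve table details through one guarded helper rather than repeating the `getMeta` check inline. For orientation, the shape they destructure looks roughly like this; it is inferred from the `getMeta()` implementation in this package's sources, and the published type is `PowerSyncCollectionMeta` in `./definitions`:

```typescript
// Inferred sketch, not the published type; see PowerSyncCollectionMeta in ./definitions.
type PowerSyncCollectionMetaSketch = {
  tableName: string // the PowerSync view name the collection writes to
  trackedTableName: string // per-collection diff-trigger table used for sync
  metadataIsTracked: boolean // mirrors the table's trackMetadata flag
  serializeValue: (value: any) => Record<string, unknown>
}
```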
@@ -151,6 +169,35 @@ class PowerSyncTransactor {
       timestamp: diffOperation.timestamp
     };
   }
+  getMutationCollectionMeta(mutation) {
+    if (typeof mutation.collection.config.utils?.getMeta != `function`) {
+      throw new Error(`Collection is not a PowerSync collection.`);
+    }
+    return mutation.collection.config.utils.getMeta();
+  }
+  /**
+   * Processes collection mutation metadata for persistence to the database.
+   * We only support storing string metadata.
+   * @returns null if no metadata should be stored.
+   */
+  processMutationMetadata(mutation) {
+    const { metadataIsTracked } = this.getMutationCollectionMeta(mutation);
+    if (!metadataIsTracked) {
+      if (typeof mutation.metadata != `undefined`) {
+        this.database.logger.warn(
+          `Metadata provided for collection ${mutation.collection.id} but the PowerSync table does not track metadata. The PowerSync table should be configured with trackMetadata: true.`,
+          mutation.metadata
+        );
+      }
+      return null;
+    } else if (typeof mutation.metadata == `undefined`) {
+      return null;
+    } else if (typeof mutation.metadata == `string`) {
+      return mutation.metadata;
+    } else {
+      return JSON.stringify(mutation.metadata);
+    }
+  }
 }
 export {
   PowerSyncTransactor
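The coercion rules in `processMutationMetadata` are compact enough to misread, so here is a standalone mirror of the logic with its outcomes spelled out; this is an illustration, not the library's export:

```typescript
// Mirror of processMutationMetadata's coercion, for illustration only.
function coerceMetadata(metadata: unknown, metadataIsTracked: boolean): string | null {
  if (!metadataIsTracked) {
    // The library additionally logs a warning when metadata was provided here.
    return null
  }
  if (typeof metadata === `undefined`) return null
  if (typeof metadata === `string`) return metadata
  return JSON.stringify(metadata)
}

coerceMetadata(undefined, true) // null: nothing stored
coerceMetadata(`audit-123`, true) // `audit-123`: strings pass through
coerceMetadata({ user: `alice` }, true) // `{"user":"alice"}`: JSON.stringify fallback
coerceMetadata(`audit-123`, false) // null: dropped, with a warning in the library
```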
@@ -1 +1 @@
(PowerSyncTransactor.js.map regenerated in step with PowerSyncTransactor.js; its embedded sourcesContent mirrors the updated src/PowerSyncTransactor.ts, including the new getMutationCollectionMeta and processMutationMetadata helpers and the metadata-aware delete path. Minified mappings omitted.)
IAAI,UAAU,cAAc;AAEjE,QAAI,CAAC,mBAAmB;AACtB,aAAO;AAAA,IACT;AAGA,UAAM,gBAAgB,MAAM,QAAQ;AAAA,MAClC,wCAAwC,gBAAgB;AAAA,IAAA;AAE1D,WAAO;AAAA,MACL;AAAA,MACA,IAAI,cAAc;AAAA,MAClB,WAAW,wBAAwB,SAAS,IAAI;AAAA,MAChD,WAAW,cAAc;AAAA,IAAA;AAAA,EAE7B;AAAA,EAEU,0BACR,UAC8B;AAC9B,QACE,OAAQ,SAAS,WAAW,OAAe,OAAO,WAAW,YAC7D;AACA,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC7D;AACA,WACE,SAAS,WACN,OACH,MAAM,QAAA;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOU,wBACR,UACe;AACf,UAAM,EAAE,kBAAA,IAAsB,KAAK,0BAA0B,QAAQ;AACrE,QAAI,CAAC,mBAAmB;AAEtB,UAAI,OAAO,SAAS,YAAY,aAAa;AAE3C,aAAK,SAAS,OAAO;AAAA,UACnB,oCAAoC,SAAS,WAAW,EAAE;AAAA,UAC1D,SAAS;AAAA,QAAA;AAAA,MAEb;AACA,aAAO;AAAA,IACT,WAAW,OAAO,SAAS,YAAY,aAAa;AAClD,aAAO;AAAA,IACT,WAAW,OAAO,SAAS,YAAY,UAAU;AAC/C,aAAO,SAAS;AAAA,IAClB,OAAO;AACL,aAAO,KAAK,UAAU,SAAS,QAAQ;AAAA,IACzC;AAAA,EACF;AACF;"}
|
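The updated transactor (see the PowerSyncTransactor diff above) appends a `_metadata` column to inserts and updates, and turns deletes that carry metadata into soft deletes. A minimal sketch of exercising this from the collection API, assuming TanStack DB's optional per-operation `{ metadata }` config and a hypothetical `db` database instance; `APP_SCHEMA` is hypothetical too (a declaration that opts into `trackMetadata` is sketched after the powersync.js hunks below):

```typescript
import { createCollection } from "@tanstack/db"
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"

// Hypothetical database and schema; the documents table must be declared
// with trackMetadata: true for _metadata to be written at all.
const documents = createCollection(
  powerSyncCollectionOptions({
    database: db,
    table: APP_SCHEMA.props.documents,
  })
)

// Object metadata is JSON.stringify-ed into _metadata; strings are stored as-is.
documents.insert(
  { id: crypto.randomUUID(), name: "draft" },
  { metadata: { reason: "user-created" } }
)

// A delete that carries metadata is persisted as a soft delete:
//   UPDATE <table> SET _deleted = TRUE, _metadata = ? WHERE id = ?
documents.delete("some-id", { metadata: "cleanup" })
```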
package/dist/esm/definitions.d.ts
CHANGED
|
@@ -179,6 +179,10 @@ export type PowerSyncCollectionMeta<TTable extends Table = Table> = {
|
|
|
179
179
|
* Serializes a collection value to the SQLite type
|
|
180
180
|
*/
|
|
181
181
|
serializeValue: (value: any) => ExtractedTable<TTable>;
|
|
182
|
+
/**
|
|
183
|
+
* Whether the PowerSync table tracks metadata.
|
|
184
|
+
*/
|
|
185
|
+
metadataIsTracked: boolean;
|
|
182
186
|
};
|
|
183
187
|
/**
|
|
184
188
|
* A CollectionConfig which includes utilities for PowerSync.
|
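The `metadataIsTracked` flag added to `PowerSyncCollectionMeta` above is exposed through the collection's `getMeta()` utility. A sketch of reading it, assuming TanStack DB hoists the options' `utils` onto the created collection (`documents` is the collection from the earlier sketch):

```typescript
const meta = documents.utils.getMeta()
// meta: { tableName, trackedTableName, serializeValue, metadataIsTracked }
if (!meta.metadataIsTracked) {
  // Metadata passed to insert/update/delete is dropped with a logged warning.
  console.warn(`${meta.tableName} does not persist mutation metadata`)
}
```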
package/dist/esm/definitions.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"definitions.js","sources":["../../src/definitions.ts"],"sourcesContent":["import type { AbstractPowerSyncDatabase, Table } from '@powersync/common'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n InferSchemaOutput,\n} from '@tanstack/db'\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n OptionalExtractedTable,\n PowerSyncRecord,\n} from './helpers'\n\n/**\n * Small helper which determines the output type if:\n * - Standard SQLite types are to be used OR\n * - If the provided schema should be used.\n */\nexport type InferPowerSyncOutputType<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<PowerSyncRecord> = never,\n> = TSchema extends never ? ExtractedTable<TTable> : InferSchemaOutput<TSchema>\n\n/**\n * A mapping type for custom serialization of object properties to SQLite-compatible values.\n *\n * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms\n * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization.\n *\n * ## Generics\n * - `TOutput`: The input object type, representing the row data to be serialized.\n * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema.\n *\n * ## Usage\n * Use this type to define a map of serialization functions for specific keys when you need custom handling\n * (e.g., converting complex objects, formatting dates, or handling enums).\n *\n * Example:\n * ```ts\n * const serializer: CustomSQLiteSerializer<MyRowType, MySQLiteType> = {\n * createdAt: (date) => date.toISOString(),\n * status: (status) => status ? 1 : 0,\n * meta: (meta) => JSON.stringify(meta),\n * };\n * ```\n *\n * ## Behavior\n * - Each key maps to a function that receives the value and returns the SQLite-compatible value.\n * - Used by `serializeForSQLite` to override default serialization for specific columns.\n */\nexport type CustomSQLiteSerializer<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = Partial<{\n [Key in keyof TOutput]: (\n value: TOutput[Key],\n ) => Key extends keyof TSQLite ? TSQLite[Key] : never\n}>\n\nexport type SerializerConfig<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = {\n /**\n * Optional partial serializer object for customizing how individual columns are serialized for SQLite.\n *\n * This should be a partial map of column keys to serialization functions, following the\n * {@link CustomSQLiteSerializer} type. 
Each function receives the column value and returns a value\n * compatible with SQLite storage.\n *\n * If not provided for a column, the default behavior is used:\n * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified.\n * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0.\n *\n * Use this option to override serialization for specific columns, such as formatting dates, handling enums,\n * or serializing complex objects.\n *\n * Example:\n * ```typescript\n * serializer: {\n * createdAt: (date) => date.getTime(), // Store as timestamp\n * meta: (meta) => JSON.stringify(meta), // Custom object serialization\n * }\n * ```\n */\n serializer?: CustomSQLiteSerializer<TOutput, TSQLite>\n\n /**\n * Application logic should ensure that incoming synced data is always valid.\n * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error.\n * Use this callback to react to deserialization errors.\n */\n onDeserializationError: (error: StandardSchemaV1.FailureResult) => void\n}\n\n/**\n * Config for when TInput and TOutput are both the SQLite types.\n */\nexport type ConfigWithSQLiteTypes = {}\n\n/**\n * Config where TInput is the SQLite types while TOutput can be defined by TSchema.\n * We can use the same schema to validate TInput and incoming SQLite changes.\n */\nexport type ConfigWithSQLiteInputType<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types.\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n}\n\n/**\n * Config where TInput and TOutput have arbitrarily typed values.\n * The keys of the types need to equal the SQLite types.\n * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput.\n */\nexport type ConfigWithArbitraryCollectionTypes<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n /**\n * Schema for deserializing and validating input data from the sync stream.\n *\n * This schema defines how to transform and validate data coming from SQLite types (as stored in the database)\n * into the desired output types (`TOutput`) expected by your application or validation logic.\n *\n * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules\n * for each column. This is especially useful when your application expects richer types (e.g., Date, enums, objects)\n * than what SQLite natively supports.\n *\n * Use this to ensure that incoming data from the sync stream is properly converted and validated before use.\n *\n * Example:\n * ```typescript\n * deserializationSchema: z.object({\n * createdAt: z.preprocess((val) => new Date(val as string), z.date()),\n * meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... 
})),\n * })\n * ```\n *\n * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage\n * and your application's expected types.\n */\n deserializationSchema: StandardSchemaV1<\n ExtractedTable<TTable>,\n StandardSchemaV1.InferOutput<TSchema>\n >\n}\nexport type BasePowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1 = never,\n> = Omit<\n BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>,\n `onInsert` | `onUpdate` | `onDelete` | `getKey`\n> & {\n /** The PowerSync schema Table definition */\n table: TTable\n /** The PowerSync database instance */\n database: AbstractPowerSyncDatabase\n /**\n * The maximum number of documents to read from the SQLite table\n * in a single batch during the initial sync between PowerSync and the\n * in-memory TanStack DB collection.\n *\n * @remarks\n * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.\n * - Larger values reduce the number of round trips to the storage\n * engine but increase memory usage per batch.\n * - Smaller values may lower memory usage and allow earlier\n * streaming of initial results, at the cost of more query calls.\n */\n syncBatchSize?: number\n}\n\n/**\n * Configuration interface for PowerSync collection options.\n * @template TTable - The PowerSync table schema definition\n * @template TSchema - The validation schema type\n */\n/**\n * Configuration options for creating a PowerSync collection.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport type PowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<any> = never,\n> = BasePowerSyncCollectionConfig<TTable, TSchema> &\n (\n | ConfigWithSQLiteTypes\n | ConfigWithSQLiteInputType<TTable, TSchema>\n | ConfigWithArbitraryCollectionTypes<TTable, TSchema>\n )\n\n/**\n * Metadata for the PowerSync Collection.\n */\nexport type PowerSyncCollectionMeta<TTable extends Table = Table> = {\n /**\n * The SQLite table representing the collection.\n */\n tableName: string\n /**\n * The internal table used to track diffs for the collection.\n */\n trackedTableName: string\n\n /**\n * Serializes a collection value to the SQLite type\n */\n serializeValue: (value: any) => ExtractedTable<TTable>\n}\n\n/**\n * A CollectionConfig which includes utilities for PowerSync.\n */\nexport type EnhancedPowerSyncCollectionConfig<\n TTable extends Table,\n OutputType extends Record<string, unknown> = Record<string, unknown>,\n TSchema extends StandardSchemaV1 = never,\n> = CollectionConfig<OutputType, string, TSchema> & {\n id?: string\n utils: PowerSyncCollectionUtils<TTable>\n schema?: TSchema\n}\n\n/**\n * Collection-level utilities for PowerSync.\n */\nexport type PowerSyncCollectionUtils<TTable extends Table = Table> = {\n getMeta: () => PowerSyncCollectionMeta<TTable>\n}\n\n/**\n * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}.\n */\nexport const DEFAULT_BATCH_SIZE = 1000\n"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"definitions.js","sources":["../../src/definitions.ts"],"sourcesContent":["import type { AbstractPowerSyncDatabase, Table } from '@powersync/common'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n InferSchemaOutput,\n} from '@tanstack/db'\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n OptionalExtractedTable,\n PowerSyncRecord,\n} from './helpers'\n\n/**\n * Small helper which determines the output type if:\n * - Standard SQLite types are to be used OR\n * - If the provided schema should be used.\n */\nexport type InferPowerSyncOutputType<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<PowerSyncRecord> = never,\n> = TSchema extends never ? ExtractedTable<TTable> : InferSchemaOutput<TSchema>\n\n/**\n * A mapping type for custom serialization of object properties to SQLite-compatible values.\n *\n * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms\n * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization.\n *\n * ## Generics\n * - `TOutput`: The input object type, representing the row data to be serialized.\n * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema.\n *\n * ## Usage\n * Use this type to define a map of serialization functions for specific keys when you need custom handling\n * (e.g., converting complex objects, formatting dates, or handling enums).\n *\n * Example:\n * ```ts\n * const serializer: CustomSQLiteSerializer<MyRowType, MySQLiteType> = {\n * createdAt: (date) => date.toISOString(),\n * status: (status) => status ? 1 : 0,\n * meta: (meta) => JSON.stringify(meta),\n * };\n * ```\n *\n * ## Behavior\n * - Each key maps to a function that receives the value and returns the SQLite-compatible value.\n * - Used by `serializeForSQLite` to override default serialization for specific columns.\n */\nexport type CustomSQLiteSerializer<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = Partial<{\n [Key in keyof TOutput]: (\n value: TOutput[Key],\n ) => Key extends keyof TSQLite ? TSQLite[Key] : never\n}>\n\nexport type SerializerConfig<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = {\n /**\n * Optional partial serializer object for customizing how individual columns are serialized for SQLite.\n *\n * This should be a partial map of column keys to serialization functions, following the\n * {@link CustomSQLiteSerializer} type. 
Each function receives the column value and returns a value\n * compatible with SQLite storage.\n *\n * If not provided for a column, the default behavior is used:\n * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified.\n * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0.\n *\n * Use this option to override serialization for specific columns, such as formatting dates, handling enums,\n * or serializing complex objects.\n *\n * Example:\n * ```typescript\n * serializer: {\n * createdAt: (date) => date.getTime(), // Store as timestamp\n * meta: (meta) => JSON.stringify(meta), // Custom object serialization\n * }\n * ```\n */\n serializer?: CustomSQLiteSerializer<TOutput, TSQLite>\n\n /**\n * Application logic should ensure that incoming synced data is always valid.\n * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error.\n * Use this callback to react to deserialization errors.\n */\n onDeserializationError: (error: StandardSchemaV1.FailureResult) => void\n}\n\n/**\n * Config for when TInput and TOutput are both the SQLite types.\n */\nexport type ConfigWithSQLiteTypes = {}\n\n/**\n * Config where TInput is the SQLite types while TOutput can be defined by TSchema.\n * We can use the same schema to validate TInput and incoming SQLite changes.\n */\nexport type ConfigWithSQLiteInputType<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types.\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n}\n\n/**\n * Config where TInput and TOutput have arbitrarily typed values.\n * The keys of the types need to equal the SQLite types.\n * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput.\n */\nexport type ConfigWithArbitraryCollectionTypes<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n /**\n * Schema for deserializing and validating input data from the sync stream.\n *\n * This schema defines how to transform and validate data coming from SQLite types (as stored in the database)\n * into the desired output types (`TOutput`) expected by your application or validation logic.\n *\n * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules\n * for each column. This is especially useful when your application expects richer types (e.g., Date, enums, objects)\n * than what SQLite natively supports.\n *\n * Use this to ensure that incoming data from the sync stream is properly converted and validated before use.\n *\n * Example:\n * ```typescript\n * deserializationSchema: z.object({\n * createdAt: z.preprocess((val) => new Date(val as string), z.date()),\n * meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... 
})),\n * })\n * ```\n *\n * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage\n * and your application's expected types.\n */\n deserializationSchema: StandardSchemaV1<\n ExtractedTable<TTable>,\n StandardSchemaV1.InferOutput<TSchema>\n >\n}\nexport type BasePowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1 = never,\n> = Omit<\n BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>,\n `onInsert` | `onUpdate` | `onDelete` | `getKey`\n> & {\n /** The PowerSync schema Table definition */\n table: TTable\n /** The PowerSync database instance */\n database: AbstractPowerSyncDatabase\n /**\n * The maximum number of documents to read from the SQLite table\n * in a single batch during the initial sync between PowerSync and the\n * in-memory TanStack DB collection.\n *\n * @remarks\n * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.\n * - Larger values reduce the number of round trips to the storage\n * engine but increase memory usage per batch.\n * - Smaller values may lower memory usage and allow earlier\n * streaming of initial results, at the cost of more query calls.\n */\n syncBatchSize?: number\n}\n\n/**\n * Configuration interface for PowerSync collection options.\n * @template TTable - The PowerSync table schema definition\n * @template TSchema - The validation schema type\n */\n/**\n * Configuration options for creating a PowerSync collection.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport type PowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<any> = never,\n> = BasePowerSyncCollectionConfig<TTable, TSchema> &\n (\n | ConfigWithSQLiteTypes\n | ConfigWithSQLiteInputType<TTable, TSchema>\n | ConfigWithArbitraryCollectionTypes<TTable, TSchema>\n )\n\n/**\n * Metadata for the PowerSync Collection.\n */\nexport type PowerSyncCollectionMeta<TTable extends Table = Table> = {\n /**\n * The SQLite table representing the collection.\n */\n tableName: string\n /**\n * The internal table used to track diffs for the collection.\n */\n trackedTableName: string\n\n /**\n * Serializes a collection value to the SQLite type\n */\n serializeValue: (value: any) => ExtractedTable<TTable>\n\n /**\n * Whether the PowerSync table tracks metadata.\n */\n metadataIsTracked: boolean\n}\n\n/**\n * A CollectionConfig which includes utilities for PowerSync.\n */\nexport type EnhancedPowerSyncCollectionConfig<\n TTable extends Table,\n OutputType extends Record<string, unknown> = Record<string, unknown>,\n TSchema extends StandardSchemaV1 = never,\n> = CollectionConfig<OutputType, string, TSchema> & {\n id?: string\n utils: PowerSyncCollectionUtils<TTable>\n schema?: TSchema\n}\n\n/**\n * Collection-level utilities for PowerSync.\n */\nexport type PowerSyncCollectionUtils<TTable extends Table = Table> = {\n getMeta: () => PowerSyncCollectionMeta<TTable>\n}\n\n/**\n * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}.\n */\nexport const DEFAULT_BATCH_SIZE = 1000\n"],"names":[],"mappings":"AAsRO,MAAM,qBAAqB;"}
|
package/dist/esm/powersync.js
CHANGED
|
@@ -16,7 +16,7 @@ function powerSyncCollectionOptions(config) {
|
|
|
16
16
|
const deserializationSchema = `deserializationSchema` in config ? config.deserializationSchema : null;
|
|
17
17
|
const serializer = `serializer` in config ? config.serializer : void 0;
|
|
18
18
|
const onDeserializationError = `onDeserializationError` in config ? config.onDeserializationError : void 0;
|
|
19
|
-
const { viewName } = table;
|
|
19
|
+
const { viewName, trackMetadata: metadataIsTracked } = table;
|
|
20
20
|
const deserializeSyncRow = (value) => {
|
|
21
21
|
const validationSchema = deserializationSchema || schema;
|
|
22
22
|
const validation = validationSchema[`~standard`].validate(value);
|
|
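`metadataIsTracked` is read straight off the PowerSync `Table`'s `trackMetadata` option (the destructuring change above). A sketch of how the hypothetical `APP_SCHEMA` from the earlier example would opt in, using `@powersync/common`'s table options:

```typescript
import { Schema, Table, column } from "@powersync/common"

// trackMetadata adds the hidden _metadata column that
// powerSyncCollectionOptions surfaces as metadataIsTracked.
const APP_SCHEMA = new Schema({
  documents: new Table(
    { name: column.text },
    { trackMetadata: true }
  ),
})
```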
@@ -182,6 +182,7 @@ function powerSyncCollectionOptions(config) {
|
|
|
182
182
|
getMeta: () => ({
|
|
183
183
|
tableName: viewName,
|
|
184
184
|
trackedTableName,
|
|
185
|
+
metadataIsTracked,
|
|
185
186
|
serializeValue: (value) => serializeForSQLite(
|
|
186
187
|
value,
|
|
187
188
|
// This is required by the input generic
|
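With `metadataIsTracked` now returned from `getMeta()` (hunk above), the transactor can detect tables that do not track metadata: supplied metadata is then dropped and a warning is logged ("Metadata provided for collection <id> but the PowerSync table does not track metadata. The PowerSync table should be configured with trackMetadata: true."). A sketch of that path, assuming TanStack DB's three-argument `update(key, config, updater)` overload and a hypothetical `untrackedDocs` collection whose table omits `trackMetadata`:

```typescript
// untrackedDocs is hypothetical: its Table was declared without trackMetadata.
// The row is still updated, but the metadata below is not persisted; the
// transactor logs a warning via database.logger.warn instead.
untrackedDocs.update(
  "some-id",
  { metadata: { audit: true } },
  (draft) => {
    draft.name = "renamed"
  }
)
```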
package/dist/esm/powersync.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"powersync.js","sources":["../../src/powersync.ts"],"sourcesContent":["import { DiffTriggerOperation, sanitizeSQL } from '@powersync/common'\nimport { PendingOperationStore } from './PendingOperationStore'\nimport { PowerSyncTransactor } from './PowerSyncTransactor'\nimport { DEFAULT_BATCH_SIZE } from './definitions'\nimport { asPowerSyncRecord, mapOperation } from './helpers'\nimport { convertTableToSchema } from './schema'\nimport { serializeForSQLite } from './serialization'\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n ExtractedTableColumns,\n MapBaseColumnType,\n OptionalExtractedTable,\n} from './helpers'\nimport type {\n BasePowerSyncCollectionConfig,\n ConfigWithArbitraryCollectionTypes,\n ConfigWithSQLiteInputType,\n ConfigWithSQLiteTypes,\n CustomSQLiteSerializer,\n EnhancedPowerSyncCollectionConfig,\n InferPowerSyncOutputType,\n PowerSyncCollectionConfig,\n PowerSyncCollectionUtils,\n} from './definitions'\nimport type { PendingOperation } from './PendingOperationStore'\nimport type { SyncConfig } from '@tanstack/db'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type { Table, TriggerDiffRecord } from '@powersync/common'\n\n/**\n * Creates PowerSync collection options for use with a standard Collection.\n *\n * @template TTable - The SQLite-based typing\n * @template TSchema - The validation schema type (optionally supports a custom input type)\n * @param config - Configuration options for the PowerSync collection\n * @returns Collection options with utilities\n */\n\n// Overload 1: No schema is provided\n\n/**\n * Creates a PowerSync collection configuration with basic default validation.\n * Input and Output types are the SQLite column types.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * type Document = (typeof APP_SCHEMA)[\"types\"][\"documents\"]\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<TTable extends Table = Table>(\n config: BasePowerSyncCollectionConfig<TTable, never> & ConfigWithSQLiteTypes,\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n OptionalExtractedTable<TTable>,\n never\n>\n\n// Overload 2: Schema is provided and the TInput matches SQLite types.\n\n/**\n * Creates a PowerSync collection configuration with schema validation.\n *\n * The input types satisfy the SQLite column types.\n *\n * The output types are defined by the provided schema. This schema can enforce additional\n * validation or type transforms.\n * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard\n * serialization implementation to serialize column values. Custom or advanced types require providing additional\n * serializer specifications. Partial column overrides can be supplied to `serializer`.\n *\n * @example\n * ```typescript\n * import { z } from \"zod\"\n *\n * // The PowerSync SQLite schema\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * // Dates are stored as ISO date strings in SQLite\n * created_at: column.text\n * }),\n * })\n *\n * // Advanced Zod validations. 
The output type of this schema\n * // is constrained to the SQLite schema of APP_SCHEMA\n * const schema = z.object({\n * id: z.string(),\n * // Notice that `name` is not nullable (is required) here and it has additional validation\n * name: z.string().min(3, { message: \"Should be at least 3 characters\" }).nullable(),\n * // The input type is still the SQLite string type. While collections will output smart Date instances.\n * created_at: z.string().transform(val => new Date(val))\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * schema,\n * serializer: {\n * // The default is toISOString, this is just to demonstrate custom overrides\n * created_at: (outputValue) => outputValue.toISOString(),\n * },\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types. We can use the supplied schema to validate sync input\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n>(\n config: BasePowerSyncCollectionConfig<TTable, TSchema> &\n ConfigWithSQLiteInputType<TTable, TSchema>,\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n InferPowerSyncOutputType<TTable, TSchema>,\n TSchema\n> & {\n schema: TSchema\n}\n\n// Overload 3: Schema is provided with arbitrary TInput and TOutput\n/**\n * Creates a PowerSync collection configuration with schema validation.\n *\n * The input types are not linked to the internal SQLite table types. This can\n * give greater flexibility, e.g. by accepting rich types as input for `insert` or `update` operations.\n * An additional `deserializationSchema` is required in order to process incoming SQLite updates to the output type.\n *\n * The output types are defined by the provided schema. This schema can enforce additional\n * validation or type transforms.\n * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard\n * serialization implementation to serialize column values. Custom or advanced types require providing additional\n * serializer specifications. Partial column overrides can be supplied to `serializer`.\n *\n * @example\n * ```typescript\n * import { z } from \"zod\"\n *\n * // The PowerSync SQLite schema\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * // Booleans are represented as integers in SQLite\n * is_active: column.integer\n * }),\n * })\n *\n * // Advanced Zod validations.\n * // We accept boolean values as input for operations and expose Booleans in query results\n * const schema = z.object({\n * id: z.string(),\n * isActive: z.boolean(), // TInput and TOutput are boolean\n * })\n *\n * // The deserializationSchema converts the SQLite synced INTEGER (0/1) values to booleans.\n * const deserializationSchema = z.object({\n * id: z.string(),\n * isActive: z.number().nullable().transform((val) => val == null ? 
true : val > 0),\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * schema,\n * deserializationSchema,\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n>(\n config: BasePowerSyncCollectionConfig<TTable, TSchema> &\n ConfigWithArbitraryCollectionTypes<TTable, TSchema>,\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n InferPowerSyncOutputType<TTable, TSchema>,\n TSchema\n> & {\n utils: PowerSyncCollectionUtils<TTable>\n schema: TSchema\n}\n\n/**\n * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations.\n */\n\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<any> = never,\n>(config: PowerSyncCollectionConfig<TTable, TSchema>) {\n const {\n database,\n table,\n schema: inputSchema,\n syncBatchSize = DEFAULT_BATCH_SIZE,\n ...restConfig\n } = config\n\n const deserializationSchema =\n `deserializationSchema` in config ? config.deserializationSchema : null\n const serializer = `serializer` in config ? config.serializer : undefined\n const onDeserializationError =\n `onDeserializationError` in config\n ? config.onDeserializationError\n : undefined\n\n // The SQLite table type\n type TableType = ExtractedTable<TTable>\n\n // The collection output type\n type OutputType = InferPowerSyncOutputType<TTable, TSchema>\n\n const { viewName } = table\n\n /**\n * Deserializes data from the incoming sync stream\n */\n const deserializeSyncRow = (value: TableType): OutputType => {\n const validationSchema = deserializationSchema || schema\n const validation = validationSchema[`~standard`].validate(value)\n if (`value` in validation) {\n return validation.value\n } else if (`issues` in validation) {\n const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}`\n database.logger.error(issueMessage)\n onDeserializationError!(validation)\n throw new Error(issueMessage)\n } else {\n const unknownErrorMessage = `Unknown deserialization error for ${viewName}`\n database.logger.error(unknownErrorMessage)\n onDeserializationError!({ issues: [{ message: unknownErrorMessage }] })\n throw new Error(unknownErrorMessage)\n }\n }\n\n // We can do basic runtime validations for columns if not explicit schema has been provided\n const schema = inputSchema ?? 
(convertTableToSchema(table) as TSchema)\n /**\n * The onInsert, onUpdate, and onDelete handlers should only return\n * after we have written the changes to TanStack DB.\n * We currently only write to TanStack DB from a diff trigger.\n * We wait for the diff trigger to observe the change,\n * and only then return from the on[X] handlers.\n * This ensures that when the transaction is reported as\n * complete to the caller, the in-memory state is already\n * consistent with the database.\n */\n const pendingOperationStore = PendingOperationStore.GLOBAL\n // Keep the tracked table unique in case of multiple tabs.\n const trackedTableName = `__${viewName}_tracking_${Math.floor(\n Math.random() * 0xffffffff,\n )\n .toString(16)\n .padStart(8, `0`)}`\n\n const transactor = new PowerSyncTransactor({\n database,\n })\n\n /**\n * \"sync\"\n * Notice that this describes the Sync between the local SQLite table\n * and the in-memory tanstack-db collection.\n */\n const sync: SyncConfig<OutputType, string> = {\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n const abortController = new AbortController()\n\n // The sync function needs to be synchronous\n async function start() {\n database.logger.info(\n `Sync is starting for ${viewName} into ${trackedTableName}`,\n )\n database.onChangeWithCallback(\n {\n onChange: async () => {\n await database\n .writeTransaction(async (context) => {\n begin()\n const operations = await context.getAll<TriggerDiffRecord>(\n `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`,\n )\n const pendingOperations: Array<PendingOperation> = []\n\n for (const op of operations) {\n const { id, operation, timestamp, value } = op\n const parsedValue = deserializeSyncRow({\n id,\n ...JSON.parse(value),\n })\n const parsedPreviousValue =\n op.operation == DiffTriggerOperation.UPDATE\n ? deserializeSyncRow({\n id,\n ...JSON.parse(op.previous_value),\n })\n : undefined\n write({\n type: mapOperation(operation),\n value: parsedValue,\n previousValue: parsedPreviousValue,\n })\n pendingOperations.push({\n id,\n operation,\n timestamp,\n tableName: viewName,\n })\n }\n\n // clear the current operations\n await context.execute(`DELETE FROM ${trackedTableName}`)\n\n commit()\n pendingOperationStore.resolvePendingFor(pendingOperations)\n })\n .catch((error) => {\n database.logger.error(\n `An error has been detected in the sync handler`,\n error,\n )\n })\n },\n },\n {\n signal: abortController.signal,\n triggerImmediate: false,\n tables: [trackedTableName],\n },\n )\n\n const disposeTracking = await database.triggers.createDiffTrigger({\n source: viewName,\n destination: trackedTableName,\n when: {\n [DiffTriggerOperation.INSERT]: `TRUE`,\n [DiffTriggerOperation.UPDATE]: `TRUE`,\n [DiffTriggerOperation.DELETE]: `TRUE`,\n },\n hooks: {\n beforeCreate: async (context) => {\n let currentBatchCount = syncBatchSize\n let cursor = 0\n while (currentBatchCount == syncBatchSize) {\n begin()\n const batchItems = await context.getAll<TableType>(\n sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? 
OFFSET ?`,\n [syncBatchSize, cursor],\n )\n currentBatchCount = batchItems.length\n cursor += currentBatchCount\n for (const row of batchItems) {\n write({\n type: `insert`,\n value: deserializeSyncRow(row),\n })\n }\n commit()\n }\n markReady()\n database.logger.info(\n `Sync is ready for ${viewName} into ${trackedTableName}`,\n )\n },\n },\n })\n\n // If the abort controller was aborted while processing the request above\n if (abortController.signal.aborted) {\n await disposeTracking()\n } else {\n abortController.signal.addEventListener(\n `abort`,\n () => {\n disposeTracking()\n },\n { once: true },\n )\n }\n }\n\n start().catch((error) =>\n database.logger.error(\n `Could not start syncing process for ${viewName} into ${trackedTableName}`,\n error,\n ),\n )\n\n return () => {\n database.logger.info(\n `Sync has been stopped for ${viewName} into ${trackedTableName}`,\n )\n abortController.abort()\n }\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n const getKey = (record: OutputType) => asPowerSyncRecord(record).id\n\n const outputConfig: EnhancedPowerSyncCollectionConfig<\n TTable,\n OutputType,\n TSchema\n > = {\n ...restConfig,\n schema,\n getKey,\n // Syncing should start immediately since we need to monitor the changes for mutations\n startSync: true,\n sync,\n onInsert: async (params) => {\n // The transaction here should only ever contain a single insert mutation\n return await transactor.applyTransaction(params.transaction)\n },\n onUpdate: async (params) => {\n // The transaction here should only ever contain a single update mutation\n return await transactor.applyTransaction(params.transaction)\n },\n onDelete: async (params) => {\n // The transaction here should only ever contain a single delete mutation\n return await transactor.applyTransaction(params.transaction)\n },\n utils: {\n getMeta: () => ({\n tableName: viewName,\n trackedTableName,\n serializeValue: (value) =>\n serializeForSQLite(\n value,\n // This is required by the input generic\n table as Table<\n MapBaseColumnType<InferPowerSyncOutputType<TTable, TSchema>>\n >,\n // Coerce serializer to the shape that corresponds to the Table constructed from OutputType\n serializer as CustomSQLiteSerializer<\n OutputType,\n ExtractedTableColumns<Table<MapBaseColumnType<OutputType>>>\n >,\n ),\n }),\n },\n }\n return 
outputConfig\n}\n"],"names":[],"mappings":";;;;;;;AA0NO,SAAS,2BAGd,QAAoD;AACpD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR,gBAAgB;AAAA,IAChB,GAAG;AAAA,EAAA,IACD;AAEJ,QAAM,wBACJ,2BAA2B,SAAS,OAAO,wBAAwB;AACrE,QAAM,aAAa,gBAAgB,SAAS,OAAO,aAAa;AAChE,QAAM,yBACJ,4BAA4B,SACxB,OAAO,yBACP;AAQN,QAAM,EAAE,aAAa;AAKrB,QAAM,qBAAqB,CAAC,UAAiC;AAC3D,UAAM,mBAAmB,yBAAyB;AAClD,UAAM,aAAa,iBAAiB,WAAW,EAAE,SAAS,KAAK;AAC/D,QAAI,WAAW,YAAY;AACzB,aAAO,WAAW;AAAA,IACpB,WAAW,YAAY,YAAY;AACjC,YAAM,eAAe,wCAAwC,QAAQ,aAAa,WAAW,OAAO,IAAI,CAAC,UAAU,GAAG,MAAM,IAAI,MAAM,MAAM,OAAO,EAAE,CAAC;AACtJ,eAAS,OAAO,MAAM,YAAY;AAClC,6BAAwB,UAAU;AAClC,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B,OAAO;AACL,YAAM,sBAAsB,qCAAqC,QAAQ;AACzE,eAAS,OAAO,MAAM,mBAAmB;AACzC,6BAAwB,EAAE,QAAQ,CAAC,EAAE,SAAS,oBAAA,CAAqB,GAAG;AACtE,YAAM,IAAI,MAAM,mBAAmB;AAAA,IACrC;AAAA,EACF;AAGA,QAAM,SAAS,eAAgB,qBAAqB,KAAK;AAWzD,QAAM,wBAAwB,sBAAsB;AAEpD,QAAM,mBAAmB,KAAK,QAAQ,aAAa,KAAK;AAAA,IACtD,KAAK,WAAW;AAAA,EAAA,EAEf,SAAS,EAAE,EACX,SAAS,GAAG,GAAG,CAAC;AAEnB,QAAM,aAAa,IAAI,oBAAoB;AAAA,IACzC;AAAA,EAAA,CACD;AAOD,QAAM,OAAuC;AAAA,IAC3C,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAC5C,YAAM,kBAAkB,IAAI,gBAAA;AAG5B,qBAAe,QAAQ;AACrB,iBAAS,OAAO;AAAA,UACd,wBAAwB,QAAQ,SAAS,gBAAgB;AAAA,QAAA;AAE3D,iBAAS;AAAA,UACP;AAAA,YACE,UAAU,YAAY;AACpB,oBAAM,SACH,iBAAiB,OAAO,YAAY;AACnC,sBAAA;AACA,sBAAM,aAAa,MAAM,QAAQ;AAAA,kBAC/B,iBAAiB,gBAAgB;AAAA,gBAAA;AAEnC,sBAAM,oBAA6C,CAAA;AAEnD,2BAAW,MAAM,YAAY;AAC3B,wBAAM,EAAE,IAAI,WAAW,WAAW,UAAU;AAC5C,wBAAM,cAAc,mBAAmB;AAAA,oBACrC;AAAA,oBACA,GAAG,KAAK,MAAM,KAAK;AAAA,kBAAA,CACpB;AACD,wBAAM,sBACJ,GAAG,aAAa,qBAAqB,SACjC,mBAAmB;AAAA,oBACjB;AAAA,oBACA,GAAG,KAAK,MAAM,GAAG,cAAc;AAAA,kBAAA,CAChC,IACD;AACN,wBAAM;AAAA,oBACJ,MAAM,aAAa,SAAS;AAAA,oBAC5B,OAAO;AAAA,oBACP,eAAe;AAAA,kBAAA,CAChB;AACD,oCAAkB,KAAK;AAAA,oBACrB;AAAA,oBACA;AAAA,oBACA;AAAA,oBACA,WAAW;AAAA,kBAAA,CACZ;AAAA,gBACH;AAGA,sBAAM,QAAQ,QAAQ,eAAe,gBAAgB,EAAE;AAEvD,uBAAA;AACA,sCAAsB,kBAAkB,iBAAiB;AAAA,cAC3D,CAAC,EACA,MAAM,CAAC,UAAU;AAChB,yBAAS,OAAO;AAAA,kBACd;AAAA,kBACA;AAAA,gBAAA;AAAA,cAEJ,CAAC;AAAA,YACL;AAAA,UAAA;AAAA,UAEF;AAAA,YACE,QAAQ,gBAAgB;AAAA,YACxB,kBAAkB;AAAA,YAClB,QAAQ,CAAC,gBAAgB;AAAA,UAAA;AAAA,QAC3B;AAGF,cAAM,kBAAkB,MAAM,SAAS,SAAS,kBAAkB;AAAA,UAChE,QAAQ;AAAA,UACR,aAAa;AAAA,UACb,MAAM;AAAA,YACJ,CAAC,qBAAqB,MAAM,GAAG;AAAA,YAC/B,CAAC,qBAAqB,MAAM,GAAG;AAAA,YAC/B,CAAC,qBAAqB,MAAM,GAAG;AAAA,UAAA;AAAA,UAEjC,OAAO;AAAA,YACL,cAAc,OAAO,YAAY;AAC/B,kBAAI,oBAAoB;AACxB,kBAAI,SAAS;AACb,qBAAO,qBAAqB,eAAe;AACzC,sBAAA;AACA,sBAAM,aAAa,MAAM,QAAQ;AAAA,kBAC/B,4BAA4B,QAAQ;AAAA,kBACpC,CAAC,eAAe,MAAM;AAAA,gBAAA;AAExB,oCAAoB,WAAW;AAC/B,0BAAU;AACV,2BAAW,OAAO,YAAY;AAC5B,wBAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,OAAO,mBAAmB,GAAG;AAAA,kBAAA,CAC9B;AAAA,gBACH;AACA,uBAAA;AAAA,cACF;AACA,wBAAA;AACA,uBAAS,OAAO;AAAA,gBACd,qBAAqB,QAAQ,SAAS,gBAAgB;AAAA,cAAA;AAAA,YAE1D;AAAA,UAAA;AAAA,QACF,CACD;AAGD,YAAI,gBAAgB,OAAO,SAAS;AAClC,gBAAM,gBAAA;AAAA,QACR,OAAO;AACL,0BAAgB,OAAO;AAAA,YACrB;AAAA,YACA,MAAM;AACJ,8BAAA;AAAA,YACF;AAAA,YACA,EAAE,MAAM,KAAA;AAAA,UAAK;AAAA,QAEjB;AAAA,MACF;AAEA,YAAA,EAAQ;AAAA,QAAM,CAAC,UACb,SAAS,OAAO;AAAA,UACd,uCAAuC,QAAQ,SAAS,gBAAgB;AAAA,UACxE;AAAA,QAAA;AAAA,MACF;AAGF,aAAO,MAAM;AACX,iBAAS,OAAO;AAAA,UACd,6BAA6B,QAAQ,SAAS,gBAAgB;AAAA,QAAA;AAEhE,wBAAgB,MAAA;AAAA,MAClB;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,QAAM,SAAS,CAAC,WAAuB,kBAAkB,MAAM,EAAE;AAEjE,QAAM,eAIF;AAAA,IACF,GAAG;AAAA,IACH;AAAA,IACA;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WA
AW;AAAA,IAC7D;AAAA,IACA,OAAO;AAAA,MACL,SAAS,OAAO;AAAA,QACd,WAAW;AAAA,QACX;AAAA,QACA,gBAAgB,CAAC,UACf;AAAA,UACE;AAAA;AAAA,UAEA;AAAA;AAAA,UAIA;AAAA,QAAA;AAAA,MAIF;AAAA,IACJ;AAAA,EACF;AAEF,SAAO;AACT;"}
|
|
1
|
+
{"version":3,"file":"powersync.js","sources":["../../src/powersync.ts"],"sourcesContent":["import { DiffTriggerOperation, sanitizeSQL } from '@powersync/common'\nimport { PendingOperationStore } from './PendingOperationStore'\nimport { PowerSyncTransactor } from './PowerSyncTransactor'\nimport { DEFAULT_BATCH_SIZE } from './definitions'\nimport { asPowerSyncRecord, mapOperation } from './helpers'\nimport { convertTableToSchema } from './schema'\nimport { serializeForSQLite } from './serialization'\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n ExtractedTableColumns,\n MapBaseColumnType,\n OptionalExtractedTable,\n} from './helpers'\nimport type {\n BasePowerSyncCollectionConfig,\n ConfigWithArbitraryCollectionTypes,\n ConfigWithSQLiteInputType,\n ConfigWithSQLiteTypes,\n CustomSQLiteSerializer,\n EnhancedPowerSyncCollectionConfig,\n InferPowerSyncOutputType,\n PowerSyncCollectionConfig,\n PowerSyncCollectionUtils,\n} from './definitions'\nimport type { PendingOperation } from './PendingOperationStore'\nimport type { SyncConfig } from '@tanstack/db'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type { Table, TriggerDiffRecord } from '@powersync/common'\n\n/**\n * Creates PowerSync collection options for use with a standard Collection.\n *\n * @template TTable - The SQLite-based typing\n * @template TSchema - The validation schema type (optionally supports a custom input type)\n * @param config - Configuration options for the PowerSync collection\n * @returns Collection options with utilities\n */\n\n// Overload 1: No schema is provided\n\n/**\n * Creates a PowerSync collection configuration with basic default validation.\n * Input and Output types are the SQLite column types.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * type Document = (typeof APP_SCHEMA)[\"types\"][\"documents\"]\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<TTable extends Table = Table>(\n config: BasePowerSyncCollectionConfig<TTable, never> & ConfigWithSQLiteTypes,\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n OptionalExtractedTable<TTable>,\n never\n>\n\n// Overload 2: Schema is provided and the TInput matches SQLite types.\n\n/**\n * Creates a PowerSync collection configuration with schema validation.\n *\n * The input types satisfy the SQLite column types.\n *\n * The output types are defined by the provided schema. This schema can enforce additional\n * validation or type transforms.\n * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard\n * serialization implementation to serialize column values. Custom or advanced types require providing additional\n * serializer specifications. Partial column overrides can be supplied to `serializer`.\n *\n * @example\n * ```typescript\n * import { z } from \"zod\"\n *\n * // The PowerSync SQLite schema\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * // Dates are stored as ISO date strings in SQLite\n * created_at: column.text\n * }),\n * })\n *\n * // Advanced Zod validations. 
The output type of this schema\n * // is constrained to the SQLite schema of APP_SCHEMA\n * const schema = z.object({\n * id: z.string(),\n * // Notice that `name` is not nullable (is required) here and it has additional validation\n * name: z.string().min(3, { message: \"Should be at least 3 characters\" }).nullable(),\n * // The input type is still the SQLite string type. While collections will output smart Date instances.\n * created_at: z.string().transform(val => new Date(val))\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * schema,\n * serializer: {\n * // The default is toISOString, this is just to demonstrate custom overrides\n * created_at: (outputValue) => outputValue.toISOString(),\n * },\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types. We can use the supplied schema to validate sync input\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n>(\n config: BasePowerSyncCollectionConfig<TTable, TSchema> &\n ConfigWithSQLiteInputType<TTable, TSchema>,\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n InferPowerSyncOutputType<TTable, TSchema>,\n TSchema\n> & {\n schema: TSchema\n}\n\n// Overload 3: Schema is provided with arbitrary TInput and TOutput\n/**\n * Creates a PowerSync collection configuration with schema validation.\n *\n * The input types are not linked to the internal SQLite table types. This can\n * give greater flexibility, e.g. by accepting rich types as input for `insert` or `update` operations.\n * An additional `deserializationSchema` is required in order to process incoming SQLite updates to the output type.\n *\n * The output types are defined by the provided schema. This schema can enforce additional\n * validation or type transforms.\n * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard\n * serialization implementation to serialize column values. Custom or advanced types require providing additional\n * serializer specifications. Partial column overrides can be supplied to `serializer`.\n *\n * @example\n * ```typescript\n * import { z } from \"zod\"\n *\n * // The PowerSync SQLite schema\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * // Booleans are represented as integers in SQLite\n * is_active: column.integer\n * }),\n * })\n *\n * // Advanced Zod validations.\n * // We accept boolean values as input for operations and expose Booleans in query results\n * const schema = z.object({\n * id: z.string(),\n * isActive: z.boolean(), // TInput and TOutput are boolean\n * })\n *\n * // The deserializationSchema converts the SQLite synced INTEGER (0/1) values to booleans.\n * const deserializationSchema = z.object({\n * id: z.string(),\n * isActive: z.number().nullable().transform((val) => val == null ? 
true : val > 0),\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * schema,\n * deserializationSchema,\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n>(\n config: BasePowerSyncCollectionConfig<TTable, TSchema> &\n ConfigWithArbitraryCollectionTypes<TTable, TSchema>,\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n InferPowerSyncOutputType<TTable, TSchema>,\n TSchema\n> & {\n utils: PowerSyncCollectionUtils<TTable>\n schema: TSchema\n}\n\n/**\n * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations.\n */\n\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<any> = never,\n>(config: PowerSyncCollectionConfig<TTable, TSchema>) {\n const {\n database,\n table,\n schema: inputSchema,\n syncBatchSize = DEFAULT_BATCH_SIZE,\n ...restConfig\n } = config\n\n const deserializationSchema =\n `deserializationSchema` in config ? config.deserializationSchema : null\n const serializer = `serializer` in config ? config.serializer : undefined\n const onDeserializationError =\n `onDeserializationError` in config\n ? config.onDeserializationError\n : undefined\n\n // The SQLite table type\n type TableType = ExtractedTable<TTable>\n\n // The collection output type\n type OutputType = InferPowerSyncOutputType<TTable, TSchema>\n\n const { viewName, trackMetadata: metadataIsTracked } = table\n\n /**\n * Deserializes data from the incoming sync stream\n */\n const deserializeSyncRow = (value: TableType): OutputType => {\n const validationSchema = deserializationSchema || schema\n const validation = validationSchema[`~standard`].validate(value)\n if (`value` in validation) {\n return validation.value\n } else if (`issues` in validation) {\n const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}`\n database.logger.error(issueMessage)\n onDeserializationError!(validation)\n throw new Error(issueMessage)\n } else {\n const unknownErrorMessage = `Unknown deserialization error for ${viewName}`\n database.logger.error(unknownErrorMessage)\n onDeserializationError!({ issues: [{ message: unknownErrorMessage }] })\n throw new Error(unknownErrorMessage)\n }\n }\n\n // We can do basic runtime validations for columns if not explicit schema has been provided\n const schema = inputSchema ?? 
(convertTableToSchema(table) as TSchema)\n /**\n * The onInsert, onUpdate, and onDelete handlers should only return\n * after we have written the changes to TanStack DB.\n * We currently only write to TanStack DB from a diff trigger.\n * We wait for the diff trigger to observe the change,\n * and only then return from the on[X] handlers.\n * This ensures that when the transaction is reported as\n * complete to the caller, the in-memory state is already\n * consistent with the database.\n */\n const pendingOperationStore = PendingOperationStore.GLOBAL\n // Keep the tracked table unique in case of multiple tabs.\n const trackedTableName = `__${viewName}_tracking_${Math.floor(\n Math.random() * 0xffffffff,\n )\n .toString(16)\n .padStart(8, `0`)}`\n\n const transactor = new PowerSyncTransactor({\n database,\n })\n\n /**\n * \"sync\"\n * Notice that this describes the Sync between the local SQLite table\n * and the in-memory tanstack-db collection.\n */\n const sync: SyncConfig<OutputType, string> = {\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n const abortController = new AbortController()\n\n // The sync function needs to be synchronous\n async function start() {\n database.logger.info(\n `Sync is starting for ${viewName} into ${trackedTableName}`,\n )\n database.onChangeWithCallback(\n {\n onChange: async () => {\n await database\n .writeTransaction(async (context) => {\n begin()\n const operations = await context.getAll<TriggerDiffRecord>(\n `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`,\n )\n const pendingOperations: Array<PendingOperation> = []\n\n for (const op of operations) {\n const { id, operation, timestamp, value } = op\n const parsedValue = deserializeSyncRow({\n id,\n ...JSON.parse(value),\n })\n const parsedPreviousValue =\n op.operation == DiffTriggerOperation.UPDATE\n ? deserializeSyncRow({\n id,\n ...JSON.parse(op.previous_value),\n })\n : undefined\n write({\n type: mapOperation(operation),\n value: parsedValue,\n previousValue: parsedPreviousValue,\n })\n pendingOperations.push({\n id,\n operation,\n timestamp,\n tableName: viewName,\n })\n }\n\n // clear the current operations\n await context.execute(`DELETE FROM ${trackedTableName}`)\n\n commit()\n pendingOperationStore.resolvePendingFor(pendingOperations)\n })\n .catch((error) => {\n database.logger.error(\n `An error has been detected in the sync handler`,\n error,\n )\n })\n },\n },\n {\n signal: abortController.signal,\n triggerImmediate: false,\n tables: [trackedTableName],\n },\n )\n\n const disposeTracking = await database.triggers.createDiffTrigger({\n source: viewName,\n destination: trackedTableName,\n when: {\n [DiffTriggerOperation.INSERT]: `TRUE`,\n [DiffTriggerOperation.UPDATE]: `TRUE`,\n [DiffTriggerOperation.DELETE]: `TRUE`,\n },\n hooks: {\n beforeCreate: async (context) => {\n let currentBatchCount = syncBatchSize\n let cursor = 0\n while (currentBatchCount == syncBatchSize) {\n begin()\n const batchItems = await context.getAll<TableType>(\n sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? 
OFFSET ?`,\n [syncBatchSize, cursor],\n )\n currentBatchCount = batchItems.length\n cursor += currentBatchCount\n for (const row of batchItems) {\n write({\n type: `insert`,\n value: deserializeSyncRow(row),\n })\n }\n commit()\n }\n markReady()\n database.logger.info(\n `Sync is ready for ${viewName} into ${trackedTableName}`,\n )\n },\n },\n })\n\n // If the abort controller was aborted while processing the request above\n if (abortController.signal.aborted) {\n await disposeTracking()\n } else {\n abortController.signal.addEventListener(\n `abort`,\n () => {\n disposeTracking()\n },\n { once: true },\n )\n }\n }\n\n start().catch((error) =>\n database.logger.error(\n `Could not start syncing process for ${viewName} into ${trackedTableName}`,\n error,\n ),\n )\n\n return () => {\n database.logger.info(\n `Sync has been stopped for ${viewName} into ${trackedTableName}`,\n )\n abortController.abort()\n }\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n const getKey = (record: OutputType) => asPowerSyncRecord(record).id\n\n const outputConfig: EnhancedPowerSyncCollectionConfig<\n TTable,\n OutputType,\n TSchema\n > = {\n ...restConfig,\n schema,\n getKey,\n // Syncing should start immediately since we need to monitor the changes for mutations\n startSync: true,\n sync,\n onInsert: async (params) => {\n // The transaction here should only ever contain a single insert mutation\n return await transactor.applyTransaction(params.transaction)\n },\n onUpdate: async (params) => {\n // The transaction here should only ever contain a single update mutation\n return await transactor.applyTransaction(params.transaction)\n },\n onDelete: async (params) => {\n // The transaction here should only ever contain a single delete mutation\n return await transactor.applyTransaction(params.transaction)\n },\n utils: {\n getMeta: () => ({\n tableName: viewName,\n trackedTableName,\n metadataIsTracked,\n serializeValue: (value) =>\n serializeForSQLite(\n value,\n // This is required by the input generic\n table as Table<\n MapBaseColumnType<InferPowerSyncOutputType<TTable, TSchema>>\n >,\n // Coerce serializer to the shape that corresponds to the Table constructed from OutputType\n serializer as CustomSQLiteSerializer<\n OutputType,\n ExtractedTableColumns<Table<MapBaseColumnType<OutputType>>>\n >,\n ),\n }),\n },\n }\n return 
outputConfig\n}\n"],"names":[],"mappings":";;;;;;;AA0NO,SAAS,2BAGd,QAAoD;AACpD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR,gBAAgB;AAAA,IAChB,GAAG;AAAA,EAAA,IACD;AAEJ,QAAM,wBACJ,2BAA2B,SAAS,OAAO,wBAAwB;AACrE,QAAM,aAAa,gBAAgB,SAAS,OAAO,aAAa;AAChE,QAAM,yBACJ,4BAA4B,SACxB,OAAO,yBACP;AAQN,QAAM,EAAE,UAAU,eAAe,kBAAA,IAAsB;AAKvD,QAAM,qBAAqB,CAAC,UAAiC;AAC3D,UAAM,mBAAmB,yBAAyB;AAClD,UAAM,aAAa,iBAAiB,WAAW,EAAE,SAAS,KAAK;AAC/D,QAAI,WAAW,YAAY;AACzB,aAAO,WAAW;AAAA,IACpB,WAAW,YAAY,YAAY;AACjC,YAAM,eAAe,wCAAwC,QAAQ,aAAa,WAAW,OAAO,IAAI,CAAC,UAAU,GAAG,MAAM,IAAI,MAAM,MAAM,OAAO,EAAE,CAAC;AACtJ,eAAS,OAAO,MAAM,YAAY;AAClC,6BAAwB,UAAU;AAClC,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B,OAAO;AACL,YAAM,sBAAsB,qCAAqC,QAAQ;AACzE,eAAS,OAAO,MAAM,mBAAmB;AACzC,6BAAwB,EAAE,QAAQ,CAAC,EAAE,SAAS,oBAAA,CAAqB,GAAG;AACtE,YAAM,IAAI,MAAM,mBAAmB;AAAA,IACrC;AAAA,EACF;AAGA,QAAM,SAAS,eAAgB,qBAAqB,KAAK;AAWzD,QAAM,wBAAwB,sBAAsB;AAEpD,QAAM,mBAAmB,KAAK,QAAQ,aAAa,KAAK;AAAA,IACtD,KAAK,WAAW;AAAA,EAAA,EAEf,SAAS,EAAE,EACX,SAAS,GAAG,GAAG,CAAC;AAEnB,QAAM,aAAa,IAAI,oBAAoB;AAAA,IACzC;AAAA,EAAA,CACD;AAOD,QAAM,OAAuC;AAAA,IAC3C,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAC5C,YAAM,kBAAkB,IAAI,gBAAA;AAG5B,qBAAe,QAAQ;AACrB,iBAAS,OAAO;AAAA,UACd,wBAAwB,QAAQ,SAAS,gBAAgB;AAAA,QAAA;AAE3D,iBAAS;AAAA,UACP;AAAA,YACE,UAAU,YAAY;AACpB,oBAAM,SACH,iBAAiB,OAAO,YAAY;AACnC,sBAAA;AACA,sBAAM,aAAa,MAAM,QAAQ;AAAA,kBAC/B,iBAAiB,gBAAgB;AAAA,gBAAA;AAEnC,sBAAM,oBAA6C,CAAA;AAEnD,2BAAW,MAAM,YAAY;AAC3B,wBAAM,EAAE,IAAI,WAAW,WAAW,UAAU;AAC5C,wBAAM,cAAc,mBAAmB;AAAA,oBACrC;AAAA,oBACA,GAAG,KAAK,MAAM,KAAK;AAAA,kBAAA,CACpB;AACD,wBAAM,sBACJ,GAAG,aAAa,qBAAqB,SACjC,mBAAmB;AAAA,oBACjB;AAAA,oBACA,GAAG,KAAK,MAAM,GAAG,cAAc;AAAA,kBAAA,CAChC,IACD;AACN,wBAAM;AAAA,oBACJ,MAAM,aAAa,SAAS;AAAA,oBAC5B,OAAO;AAAA,oBACP,eAAe;AAAA,kBAAA,CAChB;AACD,oCAAkB,KAAK;AAAA,oBACrB;AAAA,oBACA;AAAA,oBACA;AAAA,oBACA,WAAW;AAAA,kBAAA,CACZ;AAAA,gBACH;AAGA,sBAAM,QAAQ,QAAQ,eAAe,gBAAgB,EAAE;AAEvD,uBAAA;AACA,sCAAsB,kBAAkB,iBAAiB;AAAA,cAC3D,CAAC,EACA,MAAM,CAAC,UAAU;AAChB,yBAAS,OAAO;AAAA,kBACd;AAAA,kBACA;AAAA,gBAAA;AAAA,cAEJ,CAAC;AAAA,YACL;AAAA,UAAA;AAAA,UAEF;AAAA,YACE,QAAQ,gBAAgB;AAAA,YACxB,kBAAkB;AAAA,YAClB,QAAQ,CAAC,gBAAgB;AAAA,UAAA;AAAA,QAC3B;AAGF,cAAM,kBAAkB,MAAM,SAAS,SAAS,kBAAkB;AAAA,UAChE,QAAQ;AAAA,UACR,aAAa;AAAA,UACb,MAAM;AAAA,YACJ,CAAC,qBAAqB,MAAM,GAAG;AAAA,YAC/B,CAAC,qBAAqB,MAAM,GAAG;AAAA,YAC/B,CAAC,qBAAqB,MAAM,GAAG;AAAA,UAAA;AAAA,UAEjC,OAAO;AAAA,YACL,cAAc,OAAO,YAAY;AAC/B,kBAAI,oBAAoB;AACxB,kBAAI,SAAS;AACb,qBAAO,qBAAqB,eAAe;AACzC,sBAAA;AACA,sBAAM,aAAa,MAAM,QAAQ;AAAA,kBAC/B,4BAA4B,QAAQ;AAAA,kBACpC,CAAC,eAAe,MAAM;AAAA,gBAAA;AAExB,oCAAoB,WAAW;AAC/B,0BAAU;AACV,2BAAW,OAAO,YAAY;AAC5B,wBAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,OAAO,mBAAmB,GAAG;AAAA,kBAAA,CAC9B;AAAA,gBACH;AACA,uBAAA;AAAA,cACF;AACA,wBAAA;AACA,uBAAS,OAAO;AAAA,gBACd,qBAAqB,QAAQ,SAAS,gBAAgB;AAAA,cAAA;AAAA,YAE1D;AAAA,UAAA;AAAA,QACF,CACD;AAGD,YAAI,gBAAgB,OAAO,SAAS;AAClC,gBAAM,gBAAA;AAAA,QACR,OAAO;AACL,0BAAgB,OAAO;AAAA,YACrB;AAAA,YACA,MAAM;AACJ,8BAAA;AAAA,YACF;AAAA,YACA,EAAE,MAAM,KAAA;AAAA,UAAK;AAAA,QAEjB;AAAA,MACF;AAEA,YAAA,EAAQ;AAAA,QAAM,CAAC,UACb,SAAS,OAAO;AAAA,UACd,uCAAuC,QAAQ,SAAS,gBAAgB;AAAA,UACxE;AAAA,QAAA;AAAA,MACF;AAGF,aAAO,MAAM;AACX,iBAAS,OAAO;AAAA,UACd,6BAA6B,QAAQ,SAAS,gBAAgB;AAAA,QAAA;AAEhE,wBAAgB,MAAA;AAAA,MAClB;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,QAAM,SAAS,CAAC,WAAuB,kBAAkB,MAAM,EAAE;AAEjE,QAAM,eAIF;AAAA,IACF,GAAG;AAAA,IACH;AAAA,IACA;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WA
AW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,OAAO;AAAA,MACL,SAAS,OAAO;AAAA,QACd,WAAW;AAAA,QACX;AAAA,QACA;AAAA,QACA,gBAAgB,CAAC,UACf;AAAA,UACE;AAAA;AAAA,UAEA;AAAA;AAAA,UAIA;AAAA,QAAA;AAAA,MAIF;AAAA,IACJ;AAAA,EACF;AAEF,SAAO;AACT;"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@tanstack/powersync-db-collection",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.21",
|
|
4
4
|
"description": "PowerSync collection for TanStack DB",
|
|
5
5
|
"author": "POWERSYNC",
|
|
6
6
|
"license": "MIT",
|
|
@@ -46,18 +46,18 @@
|
|
|
46
46
|
"src"
|
|
47
47
|
],
|
|
48
48
|
"dependencies": {
|
|
49
|
-
"@standard-schema/spec": "^1.
|
|
49
|
+
"@standard-schema/spec": "^1.1.0",
|
|
50
50
|
"@tanstack/store": "^0.8.0",
|
|
51
51
|
"debug": "^4.4.3",
|
|
52
52
|
"p-defer": "^4.0.1",
|
|
53
|
-
"@tanstack/db": "0.5.
|
|
53
|
+
"@tanstack/db": "0.5.17"
|
|
54
54
|
},
|
|
55
55
|
"peerDependencies": {
|
|
56
56
|
"@powersync/common": "^1.41.0"
|
|
57
57
|
},
|
|
58
58
|
"devDependencies": {
|
|
59
|
-
"@powersync/common": "^1.
|
|
60
|
-
"@powersync/node": "^0.
|
|
59
|
+
"@powersync/common": "^1.44.0",
|
|
60
|
+
"@powersync/node": "^0.15.1",
|
|
61
61
|
"@types/debug": "^4.1.12",
|
|
62
62
|
"@vitest/coverage-istanbul": "^3.2.4"
|
|
63
63
|
},
|
|
package/src/PowerSyncTransactor.ts
CHANGED
|
@@ -1,11 +1,14 @@
|
|
|
1
1
|
import { sanitizeSQL } from '@powersync/common'
|
|
2
2
|
import DebugModule from 'debug'
|
|
3
|
-
import { asPowerSyncRecord, mapOperationToPowerSync } from './helpers'
|
|
4
3
|
import { PendingOperationStore } from './PendingOperationStore'
|
|
4
|
+
import { asPowerSyncRecord, mapOperationToPowerSync } from './helpers'
|
|
5
5
|
import type { AbstractPowerSyncDatabase, LockContext } from '@powersync/common'
|
|
6
6
|
import type { PendingMutation, Transaction } from '@tanstack/db'
|
|
7
|
-
import type { EnhancedPowerSyncCollectionConfig } from './definitions'
|
|
8
7
|
import type { PendingOperation } from './PendingOperationStore'
|
|
8
|
+
import type {
|
|
9
|
+
EnhancedPowerSyncCollectionConfig,
|
|
10
|
+
PowerSyncCollectionMeta,
|
|
11
|
+
} from './definitions'
|
|
9
12
|
|
|
10
13
|
const debug = DebugModule.debug(`ts/db:powersync`)
|
|
11
14
|
|
|
@@ -160,6 +163,13 @@ export class PowerSyncTransactor {
|
|
|
160
163
|
async (tableName, mutation, serializeValue) => {
|
|
161
164
|
const values = serializeValue(mutation.modified)
|
|
162
165
|
const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`)
|
|
166
|
+
const queryParameters = Object.values(values)
|
|
167
|
+
|
|
168
|
+
const metadataValue = this.processMutationMetadata(mutation)
|
|
169
|
+
if (metadataValue != null) {
|
|
170
|
+
keys.push(`_metadata`)
|
|
171
|
+
queryParameters.push(metadataValue)
|
|
172
|
+
}
|
|
163
173
|
|
|
164
174
|
await context.execute(
|
|
165
175
|
`
|
|
@@ -168,7 +178,7 @@ export class PowerSyncTransactor {
|
|
|
168
178
|
VALUES
|
|
169
179
|
(${keys.map((_) => `?`).join(`, `)})
|
|
170
180
|
`,
|
|
171
|
-
Object.values(values),
|
|
181
|
+
queryParameters,
|
|
172
182
|
)
|
|
173
183
|
},
|
|
174
184
|
)
|
|
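The insert path above now assembles the bound parameter list alongside the column keys, so an optional _metadata column can travel with the row values. A minimal sketch of that assembly (row, table, and metadata values are hypothetical; sanitizeSQL is the @powersync/common helper already imported in this file):

const values = { name: `task` } // hypothetical serialized row
const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`)
const queryParameters: Array<unknown> = Object.values(values)
const metadataValue: string | null = `created-offline` // stand-in for processMutationMetadata(mutation)
if (metadataValue != null) {
  keys.push(`_metadata`)
  queryParameters.push(metadataValue)
}
// Renders: INSERT into documents (name, _metadata) VALUES (?, ?)
// with parameters [`task`, `created-offline`]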
@@ -188,6 +198,13 @@ export class PowerSyncTransactor {
|
|
|
188
198
|
async (tableName, mutation, serializeValue) => {
|
|
189
199
|
const values = serializeValue(mutation.modified)
|
|
190
200
|
const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`)
|
|
201
|
+
const queryParameters = Object.values(values)
|
|
202
|
+
|
|
203
|
+
const metadataValue = this.processMutationMetadata(mutation)
|
|
204
|
+
if (metadataValue != null) {
|
|
205
|
+
keys.push(`_metadata`)
|
|
206
|
+
queryParameters.push(metadataValue)
|
|
207
|
+
}
|
|
191
208
|
|
|
192
209
|
await context.execute(
|
|
193
210
|
`
|
|
@@ -195,7 +212,7 @@ export class PowerSyncTransactor {
|
|
|
195
212
|
SET ${keys.map((key) => `${key} = ?`).join(`, `)}
|
|
196
213
|
WHERE id = ?
|
|
197
214
|
`,
|
|
198
|
-
[...Object.values(values), asPowerSyncRecord(mutation.modified).id],
|
|
215
|
+
[...queryParameters, asPowerSyncRecord(mutation.modified).id],
|
|
199
216
|
)
|
|
200
217
|
},
|
|
201
218
|
)
|
|
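With the same parameter handling, the update path above renders roughly as follows when metadata is present (a sketch with hypothetical table, column, and id values; context is the LockContext used throughout this class):

await context.execute(
  `
  UPDATE documents
  SET name = ?, _metadata = ?
  WHERE id = ?
  `,
  [`task`, `edited-offline`, `1`],
)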
@@ -213,12 +230,26 @@ export class PowerSyncTransactor {
|
|
|
213
230
|
context,
|
|
214
231
|
waitForCompletion,
|
|
215
232
|
async (tableName, mutation) => {
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
233
|
+
const metadataValue = this.processMutationMetadata(mutation)
|
|
234
|
+
if (metadataValue != null) {
|
|
235
|
+
/**
|
|
236
|
+
* Delete operations with metadata require a different approach:
|
|
237
|
+
* setting _deleted deletes the record while still persisting the metadata.
|
|
238
|
+
*/
|
|
239
|
+
await context.execute(
|
|
240
|
+
`
|
|
241
|
+
UPDATE ${tableName} SET _deleted = TRUE, _metadata = ? WHERE id = ?
|
|
242
|
+
`,
|
|
243
|
+
[metadataValue, asPowerSyncRecord(mutation.original).id],
|
|
244
|
+
)
|
|
245
|
+
} else {
|
|
246
|
+
await context.execute(
|
|
247
|
+
`
|
|
248
|
+
DELETE FROM ${tableName} WHERE id = ?
|
|
249
|
+
`,
|
|
250
|
+
[asPowerSyncRecord(mutation.original).id],
|
|
251
|
+
)
|
|
252
|
+
}
|
|
222
253
|
},
|
|
223
254
|
)
|
|
224
255
|
}
|
|
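The branch structure above follows from how PowerSync's trackMetadata feature behaves: a plain DELETE cannot carry metadata into the upload queue, so when metadata is present the transactor instead sets the _deleted column, which removes the record while letting _metadata accompany the queued delete operation. The two shapes side by side (hypothetical table name, metadata, and id):

// With metadata: PowerSync treats the _deleted update as a delete and records _metadata.
await context.execute(
  `UPDATE documents SET _deleted = TRUE, _metadata = ? WHERE id = ?`,
  [`archived-by-user`, `1`],
)

// Without metadata: an ordinary hard delete.
await context.execute(`DELETE FROM documents WHERE id = ?`, [`1`])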
@@ -239,17 +270,8 @@ export class PowerSyncTransactor {
|
|
|
239
270
|
serializeValue: (value: any) => Record<string, unknown>,
|
|
240
271
|
) => Promise<void>,
|
|
241
272
|
): Promise<PendingOperation | null> {
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
) {
|
|
245
|
-
throw new Error(`Could not get tableName from mutation's collection config.
|
|
246
|
-
The provided mutation might not have originated from PowerSync.`)
|
|
247
|
-
}
|
|
248
|
-
|
|
249
|
-
const { tableName, trackedTableName, serializeValue } = (
|
|
250
|
-
mutation.collection
|
|
251
|
-
.config as unknown as EnhancedPowerSyncCollectionConfig<any>
|
|
252
|
-
).utils.getMeta()
|
|
273
|
+
const { tableName, trackedTableName, serializeValue } =
|
|
274
|
+
this.getMutationCollectionMeta(mutation)
|
|
253
275
|
|
|
254
276
|
await handler(sanitizeSQL`${tableName}`, mutation, serializeValue)
|
|
255
277
|
|
|
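The inline meta lookup is replaced here by the getMutationCollectionMeta helper added below, so every mutation path shares one guard. A hypothetical illustration of the failure mode it catches:

// Applying a transaction whose mutations did not originate from a PowerSync
// collection now fails fast (the transaction variable is hypothetical):
try {
  await transactor.applyTransaction(transactionFromOtherCollection)
} catch (error) {
  // Error: Collection is not a PowerSync collection.
}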
@@ -268,4 +290,46 @@ export class PowerSyncTransactor {
|
|
|
268
290
|
timestamp: diffOperation.timestamp,
|
|
269
291
|
}
|
|
270
292
|
}
|
|
293
|
+
|
|
294
|
+
protected getMutationCollectionMeta(
|
|
295
|
+
mutation: PendingMutation<any>,
|
|
296
|
+
): PowerSyncCollectionMeta<any> {
|
|
297
|
+
if (
|
|
298
|
+
typeof (mutation.collection.config as any).utils?.getMeta != `function`
|
|
299
|
+
) {
|
|
300
|
+
throw new Error(`Collection is not a PowerSync collection.`)
|
|
301
|
+
}
|
|
302
|
+
return (
|
|
303
|
+
mutation.collection
|
|
304
|
+
.config as unknown as EnhancedPowerSyncCollectionConfig<any>
|
|
305
|
+
).utils.getMeta()
|
|
306
|
+
}
|
|
307
|
+
|
|
308
|
+
/**
|
|
309
|
+
* Processes collection mutation metadata for persistence to the database.
|
|
310
|
+
* Metadata is stored as a string; non-string values are serialized with JSON.stringify.
|
|
311
|
+
* @returns null if no metadata should be stored.
|
|
312
|
+
*/
|
|
313
|
+
protected processMutationMetadata(
|
|
314
|
+
mutation: PendingMutation<any>,
|
|
315
|
+
): string | null {
|
|
316
|
+
const { metadataIsTracked } = this.getMutationCollectionMeta(mutation)
|
|
317
|
+
if (!metadataIsTracked) {
|
|
318
|
+
// If it's not supported, we don't store metadata.
|
|
319
|
+
if (typeof mutation.metadata != `undefined`) {
|
|
320
|
+
// Log a warning if metadata is provided but not tracked.
|
|
321
|
+
this.database.logger.warn(
|
|
322
|
+
`Metadata provided for collection ${mutation.collection.id} but the PowerSync table does not track metadata. The PowerSync table should be configured with trackMetadata: true.`,
|
|
323
|
+
mutation.metadata,
|
|
324
|
+
)
|
|
325
|
+
}
|
|
326
|
+
return null
|
|
327
|
+
} else if (typeof mutation.metadata == `undefined`) {
|
|
328
|
+
return null
|
|
329
|
+
} else if (typeof mutation.metadata == `string`) {
|
|
330
|
+
return mutation.metadata
|
|
331
|
+
} else {
|
|
332
|
+
return JSON.stringify(mutation.metadata)
|
|
333
|
+
}
|
|
334
|
+
}
|
|
271
335
|
}
|
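Together, getMutationCollectionMeta and processMutationMetadata let application-supplied mutation metadata reach the _metadata column. A sketch of the intended flow, assuming @tanstack/db mutation calls accept a metadata option (the collection name and values are hypothetical):

// String metadata is stored as-is:
documents.insert({ id: `1`, name: `task` }, { metadata: `created-offline` })

// Non-string metadata is serialized first:
documents.delete(`1`, { metadata: { reason: `archived` } })
// stored as JSON.stringify({ reason: `archived` })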
package/src/definitions.ts
CHANGED
|
@@ -246,6 +246,11 @@ export type PowerSyncCollectionMeta<TTable extends Table = Table> = {
|
|
|
246
246
|
* Serializes a collection value to the SQLite type
|
|
247
247
|
*/
|
|
248
248
|
serializeValue: (value: any) => ExtractedTable<TTable>
|
|
249
|
+
|
|
250
|
+
/**
|
|
251
|
+
* Whether the PowerSync table tracks metadata.
|
|
252
|
+
*/
|
|
253
|
+
metadataIsTracked: boolean
|
|
249
254
|
}
|
|
250
255
|
|
|
251
256
|
/**
|
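The new metadataIsTracked flag mirrors the table's trackMetadata option, which the powersync.ts change below reads off the Table definition. A sketch of enabling it on the PowerSync schema (table and column names are hypothetical):

import { Table, column } from '@powersync/common'

// trackMetadata adds the _metadata column that PowerSyncTransactor writes to;
// collections built on this table then report getMeta().metadataIsTracked === true.
const documents = new Table(
  { name: column.text },
  { trackMetadata: true },
)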
package/src/powersync.ts
CHANGED
|
@@ -242,7 +242,7 @@ export function powerSyncCollectionOptions<
|
|
|
242
242
|
// The collection output type
|
|
243
243
|
type OutputType = InferPowerSyncOutputType<TTable, TSchema>
|
|
244
244
|
|
|
245
|
-
const { viewName } = table
|
|
245
|
+
const { viewName, trackMetadata: metadataIsTracked } = table
|
|
246
246
|
|
|
247
247
|
/**
|
|
248
248
|
* Deserializes data from the incoming sync stream
|
|
@@ -459,6 +459,7 @@ export function powerSyncCollectionOptions<
|
|
|
459
459
|
getMeta: () => ({
|
|
460
460
|
tableName: viewName,
|
|
461
461
|
trackedTableName,
|
|
462
|
+
metadataIsTracked,
|
|
462
463
|
serializeValue: (value) =>
|
|
463
464
|
serializeForSQLite(
|
|
464
465
|
value,
|