@tanstack/powersync-db-collection 0.0.0 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/cjs/PendingOperationStore.cjs +33 -0
- package/dist/cjs/PendingOperationStore.cjs.map +1 -0
- package/dist/cjs/PendingOperationStore.d.cts +29 -0
- package/dist/cjs/PowerSyncTransactor.cjs +158 -0
- package/dist/cjs/PowerSyncTransactor.cjs.map +1 -0
- package/dist/cjs/PowerSyncTransactor.d.cts +60 -0
- package/dist/cjs/definitions.cjs +5 -0
- package/dist/cjs/definitions.cjs.map +1 -0
- package/dist/cjs/definitions.d.cts +200 -0
- package/dist/cjs/helpers.cjs +35 -0
- package/dist/cjs/helpers.cjs.map +1 -0
- package/dist/cjs/helpers.d.cts +70 -0
- package/dist/cjs/index.cjs +9 -0
- package/dist/cjs/index.cjs.map +1 -0
- package/dist/cjs/index.d.cts +3 -0
- package/dist/cjs/powersync.cjs +200 -0
- package/dist/cjs/powersync.cjs.map +1 -0
- package/dist/cjs/powersync.d.cts +145 -0
- package/dist/cjs/schema.cjs +65 -0
- package/dist/cjs/schema.cjs.map +1 -0
- package/dist/cjs/schema.d.cts +21 -0
- package/dist/cjs/serialization.cjs +47 -0
- package/dist/cjs/serialization.cjs.map +1 -0
- package/dist/cjs/serialization.d.cts +34 -0
- package/dist/esm/PendingOperationStore.d.ts +29 -0
- package/dist/esm/PendingOperationStore.js +33 -0
- package/dist/esm/PendingOperationStore.js.map +1 -0
- package/dist/esm/PowerSyncTransactor.d.ts +60 -0
- package/dist/esm/PowerSyncTransactor.js +158 -0
- package/dist/esm/PowerSyncTransactor.js.map +1 -0
- package/dist/esm/definitions.d.ts +200 -0
- package/dist/esm/definitions.js +5 -0
- package/dist/esm/definitions.js.map +1 -0
- package/dist/esm/helpers.d.ts +70 -0
- package/dist/esm/helpers.js +35 -0
- package/dist/esm/helpers.js.map +1 -0
- package/dist/esm/index.d.ts +3 -0
- package/dist/esm/index.js +9 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/powersync.d.ts +145 -0
- package/dist/esm/powersync.js +200 -0
- package/dist/esm/powersync.js.map +1 -0
- package/dist/esm/schema.d.ts +21 -0
- package/dist/esm/schema.js +65 -0
- package/dist/esm/schema.js.map +1 -0
- package/dist/esm/serialization.d.ts +34 -0
- package/dist/esm/serialization.js +47 -0
- package/dist/esm/serialization.js.map +1 -0
- package/package.json +8 -9
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 Kyle Mathews
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/dist/cjs/PendingOperationStore.cjs
ADDED
@@ -0,0 +1,33 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const pDefer = require("p-defer");
+const _PendingOperationStore = class _PendingOperationStore {
+  constructor() {
+    this.pendingOperations = /* @__PURE__ */ new Map();
+  }
+  /**
+   * @returns A promise which will resolve once the specified operation has been seen.
+   */
+  waitFor(operation) {
+    const managedPromise = pDefer();
+    this.pendingOperations.set(operation, managedPromise);
+    return managedPromise.promise;
+  }
+  /**
+   * Marks a set of operations as seen. This will resolve any pending promises.
+   */
+  resolvePendingFor(operations) {
+    for (const operation of operations) {
+      for (const [pendingOp, deferred] of this.pendingOperations.entries()) {
+        if (pendingOp.tableName == operation.tableName && pendingOp.operation == operation.operation && pendingOp.id == operation.id && pendingOp.timestamp == operation.timestamp) {
+          deferred.resolve();
+          this.pendingOperations.delete(pendingOp);
+        }
+      }
+    }
+  }
+};
+_PendingOperationStore.GLOBAL = new _PendingOperationStore();
+let PendingOperationStore = _PendingOperationStore;
+exports.PendingOperationStore = PendingOperationStore;
+//# sourceMappingURL=PendingOperationStore.cjs.map
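For orientation: this store pairs a writer, which registers interest in an operation before its transaction resolves, with an observer that later marks operations as seen. A minimal sketch of that handshake, assuming the class is reachable from the package root (inside the package it lives in `PendingOperationStore.ts`); the table name, id, and timestamp below are made up:

```typescript
import { DiffTriggerOperation } from "@powersync/common"
// Hypothetical import path; adjust to wherever the class is actually exported from.
import { PendingOperationStore } from "@tanstack/powersync-db-collection"

const op = {
  tableName: `documents`,
  operation: DiffTriggerOperation.INSERT,
  id: `doc-1`,
  timestamp: `2025-01-01 00:00:00`,
}

// Writer side: register the wait before the operation has been observed.
const seen = PendingOperationStore.GLOBAL.waitFor(op)

// Observer side: report operations seen by the diff trigger. Entries matching
// on all four fields (tableName, operation, id, timestamp) resolve and are removed.
PendingOperationStore.GLOBAL.resolvePendingFor([op])

await seen // resolves, since an exactly matching operation was marked as seen
```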
package/dist/cjs/PendingOperationStore.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"PendingOperationStore.cjs","sources":["../../src/PendingOperationStore.ts"],"sourcesContent":["import pDefer from \"p-defer\"\nimport type { DiffTriggerOperation } from \"@powersync/common\"\nimport type { DeferredPromise } from \"p-defer\"\n\nexport type PendingOperation = {\n tableName: string\n operation: DiffTriggerOperation\n id: string\n timestamp: string\n}\n\n/**\n * Optimistic mutations have their optimistic state discarded once transactions have\n * been applied.\n * We need to ensure that an applied transaction has been observed by the sync diff trigger\n * before resolving the transaction application call.\n * This store allows registering a wait for a pending operation to have been observed.\n */\nexport class PendingOperationStore {\n private pendingOperations = new Map<PendingOperation, DeferredPromise<void>>()\n\n /**\n * Globally accessible PendingOperationStore\n */\n static GLOBAL = new PendingOperationStore()\n\n /**\n * @returns A promise which will resolve once the specified operation has been seen.\n */\n waitFor(operation: PendingOperation): Promise<void> {\n const managedPromise = pDefer<void>()\n this.pendingOperations.set(operation, managedPromise)\n return managedPromise.promise\n }\n\n /**\n * Marks a set of operations as seen. This will resolve any pending promises.\n */\n resolvePendingFor(operations: Array<PendingOperation>) {\n for (const operation of operations) {\n for (const [pendingOp, deferred] of this.pendingOperations.entries()) {\n if (\n pendingOp.tableName == operation.tableName &&\n pendingOp.operation == operation.operation &&\n pendingOp.id == operation.id &&\n pendingOp.timestamp == operation.timestamp\n ) {\n deferred.resolve()\n this.pendingOperations.delete(pendingOp)\n }\n }\n }\n }\n}\n"],"names":[],"mappings":";;;AAkBO,MAAM,yBAAN,MAAM,uBAAsB;AAAA,EAA5B,cAAA;AACL,SAAQ,wCAAwB,IAAA;AAAA,EAA6C;AAAA;AAAA;AAAA;AAAA,EAU7E,QAAQ,WAA4C;AAClD,UAAM,iBAAiB,OAAA;AACvB,SAAK,kBAAkB,IAAI,WAAW,cAAc;AACpD,WAAO,eAAe;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,kBAAkB,YAAqC;AACrD,eAAW,aAAa,YAAY;AAClC,iBAAW,CAAC,WAAW,QAAQ,KAAK,KAAK,kBAAkB,WAAW;AACpE,YACE,UAAU,aAAa,UAAU,aACjC,UAAU,aAAa,UAAU,aACjC,UAAU,MAAM,UAAU,MAC1B,UAAU,aAAa,UAAU,WACjC;AACA,mBAAS,QAAA;AACT,eAAK,kBAAkB,OAAO,SAAS;AAAA,QACzC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AA7BE,uBAAO,SAAS,IAAI,uBAAA;AANf,IAAM,wBAAN;;"}
package/dist/cjs/PendingOperationStore.d.cts
ADDED
@@ -0,0 +1,29 @@
+import { DiffTriggerOperation } from '@powersync/common';
+export type PendingOperation = {
+    tableName: string;
+    operation: DiffTriggerOperation;
+    id: string;
+    timestamp: string;
+};
+/**
+ * Optimistic mutations have their optimistic state discarded once transactions have
+ * been applied.
+ * We need to ensure that an applied transaction has been observed by the sync diff trigger
+ * before resolving the transaction application call.
+ * This store allows registering a wait for a pending operation to have been observed.
+ */
+export declare class PendingOperationStore {
+    private pendingOperations;
+    /**
+     * Globally accessible PendingOperationStore
+     */
+    static GLOBAL: PendingOperationStore;
+    /**
+     * @returns A promise which will resolve once the specified operation has been seen.
+     */
+    waitFor(operation: PendingOperation): Promise<void>;
+    /**
+     * Marks a set of operations as seen. This will resolve any pending promises.
+     */
+    resolvePendingFor(operations: Array<PendingOperation>): void;
+}
package/dist/cjs/PowerSyncTransactor.cjs
ADDED
@@ -0,0 +1,158 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const common = require("@powersync/common");
+const DebugModule = require("debug");
+const helpers = require("./helpers.cjs");
+const PendingOperationStore = require("./PendingOperationStore.cjs");
+const debug = DebugModule.debug(`ts/db:powersync`);
+class PowerSyncTransactor {
+  constructor(options) {
+    this.database = options.database;
+    this.pendingOperationStore = PendingOperationStore.PendingOperationStore.GLOBAL;
+  }
+  /**
+   * Persists a {@link Transaction} to the PowerSync SQLite database.
+   */
+  async applyTransaction(transaction) {
+    const { mutations } = transaction;
+    if (mutations.length == 0) {
+      return;
+    }
+    const mutationsCollectionIds = mutations.map(
+      (mutation) => mutation.collection.id
+    );
+    const collectionIds = Array.from(new Set(mutationsCollectionIds));
+    const lastCollectionMutationIndexes = /* @__PURE__ */ new Map();
+    const allCollections = collectionIds.map((id) => mutations.find((mutation) => mutation.collection.id == id)).map((mutation) => mutation.collection);
+    for (const collectionId of collectionIds) {
+      lastCollectionMutationIndexes.set(
+        collectionId,
+        mutationsCollectionIds.lastIndexOf(collectionId)
+      );
+    }
+    await Promise.all(
+      allCollections.map(async (collection) => {
+        if (collection.isReady()) {
+          return;
+        }
+        await new Promise((resolve) => collection.onFirstReady(resolve));
+      })
+    );
+    const { whenComplete } = await this.database.writeTransaction(
+      async (tx) => {
+        const pendingOperations = [];
+        for (const [index, mutation] of mutations.entries()) {
+          const shouldWait = index == lastCollectionMutationIndexes.get(mutation.collection.id);
+          switch (mutation.type) {
+            case `insert`:
+              pendingOperations.push(
+                await this.handleInsert(mutation, tx, shouldWait)
+              );
+              break;
+            case `update`:
+              pendingOperations.push(
+                await this.handleUpdate(mutation, tx, shouldWait)
+              );
+              break;
+            case `delete`:
+              pendingOperations.push(
+                await this.handleDelete(mutation, tx, shouldWait)
+              );
+              break;
+          }
+        }
+        return {
+          whenComplete: Promise.all(
+            pendingOperations.filter((op) => !!op).map((op) => this.pendingOperationStore.waitFor(op))
+          )
+        };
+      }
+    );
+    await whenComplete;
+  }
+  async handleInsert(mutation, context, waitForCompletion = false) {
+    debug(`insert`, mutation);
+    return this.handleOperationWithCompletion(
+      mutation,
+      context,
+      waitForCompletion,
+      async (tableName, mutation2, serializeValue) => {
+        const values = serializeValue(mutation2.modified);
+        const keys = Object.keys(values).map((key) => common.sanitizeSQL`${key}`);
+        await context.execute(
+          `
+          INSERT into ${tableName}
+          (${keys.join(`, `)})
+          VALUES
+          (${keys.map((_) => `?`).join(`, `)})
+          `,
+          Object.values(values)
+        );
+      }
+    );
+  }
+  async handleUpdate(mutation, context, waitForCompletion = false) {
+    debug(`update`, mutation);
+    return this.handleOperationWithCompletion(
+      mutation,
+      context,
+      waitForCompletion,
+      async (tableName, mutation2, serializeValue) => {
+        const values = serializeValue(mutation2.modified);
+        const keys = Object.keys(values).map((key) => common.sanitizeSQL`${key}`);
+        await context.execute(
+          `
+          UPDATE ${tableName}
+          SET ${keys.map((key) => `${key} = ?`).join(`, `)}
+          WHERE id = ?
+          `,
+          [...Object.values(values), helpers.asPowerSyncRecord(mutation2.modified).id]
+        );
+      }
+    );
+  }
+  async handleDelete(mutation, context, waitForCompletion = false) {
+    debug(`update`, mutation);
+    return this.handleOperationWithCompletion(
+      mutation,
+      context,
+      waitForCompletion,
+      async (tableName, mutation2) => {
+        await context.execute(
+          `
+          DELETE FROM ${tableName} WHERE id = ?
+          `,
+          [helpers.asPowerSyncRecord(mutation2.original).id]
+        );
+      }
+    );
+  }
+  /**
+   * Helper function which wraps a persistence operation by:
+   * - Fetching the mutation's collection's SQLite table details
+   * - Executing the mutation
+   * - Returning the last pending diff operation if required
+   */
+  async handleOperationWithCompletion(mutation, context, waitForCompletion, handler) {
+    if (typeof mutation.collection.config.utils?.getMeta != `function`) {
+      throw new Error(`Could not get tableName from mutation's collection config.
+      The provided mutation might not have originated from PowerSync.`);
+    }
+    const { tableName, trackedTableName, serializeValue } = mutation.collection.config.utils.getMeta();
+    await handler(common.sanitizeSQL`${tableName}`, mutation, serializeValue);
+    if (!waitForCompletion) {
+      return null;
+    }
+    const diffOperation = await context.get(
+      common.sanitizeSQL`SELECT id, timestamp FROM ${trackedTableName} ORDER BY timestamp DESC LIMIT 1`
+    );
+    return {
+      tableName,
+      id: diffOperation.id,
+      operation: helpers.mapOperationToPowerSync(mutation.type),
+      timestamp: diffOperation.timestamp
+    };
+  }
+}
+exports.PowerSyncTransactor = PowerSyncTransactor;
+//# sourceMappingURL=PowerSyncTransactor.cjs.map
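A note on the `shouldWait` bookkeeping above: each collection processes diff-trigger events independently, so the transactor only awaits the trigger for the last mutation of each collection; once that one is observed, the earlier operations in the same write transaction have been flushed too. A small worked example of the index arithmetic, with hypothetical collection ids:

```typescript
// One transaction whose mutations touch collections in the order: a, a, b
const mutationsCollectionIds = [`a`, `a`, `b`]

const lastCollectionMutationIndexes = new Map<string, number>()
for (const id of new Set(mutationsCollectionIds)) {
  lastCollectionMutationIndexes.set(id, mutationsCollectionIds.lastIndexOf(id))
}

// lastCollectionMutationIndexes is now Map { "a" => 1, "b" => 2 }, so
// shouldWait is true only at indexes 1 and 2: the transactor waits on two
// pending operations instead of three.
```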
package/dist/cjs/PowerSyncTransactor.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"PowerSyncTransactor.cjs","sources":["../../src/PowerSyncTransactor.ts"],"sourcesContent":["import { sanitizeSQL } from \"@powersync/common\"\nimport DebugModule from \"debug\"\nimport { asPowerSyncRecord, mapOperationToPowerSync } from \"./helpers\"\nimport { PendingOperationStore } from \"./PendingOperationStore\"\nimport type { AbstractPowerSyncDatabase, LockContext } from \"@powersync/common\"\nimport type { PendingMutation, Transaction } from \"@tanstack/db\"\nimport type { EnhancedPowerSyncCollectionConfig } from \"./definitions\"\nimport type { PendingOperation } from \"./PendingOperationStore\"\n\nconst debug = DebugModule.debug(`ts/db:powersync`)\n\nexport type TransactorOptions = {\n database: AbstractPowerSyncDatabase\n}\n\n/**\n * Applies mutations to the PowerSync database. This method is called automatically by the collection's\n * insert, update, and delete operations. You typically don't need to call this directly unless you\n * have special transaction requirements.\n *\n * @example\n * ```typescript\n * // Create a collection\n * const collection = createCollection(\n * powerSyncCollectionOptions<Document>({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * })\n * )\n *\n * const addTx = createTransaction({\n * autoCommit: false,\n * mutationFn: async ({ transaction }) => {\n * await new PowerSyncTransactor({ database: db }).applyTransaction(transaction)\n * },\n * })\n *\n * addTx.mutate(() => {\n * for (let i = 0; i < 5; i++) {\n * collection.insert({ id: randomUUID(), name: `tx-${i}` })\n * }\n * })\n *\n * await addTx.commit()\n * await addTx.isPersisted.promise\n * ```\n *\n * @param transaction - The transaction containing mutations to apply\n * @returns A promise that resolves when the mutations have been persisted to PowerSync\n */\nexport class PowerSyncTransactor {\n database: AbstractPowerSyncDatabase\n pendingOperationStore: PendingOperationStore\n\n constructor(options: TransactorOptions) {\n this.database = options.database\n this.pendingOperationStore = PendingOperationStore.GLOBAL\n }\n\n /**\n * Persists a {@link Transaction} to the PowerSync SQLite database.\n */\n async applyTransaction(transaction: Transaction<any>) {\n const { mutations } = transaction\n\n if (mutations.length == 0) {\n return\n }\n /**\n * The transaction might contain operations for different collections.\n * We can do some optimizations for single-collection transactions.\n */\n const mutationsCollectionIds = mutations.map(\n (mutation) => mutation.collection.id\n )\n const collectionIds = Array.from(new Set(mutationsCollectionIds))\n const lastCollectionMutationIndexes = new Map<string, number>()\n const allCollections = collectionIds\n .map((id) => mutations.find((mutation) => mutation.collection.id == id)!)\n .map((mutation) => mutation.collection)\n for (const collectionId of collectionIds) {\n lastCollectionMutationIndexes.set(\n collectionId,\n mutationsCollectionIds.lastIndexOf(collectionId)\n )\n }\n\n // Check all the observers are ready before taking a lock\n await Promise.all(\n allCollections.map(async (collection) => {\n if (collection.isReady()) {\n return\n }\n await new Promise<void>((resolve) => collection.onFirstReady(resolve))\n })\n )\n\n // Persist to PowerSync\n const { whenComplete } = await this.database.writeTransaction(\n async (tx) => {\n const pendingOperations: Array<PendingOperation | null> = []\n\n for (const [index, mutation] of mutations.entries()) {\n /**\n * Each collection processes events independently. 
We need to make sure the\n * last operation for each collection has been observed.\n */\n const shouldWait =\n index == lastCollectionMutationIndexes.get(mutation.collection.id)\n switch (mutation.type) {\n case `insert`:\n pendingOperations.push(\n await this.handleInsert(mutation, tx, shouldWait)\n )\n break\n case `update`:\n pendingOperations.push(\n await this.handleUpdate(mutation, tx, shouldWait)\n )\n break\n case `delete`:\n pendingOperations.push(\n await this.handleDelete(mutation, tx, shouldWait)\n )\n break\n }\n }\n\n /**\n * Return a promise from the writeTransaction, without awaiting it.\n * This promise will resolve once the entire transaction has been\n * observed via the diff triggers.\n * We return without awaiting in order to free the write lock.\n */\n return {\n whenComplete: Promise.all(\n pendingOperations\n .filter((op) => !!op)\n .map((op) => this.pendingOperationStore.waitFor(op))\n ),\n }\n }\n )\n\n // Wait for the change to be observed via the diff trigger\n await whenComplete\n }\n\n protected async handleInsert(\n mutation: PendingMutation<any>,\n context: LockContext,\n waitForCompletion: boolean = false\n ): Promise<PendingOperation | null> {\n debug(`insert`, mutation)\n\n return this.handleOperationWithCompletion(\n mutation,\n context,\n waitForCompletion,\n async (tableName, mutation, serializeValue) => {\n const values = serializeValue(mutation.modified)\n const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`)\n\n await context.execute(\n `\n INSERT into ${tableName} \n (${keys.join(`, `)}) \n VALUES \n (${keys.map((_) => `?`).join(`, `)})\n `,\n Object.values(values)\n )\n }\n )\n }\n\n protected async handleUpdate(\n mutation: PendingMutation<any>,\n context: LockContext,\n waitForCompletion: boolean = false\n ): Promise<PendingOperation | null> {\n debug(`update`, mutation)\n\n return this.handleOperationWithCompletion(\n mutation,\n context,\n waitForCompletion,\n async (tableName, mutation, serializeValue) => {\n const values = serializeValue(mutation.modified)\n const keys = Object.keys(values).map((key) => sanitizeSQL`${key}`)\n\n await context.execute(\n `\n UPDATE ${tableName} \n SET ${keys.map((key) => `${key} = ?`).join(`, `)}\n WHERE id = ?\n `,\n [...Object.values(values), asPowerSyncRecord(mutation.modified).id]\n )\n }\n )\n }\n\n protected async handleDelete(\n mutation: PendingMutation<any>,\n context: LockContext,\n waitForCompletion: boolean = false\n ): Promise<PendingOperation | null> {\n debug(`update`, mutation)\n\n return this.handleOperationWithCompletion(\n mutation,\n context,\n waitForCompletion,\n async (tableName, mutation) => {\n await context.execute(\n `\n DELETE FROM ${tableName} WHERE id = ?\n `,\n [asPowerSyncRecord(mutation.original).id]\n )\n }\n )\n }\n\n /**\n * Helper function which wraps a persistence operation by:\n * - Fetching the mutation's collection's SQLite table details\n * - Executing the mutation\n * - Returning the last pending diff operation if required\n */\n protected async handleOperationWithCompletion(\n mutation: PendingMutation<any>,\n context: LockContext,\n waitForCompletion: boolean,\n handler: (\n tableName: string,\n mutation: PendingMutation<any>,\n serializeValue: (value: any) => Record<string, unknown>\n ) => Promise<void>\n ): Promise<PendingOperation | null> {\n if (\n typeof (mutation.collection.config as any).utils?.getMeta != `function`\n ) {\n throw new Error(`Could not get tableName from mutation's collection config.\n The provided mutation might not have 
originated from PowerSync.`)\n }\n\n const { tableName, trackedTableName, serializeValue } = (\n mutation.collection\n .config as unknown as EnhancedPowerSyncCollectionConfig<any>\n ).utils.getMeta()\n\n await handler(sanitizeSQL`${tableName}`, mutation, serializeValue)\n\n if (!waitForCompletion) {\n return null\n }\n\n // Need to get the operation in order to wait for it\n const diffOperation = await context.get<{ id: string; timestamp: string }>(\n sanitizeSQL`SELECT id, timestamp FROM ${trackedTableName} ORDER BY timestamp DESC LIMIT 1`\n )\n return {\n tableName,\n id: diffOperation.id,\n operation: mapOperationToPowerSync(mutation.type),\n timestamp: diffOperation.timestamp,\n }\n }\n}\n"],"names":["PendingOperationStore","mutation","sanitizeSQL","asPowerSyncRecord","mapOperationToPowerSync"],"mappings":";;;;;;AASA,MAAM,QAAQ,YAAY,MAAM,iBAAiB;AAyC1C,MAAM,oBAAoB;AAAA,EAI/B,YAAY,SAA4B;AACtC,SAAK,WAAW,QAAQ;AACxB,SAAK,wBAAwBA,sBAAAA,sBAAsB;AAAA,EACrD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAiB,aAA+B;AACpD,UAAM,EAAE,cAAc;AAEtB,QAAI,UAAU,UAAU,GAAG;AACzB;AAAA,IACF;AAKA,UAAM,yBAAyB,UAAU;AAAA,MACvC,CAAC,aAAa,SAAS,WAAW;AAAA,IAAA;AAEpC,UAAM,gBAAgB,MAAM,KAAK,IAAI,IAAI,sBAAsB,CAAC;AAChE,UAAM,oDAAoC,IAAA;AAC1C,UAAM,iBAAiB,cACpB,IAAI,CAAC,OAAO,UAAU,KAAK,CAAC,aAAa,SAAS,WAAW,MAAM,EAAE,CAAE,EACvE,IAAI,CAAC,aAAa,SAAS,UAAU;AACxC,eAAW,gBAAgB,eAAe;AACxC,oCAA8B;AAAA,QAC5B;AAAA,QACA,uBAAuB,YAAY,YAAY;AAAA,MAAA;AAAA,IAEnD;AAGA,UAAM,QAAQ;AAAA,MACZ,eAAe,IAAI,OAAO,eAAe;AACvC,YAAI,WAAW,WAAW;AACxB;AAAA,QACF;AACA,cAAM,IAAI,QAAc,CAAC,YAAY,WAAW,aAAa,OAAO,CAAC;AAAA,MACvE,CAAC;AAAA,IAAA;AAIH,UAAM,EAAE,aAAA,IAAiB,MAAM,KAAK,SAAS;AAAA,MAC3C,OAAO,OAAO;AACZ,cAAM,oBAAoD,CAAA;AAE1D,mBAAW,CAAC,OAAO,QAAQ,KAAK,UAAU,WAAW;AAKnD,gBAAM,aACJ,SAAS,8BAA8B,IAAI,SAAS,WAAW,EAAE;AACnE,kBAAQ,SAAS,MAAA;AAAA,YACf,KAAK;AACH,gCAAkB;AAAA,gBAChB,MAAM,KAAK,aAAa,UAAU,IAAI,UAAU;AAAA,cAAA;AAElD;AAAA,YACF,KAAK;AACH,gCAAkB;AAAA,gBAChB,MAAM,KAAK,aAAa,UAAU,IAAI,UAAU;AAAA,cAAA;AAElD;AAAA,YACF,KAAK;AACH,gCAAkB;AAAA,gBAChB,MAAM,KAAK,aAAa,UAAU,IAAI,UAAU;AAAA,cAAA;AAElD;AAAA,UAAA;AAAA,QAEN;AAQA,eAAO;AAAA,UACL,cAAc,QAAQ;AAAA,YACpB,kBACG,OAAO,CAAC,OAAO,CAAC,CAAC,EAAE,EACnB,IAAI,CAAC,OAAO,KAAK,sBAAsB,QAAQ,EAAE,CAAC;AAAA,UAAA;AAAA,QACvD;AAAA,MAEJ;AAAA,IAAA;AAIF,UAAM;AAAA,EACR;AAAA,EAEA,MAAgB,aACd,UACA,SACA,oBAA6B,OACK;AAClC,UAAM,UAAU,QAAQ;AAExB,WAAO,KAAK;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO,WAAWC,WAAU,mBAAmB;AAC7C,cAAM,SAAS,eAAeA,UAAS,QAAQ;AAC/C,cAAM,OAAO,OAAO,KAAK,MAAM,EAAE,IAAI,CAAC,QAAQC,OAAAA,cAAc,GAAG,EAAE;AAEjE,cAAM,QAAQ;AAAA,UACZ;AAAA,sBACY,SAAS;AAAA,eAChB,KAAK,KAAK,IAAI,CAAC;AAAA;AAAA,eAEf,KAAK,IAAI,CAAC,MAAM,GAAG,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA,UAEpC,OAAO,OAAO,MAAM;AAAA,QAAA;AAAA,MAExB;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,MAAgB,aACd,UACA,SACA,oBAA6B,OACK;AAClC,UAAM,UAAU,QAAQ;AAExB,WAAO,KAAK;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO,WAAWD,WAAU,mBAAmB;AAC7C,cAAM,SAAS,eAAeA,UAAS,QAAQ;AAC/C,cAAM,OAAO,OAAO,KAAK,MAAM,EAAE,IAAI,CAAC,QAAQC,OAAAA,cAAc,GAAG,EAAE;AAEjE,cAAM,QAAQ;AAAA,UACZ;AAAA,iBACO,SAAS;AAAA,cACZ,KAAK,IAAI,CAAC,QAAQ,GAAG,GAAG,MAAM,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA,UAG9C,CAAC,GAAG,OAAO,OAAO,MAAM,GAAGC,QAAAA,kBAAkBF,UAAS,QAAQ,EAAE,EAAE;AAAA,QAAA;AAAA,MAEtE;AAAA,IAAA;AAAA,EAEJ;AAAA,EAEA,MAAgB,aACd,UACA,SACA,oBAA6B,OACK;AAClC,UAAM,UAAU,QAAQ;AAExB,WAAO,KAAK;AAAA,MACV;AAAA,MACA;AAAA,MACA;AAAA,MACA,OAAO,WAAWA,cAAa;AAC7B,cAAM,QAAQ;AAAA,UACZ;AAAA,sBACY,SAAS;AAAA;AAAA,UAErB,CAACE,0BAAkBF,UAAS,QAAQ,EAAE,EAAE;AAAA,QAAA;AAAA,MAE5C;AAAA,IAAA;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAgB,8BACd,UACA,SACA,mBACA,SAKkC;AAClC,QACE,OAAQ,SAAS,WAAW,OAAe,OAAO,WAAW,YA
C7D;AACA,YAAM,IAAI,MAAM;AAAA,wEACkD;AAAA,IACpE;AAEA,UAAM,EAAE,WAAW,kBAAkB,eAAA,IACnC,SAAS,WACN,OACH,MAAM,QAAA;AAER,UAAM,QAAQC,OAAAA,cAAc,SAAS,IAAI,UAAU,cAAc;AAEjE,QAAI,CAAC,mBAAmB;AACtB,aAAO;AAAA,IACT;AAGA,UAAM,gBAAgB,MAAM,QAAQ;AAAA,MAClCA,+CAAwC,gBAAgB;AAAA,IAAA;AAE1D,WAAO;AAAA,MACL;AAAA,MACA,IAAI,cAAc;AAAA,MAClB,WAAWE,QAAAA,wBAAwB,SAAS,IAAI;AAAA,MAChD,WAAW,cAAc;AAAA,IAAA;AAAA,EAE7B;AACF;;"}
package/dist/cjs/PowerSyncTransactor.d.cts
ADDED
@@ -0,0 +1,60 @@
+import { PendingOperationStore, PendingOperation } from './PendingOperationStore.cjs';
+import { AbstractPowerSyncDatabase, LockContext } from '@powersync/common';
+import { PendingMutation, Transaction } from '@tanstack/db';
+export type TransactorOptions = {
+    database: AbstractPowerSyncDatabase;
+};
+/**
+ * Applies mutations to the PowerSync database. This method is called automatically by the collection's
+ * insert, update, and delete operations. You typically don't need to call this directly unless you
+ * have special transaction requirements.
+ *
+ * @example
+ * ```typescript
+ * // Create a collection
+ * const collection = createCollection(
+ *   powerSyncCollectionOptions<Document>({
+ *     database: db,
+ *     table: APP_SCHEMA.props.documents,
+ *   })
+ * )
+ *
+ * const addTx = createTransaction({
+ *   autoCommit: false,
+ *   mutationFn: async ({ transaction }) => {
+ *     await new PowerSyncTransactor({ database: db }).applyTransaction(transaction)
+ *   },
+ * })
+ *
+ * addTx.mutate(() => {
+ *   for (let i = 0; i < 5; i++) {
+ *     collection.insert({ id: randomUUID(), name: `tx-${i}` })
+ *   }
+ * })
+ *
+ * await addTx.commit()
+ * await addTx.isPersisted.promise
+ * ```
+ *
+ * @param transaction - The transaction containing mutations to apply
+ * @returns A promise that resolves when the mutations have been persisted to PowerSync
+ */
+export declare class PowerSyncTransactor {
+    database: AbstractPowerSyncDatabase;
+    pendingOperationStore: PendingOperationStore;
+    constructor(options: TransactorOptions);
+    /**
+     * Persists a {@link Transaction} to the PowerSync SQLite database.
+     */
+    applyTransaction(transaction: Transaction<any>): Promise<void>;
+    protected handleInsert(mutation: PendingMutation<any>, context: LockContext, waitForCompletion?: boolean): Promise<PendingOperation | null>;
+    protected handleUpdate(mutation: PendingMutation<any>, context: LockContext, waitForCompletion?: boolean): Promise<PendingOperation | null>;
+    protected handleDelete(mutation: PendingMutation<any>, context: LockContext, waitForCompletion?: boolean): Promise<PendingOperation | null>;
+    /**
+     * Helper function which wraps a persistence operation by:
+     * - Fetching the mutation's collection's SQLite table details
+     * - Executing the mutation
+     * - Returning the last pending diff operation if required
+     */
+    protected handleOperationWithCompletion(mutation: PendingMutation<any>, context: LockContext, waitForCompletion: boolean, handler: (tableName: string, mutation: PendingMutation<any>, serializeValue: (value: any) => Record<string, unknown>) => Promise<void>): Promise<PendingOperation | null>;
+}
package/dist/cjs/definitions.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"definitions.cjs","sources":["../../src/definitions.ts"],"sourcesContent":["import type { AbstractPowerSyncDatabase, Table } from \"@powersync/common\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n InferSchemaOutput,\n} from \"@tanstack/db\"\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n OptionalExtractedTable,\n PowerSyncRecord,\n} from \"./helpers\"\n\n/**\n * Small helper which determines the output type if:\n * - Standard SQLite types are to be used OR\n * - If the provided schema should be used.\n */\nexport type InferPowerSyncOutputType<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<PowerSyncRecord> = never,\n> = TSchema extends never ? ExtractedTable<TTable> : InferSchemaOutput<TSchema>\n\n/**\n * A mapping type for custom serialization of object properties to SQLite-compatible values.\n *\n * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms\n * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization.\n *\n * ## Generics\n * - `TOutput`: The input object type, representing the row data to be serialized.\n * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema.\n *\n * ## Usage\n * Use this type to define a map of serialization functions for specific keys when you need custom handling\n * (e.g., converting complex objects, formatting dates, or handling enums).\n *\n * Example:\n * ```ts\n * const serializer: CustomSQLiteSerializer<MyRowType, MySQLiteType> = {\n * createdAt: (date) => date.toISOString(),\n * status: (status) => status ? 1 : 0,\n * meta: (meta) => JSON.stringify(meta),\n * };\n * ```\n *\n * ## Behavior\n * - Each key maps to a function that receives the value and returns the SQLite-compatible value.\n * - Used by `serializeForSQLite` to override default serialization for specific columns.\n */\nexport type CustomSQLiteSerializer<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = Partial<{\n [Key in keyof TOutput]: (\n value: TOutput[Key]\n ) => Key extends keyof TSQLite ? TSQLite[Key] : never\n}>\n\nexport type SerializerConfig<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = {\n /**\n * Optional partial serializer object for customizing how individual columns are serialized for SQLite.\n *\n * This should be a partial map of column keys to serialization functions, following the\n * {@link CustomSQLiteSerializer} type. 
Each function receives the column value and returns a value\n * compatible with SQLite storage.\n *\n * If not provided for a column, the default behavior is used:\n * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified.\n * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0.\n *\n * Use this option to override serialization for specific columns, such as formatting dates, handling enums,\n * or serializing complex objects.\n *\n * Example:\n * ```typescript\n * serializer: {\n * createdAt: (date) => date.getTime(), // Store as timestamp\n * meta: (meta) => JSON.stringify(meta), // Custom object serialization\n * }\n * ```\n */\n serializer?: CustomSQLiteSerializer<TOutput, TSQLite>\n\n /**\n * Application logic should ensure that incoming synced data is always valid.\n * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error.\n * Use this callback to react to deserialization errors.\n */\n onDeserializationError: (error: StandardSchemaV1.FailureResult) => void\n}\n\n/**\n * Config for when TInput and TOutput are both the SQLite types.\n */\nexport type ConfigWithSQLiteTypes = {}\n\n/**\n * Config where TInput is the SQLite types while TOutput can be defined by TSchema.\n * We can use the same schema to validate TInput and incoming SQLite changes.\n */\nexport type ConfigWithSQLiteInputType<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types.\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n}\n\n/**\n * Config where TInput and TOutput have arbitrarily typed values.\n * The keys of the types need to equal the SQLite types.\n * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput.\n */\nexport type ConfigWithArbitraryCollectionTypes<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n /**\n * Schema for deserializing and validating input data from the sync stream.\n *\n * This schema defines how to transform and validate data coming from SQLite types (as stored in the database)\n * into the desired output types (`TOutput`) expected by your application or validation logic.\n *\n * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules\n * for each column. This is especially useful when your application expects richer types (e.g., Date, enums, objects)\n * than what SQLite natively supports.\n *\n * Use this to ensure that incoming data from the sync stream is properly converted and validated before use.\n *\n * Example:\n * ```typescript\n * deserializationSchema: z.object({\n * createdAt: z.preprocess((val) => new Date(val as string), z.date()),\n * meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... 
})),\n * })\n * ```\n *\n * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage\n * and your application's expected types.\n */\n deserializationSchema: StandardSchemaV1<\n ExtractedTable<TTable>,\n StandardSchemaV1.InferOutput<TSchema>\n >\n}\nexport type BasePowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1 = never,\n> = Omit<\n BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>,\n `onInsert` | `onUpdate` | `onDelete` | `getKey`\n> & {\n /** The PowerSync schema Table definition */\n table: TTable\n /** The PowerSync database instance */\n database: AbstractPowerSyncDatabase\n /**\n * The maximum number of documents to read from the SQLite table\n * in a single batch during the initial sync between PowerSync and the\n * in-memory TanStack DB collection.\n *\n * @remarks\n * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.\n * - Larger values reduce the number of round trips to the storage\n * engine but increase memory usage per batch.\n * - Smaller values may lower memory usage and allow earlier\n * streaming of initial results, at the cost of more query calls.\n */\n syncBatchSize?: number\n}\n\n/**\n * Configuration interface for PowerSync collection options.\n * @template TTable - The PowerSync table schema definition\n * @template TSchema - The validation schema type\n */\n/**\n * Configuration options for creating a PowerSync collection.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport type PowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<any> = never,\n> = BasePowerSyncCollectionConfig<TTable, TSchema> &\n (\n | ConfigWithSQLiteTypes\n | ConfigWithSQLiteInputType<TTable, TSchema>\n | ConfigWithArbitraryCollectionTypes<TTable, TSchema>\n )\n\n/**\n * Metadata for the PowerSync Collection.\n */\nexport type PowerSyncCollectionMeta<TTable extends Table = Table> = {\n /**\n * The SQLite table representing the collection.\n */\n tableName: string\n /**\n * The internal table used to track diffs for the collection.\n */\n trackedTableName: string\n\n /**\n * Serializes a collection value to the SQLite type\n */\n serializeValue: (value: any) => ExtractedTable<TTable>\n}\n\n/**\n * A CollectionConfig which includes utilities for PowerSync.\n */\nexport type EnhancedPowerSyncCollectionConfig<\n TTable extends Table,\n OutputType extends Record<string, unknown> = Record<string, unknown>,\n TSchema extends StandardSchemaV1 = never,\n> = CollectionConfig<OutputType, string, TSchema> & {\n id?: string\n utils: PowerSyncCollectionUtils<TTable>\n schema?: TSchema\n}\n\n/**\n * Collection-level utilities for PowerSync.\n */\nexport type PowerSyncCollectionUtils<TTable extends Table = Table> = {\n getMeta: () => PowerSyncCollectionMeta<TTable>\n}\n\n/**\n * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}.\n */\nexport const DEFAULT_BATCH_SIZE = 1000\n"],"names":[],"mappings":";;AAiRO,MAAM,qBAAqB;;"}
package/dist/cjs/definitions.d.cts
ADDED
@@ -0,0 +1,200 @@
+import { AbstractPowerSyncDatabase, Table } from '@powersync/common';
+import { StandardSchemaV1 } from '@standard-schema/spec';
+import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput } from '@tanstack/db';
+import { AnyTableColumnType, ExtractedTable, OptionalExtractedTable, PowerSyncRecord } from './helpers.cjs';
+/**
+ * Small helper which determines the output type if:
+ * - Standard SQLite types are to be used OR
+ * - If the provided schema should be used.
+ */
+export type InferPowerSyncOutputType<TTable extends Table = Table, TSchema extends StandardSchemaV1<PowerSyncRecord> = never> = TSchema extends never ? ExtractedTable<TTable> : InferSchemaOutput<TSchema>;
+/**
+ * A mapping type for custom serialization of object properties to SQLite-compatible values.
+ *
+ * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms
+ * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization.
+ *
+ * ## Generics
+ * - `TOutput`: The input object type, representing the row data to be serialized.
+ * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema.
+ *
+ * ## Usage
+ * Use this type to define a map of serialization functions for specific keys when you need custom handling
+ * (e.g., converting complex objects, formatting dates, or handling enums).
+ *
+ * Example:
+ * ```ts
+ * const serializer: CustomSQLiteSerializer<MyRowType, MySQLiteType> = {
+ *   createdAt: (date) => date.toISOString(),
+ *   status: (status) => status ? 1 : 0,
+ *   meta: (meta) => JSON.stringify(meta),
+ * };
+ * ```
+ *
+ * ## Behavior
+ * - Each key maps to a function that receives the value and returns the SQLite-compatible value.
+ * - Used by `serializeForSQLite` to override default serialization for specific columns.
+ */
+export type CustomSQLiteSerializer<TOutput extends Record<string, unknown>, TSQLite extends Record<string, unknown>> = Partial<{
+    [Key in keyof TOutput]: (value: TOutput[Key]) => Key extends keyof TSQLite ? TSQLite[Key] : never;
+}>;
+export type SerializerConfig<TOutput extends Record<string, unknown>, TSQLite extends Record<string, unknown>> = {
+    /**
+     * Optional partial serializer object for customizing how individual columns are serialized for SQLite.
+     *
+     * This should be a partial map of column keys to serialization functions, following the
+     * {@link CustomSQLiteSerializer} type. Each function receives the column value and returns a value
+     * compatible with SQLite storage.
+     *
+     * If not provided for a column, the default behavior is used:
+     * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified.
+     * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0.
+     *
+     * Use this option to override serialization for specific columns, such as formatting dates, handling enums,
+     * or serializing complex objects.
+     *
+     * Example:
+     * ```typescript
+     * serializer: {
+     *   createdAt: (date) => date.getTime(), // Store as timestamp
+     *   meta: (meta) => JSON.stringify(meta), // Custom object serialization
+     * }
+     * ```
+     */
+    serializer?: CustomSQLiteSerializer<TOutput, TSQLite>;
+    /**
+     * Application logic should ensure that incoming synced data is always valid.
+     * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error.
+     * Use this callback to react to deserialization errors.
+     */
+    onDeserializationError: (error: StandardSchemaV1.FailureResult) => void;
+};
+/**
+ * Config for when TInput and TOutput are both the SQLite types.
+ */
+export type ConfigWithSQLiteTypes = {};
+/**
+ * Config where TInput is the SQLite types while TOutput can be defined by TSchema.
+ * We can use the same schema to validate TInput and incoming SQLite changes.
+ */
+export type ConfigWithSQLiteInputType<TTable extends Table, TSchema extends StandardSchemaV1<OptionalExtractedTable<TTable>, AnyTableColumnType<TTable>>> = SerializerConfig<StandardSchemaV1.InferOutput<TSchema>, ExtractedTable<TTable>> & {
+    schema: TSchema;
+};
+/**
+ * Config where TInput and TOutput have arbitrarily typed values.
+ * The keys of the types need to equal the SQLite types.
+ * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput.
+ */
+export type ConfigWithArbitraryCollectionTypes<TTable extends Table, TSchema extends StandardSchemaV1<AnyTableColumnType<TTable>, AnyTableColumnType<TTable>>> = SerializerConfig<StandardSchemaV1.InferOutput<TSchema>, ExtractedTable<TTable>> & {
+    schema: TSchema;
+    /**
+     * Schema for deserializing and validating input data from the sync stream.
+     *
+     * This schema defines how to transform and validate data coming from SQLite types (as stored in the database)
+     * into the desired output types (`TOutput`) expected by your application or validation logic.
+     *
+     * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules
+     * for each column. This is especially useful when your application expects richer types (e.g., Date, enums, objects)
+     * than what SQLite natively supports.
+     *
+     * Use this to ensure that incoming data from the sync stream is properly converted and validated before use.
+     *
+     * Example:
+     * ```typescript
+     * deserializationSchema: z.object({
+     *   createdAt: z.preprocess((val) => new Date(val as string), z.date()),
+     *   meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... })),
+     * })
+     * ```
+     *
+     * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage
+     * and your application's expected types.
+     */
+    deserializationSchema: StandardSchemaV1<ExtractedTable<TTable>, StandardSchemaV1.InferOutput<TSchema>>;
+};
+export type BasePowerSyncCollectionConfig<TTable extends Table = Table, TSchema extends StandardSchemaV1 = never> = Omit<BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey`> & {
+    /** The PowerSync schema Table definition */
+    table: TTable;
+    /** The PowerSync database instance */
+    database: AbstractPowerSyncDatabase;
+    /**
+     * The maximum number of documents to read from the SQLite table
+     * in a single batch during the initial sync between PowerSync and the
+     * in-memory TanStack DB collection.
+     *
+     * @remarks
+     * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.
+     * - Larger values reduce the number of round trips to the storage
+     *   engine but increase memory usage per batch.
+     * - Smaller values may lower memory usage and allow earlier
+     *   streaming of initial results, at the cost of more query calls.
+     */
+    syncBatchSize?: number;
+};
+/**
+ * Configuration interface for PowerSync collection options.
+ * @template TTable - The PowerSync table schema definition
+ * @template TSchema - The validation schema type
+ */
+/**
+ * Configuration options for creating a PowerSync collection.
+ *
+ * @example
+ * ```typescript
+ * const APP_SCHEMA = new Schema({
+ *   documents: new Table({
+ *     name: column.text,
+ *   }),
+ * })
+ *
+ * const db = new PowerSyncDatabase({
+ *   database: {
+ *     dbFilename: "test.sqlite",
+ *   },
+ *   schema: APP_SCHEMA,
+ * })
+ *
+ * const collection = createCollection(
+ *   powerSyncCollectionOptions({
+ *     database: db,
+ *     table: APP_SCHEMA.props.documents
+ *   })
+ * )
+ * ```
+ */
+export type PowerSyncCollectionConfig<TTable extends Table = Table, TSchema extends StandardSchemaV1<any> = never> = BasePowerSyncCollectionConfig<TTable, TSchema> & (ConfigWithSQLiteTypes | ConfigWithSQLiteInputType<TTable, TSchema> | ConfigWithArbitraryCollectionTypes<TTable, TSchema>);
+/**
+ * Metadata for the PowerSync Collection.
+ */
+export type PowerSyncCollectionMeta<TTable extends Table = Table> = {
+    /**
+     * The SQLite table representing the collection.
+     */
+    tableName: string;
+    /**
+     * The internal table used to track diffs for the collection.
+     */
+    trackedTableName: string;
+    /**
+     * Serializes a collection value to the SQLite type
+     */
+    serializeValue: (value: any) => ExtractedTable<TTable>;
+};
+/**
+ * A CollectionConfig which includes utilities for PowerSync.
+ */
+export type EnhancedPowerSyncCollectionConfig<TTable extends Table, OutputType extends Record<string, unknown> = Record<string, unknown>, TSchema extends StandardSchemaV1 = never> = CollectionConfig<OutputType, string, TSchema> & {
+    id?: string;
+    utils: PowerSyncCollectionUtils<TTable>;
+    schema?: TSchema;
+};
+/**
+ * Collection-level utilities for PowerSync.
+ */
+export type PowerSyncCollectionUtils<TTable extends Table = Table> = {
+    getMeta: () => PowerSyncCollectionMeta<TTable>;
+};
+/**
+ * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}.
+ */
+export declare const DEFAULT_BATCH_SIZE = 1000;
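Tying the serializer and deserialization pieces together: a sketch of a config that keeps `created_at` as a `Date` in the collection while storing it as TEXT in SQLite. The option names (`schema`, `serializer`, `deserializationSchema`, `onDeserializationError`) come from the types above; the Zod schemas, the `declare const db`, and the package entry point for `powerSyncCollectionOptions` are illustrative assumptions:

```typescript
import { column, Schema, Table } from "@powersync/common"
import { createCollection } from "@tanstack/db"
// Assumed entry point; the function itself is presumably declared in powersync.d.cts below.
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"
import { z } from "zod" // Zod >= 3.24 implements Standard Schema V1
import type { AbstractPowerSyncDatabase } from "@powersync/common"

declare const db: AbstractPowerSyncDatabase // assume an opened PowerSync database

const APP_SCHEMA = new Schema({
  documents: new Table({
    created_at: column.text, // stored as TEXT in SQLite
  }),
})

const documents = createCollection(
  powerSyncCollectionOptions({
    database: db,
    table: APP_SCHEMA.props.documents,
    // App-facing shape: created_at is a Date, not a string
    schema: z.object({ id: z.string(), created_at: z.date() }),
    // Outgoing: app value -> SQLite value
    serializer: {
      created_at: (date: Date) => date.toISOString(),
    },
    // Incoming: SQLite row -> app value, validated before use
    deserializationSchema: z.object({
      id: z.string(),
      created_at: z.preprocess((val) => new Date(val as string), z.date()),
    }),
    // Per the docs above, failed deserialization is a fatal consistency error
    onDeserializationError: (error) => {
      console.error(`Could not deserialize synced row`, error.issues)
    },
  })
)
```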
package/dist/cjs/helpers.cjs
ADDED
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const common = require("@powersync/common");
+function asPowerSyncRecord(record) {
+  if (typeof record.id !== `string`) {
+    throw new Error(`Record must have a string id field`);
+  }
+  return record;
+}
+function mapOperation(operation) {
+  switch (operation) {
+    case common.DiffTriggerOperation.INSERT:
+      return `insert`;
+    case common.DiffTriggerOperation.UPDATE:
+      return `update`;
+    case common.DiffTriggerOperation.DELETE:
+      return `delete`;
+  }
+}
+function mapOperationToPowerSync(operation) {
+  switch (operation) {
+    case `insert`:
+      return common.DiffTriggerOperation.INSERT;
+    case `update`:
+      return common.DiffTriggerOperation.UPDATE;
+    case `delete`:
+      return common.DiffTriggerOperation.DELETE;
+    default:
+      throw new Error(`Unknown operation ${operation} received`);
+  }
+}
+exports.asPowerSyncRecord = asPowerSyncRecord;
+exports.mapOperation = mapOperation;
+exports.mapOperationToPowerSync = mapOperationToPowerSync;
+//# sourceMappingURL=helpers.cjs.map
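A quick sketch of the two mapping helpers round-tripping between PowerSync diff-trigger operations and TanStack DB operation strings; the relative import path is an assumption:

```typescript
import { DiffTriggerOperation } from "@powersync/common"
// Assumed path; within the built package these are exported from helpers.cjs.
import { mapOperation, mapOperationToPowerSync } from "./helpers.cjs"

// DiffTriggerOperation -> TanStack DB operation string, and back again.
console.log(mapOperation(DiffTriggerOperation.INSERT)) // "insert"
console.log(mapOperationToPowerSync(`update`) === DiffTriggerOperation.UPDATE) // true

// Unknown strings are rejected rather than silently mapped:
try {
  mapOperationToPowerSync(`upsert`)
} catch (error) {
  console.log((error as Error).message) // "Unknown operation upsert received"
}
```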
package/dist/cjs/helpers.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"helpers.cjs","sources":["../../src/helpers.ts"],"sourcesContent":["import { DiffTriggerOperation } from \"@powersync/common\"\nimport type {\n BaseColumnType,\n ExtractColumnValueType,\n Table,\n} from \"@powersync/common\"\n\n/**\n * All PowerSync table records include a UUID `id` column.\n */\nexport type PowerSyncRecord = {\n id: string\n [key: string]: unknown\n}\n\n/**\n * Utility type: If T includes null, also allow undefined (to support optional fields in insert/update operations).\n * PowerSync records are typically typed as `string | null`, where insert\n * and update operations may also allow not specifying a value at all (optional).\n */\ntype WithUndefinedIfNull<T> = null extends T ? T | undefined : T\ntype OptionalIfUndefined<T> = {\n [K in keyof T as undefined extends T[K] ? K : never]?: T[K]\n} & {\n [K in keyof T as undefined extends T[K] ? never : K]: T[K]\n}\n\n/**\n * Provides the base column types for a table. This excludes the `id` column.\n */\nexport type ExtractedTableColumns<TTable extends Table> = {\n [K in keyof TTable[`columnMap`]]: ExtractColumnValueType<\n TTable[`columnMap`][K]\n >\n}\n/**\n * Utility type that extracts the typed structure of a table based on its column definitions.\n * Maps each column to its corresponding TypeScript type using ExtractColumnValueType.\n *\n * @template TTable - The PowerSync table definition\n * @example\n * ```typescript\n * const table = new Table({\n * name: column.text,\n * age: column.integer\n * })\n * type TableType = ExtractedTable<typeof table>\n * // Results in: { id: string, name: string | null, age: number | null }\n * ```\n */\nexport type ExtractedTable<TTable extends Table> =\n ExtractedTableColumns<TTable> & {\n id: string\n }\n\nexport type OptionalExtractedTable<TTable extends Table> = OptionalIfUndefined<{\n [K in keyof TTable[`columnMap`]]: WithUndefinedIfNull<\n ExtractColumnValueType<TTable[`columnMap`][K]>\n >\n}> & {\n id: string\n}\n\n/**\n * Maps the schema of TTable to a type which\n * requires the keys be equal, but the values can have any value type.\n */\nexport type AnyTableColumnType<TTable extends Table> = {\n [K in keyof TTable[`columnMap`]]: any\n} & { id: string }\n\nexport function asPowerSyncRecord(record: any): PowerSyncRecord {\n if (typeof record.id !== `string`) {\n throw new Error(`Record must have a string id field`)\n }\n return record as PowerSyncRecord\n}\n\n// Helper type to ensure the keys of TOutput match the Table columns\nexport type MapBaseColumnType<TOutput> = {\n [Key in keyof TOutput]: BaseColumnType<any>\n}\n\n/**\n * Maps {@link DiffTriggerOperation} to TanstackDB operations\n */\nexport function mapOperation(operation: DiffTriggerOperation) {\n switch (operation) {\n case DiffTriggerOperation.INSERT:\n return `insert`\n case DiffTriggerOperation.UPDATE:\n return `update`\n case DiffTriggerOperation.DELETE:\n return `delete`\n }\n}\n\n/**\n * Maps TanstackDB operations to {@link DiffTriggerOperation}\n */\nexport function mapOperationToPowerSync(operation: string) {\n switch (operation) {\n case `insert`:\n return DiffTriggerOperation.INSERT\n case `update`:\n return DiffTriggerOperation.UPDATE\n case `delete`:\n return DiffTriggerOperation.DELETE\n default:\n throw new Error(`Unknown operation ${operation} received`)\n 
}\n}\n"],"names":["DiffTriggerOperation"],"mappings":";;;AAuEO,SAAS,kBAAkB,QAA8B;AAC9D,MAAI,OAAO,OAAO,OAAO,UAAU;AACjC,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AACA,SAAO;AACT;AAUO,SAAS,aAAa,WAAiC;AAC5D,UAAQ,WAAA;AAAA,IACN,KAAKA,OAAAA,qBAAqB;AACxB,aAAO;AAAA,IACT,KAAKA,OAAAA,qBAAqB;AACxB,aAAO;AAAA,IACT,KAAKA,OAAAA,qBAAqB;AACxB,aAAO;AAAA,EAAA;AAEb;AAKO,SAAS,wBAAwB,WAAmB;AACzD,UAAQ,WAAA;AAAA,IACN,KAAK;AACH,aAAOA,OAAAA,qBAAqB;AAAA,IAC9B,KAAK;AACH,aAAOA,OAAAA,qBAAqB;AAAA,IAC9B,KAAK;AACH,aAAOA,OAAAA,qBAAqB;AAAA,IAC9B;AACE,YAAM,IAAI,MAAM,qBAAqB,SAAS,WAAW;AAAA,EAAA;AAE/D;;;;"}