@tanstack/powersync-db-collection 0.0.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/cjs/PendingOperationStore.cjs +33 -0
- package/dist/cjs/PendingOperationStore.cjs.map +1 -0
- package/dist/cjs/PendingOperationStore.d.cts +29 -0
- package/dist/cjs/PowerSyncTransactor.cjs +158 -0
- package/dist/cjs/PowerSyncTransactor.cjs.map +1 -0
- package/dist/cjs/PowerSyncTransactor.d.cts +60 -0
- package/dist/cjs/definitions.cjs +5 -0
- package/dist/cjs/definitions.cjs.map +1 -0
- package/dist/cjs/definitions.d.cts +200 -0
- package/dist/cjs/helpers.cjs +35 -0
- package/dist/cjs/helpers.cjs.map +1 -0
- package/dist/cjs/helpers.d.cts +70 -0
- package/dist/cjs/index.cjs +9 -0
- package/dist/cjs/index.cjs.map +1 -0
- package/dist/cjs/index.d.cts +3 -0
- package/dist/cjs/powersync.cjs +200 -0
- package/dist/cjs/powersync.cjs.map +1 -0
- package/dist/cjs/powersync.d.cts +145 -0
- package/dist/cjs/schema.cjs +65 -0
- package/dist/cjs/schema.cjs.map +1 -0
- package/dist/cjs/schema.d.cts +21 -0
- package/dist/cjs/serialization.cjs +47 -0
- package/dist/cjs/serialization.cjs.map +1 -0
- package/dist/cjs/serialization.d.cts +34 -0
- package/dist/esm/PendingOperationStore.d.ts +29 -0
- package/dist/esm/PendingOperationStore.js +33 -0
- package/dist/esm/PendingOperationStore.js.map +1 -0
- package/dist/esm/PowerSyncTransactor.d.ts +60 -0
- package/dist/esm/PowerSyncTransactor.js +158 -0
- package/dist/esm/PowerSyncTransactor.js.map +1 -0
- package/dist/esm/definitions.d.ts +200 -0
- package/dist/esm/definitions.js +5 -0
- package/dist/esm/definitions.js.map +1 -0
- package/dist/esm/helpers.d.ts +70 -0
- package/dist/esm/helpers.js +35 -0
- package/dist/esm/helpers.js.map +1 -0
- package/dist/esm/index.d.ts +3 -0
- package/dist/esm/index.js +9 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/powersync.d.ts +145 -0
- package/dist/esm/powersync.js +200 -0
- package/dist/esm/powersync.js.map +1 -0
- package/dist/esm/schema.d.ts +21 -0
- package/dist/esm/schema.js +65 -0
- package/dist/esm/schema.js.map +1 -0
- package/dist/esm/serialization.d.ts +34 -0
- package/dist/esm/serialization.js +47 -0
- package/dist/esm/serialization.js.map +1 -0
- package/package.json +8 -9
package/dist/cjs/helpers.d.cts
@@ -0,0 +1,70 @@
+import { DiffTriggerOperation, BaseColumnType, ExtractColumnValueType, Table } from '@powersync/common';
+/**
+ * All PowerSync table records include a UUID `id` column.
+ */
+export type PowerSyncRecord = {
+    id: string;
+    [key: string]: unknown;
+};
+/**
+ * Utility type: If T includes null, also allow undefined (to support optional fields in insert/update operations).
+ * PowerSync records are typically typed as `string | null`, where insert
+ * and update operations may also allow not specifying a value at all (optional).
+ */
+type WithUndefinedIfNull<T> = null extends T ? T | undefined : T;
+type OptionalIfUndefined<T> = {
+    [K in keyof T as undefined extends T[K] ? K : never]?: T[K];
+} & {
+    [K in keyof T as undefined extends T[K] ? never : K]: T[K];
+};
+/**
+ * Provides the base column types for a table. This excludes the `id` column.
+ */
+export type ExtractedTableColumns<TTable extends Table> = {
+    [K in keyof TTable[`columnMap`]]: ExtractColumnValueType<TTable[`columnMap`][K]>;
+};
+/**
+ * Utility type that extracts the typed structure of a table based on its column definitions.
+ * Maps each column to its corresponding TypeScript type using ExtractColumnValueType.
+ *
+ * @template TTable - The PowerSync table definition
+ * @example
+ * ```typescript
+ * const table = new Table({
+ *   name: column.text,
+ *   age: column.integer
+ * })
+ * type TableType = ExtractedTable<typeof table>
+ * // Results in: { id: string, name: string | null, age: number | null }
+ * ```
+ */
+export type ExtractedTable<TTable extends Table> = ExtractedTableColumns<TTable> & {
+    id: string;
+};
+export type OptionalExtractedTable<TTable extends Table> = OptionalIfUndefined<{
+    [K in keyof TTable[`columnMap`]]: WithUndefinedIfNull<ExtractColumnValueType<TTable[`columnMap`][K]>>;
+}> & {
+    id: string;
+};
+/**
+ * Maps the schema of TTable to a type which
+ * requires the keys be equal, but the values can have any value type.
+ */
+export type AnyTableColumnType<TTable extends Table> = {
+    [K in keyof TTable[`columnMap`]]: any;
+} & {
+    id: string;
+};
+export declare function asPowerSyncRecord(record: any): PowerSyncRecord;
+export type MapBaseColumnType<TOutput> = {
+    [Key in keyof TOutput]: BaseColumnType<any>;
+};
+/**
+ * Maps {@link DiffTriggerOperation} to TanstackDB operations
+ */
+export declare function mapOperation(operation: DiffTriggerOperation): "insert" | "update" | "delete";
+/**
+ * Maps TanstackDB operations to {@link DiffTriggerOperation}
+ */
+export declare function mapOperationToPowerSync(operation: string): DiffTriggerOperation;
+export {};
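
These helper types drive the collection's typing: `ExtractedTable` is the full row shape (the SQLite column types plus the `id` column), while `OptionalExtractedTable` additionally lets nullable columns be omitted in insert/update payloads. A minimal type-level sketch, assuming the types can be imported from the built helpers module (this diff does not show whether the package root re-exports them):

```typescript
import { column, Table } from "@powersync/common"
// Hypothetical deep import: the declarations live in dist/esm/helpers.d.ts;
// this diff does not show a root re-export for them.
import type {
  ExtractedTable,
  OptionalExtractedTable,
} from "@tanstack/powersync-db-collection/dist/esm/helpers"

const documents = new Table({
  name: column.text, // SQLite TEXT -> string | null
  views: column.integer, // SQLite INTEGER -> number | null
})

// Full row shape, as read back from the collection:
type DocumentRow = ExtractedTable<typeof documents>
// { id: string; name: string | null; views: number | null }

// Insert/update shape: nullable columns become optional as well:
type DocumentInsert = OptionalExtractedTable<typeof documents>

const minimal: DocumentInsert = { id: `1` } // `name`/`views` may be omitted
const full: DocumentRow = { id: `1`, name: `a`, views: 0 }
```
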
package/dist/cjs/index.cjs
@@ -0,0 +1,9 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const definitions = require("./definitions.cjs");
+const powersync = require("./powersync.cjs");
+const PowerSyncTransactor = require("./PowerSyncTransactor.cjs");
+exports.DEFAULT_BATCH_SIZE = definitions.DEFAULT_BATCH_SIZE;
+exports.powerSyncCollectionOptions = powersync.powerSyncCollectionOptions;
+exports.PowerSyncTransactor = PowerSyncTransactor.PowerSyncTransactor;
+//# sourceMappingURL=index.cjs.map
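
The root entry point therefore exposes exactly three symbols. A quick-start sketch combining them with the JSDoc examples further down (assumptions: `createCollection` comes from `@tanstack/db`, and `PowerSyncDatabase` from a platform package such as `@powersync/web`; neither import is shown in this diff):

```typescript
import { createCollection } from "@tanstack/db"
import { column, Schema, Table } from "@powersync/common"
// Assumption: PowerSyncDatabase is provided by a platform package.
import { PowerSyncDatabase } from "@powersync/web"
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"

const APP_SCHEMA = new Schema({
  documents: new Table({ name: column.text }),
})

const db = new PowerSyncDatabase({
  database: { dbFilename: `test.sqlite` },
  schema: APP_SCHEMA,
})

// Overload 1 (no schema): rows are typed with the raw SQLite column types.
const collection = createCollection(
  powerSyncCollectionOptions({ database: db, table: APP_SCHEMA.props.documents })
)
```
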
package/dist/cjs/index.cjs.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
package/dist/cjs/powersync.cjs
@@ -0,0 +1,200 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const common = require("@powersync/common");
+const PendingOperationStore = require("./PendingOperationStore.cjs");
+const PowerSyncTransactor = require("./PowerSyncTransactor.cjs");
+const definitions = require("./definitions.cjs");
+const helpers = require("./helpers.cjs");
+const schema = require("./schema.cjs");
+const serialization = require("./serialization.cjs");
+function powerSyncCollectionOptions(config) {
+  const {
+    database,
+    table,
+    schema: inputSchema,
+    syncBatchSize = definitions.DEFAULT_BATCH_SIZE,
+    ...restConfig
+  } = config;
+  const deserializationSchema = `deserializationSchema` in config ? config.deserializationSchema : null;
+  const serializer = `serializer` in config ? config.serializer : void 0;
+  const onDeserializationError = `onDeserializationError` in config ? config.onDeserializationError : void 0;
+  const { viewName } = table;
+  const deserializeSyncRow = (value) => {
+    const validationSchema = deserializationSchema || schema$1;
+    const validation = validationSchema[`~standard`].validate(value);
+    if (`value` in validation) {
+      return validation.value;
+    } else if (`issues` in validation) {
+      const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}`;
+      database.logger.error(issueMessage);
+      onDeserializationError(validation);
+      throw new Error(issueMessage);
+    } else {
+      const unknownErrorMessage = `Unknown deserialization error for ${viewName}`;
+      database.logger.error(unknownErrorMessage);
+      onDeserializationError({ issues: [{ message: unknownErrorMessage }] });
+      throw new Error(unknownErrorMessage);
+    }
+  };
+  const schema$1 = inputSchema ?? schema.convertTableToSchema(table);
+  const pendingOperationStore = PendingOperationStore.PendingOperationStore.GLOBAL;
+  const trackedTableName = `__${viewName}_tracking_${Math.floor(
+    Math.random() * 4294967295
+  ).toString(16).padStart(8, `0`)}`;
+  const transactor = new PowerSyncTransactor.PowerSyncTransactor({
+    database
+  });
+  const sync = {
+    sync: (params) => {
+      const { begin, write, commit, markReady } = params;
+      const abortController = new AbortController();
+      async function start() {
+        database.logger.info(
+          `Sync is starting for ${viewName} into ${trackedTableName}`
+        );
+        database.onChangeWithCallback(
+          {
+            onChange: async () => {
+              await database.writeTransaction(async (context) => {
+                begin();
+                const operations = await context.getAll(
+                  `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`
+                );
+                const pendingOperations = [];
+                for (const op of operations) {
+                  const { id, operation, timestamp, value } = op;
+                  const parsedValue = deserializeSyncRow({
+                    id,
+                    ...JSON.parse(value)
+                  });
+                  const parsedPreviousValue = op.operation == common.DiffTriggerOperation.UPDATE ? deserializeSyncRow({
+                    id,
+                    ...JSON.parse(op.previous_value)
+                  }) : void 0;
+                  write({
+                    type: helpers.mapOperation(operation),
+                    value: parsedValue,
+                    previousValue: parsedPreviousValue
+                  });
+                  pendingOperations.push({
+                    id,
+                    operation,
+                    timestamp,
+                    tableName: viewName
+                  });
+                }
+                await context.execute(`DELETE FROM ${trackedTableName}`);
+                commit();
+                pendingOperationStore.resolvePendingFor(pendingOperations);
+              }).catch((error) => {
+                database.logger.error(
+                  `An error has been detected in the sync handler`,
+                  error
+                );
+              });
+            }
+          },
+          {
+            signal: abortController.signal,
+            triggerImmediate: false,
+            tables: [trackedTableName]
+          }
+        );
+        const disposeTracking = await database.triggers.createDiffTrigger({
+          source: viewName,
+          destination: trackedTableName,
+          when: {
+            [common.DiffTriggerOperation.INSERT]: `TRUE`,
+            [common.DiffTriggerOperation.UPDATE]: `TRUE`,
+            [common.DiffTriggerOperation.DELETE]: `TRUE`
+          },
+          hooks: {
+            beforeCreate: async (context) => {
+              let currentBatchCount = syncBatchSize;
+              let cursor = 0;
+              while (currentBatchCount == syncBatchSize) {
+                begin();
+                const batchItems = await context.getAll(
+                  common.sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? OFFSET ?`,
+                  [syncBatchSize, cursor]
+                );
+                currentBatchCount = batchItems.length;
+                cursor += currentBatchCount;
+                for (const row of batchItems) {
+                  write({
+                    type: `insert`,
+                    value: deserializeSyncRow(row)
+                  });
+                }
+                commit();
+              }
+              markReady();
+              database.logger.info(
+                `Sync is ready for ${viewName} into ${trackedTableName}`
+              );
+            }
+          }
+        });
+        if (abortController.signal.aborted) {
+          await disposeTracking();
+        } else {
+          abortController.signal.addEventListener(
+            `abort`,
+            () => {
+              disposeTracking();
+            },
+            { once: true }
+          );
+        }
+      }
+      start().catch(
+        (error) => database.logger.error(
+          `Could not start syncing process for ${viewName} into ${trackedTableName}`,
+          error
+        )
+      );
+      return () => {
+        database.logger.info(
+          `Sync has been stopped for ${viewName} into ${trackedTableName}`
+        );
+        abortController.abort();
+      };
+    },
+    // Expose the getSyncMetadata function
+    getSyncMetadata: void 0
+  };
+  const getKey = (record) => helpers.asPowerSyncRecord(record).id;
+  const outputConfig = {
+    ...restConfig,
+    schema: schema$1,
+    getKey,
+    // Syncing should start immediately since we need to monitor the changes for mutations
+    startSync: true,
+    sync,
+    onInsert: async (params) => {
+      return await transactor.applyTransaction(params.transaction);
+    },
+    onUpdate: async (params) => {
+      return await transactor.applyTransaction(params.transaction);
+    },
+    onDelete: async (params) => {
+      return await transactor.applyTransaction(params.transaction);
+    },
+    utils: {
+      getMeta: () => ({
+        tableName: viewName,
+        trackedTableName,
+        serializeValue: (value) => serialization.serializeForSQLite(
+          value,
+          // This is required by the input generic
+          table,
+          // Coerce serializer to the shape that corresponds to the Table constructed from OutputType
+          serializer
+        )
+      })
+    }
+  };
+  return outputConfig;
+}
+exports.powerSyncCollectionOptions = powerSyncCollectionOptions;
+//# sourceMappingURL=powersync.cjs.map
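
`deserializeSyncRow` above depends only on the Standard Schema contract: it calls `validationSchema[`~standard`].validate(value)` and branches on whether the result carries a `value` or an `issues` array. A minimal hand-rolled schema satisfying that contract, for illustration (any Standard Schema implementation, such as Zod, behaves the same way):

```typescript
import type { StandardSchemaV1 } from "@standard-schema/spec"

type Doc = { id: string; name: string | null }

// A hand-written StandardSchemaV1, mirroring the object shape that
// convertTableToSchema returns in schema.cjs below.
const docSchema: StandardSchemaV1<Doc> = {
  "~standard": {
    version: 1,
    vendor: `example`,
    validate: (value) => {
      const v = value as Partial<Doc> | null
      if (v == null || typeof v.id != `string`) {
        return { issues: [{ message: `id field must be a string`, path: [`id`] }] }
      }
      return { value: { id: v.id, name: v.name ?? null } }
    },
  },
}

// The same branching deserializeSyncRow uses (it assumes synchronous validation):
const result = docSchema[`~standard`].validate({ id: `1`, name: `hello` })
if (result instanceof Promise) throw new Error(`async schemas are not handled here`)
if (`value` in result) {
  console.log(result.value) // { id: "1", name: "hello" }
} else {
  console.error(result.issues)
}
```
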
package/dist/cjs/powersync.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"powersync.cjs","sources":["../../src/powersync.ts"],"sourcesContent":["import { DiffTriggerOperation, sanitizeSQL } from \"@powersync/common\"\nimport { PendingOperationStore } from \"./PendingOperationStore\"\nimport { PowerSyncTransactor } from \"./PowerSyncTransactor\"\nimport { DEFAULT_BATCH_SIZE } from \"./definitions\"\nimport { asPowerSyncRecord, mapOperation } from \"./helpers\"\nimport { convertTableToSchema } from \"./schema\"\nimport { serializeForSQLite } from \"./serialization\"\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n ExtractedTableColumns,\n MapBaseColumnType,\n OptionalExtractedTable,\n} from \"./helpers\"\nimport type {\n BasePowerSyncCollectionConfig,\n ConfigWithArbitraryCollectionTypes,\n ConfigWithSQLiteInputType,\n ConfigWithSQLiteTypes,\n CustomSQLiteSerializer,\n EnhancedPowerSyncCollectionConfig,\n InferPowerSyncOutputType,\n PowerSyncCollectionConfig,\n PowerSyncCollectionUtils,\n} from \"./definitions\"\nimport type { PendingOperation } from \"./PendingOperationStore\"\nimport type { SyncConfig } from \"@tanstack/db\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type { Table, TriggerDiffRecord } from \"@powersync/common\"\n\n/**\n * Creates PowerSync collection options for use with a standard Collection.\n *\n * @template TTable - The SQLite-based typing\n * @template TSchema - The validation schema type (optionally supports a custom input type)\n * @param config - Configuration options for the PowerSync collection\n * @returns Collection options with utilities\n */\n\n// Overload 1: No schema is provided\n\n/**\n * Creates a PowerSync collection configuration with basic default validation.\n * Input and Output types are the SQLite column types.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * type Document = (typeof APP_SCHEMA)[\"types\"][\"documents\"]\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<TTable extends Table = Table>(\n config: BasePowerSyncCollectionConfig<TTable, never> & ConfigWithSQLiteTypes\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n OptionalExtractedTable<TTable>,\n never\n>\n\n// Overload 2: Schema is provided and the TInput matches SQLite types.\n\n/**\n * Creates a PowerSync collection configuration with schema validation.\n *\n * The input types satisfy the SQLite column types.\n *\n * The output types are defined by the provided schema. This schema can enforce additional\n * validation or type transforms.\n * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard\n * serialization implementation to serialize column values. Custom or advanced types require providing additional\n * serializer specifications. Partial column overrides can be supplied to `serializer`.\n *\n * @example\n * ```typescript\n * import { z } from \"zod\"\n *\n * // The PowerSync SQLite schema\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * // Dates are stored as ISO date strings in SQLite\n * created_at: column.text\n * }),\n * })\n *\n * // Advanced Zod validations. 
The output type of this schema\n * // is constrained to the SQLite schema of APP_SCHEMA\n * const schema = z.object({\n * id: z.string(),\n * // Notice that `name` is not nullable (is required) here and it has additional validation\n * name: z.string().min(3, { message: \"Should be at least 3 characters\" }).nullable(),\n * // The input type is still the SQLite string type. While collections will output smart Date instances.\n * created_at: z.string().transform(val => new Date(val))\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * schema,\n * serializer: {\n * // The default is toISOString, this is just to demonstrate custom overrides\n * created_at: (outputValue) => outputValue.toISOString(),\n * },\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types. We can use the supplied schema to validate sync input\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n>(\n config: BasePowerSyncCollectionConfig<TTable, TSchema> &\n ConfigWithSQLiteInputType<TTable, TSchema>\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n InferPowerSyncOutputType<TTable, TSchema>,\n TSchema\n> & {\n schema: TSchema\n}\n\n// Overload 3: Schema is provided with arbitrary TInput and TOutput\n/**\n * Creates a PowerSync collection configuration with schema validation.\n *\n * The input types are not linked to the internal SQLite table types. This can\n * give greater flexibility, e.g. by accepting rich types as input for `insert` or `update` operations.\n * An additional `deserializationSchema` is required in order to process incoming SQLite updates to the output type.\n *\n * The output types are defined by the provided schema. This schema can enforce additional\n * validation or type transforms.\n * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard\n * serialization implementation to serialize column values. Custom or advanced types require providing additional\n * serializer specifications. Partial column overrides can be supplied to `serializer`.\n *\n * @example\n * ```typescript\n * import { z } from \"zod\"\n *\n * // The PowerSync SQLite schema\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * // Booleans are represented as integers in SQLite\n * is_active: column.integer\n * }),\n * })\n *\n * // Advanced Zod validations.\n * // We accept boolean values as input for operations and expose Booleans in query results\n * const schema = z.object({\n * id: z.string(),\n * isActive: z.boolean(), // TInput and TOutput are boolean\n * })\n *\n * // The deserializationSchema converts the SQLite synced INTEGER (0/1) values to booleans.\n * const deserializationSchema = z.object({\n * id: z.string(),\n * isActive: z.number().nullable().transform((val) => val == null ? 
true : val > 0),\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents,\n * schema,\n * deserializationSchema,\n * })\n * )\n * ```\n */\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n>(\n config: BasePowerSyncCollectionConfig<TTable, TSchema> &\n ConfigWithArbitraryCollectionTypes<TTable, TSchema>\n): EnhancedPowerSyncCollectionConfig<\n TTable,\n InferPowerSyncOutputType<TTable, TSchema>,\n TSchema\n> & {\n utils: PowerSyncCollectionUtils<TTable>\n schema: TSchema\n}\n\n/**\n * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations.\n */\n\nexport function powerSyncCollectionOptions<\n TTable extends Table,\n TSchema extends StandardSchemaV1<any> = never,\n>(config: PowerSyncCollectionConfig<TTable, TSchema>) {\n const {\n database,\n table,\n schema: inputSchema,\n syncBatchSize = DEFAULT_BATCH_SIZE,\n ...restConfig\n } = config\n\n const deserializationSchema =\n `deserializationSchema` in config ? config.deserializationSchema : null\n const serializer = `serializer` in config ? config.serializer : undefined\n const onDeserializationError =\n `onDeserializationError` in config\n ? config.onDeserializationError\n : undefined\n\n // The SQLite table type\n type TableType = ExtractedTable<TTable>\n\n // The collection output type\n type OutputType = InferPowerSyncOutputType<TTable, TSchema>\n\n const { viewName } = table\n\n /**\n * Deserializes data from the incoming sync stream\n */\n const deserializeSyncRow = (value: TableType): OutputType => {\n const validationSchema = deserializationSchema || schema\n const validation = validationSchema[`~standard`].validate(value)\n if (`value` in validation) {\n return validation.value\n } else if (`issues` in validation) {\n const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}`\n database.logger.error(issueMessage)\n onDeserializationError!(validation)\n throw new Error(issueMessage)\n } else {\n const unknownErrorMessage = `Unknown deserialization error for ${viewName}`\n database.logger.error(unknownErrorMessage)\n onDeserializationError!({ issues: [{ message: unknownErrorMessage }] })\n throw new Error(unknownErrorMessage)\n }\n }\n\n // We can do basic runtime validations for columns if not explicit schema has been provided\n const schema = inputSchema ?? 
(convertTableToSchema(table) as TSchema)\n /**\n * The onInsert, onUpdate, and onDelete handlers should only return\n * after we have written the changes to TanStack DB.\n * We currently only write to TanStack DB from a diff trigger.\n * We wait for the diff trigger to observe the change,\n * and only then return from the on[X] handlers.\n * This ensures that when the transaction is reported as\n * complete to the caller, the in-memory state is already\n * consistent with the database.\n */\n const pendingOperationStore = PendingOperationStore.GLOBAL\n // Keep the tracked table unique in case of multiple tabs.\n const trackedTableName = `__${viewName}_tracking_${Math.floor(\n Math.random() * 0xffffffff\n )\n .toString(16)\n .padStart(8, `0`)}`\n\n const transactor = new PowerSyncTransactor({\n database,\n })\n\n /**\n * \"sync\"\n * Notice that this describes the Sync between the local SQLite table\n * and the in-memory tanstack-db collection.\n */\n const sync: SyncConfig<OutputType, string> = {\n sync: (params) => {\n const { begin, write, commit, markReady } = params\n const abortController = new AbortController()\n\n // The sync function needs to be synchronous\n async function start() {\n database.logger.info(\n `Sync is starting for ${viewName} into ${trackedTableName}`\n )\n database.onChangeWithCallback(\n {\n onChange: async () => {\n await database\n .writeTransaction(async (context) => {\n begin()\n const operations = await context.getAll<TriggerDiffRecord>(\n `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`\n )\n const pendingOperations: Array<PendingOperation> = []\n\n for (const op of operations) {\n const { id, operation, timestamp, value } = op\n const parsedValue = deserializeSyncRow({\n id,\n ...JSON.parse(value),\n })\n const parsedPreviousValue =\n op.operation == DiffTriggerOperation.UPDATE\n ? deserializeSyncRow({\n id,\n ...JSON.parse(op.previous_value),\n })\n : undefined\n write({\n type: mapOperation(operation),\n value: parsedValue,\n previousValue: parsedPreviousValue,\n })\n pendingOperations.push({\n id,\n operation,\n timestamp,\n tableName: viewName,\n })\n }\n\n // clear the current operations\n await context.execute(`DELETE FROM ${trackedTableName}`)\n\n commit()\n pendingOperationStore.resolvePendingFor(pendingOperations)\n })\n .catch((error) => {\n database.logger.error(\n `An error has been detected in the sync handler`,\n error\n )\n })\n },\n },\n {\n signal: abortController.signal,\n triggerImmediate: false,\n tables: [trackedTableName],\n }\n )\n\n const disposeTracking = await database.triggers.createDiffTrigger({\n source: viewName,\n destination: trackedTableName,\n when: {\n [DiffTriggerOperation.INSERT]: `TRUE`,\n [DiffTriggerOperation.UPDATE]: `TRUE`,\n [DiffTriggerOperation.DELETE]: `TRUE`,\n },\n hooks: {\n beforeCreate: async (context) => {\n let currentBatchCount = syncBatchSize\n let cursor = 0\n while (currentBatchCount == syncBatchSize) {\n begin()\n const batchItems = await context.getAll<TableType>(\n sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? 
OFFSET ?`,\n [syncBatchSize, cursor]\n )\n currentBatchCount = batchItems.length\n cursor += currentBatchCount\n for (const row of batchItems) {\n write({\n type: `insert`,\n value: deserializeSyncRow(row),\n })\n }\n commit()\n }\n markReady()\n database.logger.info(\n `Sync is ready for ${viewName} into ${trackedTableName}`\n )\n },\n },\n })\n\n // If the abort controller was aborted while processing the request above\n if (abortController.signal.aborted) {\n await disposeTracking()\n } else {\n abortController.signal.addEventListener(\n `abort`,\n () => {\n disposeTracking()\n },\n { once: true }\n )\n }\n }\n\n start().catch((error) =>\n database.logger.error(\n `Could not start syncing process for ${viewName} into ${trackedTableName}`,\n error\n )\n )\n\n return () => {\n database.logger.info(\n `Sync has been stopped for ${viewName} into ${trackedTableName}`\n )\n abortController.abort()\n }\n },\n // Expose the getSyncMetadata function\n getSyncMetadata: undefined,\n }\n\n const getKey = (record: OutputType) => asPowerSyncRecord(record).id\n\n const outputConfig: EnhancedPowerSyncCollectionConfig<\n TTable,\n OutputType,\n TSchema\n > = {\n ...restConfig,\n schema,\n getKey,\n // Syncing should start immediately since we need to monitor the changes for mutations\n startSync: true,\n sync,\n onInsert: async (params) => {\n // The transaction here should only ever contain a single insert mutation\n return await transactor.applyTransaction(params.transaction)\n },\n onUpdate: async (params) => {\n // The transaction here should only ever contain a single update mutation\n return await transactor.applyTransaction(params.transaction)\n },\n onDelete: async (params) => {\n // The transaction here should only ever contain a single delete mutation\n return await transactor.applyTransaction(params.transaction)\n },\n utils: {\n getMeta: () => ({\n tableName: viewName,\n trackedTableName,\n serializeValue: (value) =>\n serializeForSQLite(\n value,\n // This is required by the input generic\n table as Table<\n MapBaseColumnType<InferPowerSyncOutputType<TTable, TSchema>>\n >,\n // Coerce serializer to the shape that corresponds to the Table constructed from OutputType\n serializer as CustomSQLiteSerializer<\n OutputType,\n ExtractedTableColumns<Table<MapBaseColumnType<OutputType>>>\n >\n ),\n }),\n },\n }\n return 
outputConfig\n}\n"],"names":["DEFAULT_BATCH_SIZE","schema","convertTableToSchema","PendingOperationStore","PowerSyncTransactor","DiffTriggerOperation","mapOperation","sanitizeSQL","asPowerSyncRecord","serializeForSQLite"],"mappings":";;;;;;;;;AA0NO,SAAS,2BAGd,QAAoD;AACpD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,IACR,gBAAgBA,YAAAA;AAAAA,IAChB,GAAG;AAAA,EAAA,IACD;AAEJ,QAAM,wBACJ,2BAA2B,SAAS,OAAO,wBAAwB;AACrE,QAAM,aAAa,gBAAgB,SAAS,OAAO,aAAa;AAChE,QAAM,yBACJ,4BAA4B,SACxB,OAAO,yBACP;AAQN,QAAM,EAAE,aAAa;AAKrB,QAAM,qBAAqB,CAAC,UAAiC;AAC3D,UAAM,mBAAmB,yBAAyBC;AAClD,UAAM,aAAa,iBAAiB,WAAW,EAAE,SAAS,KAAK;AAC/D,QAAI,WAAW,YAAY;AACzB,aAAO,WAAW;AAAA,IACpB,WAAW,YAAY,YAAY;AACjC,YAAM,eAAe,wCAAwC,QAAQ,aAAa,WAAW,OAAO,IAAI,CAAC,UAAU,GAAG,MAAM,IAAI,MAAM,MAAM,OAAO,EAAE,CAAC;AACtJ,eAAS,OAAO,MAAM,YAAY;AAClC,6BAAwB,UAAU;AAClC,YAAM,IAAI,MAAM,YAAY;AAAA,IAC9B,OAAO;AACL,YAAM,sBAAsB,qCAAqC,QAAQ;AACzE,eAAS,OAAO,MAAM,mBAAmB;AACzC,6BAAwB,EAAE,QAAQ,CAAC,EAAE,SAAS,oBAAA,CAAqB,GAAG;AACtE,YAAM,IAAI,MAAM,mBAAmB;AAAA,IACrC;AAAA,EACF;AAGA,QAAMA,WAAS,eAAgBC,OAAAA,qBAAqB,KAAK;AAWzD,QAAM,wBAAwBC,sBAAAA,sBAAsB;AAEpD,QAAM,mBAAmB,KAAK,QAAQ,aAAa,KAAK;AAAA,IACtD,KAAK,WAAW;AAAA,EAAA,EAEf,SAAS,EAAE,EACX,SAAS,GAAG,GAAG,CAAC;AAEnB,QAAM,aAAa,IAAIC,wCAAoB;AAAA,IACzC;AAAA,EAAA,CACD;AAOD,QAAM,OAAuC;AAAA,IAC3C,MAAM,CAAC,WAAW;AAChB,YAAM,EAAE,OAAO,OAAO,QAAQ,cAAc;AAC5C,YAAM,kBAAkB,IAAI,gBAAA;AAG5B,qBAAe,QAAQ;AACrB,iBAAS,OAAO;AAAA,UACd,wBAAwB,QAAQ,SAAS,gBAAgB;AAAA,QAAA;AAE3D,iBAAS;AAAA,UACP;AAAA,YACE,UAAU,YAAY;AACpB,oBAAM,SACH,iBAAiB,OAAO,YAAY;AACnC,sBAAA;AACA,sBAAM,aAAa,MAAM,QAAQ;AAAA,kBAC/B,iBAAiB,gBAAgB;AAAA,gBAAA;AAEnC,sBAAM,oBAA6C,CAAA;AAEnD,2BAAW,MAAM,YAAY;AAC3B,wBAAM,EAAE,IAAI,WAAW,WAAW,UAAU;AAC5C,wBAAM,cAAc,mBAAmB;AAAA,oBACrC;AAAA,oBACA,GAAG,KAAK,MAAM,KAAK;AAAA,kBAAA,CACpB;AACD,wBAAM,sBACJ,GAAG,aAAaC,OAAAA,qBAAqB,SACjC,mBAAmB;AAAA,oBACjB;AAAA,oBACA,GAAG,KAAK,MAAM,GAAG,cAAc;AAAA,kBAAA,CAChC,IACD;AACN,wBAAM;AAAA,oBACJ,MAAMC,QAAAA,aAAa,SAAS;AAAA,oBAC5B,OAAO;AAAA,oBACP,eAAe;AAAA,kBAAA,CAChB;AACD,oCAAkB,KAAK;AAAA,oBACrB;AAAA,oBACA;AAAA,oBACA;AAAA,oBACA,WAAW;AAAA,kBAAA,CACZ;AAAA,gBACH;AAGA,sBAAM,QAAQ,QAAQ,eAAe,gBAAgB,EAAE;AAEvD,uBAAA;AACA,sCAAsB,kBAAkB,iBAAiB;AAAA,cAC3D,CAAC,EACA,MAAM,CAAC,UAAU;AAChB,yBAAS,OAAO;AAAA,kBACd;AAAA,kBACA;AAAA,gBAAA;AAAA,cAEJ,CAAC;AAAA,YACL;AAAA,UAAA;AAAA,UAEF;AAAA,YACE,QAAQ,gBAAgB;AAAA,YACxB,kBAAkB;AAAA,YAClB,QAAQ,CAAC,gBAAgB;AAAA,UAAA;AAAA,QAC3B;AAGF,cAAM,kBAAkB,MAAM,SAAS,SAAS,kBAAkB;AAAA,UAChE,QAAQ;AAAA,UACR,aAAa;AAAA,UACb,MAAM;AAAA,YACJ,CAACD,OAAAA,qBAAqB,MAAM,GAAG;AAAA,YAC/B,CAACA,OAAAA,qBAAqB,MAAM,GAAG;AAAA,YAC/B,CAACA,OAAAA,qBAAqB,MAAM,GAAG;AAAA,UAAA;AAAA,UAEjC,OAAO;AAAA,YACL,cAAc,OAAO,YAAY;AAC/B,kBAAI,oBAAoB;AACxB,kBAAI,SAAS;AACb,qBAAO,qBAAqB,eAAe;AACzC,sBAAA;AACA,sBAAM,aAAa,MAAM,QAAQ;AAAA,kBAC/BE,mCAA4B,QAAQ;AAAA,kBACpC,CAAC,eAAe,MAAM;AAAA,gBAAA;AAExB,oCAAoB,WAAW;AAC/B,0BAAU;AACV,2BAAW,OAAO,YAAY;AAC5B,wBAAM;AAAA,oBACJ,MAAM;AAAA,oBACN,OAAO,mBAAmB,GAAG;AAAA,kBAAA,CAC9B;AAAA,gBACH;AACA,uBAAA;AAAA,cACF;AACA,wBAAA;AACA,uBAAS,OAAO;AAAA,gBACd,qBAAqB,QAAQ,SAAS,gBAAgB;AAAA,cAAA;AAAA,YAE1D;AAAA,UAAA;AAAA,QACF,CACD;AAGD,YAAI,gBAAgB,OAAO,SAAS;AAClC,gBAAM,gBAAA;AAAA,QACR,OAAO;AACL,0BAAgB,OAAO;AAAA,YACrB;AAAA,YACA,MAAM;AACJ,8BAAA;AAAA,YACF;AAAA,YACA,EAAE,MAAM,KAAA;AAAA,UAAK;AAAA,QAEjB;AAAA,MACF;AAEA,YAAA,EAAQ;AAAA,QAAM,CAAC,UACb,SAAS,OAAO;AAAA,UACd,uCAAuC,QAAQ,SAAS,gBAAgB;AAAA,UACxE;AAAA,QAAA;AAAA,MACF;AAGF,aAAO,MAAM;AACX,iBAAS,OAAO;AAAA,UACd,6BAA6B,QAAQ,SAAS,gBAAgB;AAAA,QAAA;AAEhE,wBAAgB,MAAA;AAAA,MAClB;AAAA,IACF;AAAA;AAAA,IAEA,iBAAiB;AAAA,EAAA;AAGnB,QAAM,SAAS,CAAC,WAAuBC,QAAAA,kBAAkB,MAAM,EAAE;AAEjE,QAAM,eAIF;AAAA,IACF,G
AAG;AAAA,IAAA,QACHP;AAAAA,IACA;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,UAAU,OAAO,WAAW;AAE1B,aAAO,MAAM,WAAW,iBAAiB,OAAO,WAAW;AAAA,IAC7D;AAAA,IACA,OAAO;AAAA,MACL,SAAS,OAAO;AAAA,QACd,WAAW;AAAA,QACX;AAAA,QACA,gBAAgB,CAAC,UACfQ,cAAAA;AAAAA,UACE;AAAA;AAAA,UAEA;AAAA;AAAA,UAIA;AAAA,QAAA;AAAA,MAIF;AAAA,IACJ;AAAA,EACF;AAEF,SAAO;AACT;;"}
package/dist/cjs/powersync.d.cts
@@ -0,0 +1,145 @@
+import { AnyTableColumnType, OptionalExtractedTable } from './helpers.cjs';
+import { BasePowerSyncCollectionConfig, ConfigWithArbitraryCollectionTypes, ConfigWithSQLiteInputType, ConfigWithSQLiteTypes, EnhancedPowerSyncCollectionConfig, InferPowerSyncOutputType, PowerSyncCollectionUtils } from './definitions.cjs';
+import { StandardSchemaV1 } from '@standard-schema/spec';
+import { Table } from '@powersync/common';
+/**
+ * Creates PowerSync collection options for use with a standard Collection.
+ *
+ * @template TTable - The SQLite-based typing
+ * @template TSchema - The validation schema type (optionally supports a custom input type)
+ * @param config - Configuration options for the PowerSync collection
+ * @returns Collection options with utilities
+ */
+/**
+ * Creates a PowerSync collection configuration with basic default validation.
+ * Input and Output types are the SQLite column types.
+ *
+ * @example
+ * ```typescript
+ * const APP_SCHEMA = new Schema({
+ *   documents: new Table({
+ *     name: column.text,
+ *   }),
+ * })
+ *
+ * type Document = (typeof APP_SCHEMA)["types"]["documents"]
+ *
+ * const db = new PowerSyncDatabase({
+ *   database: {
+ *     dbFilename: "test.sqlite",
+ *   },
+ *   schema: APP_SCHEMA,
+ * })
+ *
+ * const collection = createCollection(
+ *   powerSyncCollectionOptions({
+ *     database: db,
+ *     table: APP_SCHEMA.props.documents
+ *   })
+ * )
+ * ```
+ */
+export declare function powerSyncCollectionOptions<TTable extends Table = Table>(config: BasePowerSyncCollectionConfig<TTable, never> & ConfigWithSQLiteTypes): EnhancedPowerSyncCollectionConfig<TTable, OptionalExtractedTable<TTable>, never>;
+/**
+ * Creates a PowerSync collection configuration with schema validation.
+ *
+ * The input types satisfy the SQLite column types.
+ *
+ * The output types are defined by the provided schema. This schema can enforce additional
+ * validation or type transforms.
+ * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard
+ * serialization implementation to serialize column values. Custom or advanced types require providing additional
+ * serializer specifications. Partial column overrides can be supplied to `serializer`.
+ *
+ * @example
+ * ```typescript
+ * import { z } from "zod"
+ *
+ * // The PowerSync SQLite schema
+ * const APP_SCHEMA = new Schema({
+ *   documents: new Table({
+ *     name: column.text,
+ *     // Dates are stored as ISO date strings in SQLite
+ *     created_at: column.text
+ *   }),
+ * })
+ *
+ * // Advanced Zod validations. The output type of this schema
+ * // is constrained to the SQLite schema of APP_SCHEMA
+ * const schema = z.object({
+ *   id: z.string(),
+ *   // Notice that `name` is not nullable (is required) here and it has additional validation
+ *   name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(),
+ *   // The input type is still the SQLite string type. While collections will output smart Date instances.
+ *   created_at: z.string().transform(val => new Date(val))
+ * })
+ *
+ * const collection = createCollection(
+ *   powerSyncCollectionOptions({
+ *     database: db,
+ *     table: APP_SCHEMA.props.documents,
+ *     schema,
+ *     serializer: {
+ *       // The default is toISOString, this is just to demonstrate custom overrides
+ *       created_at: (outputValue) => outputValue.toISOString(),
+ *     },
+ *   })
+ * )
+ * ```
+ */
+export declare function powerSyncCollectionOptions<TTable extends Table, TSchema extends StandardSchemaV1<OptionalExtractedTable<TTable>, AnyTableColumnType<TTable>>>(config: BasePowerSyncCollectionConfig<TTable, TSchema> & ConfigWithSQLiteInputType<TTable, TSchema>): EnhancedPowerSyncCollectionConfig<TTable, InferPowerSyncOutputType<TTable, TSchema>, TSchema> & {
+    schema: TSchema;
+};
+/**
+ * Creates a PowerSync collection configuration with schema validation.
+ *
+ * The input types are not linked to the internal SQLite table types. This can
+ * give greater flexibility, e.g. by accepting rich types as input for `insert` or `update` operations.
+ * An additional `deserializationSchema` is required in order to process incoming SQLite updates to the output type.
+ *
+ * The output types are defined by the provided schema. This schema can enforce additional
+ * validation or type transforms.
+ * Arbitrary output typed mutations are encoded to SQLite for persistence. We provide a basic standard
+ * serialization implementation to serialize column values. Custom or advanced types require providing additional
+ * serializer specifications. Partial column overrides can be supplied to `serializer`.
+ *
+ * @example
+ * ```typescript
+ * import { z } from "zod"
+ *
+ * // The PowerSync SQLite schema
+ * const APP_SCHEMA = new Schema({
+ *   documents: new Table({
+ *     name: column.text,
+ *     // Booleans are represented as integers in SQLite
+ *     is_active: column.integer
+ *   }),
+ * })
+ *
+ * // Advanced Zod validations.
+ * // We accept boolean values as input for operations and expose Booleans in query results
+ * const schema = z.object({
+ *   id: z.string(),
+ *   isActive: z.boolean(), // TInput and TOutput are boolean
+ * })
+ *
+ * // The deserializationSchema converts the SQLite synced INTEGER (0/1) values to booleans.
+ * const deserializationSchema = z.object({
+ *   id: z.string(),
+ *   isActive: z.number().nullable().transform((val) => val == null ? true : val > 0),
+ * })
+ *
+ * const collection = createCollection(
+ *   powerSyncCollectionOptions({
+ *     database: db,
+ *     table: APP_SCHEMA.props.documents,
+ *     schema,
+ *     deserializationSchema,
+ *   })
+ * )
+ * ```
+ */
+export declare function powerSyncCollectionOptions<TTable extends Table, TSchema extends StandardSchemaV1<AnyTableColumnType<TTable>, AnyTableColumnType<TTable>>>(config: BasePowerSyncCollectionConfig<TTable, TSchema> & ConfigWithArbitraryCollectionTypes<TTable, TSchema>): EnhancedPowerSyncCollectionConfig<TTable, InferPowerSyncOutputType<TTable, TSchema>, TSchema> & {
+    utils: PowerSyncCollectionUtils<TTable>;
+    schema: TSchema;
+};
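
Note that only overload 3 includes `utils: PowerSyncCollectionUtils<TTable>` in its return type; from the implementation in powersync.cjs above, `utils.getMeta()` exposes the view name, the per-instance tracking table, and a `serializeValue` helper. A hedged sketch of how that surface could be used, assuming `collection` is the result of a `createCollection` call using overload 3:

```typescript
// Assumption: `collection` was created via overload 3 (schema + deserializationSchema),
// with a table containing an `is_active: column.integer` column.
const meta = collection.utils.getMeta()

meta.tableName // the PowerSync view name, e.g. "documents"
meta.trackedTableName // e.g. "__documents_tracking_<8 hex chars>", unique per instance

// serializeValue maps an output-typed row back to SQLite-compatible values,
// applying any `serializer` overrides (booleans -> 0/1, Dates -> ISO strings, ...):
const sqliteRow = meta.serializeValue({ id: `1`, is_active: true })
// -> { id: "1", is_active: 1 }
```
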
package/dist/cjs/schema.cjs
@@ -0,0 +1,65 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const common = require("@powersync/common");
+function convertTableToSchema(table) {
+  const validate = (value) => {
+    if (typeof value != `object` || value == null) {
+      return {
+        issues: [
+          {
+            message: `Value must be an object`
+          }
+        ]
+      };
+    }
+    const issues = [];
+    if (!(`id` in value) || typeof value.id != `string`) {
+      issues.push({
+        message: `id field must be a string`,
+        path: [`id`]
+      });
+    }
+    for (const column of table.columns) {
+      const val = value[column.name];
+      if (val == null) {
+        continue;
+      }
+      switch (column.type) {
+        case common.ColumnType.TEXT:
+          if (typeof val != `string`) {
+            issues.push({
+              message: `${column.name} must be a string or null`,
+              path: [column.name]
+            });
+          }
+          break;
+        case common.ColumnType.INTEGER:
+        case common.ColumnType.REAL:
+          if (typeof val != `number`) {
+            issues.push({
+              message: `${column.name} must be a number or null`,
+              path: [column.name]
+            });
+          }
+          break;
+      }
+    }
+    if (issues.length > 0) {
+      return { issues };
+    }
+    return { value: { ...value } };
+  };
+  return {
+    "~standard": {
+      version: 1,
+      vendor: `powersync`,
+      validate,
+      types: {
+        input: {},
+        output: {}
+      }
+    }
+  };
+}
+exports.convertTableToSchema = convertTableToSchema;
+//# sourceMappingURL=schema.cjs.map
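
As the implementation shows, the fallback schema produced by `convertTableToSchema` only checks that `id` is a string and that each non-null column value matches its declared TEXT/INTEGER/REAL type. A small sketch of that behaviour (the deep import path is hypothetical; this diff does not show a root re-export):

```typescript
import { column, Table } from "@powersync/common"
// Hypothetical deep import for illustration only.
import { convertTableToSchema } from "@tanstack/powersync-db-collection/dist/esm/schema"

const users = new Table({ name: column.text, age: column.integer })
const schema = convertTableToSchema(users)

// Null column values pass; no further constraints are applied.
schema[`~standard`].validate({ id: `u1`, name: null, age: 42 })
// -> { value: { id: "u1", name: null, age: 42 } }

// Each mismatched column contributes one issue with its path.
schema[`~standard`].validate({ id: `u1`, name: 7, age: `old` })
// -> { issues: [ { message: "name must be a string or null", path: ["name"] },
//                { message: "age must be a number or null", path: ["age"] } ] }
```
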
package/dist/cjs/schema.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"schema.cjs","sources":["../../src/schema.ts"],"sourcesContent":["import { ColumnType } from \"@powersync/common\"\nimport type { Table } from \"@powersync/common\"\nimport type { StandardSchemaV1 } from \"@standard-schema/spec\"\nimport type { ExtractedTable } from \"./helpers\"\n\n/**\n * Converts a PowerSync Table instance to a StandardSchemaV1 schema.\n * Creates a schema that validates the structure and types of table records\n * according to the PowerSync table definition.\n *\n * @template TTable - The PowerSync schema-typed Table definition\n * @param table - The PowerSync Table instance to convert\n * @returns A StandardSchemaV1-compatible schema with proper type validation\n *\n * @example\n * ```typescript\n * const usersTable = new Table({\n * name: column.text,\n * age: column.integer\n * })\n * ```\n */\nexport function convertTableToSchema<TTable extends Table>(\n table: TTable\n): StandardSchemaV1<ExtractedTable<TTable>> {\n type TExtracted = ExtractedTable<TTable>\n // Create validate function that checks types according to column definitions\n const validate = (\n value: unknown\n ):\n | StandardSchemaV1.SuccessResult<TExtracted>\n | StandardSchemaV1.FailureResult => {\n if (typeof value != `object` || value == null) {\n return {\n issues: [\n {\n message: `Value must be an object`,\n },\n ],\n }\n }\n\n const issues: Array<StandardSchemaV1.Issue> = []\n\n // Check id field\n if (!(`id` in value) || typeof (value as any).id != `string`) {\n issues.push({\n message: `id field must be a string`,\n path: [`id`],\n })\n }\n\n // Check each column\n for (const column of table.columns) {\n const val = (value as TExtracted)[column.name as keyof TExtracted]\n\n if (val == null) {\n continue\n }\n\n switch (column.type) {\n case ColumnType.TEXT:\n if (typeof val != `string`) {\n issues.push({\n message: `${column.name} must be a string or null`,\n path: [column.name],\n })\n }\n break\n case ColumnType.INTEGER:\n case ColumnType.REAL:\n if (typeof val != `number`) {\n issues.push({\n message: `${column.name} must be a number or null`,\n path: [column.name],\n })\n }\n break\n }\n }\n\n if (issues.length > 0) {\n return { issues }\n }\n\n return { value: { ...value } as TExtracted }\n }\n\n return {\n \"~standard\": {\n version: 1,\n vendor: `powersync`,\n validate,\n types: {\n input: {} as TExtracted,\n output: {} as TExtracted,\n },\n },\n }\n}\n"],"names":["ColumnType"],"mappings":";;;AAsBO,SAAS,qBACd,OAC0C;AAG1C,QAAM,WAAW,CACf,UAGoC;AACpC,QAAI,OAAO,SAAS,YAAY,SAAS,MAAM;AAC7C,aAAO;AAAA,QACL,QAAQ;AAAA,UACN;AAAA,YACE,SAAS;AAAA,UAAA;AAAA,QACX;AAAA,MACF;AAAA,IAEJ;AAEA,UAAM,SAAwC,CAAA;AAG9C,QAAI,EAAE,QAAQ,UAAU,OAAQ,MAAc,MAAM,UAAU;AAC5D,aAAO,KAAK;AAAA,QACV,SAAS;AAAA,QACT,MAAM,CAAC,IAAI;AAAA,MAAA,CACZ;AAAA,IACH;AAGA,eAAW,UAAU,MAAM,SAAS;AAClC,YAAM,MAAO,MAAqB,OAAO,IAAwB;AAEjE,UAAI,OAAO,MAAM;AACf;AAAA,MACF;AAEA,cAAQ,OAAO,MAAA;AAAA,QACb,KAAKA,OAAAA,WAAW;AACd,cAAI,OAAO,OAAO,UAAU;AAC1B,mBAAO,KAAK;AAAA,cACV,SAAS,GAAG,OAAO,IAAI;AAAA,cACvB,MAAM,CAAC,OAAO,IAAI;AAAA,YAAA,CACnB;AAAA,UACH;AACA;AAAA,QACF,KAAKA,OAAAA,WAAW;AAAA,QAChB,KAAKA,OAAAA,WAAW;AACd,cAAI,OAAO,OAAO,UAAU;AAC1B,mBAAO,KAAK;AAAA,cACV,SAAS,GAAG,OAAO,IAAI;AAAA,cACvB,MAAM,CAAC,OAAO,IAAI;AAAA,YAAA,CACnB;AAAA,UACH;AACA;AAAA,MAAA;AAAA,IAEN;AAEA,QAAI,OAAO,SAAS,GAAG;AACrB,aAAO,EAAE,OAAA;AAAA,IACX;AAEA,WAAO,EAAE,OAAO,EAAE,GAAG,QAAM;AAAA,EAC7B;AAEA,SAAO;AAAA,IACL,aAAa;AAAA,MACX,SAAS;AAAA,MACT,QAAQ;AAAA,MACR;AAAA,MACA,OAAO;AAAA,QACL,OAAO,CAAA;AAAA,QACP,QAAQ,CAAA;AAAA,MAAC;AAAA,IACX;AAAA,EACF;AAEJ;;"}
package/dist/cjs/schema.d.cts
@@ -0,0 +1,21 @@
+import { Table } from '@powersync/common';
+import { StandardSchemaV1 } from '@standard-schema/spec';
+import { ExtractedTable } from './helpers.cjs';
+/**
+ * Converts a PowerSync Table instance to a StandardSchemaV1 schema.
+ * Creates a schema that validates the structure and types of table records
+ * according to the PowerSync table definition.
+ *
+ * @template TTable - The PowerSync schema-typed Table definition
+ * @param table - The PowerSync Table instance to convert
+ * @returns A StandardSchemaV1-compatible schema with proper type validation
+ *
+ * @example
+ * ```typescript
+ * const usersTable = new Table({
+ *   name: column.text,
+ *   age: column.integer
+ * })
+ * ```
+ */
+export declare function convertTableToSchema<TTable extends Table>(table: TTable): StandardSchemaV1<ExtractedTable<TTable>>;
package/dist/cjs/serialization.cjs
@@ -0,0 +1,47 @@
+"use strict";
+Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
+const common = require("@powersync/common");
+function serializeForSQLite(value, tableSchema, customSerializer = {}) {
+  return Object.fromEntries(
+    Object.entries(value).map(([key, value2]) => {
+      const outputType = key == `id` ? common.ColumnType.TEXT : tableSchema.columns.find((column) => column.name == key)?.type;
+      if (!outputType) {
+        throw new Error(`Could not find schema for ${key} column.`);
+      }
+      if (value2 == null) {
+        return [key, value2];
+      }
+      const customTransform = customSerializer[key];
+      if (customTransform) {
+        return [key, customTransform(value2)];
+      }
+      switch (outputType) {
+        case common.ColumnType.TEXT:
+          if (typeof value2 == `string`) {
+            return [key, value2];
+          } else if (value2 instanceof Date) {
+            return [key, value2.toISOString()];
+          } else {
+            return [key, JSON.stringify(value2)];
+          }
+        case common.ColumnType.INTEGER:
+        case common.ColumnType.REAL:
+          if (typeof value2 == `number`) {
+            return [key, value2];
+          } else if (typeof value2 == `boolean`) {
+            return [key, value2 ? 1 : 0];
+          } else {
+            const numberValue = Number(value2);
+            if (isNaN(numberValue)) {
+              throw new Error(
+                `Could not convert ${key}=${value2} to a number for SQLite`
+              );
+            }
+            return [key, numberValue];
+          }
+      }
+    })
+  );
+}
+exports.serializeForSQLite = serializeForSQLite;
+//# sourceMappingURL=serialization.cjs.map
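
`serializeForSQLite`'s rules in miniature: strings and numbers pass through, `Date` becomes an ISO string, booleans map to 1/0, other TEXT values are JSON-stringified, and other numeric values are coerced via `Number` (throwing on NaN). A sketch, again with a hypothetical deep import:

```typescript
import { column, Table } from "@powersync/common"
// Hypothetical deep import for illustration only.
import { serializeForSQLite } from "@tanstack/powersync-db-collection/dist/esm/serialization"

const documents = new Table({
  title: column.text,
  created_at: column.text,
  is_active: column.integer,
})

serializeForSQLite(
  {
    id: `1`,
    title: { rich: true }, // TEXT, not a string -> JSON.stringify
    created_at: new Date(`2024-01-01`), // TEXT, Date -> toISOString()
    is_active: true, // INTEGER, boolean -> 1
  },
  documents
)
// -> { id: "1", title: "{\"rich\":true}",
//      created_at: "2024-01-01T00:00:00.000Z", is_active: 1 }

// The optional third argument overrides serialization per column:
serializeForSQLite(
  { id: `1`, title: `t`, created_at: new Date(), is_active: false },
  documents,
  { created_at: (d: Date) => d.valueOf().toString() }
)
```
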
package/dist/cjs/serialization.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"serialization.cjs","sources":["../../src/serialization.ts"],"sourcesContent":["import { ColumnType } from \"@powersync/common\"\nimport type { Table } from \"@powersync/common\"\nimport type { CustomSQLiteSerializer } from \"./definitions\"\nimport type {\n ExtractedTable,\n ExtractedTableColumns,\n MapBaseColumnType,\n} from \"./helpers\"\n\n/**\n * Serializes an object for persistence to a SQLite table, mapping its values to appropriate SQLite types.\n *\n * This function takes an object representing a row, a table schema, and an optional custom serializer map.\n * It returns a new object with values transformed to be compatible with SQLite column types.\n *\n * ## Generics\n * - `TOutput`: The shape of the input object, typically matching the row data.\n * - `TTable`: The table schema, which must match the keys of `TOutput`.\n *\n * ## Parameters\n * - `value`: The object to serialize (row data).\n * - `tableSchema`: The schema describing the SQLite table columns and types.\n * - `customSerializer`: An optional map of custom serialization functions for specific keys.\n *\n * ## Behavior\n * - For each key in `value`, finds the corresponding column in `tableSchema`.\n * - If a custom serializer is provided for a key, it is used to transform the value.\n * - Otherwise, values are mapped according to the column type:\n * - `TEXT`: Strings are passed through; Dates are converted to ISO strings; other types are JSON-stringified.\n * - `INTEGER`/`REAL`: Numbers are passed through; booleans are mapped to 1/0; other types are coerced to numbers.\n * - Throws if a column type is unknown or a value cannot be converted.\n *\n * ## Returns\n * - An object with the same keys as `value`, with values transformed for SQLite compatibility.\n *\n * ## Errors\n * - Throws if a key in `value` does not exist in the schema.\n * - Throws if a value cannot be converted to the required SQLite type.\n */\nexport function serializeForSQLite<\n TOutput extends Record<string, unknown>,\n // The keys should match\n TTable extends Table<MapBaseColumnType<TOutput>> = Table<\n MapBaseColumnType<TOutput>\n >,\n>(\n value: TOutput,\n tableSchema: TTable,\n customSerializer: Partial<\n CustomSQLiteSerializer<TOutput, ExtractedTableColumns<TTable>>\n > = {}\n): ExtractedTable<TTable> {\n return Object.fromEntries(\n Object.entries(value).map(([key, value]) => {\n // First get the output schema type\n const outputType =\n key == `id`\n ? ColumnType.TEXT\n : tableSchema.columns.find((column) => column.name == key)?.type\n if (!outputType) {\n throw new Error(`Could not find schema for ${key} column.`)\n }\n\n if (value == null) {\n return [key, value]\n }\n\n const customTransform = customSerializer[key]\n if (customTransform) {\n return [key, customTransform(value as TOutput[string])]\n }\n\n // Map to the output\n switch (outputType) {\n case ColumnType.TEXT:\n if (typeof value == `string`) {\n return [key, value]\n } else if (value instanceof Date) {\n return [key, value.toISOString()]\n } else {\n return [key, JSON.stringify(value)]\n }\n case ColumnType.INTEGER:\n case ColumnType.REAL:\n if (typeof value == `number`) {\n return [key, value]\n } else if (typeof value == `boolean`) {\n return [key, value ? 
1 : 0]\n } else {\n const numberValue = Number(value)\n if (isNaN(numberValue)) {\n throw new Error(\n `Could not convert ${key}=${value} to a number for SQLite`\n )\n }\n return [key, numberValue]\n }\n }\n })\n )\n}\n"],"names":["value","ColumnType"],"mappings":";;;AAuCO,SAAS,mBAOd,OACA,aACA,mBAEI,CAAA,GACoB;AACxB,SAAO,OAAO;AAAA,IACZ,OAAO,QAAQ,KAAK,EAAE,IAAI,CAAC,CAAC,KAAKA,MAAK,MAAM;AAE1C,YAAM,aACJ,OAAO,OACHC,OAAAA,WAAW,OACX,YAAY,QAAQ,KAAK,CAAC,WAAW,OAAO,QAAQ,GAAG,GAAG;AAChE,UAAI,CAAC,YAAY;AACf,cAAM,IAAI,MAAM,6BAA6B,GAAG,UAAU;AAAA,MAC5D;AAEA,UAAID,UAAS,MAAM;AACjB,eAAO,CAAC,KAAKA,MAAK;AAAA,MACpB;AAEA,YAAM,kBAAkB,iBAAiB,GAAG;AAC5C,UAAI,iBAAiB;AACnB,eAAO,CAAC,KAAK,gBAAgBA,MAAwB,CAAC;AAAA,MACxD;AAGA,cAAQ,YAAA;AAAA,QACN,KAAKC,OAAAA,WAAW;AACd,cAAI,OAAOD,UAAS,UAAU;AAC5B,mBAAO,CAAC,KAAKA,MAAK;AAAA,UACpB,WAAWA,kBAAiB,MAAM;AAChC,mBAAO,CAAC,KAAKA,OAAM,aAAa;AAAA,UAClC,OAAO;AACL,mBAAO,CAAC,KAAK,KAAK,UAAUA,MAAK,CAAC;AAAA,UACpC;AAAA,QACF,KAAKC,OAAAA,WAAW;AAAA,QAChB,KAAKA,OAAAA,WAAW;AACd,cAAI,OAAOD,UAAS,UAAU;AAC5B,mBAAO,CAAC,KAAKA,MAAK;AAAA,UACpB,WAAW,OAAOA,UAAS,WAAW;AACpC,mBAAO,CAAC,KAAKA,SAAQ,IAAI,CAAC;AAAA,UAC5B,OAAO;AACL,kBAAM,cAAc,OAAOA,MAAK;AAChC,gBAAI,MAAM,WAAW,GAAG;AACtB,oBAAM,IAAI;AAAA,gBACR,qBAAqB,GAAG,IAAIA,MAAK;AAAA,cAAA;AAAA,YAErC;AACA,mBAAO,CAAC,KAAK,WAAW;AAAA,UAC1B;AAAA,MAAA;AAAA,IAEN,CAAC;AAAA,EAAA;AAEL;;"}