@tanstack/powersync-db-collection 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/helpers.ts ADDED
@@ -0,0 +1,112 @@
+ import { DiffTriggerOperation } from "@powersync/common"
+ import type {
+   BaseColumnType,
+   ExtractColumnValueType,
+   Table,
+ } from "@powersync/common"
+
+ /**
+  * All PowerSync table records include a UUID `id` column.
+  */
+ export type PowerSyncRecord = {
+   id: string
+   [key: string]: unknown
+ }
+
+ /**
+  * Utility type: if T includes null, also allow undefined (to support optional fields in insert/update operations).
+  * PowerSync columns are typically typed as e.g. `string | null`, and insert
+  * and update operations may additionally omit the value entirely (optional).
+  */
+ type WithUndefinedIfNull<T> = null extends T ? T | undefined : T
+ type OptionalIfUndefined<T> = {
+   [K in keyof T as undefined extends T[K] ? K : never]?: T[K]
+ } & {
+   [K in keyof T as undefined extends T[K] ? never : K]: T[K]
+ }
+
+ /**
+  * Provides the base column types for a table. This excludes the `id` column.
+  */
+ export type ExtractedTableColumns<TTable extends Table> = {
+   [K in keyof TTable[`columnMap`]]: ExtractColumnValueType<
+     TTable[`columnMap`][K]
+   >
+ }
+ /**
+  * Utility type that extracts the typed structure of a table based on its column definitions.
+  * Maps each column to its corresponding TypeScript type using ExtractColumnValueType.
+  *
+  * @template TTable - The PowerSync table definition
+  * @example
+  * ```typescript
+  * const table = new Table({
+  *   name: column.text,
+  *   age: column.integer
+  * })
+  * type TableType = ExtractedTable<typeof table>
+  * // Results in: { id: string, name: string | null, age: number | null }
+  * ```
+  */
+ export type ExtractedTable<TTable extends Table> =
+   ExtractedTableColumns<TTable> & {
+     id: string
+   }
+
+ export type OptionalExtractedTable<TTable extends Table> = OptionalIfUndefined<{
+   [K in keyof TTable[`columnMap`]]: WithUndefinedIfNull<
+     ExtractColumnValueType<TTable[`columnMap`][K]>
+   >
+ }> & {
+   id: string
+ }
+
+ /**
+  * Maps the schema of TTable to a type with
+  * the same keys, while the values may have any type.
+  */
+ export type AnyTableColumnType<TTable extends Table> = {
+   [K in keyof TTable[`columnMap`]]: any
+ } & { id: string }
+
+ export function asPowerSyncRecord(record: any): PowerSyncRecord {
+   if (typeof record.id !== `string`) {
+     throw new Error(`Record must have a string id field`)
+   }
+   return record as PowerSyncRecord
+ }
+
+ // Helper type to ensure the keys of TOutput match the Table columns
+ export type MapBaseColumnType<TOutput> = {
+   [Key in keyof TOutput]: BaseColumnType<any>
+ }
+
+ /**
+  * Maps {@link DiffTriggerOperation} to TanStack DB operations
+  */
+ export function mapOperation(operation: DiffTriggerOperation) {
+   switch (operation) {
+     case DiffTriggerOperation.INSERT:
+       return `insert`
+     case DiffTriggerOperation.UPDATE:
+       return `update`
+     case DiffTriggerOperation.DELETE:
+       return `delete`
+   }
+ }
+
+ /**
+  * Maps TanStack DB operations to {@link DiffTriggerOperation}
+  */
+ export function mapOperationToPowerSync(operation: string) {
+   switch (operation) {
+     case `insert`:
+       return DiffTriggerOperation.INSERT
+     case `update`:
+       return DiffTriggerOperation.UPDATE
+     case `delete`:
+       return DiffTriggerOperation.DELETE
+     default:
+       throw new Error(`Unknown operation ${operation} received`)
+   }
+ }
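
For orientation, a minimal sketch (not part of the package) of how these runtime helpers behave. The record literal is illustrative, and the relative `./helpers` import assumes in-repo usage, since the package entry point does not re-export this module:

```typescript
import { DiffTriggerOperation } from "@powersync/common"
import { asPowerSyncRecord, mapOperation, mapOperationToPowerSync } from "./helpers"

// Runtime guard: throws unless the record carries a string `id`
const record = asPowerSyncRecord({ id: `c1f0`, name: `doc` })

// Map between PowerSync's trigger enum and TanStack DB operation strings
mapOperation(DiffTriggerOperation.INSERT) // => `insert`
mapOperationToPowerSync(`update`) // => DiffTriggerOperation.UPDATE
mapOperationToPowerSync(`upsert`) // throws: Unknown operation upsert received
```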
package/src/index.ts ADDED
@@ -0,0 +1,3 @@
+ export * from "./definitions"
+ export * from "./powersync"
+ export * from "./PowerSyncTransactor"
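
Taken together, the entry point means consumers import from the package root; a sketch, assuming the package name from the header above:

```typescript
// The entry point re-exports the collection options factory, the transactor,
// and the shared type definitions from one module.
import {
  powerSyncCollectionOptions,
  PowerSyncTransactor,
} from "@tanstack/powersync-db-collection"
```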
package/src/powersync.ts ADDED
@@ -0,0 +1,479 @@
+ import { DiffTriggerOperation, sanitizeSQL } from "@powersync/common"
+ import { PendingOperationStore } from "./PendingOperationStore"
+ import { PowerSyncTransactor } from "./PowerSyncTransactor"
+ import { DEFAULT_BATCH_SIZE } from "./definitions"
+ import { asPowerSyncRecord, mapOperation } from "./helpers"
+ import { convertTableToSchema } from "./schema"
+ import { serializeForSQLite } from "./serialization"
+ import type {
+   AnyTableColumnType,
+   ExtractedTable,
+   ExtractedTableColumns,
+   MapBaseColumnType,
+   OptionalExtractedTable,
+ } from "./helpers"
+ import type {
+   BasePowerSyncCollectionConfig,
+   ConfigWithArbitraryCollectionTypes,
+   ConfigWithSQLiteInputType,
+   ConfigWithSQLiteTypes,
+   CustomSQLiteSerializer,
+   EnhancedPowerSyncCollectionConfig,
+   InferPowerSyncOutputType,
+   PowerSyncCollectionConfig,
+   PowerSyncCollectionUtils,
+ } from "./definitions"
+ import type { PendingOperation } from "./PendingOperationStore"
+ import type { SyncConfig } from "@tanstack/db"
+ import type { StandardSchemaV1 } from "@standard-schema/spec"
+ import type { Table, TriggerDiffRecord } from "@powersync/common"
+
+ /**
+  * Creates PowerSync collection options for use with a standard Collection.
+  *
+  * @template TTable - The SQLite-based typing
+  * @template TSchema - The validation schema type (optionally supports a custom input type)
+  * @param config - Configuration options for the PowerSync collection
+  * @returns Collection options with utilities
+  */
+
+ // Overload 1: No schema is provided
+
+ /**
+  * Creates a PowerSync collection configuration with basic default validation.
+  * Input and output types are the SQLite column types.
+  *
+  * @example
+  * ```typescript
+  * const APP_SCHEMA = new Schema({
+  *   documents: new Table({
+  *     name: column.text,
+  *   }),
+  * })
+  *
+  * type Document = (typeof APP_SCHEMA)["types"]["documents"]
+  *
+  * const db = new PowerSyncDatabase({
+  *   database: {
+  *     dbFilename: "test.sqlite",
+  *   },
+  *   schema: APP_SCHEMA,
+  * })
+  *
+  * const collection = createCollection(
+  *   powerSyncCollectionOptions({
+  *     database: db,
+  *     table: APP_SCHEMA.props.documents
+  *   })
+  * )
+  * ```
+  */
+ export function powerSyncCollectionOptions<TTable extends Table = Table>(
+   config: BasePowerSyncCollectionConfig<TTable, never> & ConfigWithSQLiteTypes
+ ): EnhancedPowerSyncCollectionConfig<
+   TTable,
+   OptionalExtractedTable<TTable>,
+   never
+ >
+
+ // Overload 2: Schema is provided and the TInput matches SQLite types.
+
+ /**
+  * Creates a PowerSync collection configuration with schema validation.
+  *
+  * The input types satisfy the SQLite column types.
+  *
+  * The output types are defined by the provided schema. This schema can enforce additional
+  * validation or type transforms.
+  * Mutations with arbitrary output types are encoded to SQLite for persistence. A basic default
+  * serialization implementation is provided for column values. Custom or advanced types require
+  * additional serializer specifications; partial column overrides can be supplied via `serializer`.
+  *
+  * @example
+  * ```typescript
+  * import { z } from "zod"
+  *
+  * // The PowerSync SQLite schema
+  * const APP_SCHEMA = new Schema({
+  *   documents: new Table({
+  *     name: column.text,
+  *     // Dates are stored as ISO date strings in SQLite
+  *     created_at: column.text
+  *   }),
+  * })
+  *
+  * // Advanced Zod validations. The output type of this schema
+  * // is constrained to the SQLite schema of APP_SCHEMA
+  * const schema = z.object({
+  *   id: z.string(),
+  *   // `name` gains additional validation here, while staying nullable to match the SQLite type
+  *   name: z.string().min(3, { message: "Should be at least 3 characters" }).nullable(),
+  *   // The input type is still the SQLite string type, while collections will output Date instances
+  *   created_at: z.string().transform(val => new Date(val))
+  * })
+  *
+  * const collection = createCollection(
+  *   powerSyncCollectionOptions({
+  *     database: db,
+  *     table: APP_SCHEMA.props.documents,
+  *     schema,
+  *     serializer: {
+  *       // The default is toISOString; this just demonstrates a custom override
+  *       created_at: (outputValue) => outputValue.toISOString(),
+  *     },
+  *   })
+  * )
+  * ```
+  */
+ export function powerSyncCollectionOptions<
+   TTable extends Table,
+   TSchema extends StandardSchemaV1<
+     // TInput is the SQLite types. We can use the supplied schema to validate sync input
+     OptionalExtractedTable<TTable>,
+     AnyTableColumnType<TTable>
+   >,
+ >(
+   config: BasePowerSyncCollectionConfig<TTable, TSchema> &
+     ConfigWithSQLiteInputType<TTable, TSchema>
+ ): EnhancedPowerSyncCollectionConfig<
+   TTable,
+   InferPowerSyncOutputType<TTable, TSchema>,
+   TSchema
+ > & {
+   schema: TSchema
+ }
+
+ // Overload 3: Schema is provided with arbitrary TInput and TOutput
+ /**
+  * Creates a PowerSync collection configuration with schema validation.
+  *
+  * The input types are not linked to the internal SQLite table types. This can
+  * give greater flexibility, e.g. by accepting rich types as input for `insert` or `update` operations.
+  * An additional `deserializationSchema` is required to deserialize incoming SQLite updates to the output type.
+  *
+  * The output types are defined by the provided schema. This schema can enforce additional
+  * validation or type transforms.
+  * Mutations with arbitrary output types are encoded to SQLite for persistence. A basic default
+  * serialization implementation is provided for column values. Custom or advanced types require
+  * additional serializer specifications; partial column overrides can be supplied via `serializer`.
+  *
+  * @example
+  * ```typescript
+  * import { z } from "zod"
+  *
+  * // The PowerSync SQLite schema
+  * const APP_SCHEMA = new Schema({
+  *   documents: new Table({
+  *     name: column.text,
+  *     // Booleans are represented as integers in SQLite
+  *     is_active: column.integer
+  *   }),
+  * })
+  *
+  * // Advanced Zod validations.
+  * // We accept boolean values as input for operations and expose booleans in query results
+  * const schema = z.object({
+  *   id: z.string(),
+  *   is_active: z.boolean(), // TInput and TOutput are boolean
+  * })
+  *
+  * // The deserializationSchema converts the synced SQLite INTEGER (0/1) values to booleans.
+  * const deserializationSchema = z.object({
+  *   id: z.string(),
+  *   is_active: z.number().nullable().transform((val) => val == null ? true : val > 0),
+  * })
+  *
+  * const collection = createCollection(
+  *   powerSyncCollectionOptions({
+  *     database: db,
+  *     table: APP_SCHEMA.props.documents,
+  *     schema,
+  *     deserializationSchema,
+  *   })
+  * )
+  * ```
+  */
+ export function powerSyncCollectionOptions<
+   TTable extends Table,
+   TSchema extends StandardSchemaV1<
+     // The input and output must have the same keys; the value types can be arbitrary
+     AnyTableColumnType<TTable>,
+     AnyTableColumnType<TTable>
+   >,
+ >(
+   config: BasePowerSyncCollectionConfig<TTable, TSchema> &
+     ConfigWithArbitraryCollectionTypes<TTable, TSchema>
+ ): EnhancedPowerSyncCollectionConfig<
+   TTable,
+   InferPowerSyncOutputType<TTable, TSchema>,
+   TSchema
+ > & {
+   utils: PowerSyncCollectionUtils<TTable>
+   schema: TSchema
+ }
+
+ /**
+  * Implementation of powerSyncCollectionOptions that handles both schema and non-schema configurations.
+  */
+ export function powerSyncCollectionOptions<
+   TTable extends Table,
+   TSchema extends StandardSchemaV1<any> = never,
+ >(config: PowerSyncCollectionConfig<TTable, TSchema>) {
+   const {
+     database,
+     table,
+     schema: inputSchema,
+     syncBatchSize = DEFAULT_BATCH_SIZE,
+     ...restConfig
+   } = config
+
+   const deserializationSchema =
+     `deserializationSchema` in config ? config.deserializationSchema : null
+   const serializer = `serializer` in config ? config.serializer : undefined
+   const onDeserializationError =
+     `onDeserializationError` in config
+       ? config.onDeserializationError
+       : undefined
+
+   // The SQLite table type
+   type TableType = ExtractedTable<TTable>
+
+   // The collection output type
+   type OutputType = InferPowerSyncOutputType<TTable, TSchema>
+
+   const { viewName } = table
+
+   /**
+    * Deserializes data from the incoming sync stream
+    */
+   const deserializeSyncRow = (value: TableType): OutputType => {
+     const validationSchema = deserializationSchema || schema
+     const validation = validationSchema[`~standard`].validate(value)
+     if (`value` in validation) {
+       return validation.value
+     } else if (`issues` in validation) {
+       const issueMessage = `Failed to validate incoming data for ${viewName}. Issues: ${validation.issues.map((issue) => `${issue.path} - ${issue.message}`)}`
+       database.logger.error(issueMessage)
+       // The error handler is optional; guard the call instead of asserting non-null
+       onDeserializationError?.(validation)
+       throw new Error(issueMessage)
+     } else {
+       const unknownErrorMessage = `Unknown deserialization error for ${viewName}`
+       database.logger.error(unknownErrorMessage)
+       onDeserializationError?.({ issues: [{ message: unknownErrorMessage }] })
+       throw new Error(unknownErrorMessage)
+     }
+   }
+
+   // We can do basic runtime validation for columns if no explicit schema has been provided
+   const schema = inputSchema ?? (convertTableToSchema(table) as TSchema)
+   /**
+    * The onInsert, onUpdate, and onDelete handlers should only return
+    * after we have written the changes to TanStack DB.
+    * We currently only write to TanStack DB from a diff trigger.
+    * We wait for the diff trigger to observe the change,
+    * and only then return from the on[X] handlers.
+    * This ensures that when the transaction is reported as
+    * complete to the caller, the in-memory state is already
+    * consistent with the database.
+    */
+   const pendingOperationStore = PendingOperationStore.GLOBAL
+   // Keep the tracked table name unique in case multiple tabs are open.
+   const trackedTableName = `__${viewName}_tracking_${Math.floor(
+     Math.random() * 0xffffffff
+   )
+     .toString(16)
+     .padStart(8, `0`)}`
+
+   const transactor = new PowerSyncTransactor({
+     database,
+   })
+
+   /**
+    * "sync"
+    * Note that this describes the sync between the local SQLite table
+    * and the in-memory TanStack DB collection.
+    */
+   const sync: SyncConfig<OutputType, string> = {
+     sync: (params) => {
+       const { begin, write, commit, markReady } = params
+       const abortController = new AbortController()
+
+       // The `sync` callback itself must be synchronous, so the async work runs in an inner function
+       async function start() {
+         database.logger.info(
+           `Sync is starting for ${viewName} into ${trackedTableName}`
+         )
+         database.onChangeWithCallback(
+           {
+             onChange: async () => {
+               await database
+                 .writeTransaction(async (context) => {
+                   begin()
+                   const operations = await context.getAll<TriggerDiffRecord>(
+                     `SELECT * FROM ${trackedTableName} ORDER BY timestamp ASC`
+                   )
+                   const pendingOperations: Array<PendingOperation> = []
+
+                   for (const op of operations) {
+                     const { id, operation, timestamp, value } = op
+                     const parsedValue = deserializeSyncRow({
+                       id,
+                       ...JSON.parse(value),
+                     })
+                     const parsedPreviousValue =
+                       op.operation == DiffTriggerOperation.UPDATE
+                         ? deserializeSyncRow({
+                             id,
+                             ...JSON.parse(op.previous_value),
+                           })
+                         : undefined
+                     write({
+                       type: mapOperation(operation),
+                       value: parsedValue,
+                       previousValue: parsedPreviousValue,
+                     })
+                     pendingOperations.push({
+                       id,
+                       operation,
+                       timestamp,
+                       tableName: viewName,
+                     })
+                   }
+
+                   // clear the current operations
+                   await context.execute(`DELETE FROM ${trackedTableName}`)
+
+                   commit()
+                   pendingOperationStore.resolvePendingFor(pendingOperations)
+                 })
+                 .catch((error) => {
+                   database.logger.error(
+                     `An error has been detected in the sync handler`,
+                     error
+                   )
+                 })
+             },
+           },
+           {
+             signal: abortController.signal,
+             triggerImmediate: false,
+             tables: [trackedTableName],
+           }
+         )
+
+         const disposeTracking = await database.triggers.createDiffTrigger({
+           source: viewName,
+           destination: trackedTableName,
+           when: {
+             [DiffTriggerOperation.INSERT]: `TRUE`,
+             [DiffTriggerOperation.UPDATE]: `TRUE`,
+             [DiffTriggerOperation.DELETE]: `TRUE`,
+           },
+           hooks: {
+             beforeCreate: async (context) => {
+               let currentBatchCount = syncBatchSize
+               let cursor = 0
+               while (currentBatchCount == syncBatchSize) {
+                 begin()
+                 const batchItems = await context.getAll<TableType>(
+                   sanitizeSQL`SELECT * FROM ${viewName} LIMIT ? OFFSET ?`,
+                   [syncBatchSize, cursor]
+                 )
+                 currentBatchCount = batchItems.length
+                 cursor += currentBatchCount
+                 for (const row of batchItems) {
+                   write({
+                     type: `insert`,
+                     value: deserializeSyncRow(row),
+                   })
+                 }
+                 commit()
+               }
+               markReady()
+               database.logger.info(
+                 `Sync is ready for ${viewName} into ${trackedTableName}`
+               )
+             },
+           },
+         })
+
+         // If the abort controller was aborted while processing the request above
+         if (abortController.signal.aborted) {
+           await disposeTracking()
+         } else {
+           abortController.signal.addEventListener(
+             `abort`,
+             () => {
+               disposeTracking()
+             },
+             { once: true }
+           )
+         }
+       }
+
+       start().catch((error) =>
+         database.logger.error(
+           `Could not start syncing process for ${viewName} into ${trackedTableName}`,
+           error
+         )
+       )
+
+       return () => {
+         database.logger.info(
+           `Sync has been stopped for ${viewName} into ${trackedTableName}`
+         )
+         abortController.abort()
+       }
+     },
+     // No additional sync metadata is exposed
+     getSyncMetadata: undefined,
+   }
+
+   const getKey = (record: OutputType) => asPowerSyncRecord(record).id
+
+   const outputConfig: EnhancedPowerSyncCollectionConfig<
+     TTable,
+     OutputType,
+     TSchema
+   > = {
+     ...restConfig,
+     schema,
+     getKey,
+     // Syncing should start immediately since we need to monitor the changes for mutations
+     startSync: true,
+     sync,
+     onInsert: async (params) => {
+       // The transaction here should only ever contain a single insert mutation
+       return await transactor.applyTransaction(params.transaction)
+     },
+     onUpdate: async (params) => {
+       // The transaction here should only ever contain a single update mutation
+       return await transactor.applyTransaction(params.transaction)
+     },
+     onDelete: async (params) => {
+       // The transaction here should only ever contain a single delete mutation
+       return await transactor.applyTransaction(params.transaction)
+     },
+     utils: {
+       getMeta: () => ({
+         tableName: viewName,
+         trackedTableName,
+         serializeValue: (value) =>
+           serializeForSQLite(
+             value,
+             // This is required by the input generic
+             table as Table<
+               MapBaseColumnType<InferPowerSyncOutputType<TTable, TSchema>>
+             >,
+             // Coerce serializer to the shape that corresponds to the Table constructed from OutputType
+             serializer as CustomSQLiteSerializer<
+               OutputType,
+               ExtractedTableColumns<Table<MapBaseColumnType<OutputType>>>
+             >
+           ),
+       }),
+     },
+   }
+   return outputConfig
+ }
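
To make the mutation path concrete, here is a minimal usage sketch (not from the package) reusing `db` and `APP_SCHEMA` from the overload examples above, and assuming the standard TanStack DB collection mutation API:

```typescript
import { createCollection } from "@tanstack/db"
import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection"

const documents = createCollection(
  powerSyncCollectionOptions({
    database: db, // a PowerSyncDatabase, as in the examples above
    table: APP_SCHEMA.props.documents,
  })
)

// The onInsert handler persists the mutation through the PowerSyncTransactor
// and resolves only after the diff trigger has echoed the change back into
// the in-memory collection.
documents.insert({ id: crypto.randomUUID(), name: `hello` })
```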
package/src/schema.ts ADDED
@@ -0,0 +1,100 @@
+ import { ColumnType } from "@powersync/common"
+ import type { Table } from "@powersync/common"
+ import type { StandardSchemaV1 } from "@standard-schema/spec"
+ import type { ExtractedTable } from "./helpers"
+
+ /**
+  * Converts a PowerSync Table instance to a StandardSchemaV1 schema.
+  * Creates a schema that validates the structure and types of table records
+  * according to the PowerSync table definition.
+  *
+  * @template TTable - The PowerSync schema-typed Table definition
+  * @param table - The PowerSync Table instance to convert
+  * @returns A StandardSchemaV1-compatible schema with proper type validation
+  *
+  * @example
+  * ```typescript
+  * const usersTable = new Table({
+  *   name: column.text,
+  *   age: column.integer
+  * })
+  * const schema = convertTableToSchema(usersTable)
+  * ```
+  */
+ export function convertTableToSchema<TTable extends Table>(
+   table: TTable
+ ): StandardSchemaV1<ExtractedTable<TTable>> {
+   type TExtracted = ExtractedTable<TTable>
+   // Create validate function that checks types according to column definitions
+   const validate = (
+     value: unknown
+   ):
+     | StandardSchemaV1.SuccessResult<TExtracted>
+     | StandardSchemaV1.FailureResult => {
+     if (typeof value != `object` || value == null) {
+       return {
+         issues: [
+           {
+             message: `Value must be an object`,
+           },
+         ],
+       }
+     }
+
+     const issues: Array<StandardSchemaV1.Issue> = []
+
+     // Check id field
+     if (!(`id` in value) || typeof (value as any).id != `string`) {
+       issues.push({
+         message: `id field must be a string`,
+         path: [`id`],
+       })
+     }
+
+     // Check each column
+     for (const column of table.columns) {
+       const val = (value as TExtracted)[column.name as keyof TExtracted]
+
+       if (val == null) {
+         continue
+       }
+
+       switch (column.type) {
+         case ColumnType.TEXT:
+           if (typeof val != `string`) {
+             issues.push({
+               message: `${column.name} must be a string or null`,
+               path: [column.name],
+             })
+           }
+           break
+         case ColumnType.INTEGER:
+         case ColumnType.REAL:
+           if (typeof val != `number`) {
+             issues.push({
+               message: `${column.name} must be a number or null`,
+               path: [column.name],
+             })
+           }
+           break
+       }
+     }
+
+     if (issues.length > 0) {
+       return { issues }
+     }
+
+     return { value: { ...value } as TExtracted }
+   }
+
+   return {
+     "~standard": {
+       version: 1,
+       vendor: `powersync`,
+       validate,
+       types: {
+         input: {} as TExtracted,
+         output: {} as TExtracted,
+       },
+     },
+   }
+ }
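
As a sanity check, the generated schema can be exercised directly through the StandardSchemaV1 `~standard` interface defined above; a sketch, assuming `column` is imported from `@powersync/common`:

```typescript
import { Table, column } from "@powersync/common"
import { convertTableToSchema } from "./schema"

const users = new Table({ name: column.text, age: column.integer })
const schema = convertTableToSchema(users)

// The validate function here is synchronous, so results can be inspected directly
schema[`~standard`].validate({ id: `u1`, name: `Ada`, age: 36 })
// => { value: { id: `u1`, name: `Ada`, age: 36 } }

schema[`~standard`].validate({ name: 42 })
// => { issues: [{ message: `id field must be a string`, path: [`id`] },
//               { message: `name must be a string or null`, path: [`name`] }] }
```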