@tanstack/powersync-db-collection 0.1.37 → 0.1.38
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/definitions.cjs.map +1 -1
- package/dist/cjs/definitions.d.cts +34 -3
- package/dist/cjs/index.cjs +2 -0
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/index.d.cts +1 -0
- package/dist/cjs/powersync.cjs +233 -78
- package/dist/cjs/powersync.cjs.map +1 -1
- package/dist/cjs/sqlite-compiler.cjs +219 -0
- package/dist/cjs/sqlite-compiler.cjs.map +1 -0
- package/dist/cjs/sqlite-compiler.d.cts +42 -0
- package/dist/esm/definitions.d.ts +34 -3
- package/dist/esm/definitions.js.map +1 -1
- package/dist/esm/index.d.ts +1 -0
- package/dist/esm/index.js +2 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/powersync.js +233 -78
- package/dist/esm/powersync.js.map +1 -1
- package/dist/esm/sqlite-compiler.d.ts +42 -0
- package/dist/esm/sqlite-compiler.js +219 -0
- package/dist/esm/sqlite-compiler.js.map +1 -0
- package/package.json +7 -6
- package/src/definitions.ts +40 -2
- package/src/index.ts +1 -0
- package/src/powersync.ts +325 -89
- package/src/sqlite-compiler.ts +354 -0
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, Symbol.toStringTag, { value: "Module" });
|
|
3
|
+
/**
 * Compiles TanStack DB LoadSubsetOptions into SQLite query fragments.
 *
 * Returns an object with optional `where` (no WHERE keyword), optional
 * `orderBy` (no ORDER BY keyword), optional `limit`, and `params` — the
 * positional `?` values collected in the order they appear in the SQL.
 */
function compileSQLite(options, compileOptions) {
  const params = [];
  const compiled = { params };
  if (options.where) {
    compiled.where = compileExpression(options.where, params, compileOptions);
  }
  if (options.orderBy) {
    compiled.orderBy = compileOrderBy(options.orderBy, params, compileOptions);
  }
  if (options.limit !== undefined) {
    compiled.limit = options.limit;
  }
  return compiled;
}
|
|
18
|
+
/**
 * Quote a SQLite identifier: wrap in double quotes and double any
 * embedded double quotes, per SQLite identifier-quoting rules.
 */
function quoteIdentifier(name) {
  return `"` + name.replace(/"/g, `""`) + `"`;
}
|
|
22
|
+
/**
 * Compiles a single IR expression node to SQL text.
 *
 * `val` nodes push their value onto `params` and emit a `?` placeholder;
 * `ref` nodes emit either a quoted column name or, when
 * `compileOptions.jsonColumn` is set, a `json_extract` path (the `id`
 * column is always a direct column); `func` nodes are delegated to
 * compileFunction.
 *
 * NOTE(review): in the json_extract branch the column name and jsonColumn
 * are interpolated directly into the SQL rather than quoted — assumes they
 * come from trusted query IR, not user input; confirm upstream.
 *
 * @throws Error for multi-segment ref paths and unknown node types.
 */
function compileExpression(exp, params, compileOptions) {
  if (exp.type === `val`) {
    params.push(exp.value);
    return `?`;
  }
  if (exp.type === `ref`) {
    if (exp.path.length !== 1) {
      throw new Error(
        `SQLite compiler doesn't support nested properties: ${exp.path.join(`.`)}`
      );
    }
    const columnName = exp.path[0];
    const jsonColumn = compileOptions && compileOptions.jsonColumn;
    if (jsonColumn && columnName !== `id`) {
      return `json_extract(${jsonColumn}, '$.${columnName}')`;
    }
    return quoteIdentifier(columnName);
  }
  if (exp.type === `func`) {
    return compileFunction(exp, params, compileOptions);
  }
  throw new Error(`Unknown expression type: ${exp.type}`);
}
|
|
45
|
+
/**
 * Compiles an array of order-by clauses into the comma-separated body of a
 * SQL ORDER BY (without the ORDER BY keyword itself).
 */
function compileOrderBy(orderBy, params, compileOptions) {
  const pieces = [];
  for (const clause of orderBy) {
    pieces.push(compileOrderByClause(clause, params, compileOptions));
  }
  return pieces.join(`, `);
}
|
|
51
|
+
/**
 * Compiles one order-by clause: the expression SQL, an optional DESC, and
 * an explicit NULLS FIRST / NULLS LAST (per the original source, SQLite
 * supports these since 3.30.0; NULLS LAST is the default emitted here).
 */
function compileOrderByClause(clause, params, compileOptions) {
  const parts = [compileExpression(clause.expression, params, compileOptions)];
  const { direction, nulls } = clause.compareOptions;
  if (direction === `desc`) {
    parts.push(`DESC`);
  }
  parts.push(nulls === `first` ? `NULLS FIRST` : `NULLS LAST`);
  return parts.join(` `);
}
|
|
64
|
+
/**
 * True when the expression is a `val` literal holding null or undefined.
 */
function isNullValue(exp) {
  if (exp.type !== `val`) {
    return false;
  }
  return exp.value === null || exp.value === undefined;
}
|
|
67
|
+
/**
 * Compiles a function/operator IR node to SQL.
 *
 * Arguments are compiled first (pushing any literal values onto `params`),
 * then the operator name selects the SQL shape.
 *
 * Fixes over the previous revision:
 * - The `in` branch no longer pushes throwaway values and truncates the
 *   params array twice; it pops the array literal once and pushes one
 *   param per element alongside its `?` placeholder.
 * - The `not(isNull(x))` rewrite no longer re-compiles the inner
 *   expression (re-compiling duplicated any `?` params inside it,
 *   desynchronizing placeholders and params); it derives the SQL from the
 *   already-compiled `... IS NULL` text.
 *
 * @throws Error for unsupported operators, wrong arities, non-array `in`
 *         values, or null/undefined literals used with comparison operators.
 */
function compileFunction(exp, params, compileOptions) {
  const { name, args } = exp;
  // `col = NULL` is never true in SQL, so reject null/undefined literals in
  // comparison operators and point callers at isNull() instead.
  if (isComparisonOp(name)) {
    const hasNullArg = args.some((arg) => isNullValue(arg));
    if (hasNullArg) {
      throw new Error(
        `Cannot use null/undefined with '${name}' operator. Use isNull() to check for null values.`
      );
    }
  }
  // Compile all arguments up front; `val` args push their value onto params.
  const compiledArgs = args.map(
    (arg) => compileExpression(arg, params, compileOptions)
  );
  switch (name) {
    // Binary comparison operators
    case `eq`:
    case `gt`:
    case `gte`:
    case `lt`:
    case `lte`: {
      if (compiledArgs.length !== 2) {
        throw new Error(`${name} expects 2 arguments`);
      }
      const opSymbol = getComparisonOp(name);
      return `${compiledArgs[0]} ${opSymbol} ${compiledArgs[1]}`;
    }
    // Logical operators — each operand parenthesized to preserve precedence.
    case `and`:
    case `or`: {
      if (compiledArgs.length < 2) {
        throw new Error(`${name} expects at least 2 arguments`);
      }
      const opKeyword = name === `and` ? `AND` : `OR`;
      return compiledArgs.map((arg) => `(${arg})`).join(` ${opKeyword} `);
    }
    case `not`: {
      if (compiledArgs.length !== 1) {
        throw new Error(`not expects 1 argument`);
      }
      // not(isNull(x)) / not(isUndefined(x)) compile to idiomatic IS NOT NULL.
      const arg = args[0];
      if (
        arg &&
        arg.type === `func` &&
        (arg.name === `isNull` || arg.name === `isUndefined`)
      ) {
        // compiledArgs[0] is `<inner> IS NULL`; rewrite its tail rather than
        // re-compiling <inner>, which would push its params a second time.
        const innerArg = compiledArgs[0].slice(0, -` IS NULL`.length);
        return `${innerArg} IS NOT NULL`;
      }
      return `NOT (${compiledArgs[0]})`;
    }
    // Null checking
    case `isNull`:
    case `isUndefined`: {
      if (compiledArgs.length !== 1) {
        throw new Error(`${name} expects 1 argument`);
      }
      return `${compiledArgs[0]} IS NULL`;
    }
    // IN operator — the array literal was pushed onto params as a single
    // value; replace it with one param per element and matching placeholders.
    case `in`: {
      if (compiledArgs.length !== 2) {
        throw new Error(`in expects 2 arguments (column and array)`);
      }
      const arrayValue = params[params.length - 1];
      if (!Array.isArray(arrayValue)) {
        throw new Error(`in operator requires an array value`);
      }
      params.pop();
      const placeholders = [];
      for (const val of arrayValue) {
        params.push(val);
        placeholders.push(`?`);
      }
      return `${compiledArgs[0]} IN (${placeholders.join(`, `)})`;
    }
    // String operators
    case `like`: {
      if (compiledArgs.length !== 2) {
        throw new Error(`like expects 2 arguments`);
      }
      return `${compiledArgs[0]} LIKE ${compiledArgs[1]}`;
    }
    case `ilike`: {
      if (compiledArgs.length !== 2) {
        throw new Error(`ilike expects 2 arguments`);
      }
      // Case-insensitive match via NOCASE collation (ASCII-only in SQLite).
      return `${compiledArgs[0]} LIKE ${compiledArgs[1]} COLLATE NOCASE`;
    }
    // String case functions
    case `upper`: {
      if (compiledArgs.length !== 1) {
        throw new Error(`upper expects 1 argument`);
      }
      return `UPPER(${compiledArgs[0]})`;
    }
    case `lower`: {
      if (compiledArgs.length !== 1) {
        throw new Error(`lower expects 1 argument`);
      }
      return `LOWER(${compiledArgs[0]})`;
    }
    case `length`: {
      if (compiledArgs.length !== 1) {
        throw new Error(`length expects 1 argument`);
      }
      return `LENGTH(${compiledArgs[0]})`;
    }
    case `concat`: {
      if (compiledArgs.length < 1) {
        throw new Error(`concat expects at least 1 argument`);
      }
      // NOTE(review): the CONCAT() SQL function needs a recent SQLite
      // (3.44+); confirm the target runtime, else `||` would be needed.
      return `CONCAT(${compiledArgs.join(`, `)})`;
    }
    case `add`: {
      if (compiledArgs.length !== 2) {
        throw new Error(`add expects 2 arguments`);
      }
      return `${compiledArgs[0]} + ${compiledArgs[1]}`;
    }
    // Null fallback
    case `coalesce`: {
      if (compiledArgs.length < 1) {
        throw new Error(`coalesce expects at least 1 argument`);
      }
      return `COALESCE(${compiledArgs.join(`, `)})`;
    }
    default:
      throw new Error(
        `Operator '${name}' is not supported in PowerSync on-demand sync. Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like, ilike, upper, lower, length, concat, add, coalesce`
      );
  }
}
|
|
205
|
+
/**
 * True for operators that compare a column against a value (the set for
 * which null literals are rejected by compileFunction).
 */
function isComparisonOp(name) {
  switch (name) {
    case `eq`:
    case `gt`:
    case `gte`:
    case `lt`:
    case `lte`:
    case `like`:
    case `ilike`:
      return true;
    default:
      return false;
  }
}
|
|
208
|
+
/**
 * Maps a binary comparison operator name to its SQL symbol.
 * Returns undefined for names outside the eq/gt/gte/lt/lte set.
 */
function getComparisonOp(name) {
  switch (name) {
    case `eq`:
      return `=`;
    case `gt`:
      return `>`;
    case `gte`:
      return `>=`;
    case `lt`:
      return `<`;
    case `lte`:
      return `<=`;
  }
}
|
|
218
|
+
exports.compileSQLite = compileSQLite;
|
|
219
|
+
//# sourceMappingURL=sqlite-compiler.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"sqlite-compiler.cjs","sources":["../../src/sqlite-compiler.ts"],"sourcesContent":["import type { IR, LoadSubsetOptions } from '@tanstack/db'\n\n/**\n * Result of compiling LoadSubsetOptions to SQLite\n */\nexport interface SQLiteCompiledQuery {\n /** The WHERE clause (without \"WHERE\" keyword), e.g., \"price > ?\" */\n where?: string\n /** The ORDER BY clause (without \"ORDER BY\" keyword), e.g., \"price DESC\" */\n orderBy?: string\n /** The LIMIT value */\n limit?: number\n /** Parameter values in order, to be passed to SQLite query */\n params: Array<unknown>\n}\n\n/**\n * Options for controlling how SQL is compiled.\n */\nexport interface CompileSQLiteOptions {\n /**\n * When set, column references emit `json_extract(<jsonColumn>, '$.<columnName>')`\n * instead of `\"<columnName>\"`. The `id` column is excluded since it's stored\n * as a direct column in the tracked table.\n */\n jsonColumn?: string\n}\n\n/**\n * Compiles TanStack DB LoadSubsetOptions to SQLite query components.\n *\n * @example\n * ```typescript\n * const compiled = compileSQLite({\n * where: { type: 'func', name: 'gt', args: [\n * { type: 'ref', path: ['price'] },\n * { type: 'val', value: 100 }\n * ]},\n * orderBy: [{ expression: { type: 'ref', path: ['price'] }, compareOptions: { direction: 'desc', nulls: 'last' } }],\n * limit: 50\n * })\n * // Result: { where: '\"price\" > ?', orderBy: '\"price\" DESC', limit: 50, params: [100] }\n * ```\n */\nexport function compileSQLite(\n options: LoadSubsetOptions,\n compileOptions?: CompileSQLiteOptions,\n): SQLiteCompiledQuery {\n const { where, orderBy, limit } = options\n\n const params: Array<unknown> = []\n const result: SQLiteCompiledQuery = { params }\n\n if (where) {\n result.where = compileExpression(where, params, compileOptions)\n }\n\n if (orderBy) {\n result.orderBy = compileOrderBy(orderBy, params, compileOptions)\n }\n\n if (limit !== undefined) {\n result.limit = limit\n }\n\n return result\n}\n\n/**\n * Quote 
SQLite identifiers to handle column names correctly.\n * SQLite uses double quotes for identifiers.\n */\nfunction quoteIdentifier(name: string): string {\n // Escape any double quotes in the name by doubling them\n const escaped = name.replace(/\"/g, `\"\"`)\n return `\"${escaped}\"`\n}\n\n/**\n * Compiles a BasicExpression to a SQL string, mutating the params array.\n */\nfunction compileExpression(\n exp: IR.BasicExpression<unknown>,\n params: Array<unknown>,\n compileOptions?: CompileSQLiteOptions,\n): string {\n switch (exp.type) {\n case `val`:\n params.push(exp.value)\n return `?`\n case `ref`: {\n if (exp.path.length !== 1) {\n throw new Error(\n `SQLite compiler doesn't support nested properties: ${exp.path.join(`.`)}`,\n )\n }\n const columnName = exp.path[0]!\n if (compileOptions?.jsonColumn && columnName !== `id`) {\n return `json_extract(${compileOptions.jsonColumn}, '$.${columnName}')`\n }\n return quoteIdentifier(columnName)\n }\n case `func`:\n return compileFunction(exp, params, compileOptions)\n default:\n throw new Error(`Unknown expression type: ${(exp as any).type}`)\n }\n}\n\n/**\n * Compiles an OrderBy array to a SQL ORDER BY clause.\n */\nfunction compileOrderBy(\n orderBy: IR.OrderBy,\n params: Array<unknown>,\n compileOptions?: CompileSQLiteOptions,\n): string {\n const clauses = orderBy.map((clause: IR.OrderByClause) =>\n compileOrderByClause(clause, params, compileOptions),\n )\n return clauses.join(`, `)\n}\n\n/**\n * Compiles a single OrderByClause to SQL.\n */\nfunction compileOrderByClause(\n clause: IR.OrderByClause,\n params: Array<unknown>,\n compileOptions?: CompileSQLiteOptions,\n): string {\n const { expression, compareOptions } = clause\n let sql = compileExpression(expression, params, compileOptions)\n\n if (compareOptions.direction === `desc`) {\n sql = `${sql} DESC`\n }\n\n // SQLite supports NULLS FIRST/LAST (since 3.30.0)\n if (compareOptions.nulls === `first`) {\n sql = `${sql} NULLS FIRST`\n } else {\n // Default to 
NULLS LAST (nulls === 'last')\n sql = `${sql} NULLS LAST`\n }\n\n return sql\n}\n\n/**\n * Check if a BasicExpression represents a null/undefined value\n */\nfunction isNullValue(exp: IR.BasicExpression<unknown>): boolean {\n return exp.type === `val` && (exp.value === null || exp.value === undefined)\n}\n\n/**\n * Compiles a function expression (operator) to SQL.\n */\nfunction compileFunction(\n exp: IR.Func<unknown>,\n params: Array<unknown>,\n compileOptions?: CompileSQLiteOptions,\n): string {\n const { name, args } = exp\n\n // Check for null values in comparison operators\n if (isComparisonOp(name)) {\n const hasNullArg = args.some((arg: IR.BasicExpression) => isNullValue(arg))\n if (hasNullArg) {\n throw new Error(\n `Cannot use null/undefined with '${name}' operator. ` +\n `Use isNull() to check for null values.`,\n )\n }\n }\n\n // Compile arguments\n const compiledArgs = args.map((arg: IR.BasicExpression) =>\n compileExpression(arg, params, compileOptions),\n )\n\n // Handle different operator types\n switch (name) {\n // Binary comparison operators\n case `eq`:\n case `gt`:\n case `gte`:\n case `lt`:\n case `lte`: {\n if (compiledArgs.length !== 2) {\n throw new Error(`${name} expects 2 arguments`)\n }\n const opSymbol = getComparisonOp(name)\n return `${compiledArgs[0]} ${opSymbol} ${compiledArgs[1]}`\n }\n\n // Logical operators\n case `and`:\n case `or`: {\n if (compiledArgs.length < 2) {\n throw new Error(`${name} expects at least 2 arguments`)\n }\n const opKeyword = name === `and` ? 
`AND` : `OR`\n return compiledArgs\n .map((arg: string) => `(${arg})`)\n .join(` ${opKeyword} `)\n }\n\n case `not`: {\n if (compiledArgs.length !== 1) {\n throw new Error(`not expects 1 argument`)\n }\n // Check if argument is isNull/isUndefined for IS NOT NULL\n const arg = args[0]\n if (arg && arg.type === `func`) {\n if (arg.name === `isNull` || arg.name === `isUndefined`) {\n const innerArg = compileExpression(\n arg.args[0]!,\n params,\n compileOptions,\n )\n return `${innerArg} IS NOT NULL`\n }\n }\n return `NOT (${compiledArgs[0]})`\n }\n\n // Null checking\n case `isNull`:\n case `isUndefined`: {\n if (compiledArgs.length !== 1) {\n throw new Error(`${name} expects 1 argument`)\n }\n return `${compiledArgs[0]} IS NULL`\n }\n\n // IN operator\n case `in`: {\n if (compiledArgs.length !== 2) {\n throw new Error(`in expects 2 arguments (column and array)`)\n }\n // The second argument should be an array value\n // We need to handle this specially - expand the array into multiple placeholders\n const lastParamIndex = params.length - 1\n const arrayValue = params[lastParamIndex]\n\n if (!Array.isArray(arrayValue)) {\n throw new Error(`in operator requires an array value`)\n }\n\n // Remove the array param and add individual values\n params.pop()\n const placeholders = arrayValue.map(() => {\n params.push(arrayValue[params.length - lastParamIndex])\n return `?`\n })\n\n // Re-add individual values properly\n params.length = lastParamIndex // Reset to before array\n for (const val of arrayValue) {\n params.push(val)\n }\n\n return `${compiledArgs[0]} IN (${placeholders.join(`, `)})`\n }\n\n // String operators\n case `like`: {\n if (compiledArgs.length !== 2) {\n throw new Error(`like expects 2 arguments`)\n }\n return `${compiledArgs[0]} LIKE ${compiledArgs[1]}`\n }\n\n case `ilike`: {\n if (compiledArgs.length !== 2) {\n throw new Error(`ilike expects 2 arguments`)\n }\n return `${compiledArgs[0]} LIKE ${compiledArgs[1]} COLLATE NOCASE`\n }\n\n // String case 
functions\n case `upper`: {\n if (compiledArgs.length !== 1) {\n throw new Error(`upper expects 1 argument`)\n }\n return `UPPER(${compiledArgs[0]})`\n }\n\n case `lower`: {\n if (compiledArgs.length !== 1) {\n throw new Error(`lower expects 1 argument`)\n }\n return `LOWER(${compiledArgs[0]})`\n }\n\n case `length`: {\n if (compiledArgs.length !== 1) {\n throw new Error(`length expects 1 argument`)\n }\n return `LENGTH(${compiledArgs[0]})`\n }\n\n case `concat`: {\n if (compiledArgs.length < 1) {\n throw new Error(`concat expects at least 1 argument`)\n }\n return `CONCAT(${compiledArgs.join(`, `)})`\n }\n\n case `add`: {\n if (compiledArgs.length !== 2) {\n throw new Error(`add expects 2 arguments`)\n }\n return `${compiledArgs[0]} + ${compiledArgs[1]}`\n }\n\n // Null fallback\n case `coalesce`: {\n if (compiledArgs.length < 1) {\n throw new Error(`coalesce expects at least 1 argument`)\n }\n return `COALESCE(${compiledArgs.join(`, `)})`\n }\n\n default:\n throw new Error(\n `Operator '${name}' is not supported in PowerSync on-demand sync. 
` +\n `Supported operators: eq, gt, gte, lt, lte, and, or, not, isNull, in, like, ilike, upper, lower, length, concat, add, coalesce`,\n )\n }\n}\n\n/**\n * Check if operator is a comparison operator\n */\nfunction isComparisonOp(name: string): boolean {\n return [`eq`, `gt`, `gte`, `lt`, `lte`, `like`, `ilike`].includes(name)\n}\n\n/**\n * Get the SQL symbol for a comparison operator\n */\nfunction getComparisonOp(name: string): string {\n const ops: Record<string, string> = {\n eq: `=`,\n gt: `>`,\n gte: `>=`,\n lt: `<`,\n lte: `<=`,\n }\n return ops[name]!\n}\n"],"names":[],"mappings":";;AA4CO,SAAS,cACd,SACA,gBACqB;AACrB,QAAM,EAAE,OAAO,SAAS,MAAA,IAAU;AAElC,QAAM,SAAyB,CAAA;AAC/B,QAAM,SAA8B,EAAE,OAAA;AAEtC,MAAI,OAAO;AACT,WAAO,QAAQ,kBAAkB,OAAO,QAAQ,cAAc;AAAA,EAChE;AAEA,MAAI,SAAS;AACX,WAAO,UAAU,eAAe,SAAS,QAAQ,cAAc;AAAA,EACjE;AAEA,MAAI,UAAU,QAAW;AACvB,WAAO,QAAQ;AAAA,EACjB;AAEA,SAAO;AACT;AAMA,SAAS,gBAAgB,MAAsB;AAE7C,QAAM,UAAU,KAAK,QAAQ,MAAM,IAAI;AACvC,SAAO,IAAI,OAAO;AACpB;AAKA,SAAS,kBACP,KACA,QACA,gBACQ;AACR,UAAQ,IAAI,MAAA;AAAA,IACV,KAAK;AACH,aAAO,KAAK,IAAI,KAAK;AACrB,aAAO;AAAA,IACT,KAAK,OAAO;AACV,UAAI,IAAI,KAAK,WAAW,GAAG;AACzB,cAAM,IAAI;AAAA,UACR,sDAAsD,IAAI,KAAK,KAAK,GAAG,CAAC;AAAA,QAAA;AAAA,MAE5E;AACA,YAAM,aAAa,IAAI,KAAK,CAAC;AAC7B,UAAI,gBAAgB,cAAc,eAAe,MAAM;AACrD,eAAO,gBAAgB,eAAe,UAAU,QAAQ,UAAU;AAAA,MACpE;AACA,aAAO,gBAAgB,UAAU;AAAA,IACnC;AAAA,IACA,KAAK;AACH,aAAO,gBAAgB,KAAK,QAAQ,cAAc;AAAA,IACpD;AACE,YAAM,IAAI,MAAM,4BAA6B,IAAY,IAAI,EAAE;AAAA,EAAA;AAErE;AAKA,SAAS,eACP,SACA,QACA,gBACQ;AACR,QAAM,UAAU,QAAQ;AAAA,IAAI,CAAC,WAC3B,qBAAqB,QAAQ,QAAQ,cAAc;AAAA,EAAA;AAErD,SAAO,QAAQ,KAAK,IAAI;AAC1B;AAKA,SAAS,qBACP,QACA,QACA,gBACQ;AACR,QAAM,EAAE,YAAY,eAAA,IAAmB;AACvC,MAAI,MAAM,kBAAkB,YAAY,QAAQ,cAAc;AAE9D,MAAI,eAAe,cAAc,QAAQ;AACvC,UAAM,GAAG,GAAG;AAAA,EACd;AAGA,MAAI,eAAe,UAAU,SAAS;AACpC,UAAM,GAAG,GAAG;AAAA,EACd,OAAO;AAEL,UAAM,GAAG,GAAG;AAAA,EACd;AAEA,SAAO;AACT;AAKA,SAAS,YAAY,KAA2C;AAC9D,SAAO,IAAI,SAAS,UAAU,IAAI,UAAU,QAAQ,IAAI,UAAU;AACpE;AAKA,SAAS,gBACP,KACA,QACA,gBACQ;AACR,QAAM,EAA
E,MAAM,KAAA,IAAS;AAGvB,MAAI,eAAe,IAAI,GAAG;AACxB,UAAM,aAAa,KAAK,KAAK,CAAC,QAA4B,YAAY,GAAG,CAAC;AAC1E,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR,mCAAmC,IAAI;AAAA,MAAA;AAAA,IAG3C;AAAA,EACF;AAGA,QAAM,eAAe,KAAK;AAAA,IAAI,CAAC,QAC7B,kBAAkB,KAAK,QAAQ,cAAc;AAAA,EAAA;AAI/C,UAAQ,MAAA;AAAA;AAAA,IAEN,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,OAAO;AACV,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,GAAG,IAAI,sBAAsB;AAAA,MAC/C;AACA,YAAM,WAAW,gBAAgB,IAAI;AACrC,aAAO,GAAG,aAAa,CAAC,CAAC,IAAI,QAAQ,IAAI,aAAa,CAAC,CAAC;AAAA,IAC1D;AAAA;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,MAAM;AACT,UAAI,aAAa,SAAS,GAAG;AAC3B,cAAM,IAAI,MAAM,GAAG,IAAI,+BAA+B;AAAA,MACxD;AACA,YAAM,YAAY,SAAS,QAAQ,QAAQ;AAC3C,aAAO,aACJ,IAAI,CAAC,QAAgB,IAAI,GAAG,GAAG,EAC/B,KAAK,IAAI,SAAS,GAAG;AAAA,IAC1B;AAAA,IAEA,KAAK,OAAO;AACV,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,wBAAwB;AAAA,MAC1C;AAEA,YAAM,MAAM,KAAK,CAAC;AAClB,UAAI,OAAO,IAAI,SAAS,QAAQ;AAC9B,YAAI,IAAI,SAAS,YAAY,IAAI,SAAS,eAAe;AACvD,gBAAM,WAAW;AAAA,YACf,IAAI,KAAK,CAAC;AAAA,YACV;AAAA,YACA;AAAA,UAAA;AAEF,iBAAO,GAAG,QAAQ;AAAA,QACpB;AAAA,MACF;AACA,aAAO,QAAQ,aAAa,CAAC,CAAC;AAAA,IAChC;AAAA;AAAA,IAGA,KAAK;AAAA,IACL,KAAK,eAAe;AAClB,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,GAAG,IAAI,qBAAqB;AAAA,MAC9C;AACA,aAAO,GAAG,aAAa,CAAC,CAAC;AAAA,IAC3B;AAAA;AAAA,IAGA,KAAK,MAAM;AACT,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,2CAA2C;AAAA,MAC7D;AAGA,YAAM,iBAAiB,OAAO,SAAS;AACvC,YAAM,aAAa,OAAO,cAAc;AAExC,UAAI,CAAC,MAAM,QAAQ,UAAU,GAAG;AAC9B,cAAM,IAAI,MAAM,qCAAqC;AAAA,MACvD;AAGA,aAAO,IAAA;AACP,YAAM,eAAe,WAAW,IAAI,MAAM;AACxC,eAAO,KAAK,WAAW,OAAO,SAAS,cAAc,CAAC;AACtD,eAAO;AAAA,MACT,CAAC;AAGD,aAAO,SAAS;AAChB,iBAAW,OAAO,YAAY;AAC5B,eAAO,KAAK,GAAG;AAAA,MACjB;AAEA,aAAO,GAAG,aAAa,CAAC,CAAC,QAAQ,aAAa,KAAK,IAAI,CAAC;AAAA,IAC1D;AAAA;AAAA,IAGA,KAAK,QAAQ;AACX,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AACA,aAAO,GAAG,aAAa,CAAC,CAAC,SAAS,aAAa,CAAC,CAAC;AAAA,IACnD;AAAA,IAEA,KAAK,SAAS;AACZ,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AACA,aAAO,GAAG,aAAa,CAAC,CAAC,SAAS,aAAa,CAAC,CAAC;AAAA,I
ACnD;AAAA;AAAA,IAGA,KAAK,SAAS;AACZ,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AACA,aAAO,SAAS,aAAa,CAAC,CAAC;AAAA,IACjC;AAAA,IAEA,KAAK,SAAS;AACZ,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,0BAA0B;AAAA,MAC5C;AACA,aAAO,SAAS,aAAa,CAAC,CAAC;AAAA,IACjC;AAAA,IAEA,KAAK,UAAU;AACb,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,2BAA2B;AAAA,MAC7C;AACA,aAAO,UAAU,aAAa,CAAC,CAAC;AAAA,IAClC;AAAA,IAEA,KAAK,UAAU;AACb,UAAI,aAAa,SAAS,GAAG;AAC3B,cAAM,IAAI,MAAM,oCAAoC;AAAA,MACtD;AACA,aAAO,UAAU,aAAa,KAAK,IAAI,CAAC;AAAA,IAC1C;AAAA,IAEA,KAAK,OAAO;AACV,UAAI,aAAa,WAAW,GAAG;AAC7B,cAAM,IAAI,MAAM,yBAAyB;AAAA,MAC3C;AACA,aAAO,GAAG,aAAa,CAAC,CAAC,MAAM,aAAa,CAAC,CAAC;AAAA,IAChD;AAAA;AAAA,IAGA,KAAK,YAAY;AACf,UAAI,aAAa,SAAS,GAAG;AAC3B,cAAM,IAAI,MAAM,sCAAsC;AAAA,MACxD;AACA,aAAO,YAAY,aAAa,KAAK,IAAI,CAAC;AAAA,IAC5C;AAAA,IAEA;AACE,YAAM,IAAI;AAAA,QACR,aAAa,IAAI;AAAA,MAAA;AAAA,EAEnB;AAEN;AAKA,SAAS,eAAe,MAAuB;AAC7C,SAAO,CAAC,MAAM,MAAM,OAAO,MAAM,OAAO,QAAQ,OAAO,EAAE,SAAS,IAAI;AACxE;AAKA,SAAS,gBAAgB,MAAsB;AAC7C,QAAM,MAA8B;AAAA,IAClC,IAAI;AAAA,IACJ,IAAI;AAAA,IACJ,KAAK;AAAA,IACL,IAAI;AAAA,IACJ,KAAK;AAAA,EAAA;AAEP,SAAO,IAAI,IAAI;AACjB;;"}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { LoadSubsetOptions } from '@tanstack/db';
|
|
2
|
+
/**
|
|
3
|
+
* Result of compiling LoadSubsetOptions to SQLite
|
|
4
|
+
*/
|
|
5
|
+
export interface SQLiteCompiledQuery {
|
|
6
|
+
/** The WHERE clause (without "WHERE" keyword), e.g., "price > ?" */
|
|
7
|
+
where?: string;
|
|
8
|
+
/** The ORDER BY clause (without "ORDER BY" keyword), e.g., "price DESC" */
|
|
9
|
+
orderBy?: string;
|
|
10
|
+
/** The LIMIT value */
|
|
11
|
+
limit?: number;
|
|
12
|
+
/** Parameter values in order, to be passed to SQLite query */
|
|
13
|
+
params: Array<unknown>;
|
|
14
|
+
}
|
|
15
|
+
/**
|
|
16
|
+
* Options for controlling how SQL is compiled.
|
|
17
|
+
*/
|
|
18
|
+
export interface CompileSQLiteOptions {
|
|
19
|
+
/**
|
|
20
|
+
* When set, column references emit `json_extract(<jsonColumn>, '$.<columnName>')`
|
|
21
|
+
* instead of `"<columnName>"`. The `id` column is excluded since it's stored
|
|
22
|
+
* as a direct column in the tracked table.
|
|
23
|
+
*/
|
|
24
|
+
jsonColumn?: string;
|
|
25
|
+
}
|
|
26
|
+
/**
|
|
27
|
+
* Compiles TanStack DB LoadSubsetOptions to SQLite query components.
|
|
28
|
+
*
|
|
29
|
+
* @example
|
|
30
|
+
* ```typescript
|
|
31
|
+
* const compiled = compileSQLite({
|
|
32
|
+
* where: { type: 'func', name: 'gt', args: [
|
|
33
|
+
* { type: 'ref', path: ['price'] },
|
|
34
|
+
* { type: 'val', value: 100 }
|
|
35
|
+
* ]},
|
|
36
|
+
* orderBy: [{ expression: { type: 'ref', path: ['price'] }, compareOptions: { direction: 'desc', nulls: 'last' } }],
|
|
37
|
+
* limit: 50
|
|
38
|
+
* })
|
|
39
|
+
* // Result: { where: '"price" > ?', orderBy: '"price" DESC', limit: 50, params: [100] }
|
|
40
|
+
* ```
|
|
41
|
+
*/
|
|
42
|
+
export declare function compileSQLite(options: LoadSubsetOptions, compileOptions?: CompileSQLiteOptions): SQLiteCompiledQuery;
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { AbstractPowerSyncDatabase, Table } from '@powersync/common';
|
|
2
2
|
import { StandardSchemaV1 } from '@standard-schema/spec';
|
|
3
|
-
import { BaseCollectionConfig, CollectionConfig, InferSchemaOutput } from '@tanstack/db';
|
|
3
|
+
import { BaseCollectionConfig, CleanupFn, CollectionConfig, InferSchemaOutput, LoadSubsetOptions } from '@tanstack/db';
|
|
4
4
|
import { AnyTableColumnType, ExtractedTable, OptionalExtractedTable, PowerSyncRecord } from './helpers.js';
|
|
5
5
|
/**
|
|
6
6
|
* Small helper which determines the output type if:
|
|
@@ -112,7 +112,38 @@ export type ConfigWithArbitraryCollectionTypes<TTable extends Table, TSchema ext
|
|
|
112
112
|
*/
|
|
113
113
|
deserializationSchema: StandardSchemaV1<ExtractedTable<TTable>, StandardSchemaV1.InferOutput<TSchema>>;
|
|
114
114
|
};
|
|
115
|
-
|
|
115
|
+
/**
|
|
116
|
+
* Eager sync mode hooks.
|
|
117
|
+
* Called once when the collection sync starts and stops.
|
|
118
|
+
*/
|
|
119
|
+
export type EagerSyncHooks = {
|
|
120
|
+
syncMode?: 'eager';
|
|
121
|
+
/**
|
|
122
|
+
* Called when the collection sync starts.
|
|
123
|
+
* Use this to set up external data sources (e.g. subscribing to a sync stream).
|
|
124
|
+
*
|
|
125
|
+
* @returns A cleanup function that is called when the collection sync is cleaned up.
|
|
126
|
+
*/
|
|
127
|
+
onLoad?: () => CleanupFn | void | Promise<CleanupFn | void>;
|
|
128
|
+
onLoadSubset?: never;
|
|
129
|
+
};
|
|
130
|
+
/**
|
|
131
|
+
* On-demand sync mode hooks.
|
|
132
|
+
* Called each time a subset is loaded or unloaded in response to live query changes.
|
|
133
|
+
*/
|
|
134
|
+
export type OnDemandSyncHooks = {
|
|
135
|
+
syncMode: 'on-demand';
|
|
136
|
+
onLoad?: never;
|
|
137
|
+
/**
|
|
138
|
+
* Called when a subset of data is requested by a live query.
|
|
139
|
+
* Use this to set up external data sources for the requested subset
|
|
140
|
+
* (e.g. subscribing to a sync stream with parameters derived from the query predicate).
|
|
141
|
+
*
|
|
142
|
+
* @returns A cleanup function that is called when the subset is unloaded.
|
|
143
|
+
*/
|
|
144
|
+
onLoadSubset?: (options: LoadSubsetOptions) => CleanupFn | void | Promise<CleanupFn | void>;
|
|
145
|
+
};
|
|
146
|
+
export type BasePowerSyncCollectionConfig<TTable extends Table = Table, TSchema extends StandardSchemaV1 = never> = Omit<BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>, `onInsert` | `onUpdate` | `onDelete` | `getKey` | `syncMode`> & {
|
|
116
147
|
/** The PowerSync schema Table definition */
|
|
117
148
|
table: TTable;
|
|
118
149
|
/** The PowerSync database instance */
|
|
@@ -130,7 +161,7 @@ export type BasePowerSyncCollectionConfig<TTable extends Table = Table, TSchema
|
|
|
130
161
|
* streaming of initial results, at the cost of more query calls.
|
|
131
162
|
*/
|
|
132
163
|
syncBatchSize?: number;
|
|
133
|
-
};
|
|
164
|
+
} & (EagerSyncHooks | OnDemandSyncHooks);
|
|
134
165
|
/**
|
|
135
166
|
* Configuration interface for PowerSync collection options.
|
|
136
167
|
* @template TTable - The PowerSync table schema definition
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"definitions.js","sources":["../../src/definitions.ts"],"sourcesContent":["import type { AbstractPowerSyncDatabase, Table } from '@powersync/common'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n BaseCollectionConfig,\n CollectionConfig,\n InferSchemaOutput,\n} from '@tanstack/db'\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n OptionalExtractedTable,\n PowerSyncRecord,\n} from './helpers'\n\n/**\n * Small helper which determines the output type if:\n * - Standard SQLite types are to be used OR\n * - If the provided schema should be used.\n */\nexport type InferPowerSyncOutputType<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<PowerSyncRecord> = never,\n> = TSchema extends never ? ExtractedTable<TTable> : InferSchemaOutput<TSchema>\n\n/**\n * A mapping type for custom serialization of object properties to SQLite-compatible values.\n *\n * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms\n * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization.\n *\n * ## Generics\n * - `TOutput`: The input object type, representing the row data to be serialized.\n * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema.\n *\n * ## Usage\n * Use this type to define a map of serialization functions for specific keys when you need custom handling\n * (e.g., converting complex objects, formatting dates, or handling enums).\n *\n * Example:\n * ```ts\n * const serializer: CustomSQLiteSerializer<MyRowType, MySQLiteType> = {\n * createdAt: (date) => date.toISOString(),\n * status: (status) => status ? 
1 : 0,\n * meta: (meta) => JSON.stringify(meta),\n * };\n * ```\n *\n * ## Behavior\n * - Each key maps to a function that receives the value and returns the SQLite-compatible value.\n * - Used by `serializeForSQLite` to override default serialization for specific columns.\n */\nexport type CustomSQLiteSerializer<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = Partial<{\n [Key in keyof TOutput]: (\n value: TOutput[Key],\n ) => Key extends keyof TSQLite ? TSQLite[Key] : never\n}>\n\nexport type SerializerConfig<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = {\n /**\n * Optional partial serializer object for customizing how individual columns are serialized for SQLite.\n *\n * This should be a partial map of column keys to serialization functions, following the\n * {@link CustomSQLiteSerializer} type. Each function receives the column value and returns a value\n * compatible with SQLite storage.\n *\n * If not provided for a column, the default behavior is used:\n * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified.\n * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0.\n *\n * Use this option to override serialization for specific columns, such as formatting dates, handling enums,\n * or serializing complex objects.\n *\n * Example:\n * ```typescript\n * serializer: {\n * createdAt: (date) => date.getTime(), // Store as timestamp\n * meta: (meta) => JSON.stringify(meta), // Custom object serialization\n * }\n * ```\n */\n serializer?: CustomSQLiteSerializer<TOutput, TSQLite>\n\n /**\n * Application logic should ensure that incoming synced data is always valid.\n * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error.\n * Use this callback to react to deserialization errors.\n */\n onDeserializationError: (error: StandardSchemaV1.FailureResult) => 
void\n}\n\n/**\n * Config for when TInput and TOutput are both the SQLite types.\n */\nexport type ConfigWithSQLiteTypes = {}\n\n/**\n * Config where TInput is the SQLite types while TOutput can be defined by TSchema.\n * We can use the same schema to validate TInput and incoming SQLite changes.\n */\nexport type ConfigWithSQLiteInputType<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types.\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n}\n\n/**\n * Config where TInput and TOutput have arbitrarily typed values.\n * The keys of the types need to equal the SQLite types.\n * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput.\n */\nexport type ConfigWithArbitraryCollectionTypes<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n /**\n * Schema for deserializing and validating input data from the sync stream.\n *\n * This schema defines how to transform and validate data coming from SQLite types (as stored in the database)\n * into the desired output types (`TOutput`) expected by your application or validation logic.\n *\n * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules\n * for each column. 
This is especially useful when your application expects richer types (e.g., Date, enums, objects)\n * than what SQLite natively supports.\n *\n * Use this to ensure that incoming data from the sync stream is properly converted and validated before use.\n *\n * Example:\n * ```typescript\n * deserializationSchema: z.object({\n * createdAt: z.preprocess((val) => new Date(val as string), z.date()),\n * meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... })),\n * })\n * ```\n *\n * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage\n * and your application's expected types.\n */\n deserializationSchema: StandardSchemaV1<\n ExtractedTable<TTable>,\n StandardSchemaV1.InferOutput<TSchema>\n >\n}\nexport type BasePowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1 = never,\n> = Omit<\n BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>,\n `onInsert` | `onUpdate` | `onDelete` | `getKey`\n> & {\n /** The PowerSync schema Table definition */\n table: TTable\n /** The PowerSync database instance */\n database: AbstractPowerSyncDatabase\n /**\n * The maximum number of documents to read from the SQLite table\n * in a single batch during the initial sync between PowerSync and the\n * in-memory TanStack DB collection.\n *\n * @remarks\n * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.\n * - Larger values reduce the number of round trips to the storage\n * engine but increase memory usage per batch.\n * - Smaller values may lower memory usage and allow earlier\n * streaming of initial results, at the cost of more query calls.\n */\n syncBatchSize?: number\n}\n\n/**\n * Configuration interface for PowerSync collection options.\n * @template TTable - The PowerSync table schema definition\n * @template TSchema - The validation schema type\n */\n/**\n * Configuration options for creating a PowerSync collection.\n *\n * @example\n * ```typescript\n * 
const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport type PowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<any> = never,\n> = BasePowerSyncCollectionConfig<TTable, TSchema> &\n (\n | ConfigWithSQLiteTypes\n | ConfigWithSQLiteInputType<TTable, TSchema>\n | ConfigWithArbitraryCollectionTypes<TTable, TSchema>\n )\n\n/**\n * Metadata for the PowerSync Collection.\n */\nexport type PowerSyncCollectionMeta<TTable extends Table = Table> = {\n /**\n * The SQLite table representing the collection.\n */\n tableName: string\n /**\n * The internal table used to track diffs for the collection.\n */\n trackedTableName: string\n\n /**\n * Serializes a collection value to the SQLite type\n */\n serializeValue: (value: any) => ExtractedTable<TTable>\n\n /**\n * Whether the PowerSync table tracks metadata.\n */\n metadataIsTracked: boolean\n}\n\n/**\n * A CollectionConfig which includes utilities for PowerSync.\n */\nexport type EnhancedPowerSyncCollectionConfig<\n TTable extends Table,\n OutputType extends Record<string, unknown> = Record<string, unknown>,\n TSchema extends StandardSchemaV1 = never,\n> = CollectionConfig<\n OutputType,\n string,\n TSchema,\n PowerSyncCollectionUtils<TTable>\n> & {\n id?: string\n utils: PowerSyncCollectionUtils<TTable>\n schema?: TSchema\n}\n\n/**\n * Collection-level utilities for PowerSync.\n */\nexport type PowerSyncCollectionUtils<TTable extends Table = Table> = {\n getMeta: () => PowerSyncCollectionMeta<TTable>\n}\n\n/**\n * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}.\n */\nexport const DEFAULT_BATCH_SIZE = 
1000\n"],"names":[],"mappings":"AA2RO,MAAM,qBAAqB;"}
|
|
1
|
+
{"version":3,"file":"definitions.js","sources":["../../src/definitions.ts"],"sourcesContent":["import type { AbstractPowerSyncDatabase, Table } from '@powersync/common'\nimport type { StandardSchemaV1 } from '@standard-schema/spec'\nimport type {\n BaseCollectionConfig,\n CleanupFn,\n CollectionConfig,\n InferSchemaOutput,\n LoadSubsetOptions,\n} from '@tanstack/db'\nimport type {\n AnyTableColumnType,\n ExtractedTable,\n OptionalExtractedTable,\n PowerSyncRecord,\n} from './helpers'\n\n/**\n * Small helper which determines the output type if:\n * - Standard SQLite types are to be used OR\n * - If the provided schema should be used.\n */\nexport type InferPowerSyncOutputType<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1<PowerSyncRecord> = never,\n> = TSchema extends never ? ExtractedTable<TTable> : InferSchemaOutput<TSchema>\n\n/**\n * A mapping type for custom serialization of object properties to SQLite-compatible values.\n *\n * This type allows you to override, for keys in the input object (`TOutput`), a function that transforms\n * the value to the corresponding SQLite type (`TSQLite`). Keys not specified will use the default SQLite serialization.\n *\n * ## Generics\n * - `TOutput`: The input object type, representing the row data to be serialized.\n * - `TSQLite`: The target SQLite-compatible type for each property, typically inferred from the table schema.\n *\n * ## Usage\n * Use this type to define a map of serialization functions for specific keys when you need custom handling\n * (e.g., converting complex objects, formatting dates, or handling enums).\n *\n * Example:\n * ```ts\n * const serializer: CustomSQLiteSerializer<MyRowType, MySQLiteType> = {\n * createdAt: (date) => date.toISOString(),\n * status: (status) => status ? 
1 : 0,\n * meta: (meta) => JSON.stringify(meta),\n * };\n * ```\n *\n * ## Behavior\n * - Each key maps to a function that receives the value and returns the SQLite-compatible value.\n * - Used by `serializeForSQLite` to override default serialization for specific columns.\n */\nexport type CustomSQLiteSerializer<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = Partial<{\n [Key in keyof TOutput]: (\n value: TOutput[Key],\n ) => Key extends keyof TSQLite ? TSQLite[Key] : never\n}>\n\nexport type SerializerConfig<\n TOutput extends Record<string, unknown>,\n TSQLite extends Record<string, unknown>,\n> = {\n /**\n * Optional partial serializer object for customizing how individual columns are serialized for SQLite.\n *\n * This should be a partial map of column keys to serialization functions, following the\n * {@link CustomSQLiteSerializer} type. Each function receives the column value and returns a value\n * compatible with SQLite storage.\n *\n * If not provided for a column, the default behavior is used:\n * - `TEXT`: Strings are stored as-is; Dates are converted to ISO strings; other types are JSON-stringified.\n * - `INTEGER`/`REAL`: Numbers are stored as-is; booleans are mapped to 1/0.\n *\n * Use this option to override serialization for specific columns, such as formatting dates, handling enums,\n * or serializing complex objects.\n *\n * Example:\n * ```typescript\n * serializer: {\n * createdAt: (date) => date.getTime(), // Store as timestamp\n * meta: (meta) => JSON.stringify(meta), // Custom object serialization\n * }\n * ```\n */\n serializer?: CustomSQLiteSerializer<TOutput, TSQLite>\n\n /**\n * Application logic should ensure that incoming synced data is always valid.\n * Failing to deserialize and apply incoming changes results in data inconsistency - which is a fatal error.\n * Use this callback to react to deserialization errors.\n */\n onDeserializationError: (error: StandardSchemaV1.FailureResult) => 
void\n}\n\n/**\n * Config for when TInput and TOutput are both the SQLite types.\n */\nexport type ConfigWithSQLiteTypes = {}\n\n/**\n * Config where TInput is the SQLite types while TOutput can be defined by TSchema.\n * We can use the same schema to validate TInput and incoming SQLite changes.\n */\nexport type ConfigWithSQLiteInputType<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // TInput is the SQLite types.\n OptionalExtractedTable<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n}\n\n/**\n * Config where TInput and TOutput have arbitrarily typed values.\n * The keys of the types need to equal the SQLite types.\n * Since TInput is not the SQLite types, we require a schema in order to deserialize incoming SQLite updates. The schema should validate from SQLite to TOutput.\n */\nexport type ConfigWithArbitraryCollectionTypes<\n TTable extends Table,\n TSchema extends StandardSchemaV1<\n // The input and output must have the same keys, the value types can be arbitrary\n AnyTableColumnType<TTable>,\n AnyTableColumnType<TTable>\n >,\n> = SerializerConfig<\n StandardSchemaV1.InferOutput<TSchema>,\n ExtractedTable<TTable>\n> & {\n schema: TSchema\n /**\n * Schema for deserializing and validating input data from the sync stream.\n *\n * This schema defines how to transform and validate data coming from SQLite types (as stored in the database)\n * into the desired output types (`TOutput`) expected by your application or validation logic.\n *\n * The generic parameters allow for arbitrary input and output types, so you can specify custom conversion rules\n * for each column. 
This is especially useful when your application expects richer types (e.g., Date, enums, objects)\n * than what SQLite natively supports.\n *\n * Use this to ensure that incoming data from the sync stream is properly converted and validated before use.\n *\n * Example:\n * ```typescript\n * deserializationSchema: z.object({\n * createdAt: z.preprocess((val) => new Date(val as string), z.date()),\n * meta: z.preprocess((val) => JSON.parse(val as string), z.object({ ... })),\n * })\n * ```\n *\n * This enables robust type safety and validation for incoming data, bridging the gap between SQLite storage\n * and your application's expected types.\n */\n deserializationSchema: StandardSchemaV1<\n ExtractedTable<TTable>,\n StandardSchemaV1.InferOutput<TSchema>\n >\n}\n/**\n * Eager sync mode hooks.\n * Called once when the collection sync starts and stops.\n */\nexport type EagerSyncHooks = {\n syncMode?: 'eager'\n /**\n * Called when the collection sync starts.\n * Use this to set up external data sources (e.g. subscribing to a sync stream).\n *\n * @returns A cleanup function that is called when the collection sync is cleaned up.\n */\n onLoad?: () => CleanupFn | void | Promise<CleanupFn | void>\n onLoadSubset?: never\n}\n\n/**\n * On-demand sync mode hooks.\n * Called each time a subset is loaded or unloaded in response to live query changes.\n */\nexport type OnDemandSyncHooks = {\n syncMode: 'on-demand'\n onLoad?: never\n /**\n * Called when a subset of data is requested by a live query.\n * Use this to set up external data sources for the requested subset\n * (e.g. 
subscribing to a sync stream with parameters derived from the query predicate).\n *\n * @returns A cleanup function that is called when the subset is unloaded.\n */\n\n onLoadSubset?: (\n options: LoadSubsetOptions,\n ) => CleanupFn | void | Promise<CleanupFn | void>\n}\n\nexport type BasePowerSyncCollectionConfig<\n TTable extends Table = Table,\n TSchema extends StandardSchemaV1 = never,\n> = Omit<\n BaseCollectionConfig<ExtractedTable<TTable>, string, TSchema>,\n `onInsert` | `onUpdate` | `onDelete` | `getKey` | `syncMode`\n> & {\n /** The PowerSync schema Table definition */\n table: TTable\n /** The PowerSync database instance */\n database: AbstractPowerSyncDatabase\n /**\n * The maximum number of documents to read from the SQLite table\n * in a single batch during the initial sync between PowerSync and the\n * in-memory TanStack DB collection.\n *\n * @remarks\n * - Defaults to {@link DEFAULT_BATCH_SIZE} if not specified.\n * - Larger values reduce the number of round trips to the storage\n * engine but increase memory usage per batch.\n * - Smaller values may lower memory usage and allow earlier\n * streaming of initial results, at the cost of more query calls.\n */\n syncBatchSize?: number\n} & (EagerSyncHooks | OnDemandSyncHooks)\n\n/**\n * Configuration interface for PowerSync collection options.\n * @template TTable - The PowerSync table schema definition\n * @template TSchema - The validation schema type\n */\n/**\n * Configuration options for creating a PowerSync collection.\n *\n * @example\n * ```typescript\n * const APP_SCHEMA = new Schema({\n * documents: new Table({\n * name: column.text,\n * }),\n * })\n *\n * const db = new PowerSyncDatabase({\n * database: {\n * dbFilename: \"test.sqlite\",\n * },\n * schema: APP_SCHEMA,\n * })\n *\n * const collection = createCollection(\n * powerSyncCollectionOptions({\n * database: db,\n * table: APP_SCHEMA.props.documents\n * })\n * )\n * ```\n */\nexport type PowerSyncCollectionConfig<\n TTable extends 
Table = Table,\n TSchema extends StandardSchemaV1<any> = never,\n> = BasePowerSyncCollectionConfig<TTable, TSchema> &\n (\n | ConfigWithSQLiteTypes\n | ConfigWithSQLiteInputType<TTable, TSchema>\n | ConfigWithArbitraryCollectionTypes<TTable, TSchema>\n )\n\n/**\n * Metadata for the PowerSync Collection.\n */\nexport type PowerSyncCollectionMeta<TTable extends Table = Table> = {\n /**\n * The SQLite table representing the collection.\n */\n tableName: string\n /**\n * The internal table used to track diffs for the collection.\n */\n trackedTableName: string\n\n /**\n * Serializes a collection value to the SQLite type\n */\n serializeValue: (value: any) => ExtractedTable<TTable>\n\n /**\n * Whether the PowerSync table tracks metadata.\n */\n metadataIsTracked: boolean\n}\n\n/**\n * A CollectionConfig which includes utilities for PowerSync.\n */\nexport type EnhancedPowerSyncCollectionConfig<\n TTable extends Table,\n OutputType extends Record<string, unknown> = Record<string, unknown>,\n TSchema extends StandardSchemaV1 = never,\n> = CollectionConfig<\n OutputType,\n string,\n TSchema,\n PowerSyncCollectionUtils<TTable>\n> & {\n id?: string\n utils: PowerSyncCollectionUtils<TTable>\n schema?: TSchema\n}\n\n/**\n * Collection-level utilities for PowerSync.\n */\nexport type PowerSyncCollectionUtils<TTable extends Table = Table> = {\n getMeta: () => PowerSyncCollectionMeta<TTable>\n}\n\n/**\n * Default value for {@link PowerSyncCollectionConfig#syncBatchSize}.\n */\nexport const DEFAULT_BATCH_SIZE = 1000\n"],"names":[],"mappings":"AAiUO,MAAM,qBAAqB;"}
|
package/dist/esm/index.d.ts
CHANGED
package/dist/esm/index.js
CHANGED
|
@@ -1,9 +1,11 @@
|
|
|
1
1
|
import { DEFAULT_BATCH_SIZE } from "./definitions.js";
|
|
2
2
|
import { powerSyncCollectionOptions } from "./powersync.js";
|
|
3
3
|
import { PowerSyncTransactor } from "./PowerSyncTransactor.js";
|
|
4
|
+
import { compileSQLite } from "./sqlite-compiler.js";
|
|
4
5
|
export {
|
|
5
6
|
DEFAULT_BATCH_SIZE,
|
|
6
7
|
PowerSyncTransactor,
|
|
8
|
+
compileSQLite,
|
|
7
9
|
powerSyncCollectionOptions
|
|
8
10
|
};
|
|
9
11
|
//# sourceMappingURL=index.js.map
|
package/dist/esm/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;"}
|