@pol-studios/powersync 1.0.7 → 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. package/README.md +933 -0
  2. package/dist/CacheSettingsManager-uz-kbnRH.d.ts +461 -0
  3. package/dist/attachments/index.d.ts +709 -6
  4. package/dist/attachments/index.js +133 -5
  5. package/dist/chunk-24RDMMCL.js +44 -0
  6. package/dist/chunk-24RDMMCL.js.map +1 -0
  7. package/dist/chunk-4TXTAEF2.js +2060 -0
  8. package/dist/chunk-4TXTAEF2.js.map +1 -0
  9. package/dist/chunk-63PXSPIN.js +358 -0
  10. package/dist/chunk-63PXSPIN.js.map +1 -0
  11. package/dist/chunk-654ERHA7.js +1 -0
  12. package/dist/{chunk-BREGB4WL.js → chunk-BRXQNASY.js} +287 -335
  13. package/dist/chunk-BRXQNASY.js.map +1 -0
  14. package/dist/{chunk-DHYUBVP7.js → chunk-CAB26E6F.js} +20 -9
  15. package/dist/chunk-CAB26E6F.js.map +1 -0
  16. package/dist/{chunk-H772V6XQ.js → chunk-CUCAYK7Z.js} +7 -43
  17. package/dist/chunk-CUCAYK7Z.js.map +1 -0
  18. package/dist/{chunk-4C3RY5SU.js → chunk-HWSNV45P.js} +76 -1
  19. package/dist/chunk-HWSNV45P.js.map +1 -0
  20. package/dist/{chunk-HFOFLW5F.js → chunk-KN2IZERF.js} +139 -6
  21. package/dist/chunk-KN2IZERF.js.map +1 -0
  22. package/dist/{chunk-UEYRTLKE.js → chunk-P4HZA6ZT.js} +20 -9
  23. package/dist/chunk-P4HZA6ZT.js.map +1 -0
  24. package/dist/chunk-T4AO7JIG.js +1 -0
  25. package/dist/{chunk-XQAJM2MW.js → chunk-VACPAAQZ.js} +33 -2
  26. package/dist/{chunk-XQAJM2MW.js.map → chunk-VACPAAQZ.js.map} +1 -1
  27. package/dist/{chunk-53WH2JJV.js → chunk-WN5ZJ3E2.js} +5 -8
  28. package/dist/chunk-WN5ZJ3E2.js.map +1 -0
  29. package/dist/chunk-XAEII4ZX.js +456 -0
  30. package/dist/chunk-XAEII4ZX.js.map +1 -0
  31. package/dist/chunk-XOY2CJ67.js +289 -0
  32. package/dist/chunk-XOY2CJ67.js.map +1 -0
  33. package/dist/chunk-YHTZ7VMV.js +1 -0
  34. package/dist/{chunk-MKD2VCX3.js → chunk-Z6VOBGTU.js} +8 -8
  35. package/dist/chunk-Z6VOBGTU.js.map +1 -0
  36. package/dist/chunk-ZM4ENYMF.js +230 -0
  37. package/dist/chunk-ZM4ENYMF.js.map +1 -0
  38. package/dist/connector/index.d.ts +56 -3
  39. package/dist/connector/index.js +8 -5
  40. package/dist/core/index.d.ts +12 -1
  41. package/dist/core/index.js +3 -2
  42. package/dist/error/index.js +0 -1
  43. package/dist/generator/cli.js +527 -0
  44. package/dist/generator/index.d.ts +168 -0
  45. package/dist/generator/index.js +370 -0
  46. package/dist/generator/index.js.map +1 -0
  47. package/dist/index.d.ts +12 -10
  48. package/dist/index.js +191 -29
  49. package/dist/index.native.d.ts +11 -9
  50. package/dist/index.native.js +191 -29
  51. package/dist/index.web.d.ts +11 -9
  52. package/dist/index.web.js +191 -29
  53. package/dist/maintenance/index.js +0 -1
  54. package/dist/platform/index.js +0 -2
  55. package/dist/platform/index.js.map +1 -1
  56. package/dist/platform/index.native.js +1 -2
  57. package/dist/platform/index.web.js +0 -1
  58. package/dist/pol-attachment-queue-BVAIueoP.d.ts +817 -0
  59. package/dist/provider/index.d.ts +38 -34
  60. package/dist/provider/index.js +11 -12
  61. package/dist/react/index.d.ts +372 -0
  62. package/dist/react/index.js +25 -0
  63. package/dist/storage/index.d.ts +3 -3
  64. package/dist/storage/index.js +22 -8
  65. package/dist/storage/index.native.d.ts +3 -3
  66. package/dist/storage/index.native.js +21 -7
  67. package/dist/storage/index.web.d.ts +3 -3
  68. package/dist/storage/index.web.js +21 -7
  69. package/dist/storage/upload/index.d.ts +7 -8
  70. package/dist/storage/upload/index.js +3 -3
  71. package/dist/storage/upload/index.native.d.ts +7 -8
  72. package/dist/storage/upload/index.native.js +4 -3
  73. package/dist/storage/upload/index.web.d.ts +1 -4
  74. package/dist/storage/upload/index.web.js +3 -3
  75. package/dist/supabase-connector-T9vHq_3i.d.ts +202 -0
  76. package/dist/sync/index.js +3 -3
  77. package/dist/{supabase-connector-qLm-WHkM.d.ts → types-B212hgfA.d.ts} +48 -170
  78. package/dist/{types-BVacP54t.d.ts → types-CyvBaAl8.d.ts} +12 -4
  79. package/dist/types-D0WcHrq6.d.ts +234 -0
  80. package/package.json +28 -4
  81. package/dist/CacheSettingsManager-1exbOC6S.d.ts +0 -261
  82. package/dist/chunk-4C3RY5SU.js.map +0 -1
  83. package/dist/chunk-53WH2JJV.js.map +0 -1
  84. package/dist/chunk-BREGB4WL.js.map +0 -1
  85. package/dist/chunk-DGUM43GV.js +0 -11
  86. package/dist/chunk-DHYUBVP7.js.map +0 -1
  87. package/dist/chunk-GKF7TOMT.js +0 -1
  88. package/dist/chunk-H772V6XQ.js.map +0 -1
  89. package/dist/chunk-HFOFLW5F.js.map +0 -1
  90. package/dist/chunk-KGSFAE5B.js +0 -1
  91. package/dist/chunk-LNL64IJZ.js +0 -1
  92. package/dist/chunk-MKD2VCX3.js.map +0 -1
  93. package/dist/chunk-UEYRTLKE.js.map +0 -1
  94. package/dist/chunk-WQ5MPAVC.js +0 -449
  95. package/dist/chunk-WQ5MPAVC.js.map +0 -1
  96. package/dist/chunk-ZEOKPWUC.js +0 -1165
  97. package/dist/chunk-ZEOKPWUC.js.map +0 -1
  98. package/dist/pol-attachment-queue-C7YNXXhK.d.ts +0 -676
  99. package/dist/types-Bgvx7-E8.d.ts +0 -187
  100. /package/dist/{chunk-DGUM43GV.js.map → chunk-654ERHA7.js.map} +0 -0
  101. /package/dist/{chunk-GKF7TOMT.js.map → chunk-T4AO7JIG.js.map} +0 -0
  102. /package/dist/{chunk-KGSFAE5B.js.map → chunk-YHTZ7VMV.js.map} +0 -0
  103. /package/dist/{chunk-LNL64IJZ.js.map → react/index.js.map} +0 -0
@@ -0,0 +1,370 @@
1
// src/generator/config.ts

/**
 * Identity helper that lets consumers write type-checked generator configs
 * (`defineConfig({...})`) in their powersync.config files.
 *
 * @param generatorConfig - the GeneratorConfig object
 * @returns the same object, unchanged
 */
function defineConfig(generatorConfig) {
  return generatorConfig;
}
5
// Columns skipped by default: PowerSync manages `id` itself, and legacy
// numeric id columns are typically unneeded after a UUID migration.
var DEFAULT_SKIP_COLUMNS = ["id", "legacyId"];

// Column-name fragments that indicate decimal values (mapped to column.real
// instead of column.integer).
var DEFAULT_DECIMAL_PATTERNS = [
  "hours",
  "watts",
  "voltage",
  "rate",
  "amount",
  "price",
  "cost",
  "total"
];
12
+
13
+ // src/generator/generator.ts
14
+ import * as fs from "fs";
15
+ import * as path from "path";
16
+
17
// src/generator/parser.ts

/**
 * Extract the columns declared in a table's `Row:` type block.
 *
 * @param tableContent - text of one table definition from database.types.ts
 * @param options - { skipColumns: Set<string>, syncPrimaryKey?, includeId? }
 * @returns Map of column name -> TypeScript type string (empty when no Row
 *          block is present)
 */
function parseRowType(tableContent, options) {
  const columns = /* @__PURE__ */ new Map();
  // Locate the Row block; the pattern tolerates one level of nested braces
  // inside the member types.
  const rowMatch = tableContent.match(/Row:\s*\{([^}]+(?:\{[^}]*\}[^}]*)*)\}/s);
  if (!rowMatch) return columns;
  // syncPrimaryKey takes precedence; includeId is the deprecated fallback.
  const keepId = options.syncPrimaryKey ?? options.includeId ?? false;
  // Each member looks like `name: type` or `name?: type`.
  for (const [, name, type] of rowMatch[1].matchAll(/(\w+)\??:\s*([^,\n]+)/g)) {
    // Skip-listed columns are dropped, except `id` when keepId is set.
    const skipped = options.skipColumns.has(name) && !(keepId && name === "id");
    if (!skipped) {
      columns.set(name, type.trim());
    }
  }
  return columns;
}
35
/**
 * Locate a single table's full definition inside a database.types.ts dump.
 * The search is anchored at the start of the requested schema section so a
 * table name is not accidentally picked up from a different schema.
 *
 * @returns the matched definition text, or null when the schema or table
 *          cannot be found
 */
function extractTableDef(content, tableName, schema) {
  // Anchor at the schema's `Tables:` opening.
  const schemaHit = new RegExp(`${schema}:\\s*\\{[\\s\\S]*?Tables:\\s*\\{`, "g").exec(content);
  if (!schemaHit) return null;
  // Negative lookbehind keeps e.g. `Tag` from matching inside `CommentTag`.
  const tablePattern = new RegExp(
    `(?<![A-Za-z])${tableName}:\\s*\\{[\\s\\S]*?Row:\\s*\\{[\\s\\S]*?\\}[\\s\\S]*?Relationships:\\s*\\[[^\\]]*\\]\\s*\\}`,
    "g"
  );
  const hit = tablePattern.exec(content.slice(schemaHit.index));
  return hit ? hit[0] : null;
}
45
/**
 * Parse a database.types.ts file and collect the requested tables.
 * Tables that are missing from the file, or that end up with zero syncable
 * columns, are silently omitted here — the caller is responsible for
 * reporting warnings about them.
 *
 * @param content - full text of database.types.ts
 * @param tables - TableConfig entries to extract
 * @param skipColumns - global set of column names to drop
 * @returns parsed tables ({ name, schema, columns, config })
 */
function parseTypesFile(content, tables, skipColumns) {
  const parsedTables = [];
  for (const tableConfig of tables) {
    const { name, schema = "public", syncPrimaryKey, includeId } = tableConfig;
    const tableDef = extractTableDef(content, name, schema);
    if (!tableDef) continue;
    const columns = parseRowType(tableDef, { skipColumns, syncPrimaryKey, includeId });
    if (columns.size > 0) {
      parsedTables.push({ name, schema, columns, config: tableConfig });
    }
  }
  return parsedTables;
}
74
/**
 * List every schema name that declares a `Tables:` block in the types file.
 *
 * @param content - full text of database.types.ts
 * @returns schema names in order of appearance
 */
function getAvailableSchemas(content) {
  return Array.from(
    content.matchAll(/(\w+):\s*\{[\s\S]*?Tables:\s*\{/g),
    (m) => m[1]
  );
}
83
/**
 * List the table names declared inside a schema's `Tables:` block.
 * Relies on the generated layout where the block is terminated by a closing
 * brace immediately followed by `Views:`.
 *
 * @param content - full text of database.types.ts
 * @param schema - schema name to inspect
 * @returns matched names, or [] when the schema is absent
 */
function getTablesInSchema(content, schema) {
  const blockRegex = new RegExp(`${schema}:\\s*\\{[\\s\\S]*?Tables:\\s*\\{([\\s\\S]*?)\\}\\s*Views:`, "g");
  const blockMatch = blockRegex.exec(content);
  if (!blockMatch) return [];
  // Entries open their own `{` at the start of a line.
  return Array.from(blockMatch[1].matchAll(/^\s*(\w+):\s*\{/gm), (m) => m[1]);
}
96
+
97
// src/generator/templates.ts

/**
 * Render the banner comment + PowerSync import placed at the top of the
 * generated schema file.
 *
 * @param typesPath - path recorded in the AUTO-GENERATED notice
 */
function generateHeader(typesPath) {
  const lines = [
    "/**",
    " * PowerSync Schema Definition",
    " *",
    ` * AUTO-GENERATED from ${typesPath}`,
    " * Run: npx @pol-studios/powersync generate-schema",
    " *",
    " * DO NOT EDIT MANUALLY - changes will be overwritten",
    " */",
    "",
    'import { column, Schema, Table } from "@powersync/react-native";',
    ""
  ];
  return lines.join("\n");
}
111
/**
 * Emit the `new Table({...})` declaration for one parsed table.
 * Falls back to a placeholder comment when no columns survived filtering.
 *
 * @param table - parsed table ({ name, config })
 * @param columnDefs - pre-rendered column lines (already indented)
 */
function generateTableDefinition(table, columnDefs) {
  if (columnDefs.length === 0) return `// ${table.name} - no syncable columns found`;
  // trackMetadata enables ps_crud timestamp tracking on the generated table.
  const tableOptions = table.config.trackMetadata ? ", { trackMetadata: true }" : "";
  const body = columnDefs.join("\n");
  return `const ${table.name} = new Table({\n${body}\n}${tableOptions});`;
}
120
/**
 * Emit the `AppSchema` export plus the derived `Database` type alias.
 *
 * @param tableNames - names of the Table constants declared above the export
 */
function generateSchemaExport(tableNames) {
  const entries = tableNames.map((name) => `  ${name},`).join("\n");
  return `// ============================================================================
// SCHEMA EXPORT
// ============================================================================

// NOTE: photo_attachments is NOT included here.
// The AttachmentQueue from @powersync/attachments creates and manages
// its own internal SQLite table (not a view) during queue.init().
// This allows INSERT/UPDATE operations to work correctly.

export const AppSchema = new Schema({
${entries}
});

export type Database = (typeof AppSchema)["types"];`;
}
136
/**
 * Emit connector helpers that map table names back to their Supabase schema.
 * Each non-public schema that actually contains tables gets a
 * `<SCHEMA>_SCHEMA_TABLES` Set constant, plus a shared `getTableSchema()`
 * lookup. When every table lives in `public`, the lookup simply returns
 * "public".
 *
 * @param tables - parsed tables ({ name, schema })
 * @param schemas - all schemas to mention in the return-type union
 */
function generateSchemaMapping(tables, schemas) {
  // Bucket table names by non-public schema (public needs no mapping).
  const bySchema = /* @__PURE__ */ new Map();
  for (const schemaName of schemas) {
    if (schemaName !== "public") bySchema.set(schemaName, []);
  }
  for (const table of tables) {
    if (table.schema !== "public" && bySchema.has(table.schema)) {
      bySchema.get(table.schema).push(table.name);
    }
  }
  const sections = [`// ============================================================================
// SCHEMA MAPPING FOR CONNECTOR
// ============================================================================`];
  // One Set constant per non-empty non-public schema.
  for (const [schemaName, names] of bySchema) {
    if (names.length === 0) continue;
    const constName = `${schemaName.toUpperCase()}_SCHEMA_TABLES`;
    sections.push(`
// Tables in the '${schemaName}' schema (need .schema('${schemaName}') in Supabase queries)
export const ${constName} = new Set([
${names.map((name) => `  "${name}",`).join("\n")}
]);`);
  }
  // Membership checks for the lookup function, one per non-empty schema.
  const checks = [...bySchema.entries()]
    .filter(([, names]) => names.length > 0)
    .map(([schemaName]) => `  if (${schemaName.toUpperCase()}_SCHEMA_TABLES.has(tableName)) return "${schemaName}";`);
  if (checks.length > 0) {
    sections.push(`
/**
 * Get the Supabase schema for a table
 */
export function getTableSchema(tableName: string): ${schemas.map((s) => `"${s}"`).join(" | ")} {
${checks.join("\n")}
  return "public";
}`);
  } else {
    sections.push(`
/**
 * Get the Supabase schema for a table
 */
export function getTableSchema(tableName: string): "public" {
  return "public";
}`);
  }
  return sections.join("\n");
}
185
/**
 * Emit helper functions that infer foreign-key relationships from the
 * `<name>Id` column-naming convention used by the generated schema.
 * The emitted text is TypeScript source, not executed here.
 */
function generateFKUtility() {
  const sections = [
    "",
    "// ============================================================================",
    "// FOREIGN KEY UTILITIES",
    "// ============================================================================",
    "",
    `/**
 * Check if a column name represents a foreign key reference
 * Convention: columns ending in 'Id' are foreign keys (e.g., projectId -> Project table)
 */
export function isForeignKeyColumn(columnName: string): boolean {
  return columnName.endsWith('Id') && columnName !== 'id';
}`,
    "",
    `/**
 * Get the referenced table name from a foreign key column
 * e.g., 'projectId' -> 'Project', 'equipmentFixtureUnitId' -> 'EquipmentFixtureUnit'
 */
export function getForeignKeyTable(columnName: string): string | null {
  if (!isForeignKeyColumn(columnName)) return null;
  // Remove 'Id' suffix and capitalize first letter
  const baseName = columnName.slice(0, -2);
  return baseName.charAt(0).toUpperCase() + baseName.slice(1);
}`
  ];
  return sections.join("\n");
}
210
/**
 * Assemble the complete generated file: header, table definitions, schema
 * export, connector schema mapping, and FK helpers.
 *
 * @param tables - parsed tables that actually produced definitions
 * @param tableDefs - rendered `new Table({...})` blocks
 * @param schemas - unique schema names (used for the mapping section)
 * @param typesPath - path recorded in the header banner
 */
function generateOutputFile(tables, tableDefs, schemas, typesPath) {
  const tableNames = tables.map((t) => t.name);
  // `public` always leads the mapping's schema list; other schemas follow.
  const orderedSchemas = ["public", ...schemas.filter((s) => s !== "public")];
  const parts = [
    generateHeader(typesPath),
    "",
    "// ============================================================================",
    "// TABLE DEFINITIONS",
    "// ============================================================================",
    "",
    tableDefs.join("\n\n"),
    "",
    generateSchemaExport(tableNames),
    "",
    generateSchemaMapping(tables, orderedSchemas),
    generateFKUtility(),
    ""
  ];
  return parts.join("\n");
}
226
+
227
// src/generator/generator.ts

/**
 * Map a TypeScript column type to a PowerSync column expression.
 *
 * @param tsType - raw type text from the Row block (may include `| null`)
 * @param columnName - used to detect decimal-valued number columns
 * @param decimalPatterns - name fragments that force column.real
 * @returns { type, isBoolean?, isEnum? }, or null for types SQLite cannot
 *          hold (Json, unknown, inline objects, arrays)
 */
function mapTypeToPowerSync(tsType, columnName, decimalPatterns) {
  // Strip nullability; PowerSync columns are nullable by default.
  const cleanType = tsType.trim().replace(/\s*\|\s*null/g, "");
  // Complex / array types cannot be stored in a SQLite column.
  if (["Json", "unknown", "{", "[]"].some((marker) => cleanType.includes(marker))) {
    return null;
  }
  // Booleans are stored as 0/1 integers.
  if (cleanType === "boolean") {
    return { type: "column.integer", isBoolean: true };
  }
  if (cleanType === "number") {
    const lowered = columnName.toLowerCase();
    const wantsDecimal = decimalPatterns.some((pattern) => lowered.includes(pattern.toLowerCase()));
    return { type: wantsDecimal ? "column.real" : "column.integer" };
  }
  if (cleanType === "string") {
    return { type: "column.text" };
  }
  // Enum references (Database["schema"]["Enums"][...]) are stored as text.
  if (cleanType.includes("Database[") && cleanType.includes("Enums")) {
    return { type: "column.text", isEnum: true };
  }
  // Anything else (often enum aliases or string-like types) defaults to text.
  return { type: "column.text" };
}
267
/**
 * Build the per-column definition lines for one table's Table({...}) body.
 * Columns whose types cannot be stored (Json, arrays, inline objects) are
 * dropped entirely.
 *
 * @param table - parsed table whose `columns` Map is name -> TS type
 * @param decimalPatterns - name fragments that force column.real for numbers
 * @returns rendered lines, already indented for the Table body
 */
function generateColumnDefs(table, decimalPatterns) {
  const defs = [];
  for (const [columnName, tsType] of table.columns) {
    const mapping = mapTypeToPowerSync(tsType, columnName, decimalPatterns);
    if (!mapping) continue;
    // Annotate lossy representations so readers of the output know why.
    let comment = "";
    if (mapping.isBoolean) {
      comment = " // boolean stored as 0/1";
    } else if (mapping.isEnum) {
      comment = " // enum stored as text";
    }
    defs.push(`  ${columnName}: ${mapping.type},${comment}`);
  }
  return defs;
}
283
/**
 * Generate the PowerSync schema file described by `config`.
 *
 * Reads the Supabase database.types.ts file, parses the configured tables,
 * renders the schema source, and writes it to `config.outputPath` (unless
 * `dryRun`, in which case the text is returned in `result.output` instead).
 *
 * NOTE(review): the per-table `skipColumns`/`onlyColumns` fields declared on
 * TableConfig do not appear to be consulted here — only the global
 * `config.skipColumns` is merged; confirm against the intended contract.
 *
 * @param config - GeneratorConfig (typesPath, outputPath, tables, ...)
 * @param options - { cwd?, verbose?, dryRun? }
 * @returns GenerateResult: { success, tablesGenerated, outputPath, errors,
 *          warnings, output? }
 */
async function generateSchema(config, options) {
  const cwd = options?.cwd ?? process.cwd();
  const verbose = options?.verbose ?? false;
  const dryRun = options?.dryRun ?? false;
  const result = {
    success: false,
    tablesGenerated: 0,
    outputPath: "",
    errors: [],
    warnings: []
  };
  // Resolve both paths relative to cwd unless already absolute.
  const resolvePath = (p) => path.isAbsolute(p) ? p : path.resolve(cwd, p);
  const typesPath = resolvePath(config.typesPath);
  const outputPath = resolvePath(config.outputPath);
  result.outputPath = outputPath;
  if (!fs.existsSync(typesPath)) {
    result.errors.push(`Types file not found: ${typesPath}`);
    return result;
  }
  if (verbose) {
    console.log(`Reading types from: ${typesPath}`);
  }
  const typesContent = fs.readFileSync(typesPath, "utf-8");
  // Merge defaults with user-supplied skip columns / decimal patterns.
  const skipColumns = /* @__PURE__ */ new Set([...DEFAULT_SKIP_COLUMNS, ...config.skipColumns ?? []]);
  const decimalPatterns = [...DEFAULT_DECIMAL_PATTERNS, ...config.decimalPatterns ?? []];
  const parsedTables = parseTypesFile(typesContent, config.tables, skipColumns);
  // Warn about configured tables that were not found in the types file.
  for (const tableConfig of config.tables) {
    const wasParsed = parsedTables.some((t) => t.name === tableConfig.name);
    if (!wasParsed) {
      result.warnings.push(`Table '${tableConfig.name}' not found in schema '${tableConfig.schema ?? "public"}'`);
    }
  }
  if (parsedTables.length === 0) {
    result.errors.push("No tables were parsed successfully");
    return result;
  }
  // Render one Table definition per parsed table (skipping empty ones).
  const tableDefs = [];
  for (const table of parsedTables) {
    if (verbose) {
      const syncPK = table.config.syncPrimaryKey || table.config.includeId;
      console.log(`Processing ${table.schema}.${table.name} (${table.columns.size} columns)${table.config.trackMetadata ? " [trackMetadata]" : ""}${syncPK ? " [syncPrimaryKey]" : ""}`);
    }
    const columnDefs = generateColumnDefs(table, decimalPatterns);
    if (columnDefs.length === 0) {
      result.warnings.push(`Table '${table.name}' has no syncable columns`);
      continue;
    }
    tableDefs.push(generateTableDefinition(table, columnDefs));
  }
  const schemas = [...new Set(config.tables.map((t) => t.schema ?? "public"))];
  // Only pass along tables that actually produced a `const <name> =` block.
  const emittedTables = parsedTables.filter(
    (t) => tableDefs.some((def) => def.includes(`const ${t.name} =`))
  );
  const output = generateOutputFile(emittedTables, tableDefs, schemas, path.relative(cwd, typesPath));
  result.tablesGenerated = tableDefs.length;
  if (dryRun) {
    // Dry run: hand back the rendered text without touching the filesystem.
    result.success = true;
    result.output = output;
    return result;
  }
  const outputDir = path.dirname(outputPath);
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, { recursive: true });
  }
  fs.writeFileSync(outputPath, output);
  result.success = true;
  return result;
}
351
// Public API of the generator bundle: config helpers, types-file parsing,
// template rendering, and the top-level generateSchema() entry point.
export {
  DEFAULT_DECIMAL_PATTERNS,
  DEFAULT_SKIP_COLUMNS,
  defineConfig,
  extractTableDef,
  generateColumnDefs,
  generateFKUtility,
  generateHeader,
  generateOutputFile,
  generateSchema,
  generateSchemaExport,
  generateSchemaMapping,
  generateTableDefinition,
  getAvailableSchemas,
  getTablesInSchema,
  mapTypeToPowerSync,
  parseRowType,
  parseTypesFile
};
//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/generator/config.ts","../../src/generator/generator.ts","../../src/generator/parser.ts","../../src/generator/templates.ts"],"sourcesContent":["/**\n * Configuration types and helpers for PowerSync schema generator\n */\n\nexport interface TableConfig {\n /** Table name (PascalCase as it appears in database.types.ts) */\n name: string;\n /** Schema name (defaults to 'public') */\n schema?: string;\n /** Enable ps_crud timestamp tracking for optimistic UI updates */\n trackMetadata?: boolean;\n /**\n * Sync the primary key column (normally skipped as PowerSync handles it internally).\n *\n * Use this for tables with integer PKs that are referenced by FKs in other tables.\n * Example: `Group.id` is referenced by `UserGroup.groupId`, so Group needs `syncPrimaryKey: true`\n * to ensure the integer ID is available for client-side joins.\n */\n syncPrimaryKey?: boolean;\n /** @deprecated Use `syncPrimaryKey` instead */\n includeId?: boolean;\n /** Columns to skip for this specific table (in addition to global skipColumns) */\n skipColumns?: string[];\n /** Only include these columns (overrides skipColumns if specified) */\n onlyColumns?: string[];\n}\nexport interface GeneratorConfig {\n /** Path to Supabase-generated database.types.ts file */\n typesPath: string;\n /** Output path for generated PowerSync schema */\n outputPath: string;\n /** Tables to include in the PowerSync schema */\n tables: TableConfig[];\n /** Columns to always skip (in addition to defaults like 'id') */\n skipColumns?: string[];\n /** Column name patterns that should use column.real for decimal values */\n decimalPatterns?: string[];\n /** Additional schemas to track (besides 'public' which is the default) */\n schemas?: string[];\n}\n\n/**\n * Define a PowerSync generator configuration with type safety\n */\nexport function defineConfig(config: GeneratorConfig): GeneratorConfig {\n return config;\n}\n\n/**\n * Default columns that are skipped during 
generation\n */\nexport const DEFAULT_SKIP_COLUMNS = ['id',\n// PowerSync handles id automatically\n// Legacy numeric ID columns - typically not needed after UUID migration\n'legacyId'];\n\n/**\n * Default column name patterns that indicate decimal values\n */\nexport const DEFAULT_DECIMAL_PATTERNS = ['hours', 'watts', 'voltage', 'rate', 'amount', 'price', 'cost', 'total'];","/**\n * PowerSync schema generator\n *\n * Converts Supabase database.types.ts into PowerSync schema definitions\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { GeneratorConfig } from './config.js';\nimport { DEFAULT_SKIP_COLUMNS, DEFAULT_DECIMAL_PATTERNS } from './config.js';\nimport { parseTypesFile, type ParsedTable } from './parser.js';\nimport { generateTableDefinition, generateOutputFile } from './templates.js';\nexport interface ColumnMapping {\n type: 'column.text' | 'column.integer' | 'column.real';\n isEnum?: boolean;\n isBoolean?: boolean;\n}\nexport interface GenerateResult {\n success: boolean;\n tablesGenerated: number;\n outputPath: string;\n errors: string[];\n warnings: string[];\n /** Generated output (included when dryRun is true) */\n output?: string;\n}\n\n/**\n * Map TypeScript types to PowerSync column types\n */\nexport function mapTypeToPowerSync(tsType: string, columnName: string, decimalPatterns: string[]): ColumnMapping | null {\n // Clean up the type (remove nullability)\n const cleanType = tsType.trim().replace(/\\s*\\|\\s*null/g, '');\n\n // Skip complex types that can't be stored in SQLite\n if (cleanType.includes('Json') || cleanType.includes('unknown') || cleanType.includes('{')) {\n return null;\n }\n\n // Array types - skip\n if (cleanType.includes('[]')) {\n return null;\n }\n\n // Boolean -> integer (0/1)\n if (cleanType === 'boolean') {\n return {\n type: 'column.integer',\n isBoolean: true\n };\n }\n\n // Number types\n if (cleanType === 'number') {\n // Use real for columns that might have decimals\n if 
(decimalPatterns.some(pattern => columnName.toLowerCase().includes(pattern.toLowerCase()))) {\n return {\n type: 'column.real'\n };\n }\n return {\n type: 'column.integer'\n };\n }\n\n // String types\n if (cleanType === 'string') {\n return {\n type: 'column.text'\n };\n }\n\n // Enum types (Database[\"schema\"][\"Enums\"][\"EnumName\"]) -> store as text\n if (cleanType.includes('Database[') && cleanType.includes('Enums')) {\n return {\n type: 'column.text',\n isEnum: true\n };\n }\n\n // Default to text for unknown types (likely enums or other string-like types)\n return {\n type: 'column.text'\n };\n}\n\n/**\n * Generate column definitions for a table\n */\nexport function generateColumnDefs(table: ParsedTable, decimalPatterns: string[]): string[] {\n const columnDefs: string[] = [];\n for (const [columnName, tsType] of table.columns) {\n const mapping = mapTypeToPowerSync(tsType, columnName, decimalPatterns);\n if (mapping) {\n // Add comment for boolean and enum columns\n let comment = '';\n if (mapping.isBoolean) {\n comment = ' // boolean stored as 0/1';\n } else if (mapping.isEnum) {\n comment = ' // enum stored as text';\n }\n columnDefs.push(` ${columnName}: ${mapping.type},${comment}`);\n }\n }\n return columnDefs;\n}\n\n/**\n * Generate PowerSync schema from configuration\n */\nexport async function generateSchema(config: GeneratorConfig, options?: {\n cwd?: string;\n verbose?: boolean;\n dryRun?: boolean;\n}): Promise<GenerateResult> {\n const cwd = options?.cwd ?? process.cwd();\n const verbose = options?.verbose ?? false;\n const dryRun = options?.dryRun ?? false;\n const result: GenerateResult = {\n success: false,\n tablesGenerated: 0,\n outputPath: '',\n errors: [],\n warnings: []\n };\n\n // Resolve paths relative to cwd\n const typesPath = path.isAbsolute(config.typesPath) ? config.typesPath : path.resolve(cwd, config.typesPath);\n const outputPath = path.isAbsolute(config.outputPath) ? 
config.outputPath : path.resolve(cwd, config.outputPath);\n result.outputPath = outputPath;\n\n // Check if types file exists\n if (!fs.existsSync(typesPath)) {\n result.errors.push(`Types file not found: ${typesPath}`);\n return result;\n }\n\n // Read types file\n if (verbose) {\n console.log(`Reading types from: ${typesPath}`);\n }\n const typesContent = fs.readFileSync(typesPath, 'utf-8');\n\n // Build skip columns set\n const skipColumns = new Set([...DEFAULT_SKIP_COLUMNS, ...(config.skipColumns ?? [])]);\n\n // Build decimal patterns\n const decimalPatterns = [...DEFAULT_DECIMAL_PATTERNS, ...(config.decimalPatterns ?? [])];\n\n // Parse tables from types file\n const parsedTables = parseTypesFile(typesContent, config.tables, skipColumns);\n\n // Check for tables that weren't found\n for (const tableConfig of config.tables) {\n const found = parsedTables.some(t => t.name === tableConfig.name);\n if (!found) {\n result.warnings.push(`Table '${tableConfig.name}' not found in schema '${tableConfig.schema ?? 'public'}'`);\n }\n }\n if (parsedTables.length === 0) {\n result.errors.push('No tables were parsed successfully');\n return result;\n }\n\n // Generate table definitions\n const tableDefs: string[] = [];\n for (const table of parsedTables) {\n if (verbose) {\n const syncPK = table.config.syncPrimaryKey || table.config.includeId;\n console.log(`Processing ${table.schema}.${table.name} (${table.columns.size} columns)${table.config.trackMetadata ? ' [trackMetadata]' : ''}${syncPK ? ' [syncPrimaryKey]' : ''}`);\n }\n const columnDefs = generateColumnDefs(table, decimalPatterns);\n if (columnDefs.length === 0) {\n result.warnings.push(`Table '${table.name}' has no syncable columns`);\n continue;\n }\n tableDefs.push(generateTableDefinition(table, columnDefs));\n }\n\n // Collect unique schemas\n const schemas = [...new Set(config.tables.map(t => t.schema ?? 
'public'))];\n\n // Generate output file content\n const relativePath = path.relative(cwd, typesPath);\n const output = generateOutputFile(parsedTables.filter(t => tableDefs.some(def => def.includes(`const ${t.name} =`))), tableDefs, schemas, relativePath);\n\n // If dry-run, return output without writing\n if (dryRun) {\n result.success = true;\n result.tablesGenerated = tableDefs.length;\n result.output = output;\n return result;\n }\n\n // Ensure output directory exists\n const outputDir = path.dirname(outputPath);\n if (!fs.existsSync(outputDir)) {\n fs.mkdirSync(outputDir, {\n recursive: true\n });\n }\n\n // Write output file\n fs.writeFileSync(outputPath, output);\n result.success = true;\n result.tablesGenerated = tableDefs.length;\n return result;\n}","/**\n * Parser for Supabase database.types.ts files\n *\n * Extracts table definitions and column types from the generated TypeScript types\n */\n\nimport type { TableConfig } from './config.js';\nexport interface ColumnInfo {\n name: string;\n tsType: string;\n isNullable: boolean;\n}\nexport interface ParsedTable {\n name: string;\n schema: string;\n columns: Map<string, string>;\n config: TableConfig;\n}\nexport interface ParseOptions {\n /** Columns to skip */\n skipColumns: Set<string>;\n /**\n * Include the id column (normally skipped).\n * Use for tables with integer PKs referenced by FKs in other tables.\n */\n syncPrimaryKey?: boolean;\n /** @deprecated Use `syncPrimaryKey` instead */\n includeId?: boolean;\n}\n\n/**\n * Parse the Row type from a table definition and extract columns\n */\nexport function parseRowType(tableContent: string, options: ParseOptions): Map<string, string> {\n const columns = new Map<string, string>();\n\n // Find the Row block - handles nested braces in type definitions\n const rowMatch = tableContent.match(/Row:\\s*\\{([^}]+(?:\\{[^}]*\\}[^}]*)*)\\}/s);\n if (!rowMatch) return columns;\n const rowContent = rowMatch[1];\n\n // syncPrimaryKey takes precedence, with 
includeId as fallback for backwards compat\n const includePrimaryKey = options.syncPrimaryKey ?? options.includeId ?? false;\n\n // Parse each column: \"columnName: type\" or \"columnName?: type\"\n const columnRegex = /(\\w+)\\??:\\s*([^,\\n]+)/g;\n let match;\n while ((match = columnRegex.exec(rowContent)) !== null) {\n const [, columnName, columnType] = match;\n // Skip columns unless syncPrimaryKey is true for id column\n const shouldSkip = options.skipColumns.has(columnName) && !(includePrimaryKey && columnName === 'id');\n if (!shouldSkip) {\n columns.set(columnName, columnType.trim());\n }\n }\n return columns;\n}\n\n/**\n * Extract a table definition from the database.types.ts content\n */\nexport function extractTableDef(content: string, tableName: string, schema: string): string | null {\n // Find the schema section\n const schemaRegex = new RegExp(`${schema}:\\\\s*\\\\{[\\\\s\\\\S]*?Tables:\\\\s*\\\\{`, 'g');\n const schemaMatch = schemaRegex.exec(content);\n if (!schemaMatch) return null;\n const startIndex = schemaMatch.index;\n\n // Find this specific table within the schema\n // Use negative lookbehind (?<![A-Za-z]) to avoid matching table names that are\n // substrings of other names (e.g., \"Tag\" in \"CommentTag\")\n const tableRegex = new RegExp(`(?<![A-Za-z])${tableName}:\\\\s*\\\\{[\\\\s\\\\S]*?Row:\\\\s*\\\\{[\\\\s\\\\S]*?\\\\}[\\\\s\\\\S]*?Relationships:\\\\s*\\\\[[^\\\\]]*\\\\]\\\\s*\\\\}`, 'g');\n\n // Search from the schema start\n const searchContent = content.slice(startIndex);\n const tableMatch = tableRegex.exec(searchContent);\n return tableMatch ? 
tableMatch[0] : null;\n}\n\n/**\n * Parse a database.types.ts file and extract specified tables\n */\nexport function parseTypesFile(content: string, tables: TableConfig[], skipColumns: Set<string>): ParsedTable[] {\n const parsedTables: ParsedTable[] = [];\n for (const tableConfig of tables) {\n const {\n name,\n schema = 'public',\n syncPrimaryKey,\n includeId\n } = tableConfig;\n const tableDef = extractTableDef(content, name, schema);\n if (!tableDef) {\n continue;\n }\n const columns = parseRowType(tableDef, {\n skipColumns,\n syncPrimaryKey,\n includeId\n });\n if (columns.size > 0) {\n parsedTables.push({\n name,\n schema,\n columns,\n config: tableConfig\n });\n }\n }\n return parsedTables;\n}\n\n/**\n * Get all available schemas from the types file\n */\nexport function getAvailableSchemas(content: string): string[] {\n const schemas: string[] = [];\n const schemaRegex = /(\\w+):\\s*\\{[\\s\\S]*?Tables:\\s*\\{/g;\n let match;\n while ((match = schemaRegex.exec(content)) !== null) {\n schemas.push(match[1]);\n }\n return schemas;\n}\n\n/**\n * Get all table names in a schema\n */\nexport function getTablesInSchema(content: string, schema: string): string[] {\n const tables: string[] = [];\n\n // Find the schema section\n const schemaRegex = new RegExp(`${schema}:\\\\s*\\\\{[\\\\s\\\\S]*?Tables:\\\\s*\\\\{([\\\\s\\\\S]*?)\\\\}\\\\s*Views:`, 'g');\n const schemaMatch = schemaRegex.exec(content);\n if (!schemaMatch) return tables;\n const tablesContent = schemaMatch[1];\n\n // Find table names (they're at the start of each table definition)\n const tableNameRegex = /^\\s*(\\w+):\\s*\\{/gm;\n let match;\n while ((match = tableNameRegex.exec(tablesContent)) !== null) {\n tables.push(match[1]);\n }\n return tables;\n}","/**\n * Output templates for PowerSync schema generation\n */\n\nimport type { ParsedTable } from './parser.js';\n\n/**\n * File header template\n */\nexport function generateHeader(typesPath: string): string {\n return `/**\n * PowerSync Schema 
Definition\n *\n * AUTO-GENERATED from ${typesPath}\n * Run: npx @pol-studios/powersync generate-schema\n *\n * DO NOT EDIT MANUALLY - changes will be overwritten\n */\n\nimport { column, Schema, Table } from \"@powersync/react-native\";\n`;\n}\n\n/**\n * Generate the table definition for a parsed table\n */\nexport function generateTableDefinition(table: ParsedTable, columnDefs: string[]): string {\n if (columnDefs.length === 0) {\n return `// ${table.name} - no syncable columns found`;\n }\n const optionsStr = table.config.trackMetadata ? ', { trackMetadata: true }' : '';\n return `const ${table.name} = new Table({\n${columnDefs.join('\\n')}\n}${optionsStr});`;\n}\n\n/**\n * Generate the schema export section\n */\nexport function generateSchemaExport(tableNames: string[]): string {\n return `// ============================================================================\n// SCHEMA EXPORT\n// ============================================================================\n\n// NOTE: photo_attachments is NOT included here.\n// The AttachmentQueue from @powersync/attachments creates and manages\n// its own internal SQLite table (not a view) during queue.init().\n// This allows INSERT/UPDATE operations to work correctly.\n\nexport const AppSchema = new Schema({\n${tableNames.map(name => ` ${name},`).join('\\n')}\n});\n\nexport type Database = (typeof AppSchema)[\"types\"];`;\n}\n\n/**\n * Generate schema mapping utilities\n */\nexport function generateSchemaMapping(tables: ParsedTable[], schemas: string[]): string {\n // Group tables by non-public schemas\n const schemaGroups = new Map<string, string[]>();\n for (const schema of schemas) {\n if (schema !== 'public') {\n schemaGroups.set(schema, []);\n }\n }\n for (const table of tables) {\n if (table.schema !== 'public' && schemaGroups.has(table.schema)) {\n schemaGroups.get(table.schema)!.push(table.name);\n }\n }\n const sections: string[] = [`// 
============================================================================\n// SCHEMA MAPPING FOR CONNECTOR\n// ============================================================================`];\n\n // Generate constants for each non-public schema\n for (const [schema, tableNames] of schemaGroups) {\n if (tableNames.length > 0) {\n const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;\n sections.push(`\n// Tables in the '${schema}' schema (need .schema('${schema}') in Supabase queries)\nexport const ${constName} = new Set([\n${tableNames.map(name => ` \"${name}\",`).join('\\n')}\n]);`);\n }\n }\n\n // Generate helper function\n const schemaChecks = Array.from(schemaGroups.entries()).filter(([, names]) => names.length > 0).map(([schema]) => {\n const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;\n return ` if (${constName}.has(tableName)) return \"${schema}\";`;\n });\n if (schemaChecks.length > 0) {\n sections.push(`\n/**\n * Get the Supabase schema for a table\n */\nexport function getTableSchema(tableName: string): ${schemas.map(s => `\"${s}\"`).join(' | ')} {\n${schemaChecks.join('\\n')}\n return \"public\";\n}`);\n } else {\n sections.push(`\n/**\n * Get the Supabase schema for a table\n */\nexport function getTableSchema(tableName: string): \"public\" {\n return \"public\";\n}`);\n }\n return sections.join('\\n');\n}\n\n/**\n * Generate the FK detection utility (helpful for consumers)\n */\nexport function generateFKUtility(): string {\n return `\n// ============================================================================\n// FOREIGN KEY UTILITIES\n// ============================================================================\n\n/**\n * Check if a column name represents a foreign key reference\n * Convention: columns ending in 'Id' are foreign keys (e.g., projectId -> Project table)\n */\nexport function isForeignKeyColumn(columnName: string): boolean {\n return columnName.endsWith('Id') && columnName !== 'id';\n}\n\n/**\n * Get the referenced 
table name from a foreign key column\n * e.g., 'projectId' -> 'Project', 'equipmentFixtureUnitId' -> 'EquipmentFixtureUnit'\n */\nexport function getForeignKeyTable(columnName: string): string | null {\n if (!isForeignKeyColumn(columnName)) return null;\n // Remove 'Id' suffix and capitalize first letter\n const baseName = columnName.slice(0, -2);\n return baseName.charAt(0).toUpperCase() + baseName.slice(1);\n}`;\n}\n\n/**\n * Generate complete output file\n */\nexport function generateOutputFile(tables: ParsedTable[], tableDefs: string[], schemas: string[], typesPath: string): string {\n const tableNames = tables.map(t => t.name);\n return `${generateHeader(typesPath)}\n\n// ============================================================================\n// TABLE DEFINITIONS\n// ============================================================================\n\n${tableDefs.join('\\n\\n')}\n\n${generateSchemaExport(tableNames)}\n\n${generateSchemaMapping(tables, ['public', ...schemas.filter(s => s !== 
'public')])}\n${generateFKUtility()}\n`;\n}"],"mappings":";AA4CO,SAAS,aAAa,QAA0C;AACrE,SAAO;AACT;AAKO,IAAM,uBAAuB;AAAA,EAAC;AAAA;AAAA;AAAA,EAGrC;AAAU;AAKH,IAAM,2BAA2B,CAAC,SAAS,SAAS,WAAW,QAAQ,UAAU,SAAS,QAAQ,OAAO;;;ACrDhH,YAAY,QAAQ;AACpB,YAAY,UAAU;;;AC0Bf,SAAS,aAAa,cAAsB,SAA4C;AAC7F,QAAM,UAAU,oBAAI,IAAoB;AAGxC,QAAM,WAAW,aAAa,MAAM,wCAAwC;AAC5E,MAAI,CAAC,SAAU,QAAO;AACtB,QAAM,aAAa,SAAS,CAAC;AAG7B,QAAM,oBAAoB,QAAQ,kBAAkB,QAAQ,aAAa;AAGzE,QAAM,cAAc;AACpB,MAAI;AACJ,UAAQ,QAAQ,YAAY,KAAK,UAAU,OAAO,MAAM;AACtD,UAAM,CAAC,EAAE,YAAY,UAAU,IAAI;AAEnC,UAAM,aAAa,QAAQ,YAAY,IAAI,UAAU,KAAK,EAAE,qBAAqB,eAAe;AAChG,QAAI,CAAC,YAAY;AACf,cAAQ,IAAI,YAAY,WAAW,KAAK,CAAC;AAAA,IAC3C;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,gBAAgB,SAAiB,WAAmB,QAA+B;AAEjG,QAAM,cAAc,IAAI,OAAO,GAAG,MAAM,oCAAoC,GAAG;AAC/E,QAAM,cAAc,YAAY,KAAK,OAAO;AAC5C,MAAI,CAAC,YAAa,QAAO;AACzB,QAAM,aAAa,YAAY;AAK/B,QAAM,aAAa,IAAI,OAAO,gBAAgB,SAAS,8FAA8F,GAAG;AAGxJ,QAAM,gBAAgB,QAAQ,MAAM,UAAU;AAC9C,QAAM,aAAa,WAAW,KAAK,aAAa;AAChD,SAAO,aAAa,WAAW,CAAC,IAAI;AACtC;AAKO,SAAS,eAAe,SAAiB,QAAuB,aAAyC;AAC9G,QAAM,eAA8B,CAAC;AACrC,aAAW,eAAe,QAAQ;AAChC,UAAM;AAAA,MACJ;AAAA,MACA,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF,IAAI;AACJ,UAAM,WAAW,gBAAgB,SAAS,MAAM,MAAM;AACtD,QAAI,CAAC,UAAU;AACb;AAAA,IACF;AACA,UAAM,UAAU,aAAa,UAAU;AAAA,MACrC;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,QAAI,QAAQ,OAAO,GAAG;AACpB,mBAAa,KAAK;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,oBAAoB,SAA2B;AAC7D,QAAM,UAAoB,CAAC;AAC3B,QAAM,cAAc;AACpB,MAAI;AACJ,UAAQ,QAAQ,YAAY,KAAK,OAAO,OAAO,MAAM;AACnD,YAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,EACvB;AACA,SAAO;AACT;AAKO,SAAS,kBAAkB,SAAiB,QAA0B;AAC3E,QAAM,SAAmB,CAAC;AAG1B,QAAM,cAAc,IAAI,OAAO,GAAG,MAAM,6DAA6D,GAAG;AACxG,QAAM,cAAc,YAAY,KAAK,OAAO;AAC5C,MAAI,CAAC,YAAa,QAAO;AACzB,QAAM,gBAAgB,YAAY,CAAC;AAGnC,QAAM,iBAAiB;AACvB,MAAI;AACJ,UAAQ,QAAQ,eAAe,KAAK,aAAa,OAAO,MAAM;AAC5D,WAAO,KAAK,MAAM,CAAC,CAAC;AAAA,EACtB;AACA,SAAO;AACT;;;ACvIO,SAAS,eAAe,WAA2B;AACxD,SAAO;AAAA;AAAA;AAAA,yBAGgB,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAQlC;AAKO,SAAS,wBAAwB,OAAoB,YAA8B;AACxF,MAAI,WAAW,WAAW,GAAG;AAC3B,WAAO,MAAM,MAAM,IAAI;AAAA,EACzB;AACA,QAAM,aAAa,MAAM,OAAO,gBAAgB,8BAA8B;AAC9E,SAAO,SAAS,MAAM,IAAI;AAAA,EAC1B,WAAW,KAAK,IAAI,CAAC;AAAA,GACpB,UAAU;AACb;AAKO,SAAS,qBAAqB,YAA8B;AACjE,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUP,WAAW,IAAI,UAAQ,KAAK,IAAI,GAAG,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAIjD;AAKO,SAAS,sBAAsB,QAAuB,SAA2B;AAEtF,QAAM,eAAe,oBAAI,IAAsB;AAC/C,aAAW,UAAU,SAAS;AAC5B,QAAI,WAAW,UAAU;AACvB,mBAAa,IAAI,QAAQ,CAAC,CAAC;AAAA,IAC7B;AAAA,EACF;AACA,aAAW,SAAS,QAAQ;AAC1B,QAAI,MAAM,WAAW,YAAY,aAAa,IAAI,MAAM,MAAM,GAAG;AAC/D,mBAAa,IAAI,MAAM,MAAM,EAAG,KAAK,MAAM,IAAI;AAAA,IACjD;AAAA,EACF;AACA,QAAM,WAAqB,CAAC;AAAA;AAAA,gFAEkD;AAG9E,aAAW,CAAC,QAAQ,UAAU,KAAK,cAAc;AAC/C,QAAI,WAAW,SAAS,GAAG;AACzB,YAAM,YAAY,GAAG,OAAO,YAAY,CAAC;AACzC,eAAS,KAAK;AAAA,oBACA,MAAM,2BAA2B,MAAM;AAAA,eAC5C,SAAS;AAAA,EACtB,WAAW,IAAI,UAAQ,MAAM,IAAI,IAAI,EAAE,KAAK,IAAI,CAAC;AAAA,IAC/C;AAAA,IACA;AAAA,EACF;AAGA,QAAM,eAAe,MAAM,KAAK,aAAa,QAAQ,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,MAAM,SAAS,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,MAAM;AAChH,UAAM,YAAY,GAAG,OAAO,YAAY,CAAC;AACzC,WAAO,SAAS,SAAS,4BAA4B,MAAM;AAAA,EAC7D,CAAC;AACD,MAAI,aAAa,SAAS,GAAG;AAC3B,aAAS,KAAK;AAAA;AAAA;AAAA;AAAA,qDAImC,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,KAAK,CAAC;AAAA,EACzF,aAAa,KAAK,IAAI,CAAC;AAAA;AAAA,EAEvB;AAAA,EACA,OAAO;AACL,aAAS,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhB;AAAA,EACA;AACA,SAAO,SAAS,KAAK,IAAI;AAC3B;AAKO,SAAS,oBAA4B;AAC1C,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuBT;AAKO,SAAS,mBAAmB,QAAuB,WAAqB,SAAmB,WAA2B;AAC3H,QAAM,aAAa,OAAO,IAAI,OAAK,EAAE,IAAI;AACzC,SAAO,GAAG,eAAe,SAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMnC,UAAU,KAAK,MAAM,CAAC;AAAA;AAAA,EAEtB,qBAAqB,UAAU,CAAC;AAAA;AAAA,EAEhC,sBAAsB,QAAQ,CAAC,UAAU,GAAG,QAAQ,OAAO,OAAK,MAAM,QAAQ,CAAC,CAAC,CAAC;AAAA,EACjF,kBAAkB,CAAC;AAAA;AAErB;;;AFnIO,SAAS,mBAAmB,QAAgB,YAAoB,iBAAiD;AAEtH,QAAM,YAAY,OAAO,KAAK,EAAE,QAAQ,iBAAiB,EAAE;AAG3D,MAAI,UAAU,SAA
S,MAAM,KAAK,UAAU,SAAS,SAAS,KAAK,UAAU,SAAS,GAAG,GAAG;AAC1F,WAAO;AAAA,EACT;AAGA,MAAI,UAAU,SAAS,IAAI,GAAG;AAC5B,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,WAAW;AAC3B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAAA,EACF;AAGA,MAAI,cAAc,UAAU;AAE1B,QAAI,gBAAgB,KAAK,aAAW,WAAW,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC,CAAC,GAAG;AAC7F,aAAO;AAAA,QACL,MAAM;AAAA,MACR;AAAA,IACF;AACA,WAAO;AAAA,MACL,MAAM;AAAA,IACR;AAAA,EACF;AAGA,MAAI,cAAc,UAAU;AAC1B,WAAO;AAAA,MACL,MAAM;AAAA,IACR;AAAA,EACF;AAGA,MAAI,UAAU,SAAS,WAAW,KAAK,UAAU,SAAS,OAAO,GAAG;AAClE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ;AAAA,IACV;AAAA,EACF;AAGA,SAAO;AAAA,IACL,MAAM;AAAA,EACR;AACF;AAKO,SAAS,mBAAmB,OAAoB,iBAAqC;AAC1F,QAAM,aAAuB,CAAC;AAC9B,aAAW,CAAC,YAAY,MAAM,KAAK,MAAM,SAAS;AAChD,UAAM,UAAU,mBAAmB,QAAQ,YAAY,eAAe;AACtE,QAAI,SAAS;AAEX,UAAI,UAAU;AACd,UAAI,QAAQ,WAAW;AACrB,kBAAU;AAAA,MACZ,WAAW,QAAQ,QAAQ;AACzB,kBAAU;AAAA,MACZ;AACA,iBAAW,KAAK,KAAK,UAAU,KAAK,QAAQ,IAAI,IAAI,OAAO,EAAE;AAAA,IAC/D;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAsB,eAAe,QAAyB,SAIlC;AAC1B,QAAM,MAAM,SAAS,OAAO,QAAQ,IAAI;AACxC,QAAM,UAAU,SAAS,WAAW;AACpC,QAAM,SAAS,SAAS,UAAU;AAClC,QAAM,SAAyB;AAAA,IAC7B,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,YAAY;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,UAAU,CAAC;AAAA,EACb;AAGA,QAAM,YAAiB,gBAAW,OAAO,SAAS,IAAI,OAAO,YAAiB,aAAQ,KAAK,OAAO,SAAS;AAC3G,QAAM,aAAkB,gBAAW,OAAO,UAAU,IAAI,OAAO,aAAkB,aAAQ,KAAK,OAAO,UAAU;AAC/G,SAAO,aAAa;AAGpB,MAAI,CAAI,cAAW,SAAS,GAAG;AAC7B,WAAO,OAAO,KAAK,yBAAyB,SAAS,EAAE;AACvD,WAAO;AAAA,EACT;AAGA,MAAI,SAAS;AACX,YAAQ,IAAI,uBAAuB,SAAS,EAAE;AAAA,EAChD;AACA,QAAM,eAAkB,gBAAa,WAAW,OAAO;AAGvD,QAAM,cAAc,oBAAI,IAAI,CAAC,GAAG,sBAAsB,GAAI,OAAO,eAAe,CAAC,CAAE,CAAC;AAGpF,QAAM,kBAAkB,CAAC,GAAG,0BAA0B,GAAI,OAAO,mBAAmB,CAAC,CAAE;AAGvF,QAAM,eAAe,eAAe,cAAc,OAAO,QAAQ,WAAW;AAG5E,aAAW,eAAe,OAAO,QAAQ;AACvC,UAAM,QAAQ,aAAa,KAAK,OAAK,EAAE,SAAS,YAAY,IAAI;AAChE,QAAI,CAAC,OAAO;AACV,aAAO,SAAS,KAAK,UAAU,YAAY,IAAI,0BAA0B,YAAY,UAAU,QAAQ,GAAG;AAAA,IAC5G;AAAA,EACF;AACA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO,OAAO,KAAK,oCAAoC;AACvD,WAAO;AAAA,EACT;AAGA,QAAM,YAAsB,CAAC;AAC7B,aAAW,SAAS,cAAc;AAChC,QAAI,SAAS;AACX,Y
AAM,SAAS,MAAM,OAAO,kBAAkB,MAAM,OAAO;AAC3D,cAAQ,IAAI,cAAc,MAAM,MAAM,IAAI,MAAM,IAAI,KAAK,MAAM,QAAQ,IAAI,YAAY,MAAM,OAAO,gBAAgB,qBAAqB,EAAE,GAAG,SAAS,sBAAsB,EAAE,EAAE;AAAA,IACnL;AACA,UAAM,aAAa,mBAAmB,OAAO,eAAe;AAC5D,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,SAAS,KAAK,UAAU,MAAM,IAAI,2BAA2B;AACpE;AAAA,IACF;AACA,cAAU,KAAK,wBAAwB,OAAO,UAAU,CAAC;AAAA,EAC3D;AAGA,QAAM,UAAU,CAAC,GAAG,IAAI,IAAI,OAAO,OAAO,IAAI,OAAK,EAAE,UAAU,QAAQ,CAAC,CAAC;AAGzE,QAAM,eAAoB,cAAS,KAAK,SAAS;AACjD,QAAM,SAAS,mBAAmB,aAAa,OAAO,OAAK,UAAU,KAAK,SAAO,IAAI,SAAS,SAAS,EAAE,IAAI,IAAI,CAAC,CAAC,GAAG,WAAW,SAAS,YAAY;AAGtJ,MAAI,QAAQ;AACV,WAAO,UAAU;AACjB,WAAO,kBAAkB,UAAU;AACnC,WAAO,SAAS;AAChB,WAAO;AAAA,EACT;AAGA,QAAM,YAAiB,aAAQ,UAAU;AACzC,MAAI,CAAI,cAAW,SAAS,GAAG;AAC7B,IAAG,aAAU,WAAW;AAAA,MACtB,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAGA,EAAG,iBAAc,YAAY,MAAM;AACnC,SAAO,UAAU;AACjB,SAAO,kBAAkB,UAAU;AACnC,SAAO;AACT;","names":[]}
package/dist/index.d.ts CHANGED
@@ -1,24 +1,26 @@
1
1
  import { A as AbstractPowerSyncDatabase } from './types-CDqWh56B.js';
2
2
  export { k as CacheStats, i as ClassifiedError, j as CompactResult, h as CompletedTransaction, b as ConnectionHealth, o as CountRow, C as CrudEntry, l as CrudTransaction, n as DbStatRow, D as DownloadProgress, E as EntitySyncState, F as FailedTransaction, r as FreelistCountRow, s as IntegrityCheckRow, I as IntegrityResult, q as PageCountRow, p as PageSizeRow, P as PowerSyncBackendConnector, m as SqliteTableRow, c as StorageInfo, d as StorageQuota, f as SyncError, g as SyncErrorType, e as SyncMetrics, S as SyncMode, a as SyncStatus, T as TableCacheStats } from './types-CDqWh56B.js';
3
- export { ATTACHMENT_DOWNLOAD_TIMEOUT_MS, ATTACHMENT_RETRY_DELAY_MS, AttachmentError, COMPRESSION_MAX_WIDTH, COMPRESSION_SKIP_SIZE_BYTES, COMPRESSION_TARGET_SIZE_BYTES, ConfigurationError, ConnectorError, DEFAULT_ATTACHMENT_CACHE_SIZE, DEFAULT_ATTACHMENT_CONCURRENCY, DEFAULT_COMPRESSION_QUALITY, DEFAULT_MAX_RETRY_ATTEMPTS, DEFAULT_RETRY_BACKOFF_MULTIPLIER, DEFAULT_RETRY_BASE_DELAY_MS, DEFAULT_RETRY_MAX_DELAY_MS, DEFAULT_SYNC_INTERVAL_MS, DEFAULT_SYNC_MODE, DOWNLOAD_STOP_THRESHOLD, EVICTION_TRIGGER_THRESHOLD, HEALTH_CHECK_INTERVAL_MS, HEALTH_CHECK_TIMEOUT_MS, InitializationError, LATENCY_DEGRADED_THRESHOLD_MS, MAX_CONSECUTIVE_FAILURES, PlatformAdapterError, PowerSyncError, STATS_CACHE_TTL_MS, STATUS_NOTIFY_THROTTLE_MS, STORAGE_CRITICAL_THRESHOLD, STORAGE_KEY_ATTACHMENT_SETTINGS, STORAGE_KEY_AUTO_OFFLINE, STORAGE_KEY_ENABLED, STORAGE_KEY_METRICS, STORAGE_KEY_PAUSED, STORAGE_KEY_PREFIX, STORAGE_KEY_SYNC_MODE, STORAGE_WARNING_THRESHOLD, SyncOperationError, classifyError, classifySupabaseError, createSyncError, extractEntityIds, extractTableNames, generateFailureId, toSyncOperationError } from './core/index.js';
4
- import { k as ConflictDetectionConfig, h as ConflictCheckResult } from './supabase-connector-qLm-WHkM.js';
5
- export { g as ConflictBus, j as ConflictHandler, l as ConflictListener, i as ConflictResolution, C as ConnectorCircuitBreakerConfig, b as ConnectorConfig, e as CrudHandler, D as DEFAULT_RETRY_CONFIG, F as FieldConflict, P as PowerSyncCredentials, m as ResolutionListener, f as RetryConfig, R as RetryStrategyConfig, c as SchemaRouter, S as SupabaseConnector, a as SupabaseConnectorOptions, d as defaultSchemaRouter } from './supabase-connector-qLm-WHkM.js';
6
- export { CircuitBreaker, CircuitBreakerConfig, CircuitBreakerStats, CircuitOpenError, CircuitState, DEFAULT_CIRCUIT_BREAKER_CONFIG } from './connector/index.js';
7
- export { t as AttachmentRecord, A as AttachmentSourceConfig, q as AttachmentStatsRow, e as AttachmentStorageAdapter, p as AttachmentSyncStats, o as AttachmentSyncStatus, h as CacheConfig, r as CacheFileRow, s as CachedSizeRow, C as CompressionConfig, i as DEFAULT_CACHE_CONFIG, D as DEFAULT_COMPRESSION_CONFIG, f as DEFAULT_UPLOAD_CONFIG, k as DownloadPhase, l as DownloadStatus, E as EvictRow, I as IdRow, P as PolAttachmentQueue, j as PolAttachmentQueueConfig, a as PolAttachmentQueueOptions, d as PolAttachmentRecord, b as PolAttachmentState, U as UploadConfig, g as UploadHandler, m as UploadPhase, n as UploadStatus, c as createPolAttachmentQueue } from './pol-attachment-queue-C7YNXXhK.js';
8
- export { PolStorageAdapter, PolStorageAdapterOptions } from './attachments/index.js';
3
+ export { ATTACHMENT_DOWNLOAD_TIMEOUT_MS, ATTACHMENT_RETRY_DELAY_MS, AttachmentError, COMPRESSION_MAX_WIDTH, COMPRESSION_SKIP_SIZE_BYTES, COMPRESSION_TARGET_SIZE_BYTES, ConfigurationError, ConnectorError, DEFAULT_ATTACHMENT_CACHE_SIZE, DEFAULT_ATTACHMENT_CONCURRENCY, DEFAULT_COMPRESSION_QUALITY, DEFAULT_MAX_RETRY_ATTEMPTS, DEFAULT_RETRY_BACKOFF_MULTIPLIER, DEFAULT_RETRY_BASE_DELAY_MS, DEFAULT_RETRY_MAX_DELAY_MS, DEFAULT_SYNC_INTERVAL_MS, DEFAULT_SYNC_MODE, DOWNLOAD_STOP_THRESHOLD, EVICTION_TRIGGER_THRESHOLD, HEALTH_CHECK_INTERVAL_MS, HEALTH_CHECK_TIMEOUT_MS, InitializationError, LATENCY_DEGRADED_THRESHOLD_MS, MAX_CONSECUTIVE_FAILURES, PlatformAdapterError, PowerSyncError, STATS_CACHE_TTL_MS, STATUS_NOTIFY_THROTTLE_MS, STORAGE_CRITICAL_THRESHOLD, STORAGE_KEY_ATTACHMENT_SETTINGS, STORAGE_KEY_AUTO_OFFLINE, STORAGE_KEY_ENABLED, STORAGE_KEY_METRICS, STORAGE_KEY_PAUSED, STORAGE_KEY_PREFIX, STORAGE_KEY_SYNC_MODE, STORAGE_WARNING_THRESHOLD, SyncOperationError, classifyError, classifySupabaseError, createSyncError, extractEntityIds, extractTableNames, generateFailureId, isRlsError, toSyncOperationError } from './core/index.js';
4
+ import { k as ConflictDetectionConfig, h as ConflictCheckResult } from './types-B212hgfA.js';
5
+ export { A as AuthProvider, g as ConflictBus, j as ConflictHandler, l as ConflictListener, i as ConflictResolution, C as ConnectorCircuitBreakerConfig, a as ConnectorConfig, c as CrudHandler, D as DEFAULT_RETRY_CONFIG, F as FieldConflict, P as PowerSyncCredentials, m as ResolutionListener, e as RetryConfig, R as RetryStrategyConfig, b as SchemaRouter, f as Session, S as SupabaseConnectorOptions, d as defaultSchemaRouter } from './types-B212hgfA.js';
6
+ export { S as SupabaseConnector } from './supabase-connector-T9vHq_3i.js';
7
+ export { AuthRefreshError, CircuitBreaker, CircuitBreakerConfig, CircuitBreakerStats, CircuitOpenError, CircuitState, DEFAULT_CIRCUIT_BREAKER_CONFIG, SupabaseAuthOptions, createSupabaseAuth } from './connector/index.js';
8
+ export { e as AttachmentConfig, z as AttachmentRecord, A as AttachmentSourceConfig, w as AttachmentStatsRow, f as AttachmentStorageAdapter, v as AttachmentSyncStats, u as AttachmentSyncStatus, B as BatchFilterContext, m as CACHE_SIZE_PRESETS, k as CacheConfig, x as CacheFileRow, n as CacheSizePreset, o as CacheSizeValue, y as CachedSizeRow, C as CompressionConfig, l as DEFAULT_CACHE_CONFIG, D as DEFAULT_COMPRESSION_CONFIG, j as DEFAULT_DOWNLOAD_CONFIG, g as DEFAULT_UPLOAD_CONFIG, i as DownloadConfig, q as DownloadPhase, r as DownloadStatus, E as EvictRow, I as IdRow, P as PolAttachmentQueue, a as PolAttachmentQueueOptions, d as PolAttachmentRecord, b as PolAttachmentState, S as SkipDownloadContext, U as UploadConfig, h as UploadHandler, s as UploadPhase, t as UploadStatus, W as WatchConfig, c as createPolAttachmentQueue, p as formatCacheSize } from './pol-attachment-queue-BVAIueoP.js';
9
+ export { CacheManagerDeps, DOWNLOAD_WORKFLOW_STATES, DownloadManagerDeps, LOCALLY_AVAILABLE_STATES, MigrationStats, PENDING_DOWNLOAD_STATES, PROTECTED_UPLOAD_STATES, PolStorageAdapter, PolStorageAdapterOptions, STATE_MAPPING, STATE_NAMES, TERMINAL_STATES, UPLOAD_WORKFLOW_STATES, UploadManagerDeps, UploadManagerState, VALID_STATES, blobToArrayBuffer, buildIdOnlyWatchQuery, buildRecordFetchQuery, buildWatchQuery, cacheLocalFile, clearCache, clearUploadCallback, copyToManagedCache, createCacheManagerDeps, createMigrationStats, createUploadManagerDeps, createUploadManagerState, determineAttachmentState, downloadRecord, enforceCacheLimit, ensureFileUri, extractErrorCode, formatMigrationStats, getCachedSize, getEvictionCandidates, getExcludeProtectedStatesCondition, getFailedPermanentUploads, getLocalUriForStoragePath, getPendingUploads, getProtectedStatesInClause, getSoonestRetryTime, getStaleUploads, getStateName, getSyncedUploadsWithPendingCallback, isCacheNearCapacity, isDownloadWorkflowState, isLocallyAvailable, isPendingDownloadState, isPermanentError, isProtectedUploadState, isStateTransitionAllowed, isTerminalState, isUploadWorkflowState, isValidAttachmentState, markUploadPermanentFailure, markUploadSynced, migrateAttachmentState, migrateAttachmentStateSafe, recordMigration, scheduleUploadRetry, startUploadProcessing, stripFileUri, uploadOne, validateSqlIdentifier, validateSqlIdentifierFromStateMachine, validateWhereClause, watchConfigToSourceConfig } from './attachments/index.js';
9
10
  export { B as BackgroundSyncOptions, h as BackgroundSyncSystem, e as HealthCheckResult, H as HealthMonitorOptions, M as MetricsCollectorOptions, P as PowerSyncRawStatus, c as SyncControlActions, f as SyncEvent, g as SyncEventListener, d as SyncOperationData, S as SyncScope, a as SyncStatusState, b as SyncStatusTrackerOptions, U as Unsubscribe, i as defineBackgroundSyncTask, j as initializeBackgroundSync, k as isBackgroundSyncRegistered, r as registerBackgroundSync, u as unregisterBackgroundSync } from './background-sync-ChCXW-EV.js';
10
11
  export { DeadLetterEntry, DeadLetterQueue, DeadLetterQueueListener, DeadLetterQueueOptions, DeadLetterReason, HealthMonitor, MetricsCollector, SyncStatusTracker, createDeadLetterEntry, generateDLQEntryId } from './sync/index.js';
11
12
  import { SupabaseClient } from '@supabase/supabase-js';
12
13
  export { DatabaseMaintenanceUtils, checkIntegrityImpl, checkStorageQuotaImpl, compactDatabaseImpl, getCacheStatsImpl, useDatabaseMaintenance } from './maintenance/index.js';
13
- export { A as AttachmentUploadHandler, a as BucketConfig, B as BucketResolver, D as DownloadResult, P as PowerSyncStorageAdapter, R as RemoteStorageAdapter, S as StorageUploadHandler, b as SupabaseStorageAdapterOptions, U as UploadOptions, r as resolveBucketFromConfig } from './types-Bgvx7-E8.js';
14
- export { a as CacheSettings, C as CacheSettingsManager, b as CacheSettingsManagerOptions, D as DEFAULT_CACHE_SETTINGS, S as SupabaseStorageAdapter, c as createSupabaseStorageAdapter } from './CacheSettingsManager-1exbOC6S.js';
15
- export { D as DEFAULT_UPLOAD_NOTIFICATION, S as SupabaseUploadHandlerOptions, U as UploadEventHandlers, a as UploadNotificationConfig } from './types-BVacP54t.js';
14
+ export { D as DownloadResult, a as StorageBackend, b as StorageUploadOptions, d as SupabaseClient, S as SupabaseStorage, c as SupabaseStorageOptions, e as SupabaseTransformOptions, U as UploadProgress, r as resolveBucket } from './types-D0WcHrq6.js';
15
+ export { f as CacheSettings, C as CacheSettingsManager, h as CacheSettingsManagerOptions, D as DEFAULT_CACHE_SETTINGS, I as ImageTransformOptions, S as SupabaseStorageAdapter, e as SupabaseStorageAdapterOptions, c as createSupabaseStorage, d as createSupabaseStorageAdapter, g as getStorageErrorMessage, a as isBlobResult, i as isFileResult, b as isStorageAuthError, n as normalizeStoragePath } from './CacheSettingsManager-uz-kbnRH.js';
16
+ export { D as DEFAULT_UPLOAD_NOTIFICATION, S as SupabaseUploadHandlerOptions, U as UploadEventHandlers, a as UploadNotificationConfig } from './types-CyvBaAl8.js';
16
17
  export { SupabaseUploadHandler, createSupabaseUploadHandler } from './storage/upload/index.js';
17
18
  export { PowerSyncErrorBoundary } from './error/index.js';
18
19
  export { AttachmentQueueContext, BackgroundSyncCallbacks, BackgroundSyncConfig, BackgroundSyncStatus, CompletedTransactionsContext, CompletedTransactionsContextValue, ConnectionHealthContext, ConnectionHealthContextValue, ConnectionStatusContext, ConnectionStatusContextValue, DEFAULT_CONNECTION_HEALTH, DEFAULT_SYNC_CONFIG, DEFAULT_SYNC_METRICS, DEFAULT_SYNC_STATUS, DataLayerConfig, DataLayerIntegration, EntitySyncStatusResult, FailedTransactionsContext, FailedTransactionsContextValue, OfflineDataAttachmentConfig, OfflineDataProvider, OfflineDataProviderConfig, OfflineDataProviderProps, PendingMutationsContext, PendingMutationsContextValue, PowerSyncConfig, PowerSyncContext, PowerSyncContextValue, PowerSyncProvider, PowerSyncProviderProps, PowerSyncSyncStatusSnapshot, ProviderBridge, ProviderBridgeProps, SyncActivityContext, SyncActivityContextValue, SyncActivityResult, SyncConfig, SyncMetricsContext, SyncMetricsContextValue, SyncModeContext, SyncModeContextValue, SyncStatusContext, SyncStatusContextValue, UploadStatusResult, useAttachmentQueue, useAttachmentQueueReady, useCompletedTransactions, useCompletedTransactionsContext, useConnectionHealth, useConnectionStatus, useDatabase, useDownloadProgress, useEntitySyncStatus, useFailedTransactions, useFailedTransactionsContext, useIsSyncing, useOnlineStatus, usePendingMutations, usePendingMutationsContext, usePlatform, usePowerSync, useSyncActivity, useSyncActivityContext, useSyncControl, useSyncMetrics, useSyncMode, useSyncModeContext, useSyncStatus, useUploadStatus } from './provider/index.js';
20
+ export { AttachmentQueueContextValue, SlimPowerSyncProvider, useAttachmentQueueContext, usePowerSyncContext, useSyncStatusContext } from './react/index.js';
19
21
  export { AsyncStorageAdapter, CompressedImage, CompressionOptions, ConnectionType, DatabaseOptions, FileInfo, FileSystemAdapter, ImageProcessorAdapter, LoggerAdapter, NetworkAdapter, PlatformAdapter, PlatformType, detectPlatform } from './platform/index.js';
20
22
  export { ATTACHMENT_TABLE, AbstractAttachmentQueue, AttachmentState, AttachmentTable, AttachmentTableOptions, AttachmentQueueOptions as BaseAttachmentQueueOptions, DEFAULT_ATTACHMENT_QUEUE_OPTIONS, EncodingType, AttachmentRecord as OfficialAttachmentRecord, StorageAdapter } from '@powersync/attachments';
21
- import '@powersync/common';
23
+ export { AbstractPowerSyncDatabase as PowerSyncDBInterface } from '@powersync/common';
22
24
  import 'react';
23
25
  import '@tanstack/react-query';
24
26
  import '@powersync/react-native';