@pol-studios/powersync 1.0.10 → 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,527 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/generator/cli.ts
4
+ import { Command } from "commander";
5
+ import * as path2 from "path";
6
+ import * as fs2 from "fs";
7
+ import pc from "picocolors";
8
+
9
+ // src/generator/generator.ts
10
+ import * as fs from "fs";
11
+ import * as path from "path";
12
+
13
+ // src/generator/config.ts
14
+ var DEFAULT_SKIP_COLUMNS = [
15
+ "id",
16
+ // PowerSync handles id automatically
17
+ // Legacy numeric ID columns - typically not needed after UUID migration
18
+ "legacyId"
19
+ ];
20
+ var DEFAULT_DECIMAL_PATTERNS = [
21
+ "hours",
22
+ "watts",
23
+ "voltage",
24
+ "rate",
25
+ "amount",
26
+ "price",
27
+ "cost",
28
+ "total"
29
+ ];
30
+
31
+ // src/generator/parser.ts
32
+ function parseRowType(tableContent, options) {
33
+ const columns = /* @__PURE__ */ new Map();
34
+ const rowMatch = tableContent.match(/Row:\s*\{([^}]+(?:\{[^}]*\}[^}]*)*)\}/s);
35
+ if (!rowMatch) return columns;
36
+ const rowContent = rowMatch[1];
37
+ const includePrimaryKey = options.syncPrimaryKey ?? options.includeId ?? false;
38
+ const columnRegex = /(\w+)\??:\s*([^,\n]+)/g;
39
+ let match;
40
+ while ((match = columnRegex.exec(rowContent)) !== null) {
41
+ const [, columnName, columnType] = match;
42
+ const shouldSkip = options.skipColumns.has(columnName) && !(includePrimaryKey && columnName === "id");
43
+ if (!shouldSkip) {
44
+ columns.set(columnName, columnType.trim());
45
+ }
46
+ }
47
+ return columns;
48
+ }
49
+ function extractTableDef(content, tableName, schema) {
50
+ const schemaRegex = new RegExp(
51
+ `${schema}:\\s*\\{[\\s\\S]*?Tables:\\s*\\{`,
52
+ "g"
53
+ );
54
+ const schemaMatch = schemaRegex.exec(content);
55
+ if (!schemaMatch) return null;
56
+ const startIndex = schemaMatch.index;
57
+ const tableRegex = new RegExp(
58
+ `(?<![A-Za-z])${tableName}:\\s*\\{[\\s\\S]*?Row:\\s*\\{[\\s\\S]*?\\}[\\s\\S]*?Relationships:\\s*\\[[^\\]]*\\]\\s*\\}`,
59
+ "g"
60
+ );
61
+ const searchContent = content.slice(startIndex);
62
+ const tableMatch = tableRegex.exec(searchContent);
63
+ return tableMatch ? tableMatch[0] : null;
64
+ }
65
+ function parseTypesFile(content, tables, skipColumns) {
66
+ const parsedTables = [];
67
+ for (const tableConfig of tables) {
68
+ const { name, schema = "public", syncPrimaryKey, includeId } = tableConfig;
69
+ const tableDef = extractTableDef(content, name, schema);
70
+ if (!tableDef) {
71
+ continue;
72
+ }
73
+ const columns = parseRowType(tableDef, {
74
+ skipColumns,
75
+ syncPrimaryKey,
76
+ includeId
77
+ });
78
+ if (columns.size > 0) {
79
+ parsedTables.push({
80
+ name,
81
+ schema,
82
+ columns,
83
+ config: tableConfig
84
+ });
85
+ }
86
+ }
87
+ return parsedTables;
88
+ }
89
+
90
+ // src/generator/templates.ts
91
+ function generateHeader(typesPath) {
92
+ return `/**
93
+ * PowerSync Schema Definition
94
+ *
95
+ * AUTO-GENERATED from ${typesPath}
96
+ * Run: npx @pol-studios/powersync generate-schema
97
+ *
98
+ * DO NOT EDIT MANUALLY - changes will be overwritten
99
+ */
100
+
101
+ import { column, Schema, Table } from "@powersync/react-native";
102
+ `;
103
+ }
104
+ function generateTableDefinition(table, columnDefs) {
105
+ if (columnDefs.length === 0) {
106
+ return `// ${table.name} - no syncable columns found`;
107
+ }
108
+ const optionsStr = table.config.trackMetadata ? ", { trackMetadata: true }" : "";
109
+ return `const ${table.name} = new Table({
110
+ ${columnDefs.join("\n")}
111
+ }${optionsStr});`;
112
+ }
113
+ function generateSchemaExport(tableNames) {
114
+ return `// ============================================================================
115
+ // SCHEMA EXPORT
116
+ // ============================================================================
117
+
118
+ // NOTE: photo_attachments is NOT included here.
119
+ // The AttachmentQueue from @powersync/attachments creates and manages
120
+ // its own internal SQLite table (not a view) during queue.init().
121
+ // This allows INSERT/UPDATE operations to work correctly.
122
+
123
+ export const AppSchema = new Schema({
124
+ ${tableNames.map((name) => ` ${name},`).join("\n")}
125
+ });
126
+
127
+ export type Database = (typeof AppSchema)["types"];`;
128
+ }
129
+ function generateSchemaMapping(tables, schemas) {
130
+ const schemaGroups = /* @__PURE__ */ new Map();
131
+ for (const schema of schemas) {
132
+ if (schema !== "public") {
133
+ schemaGroups.set(schema, []);
134
+ }
135
+ }
136
+ for (const table of tables) {
137
+ if (table.schema !== "public" && schemaGroups.has(table.schema)) {
138
+ schemaGroups.get(table.schema).push(table.name);
139
+ }
140
+ }
141
+ const sections = [
142
+ `// ============================================================================
143
+ // SCHEMA MAPPING FOR CONNECTOR
144
+ // ============================================================================`
145
+ ];
146
+ for (const [schema, tableNames] of schemaGroups) {
147
+ if (tableNames.length > 0) {
148
+ const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;
149
+ sections.push(`
150
+ // Tables in the '${schema}' schema (need .schema('${schema}') in Supabase queries)
151
+ export const ${constName} = new Set([
152
+ ${tableNames.map((name) => ` "${name}",`).join("\n")}
153
+ ]);`);
154
+ }
155
+ }
156
+ const schemaChecks = Array.from(schemaGroups.entries()).filter(([, names]) => names.length > 0).map(([schema]) => {
157
+ const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;
158
+ return ` if (${constName}.has(tableName)) return "${schema}";`;
159
+ });
160
+ if (schemaChecks.length > 0) {
161
+ sections.push(`
162
+ /**
163
+ * Get the Supabase schema for a table
164
+ */
165
+ export function getTableSchema(tableName: string): ${schemas.map((s) => `"${s}"`).join(" | ")} {
166
+ ${schemaChecks.join("\n")}
167
+ return "public";
168
+ }`);
169
+ } else {
170
+ sections.push(`
171
+ /**
172
+ * Get the Supabase schema for a table
173
+ */
174
+ export function getTableSchema(tableName: string): "public" {
175
+ return "public";
176
+ }`);
177
+ }
178
+ return sections.join("\n");
179
+ }
180
+ function generateFKUtility() {
181
+ return `
182
+ // ============================================================================
183
+ // FOREIGN KEY UTILITIES
184
+ // ============================================================================
185
+
186
+ /**
187
+ * Check if a column name represents a foreign key reference
188
+ * Convention: columns ending in 'Id' are foreign keys (e.g., projectId -> Project table)
189
+ */
190
+ export function isForeignKeyColumn(columnName: string): boolean {
191
+ return columnName.endsWith('Id') && columnName !== 'id';
192
+ }
193
+
194
+ /**
195
+ * Get the referenced table name from a foreign key column
196
+ * e.g., 'projectId' -> 'Project', 'equipmentFixtureUnitId' -> 'EquipmentFixtureUnit'
197
+ */
198
+ export function getForeignKeyTable(columnName: string): string | null {
199
+ if (!isForeignKeyColumn(columnName)) return null;
200
+ // Remove 'Id' suffix and capitalize first letter
201
+ const baseName = columnName.slice(0, -2);
202
+ return baseName.charAt(0).toUpperCase() + baseName.slice(1);
203
+ }`;
204
+ }
205
+ function generateOutputFile(tables, tableDefs, schemas, typesPath) {
206
+ const tableNames = tables.map((t) => t.name);
207
+ return `${generateHeader(typesPath)}
208
+
209
+ // ============================================================================
210
+ // TABLE DEFINITIONS
211
+ // ============================================================================
212
+
213
+ ${tableDefs.join("\n\n")}
214
+
215
+ ${generateSchemaExport(tableNames)}
216
+
217
+ ${generateSchemaMapping(tables, ["public", ...schemas.filter((s) => s !== "public")])}
218
+ ${generateFKUtility()}
219
+ `;
220
+ }
221
+
222
+ // src/generator/generator.ts
223
+ function mapTypeToPowerSync(tsType, columnName, decimalPatterns) {
224
+ const cleanType = tsType.trim().replace(/\s*\|\s*null/g, "");
225
+ if (cleanType.includes("Json") || cleanType.includes("unknown") || cleanType.includes("{")) {
226
+ return null;
227
+ }
228
+ if (cleanType.includes("[]")) {
229
+ return null;
230
+ }
231
+ if (cleanType === "boolean") {
232
+ return { type: "column.integer", isBoolean: true };
233
+ }
234
+ if (cleanType === "number") {
235
+ if (decimalPatterns.some(
236
+ (pattern) => columnName.toLowerCase().includes(pattern.toLowerCase())
237
+ )) {
238
+ return { type: "column.real" };
239
+ }
240
+ return { type: "column.integer" };
241
+ }
242
+ if (cleanType === "string") {
243
+ return { type: "column.text" };
244
+ }
245
+ if (cleanType.includes("Database[") && cleanType.includes("Enums")) {
246
+ return { type: "column.text", isEnum: true };
247
+ }
248
+ return { type: "column.text" };
249
+ }
250
+ function generateColumnDefs(table, decimalPatterns) {
251
+ const columnDefs = [];
252
+ for (const [columnName, tsType] of table.columns) {
253
+ const mapping = mapTypeToPowerSync(tsType, columnName, decimalPatterns);
254
+ if (mapping) {
255
+ let comment = "";
256
+ if (mapping.isBoolean) {
257
+ comment = " // boolean stored as 0/1";
258
+ } else if (mapping.isEnum) {
259
+ comment = " // enum stored as text";
260
+ }
261
+ columnDefs.push(` ${columnName}: ${mapping.type},${comment}`);
262
+ }
263
+ }
264
+ return columnDefs;
265
+ }
266
+ async function generateSchema(config, options) {
267
+ const cwd = options?.cwd ?? process.cwd();
268
+ const verbose = options?.verbose ?? false;
269
+ const dryRun = options?.dryRun ?? false;
270
+ const result = {
271
+ success: false,
272
+ tablesGenerated: 0,
273
+ outputPath: "",
274
+ errors: [],
275
+ warnings: []
276
+ };
277
+ const typesPath = path.isAbsolute(config.typesPath) ? config.typesPath : path.resolve(cwd, config.typesPath);
278
+ const outputPath = path.isAbsolute(config.outputPath) ? config.outputPath : path.resolve(cwd, config.outputPath);
279
+ result.outputPath = outputPath;
280
+ if (!fs.existsSync(typesPath)) {
281
+ result.errors.push(`Types file not found: ${typesPath}`);
282
+ return result;
283
+ }
284
+ if (verbose) {
285
+ console.log(`Reading types from: ${typesPath}`);
286
+ }
287
+ const typesContent = fs.readFileSync(typesPath, "utf-8");
288
+ const skipColumns = /* @__PURE__ */ new Set([
289
+ ...DEFAULT_SKIP_COLUMNS,
290
+ ...config.skipColumns ?? []
291
+ ]);
292
+ const decimalPatterns = [
293
+ ...DEFAULT_DECIMAL_PATTERNS,
294
+ ...config.decimalPatterns ?? []
295
+ ];
296
+ const parsedTables = parseTypesFile(
297
+ typesContent,
298
+ config.tables,
299
+ skipColumns
300
+ );
301
+ for (const tableConfig of config.tables) {
302
+ const found = parsedTables.some((t) => t.name === tableConfig.name);
303
+ if (!found) {
304
+ result.warnings.push(
305
+ `Table '${tableConfig.name}' not found in schema '${tableConfig.schema ?? "public"}'`
306
+ );
307
+ }
308
+ }
309
+ if (parsedTables.length === 0) {
310
+ result.errors.push("No tables were parsed successfully");
311
+ return result;
312
+ }
313
+ const tableDefs = [];
314
+ for (const table of parsedTables) {
315
+ if (verbose) {
316
+ const syncPK = table.config.syncPrimaryKey || table.config.includeId;
317
+ console.log(
318
+ `Processing ${table.schema}.${table.name} (${table.columns.size} columns)${table.config.trackMetadata ? " [trackMetadata]" : ""}${syncPK ? " [syncPrimaryKey]" : ""}`
319
+ );
320
+ }
321
+ const columnDefs = generateColumnDefs(table, decimalPatterns);
322
+ if (columnDefs.length === 0) {
323
+ result.warnings.push(`Table '${table.name}' has no syncable columns`);
324
+ continue;
325
+ }
326
+ tableDefs.push(generateTableDefinition(table, columnDefs));
327
+ }
328
+ const schemas = [...new Set(config.tables.map((t) => t.schema ?? "public"))];
329
+ const relativePath = path.relative(cwd, typesPath);
330
+ const output = generateOutputFile(
331
+ parsedTables.filter(
332
+ (t) => tableDefs.some((def) => def.includes(`const ${t.name} =`))
333
+ ),
334
+ tableDefs,
335
+ schemas,
336
+ relativePath
337
+ );
338
+ if (dryRun) {
339
+ result.success = true;
340
+ result.tablesGenerated = tableDefs.length;
341
+ result.output = output;
342
+ return result;
343
+ }
344
+ const outputDir = path.dirname(outputPath);
345
+ if (!fs.existsSync(outputDir)) {
346
+ fs.mkdirSync(outputDir, { recursive: true });
347
+ }
348
+ fs.writeFileSync(outputPath, output);
349
+ result.success = true;
350
+ result.tablesGenerated = tableDefs.length;
351
+ return result;
352
+ }
353
+
354
+ // src/generator/cli.ts
355
+ var program = new Command();
356
+ var VERSION = "1.0.0";
357
+ program.name("@pol-studios/powersync").description("PowerSync utilities for offline-first applications").version(VERSION);
358
+ program.command("generate-schema").description("Generate PowerSync schema from database.types.ts").option(
359
+ "-c, --config <path>",
360
+ "Path to config file",
361
+ "powersync.config.ts"
362
+ ).option("-v, --verbose", "Enable verbose output", false).option("-d, --dry-run", "Preview output without writing files", false).option("-w, --watch", "Watch for changes and regenerate", false).action(async (options) => {
363
+ const cwd = process.cwd();
364
+ console.log(pc.bold(pc.cyan("\n PowerSync Schema Generator\n")));
365
+ const configPath = path2.isAbsolute(options.config) ? options.config : path2.resolve(cwd, options.config);
366
+ if (!fs2.existsSync(configPath)) {
367
+ console.error(pc.red(` Error: Config file not found: ${configPath}`));
368
+ console.log(pc.dim(`
369
+ Create a powersync.config.ts file in your project root:
370
+
371
+ ${pc.cyan(`import { defineConfig } from '@pol-studios/powersync/generator';
372
+
373
+ export default defineConfig({
374
+ typesPath: './database.types.ts',
375
+ outputPath: './src/data/powersync-schema.ts',
376
+ tables: [
377
+ { name: 'User', schema: 'public' },
378
+ { name: 'Post', schema: 'public', trackMetadata: true },
379
+ ],
380
+ });`)}
381
+ `));
382
+ process.exit(1);
383
+ }
384
+ let config;
385
+ try {
386
+ console.log(pc.dim(` Loading config from: ${configPath}`));
387
+ const configModule = await import(configPath);
388
+ config = configModule.default;
389
+ if (!config || !config.tables || !config.typesPath || !config.outputPath) {
390
+ throw new Error(
391
+ "Invalid config: must export { typesPath, outputPath, tables }"
392
+ );
393
+ }
394
+ } catch (error) {
395
+ console.error(pc.red(` Error loading config: ${error}`));
396
+ console.log(pc.dim(`
397
+ Make sure your config file:
398
+ 1. Uses 'export default defineConfig({...})'
399
+ 2. Contains typesPath, outputPath, and tables properties
400
+ 3. Can be imported (run with tsx or ts-node if using TypeScript)
401
+ `));
402
+ process.exit(1);
403
+ }
404
+ const runGeneration = async () => {
405
+ console.log(pc.dim(` Types file: ${config.typesPath}`));
406
+ console.log(pc.dim(` Output file: ${config.outputPath}`));
407
+ console.log(pc.dim(` Tables: ${config.tables.length}`));
408
+ if (options.dryRun) {
409
+ console.log(pc.cyan(" Mode: dry-run (no files will be written)"));
410
+ }
411
+ console.log("");
412
+ const result = await generateSchema(config, {
413
+ cwd,
414
+ verbose: options.verbose,
415
+ dryRun: options.dryRun
416
+ });
417
+ if (result.warnings.length > 0) {
418
+ console.log(pc.yellow(" Warnings:"));
419
+ for (const warning of result.warnings) {
420
+ console.log(pc.yellow(` - ${warning}`));
421
+ }
422
+ console.log("");
423
+ }
424
+ if (result.errors.length > 0) {
425
+ console.log(pc.red(" Errors:"));
426
+ for (const error of result.errors) {
427
+ console.log(pc.red(` - ${error}`));
428
+ }
429
+ console.log("");
430
+ }
431
+ if (result.success) {
432
+ if (options.dryRun) {
433
+ console.log(
434
+ pc.cyan(` ${pc.bold("Dry run:")} Would generate ${result.tablesGenerated} tables`)
435
+ );
436
+ if (options.verbose && result.output) {
437
+ console.log(pc.dim("\n Generated output:\n"));
438
+ console.log(pc.dim(result.output.split("\n").map((l) => " " + l).join("\n")));
439
+ }
440
+ } else {
441
+ console.log(
442
+ pc.green(` ${pc.bold("Success!")} Generated ${result.tablesGenerated} tables`)
443
+ );
444
+ console.log(pc.dim(` Output: ${result.outputPath}`));
445
+ }
446
+ console.log("");
447
+ } else {
448
+ console.error(pc.red(" Schema generation failed"));
449
+ if (!options.watch) {
450
+ process.exit(1);
451
+ }
452
+ }
453
+ return result;
454
+ };
455
+ await runGeneration();
456
+ if (options.watch) {
457
+ const typesPath = path2.isAbsolute(config.typesPath) ? config.typesPath : path2.resolve(cwd, config.typesPath);
458
+ console.log(pc.cyan(` Watching for changes: ${typesPath}`));
459
+ console.log(pc.dim(" Press Ctrl+C to stop\n"));
460
+ fs2.watchFile(typesPath, { interval: 1e3 }, async (curr, prev) => {
461
+ if (curr.mtime !== prev.mtime) {
462
+ console.log(pc.cyan("\n File changed, regenerating...\n"));
463
+ await runGeneration();
464
+ }
465
+ });
466
+ process.on("SIGINT", () => {
467
+ fs2.unwatchFile(typesPath);
468
+ console.log(pc.dim("\n Stopped watching"));
469
+ process.exit(0);
470
+ });
471
+ }
472
+ });
473
+ program.command("init").description("Create a powersync.config.ts template").option("-f, --force", "Overwrite existing config file", false).action((options) => {
474
+ const cwd = process.cwd();
475
+ const configPath = path2.resolve(cwd, "powersync.config.ts");
476
+ if (fs2.existsSync(configPath) && !options.force) {
477
+ console.error(
478
+ pc.red(` Config file already exists: ${configPath}`)
479
+ );
480
+ console.log(pc.dim(" Use --force to overwrite"));
481
+ process.exit(1);
482
+ }
483
+ const template = `import { defineConfig } from '@pol-studios/powersync/generator';
484
+
485
+ export default defineConfig({
486
+ // Path to Supabase-generated types
487
+ typesPath: './database.types.ts',
488
+
489
+ // Output path for generated schema
490
+ outputPath: './src/data/powersync-schema.ts',
491
+
492
+ // Tables to sync
493
+ tables: [
494
+ // Public schema tables (default)
495
+ { name: 'User' },
496
+ { name: 'Post', trackMetadata: true },
497
+
498
+ // Tables in other schemas
499
+ // { name: 'Profile', schema: 'core' },
500
+
501
+ // For tables with integer PKs referenced by FKs in other tables,
502
+ // use syncPrimaryKey: true to include the id column
503
+ // { name: 'Group', schema: 'core', syncPrimaryKey: true },
504
+
505
+ // Access control tables (required for offline auth)
506
+ // { name: 'UserAccess', schema: 'core' },
507
+ // { name: 'UserGroup', schema: 'core' },
508
+ // { name: 'GroupAccessKey', schema: 'core', syncPrimaryKey: true },
509
+ // { name: 'Group', schema: 'core', syncPrimaryKey: true },
510
+ ],
511
+
512
+ // Optional: columns to always skip
513
+ // skipColumns: ['searchVector', 'tsv'],
514
+
515
+ // Optional: column name patterns for decimal values (use column.real)
516
+ // decimalPatterns: ['price', 'amount', 'rate'],
517
+ });
518
+ `;
519
+ fs2.writeFileSync(configPath, template);
520
+ console.log(pc.green(` Created config file: ${configPath}`));
521
+ console.log(pc.dim(`
522
+ Next steps:
523
+ 1. Edit powersync.config.ts with your tables
524
+ 2. Run: npx @pol-studios/powersync generate-schema
525
+ `));
526
+ });
527
+ program.parse();
@@ -0,0 +1,168 @@
1
+ /**
2
+ * Configuration types and helpers for PowerSync schema generator
3
+ */
4
+ interface TableConfig {
5
+ /** Table name (PascalCase as it appears in database.types.ts) */
6
+ name: string;
7
+ /** Schema name (defaults to 'public') */
8
+ schema?: string;
9
+ /** Enable ps_crud timestamp tracking for optimistic UI updates */
10
+ trackMetadata?: boolean;
11
+ /**
12
+ * Sync the primary key column (normally skipped as PowerSync handles it internally).
13
+ *
14
+ * Use this for tables with integer PKs that are referenced by FKs in other tables.
15
+ * Example: `Group.id` is referenced by `UserGroup.groupId`, so Group needs `syncPrimaryKey: true`
16
+ * to ensure the integer ID is available for client-side joins.
17
+ */
18
+ syncPrimaryKey?: boolean;
19
+ /** @deprecated Use `syncPrimaryKey` instead */
20
+ includeId?: boolean;
21
+ /** Columns to skip for this specific table (in addition to global skipColumns) */
22
+ skipColumns?: string[];
23
+ /** Only include these columns (overrides skipColumns if specified) */
24
+ onlyColumns?: string[];
25
+ }
26
+ interface GeneratorConfig {
27
+ /** Path to Supabase-generated database.types.ts file */
28
+ typesPath: string;
29
+ /** Output path for generated PowerSync schema */
30
+ outputPath: string;
31
+ /** Tables to include in the PowerSync schema */
32
+ tables: TableConfig[];
33
+ /** Columns to always skip (in addition to defaults like 'id') */
34
+ skipColumns?: string[];
35
+ /** Column name patterns that should use column.real for decimal values */
36
+ decimalPatterns?: string[];
37
+ /** Additional schemas to track (besides 'public' which is the default) */
38
+ schemas?: string[];
39
+ }
40
+ /**
41
+ * Define a PowerSync generator configuration with type safety
42
+ */
43
+ declare function defineConfig(config: GeneratorConfig): GeneratorConfig;
44
+ /**
45
+ * Default columns that are skipped during generation
46
+ */
47
+ declare const DEFAULT_SKIP_COLUMNS: string[];
48
+ /**
49
+ * Default column name patterns that indicate decimal values
50
+ */
51
+ declare const DEFAULT_DECIMAL_PATTERNS: string[];
52
+
53
+ /**
54
+ * Parser for Supabase database.types.ts files
55
+ *
56
+ * Extracts table definitions and column types from the generated TypeScript types
57
+ */
58
+
59
+ interface ColumnInfo {
60
+ name: string;
61
+ tsType: string;
62
+ isNullable: boolean;
63
+ }
64
+ interface ParsedTable {
65
+ name: string;
66
+ schema: string;
67
+ columns: Map<string, string>;
68
+ config: TableConfig;
69
+ }
70
+ interface ParseOptions {
71
+ /** Columns to skip */
72
+ skipColumns: Set<string>;
73
+ /**
74
+ * Include the id column (normally skipped).
75
+ * Use for tables with integer PKs referenced by FKs in other tables.
76
+ */
77
+ syncPrimaryKey?: boolean;
78
+ /** @deprecated Use `syncPrimaryKey` instead */
79
+ includeId?: boolean;
80
+ }
81
+ /**
82
+ * Parse the Row type from a table definition and extract columns
83
+ */
84
+ declare function parseRowType(tableContent: string, options: ParseOptions): Map<string, string>;
85
+ /**
86
+ * Extract a table definition from the database.types.ts content
87
+ */
88
+ declare function extractTableDef(content: string, tableName: string, schema: string): string | null;
89
+ /**
90
+ * Parse a database.types.ts file and extract specified tables
91
+ */
92
+ declare function parseTypesFile(content: string, tables: TableConfig[], skipColumns: Set<string>): ParsedTable[];
93
+ /**
94
+ * Get all available schemas from the types file
95
+ */
96
+ declare function getAvailableSchemas(content: string): string[];
97
+ /**
98
+ * Get all table names in a schema
99
+ */
100
+ declare function getTablesInSchema(content: string, schema: string): string[];
101
+
102
+ /**
103
+ * PowerSync schema generator
104
+ *
105
+ * Converts Supabase database.types.ts into PowerSync schema definitions
106
+ */
107
+
108
+ interface ColumnMapping {
109
+ type: 'column.text' | 'column.integer' | 'column.real';
110
+ isEnum?: boolean;
111
+ isBoolean?: boolean;
112
+ }
113
+ interface GenerateResult {
114
+ success: boolean;
115
+ tablesGenerated: number;
116
+ outputPath: string;
117
+ errors: string[];
118
+ warnings: string[];
119
+ /** Generated output (included when dryRun is true) */
120
+ output?: string;
121
+ }
122
+ /**
123
+ * Map TypeScript types to PowerSync column types
124
+ */
125
+ declare function mapTypeToPowerSync(tsType: string, columnName: string, decimalPatterns: string[]): ColumnMapping | null;
126
+ /**
127
+ * Generate column definitions for a table
128
+ */
129
+ declare function generateColumnDefs(table: ParsedTable, decimalPatterns: string[]): string[];
130
+ /**
131
+ * Generate PowerSync schema from configuration
132
+ */
133
+ declare function generateSchema(config: GeneratorConfig, options?: {
134
+ cwd?: string;
135
+ verbose?: boolean;
136
+ dryRun?: boolean;
137
+ }): Promise<GenerateResult>;
138
+
139
+ /**
140
+ * Output templates for PowerSync schema generation
141
+ */
142
+
143
+ /**
144
+ * File header template
145
+ */
146
+ declare function generateHeader(typesPath: string): string;
147
+ /**
148
+ * Generate the table definition for a parsed table
149
+ */
150
+ declare function generateTableDefinition(table: ParsedTable, columnDefs: string[]): string;
151
+ /**
152
+ * Generate the schema export section
153
+ */
154
+ declare function generateSchemaExport(tableNames: string[]): string;
155
+ /**
156
+ * Generate schema mapping utilities
157
+ */
158
+ declare function generateSchemaMapping(tables: ParsedTable[], schemas: string[]): string;
159
+ /**
160
+ * Generate the FK detection utility (helpful for consumers)
161
+ */
162
+ declare function generateFKUtility(): string;
163
+ /**
164
+ * Generate complete output file
165
+ */
166
+ declare function generateOutputFile(tables: ParsedTable[], tableDefs: string[], schemas: string[], typesPath: string): string;
167
+
168
+ export { type ColumnInfo, type ColumnMapping, DEFAULT_DECIMAL_PATTERNS, DEFAULT_SKIP_COLUMNS, type GenerateResult, type GeneratorConfig, type ParseOptions, type ParsedTable, type TableConfig, defineConfig, extractTableDef, generateColumnDefs, generateFKUtility, generateHeader, generateOutputFile, generateSchema, generateSchemaExport, generateSchemaMapping, generateTableDefinition, getAvailableSchemas, getTablesInSchema, mapTypeToPowerSync, parseRowType, parseTypesFile };
@@ -0,0 +1,370 @@
1
+ // src/generator/config.ts
2
+ function defineConfig(config) {
3
+ return config;
4
+ }
5
+ var DEFAULT_SKIP_COLUMNS = [
6
+ "id",
7
+ // PowerSync handles id automatically
8
+ // Legacy numeric ID columns - typically not needed after UUID migration
9
+ "legacyId"
10
+ ];
11
+ var DEFAULT_DECIMAL_PATTERNS = ["hours", "watts", "voltage", "rate", "amount", "price", "cost", "total"];
12
+
13
+ // src/generator/generator.ts
14
+ import * as fs from "fs";
15
+ import * as path from "path";
16
+
17
+ // src/generator/parser.ts
18
+ function parseRowType(tableContent, options) {
19
+ const columns = /* @__PURE__ */ new Map();
20
+ const rowMatch = tableContent.match(/Row:\s*\{([^}]+(?:\{[^}]*\}[^}]*)*)\}/s);
21
+ if (!rowMatch) return columns;
22
+ const rowContent = rowMatch[1];
23
+ const includePrimaryKey = options.syncPrimaryKey ?? options.includeId ?? false;
24
+ const columnRegex = /(\w+)\??:\s*([^,\n]+)/g;
25
+ let match;
26
+ while ((match = columnRegex.exec(rowContent)) !== null) {
27
+ const [, columnName, columnType] = match;
28
+ const shouldSkip = options.skipColumns.has(columnName) && !(includePrimaryKey && columnName === "id");
29
+ if (!shouldSkip) {
30
+ columns.set(columnName, columnType.trim());
31
+ }
32
+ }
33
+ return columns;
34
+ }
35
+ function extractTableDef(content, tableName, schema) {
36
+ const schemaRegex = new RegExp(`${schema}:\\s*\\{[\\s\\S]*?Tables:\\s*\\{`, "g");
37
+ const schemaMatch = schemaRegex.exec(content);
38
+ if (!schemaMatch) return null;
39
+ const startIndex = schemaMatch.index;
40
+ const tableRegex = new RegExp(`(?<![A-Za-z])${tableName}:\\s*\\{[\\s\\S]*?Row:\\s*\\{[\\s\\S]*?\\}[\\s\\S]*?Relationships:\\s*\\[[^\\]]*\\]\\s*\\}`, "g");
41
+ const searchContent = content.slice(startIndex);
42
+ const tableMatch = tableRegex.exec(searchContent);
43
+ return tableMatch ? tableMatch[0] : null;
44
+ }
45
+ function parseTypesFile(content, tables, skipColumns) {
46
+ const parsedTables = [];
47
+ for (const tableConfig of tables) {
48
+ const {
49
+ name,
50
+ schema = "public",
51
+ syncPrimaryKey,
52
+ includeId
53
+ } = tableConfig;
54
+ const tableDef = extractTableDef(content, name, schema);
55
+ if (!tableDef) {
56
+ continue;
57
+ }
58
+ const columns = parseRowType(tableDef, {
59
+ skipColumns,
60
+ syncPrimaryKey,
61
+ includeId
62
+ });
63
+ if (columns.size > 0) {
64
+ parsedTables.push({
65
+ name,
66
+ schema,
67
+ columns,
68
+ config: tableConfig
69
+ });
70
+ }
71
+ }
72
+ return parsedTables;
73
+ }
74
+ function getAvailableSchemas(content) {
75
+ const schemas = [];
76
+ const schemaRegex = /(\w+):\s*\{[\s\S]*?Tables:\s*\{/g;
77
+ let match;
78
+ while ((match = schemaRegex.exec(content)) !== null) {
79
+ schemas.push(match[1]);
80
+ }
81
+ return schemas;
82
+ }
83
+ function getTablesInSchema(content, schema) {
84
+ const tables = [];
85
+ const schemaRegex = new RegExp(`${schema}:\\s*\\{[\\s\\S]*?Tables:\\s*\\{([\\s\\S]*?)\\}\\s*Views:`, "g");
86
+ const schemaMatch = schemaRegex.exec(content);
87
+ if (!schemaMatch) return tables;
88
+ const tablesContent = schemaMatch[1];
89
+ const tableNameRegex = /^\s*(\w+):\s*\{/gm;
90
+ let match;
91
+ while ((match = tableNameRegex.exec(tablesContent)) !== null) {
92
+ tables.push(match[1]);
93
+ }
94
+ return tables;
95
+ }
96
+
97
+ // src/generator/templates.ts
98
+ function generateHeader(typesPath) {
99
+ return `/**
100
+ * PowerSync Schema Definition
101
+ *
102
+ * AUTO-GENERATED from ${typesPath}
103
+ * Run: npx @pol-studios/powersync generate-schema
104
+ *
105
+ * DO NOT EDIT MANUALLY - changes will be overwritten
106
+ */
107
+
108
+ import { column, Schema, Table } from "@powersync/react-native";
109
+ `;
110
+ }
111
+ function generateTableDefinition(table, columnDefs) {
112
+ if (columnDefs.length === 0) {
113
+ return `// ${table.name} - no syncable columns found`;
114
+ }
115
+ const optionsStr = table.config.trackMetadata ? ", { trackMetadata: true }" : "";
116
+ return `const ${table.name} = new Table({
117
+ ${columnDefs.join("\n")}
118
+ }${optionsStr});`;
119
+ }
120
+ function generateSchemaExport(tableNames) {
121
+ return `// ============================================================================
122
+ // SCHEMA EXPORT
123
+ // ============================================================================
124
+
125
+ // NOTE: photo_attachments is NOT included here.
126
+ // The AttachmentQueue from @powersync/attachments creates and manages
127
+ // its own internal SQLite table (not a view) during queue.init().
128
+ // This allows INSERT/UPDATE operations to work correctly.
129
+
130
+ export const AppSchema = new Schema({
131
+ ${tableNames.map((name) => ` ${name},`).join("\n")}
132
+ });
133
+
134
+ export type Database = (typeof AppSchema)["types"];`;
135
+ }
136
/**
 * Render the SCHEMA MAPPING section: one `<SCHEMA>_SCHEMA_TABLES` Set constant
 * per non-public schema that actually has tables, plus a `getTableSchema()`
 * helper the connector uses to pick the right Supabase schema per table.
 *
 * Fix: the generated `getTableSchema` return-type union now always contains
 * "public" and is de-duplicated. The generated function unconditionally falls
 * back to `return "public"`, so omitting "public" from `schemas` previously
 * produced generated TypeScript that did not type-check. For callers that
 * already pass a unique, "public"-first list the output is unchanged.
 *
 * @param tables  parsed tables ({ name, schema, ... }) included in the output
 * @param schemas all schema names referenced by the configuration
 * @returns the section source as a single string
 */
function generateSchemaMapping(tables, schemas) {
  // Bucket table names by non-public schema (public needs no mapping).
  const schemaGroups = new Map();
  for (const schema of schemas) {
    if (schema !== "public") {
      schemaGroups.set(schema, []);
    }
  }
  for (const table of tables) {
    if (table.schema !== "public" && schemaGroups.has(table.schema)) {
      schemaGroups.get(table.schema).push(table.name);
    }
  }
  const sections = [`// ============================================================================
// SCHEMA MAPPING FOR CONNECTOR
// ============================================================================`];
  // One exported Set constant per schema that actually has tables.
  for (const [schema, tableNames] of schemaGroups) {
    if (tableNames.length > 0) {
      const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;
      sections.push(`
// Tables in the '${schema}' schema (need .schema('${schema}') in Supabase queries)
export const ${constName} = new Set([
${tableNames.map((name) => ` "${name}",`).join("\n")}
]);`);
    }
  }
  // One membership check per non-empty schema inside the generated helper.
  const schemaChecks = Array.from(schemaGroups.entries()).filter(([, names]) => names.length > 0).map(([schema]) => {
    const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;
    return ` if (${constName}.has(tableName)) return "${schema}";`;
  });
  if (schemaChecks.length > 0) {
    // Always include "public" (the fallback return value) in the union and
    // de-duplicate so repeated schema names do not repeat in the type.
    const unionMembers = [...new Set(["public", ...schemas])].map((s) => `"${s}"`).join(" | ");
    sections.push(`
/**
 * Get the Supabase schema for a table
 */
export function getTableSchema(tableName: string): ${unionMembers} {
${schemaChecks.join("\n")}
  return "public";
}`);
  } else {
    sections.push(`
/**
 * Get the Supabase schema for a table
 */
export function getTableSchema(tableName: string): "public" {
  return "public";
}`);
  }
  return sections.join("\n");
}
185
/**
 * Render the FOREIGN KEY UTILITIES section: convention-based helpers emitted
 * into the generated file (columns ending in 'Id' are treated as FKs whose
 * base name, capitalized, is the referenced table).
 *
 * @returns the section source as a single string
 */
function generateFKUtility() {
  const section = `
// ============================================================================
// FOREIGN KEY UTILITIES
// ============================================================================

/**
 * Check if a column name represents a foreign key reference
 * Convention: columns ending in 'Id' are foreign keys (e.g., projectId -> Project table)
 */
export function isForeignKeyColumn(columnName: string): boolean {
  return columnName.endsWith('Id') && columnName !== 'id';
}

/**
 * Get the referenced table name from a foreign key column
 * e.g., 'projectId' -> 'Project', 'equipmentFixtureUnitId' -> 'EquipmentFixtureUnit'
 */
export function getForeignKeyTable(columnName: string): string | null {
  if (!isForeignKeyColumn(columnName)) return null;
  // Remove 'Id' suffix and capitalize first letter
  const baseName = columnName.slice(0, -2);
  return baseName.charAt(0).toUpperCase() + baseName.slice(1);
}`;
  return section;
}
210
/**
 * Assemble the complete generated-file content: header, per-table Table
 * definitions, the AppSchema export, the connector schema mapping, and the
 * FK utilities, in that order.
 *
 * @param tables    parsed tables that produced a definition
 * @param tableDefs rendered Table definition snippets
 * @param schemas   schema names referenced by the configuration
 * @param typesPath path recorded in the generated header
 * @returns the full file content as a string
 */
function generateOutputFile(tables, tableDefs, schemas, typesPath) {
  const tableNames = tables.map((t) => t.name);
  // 'public' always leads (and appears once in) the list handed to the mapper.
  const orderedSchemas = ["public", ...schemas.filter((s) => s !== "public")];
  const banner = "// ============================================================================";
  return `${generateHeader(typesPath)}

${banner}
// TABLE DEFINITIONS
${banner}

${tableDefs.join("\n\n")}

${generateSchemaExport(tableNames)}

${generateSchemaMapping(tables, orderedSchemas)}
${generateFKUtility()}
`;
}
226
+
227
+ // src/generator/generator.ts
228
/**
 * Map a Supabase TypeScript column type to a PowerSync column type.
 *
 * Returns null for types SQLite cannot hold directly (Json, unknown, inline
 * object types, arrays). Booleans map to integer (stored as 0/1); numbers map
 * to real when the column name matches a decimal pattern, otherwise integer;
 * strings, generated enum references, and anything unrecognized map to text.
 *
 * @param tsType          the TypeScript type text from database.types.ts
 * @param columnName      column name, matched against decimalPatterns
 * @param decimalPatterns case-insensitive substrings marking decimal columns
 * @returns a column mapping, or null when the column cannot be synced
 */
function mapTypeToPowerSync(tsType, columnName, decimalPatterns) {
  // Strip nullability; PowerSync columns are nullable by default.
  const cleanType = tsType.trim().replace(/\s*\|\s*null/g, "");
  // Complex/array types have no direct SQLite representation.
  const unsupported = ["Json", "unknown", "{", "[]"];
  if (unsupported.some((marker) => cleanType.includes(marker))) {
    return null;
  }
  switch (cleanType) {
    case "boolean":
      // SQLite has no boolean type; stored as 0/1.
      return { type: "column.integer", isBoolean: true };
    case "number": {
      const lower = columnName.toLowerCase();
      const isDecimal = decimalPatterns.some((p) => lower.includes(p.toLowerCase()));
      return { type: isDecimal ? "column.real" : "column.integer" };
    }
    case "string":
      return { type: "column.text" };
  }
  // Generated enum references (Database[...]["Enums"][...]) are stored as text.
  if (cleanType.includes("Database[") && cleanType.includes("Enums")) {
    return { type: "column.text", isEnum: true };
  }
  // Default to text for unknown types (likely enums or other string-like types).
  return { type: "column.text" };
}
267
/**
 * Build the indented column lines for one table definition, skipping any
 * column whose type cannot be synced (mapTypeToPowerSync returns null).
 * Boolean and enum columns get a trailing comment noting their encoding.
 *
 * @param table           parsed table with a `columns` Map of name -> TS type
 * @param decimalPatterns substrings marking number columns as decimals
 * @returns one formatted source line per syncable column
 */
function generateColumnDefs(table, decimalPatterns) {
  const defs = [];
  for (const [name, tsType] of table.columns) {
    const mapping = mapTypeToPowerSync(tsType, name, decimalPatterns);
    if (!mapping) continue;
    // Annotate encodings that are not obvious from the SQLite type alone.
    let note = "";
    if (mapping.isBoolean) {
      note = " // boolean stored as 0/1";
    } else if (mapping.isEnum) {
      note = " // enum stored as text";
    }
    defs.push(` ${name}: ${mapping.type},${note}`);
  }
  return defs;
}
283
/**
 * Generate a PowerSync schema file from a Supabase database.types.ts file.
 *
 * Resolves typesPath/outputPath against options.cwd, parses the configured
 * tables out of the types file, renders Table definitions plus the schema
 * mapping and FK helper sections, and writes the result to outputPath —
 * unless options.dryRun is set, in which case the generated content is
 * returned on `result.output` without touching the filesystem.
 *
 * @param config  generator configuration (typesPath, outputPath, tables,
 *                skipColumns, decimalPatterns)
 * @param options optional { cwd, verbose, dryRun }
 * @returns result object: { success, tablesGenerated, outputPath, errors,
 *          warnings, output? } — failures are reported via `errors`, never
 *          thrown
 */
async function generateSchema(config, options) {
  const cwd = options?.cwd ?? process.cwd();
  const verbose = options?.verbose ?? false;
  const dryRun = options?.dryRun ?? false;
  const result = {
    success: false,
    tablesGenerated: 0,
    outputPath: "",
    errors: [],
    warnings: []
  };
  // Resolve paths relative to cwd (absolute paths are taken as-is).
  const typesPath = path.isAbsolute(config.typesPath) ? config.typesPath : path.resolve(cwd, config.typesPath);
  const outputPath = path.isAbsolute(config.outputPath) ? config.outputPath : path.resolve(cwd, config.outputPath);
  result.outputPath = outputPath;
  // Bail out early if the Supabase types file does not exist.
  if (!fs.existsSync(typesPath)) {
    result.errors.push(`Types file not found: ${typesPath}`);
    return result;
  }
  if (verbose) {
    console.log(`Reading types from: ${typesPath}`);
  }
  const typesContent = fs.readFileSync(typesPath, "utf-8");
  // Merge defaults with user-configured skip columns and decimal patterns.
  const skipColumns = /* @__PURE__ */ new Set([...DEFAULT_SKIP_COLUMNS, ...config.skipColumns ?? []]);
  const decimalPatterns = [...DEFAULT_DECIMAL_PATTERNS, ...config.decimalPatterns ?? []];
  const parsedTables = parseTypesFile(typesContent, config.tables, skipColumns);
  // Warn about configured tables that were not found in the types file.
  for (const tableConfig of config.tables) {
    const found = parsedTables.some((t) => t.name === tableConfig.name);
    if (!found) {
      result.warnings.push(`Table '${tableConfig.name}' not found in schema '${tableConfig.schema ?? "public"}'`);
    }
  }
  if (parsedTables.length === 0) {
    result.errors.push("No tables were parsed successfully");
    return result;
  }
  // Render one Table definition per parsed table; tables whose columns all
  // get filtered out are skipped with a warning.
  const tableDefs = [];
  for (const table of parsedTables) {
    if (verbose) {
      // includeId is the deprecated alias of syncPrimaryKey.
      const syncPK = table.config.syncPrimaryKey || table.config.includeId;
      console.log(`Processing ${table.schema}.${table.name} (${table.columns.size} columns)${table.config.trackMetadata ? " [trackMetadata]" : ""}${syncPK ? " [syncPrimaryKey]" : ""}`);
    }
    const columnDefs = generateColumnDefs(table, decimalPatterns);
    if (columnDefs.length === 0) {
      result.warnings.push(`Table '${table.name}' has no syncable columns`);
      continue;
    }
    tableDefs.push(generateTableDefinition(table, columnDefs));
  }
  // Collect the unique schemas referenced by the configured tables.
  const schemas = [...new Set(config.tables.map((t) => t.schema ?? "public"))];
  // The generated header records the types path relative to cwd.
  const relativePath = path.relative(cwd, typesPath);
  // Only tables that actually produced a `const <Name> =` definition are
  // forwarded to the output assembler.
  const output = generateOutputFile(parsedTables.filter((t) => tableDefs.some((def) => def.includes(`const ${t.name} =`))), tableDefs, schemas, relativePath);
  // Dry-run: return the generated content without writing anything.
  if (dryRun) {
    result.success = true;
    result.tablesGenerated = tableDefs.length;
    result.output = output;
    return result;
  }
  // Ensure the output directory exists before writing.
  const outputDir = path.dirname(outputPath);
  if (!fs.existsSync(outputDir)) {
    fs.mkdirSync(outputDir, {
      recursive: true
    });
  }
  fs.writeFileSync(outputPath, output);
  result.success = true;
  result.tablesGenerated = tableDefs.length;
  return result;
}
351
// Public API of the generator bundle (re-exported from the bundled
// config/parser/templates/generator modules).
export {
  DEFAULT_DECIMAL_PATTERNS,
  DEFAULT_SKIP_COLUMNS,
  defineConfig,
  extractTableDef,
  generateColumnDefs,
  generateFKUtility,
  generateHeader,
  generateOutputFile,
  generateSchema,
  generateSchemaExport,
  generateSchemaMapping,
  generateTableDefinition,
  getAvailableSchemas,
  getTablesInSchema,
  mapTypeToPowerSync,
  parseRowType,
  parseTypesFile
};
//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../src/generator/config.ts","../../src/generator/generator.ts","../../src/generator/parser.ts","../../src/generator/templates.ts"],"sourcesContent":["/**\n * Configuration types and helpers for PowerSync schema generator\n */\n\nexport interface TableConfig {\n /** Table name (PascalCase as it appears in database.types.ts) */\n name: string;\n /** Schema name (defaults to 'public') */\n schema?: string;\n /** Enable ps_crud timestamp tracking for optimistic UI updates */\n trackMetadata?: boolean;\n /**\n * Sync the primary key column (normally skipped as PowerSync handles it internally).\n *\n * Use this for tables with integer PKs that are referenced by FKs in other tables.\n * Example: `Group.id` is referenced by `UserGroup.groupId`, so Group needs `syncPrimaryKey: true`\n * to ensure the integer ID is available for client-side joins.\n */\n syncPrimaryKey?: boolean;\n /** @deprecated Use `syncPrimaryKey` instead */\n includeId?: boolean;\n /** Columns to skip for this specific table (in addition to global skipColumns) */\n skipColumns?: string[];\n /** Only include these columns (overrides skipColumns if specified) */\n onlyColumns?: string[];\n}\nexport interface GeneratorConfig {\n /** Path to Supabase-generated database.types.ts file */\n typesPath: string;\n /** Output path for generated PowerSync schema */\n outputPath: string;\n /** Tables to include in the PowerSync schema */\n tables: TableConfig[];\n /** Columns to always skip (in addition to defaults like 'id') */\n skipColumns?: string[];\n /** Column name patterns that should use column.real for decimal values */\n decimalPatterns?: string[];\n /** Additional schemas to track (besides 'public' which is the default) */\n schemas?: string[];\n}\n\n/**\n * Define a PowerSync generator configuration with type safety\n */\nexport function defineConfig(config: GeneratorConfig): GeneratorConfig {\n return config;\n}\n\n/**\n * Default columns that are skipped during 
generation\n */\nexport const DEFAULT_SKIP_COLUMNS = ['id',\n// PowerSync handles id automatically\n// Legacy numeric ID columns - typically not needed after UUID migration\n'legacyId'];\n\n/**\n * Default column name patterns that indicate decimal values\n */\nexport const DEFAULT_DECIMAL_PATTERNS = ['hours', 'watts', 'voltage', 'rate', 'amount', 'price', 'cost', 'total'];","/**\n * PowerSync schema generator\n *\n * Converts Supabase database.types.ts into PowerSync schema definitions\n */\n\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport type { GeneratorConfig } from './config.js';\nimport { DEFAULT_SKIP_COLUMNS, DEFAULT_DECIMAL_PATTERNS } from './config.js';\nimport { parseTypesFile, type ParsedTable } from './parser.js';\nimport { generateTableDefinition, generateOutputFile } from './templates.js';\nexport interface ColumnMapping {\n type: 'column.text' | 'column.integer' | 'column.real';\n isEnum?: boolean;\n isBoolean?: boolean;\n}\nexport interface GenerateResult {\n success: boolean;\n tablesGenerated: number;\n outputPath: string;\n errors: string[];\n warnings: string[];\n /** Generated output (included when dryRun is true) */\n output?: string;\n}\n\n/**\n * Map TypeScript types to PowerSync column types\n */\nexport function mapTypeToPowerSync(tsType: string, columnName: string, decimalPatterns: string[]): ColumnMapping | null {\n // Clean up the type (remove nullability)\n const cleanType = tsType.trim().replace(/\\s*\\|\\s*null/g, '');\n\n // Skip complex types that can't be stored in SQLite\n if (cleanType.includes('Json') || cleanType.includes('unknown') || cleanType.includes('{')) {\n return null;\n }\n\n // Array types - skip\n if (cleanType.includes('[]')) {\n return null;\n }\n\n // Boolean -> integer (0/1)\n if (cleanType === 'boolean') {\n return {\n type: 'column.integer',\n isBoolean: true\n };\n }\n\n // Number types\n if (cleanType === 'number') {\n // Use real for columns that might have decimals\n if 
(decimalPatterns.some(pattern => columnName.toLowerCase().includes(pattern.toLowerCase()))) {\n return {\n type: 'column.real'\n };\n }\n return {\n type: 'column.integer'\n };\n }\n\n // String types\n if (cleanType === 'string') {\n return {\n type: 'column.text'\n };\n }\n\n // Enum types (Database[\"schema\"][\"Enums\"][\"EnumName\"]) -> store as text\n if (cleanType.includes('Database[') && cleanType.includes('Enums')) {\n return {\n type: 'column.text',\n isEnum: true\n };\n }\n\n // Default to text for unknown types (likely enums or other string-like types)\n return {\n type: 'column.text'\n };\n}\n\n/**\n * Generate column definitions for a table\n */\nexport function generateColumnDefs(table: ParsedTable, decimalPatterns: string[]): string[] {\n const columnDefs: string[] = [];\n for (const [columnName, tsType] of table.columns) {\n const mapping = mapTypeToPowerSync(tsType, columnName, decimalPatterns);\n if (mapping) {\n // Add comment for boolean and enum columns\n let comment = '';\n if (mapping.isBoolean) {\n comment = ' // boolean stored as 0/1';\n } else if (mapping.isEnum) {\n comment = ' // enum stored as text';\n }\n columnDefs.push(` ${columnName}: ${mapping.type},${comment}`);\n }\n }\n return columnDefs;\n}\n\n/**\n * Generate PowerSync schema from configuration\n */\nexport async function generateSchema(config: GeneratorConfig, options?: {\n cwd?: string;\n verbose?: boolean;\n dryRun?: boolean;\n}): Promise<GenerateResult> {\n const cwd = options?.cwd ?? process.cwd();\n const verbose = options?.verbose ?? false;\n const dryRun = options?.dryRun ?? false;\n const result: GenerateResult = {\n success: false,\n tablesGenerated: 0,\n outputPath: '',\n errors: [],\n warnings: []\n };\n\n // Resolve paths relative to cwd\n const typesPath = path.isAbsolute(config.typesPath) ? config.typesPath : path.resolve(cwd, config.typesPath);\n const outputPath = path.isAbsolute(config.outputPath) ? 
config.outputPath : path.resolve(cwd, config.outputPath);\n result.outputPath = outputPath;\n\n // Check if types file exists\n if (!fs.existsSync(typesPath)) {\n result.errors.push(`Types file not found: ${typesPath}`);\n return result;\n }\n\n // Read types file\n if (verbose) {\n console.log(`Reading types from: ${typesPath}`);\n }\n const typesContent = fs.readFileSync(typesPath, 'utf-8');\n\n // Build skip columns set\n const skipColumns = new Set([...DEFAULT_SKIP_COLUMNS, ...(config.skipColumns ?? [])]);\n\n // Build decimal patterns\n const decimalPatterns = [...DEFAULT_DECIMAL_PATTERNS, ...(config.decimalPatterns ?? [])];\n\n // Parse tables from types file\n const parsedTables = parseTypesFile(typesContent, config.tables, skipColumns);\n\n // Check for tables that weren't found\n for (const tableConfig of config.tables) {\n const found = parsedTables.some(t => t.name === tableConfig.name);\n if (!found) {\n result.warnings.push(`Table '${tableConfig.name}' not found in schema '${tableConfig.schema ?? 'public'}'`);\n }\n }\n if (parsedTables.length === 0) {\n result.errors.push('No tables were parsed successfully');\n return result;\n }\n\n // Generate table definitions\n const tableDefs: string[] = [];\n for (const table of parsedTables) {\n if (verbose) {\n const syncPK = table.config.syncPrimaryKey || table.config.includeId;\n console.log(`Processing ${table.schema}.${table.name} (${table.columns.size} columns)${table.config.trackMetadata ? ' [trackMetadata]' : ''}${syncPK ? ' [syncPrimaryKey]' : ''}`);\n }\n const columnDefs = generateColumnDefs(table, decimalPatterns);\n if (columnDefs.length === 0) {\n result.warnings.push(`Table '${table.name}' has no syncable columns`);\n continue;\n }\n tableDefs.push(generateTableDefinition(table, columnDefs));\n }\n\n // Collect unique schemas\n const schemas = [...new Set(config.tables.map(t => t.schema ?? 
'public'))];\n\n // Generate output file content\n const relativePath = path.relative(cwd, typesPath);\n const output = generateOutputFile(parsedTables.filter(t => tableDefs.some(def => def.includes(`const ${t.name} =`))), tableDefs, schemas, relativePath);\n\n // If dry-run, return output without writing\n if (dryRun) {\n result.success = true;\n result.tablesGenerated = tableDefs.length;\n result.output = output;\n return result;\n }\n\n // Ensure output directory exists\n const outputDir = path.dirname(outputPath);\n if (!fs.existsSync(outputDir)) {\n fs.mkdirSync(outputDir, {\n recursive: true\n });\n }\n\n // Write output file\n fs.writeFileSync(outputPath, output);\n result.success = true;\n result.tablesGenerated = tableDefs.length;\n return result;\n}","/**\n * Parser for Supabase database.types.ts files\n *\n * Extracts table definitions and column types from the generated TypeScript types\n */\n\nimport type { TableConfig } from './config.js';\nexport interface ColumnInfo {\n name: string;\n tsType: string;\n isNullable: boolean;\n}\nexport interface ParsedTable {\n name: string;\n schema: string;\n columns: Map<string, string>;\n config: TableConfig;\n}\nexport interface ParseOptions {\n /** Columns to skip */\n skipColumns: Set<string>;\n /**\n * Include the id column (normally skipped).\n * Use for tables with integer PKs referenced by FKs in other tables.\n */\n syncPrimaryKey?: boolean;\n /** @deprecated Use `syncPrimaryKey` instead */\n includeId?: boolean;\n}\n\n/**\n * Parse the Row type from a table definition and extract columns\n */\nexport function parseRowType(tableContent: string, options: ParseOptions): Map<string, string> {\n const columns = new Map<string, string>();\n\n // Find the Row block - handles nested braces in type definitions\n const rowMatch = tableContent.match(/Row:\\s*\\{([^}]+(?:\\{[^}]*\\}[^}]*)*)\\}/s);\n if (!rowMatch) return columns;\n const rowContent = rowMatch[1];\n\n // syncPrimaryKey takes precedence, with 
includeId as fallback for backwards compat\n const includePrimaryKey = options.syncPrimaryKey ?? options.includeId ?? false;\n\n // Parse each column: \"columnName: type\" or \"columnName?: type\"\n const columnRegex = /(\\w+)\\??:\\s*([^,\\n]+)/g;\n let match;\n while ((match = columnRegex.exec(rowContent)) !== null) {\n const [, columnName, columnType] = match;\n // Skip columns unless syncPrimaryKey is true for id column\n const shouldSkip = options.skipColumns.has(columnName) && !(includePrimaryKey && columnName === 'id');\n if (!shouldSkip) {\n columns.set(columnName, columnType.trim());\n }\n }\n return columns;\n}\n\n/**\n * Extract a table definition from the database.types.ts content\n */\nexport function extractTableDef(content: string, tableName: string, schema: string): string | null {\n // Find the schema section\n const schemaRegex = new RegExp(`${schema}:\\\\s*\\\\{[\\\\s\\\\S]*?Tables:\\\\s*\\\\{`, 'g');\n const schemaMatch = schemaRegex.exec(content);\n if (!schemaMatch) return null;\n const startIndex = schemaMatch.index;\n\n // Find this specific table within the schema\n // Use negative lookbehind (?<![A-Za-z]) to avoid matching table names that are\n // substrings of other names (e.g., \"Tag\" in \"CommentTag\")\n const tableRegex = new RegExp(`(?<![A-Za-z])${tableName}:\\\\s*\\\\{[\\\\s\\\\S]*?Row:\\\\s*\\\\{[\\\\s\\\\S]*?\\\\}[\\\\s\\\\S]*?Relationships:\\\\s*\\\\[[^\\\\]]*\\\\]\\\\s*\\\\}`, 'g');\n\n // Search from the schema start\n const searchContent = content.slice(startIndex);\n const tableMatch = tableRegex.exec(searchContent);\n return tableMatch ? 
tableMatch[0] : null;\n}\n\n/**\n * Parse a database.types.ts file and extract specified tables\n */\nexport function parseTypesFile(content: string, tables: TableConfig[], skipColumns: Set<string>): ParsedTable[] {\n const parsedTables: ParsedTable[] = [];\n for (const tableConfig of tables) {\n const {\n name,\n schema = 'public',\n syncPrimaryKey,\n includeId\n } = tableConfig;\n const tableDef = extractTableDef(content, name, schema);\n if (!tableDef) {\n continue;\n }\n const columns = parseRowType(tableDef, {\n skipColumns,\n syncPrimaryKey,\n includeId\n });\n if (columns.size > 0) {\n parsedTables.push({\n name,\n schema,\n columns,\n config: tableConfig\n });\n }\n }\n return parsedTables;\n}\n\n/**\n * Get all available schemas from the types file\n */\nexport function getAvailableSchemas(content: string): string[] {\n const schemas: string[] = [];\n const schemaRegex = /(\\w+):\\s*\\{[\\s\\S]*?Tables:\\s*\\{/g;\n let match;\n while ((match = schemaRegex.exec(content)) !== null) {\n schemas.push(match[1]);\n }\n return schemas;\n}\n\n/**\n * Get all table names in a schema\n */\nexport function getTablesInSchema(content: string, schema: string): string[] {\n const tables: string[] = [];\n\n // Find the schema section\n const schemaRegex = new RegExp(`${schema}:\\\\s*\\\\{[\\\\s\\\\S]*?Tables:\\\\s*\\\\{([\\\\s\\\\S]*?)\\\\}\\\\s*Views:`, 'g');\n const schemaMatch = schemaRegex.exec(content);\n if (!schemaMatch) return tables;\n const tablesContent = schemaMatch[1];\n\n // Find table names (they're at the start of each table definition)\n const tableNameRegex = /^\\s*(\\w+):\\s*\\{/gm;\n let match;\n while ((match = tableNameRegex.exec(tablesContent)) !== null) {\n tables.push(match[1]);\n }\n return tables;\n}","/**\n * Output templates for PowerSync schema generation\n */\n\nimport type { ParsedTable } from './parser.js';\n\n/**\n * File header template\n */\nexport function generateHeader(typesPath: string): string {\n return `/**\n * PowerSync Schema 
Definition\n *\n * AUTO-GENERATED from ${typesPath}\n * Run: npx @pol-studios/powersync generate-schema\n *\n * DO NOT EDIT MANUALLY - changes will be overwritten\n */\n\nimport { column, Schema, Table } from \"@powersync/react-native\";\n`;\n}\n\n/**\n * Generate the table definition for a parsed table\n */\nexport function generateTableDefinition(table: ParsedTable, columnDefs: string[]): string {\n if (columnDefs.length === 0) {\n return `// ${table.name} - no syncable columns found`;\n }\n const optionsStr = table.config.trackMetadata ? ', { trackMetadata: true }' : '';\n return `const ${table.name} = new Table({\n${columnDefs.join('\\n')}\n}${optionsStr});`;\n}\n\n/**\n * Generate the schema export section\n */\nexport function generateSchemaExport(tableNames: string[]): string {\n return `// ============================================================================\n// SCHEMA EXPORT\n// ============================================================================\n\n// NOTE: photo_attachments is NOT included here.\n// The AttachmentQueue from @powersync/attachments creates and manages\n// its own internal SQLite table (not a view) during queue.init().\n// This allows INSERT/UPDATE operations to work correctly.\n\nexport const AppSchema = new Schema({\n${tableNames.map(name => ` ${name},`).join('\\n')}\n});\n\nexport type Database = (typeof AppSchema)[\"types\"];`;\n}\n\n/**\n * Generate schema mapping utilities\n */\nexport function generateSchemaMapping(tables: ParsedTable[], schemas: string[]): string {\n // Group tables by non-public schemas\n const schemaGroups = new Map<string, string[]>();\n for (const schema of schemas) {\n if (schema !== 'public') {\n schemaGroups.set(schema, []);\n }\n }\n for (const table of tables) {\n if (table.schema !== 'public' && schemaGroups.has(table.schema)) {\n schemaGroups.get(table.schema)!.push(table.name);\n }\n }\n const sections: string[] = [`// 
============================================================================\n// SCHEMA MAPPING FOR CONNECTOR\n// ============================================================================`];\n\n // Generate constants for each non-public schema\n for (const [schema, tableNames] of schemaGroups) {\n if (tableNames.length > 0) {\n const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;\n sections.push(`\n// Tables in the '${schema}' schema (need .schema('${schema}') in Supabase queries)\nexport const ${constName} = new Set([\n${tableNames.map(name => ` \"${name}\",`).join('\\n')}\n]);`);\n }\n }\n\n // Generate helper function\n const schemaChecks = Array.from(schemaGroups.entries()).filter(([, names]) => names.length > 0).map(([schema]) => {\n const constName = `${schema.toUpperCase()}_SCHEMA_TABLES`;\n return ` if (${constName}.has(tableName)) return \"${schema}\";`;\n });\n if (schemaChecks.length > 0) {\n sections.push(`\n/**\n * Get the Supabase schema for a table\n */\nexport function getTableSchema(tableName: string): ${schemas.map(s => `\"${s}\"`).join(' | ')} {\n${schemaChecks.join('\\n')}\n return \"public\";\n}`);\n } else {\n sections.push(`\n/**\n * Get the Supabase schema for a table\n */\nexport function getTableSchema(tableName: string): \"public\" {\n return \"public\";\n}`);\n }\n return sections.join('\\n');\n}\n\n/**\n * Generate the FK detection utility (helpful for consumers)\n */\nexport function generateFKUtility(): string {\n return `\n// ============================================================================\n// FOREIGN KEY UTILITIES\n// ============================================================================\n\n/**\n * Check if a column name represents a foreign key reference\n * Convention: columns ending in 'Id' are foreign keys (e.g., projectId -> Project table)\n */\nexport function isForeignKeyColumn(columnName: string): boolean {\n return columnName.endsWith('Id') && columnName !== 'id';\n}\n\n/**\n * Get the referenced 
table name from a foreign key column\n * e.g., 'projectId' -> 'Project', 'equipmentFixtureUnitId' -> 'EquipmentFixtureUnit'\n */\nexport function getForeignKeyTable(columnName: string): string | null {\n if (!isForeignKeyColumn(columnName)) return null;\n // Remove 'Id' suffix and capitalize first letter\n const baseName = columnName.slice(0, -2);\n return baseName.charAt(0).toUpperCase() + baseName.slice(1);\n}`;\n}\n\n/**\n * Generate complete output file\n */\nexport function generateOutputFile(tables: ParsedTable[], tableDefs: string[], schemas: string[], typesPath: string): string {\n const tableNames = tables.map(t => t.name);\n return `${generateHeader(typesPath)}\n\n// ============================================================================\n// TABLE DEFINITIONS\n// ============================================================================\n\n${tableDefs.join('\\n\\n')}\n\n${generateSchemaExport(tableNames)}\n\n${generateSchemaMapping(tables, ['public', ...schemas.filter(s => s !== 
'public')])}\n${generateFKUtility()}\n`;\n}"],"mappings":";AA4CO,SAAS,aAAa,QAA0C;AACrE,SAAO;AACT;AAKO,IAAM,uBAAuB;AAAA,EAAC;AAAA;AAAA;AAAA,EAGrC;AAAU;AAKH,IAAM,2BAA2B,CAAC,SAAS,SAAS,WAAW,QAAQ,UAAU,SAAS,QAAQ,OAAO;;;ACrDhH,YAAY,QAAQ;AACpB,YAAY,UAAU;;;AC0Bf,SAAS,aAAa,cAAsB,SAA4C;AAC7F,QAAM,UAAU,oBAAI,IAAoB;AAGxC,QAAM,WAAW,aAAa,MAAM,wCAAwC;AAC5E,MAAI,CAAC,SAAU,QAAO;AACtB,QAAM,aAAa,SAAS,CAAC;AAG7B,QAAM,oBAAoB,QAAQ,kBAAkB,QAAQ,aAAa;AAGzE,QAAM,cAAc;AACpB,MAAI;AACJ,UAAQ,QAAQ,YAAY,KAAK,UAAU,OAAO,MAAM;AACtD,UAAM,CAAC,EAAE,YAAY,UAAU,IAAI;AAEnC,UAAM,aAAa,QAAQ,YAAY,IAAI,UAAU,KAAK,EAAE,qBAAqB,eAAe;AAChG,QAAI,CAAC,YAAY;AACf,cAAQ,IAAI,YAAY,WAAW,KAAK,CAAC;AAAA,IAC3C;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,gBAAgB,SAAiB,WAAmB,QAA+B;AAEjG,QAAM,cAAc,IAAI,OAAO,GAAG,MAAM,oCAAoC,GAAG;AAC/E,QAAM,cAAc,YAAY,KAAK,OAAO;AAC5C,MAAI,CAAC,YAAa,QAAO;AACzB,QAAM,aAAa,YAAY;AAK/B,QAAM,aAAa,IAAI,OAAO,gBAAgB,SAAS,8FAA8F,GAAG;AAGxJ,QAAM,gBAAgB,QAAQ,MAAM,UAAU;AAC9C,QAAM,aAAa,WAAW,KAAK,aAAa;AAChD,SAAO,aAAa,WAAW,CAAC,IAAI;AACtC;AAKO,SAAS,eAAe,SAAiB,QAAuB,aAAyC;AAC9G,QAAM,eAA8B,CAAC;AACrC,aAAW,eAAe,QAAQ;AAChC,UAAM;AAAA,MACJ;AAAA,MACA,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF,IAAI;AACJ,UAAM,WAAW,gBAAgB,SAAS,MAAM,MAAM;AACtD,QAAI,CAAC,UAAU;AACb;AAAA,IACF;AACA,UAAM,UAAU,aAAa,UAAU;AAAA,MACrC;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,QAAI,QAAQ,OAAO,GAAG;AACpB,mBAAa,KAAK;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,EACF;AACA,SAAO;AACT;AAKO,SAAS,oBAAoB,SAA2B;AAC7D,QAAM,UAAoB,CAAC;AAC3B,QAAM,cAAc;AACpB,MAAI;AACJ,UAAQ,QAAQ,YAAY,KAAK,OAAO,OAAO,MAAM;AACnD,YAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,EACvB;AACA,SAAO;AACT;AAKO,SAAS,kBAAkB,SAAiB,QAA0B;AAC3E,QAAM,SAAmB,CAAC;AAG1B,QAAM,cAAc,IAAI,OAAO,GAAG,MAAM,6DAA6D,GAAG;AACxG,QAAM,cAAc,YAAY,KAAK,OAAO;AAC5C,MAAI,CAAC,YAAa,QAAO;AACzB,QAAM,gBAAgB,YAAY,CAAC;AAGnC,QAAM,iBAAiB;AACvB,MAAI;AACJ,UAAQ,QAAQ,eAAe,KAAK,aAAa,OAAO,MAAM;AAC5D,WAAO,KAAK,MAAM,CAAC,CAAC;AAAA,EACtB;AACA,SAAO;AACT;;;ACvIO,SAAS,eAAe,WAA2B;AACxD,SAAO;AAAA;AAAA;AAAA,yBAGgB,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAQlC;AAKO,SAAS,wBAAwB,OAAoB,YAA8B;AACxF,MAAI,WAAW,WAAW,GAAG;AAC3B,WAAO,MAAM,MAAM,IAAI;AAAA,EACzB;AACA,QAAM,aAAa,MAAM,OAAO,gBAAgB,8BAA8B;AAC9E,SAAO,SAAS,MAAM,IAAI;AAAA,EAC1B,WAAW,KAAK,IAAI,CAAC;AAAA,GACpB,UAAU;AACb;AAKO,SAAS,qBAAqB,YAA8B;AACjE,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUP,WAAW,IAAI,UAAQ,KAAK,IAAI,GAAG,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA;AAAA;AAIjD;AAKO,SAAS,sBAAsB,QAAuB,SAA2B;AAEtF,QAAM,eAAe,oBAAI,IAAsB;AAC/C,aAAW,UAAU,SAAS;AAC5B,QAAI,WAAW,UAAU;AACvB,mBAAa,IAAI,QAAQ,CAAC,CAAC;AAAA,IAC7B;AAAA,EACF;AACA,aAAW,SAAS,QAAQ;AAC1B,QAAI,MAAM,WAAW,YAAY,aAAa,IAAI,MAAM,MAAM,GAAG;AAC/D,mBAAa,IAAI,MAAM,MAAM,EAAG,KAAK,MAAM,IAAI;AAAA,IACjD;AAAA,EACF;AACA,QAAM,WAAqB,CAAC;AAAA;AAAA,gFAEkD;AAG9E,aAAW,CAAC,QAAQ,UAAU,KAAK,cAAc;AAC/C,QAAI,WAAW,SAAS,GAAG;AACzB,YAAM,YAAY,GAAG,OAAO,YAAY,CAAC;AACzC,eAAS,KAAK;AAAA,oBACA,MAAM,2BAA2B,MAAM;AAAA,eAC5C,SAAS;AAAA,EACtB,WAAW,IAAI,UAAQ,MAAM,IAAI,IAAI,EAAE,KAAK,IAAI,CAAC;AAAA,IAC/C;AAAA,IACA;AAAA,EACF;AAGA,QAAM,eAAe,MAAM,KAAK,aAAa,QAAQ,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,MAAM,SAAS,CAAC,EAAE,IAAI,CAAC,CAAC,MAAM,MAAM;AAChH,UAAM,YAAY,GAAG,OAAO,YAAY,CAAC;AACzC,WAAO,SAAS,SAAS,4BAA4B,MAAM;AAAA,EAC7D,CAAC;AACD,MAAI,aAAa,SAAS,GAAG;AAC3B,aAAS,KAAK;AAAA;AAAA;AAAA;AAAA,qDAImC,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,KAAK,CAAC;AAAA,EACzF,aAAa,KAAK,IAAI,CAAC;AAAA;AAAA,EAEvB;AAAA,EACA,OAAO;AACL,aAAS,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMhB;AAAA,EACA;AACA,SAAO,SAAS,KAAK,IAAI;AAC3B;AAKO,SAAS,oBAA4B;AAC1C,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuBT;AAKO,SAAS,mBAAmB,QAAuB,WAAqB,SAAmB,WAA2B;AAC3H,QAAM,aAAa,OAAO,IAAI,OAAK,EAAE,IAAI;AACzC,SAAO,GAAG,eAAe,SAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMnC,UAAU,KAAK,MAAM,CAAC;AAAA;AAAA,EAEtB,qBAAqB,UAAU,CAAC;AAAA;AAAA,EAEhC,sBAAsB,QAAQ,CAAC,UAAU,GAAG,QAAQ,OAAO,OAAK,MAAM,QAAQ,CAAC,CAAC,CAAC;AAAA,EACjF,kBAAkB,CAAC;AAAA;AAErB;;;AFnIO,SAAS,mBAAmB,QAAgB,YAAoB,iBAAiD;AAEtH,QAAM,YAAY,OAAO,KAAK,EAAE,QAAQ,iBAAiB,EAAE;AAG3D,MAAI,UAAU,SAA
S,MAAM,KAAK,UAAU,SAAS,SAAS,KAAK,UAAU,SAAS,GAAG,GAAG;AAC1F,WAAO;AAAA,EACT;AAGA,MAAI,UAAU,SAAS,IAAI,GAAG;AAC5B,WAAO;AAAA,EACT;AAGA,MAAI,cAAc,WAAW;AAC3B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAAA,EACF;AAGA,MAAI,cAAc,UAAU;AAE1B,QAAI,gBAAgB,KAAK,aAAW,WAAW,YAAY,EAAE,SAAS,QAAQ,YAAY,CAAC,CAAC,GAAG;AAC7F,aAAO;AAAA,QACL,MAAM;AAAA,MACR;AAAA,IACF;AACA,WAAO;AAAA,MACL,MAAM;AAAA,IACR;AAAA,EACF;AAGA,MAAI,cAAc,UAAU;AAC1B,WAAO;AAAA,MACL,MAAM;AAAA,IACR;AAAA,EACF;AAGA,MAAI,UAAU,SAAS,WAAW,KAAK,UAAU,SAAS,OAAO,GAAG;AAClE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ;AAAA,IACV;AAAA,EACF;AAGA,SAAO;AAAA,IACL,MAAM;AAAA,EACR;AACF;AAKO,SAAS,mBAAmB,OAAoB,iBAAqC;AAC1F,QAAM,aAAuB,CAAC;AAC9B,aAAW,CAAC,YAAY,MAAM,KAAK,MAAM,SAAS;AAChD,UAAM,UAAU,mBAAmB,QAAQ,YAAY,eAAe;AACtE,QAAI,SAAS;AAEX,UAAI,UAAU;AACd,UAAI,QAAQ,WAAW;AACrB,kBAAU;AAAA,MACZ,WAAW,QAAQ,QAAQ;AACzB,kBAAU;AAAA,MACZ;AACA,iBAAW,KAAK,KAAK,UAAU,KAAK,QAAQ,IAAI,IAAI,OAAO,EAAE;AAAA,IAC/D;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAsB,eAAe,QAAyB,SAIlC;AAC1B,QAAM,MAAM,SAAS,OAAO,QAAQ,IAAI;AACxC,QAAM,UAAU,SAAS,WAAW;AACpC,QAAM,SAAS,SAAS,UAAU;AAClC,QAAM,SAAyB;AAAA,IAC7B,SAAS;AAAA,IACT,iBAAiB;AAAA,IACjB,YAAY;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,UAAU,CAAC;AAAA,EACb;AAGA,QAAM,YAAiB,gBAAW,OAAO,SAAS,IAAI,OAAO,YAAiB,aAAQ,KAAK,OAAO,SAAS;AAC3G,QAAM,aAAkB,gBAAW,OAAO,UAAU,IAAI,OAAO,aAAkB,aAAQ,KAAK,OAAO,UAAU;AAC/G,SAAO,aAAa;AAGpB,MAAI,CAAI,cAAW,SAAS,GAAG;AAC7B,WAAO,OAAO,KAAK,yBAAyB,SAAS,EAAE;AACvD,WAAO;AAAA,EACT;AAGA,MAAI,SAAS;AACX,YAAQ,IAAI,uBAAuB,SAAS,EAAE;AAAA,EAChD;AACA,QAAM,eAAkB,gBAAa,WAAW,OAAO;AAGvD,QAAM,cAAc,oBAAI,IAAI,CAAC,GAAG,sBAAsB,GAAI,OAAO,eAAe,CAAC,CAAE,CAAC;AAGpF,QAAM,kBAAkB,CAAC,GAAG,0BAA0B,GAAI,OAAO,mBAAmB,CAAC,CAAE;AAGvF,QAAM,eAAe,eAAe,cAAc,OAAO,QAAQ,WAAW;AAG5E,aAAW,eAAe,OAAO,QAAQ;AACvC,UAAM,QAAQ,aAAa,KAAK,OAAK,EAAE,SAAS,YAAY,IAAI;AAChE,QAAI,CAAC,OAAO;AACV,aAAO,SAAS,KAAK,UAAU,YAAY,IAAI,0BAA0B,YAAY,UAAU,QAAQ,GAAG;AAAA,IAC5G;AAAA,EACF;AACA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO,OAAO,KAAK,oCAAoC;AACvD,WAAO;AAAA,EACT;AAGA,QAAM,YAAsB,CAAC;AAC7B,aAAW,SAAS,cAAc;AAChC,QAAI,SAAS;AACX,Y
AAM,SAAS,MAAM,OAAO,kBAAkB,MAAM,OAAO;AAC3D,cAAQ,IAAI,cAAc,MAAM,MAAM,IAAI,MAAM,IAAI,KAAK,MAAM,QAAQ,IAAI,YAAY,MAAM,OAAO,gBAAgB,qBAAqB,EAAE,GAAG,SAAS,sBAAsB,EAAE,EAAE;AAAA,IACnL;AACA,UAAM,aAAa,mBAAmB,OAAO,eAAe;AAC5D,QAAI,WAAW,WAAW,GAAG;AAC3B,aAAO,SAAS,KAAK,UAAU,MAAM,IAAI,2BAA2B;AACpE;AAAA,IACF;AACA,cAAU,KAAK,wBAAwB,OAAO,UAAU,CAAC;AAAA,EAC3D;AAGA,QAAM,UAAU,CAAC,GAAG,IAAI,IAAI,OAAO,OAAO,IAAI,OAAK,EAAE,UAAU,QAAQ,CAAC,CAAC;AAGzE,QAAM,eAAoB,cAAS,KAAK,SAAS;AACjD,QAAM,SAAS,mBAAmB,aAAa,OAAO,OAAK,UAAU,KAAK,SAAO,IAAI,SAAS,SAAS,EAAE,IAAI,IAAI,CAAC,CAAC,GAAG,WAAW,SAAS,YAAY;AAGtJ,MAAI,QAAQ;AACV,WAAO,UAAU;AACjB,WAAO,kBAAkB,UAAU;AACnC,WAAO,SAAS;AAChB,WAAO;AAAA,EACT;AAGA,QAAM,YAAiB,aAAQ,UAAU;AACzC,MAAI,CAAI,cAAW,SAAS,GAAG;AAC7B,IAAG,aAAU,WAAW;AAAA,MACtB,WAAW;AAAA,IACb,CAAC;AAAA,EACH;AAGA,EAAG,iBAAc,YAAY,MAAM;AACnC,SAAO,UAAU;AACjB,SAAO,kBAAkB,UAAU;AACnC,SAAO;AACT;","names":[]}
package/package.json CHANGED
@@ -1,11 +1,15 @@
1
1
  {
2
2
  "name": "@pol-studios/powersync",
3
- "version": "1.0.10",
3
+ "version": "1.0.11",
4
4
  "description": "Enterprise PowerSync integration for offline-first applications",
5
5
  "license": "UNLICENSED",
6
6
  "type": "module",
7
7
  "main": "./dist/index.js",
8
8
  "types": "./dist/index.d.ts",
9
+ "bin": {
10
+ "powersync": "./dist/generator/cli.js",
11
+ "@pol-studios/powersync": "./dist/generator/cli.js"
12
+ },
9
13
  "exports": {
10
14
  ".": {
11
15
  "react-native": {
@@ -124,6 +128,10 @@
124
128
  "import": "./dist/react/index.js",
125
129
  "types": "./dist/react/index.d.ts"
126
130
  }
131
+ },
132
+ "./generator": {
133
+ "import": "./dist/generator/index.js",
134
+ "types": "./dist/generator/index.d.ts"
127
135
  }
128
136
  },
129
137
  "files": [
@@ -136,7 +144,9 @@
136
144
  "prepublishOnly": "pnpm build"
137
145
  },
138
146
  "dependencies": {
139
- "@pol-studios/db": "workspace:*"
147
+ "@pol-studios/db": "workspace:*",
148
+ "commander": "^12.0.0",
149
+ "picocolors": "^1.0.0"
140
150
  },
141
151
  "peerDependencies": {
142
152
  "@powersync/react-native": ">=1.0.8",