supatool 0.4.2 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -258,7 +258,8 @@ program
258
258
  connectionString: config.connectionString,
259
259
  schemaDir: options.schemaDir,
260
260
  tablePattern: options.tables,
261
- force: options.force
261
+ force: options.force,
262
+ migrationConfig: config.migration
262
263
  });
263
264
  }
264
265
  catch (error) {
@@ -276,6 +277,7 @@ program
276
277
  .option('--auto-apply', 'Auto-apply to remote (no confirmation)')
277
278
  .option('--dry-run', 'Preview changes only (recommended)')
278
279
  .option('--generate-only', 'Generate migration files only (no apply)')
280
+ .option('--rls <mode>', 'RLS migration mode: rewrite = DROP+CREATE policies (default: skip)', 'skip')
279
281
  .option('--config <path>', 'Configuration file path')
280
282
  .action(async (options) => {
281
283
  const config = (0, sync_1.resolveConfig)({
@@ -328,7 +330,9 @@ program
328
330
  force: isAutoApply,
329
331
  dryRun: isDryRun,
330
332
  generateOnly: isGenerateOnly,
331
- requireConfirmation: !isDryRun && !isAutoApply && !isGenerateOnly
333
+ requireConfirmation: !isDryRun && !isAutoApply && !isGenerateOnly,
334
+ migrationConfig: config.migration,
335
+ rlsMode: options.rls
332
336
  });
333
337
  }
334
338
  catch (error) {
@@ -72,27 +72,85 @@ function loadConfig(configPath) {
72
72
  */
73
73
/**
 * Merge CLI options, environment variables, and config-file values into the
 * effective configuration.
 * Precedence for the connection string: CLI option > SUPABASE_CONNECTION_STRING
 * > DATABASE_URL > config file. Other fields: CLI option > config file > default.
 *
 * @param {object} options - Parsed CLI options.
 * @param {string} [configPath] - Optional path to a config file.
 * @returns {object} Resolved configuration object.
 */
function resolveConfig(options, configPath) {
    const fileConfig = loadConfig(configPath);
    return {
        connectionString: options.connectionString ||
            process.env.SUPABASE_CONNECTION_STRING ||
            process.env.DATABASE_URL ||
            fileConfig.connectionString,
        schemaDir: options.schemaDir || fileConfig.schemaDir || './supabase/schemas',
        tablePattern: options.tablePattern || fileConfig.tablePattern || '*',
        // Migration options only come from the config file (no CLI equivalent here).
        migration: fileConfig.migration
    };
}
84
86
/**
 * Generate config file template (no connection string — use .env.local)
 *
 * Writes a JSON config skeleton to `outputPath`, then scaffolds a .env.local
 * template and verifies/updates .gitignore coverage.
 *
 * @param {string} outputPath - Destination path for the generated config file.
 */
function createConfigTemplate(outputPath) {
    // Key order matters: JSON.stringify serializes keys in insertion order.
    const configTemplate = {
        schemaDir: "./supabase/schemas",
        tablePattern: "*",
        migration: {
            naming: "timestamp",
            "_naming_comment": "Use 'sequential' for NNN_description.sql format, 'timestamp' for YYYYMMDDHHMMSS_description.sql",
            dir: "supabase/migrations"
        },
        "_comment": "Set credentials in .env.local — never put secrets in this file."
    };
    const serialized = JSON.stringify(configTemplate, null, 2);
    fs.writeFileSync(outputPath, serialized, 'utf-8');
    console.log(`Config template generated: ${outputPath}`);
    ensureEnvLocalTemplate();
    checkGitignore(outputPath);
}
105
/**
 * Write .env.local template if it doesn't exist yet.
 * Never clobbers an existing .env.local — it may already hold real credentials.
 */
function ensureEnvLocalTemplate() {
    const envLocalPath = path.join(process.cwd(), '.env.local');
    if (fs.existsSync(envLocalPath)) {
        return;
    }
    // Trailing '' yields a final newline when joined.
    const templateLines = [
        '# supatool credentials — never commit this file',
        '# Option A: Supabase URL + service role key (recommended)',
        'SUPABASE_URL=https://your-project-ref.supabase.co',
        'SUPABASE_SERVICE_ROLE_KEY=your-service-role-key',
        '',
        '# Option B: direct connection string',
        '# SUPABASE_CONNECTION_STRING=postgresql://user:password@host:port/database',
        '',
    ];
    fs.writeFileSync(envLocalPath, templateLines.join('\n'), 'utf-8');
    console.log('.env.local template created — fill in your credentials.');
}
124
/**
 * Warn if the config file or .env.local are not covered by .gitignore,
 * and auto-append any missing entries under a "# supatool" section.
 *
 * @param {string} configPath - Path to the generated config file; only its
 *   basename is matched against .gitignore entries.
 */
function checkGitignore(configPath) {
    const gitignorePath = path.join(process.cwd(), '.gitignore');
    if (!fs.existsSync(gitignorePath)) {
        console.warn('Warning: .gitignore not found. Make sure to exclude .env.local and supatool.config.json.');
        return;
    }
    const content = fs.readFileSync(gitignorePath, 'utf-8');
    const lines = content.split('\n').map(l => l.trim());
    const missing = [];
    const configFile = path.basename(configPath);
    // Accept both "file" and root-anchored "/file" entries.
    if (!lines.some(l => l === configFile || l === `/${configFile}`)) {
        missing.push(configFile);
    }
    // "*.local" is the conventional pattern that also covers .env.local.
    if (!lines.some(l => l === '.env.local' || l === '*.local')) {
        missing.push('.env.local');
    }
    if (missing.length > 0) {
        console.warn(`\nWarning: The following are NOT in .gitignore — add them to avoid committing secrets:`);
        for (const f of missing) {
            console.warn(`  ${f}`);
        }
        // Auto-append to .gitignore (fixed: previous code used the no-op
        // `missing.map(f => f)` before joining).
        const toAdd = missing.join('\n') + '\n';
        fs.appendFileSync(gitignorePath, '\n# supatool\n' + toAdd);
        console.log(`Auto-added to .gitignore: ${missing.join(', ')}`);
    }
    else {
        console.log('.gitignore OK — credentials files are excluded.');
    }
}
@@ -355,6 +355,26 @@ async function fetchFunctions(client, spinner, progress, schemas = ['public']) {
355
355
  timestamp: Math.floor(Date.now() / 1000)
356
356
  });
357
357
  }
358
+ // Detect overloaded functions (same schema.name, different signatures)
359
+ const nameCount = new Map();
360
+ for (const row of result.rows) {
361
+ const key = `${row.schema_name}.${row.name}`;
362
+ const sig = `${row.name}(${row.identity_args || ''})`;
363
+ if (!nameCount.has(key))
364
+ nameCount.set(key, []);
365
+ nameCount.get(key).push(sig);
366
+ }
367
+ const overloads = [...nameCount.entries()].filter(([, sigs]) => sigs.length > 1);
368
+ if (overloads.length > 0) {
369
+ console.warn('\n⚠ Overloaded RPC functions detected (same name, different signatures):');
370
+ for (const [key, sigs] of overloads) {
371
+ console.warn(` ${key}`);
372
+ for (const sig of sigs) {
373
+ console.warn(` - ${sig}`);
374
+ }
375
+ }
376
+ console.warn(' Note: Only the last definition will be written to the output file.\n');
377
+ }
358
378
  return functions;
359
379
  }
360
380
  /**
@@ -843,19 +863,21 @@ async function generateCreateTableDDL(client, tableName, schemaName = 'public')
843
863
  const [columnsResult, primaryKeyResult, tableCommentResult, columnCommentsResult, uniqueConstraintResult, foreignKeyResult] = await Promise.all([
844
864
  // Get column info
845
865
  client.query(`
846
- SELECT
866
+ SELECT
847
867
  c.column_name,
848
868
  c.data_type,
849
869
  c.udt_name,
850
870
  c.character_maximum_length,
851
871
  c.is_nullable,
852
872
  c.column_default,
873
+ c.is_generated,
874
+ c.generation_expression,
853
875
  pg_catalog.format_type(a.atttypid, a.atttypmod) AS full_type
854
876
  FROM information_schema.columns c
855
877
  JOIN pg_class cl ON cl.relname = c.table_name
856
878
  JOIN pg_namespace ns ON ns.nspname = c.table_schema AND ns.oid = cl.relnamespace
857
879
  JOIN pg_attribute a ON a.attrelid = cl.oid AND a.attname = c.column_name
858
- WHERE c.table_schema = $1
880
+ WHERE c.table_schema = $1
859
881
  AND c.table_name = $2
860
882
  ORDER BY c.ordinal_position
861
883
  `, [schemaName, tableName]),
@@ -951,13 +973,19 @@ async function generateCreateTableDDL(client, tableName, schemaName = 'public')
951
973
  if (col.character_maximum_length) {
952
974
  colDef += `(${col.character_maximum_length})`;
953
975
  }
954
- // NOT NULL constraint
955
- if (col.is_nullable === 'NO') {
956
- colDef += ' NOT NULL';
976
+ // Generated column
977
+ if (col.is_generated === 'ALWAYS') {
978
+ colDef += ` GENERATED ALWAYS AS (${col.generation_expression}) STORED`;
957
979
  }
958
- // Default value
959
- if (col.column_default) {
960
- colDef += ` DEFAULT ${col.column_default}`;
980
+ else {
981
+ // NOT NULL constraint
982
+ if (col.is_nullable === 'NO') {
983
+ colDef += ' NOT NULL';
984
+ }
985
+ // Default value
986
+ if (col.column_default) {
987
+ colDef += ` DEFAULT ${col.column_default}`;
988
+ }
961
989
  }
962
990
  columnDefs.push(colDef);
963
991
  }
@@ -1109,13 +1137,7 @@ async function saveDefinitionsByType(definitions, outputDir, separateDirectories
1109
1137
  if (!fs.existsSync(targetDir)) {
1110
1138
  fs.mkdirSync(targetDir, { recursive: true });
1111
1139
  }
1112
- let fileName;
1113
- if (def.type === 'function') {
1114
- fileName = `fn_${def.name}.sql`;
1115
- }
1116
- else {
1117
- fileName = `${def.name}.sql`;
1118
- }
1140
+ const fileName = `${def.name}.sql`;
1119
1141
  const filePath = path.join(targetDir, fileName);
1120
1142
  const ddlWithNewline = def.ddl.endsWith('\n') ? def.ddl : def.ddl + '\n';
1121
1143
  await fsPromises.writeFile(filePath, headerComment + ddlWithNewline);
@@ -1175,7 +1197,7 @@ async function generateIndexFile(definitions, outputDir, separateDirectories = t
1175
1197
  // Build relative path per file (schema/type/file when multiSchema)
1176
1198
  const getRelPath = (def) => {
1177
1199
  const typeDir = separateDirectories ? (typeDirNames[def.type] ?? def.type) : '.';
1178
- const fileName = def.type === 'function' ? `fn_${def.name}.sql` : `${def.name}.sql`;
1200
+ const fileName = `${def.name}.sql`;
1179
1201
  if (multiSchema && def.schema) {
1180
1202
  return `${def.schema}/${typeDir}/${fileName}`;
1181
1203
  }
@@ -34,9 +34,36 @@ var __importStar = (this && this.__importStar) || (function () {
34
34
  })();
35
35
  Object.defineProperty(exports, "__esModule", { value: true });
36
36
  exports.generateMigrationFile = generateMigrationFile;
37
+ exports.generateRenameTableMigrationFile = generateRenameTableMigrationFile;
38
+ exports.generateFunctionMigrationFile = generateFunctionMigrationFile;
39
+ exports.generateRlsMigrationFile = generateRlsMigrationFile;
37
40
  const fs = __importStar(require("fs"));
38
41
  const path = __importStar(require("path"));
39
42
  const diff_1 = require("diff");
43
/**
 * Resolve migration filename based on naming config
 *
 * @param {string} migrationDir - Directory scanned for existing sequential files.
 * @param {string} description - Slug appended after the number/timestamp prefix.
 * @param {'timestamp'|'sequential'} [naming='timestamp'] - Naming scheme.
 * @returns {string} Filename like "003_desc.sql" or "20240101123045_desc.sql".
 */
function resolveMigrationFilename(migrationDir, description, naming = 'timestamp') {
    if (naming === 'sequential') {
        // Next number = 1 + highest NNN_ prefix among existing files.
        let highest = 0;
        if (fs.existsSync(migrationDir)) {
            for (const existing of fs.readdirSync(migrationDir)) {
                const prefix = existing.match(/^(\d+)_/);
                if (prefix) {
                    highest = Math.max(highest, parseInt(prefix[1], 10));
                }
            }
        }
        return `${String(highest + 1).padStart(3, '0')}_${description}.sql`;
    }
    // Default: timestamp — YYYYMMDDHHMMSS derived from the UTC ISO string.
    const stamp = new Date().toISOString()
        .replace(/[-:]/g, '')
        .replace(/\..+/, '')
        .replace('T', '');
    return `${stamp}_${description}.sql`;
}
40
67
  /**
41
68
  * Generate ALTER TABLE statements from DDL diff
42
69
  */
@@ -112,8 +139,8 @@ function generateAlterStatements(tableName, fromDdl, toDdl) {
112
139
  /**
113
140
  * Generate migration file
114
141
  */
115
- async function generateMigrationFile(tableName, fromDdl, toDdl, projectDir = '.') {
116
- const migrationDir = path.join(projectDir, 'supabase', 'migrations');
142
+ async function generateMigrationFile(tableName, fromDdl, toDdl, projectDir = '.', migrationConfig) {
143
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
117
144
  // Create migrations directory
118
145
  if (!fs.existsSync(migrationDir)) {
119
146
  fs.mkdirSync(migrationDir, { recursive: true });
@@ -121,20 +148,14 @@ async function generateMigrationFile(tableName, fromDdl, toDdl, projectDir = '.'
121
148
  // Generate ALTER TABLE statements
122
149
  const alterStatements = generateAlterStatements(tableName, fromDdl, toDdl);
123
150
  if (alterStatements.length === 0) {
124
- return await generateManualMigrationTemplate(tableName, fromDdl, toDdl, projectDir);
151
+ return await generateManualMigrationTemplate(tableName, fromDdl, toDdl, projectDir, migrationConfig);
125
152
  }
126
- // Generate filename from timestamp
127
- const now = new Date();
128
- const timestamp = now.toISOString()
129
- .replace(/[-:]/g, '')
130
- .replace(/\..+/, '')
131
- .replace('T', '');
132
- const filename = `${timestamp}_update_${tableName}.sql`;
153
+ const filename = resolveMigrationFilename(migrationDir, `update_${tableName}`, migrationConfig?.naming);
133
154
  const filepath = path.join(migrationDir, filename);
134
155
  // Build migration file content
135
156
  const content = `-- Migration generated by supatool
136
157
  -- Table: ${tableName}
137
- -- Generated at: ${now.toISOString()}
158
+ -- Generated at: ${new Date().toISOString()}
138
159
 
139
160
  ${alterStatements.join('\n')}
140
161
  `;
@@ -187,20 +208,14 @@ function analyzeDiffForTemplate(fromDdl, toDdl) {
187
208
  /**
188
209
  * Generate manual migration template
189
210
  */
190
- async function generateManualMigrationTemplate(tableName, fromDdl, toDdl, projectDir) {
191
- const migrationDir = path.join(projectDir, 'supabase', 'migrations');
211
+ async function generateManualMigrationTemplate(tableName, fromDdl, toDdl, projectDir, migrationConfig) {
212
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
192
213
  if (!fs.existsSync(migrationDir)) {
193
214
  fs.mkdirSync(migrationDir, { recursive: true });
194
215
  }
195
216
  // Analyze diff
196
217
  const { removedColumns, addedColumns } = analyzeDiffForTemplate(fromDdl, toDdl);
197
- // Generate filename from timestamp
198
- const now = new Date();
199
- const timestamp = now.toISOString()
200
- .replace(/[-:]/g, '')
201
- .replace(/\..+/, '')
202
- .replace('T', '');
203
- const filename = `${timestamp}_manual_update_${tableName}.sql`;
218
+ const filename = resolveMigrationFilename(migrationDir, `manual_update_${tableName}`, migrationConfig?.naming);
204
219
  const filepath = path.join(migrationDir, filename);
205
220
  // Build template from actual changes
206
221
  let migrationStatements = [];
@@ -263,14 +278,100 @@ ${migrationStatements.join('\n')}
263
278
  return filepath;
264
279
  }
265
280
/**
 * Generate a RENAME TABLE migration when a table appears renamed.
 * (old exists on remote, new exists on local, columns are highly similar)
 *
 * @param {string} schema - Schema name; "public" is omitted from the ALTER TABLE target.
 * @param {string} oldName - Current (remote) table name.
 * @param {string} newName - Target (local) table name.
 * @param {string} [projectDir='.'] - Project root containing the migrations dir.
 * @param {{naming?: string, dir?: string}} [migrationConfig] - Naming/dir options.
 * @returns {Promise<string>} Path of the generated migration file.
 */
async function generateRenameTableMigrationFile(schema, oldName, newName, projectDir = '.', migrationConfig) {
    const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
    if (!fs.existsSync(migrationDir)) {
        fs.mkdirSync(migrationDir, { recursive: true });
    }
    const filename = resolveMigrationFilename(migrationDir, `rename_${schema}_${oldName}_to_${newName}`, migrationConfig?.naming);
    const filepath = path.join(migrationDir, filename);
    const content = `-- Migration generated by supatool
-- Rename table: ${schema}.${oldName} → ${schema}.${newName}
-- Generated at: ${new Date().toISOString()}
-- WARNING: Review carefully before applying. supatool detected this as a rename
-- based on column similarity, but it may be an unrelated add/drop pair.

ALTER TABLE ${schema === 'public' ? '' : schema + '.'}${oldName} RENAME TO ${newName};
`;
    fs.writeFileSync(filepath, content, 'utf-8');
    // Fixed: previously logged the literal "$(unknown)" — `$()` is not template
    // interpolation syntax; log the actual generated path.
    console.log(`Rename migration generated: ${filepath}`);
    return filepath;
}
303
/**
 * Normalize function DDL for comparison (strip supatool header comment, trailing whitespace)
 *
 * @param {string} ddl - Raw function DDL text.
 * @returns {string} DDL with "-- Generated by supatool" header lines removed and
 *   surrounding whitespace trimmed.
 */
function normalizeFunctionDdl(ddl) {
    // trim() already strips trailing whitespace; the previous extra
    // .replace(/\s+$/, '') before trim() was redundant.
    return ddl
        .split('\n')
        .filter(line => !line.startsWith('-- Generated by supatool'))
        .join('\n')
        .trim();
}
314
/**
 * Generate a migration file for a changed function.
 * localDdl is used as-is (must be valid CREATE OR REPLACE FUNCTION DDL).
 *
 * @param {string} schema - Schema the function belongs to.
 * @param {string} funcName - Function name (used in the filename and header).
 * @param {string} localDdl - Local (target) function DDL.
 * @param {string} remoteDdl - Remote (current) function DDL; '' if absent.
 * @param {string} [projectDir='.'] - Project root containing the migrations dir.
 * @param {{naming?: string, dir?: string}} [migrationConfig] - Naming/dir options.
 * @returns {Promise<string|null>} Path of the generated file, or null when the
 *   normalized DDLs are identical (no migration needed).
 */
async function generateFunctionMigrationFile(schema, funcName, localDdl, remoteDdl, projectDir = '.', migrationConfig) {
    const normalizedLocal = normalizeFunctionDdl(localDdl);
    const normalizedRemote = normalizeFunctionDdl(remoteDdl);
    if (normalizedLocal === normalizedRemote)
        return null;
    const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
    if (!fs.existsSync(migrationDir)) {
        fs.mkdirSync(migrationDir, { recursive: true });
    }
    const filename = resolveMigrationFilename(migrationDir, `update_fn_${schema}_${funcName}`, migrationConfig?.naming);
    const filepath = path.join(migrationDir, filename);
    const content = `-- Migration generated by supatool
-- Function: ${schema}.${funcName}
-- Generated at: ${new Date().toISOString()}

${normalizedLocal.endsWith(';') ? normalizedLocal : normalizedLocal + ';'}
`;
    fs.writeFileSync(filepath, content, 'utf-8');
    // Fixed: previously logged the literal "$(unknown)" instead of the path.
    console.log(`Function migration generated: ${filepath}`);
    return filepath;
}
339
/**
 * Generate DROP POLICY IF EXISTS + CREATE POLICY statements for changed policies.
 *
 * @param {Array<object>} changedPolicies - Policies to re-create (dropped first).
 * @param {Array<{policyName: string, tableName: string}>} droppedPolicies - Policies to drop only.
 * @param {string} [projectDir='.'] - Project root containing the migrations dir.
 * @param {{naming?: string, dir?: string}} [migrationConfig] - Naming/dir options.
 * @returns {Promise<string|null>} Path of the generated file, or null when there
 *   is nothing to migrate.
 */
async function generateRlsMigrationFile(changedPolicies, droppedPolicies, projectDir = '.', migrationConfig) {
    if (changedPolicies.length === 0 && droppedPolicies.length === 0)
        return null;
    const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
    if (!fs.existsSync(migrationDir)) {
        fs.mkdirSync(migrationDir, { recursive: true });
    }
    const statements = [];
    for (const p of droppedPolicies) {
        statements.push(`DROP POLICY IF EXISTS "${p.policyName}" ON ${p.tableName};`);
    }
    for (const p of changedPolicies) {
        // Re-create = drop (idempotent) then create with the new definition.
        statements.push(`DROP POLICY IF EXISTS "${p.policyName}" ON ${p.tableName};`);
        const permissive = p.permissive ? 'PERMISSIVE' : 'RESTRICTIVE';
        const roles = p.roles || 'public';
        let sql = `CREATE POLICY "${p.policyName}" ON ${p.tableName}\n` +
            `  AS ${permissive} FOR ${p.cmd} TO ${roles}`;
        if (p.qual)
            sql += `\n  USING (${p.qual})`;
        if (p.withCheck)
            sql += `\n  WITH CHECK (${p.withCheck})`;
        sql += ';';
        statements.push(sql);
    }
    const filename = resolveMigrationFilename(migrationDir, 'update_rls', migrationConfig?.naming);
    const filepath = path.join(migrationDir, filename);
    const content = `-- Migration generated by supatool
-- RLS policies: ${changedPolicies.length} changed, ${droppedPolicies.length} dropped
-- Generated at: ${new Date().toISOString()}

${statements.join('\n\n')}
`;
    fs.writeFileSync(filepath, content, 'utf-8');
    // Fixed: previously logged the literal "$(unknown)" instead of the path.
    console.log(`RLS migration generated: ${filepath}`);
    return filepath;
}
package/dist/sync/sync.js CHANGED
@@ -1,14 +1,63 @@
1
1
  "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
2
35
  Object.defineProperty(exports, "__esModule", { value: true });
3
36
  exports.syncAllTables = syncAllTables;
37
+ const path = __importStar(require("path"));
4
38
  const parseLocalSchemas_1 = require("./parseLocalSchemas");
5
39
  const fetchRemoteSchemas_1 = require("./fetchRemoteSchemas");
6
40
  const writeSchema_1 = require("./writeSchema");
41
+ const fs = __importStar(require("fs"));
7
42
  const generateMigration_1 = require("./generateMigration");
8
43
  const diff_1 = require("diff");
9
44
  const utils_1 = require("./utils");
10
45
  // Global approval state (shared with writeSchema.ts)
11
46
  let globalApproveAll = false;
47
/**
 * Extract column names from a CREATE TABLE DDL string.
 * Table-level constraint lines (PRIMARY/CONSTRAINT/UNIQUE/FOREIGN/CHECK)
 * are skipped; returns [] when no CREATE TABLE body is found.
 */
function extractColumnNames(ddl) {
    const body = ddl.match(/CREATE TABLE[^(]*\(([\s\S]*)\)/i);
    if (!body) {
        return [];
    }
    const names = [];
    for (const rawPiece of body[1].split(',')) {
        const piece = rawPiece.trim();
        if (/^(PRIMARY|CONSTRAINT|UNIQUE|FOREIGN|CHECK)/i.test(piece)) {
            continue;
        }
        // First identifier on the line is the column name.
        const ident = piece.match(/^([a-zA-Z_]\w*)/);
        if (ident) {
            names.push(ident[1]);
        }
    }
    return names;
}
12
61
  /**
13
62
  * Normalize DDL string (unify spaces, newlines, tabs)
14
63
  */
@@ -37,7 +86,7 @@ function formatSQL(sql) {
37
86
  /**
38
87
  * Synchronize all table schemas
39
88
  */
40
- async function syncAllTables({ connectionString, schemaDir, tablePattern = '*', force = false, dryRun = false, generateOnly = false, requireConfirmation = false }) {
89
+ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*', force = false, dryRun = false, generateOnly = false, requireConfirmation = false, migrationConfig, rlsMode = 'skip' }) {
41
90
  // Reset approval state
42
91
  (0, writeSchema_1.resetApprovalState)();
43
92
  const localSchemas = await (0, parseLocalSchemas_1.parseLocalSchemas)(schemaDir);
@@ -53,6 +102,34 @@ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*',
53
102
  }
54
103
  // Fetch only the remote schemas for target tables
55
104
  const remoteSchemas = await (0, fetchRemoteSchemas_1.fetchRemoteSchemas)(connectionString, targetLocalTables);
105
+ // --- Table rename detection ---
106
+ // Tables that exist locally but not remotely (potential new name)
107
+ const localOnly = targetLocalTables.filter(t => !remoteSchemas[t]);
108
+ // Tables that exist remotely but not locally (potential old name)
109
+ const remoteOnly = Object.keys(remoteSchemas).filter(t => !localSchemas[t]);
110
+ for (const newName of localOnly) {
111
+ const localDdl = localSchemas[newName]?.ddl ?? '';
112
+ const localCols = extractColumnNames(localDdl);
113
+ if (localCols.length === 0)
114
+ continue;
115
+ for (const oldName of remoteOnly) {
116
+ const remoteDdl = remoteSchemas[oldName]?.ddl ?? '';
117
+ const remoteCols = extractColumnNames(remoteDdl);
118
+ if (remoteCols.length === 0)
119
+ continue;
120
+ const shared = localCols.filter(c => remoteCols.includes(c)).length;
121
+ const similarity = shared / Math.max(localCols.length, remoteCols.length);
122
+ if (similarity >= 0.7) {
123
+ console.warn(`⚠️ Possible table rename detected: "${oldName}" → "${newName}" ` +
124
+ `(${Math.round(similarity * 100)}% column match). ` +
125
+ `Generating rename migration — review before applying.`);
126
+ // Infer schema from CREATE TABLE <schema>.<table> in DDL (default public)
127
+ const schemaMatch = localDdl.match(/CREATE TABLE\s+(\w+)\.\w+/i);
128
+ const schema = schemaMatch ? schemaMatch[1] : 'public';
129
+ await (0, generateMigration_1.generateRenameTableMigrationFile)(schema, oldName, newName, process.cwd(), migrationConfig);
130
+ }
131
+ }
132
+ }
56
133
  for (const tableName of targetLocalTables) {
57
134
  const local = localSchemas[tableName];
58
135
  const remote = remoteSchemas[tableName];
@@ -129,7 +206,7 @@ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*',
129
206
  // Generate migration file (local → remote diff)
130
207
  const migrationPath = await (0, generateMigration_1.generateMigrationFile)(tableName, normalizedRemote, // from (current remote state)
131
208
  normalizedLocal, // to (local target state)
132
- process.cwd());
209
+ process.cwd(), migrationConfig);
133
210
  if (migrationPath) {
134
211
  console.log(`[${tableName}] 📝 UPDATE migration generated: ${migrationPath}`);
135
212
  }
@@ -140,4 +217,197 @@ async function syncAllTables({ connectionString, schemaDir, tablePattern = '*',
140
217
  }
141
218
  }
142
219
  }
220
+ // --- Function diff: scan local rpc/*.sql vs remote ---
221
+ await syncFunctions(connectionString, schemaDir, migrationConfig);
222
+ // --- RLS diff ---
223
+ if (rlsMode === 'rewrite') {
224
+ await syncRls(connectionString, schemaDir, migrationConfig);
225
+ }
226
+ }
227
/**
 * Scan local <schemaDir>/<schema>/rpc/*.sql, compare with remote pg_get_functiondef,
 * and generate CREATE OR REPLACE migrations for any changed functions.
 *
 * @param {string} connectionString - Postgres connection string for the remote DB.
 * @param {string} schemaDir - Root of local schema files, one subdirectory per schema.
 * @param {{naming?: string, dir?: string}|undefined} migrationConfig - Migration naming/dir options.
 */
async function syncFunctions(connectionString, schemaDir, migrationConfig) {
    // Lazy dynamic require of pg, so commands that never reach this code
    // don't pay the import cost.
    const { Client } = await Promise.resolve().then(() => __importStar(require('pg')));
    // Collect all local rpc sql files: { schema, funcName, localDdl }
    const entries = [];
    if (!fs.existsSync(schemaDir))
        return;
    for (const schemaEntry of fs.readdirSync(schemaDir, { withFileTypes: true })) {
        if (!schemaEntry.isDirectory())
            continue;
        const rpcDir = path.join(schemaDir, schemaEntry.name, 'rpc');
        if (!fs.existsSync(rpcDir))
            continue;
        for (const file of fs.readdirSync(rpcDir)) {
            if (!file.endsWith('.sql'))
                continue;
            // Convention: the filename (minus .sql) is the function name.
            const funcName = file.replace(/\.sql$/, '');
            const localDdl = fs.readFileSync(path.join(rpcDir, file), 'utf-8');
            entries.push({ schema: schemaEntry.name, funcName, localDdl });
        }
    }
    if (entries.length === 0)
        return;
    const client = new Client({ connectionString });
    await client.connect();
    let changed = 0;
    try {
        for (const { schema, funcName, localDdl } of entries) {
            // NOTE(review): LIMIT 1 picks an arbitrary definition when the function
            // is overloaded — assumes one definition per name; confirm for overloads.
            const result = await client.query(`SELECT pg_get_functiondef(p.oid) as definition
         FROM pg_proc p
         JOIN pg_namespace n ON p.pronamespace = n.oid
         WHERE n.nspname = $1 AND p.proname = $2
         LIMIT 1`, [schema, funcName]);
            if (result.rows.length === 0) {
                // Function not in remote yet — generate migration to create it
                // (remoteDdl = '' forces a diff in generateFunctionMigrationFile).
                const migrationPath = await (0, generateMigration_1.generateFunctionMigrationFile)(schema, funcName, localDdl, '', process.cwd(), migrationConfig);
                if (migrationPath) {
                    console.log(`[${schema}.${funcName}] 📝 NEW function migration generated: ${migrationPath}`);
                    changed++;
                }
                continue;
            }
            const remoteDdl = result.rows[0].definition;
            // null return means local and remote normalized DDLs matched.
            const migrationPath = await (0, generateMigration_1.generateFunctionMigrationFile)(schema, funcName, localDdl, remoteDdl, process.cwd(), migrationConfig);
            if (migrationPath) {
                console.log(`[${schema}.${funcName}] 📝 UPDATE function migration generated: ${migrationPath}`);
                changed++;
            }
        }
    }
    finally {
        // Always release the connection, even when a query throws.
        await client.end();
    }
    if (changed === 0) {
        console.log('Functions: no differences found');
    }
    else {
        console.log(`Functions: ${changed} migration(s) generated`);
    }
}
290
/**
 * Fetch remote RLS policies, compare with local <schemaDir>/<schema>/rls/*.sql,
 * and generate DROP + CREATE migrations for changed/new/deleted policies.
 * Local files are treated as the source of truth: remote-only policies are
 * dropped; local-only or differing policies are re-created.
 *
 * @param {string} connectionString - Postgres connection string for the remote DB.
 * @param {string} schemaDir - Root of local schema files, one subdirectory per schema.
 * @param {{naming?: string, dir?: string}|undefined} migrationConfig - Migration naming/dir options.
 */
async function syncRls(connectionString, schemaDir, migrationConfig) {
    const { Client } = await Promise.resolve().then(() => __importStar(require('pg')));
    const client = new Client({ connectionString });
    await client.connect();
    try {
        // Fetch all policies from remote
        const result = await client.query(`
      SELECT schemaname, tablename, policyname, cmd,
             array_to_string(roles, ',') as roles,
             qual, with_check,
             permissive
      FROM pg_policies
      ORDER BY schemaname, tablename, policyname
    `);
        // Build remote policy map: key = "schema.table.policyname"
        const remoteMap = new Map();
        for (const row of result.rows) {
            // Drop the "public." prefix so generated DDL matches local style.
            const tableQualified = row.schemaname === 'public'
                ? row.tablename
                : `${row.schemaname}.${row.tablename}`;
            const key = `${row.schemaname}.${row.tablename}.${row.policyname}`;
            remoteMap.set(key, {
                policyName: row.policyname,
                tableName: tableQualified,
                cmd: row.cmd ?? 'ALL',
                roles: row.roles ?? 'public',
                qual: row.qual ?? null,
                withCheck: row.with_check ?? null,
                // NOTE(review): assumes pg_policies.permissive is the string
                // 'PERMISSIVE'/'RESTRICTIVE' — confirm against the server version.
                permissive: row.permissive !== 'RESTRICTIVE'
            });
        }
        // Build local policy map from rls/*.sql files
        const localMap = new Map(); // key → raw SQL
        if (fs.existsSync(schemaDir)) {
            for (const schemaEntry of fs.readdirSync(schemaDir, { withFileTypes: true })) {
                if (!schemaEntry.isDirectory())
                    continue;
                const rlsDir = path.join(schemaDir, schemaEntry.name, 'rls');
                if (!fs.existsSync(rlsDir))
                    continue;
                for (const file of fs.readdirSync(rlsDir)) {
                    if (!file.endsWith('.sql'))
                        continue;
                    // filename: tablename__policyname.sql
                    // (only the first "__" is replaced — String.replace with a string
                    // pattern substitutes a single occurrence)
                    const baseName = file.replace(/\.sql$/, '');
                    const key = `${schemaEntry.name}.${baseName.replace('__', '.')}`;
                    localMap.set(key, fs.readFileSync(path.join(rlsDir, file), 'utf-8'));
                }
            }
        }
        // Detect changes: remote policies not matching local
        const changed = [];
        const dropped = [];
        // Policies in remote that differ from local (or absent in local → drop)
        for (const [key, remotePolicy] of remoteMap) {
            if (!localMap.has(key)) {
                // Remote has policy but local doesn't — local wins: drop it
                dropped.push({ policyName: remotePolicy.policyName, tableName: remotePolicy.tableName });
            }
            // If local has it, we trust local as SSoT — will re-create from local below
        }
        // Policies in local but not in remote, or that differ → re-create
        for (const [key, localSql] of localMap) {
            const remotePolicy = remoteMap.get(key);
            // Whitespace-insensitive comparison: collapse runs of whitespace.
            const normalizedLocal = localSql.replace(/\s+/g, ' ').trim();
            if (!remotePolicy) {
                // New policy in local
                changed.push(parsePolicySql(localSql, key));
            }
            else {
                // Compare: rebuild remote SQL and compare normalized
                const remoteSql = buildPolicySql(remotePolicy);
                if (normalizedLocal !== remoteSql.replace(/\s+/g, ' ').trim()) {
                    changed.push(parsePolicySql(localSql, key));
                }
            }
        }
        // Returns null when both lists are empty (nothing to migrate).
        const migrationPath = await (0, generateMigration_1.generateRlsMigrationFile)(changed, dropped, process.cwd(), migrationConfig);
        if (!migrationPath) {
            console.log('RLS: no differences found');
        }
        else {
            console.log(`RLS: ${changed.length} changed, ${dropped.length} dropped`);
        }
    }
    finally {
        // Always release the connection, even when a query throws.
        await client.end();
    }
}
383
/**
 * Rebuild a canonical single-line CREATE POLICY statement from a policy record,
 * for whitespace-normalized comparison against local SQL.
 */
function buildPolicySql(p) {
    const mode = p.permissive ? 'PERMISSIVE' : 'RESTRICTIVE';
    const parts = [
        `CREATE POLICY "${p.policyName}" ON ${p.tableName} AS ${mode} FOR ${p.cmd} TO ${p.roles}`,
    ];
    if (p.qual) {
        parts.push(`USING (${p.qual})`);
    }
    if (p.withCheck) {
        parts.push(`WITH CHECK (${p.withCheck})`);
    }
    return `${parts.join(' ')};`;
}
392
function parsePolicySql(sql, key) {
    // Best-effort parse of CREATE POLICY DDL to RlsPolicy struct.
    // Falls back to the "schema.table.policy" key for name/table when the
    // SQL does not match; USING/WITH CHECK capture stops at the first ')'
    // (nested parentheses are not handled).
    const capture = (re) => {
        const m = sql.match(re);
        return m ? m[1] : null;
    };
    const keyParts = key.split('.');
    return {
        policyName: capture(/CREATE POLICY\s+"?([^"\s]+)"?/i) ?? keyParts[keyParts.length - 1],
        tableName: capture(/ON\s+(\S+)/i) ?? `${keyParts[0]}.${keyParts[1]}`,
        cmd: capture(/FOR\s+(SELECT|INSERT|UPDATE|DELETE|ALL)/i) ?? 'ALL',
        roles: capture(/TO\s+([^\n]+?)(?:\s+USING|\s+WITH CHECK|;|$)/i)?.trim() ?? 'public',
        qual: capture(/USING\s*\(([^)]+)\)/i),
        withCheck: capture(/WITH CHECK\s*\(([^)]+)\)/i),
        permissive: !/RESTRICTIVE/i.test(sql)
    };
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "supatool",
3
- "version": "0.4.2",
3
+ "version": "0.5.0",
4
4
  "description": "CLI for Supabase: extract schema (tables, views, RLS, RPC) to files + llms.txt for LLM, deploy local schema, seed export. CRUD code gen deprecated.",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",