supatool 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1344,7 +1344,9 @@ async function generateIndexFile(definitions, outputDir, separateDirectories = t
1344
1344
  * Classify and output definitions
1345
1345
  */
1346
1346
  async function extractDefinitions(options) {
1347
- const { connectionString, outputDir, separateDirectories = true, tablesOnly = false, viewsOnly = false, all = false, tablePattern = '*', force = false, schemas = ['public'], version } = options;
1347
+ const { connectionString, outputDir, separateDirectories = true, tablesOnly = false, viewsOnly = false, all = false, tablePattern = '*', force = false, schemas: schemasOption = ['public'], excludeSchemas = [], allSchemas: useAllSchemas = false, version } = options;
1348
+ // schemas will be resolved after DB connect when useAllSchemas is true
1349
+ let schemas = schemasOption;
1348
1350
  // Disable Node.js SSL certificate verification
1349
1351
  process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
1350
1352
  // Connection string validation
@@ -1463,6 +1465,17 @@ async function extractDefinitions(options) {
1463
1465
  console.log(` Connection string length: ${encodedConnectionString.length}`);
1464
1466
  await client.connect();
1465
1467
  spinner.text = 'Connected to database';
1468
+ // Resolve schemas: when --all-schemas, fetch all from DB and subtract excludeSchemas
1469
+ if (useAllSchemas) {
1470
+ const SYSTEM_SCHEMAS = ['information_schema', 'pg_catalog', 'pg_toast', 'pg_temp_1', 'pg_toast_temp_1'];
1471
+ const discovered = await fetchAllSchemas(client);
1472
+ schemas = discovered.filter(s => !SYSTEM_SCHEMAS.includes(s) && !excludeSchemas.includes(s));
1473
+ console.log(`Schemas (all minus excluded): ${schemas.join(', ')}`);
1474
+ }
1475
+ else if (excludeSchemas.length > 0) {
1476
+ schemas = schemas.filter(s => !excludeSchemas.includes(s));
1477
+ console.log(`Schemas (filtered): ${schemas.join(', ')}`);
1478
+ }
1466
1479
  let allDefinitions = [];
1467
1480
  // Initialize progress tracker
1468
1481
  const progress = {
@@ -56,8 +56,10 @@ async function fetchRemoteSchemas(connectionString, targetTableNames) {
56
56
  // console.log('Connecting to database...');
57
57
  try {
58
58
  // Basic connection string check
59
- if (!connectionString || !connectionString.startsWith('postgresql://')) {
60
- throw new Error('Invalid connection string. Please specify a valid postgresql:// format.');
59
+ if (!connectionString ||
60
+ (!connectionString.startsWith('postgresql://') &&
61
+ !connectionString.startsWith('postgres://'))) {
62
+ throw new Error('Invalid connection string. Please specify a valid postgresql:// or postgres:// format.');
61
63
  }
62
64
  // Parse URL and display connection info
63
65
  const url = new URL(connectionString);
@@ -140,7 +140,7 @@ function generateAlterStatements(tableName, fromDdl, toDdl) {
140
140
  * Generate migration file
141
141
  */
142
142
  async function generateMigrationFile(tableName, fromDdl, toDdl, projectDir = '.', migrationConfig) {
143
- const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
143
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'db/migrations');
144
144
  // Create migrations directory
145
145
  if (!fs.existsSync(migrationDir)) {
146
146
  fs.mkdirSync(migrationDir, { recursive: true });
@@ -209,7 +209,7 @@ function analyzeDiffForTemplate(fromDdl, toDdl) {
209
209
  * Generate manual migration template
210
210
  */
211
211
  async function generateManualMigrationTemplate(tableName, fromDdl, toDdl, projectDir, migrationConfig) {
212
- const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
212
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'db/migrations');
213
213
  if (!fs.existsSync(migrationDir)) {
214
214
  fs.mkdirSync(migrationDir, { recursive: true });
215
215
  }
@@ -282,7 +282,7 @@ ${migrationStatements.join('\n')}
282
282
  * (old exists on remote, new exists on local, columns are highly similar)
283
283
  */
284
284
  async function generateRenameTableMigrationFile(schema, oldName, newName, projectDir = '.', migrationConfig) {
285
- const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
285
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'db/migrations');
286
286
  if (!fs.existsSync(migrationDir)) {
287
287
  fs.mkdirSync(migrationDir, { recursive: true });
288
288
  }
@@ -320,7 +320,7 @@ async function generateFunctionMigrationFile(schema, funcName, localDdl, remoteD
320
320
  const normalizedRemote = normalizeFunctionDdl(remoteDdl);
321
321
  if (normalizedLocal === normalizedRemote)
322
322
  return null;
323
- const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
323
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'db/migrations');
324
324
  if (!fs.existsSync(migrationDir)) {
325
325
  fs.mkdirSync(migrationDir, { recursive: true });
326
326
  }
@@ -342,7 +342,7 @@ ${normalizedLocal.endsWith(';') ? normalizedLocal : normalizedLocal + ';'}
342
342
  async function generateRlsMigrationFile(changedPolicies, droppedPolicies, projectDir = '.', migrationConfig) {
343
343
  if (changedPolicies.length === 0 && droppedPolicies.length === 0)
344
344
  return null;
345
- const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'supabase/migrations');
345
+ const migrationDir = path.join(projectDir, migrationConfig?.dir ?? 'db/migrations');
346
346
  if (!fs.existsSync(migrationDir)) {
347
347
  fs.mkdirSync(migrationDir, { recursive: true });
348
348
  }
@@ -0,0 +1,114 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.migrateRemote = migrateRemote;
37
+ const fs = __importStar(require("fs"));
38
+ const path = __importStar(require("path"));
39
+ const pg_1 = require("pg");
40
+ const MIGRATIONS_TABLE = '_supatool_migrations';
41
+ /**
42
+ * Apply pending SQL migration files to remote DB.
43
+ *
44
+ * Tracks applied migrations in _supatool_migrations table.
45
+ * Files are applied in alphabetical order (timestamp or sequential naming).
46
+ */
47
+ async function migrateRemote(options) {
48
+ const { connectionString, migrationsDir, dryRun = false } = options;
49
+ // Collect .sql files
50
+ if (!fs.existsSync(migrationsDir)) {
51
+ console.error(`❌ Migrations directory not found: ${migrationsDir}`);
52
+ process.exit(1);
53
+ }
54
+ const allFiles = fs.readdirSync(migrationsDir)
55
+ .filter(f => f.endsWith('.sql'))
56
+ .sort();
57
+ if (allFiles.length === 0) {
58
+ console.log('No migration files found.');
59
+ return;
60
+ }
61
+ const client = new pg_1.Client({
62
+ connectionString,
63
+ ssl: { rejectUnauthorized: false }
64
+ });
65
+ try {
66
+ await client.connect();
67
+ // Ensure tracking table exists
68
+ await client.query(`
69
+ CREATE TABLE IF NOT EXISTS ${MIGRATIONS_TABLE} (
70
+ id SERIAL PRIMARY KEY,
71
+ filename TEXT NOT NULL UNIQUE,
72
+ applied_at TIMESTAMPTZ NOT NULL DEFAULT now()
73
+ )
74
+ `);
75
+ // Get already-applied migrations
76
+ const applied = await client.query(`SELECT filename FROM ${MIGRATIONS_TABLE} ORDER BY filename`);
77
+ const appliedSet = new Set(applied.rows.map(r => r.filename));
78
+ const pending = allFiles.filter(f => !appliedSet.has(f));
79
+ if (pending.length === 0) {
80
+ console.log('✅ All migrations already applied.');
81
+ return;
82
+ }
83
+ console.log(`Pending migrations: ${pending.length}`);
84
+ for (const f of pending) {
85
+ console.log(` • ${f}`);
86
+ }
87
+ if (dryRun) {
88
+ console.log('\n(dry-run) No changes applied.');
89
+ return;
90
+ }
91
+ // Apply each pending migration in a transaction
92
+ for (const filename of pending) {
93
+ const filepath = path.join(migrationsDir, filename);
94
+ const sql = fs.readFileSync(filepath, 'utf-8');
95
+ process.stdout.write(`Applying ${filename}... `);
96
+ await client.query('BEGIN');
97
+ try {
98
+ await client.query(sql);
99
+ await client.query(`INSERT INTO ${MIGRATIONS_TABLE} (filename) VALUES ($1)`, [filename]);
100
+ await client.query('COMMIT');
101
+ console.log('✅');
102
+ }
103
+ catch (err) {
104
+ await client.query('ROLLBACK');
105
+ console.log('❌');
106
+ throw err;
107
+ }
108
+ }
109
+ console.log(`\n✅ Applied ${pending.length} migration(s).`);
110
+ }
111
+ finally {
112
+ await client.end();
113
+ }
114
+ }
@@ -56,6 +56,7 @@ function parseTablesYaml(yamlPath) {
56
56
  * @param options SeedGenOptions
57
57
  */
58
58
  async function generateSeedsFromRemote(options) {
59
+ process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
59
60
  const tables = parseTablesYaml(options.tablesYamlPath);
60
61
  // Generate datetime subdir name (e.g. 20250705_1116_supatool)
61
62
  const now = new Date();
@@ -67,7 +68,10 @@ async function generateSeedsFromRemote(options) {
67
68
  const folderName = `${y}${m}${d}_${hh}${mm}_supatool`;
68
69
  const outDir = path_1.default.join(options.outputDir, folderName);
69
70
  // DB connection
70
- const client = new pg_1.Client({ connectionString: options.connectionString });
71
+ const client = new pg_1.Client({
72
+ connectionString: options.connectionString,
73
+ ssl: { rejectUnauthorized: false }
74
+ });
71
75
  await client.connect();
72
76
  const processedFiles = [];
73
77
  for (const { schema, table } of tables) {
@@ -77,8 +81,15 @@ async function generateSeedsFromRemote(options) {
77
81
  fs_1.default.mkdirSync(schemaDir, { recursive: true });
78
82
  }
79
83
  // Fetch data
80
- const res = await client.query(`SELECT * FROM "${schema}"."${table}"`);
81
- const rows = res.rows;
84
+ let rows;
85
+ try {
86
+ const res = await client.query(`SELECT * FROM "${schema}"."${table}"`);
87
+ rows = res.rows;
88
+ }
89
+ catch (err) {
90
+ console.warn(`⚠️ Skip: ${schema}.${table} — ${err.message}`);
91
+ continue;
92
+ }
82
93
  // Output JSON
83
94
  const fileName = `${table}_seed.json`;
84
95
  const filePath = path_1.default.join(schemaDir, fileName);
@@ -118,7 +129,7 @@ async function generateSeedsFromRemote(options) {
118
129
  }
119
130
  /** Utility to get table comment */
120
131
  async function getTableComment(connectionString, schema, table) {
121
- const client = new pg_1.Client({ connectionString });
132
+ const client = new pg_1.Client({ connectionString, ssl: { rejectUnauthorized: false } });
122
133
  await client.connect();
123
134
  try {
124
135
  const res = await client.query(`SELECT obj_description(c.oid) as comment FROM pg_class c JOIN pg_namespace n ON c.relnamespace = n.oid WHERE c.relname = $1 AND n.nspname = $2 AND c.relkind = 'r'`, [table, schema]);
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "supatool",
3
- "version": "0.5.0",
4
- "description": "CLI for Supabase: extract schema (tables, views, RLS, RPC) to files + llms.txt for LLM, deploy local schema, seed export. CRUD code gen deprecated.",
3
+ "version": "0.6.0",
4
+ "description": "CLI for PostgreSQL (Cloud SQL / Supabase): extract schema to files, deploy schema diffs, apply migrations, seed export.",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
7
7
  "bin": {
@@ -9,6 +9,7 @@
9
9
  },
10
10
  "scripts": {
11
11
  "build": "tsc",
12
+ "test": "npm run build && node test/smoke.js",
12
13
  "start": "tsx src/bin/supatool.ts",
13
14
  "local": "tsx src/bin/supatool.ts"
14
15
  },
@@ -17,18 +18,18 @@
17
18
  "bin"
18
19
  ],
19
20
  "keywords": [
21
+ "postgresql",
22
+ "cloud-sql",
20
23
  "supabase",
21
- "crud",
22
24
  "cli",
23
25
  "typescript",
24
- "React",
25
26
  "postgres",
26
- "database"
27
+ "database",
28
+ "migration"
27
29
  ],
28
30
  "author": "IdeaGarage",
29
31
  "license": "MIT",
30
32
  "dependencies": {
31
- "@supabase/supabase-js": "^2.49.4",
32
33
  "commander": "^13.1.0",
33
34
  "diff": "^5.2.0",
34
35
  "dotenv": "^16.5.0",
@@ -1,220 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getTasksByFilters = getTasksByFilters;
4
- exports.getTasksSingleByFilters = getTasksSingleByFilters;
5
- exports.getTasksById = getTasksById;
6
- exports.createTasks = createTasks;
7
- exports.updateTasks = updateTasks;
8
- exports.deleteTasks = deleteTasks;
9
- exports.queryTasks = queryTasks;
10
- // Supabase CRUD operations for tasks
11
- // This file is automatically generated. Do not edit it directly.
12
- const client_1 = require("../client");
13
- // Function to apply filters to a query
14
- function applyFilters(query, filters) {
15
- for (const [key, value] of Object.entries(filters)) {
16
- if (Array.isArray(value)) {
17
- query = query.in(key, value); // Use 'in' for array values
18
- }
19
- else if (typeof value === 'object' && value !== null) {
20
- for (const [operator, val] of Object.entries(value)) {
21
- switch (operator) {
22
- case 'eq':
23
- query = query.eq(key, val);
24
- break;
25
- case 'neq':
26
- query = query.neq(key, val);
27
- break;
28
- case 'like':
29
- query = query.like(key, val);
30
- break;
31
- case 'ilike':
32
- query = query.ilike(key, val);
33
- break;
34
- case 'lt':
35
- query = query.lt(key, val);
36
- break;
37
- case 'lte':
38
- query = query.lte(key, val);
39
- break;
40
- case 'gte':
41
- query = query.gte(key, val);
42
- break;
43
- case 'gt':
44
- query = query.gt(key, val);
45
- break;
46
- case 'contains':
47
- query = query.contains(key, val);
48
- break;
49
- case 'contains_any':
50
- query = query.contains_any(key, val);
51
- break;
52
- case 'contains_all':
53
- query = query.contains_all(key, val);
54
- break;
55
- // Add more operators as needed
56
- default:
57
- throw new Error('Unsupported operator: ' + operator);
58
- }
59
- }
60
- }
61
- else {
62
- query = query.eq(key, value); // Default to 'eq' for simple values
63
- }
64
- }
65
- return query;
66
- }
67
- // Read multiple rows with dynamic filters
68
- async function getTasksByFilters({ filters }) {
69
- try {
70
- let query = client_1.supabase.from('tasks').select('*');
71
- query = applyFilters(query, filters);
72
- const result = await query;
73
- if (result.error) {
74
- throw new Error(`Failed to fetch tasks: ${result.error.message}`);
75
- }
76
- return result.data || [];
77
- }
78
- catch (error) {
79
- console.error('Error in getTasksByFilters:', error);
80
- throw error;
81
- }
82
- }
83
- // Read a single row with dynamic filters
84
- async function getTasksSingleByFilters({ filters }) {
85
- try {
86
- let query = client_1.supabase.from('tasks').select('*');
87
- query = applyFilters(query, filters).single();
88
- const result = await query;
89
- if (result.error) {
90
- if (result.error.code === 'PGRST116') {
91
- return null;
92
- }
93
- throw new Error(`Failed to fetch tasks: ${result.error.message}`);
94
- }
95
- return result.data;
96
- }
97
- catch (error) {
98
- console.error('Error in getTasksSingleByFilters:', error);
99
- throw error;
100
- }
101
- }
102
- // Read single row using id
103
- async function getTasksById({ id }) {
104
- if (!id) {
105
- throw new Error('ID is required');
106
- }
107
- try {
108
- const result = await client_1.supabase
109
- .from('tasks')
110
- .select('*')
111
- .eq('id', id)
112
- .single();
113
- if (result.error) {
114
- if (result.error.code === 'PGRST116') {
115
- return null;
116
- }
117
- throw new Error(`Failed to fetch tasks: ${result.error.message}`);
118
- }
119
- return result.data;
120
- }
121
- catch (error) {
122
- console.error('Error in getTasksById:', error);
123
- throw error;
124
- }
125
- }
126
- // Create Function
127
- async function createTasks({ data }) {
128
- if (!data) {
129
- throw new Error('Data is required for creation');
130
- }
131
- try {
132
- const result = await client_1.supabase
133
- .from('tasks')
134
- .insert([data])
135
- .select()
136
- .single();
137
- if (result.error) {
138
- throw new Error(`Failed to create tasks: ${result.error.message}`);
139
- }
140
- if (!result.data) {
141
- throw new Error('No data returned after creation');
142
- }
143
- return result.data;
144
- }
145
- catch (error) {
146
- console.error('Error in createTasks:', error);
147
- throw error;
148
- }
149
- }
150
- // Update Function
151
- async function updateTasks({ id, data }) {
152
- if (!id) {
153
- throw new Error('ID is required for update');
154
- }
155
- if (!data || Object.keys(data).length === 0) {
156
- throw new Error('Update data is required');
157
- }
158
- try {
159
- const result = await client_1.supabase
160
- .from('tasks')
161
- .update(data)
162
- .eq('id', id)
163
- .select()
164
- .single();
165
- if (result.error) {
166
- if (result.error.code === 'PGRST116') {
167
- throw new Error(`tasks with ID ${id} not found`);
168
- }
169
- throw new Error(`Failed to update tasks: ${result.error.message}`);
170
- }
171
- if (!result.data) {
172
- throw new Error(`tasks with ID ${id} not found`);
173
- }
174
- return result.data;
175
- }
176
- catch (error) {
177
- console.error('Error in updateTasks:', error);
178
- throw error;
179
- }
180
- }
181
- // Delete Function
182
- async function deleteTasks({ id }) {
183
- if (!id) {
184
- throw new Error('ID is required for deletion');
185
- }
186
- try {
187
- const result = await client_1.supabase
188
- .from('tasks')
189
- .delete()
190
- .eq('id', id);
191
- if (result.error) {
192
- throw new Error(`Failed to delete tasks: ${result.error.message}`);
193
- }
194
- return true;
195
- }
196
- catch (error) {
197
- console.error('Error in deleteTasks:', error);
198
- throw error;
199
- }
200
- }
201
- // Custom query function
202
- async function queryTasks({ query }) {
203
- if (!query) {
204
- throw new Error('Query is required');
205
- }
206
- try {
207
- const result = await client_1.supabase
208
- .from('tasks')
209
- .select(query);
210
- if (result.error) {
211
- throw new Error(`Failed to execute query: ${result.error.message}`);
212
- }
213
- return result.data || [];
214
- }
215
- catch (error) {
216
- console.error('Error in queryTasks:', error);
217
- throw error;
218
- }
219
- }
220
- // All functions are exported individually above