@coldge.com/gitbase 1.0.2 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,11 +2,16 @@ import fs from 'fs/promises';
2
2
  import path from 'path';
3
3
  import chalk from 'chalk';
4
4
  import readline from 'readline';
5
- import { getConfig, runQuery } from '../api/supabase.js';
5
+ import { getConfig, runQuery, runTransaction, createPool, endPool } from '../api/supabase.js';
6
6
  import { extractSchema } from '../schema/extractor.js';
7
- import { canonicalize } from '../utils/hashing.js';
7
+ import { canonicalize, hashString } from '../utils/hashing.js';
8
+ import { generateTableAlter, generateEnumAlter } from '../utils/sqlDiff.js';
8
9
  import toposort from 'toposort';
9
- const GITBASE_DIR = '.gitbase';
10
+ // Execution order matters: extensions before types; grants and publications last
11
+ const SCHEMA_TYPES = [
12
+ 'extensions', 'types', 'sequences', 'tables', 'matviews',
13
+ 'views', 'functions', 'triggers', 'policies', 'grants', 'publications'
14
+ ];
10
15
  export async function push(argv) {
11
16
  const config = await getConfig();
12
17
  if (!config) {
@@ -28,14 +33,22 @@ export async function push(argv) {
28
33
  console.log(chalk.green('Permission verified.'));
29
34
  }
30
35
  const filterFiles = argv.files || [];
31
- console.log(chalk.blue(`Pushing local files to branch '${currentBranch}' (${projectRef})...`));
32
- // 1. Fetch current live state to make smart decisions
33
- console.log(chalk.blue(`Checking current database state...`));
34
- const liveSchema = await extractSchema(projectRef);
35
- // 2. Load Local Files as the "Target Schema"
36
- const localSchema = { tables: {}, functions: {}, views: {}, triggers: {}, policies: {}, types: {} };
37
- const types = ['tables', 'functions', 'views', 'triggers', 'policies', 'types'];
38
- for (const type of types) {
36
+ console.log(chalk.blue(`\nFetching current database state...`));
37
+ await createPool(projectRef);
38
+ let liveSchema;
39
+ try {
40
+ liveSchema = await extractSchema(projectRef);
41
+ }
42
+ finally {
43
+ // Keep pool alive — we need it for conflict detection queries via runQuery()
44
+ // We'll call endPool at the very end of push().
45
+ }
46
+ // --- Load Local Files as Target Schema ---
47
+ const localSchema = {
48
+ types: {}, sequences: {}, tables: {}, matviews: {},
49
+ views: {}, functions: {}, triggers: {}, policies: {}
50
+ };
51
+ for (const type of SCHEMA_TYPES) {
39
52
  const dir = path.join('supabase', type);
40
53
  try {
41
54
  const files = await fs.readdir(dir);
@@ -47,35 +60,141 @@ export async function push(argv) {
47
60
  localSchema[type][name] = content;
48
61
  }
49
62
  }
50
- catch (e) { }
63
+ catch { /* directory may not exist yet */ }
51
64
  }
52
- const rl = readline.createInterface({
53
- input: process.stdin,
54
- output: process.stdout
55
- });
65
+ const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
56
66
  const ask = (q) => new Promise(r => rl.question(q, r));
57
- let madeDbChanges = false;
58
- // 3. Execution Plan (Same logic as revert, but local files are the truth)
59
- // TYPES
67
+ // Helper functions declared early so they can be used in both conflict detection and planning
68
+ const skip = (type, name) => filterFiles.length > 0 && !filterFiles.includes(`${type}/${name}.sql`);
69
+ const same = (a, b) => canonicalize(a) === canonicalize(b);
70
+ // =========================================================================
71
+ // CONFLICT DETECTION — compare live DB state against what was last pulled
72
+ // (the HEAD commit tree). If the live DB has changed since our last pull,
73
+ // someone else pushed to the DB and we could be overwriting their work.
74
+ // =========================================================================
75
+ const skippedObjects = new Set(); // type/name pairs to skip in plan
76
+ try {
77
+ const headHash = await fs.readFile(path.join('.gitbase', 'HEAD'), 'utf-8').catch(() => null);
78
+ if (headHash) {
79
+ const headCommitStr = await fs.readFile(path.join('.gitbase', 'objects', headHash), 'utf-8');
80
+ const headCommit = JSON.parse(headCommitStr);
81
+ const headTreeStr = await fs.readFile(path.join('.gitbase', 'objects', headCommit.tree), 'utf-8');
82
+ const headTree = JSON.parse(headTreeStr);
83
+ // headTree: relPath -> hash (e.g. "functions/foo.sql" -> "abc123")
84
+ // For each local change, check if the live DB version differs from HEAD
85
+ let conflictsFound = false;
86
+ for (const type of SCHEMA_TYPES) {
87
+ const liveObjects = liveSchema[type];
88
+ const localObjects = localSchema[type];
89
+ if (!localObjects)
90
+ continue;
91
+ for (const [name, localSql] of Object.entries(localObjects)) {
92
+ if (skip(type, name))
93
+ continue;
94
+ const headKey = `${type}/${name}.sql`;
95
+ const headHash = headTree[headKey];
96
+ if (!headHash)
97
+ continue; // New object — no conflict possible
98
+ const liveSql = liveObjects?.[name];
99
+ if (!liveSql)
100
+ continue; // Object deleted from live — not a conflict
101
+ // Hash the live version the same way commit.ts does
102
+ const liveHash = hashString(liveSql.trim() + '\n');
103
+ // Conflict: live DB was changed by someone else since our last pull
104
+ if (liveHash !== headHash && !same(localSql, liveSql)) {
105
+ if (!conflictsFound) {
106
+ console.log(chalk.yellow('\n⚠️ CONFLICTS DETECTED\n'));
107
+ conflictsFound = true;
108
+ }
109
+ console.log(chalk.yellow(` CONFLICT: ${type}/${name}.sql`));
110
+ console.log(chalk.gray(` Your version: ${hashString(localSql).substring(0, 7)}`));
111
+ console.log(chalk.gray(` Live version: ${liveHash.substring(0, 7)} (changed since your last pull)`));
112
+ const answer = await ask(chalk.white(` [o]verwrite live [s]kip [d]iff > `));
113
+ const choice = answer.trim().toLowerCase();
114
+ if (choice === 's') {
115
+ skippedObjects.add(`${type}/${name}`);
116
+ console.log(chalk.gray(` → Skipped`));
117
+ }
118
+ else if (choice === 'd') {
119
+ console.log(chalk.gray('\n --- Live DB ---'));
120
+ console.log(chalk.red(liveSql.substring(0, 500)));
121
+ console.log(chalk.gray('\n --- Your local ---'));
122
+ console.log(chalk.green(localSql.substring(0, 500)));
123
+ // Ask again after showing diff
124
+ const answer2 = await ask(chalk.white('\n [o]verwrite [s]kip > '));
125
+ if (answer2.trim().toLowerCase() === 's') {
126
+ skippedObjects.add(`${type}/${name}`);
127
+ console.log(chalk.gray(' → Skipped'));
128
+ }
129
+ else {
130
+ console.log(chalk.cyan(' → Will overwrite'));
131
+ }
132
+ }
133
+ else {
134
+ console.log(chalk.cyan(' → Will overwrite'));
135
+ }
136
+ }
137
+ }
138
+ }
139
+ }
140
+ }
141
+ catch { /* HEAD not found or malformed — skip conflict check */ }
142
+ // Update skip function to also check conflict skips
143
+ const skipWithConflicts = (type, name) => skip(type, name) || skippedObjects.has(`${type}/${name}`);
144
+ // =========================================================================
145
+ // PHASE 1: PLAN — collect all SQL statements (interactively if needed)
146
+ // No DB writes happen in this phase.
147
+ // =========================================================================
148
+ const plan = [];
149
+ const timestamp = Math.floor(Date.now() / 1000);
150
+ // EXTENSIONS — CREATE IF NOT EXISTS only, never dropped (too destructive)
151
+ for (const [name, sql] of Object.entries(localSchema.extensions)) {
152
+ if (skipWithConflicts('extensions', name))
153
+ continue;
154
+ if (liveSchema.extensions[name] && same(sql, liveSchema.extensions[name]))
155
+ continue;
156
+ plan.push({ label: `Extension: ${name}`, sqls: [sql] });
157
+ }
158
+ // TYPES (enums, composites, domains)
60
159
  for (const [name, sql] of Object.entries(localSchema.types)) {
61
- if (filterFiles.length > 0 && !filterFiles.includes(`types/${name}.sql`))
160
+ if (skipWithConflicts('types', name))
62
161
  continue;
63
- if (liveSchema.types[name] && canonicalize(sql) === canonicalize(liveSchema.types[name]))
162
+ if (liveSchema.types[name] && same(sql, liveSchema.types[name]))
64
163
  continue;
65
- madeDbChanges = true;
66
- console.log(chalk.cyan(`Pushing Type: ${name}`));
67
- await runQuery(projectRef, `DROP TYPE IF EXISTS public."${name}" CASCADE;`);
68
- await runQuery(projectRef, sql);
164
+ // For enum types, try a safe ADD VALUE first before falling back to DROP+CREATE
165
+ const isEnum = /AS ENUM/i.test(sql);
166
+ if (isEnum && liveSchema.types[name]) {
167
+ const alterStmts = generateEnumAlter(name, liveSchema.types[name], sql);
168
+ if (alterStmts !== null) {
169
+ if (alterStmts.length > 0) {
170
+ plan.push({ label: `Enum Type (ADD VALUE): ${name}`, sqls: alterStmts });
171
+ }
172
+ continue; // handled — skip DROP+CREATE
173
+ }
174
+ // null = values removed or reordered — must DROP+CREATE
175
+ console.log(chalk.yellow(`\nEnum '${name}' has removed/reordered values — full recreation required.`));
176
+ }
177
+ plan.push({
178
+ label: `Type: ${name}`,
179
+ sqls: [`DROP TYPE IF EXISTS public."${name}" CASCADE;`, sql]
180
+ });
181
+ }
182
+ // SEQUENCES — created with IF NOT EXISTS to preserve counter values in production
183
+ for (const [name, sql] of Object.entries(localSchema.sequences)) {
184
+ if (skip('sequences', name))
185
+ continue;
186
+ if (liveSchema.sequences[name] && same(sql, liveSchema.sequences[name]))
187
+ continue;
188
+ plan.push({ label: `Sequence: ${name}`, sqls: [sql] });
69
189
  }
70
- // TABLES (Topological Sort)
190
+ // TABLES topologically sorted by foreign key dependency
71
191
  const tableEdges = [];
72
192
  const tables = localSchema.tables;
73
193
  const tableNames = Object.keys(tables);
74
194
  for (const [name, sql] of Object.entries(tables)) {
75
- const content = sql;
76
195
  const regex = /REFERENCES\s+(?:public\.)?(\w+)/gi;
77
196
  let match;
78
- while ((match = regex.exec(content)) !== null) {
197
+ while ((match = regex.exec(sql)) !== null) {
79
198
  const target = match[1];
80
199
  if (target !== name && tables[target])
81
200
  tableEdges.push([target, name]);
@@ -85,92 +204,226 @@ export async function push(argv) {
85
204
  try {
86
205
  sortedTables = toposort.array(tableNames, tableEdges);
87
206
  }
88
- catch (e) {
207
+ catch {
89
208
  sortedTables = tableNames.sort();
90
209
  }
91
210
  for (const name of sortedTables) {
92
211
  const sql = tables[name];
93
- if (filterFiles.length > 0 && !filterFiles.includes(`tables/${name}.sql`))
212
+ if (!sql || skip('tables', name))
94
213
  continue;
95
- if (liveSchema.tables[name] && canonicalize(sql) === canonicalize(liveSchema.tables[name]))
214
+ if (liveSchema.tables[name] && same(sql, liveSchema.tables[name]))
96
215
  continue;
97
- madeDbChanges = true;
98
- console.log(chalk.yellow(`\nPushing Table: ${name}`));
99
216
  if (liveSchema.tables[name]) {
100
- const answer = await ask(chalk.white(`? Table '${name}' schema changed. Keep existing as backup? (Y/n) > `));
101
- if (answer.toLowerCase() !== 'n') {
102
- const timestamp = Math.floor(Date.now() / 1000);
217
+ // Table already exists — generate ALTER statements
218
+ const alterStmts = generateTableAlter(name, liveSchema.tables[name], sql);
219
+ if (alterStmts.length === 0)
220
+ continue; // Diff detected but no ALTER needed (e.g. only comment change)
221
+ console.log(chalk.yellow(`\nTable schema changed: ${name}`));
222
+ console.log(chalk.gray(` Changes that will be applied (${alterStmts.length} statement${alterStmts.length > 1 ? 's' : ''}):`));
223
+ for (const s of alterStmts)
224
+ console.log(chalk.gray(` → ${s}`));
225
+ console.log();
226
+ const answer = await ask(chalk.white(` Apply smart ALTER in-place (data preserved) or backup + recreate?\n`) +
227
+ chalk.cyan(` [A]lter (default) `) +
228
+ chalk.yellow(`[B]ackup + recreate `) +
229
+ chalk.red(`[D]rop + recreate (data loss)`) +
230
+ chalk.white(`\n Choice [A/b/d] > `));
231
+ const choice = answer.trim().toLowerCase();
232
+ if (choice === 'b') {
103
233
  const backupName = `${name}_backup_${timestamp}`;
104
- await runQuery(projectRef, `ALTER TABLE public."${name}" RENAME TO "${backupName}";`);
105
- console.log(chalk.green('Backup successful.'));
234
+ console.log(chalk.gray(`  Will rename '${name}' to '${backupName}' then CREATE fresh.`));
235
+ plan.push({
236
+ label: `Table (backup+recreate): ${name}`,
237
+ sqls: [
238
+ `ALTER TABLE public."${name}" RENAME TO "${backupName}";`,
239
+ sql
240
+ ]
241
+ });
242
+ }
243
+ else if (choice === 'd') {
244
+ console.log(chalk.red(` → Will DROP '${name}' and recreate (all data lost).`));
245
+ plan.push({
246
+ label: `Table (drop+recreate): ${name}`,
247
+ sqls: [`DROP TABLE IF EXISTS public."${name}" CASCADE;`, sql]
248
+ });
106
249
  }
107
250
  else {
108
- await runQuery(projectRef, `DROP TABLE IF EXISTS public."${name}" CASCADE;`);
251
+ // Default: smart ALTER optionally with a pre-transaction data snapshot
252
+ const snapshotAnswer = await ask(chalk.white(` Save a data snapshot of '${name}' before altering? (y/N) > `));
253
+ const wantsSnapshot = snapshotAnswer.trim().toLowerCase() === 'y';
254
+ const backupName = `${name}_snapshot_${timestamp}`;
255
+ if (wantsSnapshot) {
256
+ console.log(chalk.gray(` → Will snapshot data to '${backupName}', then ALTER in-place.`));
257
+ }
258
+ else {
259
+ console.log(chalk.cyan(` → Will ALTER in-place.`));
260
+ }
261
+ plan.push({
262
+ label: `Table (ALTER): ${name}`,
263
+ sqls: alterStmts,
264
+ // Runs BEFORE the transaction so the copy survives if ALTER rolls back
265
+ preTransactionSqls: wantsSnapshot
266
+ ? [`CREATE TABLE public."${backupName}" AS SELECT * FROM public."${name}";`]
267
+ : undefined
268
+ });
109
269
  }
110
270
  }
111
- await runQuery(projectRef, sql);
271
+ else {
272
+ // New table — just CREATE it (no choice needed)
273
+ console.log(chalk.green(`\nNew table: ${name}`));
274
+ plan.push({ label: `Table (CREATE): ${name}`, sqls: [sql] });
275
+ }
112
276
  }
113
- // VIEWS
277
+ // MATERIALIZED VIEWS
278
+ for (const [name, sql] of Object.entries(localSchema.matviews)) {
279
+ if (skip('matviews', name))
280
+ continue;
281
+ if (liveSchema.matviews[name] && same(sql, liveSchema.matviews[name]))
282
+ continue;
283
+ plan.push({
284
+ label: `Materialized View: ${name}`,
285
+ sqls: [`DROP MATERIALIZED VIEW IF EXISTS public."${name}" CASCADE;`, sql]
286
+ });
287
+ }
288
+ // VIEWS — CREATE OR REPLACE, no DROP needed
114
289
  for (const [name, sql] of Object.entries(localSchema.views)) {
115
- if (filterFiles.length > 0 && !filterFiles.includes(`views/${name}.sql`))
290
+ if (skip('views', name))
116
291
  continue;
117
- if (liveSchema.views[name] && canonicalize(sql) === canonicalize(liveSchema.views[name]))
292
+ if (liveSchema.views[name] && same(sql, liveSchema.views[name]))
118
293
  continue;
119
- madeDbChanges = true;
120
- console.log(chalk.cyan(`Pushing View: ${name}`));
121
- await runQuery(projectRef, `DROP VIEW IF EXISTS public."${name}" CASCADE;`);
122
- await runQuery(projectRef, sql);
294
+ // The stored SQL already contains CREATE OR REPLACE VIEW
295
+ plan.push({ label: `View: ${name}`, sqls: [sql] });
123
296
  }
124
- // FUNCTIONS
297
+ // FUNCTIONS — support CREATE OR REPLACE, no drop needed
125
298
  for (const [name, sql] of Object.entries(localSchema.functions)) {
126
- if (filterFiles.length > 0 && !filterFiles.includes(`functions/${name}.sql`))
299
+ if (skip('functions', name))
127
300
  continue;
128
- if (liveSchema.functions[name] && canonicalize(sql) === canonicalize(liveSchema.functions[name]))
301
+ if (liveSchema.functions[name] && same(sql, liveSchema.functions[name]))
129
302
  continue;
130
- madeDbChanges = true;
131
- console.log(chalk.cyan(`Pushing Function: ${name}`));
132
- await runQuery(projectRef, sql);
303
+ plan.push({ label: `Function: ${name}`, sqls: [sql] });
133
304
  }
134
305
  // TRIGGERS
135
306
  for (const [name, sql] of Object.entries(localSchema.triggers)) {
136
- if (filterFiles.length > 0 && !filterFiles.includes(`triggers/${name}.sql`))
307
+ if (skip('triggers', name))
137
308
  continue;
138
- if (liveSchema.triggers[name] && canonicalize(sql) === canonicalize(liveSchema.triggers[name]))
309
+ if (liveSchema.triggers[name] && same(sql, liveSchema.triggers[name]))
139
310
  continue;
140
- madeDbChanges = true;
141
- console.log(chalk.cyan(`Pushing Trigger: ${name}`));
142
311
  const match = sql.match(/ON\s+(public\.)?("?\w+"?)/i);
312
+ const triggerSqls = [];
143
313
  if (match) {
144
314
  const tableName = match[2].replace(/"/g, '');
145
- await runQuery(projectRef, `DROP TRIGGER IF EXISTS "${name}" ON public."${tableName}";`);
315
+ triggerSqls.push(`DROP TRIGGER IF EXISTS "${name}" ON public."${tableName}";`);
146
316
  }
147
- await runQuery(projectRef, sql);
317
+ triggerSqls.push(sql);
318
+ plan.push({ label: `Trigger: ${name}`, sqls: triggerSqls });
148
319
  }
149
320
  // POLICIES
150
321
  for (const [key, sql] of Object.entries(localSchema.policies)) {
151
- if (filterFiles.length > 0 && !filterFiles.includes(`policies/${key}.sql`))
322
+ if (skip('policies', key))
152
323
  continue;
153
- if (liveSchema.policies[key] && canonicalize(sql) === canonicalize(liveSchema.policies[key]))
324
+ if (liveSchema.policies[key] && same(sql, liveSchema.policies[key]))
154
325
  continue;
155
- madeDbChanges = true;
156
326
  const match = sql.match(/CREATE POLICY "([^"]+)"\s+ON\s+(public\.)?("?\w+"?)/i);
157
327
  if (match) {
158
328
  const policyName = match[1];
159
329
  const tableName = match[3].replace(/"/g, '');
160
- console.log(chalk.cyan(`Pushing Policy: ${policyName} on ${tableName}`));
161
- try {
162
- await runQuery(projectRef, `ALTER TABLE public."${tableName}" ENABLE ROW LEVEL SECURITY;`);
330
+ plan.push({
331
+ label: `Policy: ${policyName} on ${tableName}`,
332
+ sqls: [
333
+ `ALTER TABLE public."${tableName}" ENABLE ROW LEVEL SECURITY;`,
334
+ `DROP POLICY IF EXISTS "${policyName}" ON public."${tableName}";`,
335
+ sql
336
+ ]
337
+ });
338
+ }
339
+ }
340
+ // GRANTS — REVOKE ALL from known roles first, then re-GRANT from snapshot
341
+ // This ensures stale grants are cleaned up alongside the re-grant.
342
+ for (const [key, sql] of Object.entries(localSchema.grants)) {
343
+ if (skip('grants', key))
344
+ continue;
345
+ if (liveSchema.grants[key] && same(sql, liveSchema.grants[key]))
346
+ continue;
347
+ const grantSqls = [];
348
+ // Parse out object type and name from the key (e.g. "table__profiles" or "function__my_func")
349
+ const isTable = key.startsWith('table__');
350
+ const isFunc = key.startsWith('function__');
351
+ const objName = key.replace(/^(table|function)__/, '');
352
+ // Collect all unique grantees from snapshot SQL to revoke from
353
+ const grantees = [...new Set((sql.match(/TO (\w+)/gi) || []).map(m => m.replace(/^TO /i, '')))];
354
+ if (grantees.length > 0) {
355
+ if (isTable) {
356
+ grantSqls.push(`REVOKE ALL PRIVILEGES ON TABLE public."${objName}" FROM ${grantees.join(', ')};`);
163
357
  }
164
- catch (e) { }
165
- await runQuery(projectRef, `DROP POLICY IF EXISTS "${policyName}" ON public."${tableName}";`);
166
- await runQuery(projectRef, sql);
358
+ else if (isFunc) {
359
+ grantSqls.push(`REVOKE ALL PRIVILEGES ON FUNCTION public."${objName}" FROM ${grantees.join(', ')};`);
360
+ }
361
+ }
362
+ // Then re-apply each GRANT statement from the snapshot
363
+ for (const grantStmt of sql.split('\n').filter(Boolean)) {
364
+ grantSqls.push(grantStmt);
167
365
  }
366
+ plan.push({ label: `Grants on ${key.replace('__', ': ')}`, sqls: grantSqls });
367
+ }
368
+ // PUBLICATIONS
369
+ for (const [name, sql] of Object.entries(localSchema.publications)) {
370
+ if (skip('publications', name))
371
+ continue;
372
+ if (liveSchema.publications[name] && same(sql, liveSchema.publications[name]))
373
+ continue;
374
+ plan.push({
375
+ label: `Publication: ${name}`,
376
+ sqls: [`DROP PUBLICATION IF EXISTS "${name}";`, sql]
377
+ });
168
378
  }
169
379
  rl.close();
170
- if (!madeDbChanges) {
380
+ if (plan.length === 0) {
171
381
  console.log(chalk.green('\nEverything is already up to date. No push required.'));
382
+ return;
383
+ }
384
+ // =========================================================================
385
+ // PHASE 2: EXECUTE — all statements in a single atomic transaction
386
+ // =========================================================================
387
+ console.log(chalk.blue(`\nApplying ${plan.length} change(s)...`));
388
+ for (const step of plan) {
389
+ const prefix = step.preTransactionSqls?.length ? '(+data snapshot) ' : '';
390
+ console.log(chalk.cyan(` → ${prefix}${step.label}`));
391
+ }
392
+ // Run any pre-transaction data snapshots first (outside the transaction so they
393
+ // survive even if the subsequent ALTER rolls back)
394
+ const preSteps = plan.filter(p => p.preTransactionSqls?.length);
395
+ if (preSteps.length > 0) {
396
+ console.log(chalk.blue(`\nCreating ${preSteps.length} data snapshot(s) (outside transaction)...`));
397
+ try {
398
+ for (const step of preSteps) {
399
+ for (const sql of step.preTransactionSqls) {
400
+ await runQuery(projectRef, sql);
401
+ console.log(chalk.gray(` ✓ ${sql}`));
402
+ }
403
+ }
404
+ }
405
+ catch (e) {
406
+ console.error(chalk.red(`\n🔥 Data snapshot failed — aborting push.`));
407
+ console.error(chalk.red(` Reason: ${e.message}`));
408
+ return;
409
+ }
410
+ }
411
+ const allSqls = plan.flatMap(p => p.sqls);
412
+ try {
413
+ await runTransaction(projectRef, allSqls);
414
+ console.log(chalk.green('\n✅ Push complete! All changes applied atomically.'));
415
+ if (preSteps.length > 0) {
416
+ console.log(chalk.gray(' Data snapshot table(s) preserved. Drop them manually when no longer needed.'));
417
+ }
418
+ }
419
+ catch (e) {
420
+ console.error(chalk.red(`\n🔥 Push failed — database unchanged (transaction rolled back).`));
421
+ if (preSteps.length > 0) {
422
+ console.error(chalk.yellow(` Data snapshot(s) were created before the failure and are still intact.`));
423
+ }
424
+ console.error(chalk.red(` Reason: ${e.message}`));
172
425
  }
173
- else {
174
- console.log(chalk.green('\nPush complete! Live database updated.'));
426
+ finally {
427
+ await endPool(projectRef);
175
428
  }
176
429
  }