@coldge.com/gitbase 1.0.2 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +65 -15
- package/dist/api/supabase.js +139 -32
- package/dist/commands/branch.js +86 -14
- package/dist/commands/diff.js +180 -94
- package/dist/commands/init.js +8 -3
- package/dist/commands/log.js +85 -11
- package/dist/commands/merge.js +226 -15
- package/dist/commands/pull.js +171 -0
- package/dist/commands/push.js +324 -71
- package/dist/commands/revert.js +247 -95
- package/dist/commands/snapshot.js +49 -0
- package/dist/commands/stash.js +211 -0
- package/dist/commands/status.js +46 -38
- package/dist/commands/verify.js +120 -0
- package/dist/index.js +90 -10
- package/dist/schema/extractor.js +183 -26
- package/dist/schema/queries.js +160 -8
- package/dist/utils/hashing.js +52 -3
- package/dist/utils/sqlDiff.js +245 -0
- package/package.json +2 -1
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* stash.ts — Save and restore uncommitted local schema changes.
|
|
3
|
+
*
|
|
4
|
+
* Stash entries are stored as tree objects in .gitbase/objects/ (same as
|
|
5
|
+
* regular commits), with an index at .gitbase/stash (JSON array).
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* gitb stash Save current supabase/ state (alias for 'push')
|
|
9
|
+
* gitb stash push [-m "msg"] Save with a custom message
|
|
10
|
+
* gitb stash pop Restore top stash entry and remove it
|
|
11
|
+
* gitb stash apply [n] Restore stash entry N (0-indexed) but keep it
|
|
12
|
+
* gitb stash list List all stash entries
|
|
13
|
+
* gitb stash drop [n] Delete stash entry N without applying
|
|
14
|
+
* gitb stash clear Delete all stash entries
|
|
15
|
+
*/
|
|
16
|
+
import fs from 'fs/promises';
|
|
17
|
+
import path from 'path';
|
|
18
|
+
import chalk from 'chalk';
|
|
19
|
+
import { hashString } from '../utils/hashing.js';
|
|
20
|
+
const GITBASE_DIR = '.gitbase';
|
|
21
|
+
const OBJECTS_DIR = path.join(GITBASE_DIR, 'objects');
|
|
22
|
+
const STASH_FILE = path.join(GITBASE_DIR, 'stash');
|
|
23
|
+
const HEAD_FILE = path.join(GITBASE_DIR, 'HEAD');
|
|
24
|
+
// ---------------------------------------------------------------------------
|
|
25
|
+
// Helpers
|
|
26
|
+
// ---------------------------------------------------------------------------
|
|
27
|
+
/**
 * Read the stash index from .gitbase/stash.
 *
 * @returns {Promise<Array<object>>} Stash entries (newest first), or an
 *   empty array when the file is missing, unreadable, or does not contain
 *   a JSON array.
 */
async function readStash() {
    try {
        const raw = await fs.readFile(STASH_FILE, 'utf-8');
        const parsed = JSON.parse(raw);
        // BUG FIX: a corrupted or hand-edited stash file holding non-array
        // JSON (e.g. an object or a string) was previously returned as-is
        // and crashed callers that use array operations (unshift/splice).
        // Treat anything that is not an array as "no entries".
        return Array.isArray(parsed) ? parsed : [];
    }
    catch {
        // Missing file or invalid JSON — both mean there are no stash entries.
        return [];
    }
}
|
|
36
|
+
/** Persist the full stash entry list to .gitbase/stash as pretty-printed JSON. */
async function writeStash(entries) {
    const serialized = JSON.stringify(entries, null, 2);
    await fs.writeFile(STASH_FILE, serialized, 'utf-8');
}
|
|
39
|
+
/** Write a content-addressed blob into .gitbase/objects/, creating the directory if needed. */
async function saveObject(hash, content) {
    const target = path.join(OBJECTS_DIR, hash);
    await fs.mkdir(OBJECTS_DIR, { recursive: true });
    await fs.writeFile(target, content, 'utf-8');
}
|
|
43
|
+
/**
 * Collect every .sql file under supabase/ into a flat map of
 * {posixRelativePath -> fileContent}. A missing directory or a read error
 * yields an empty (or partial) map rather than throwing, mirroring the
 * best-effort behavior commits rely on.
 */
async function buildTree() {
    const tree = {};
    // Iterative depth-first traversal over [absoluteDir, relativeBase] pairs
    // instead of a recursive closure.
    const pending = [['supabase', '']];
    try {
        while (pending.length > 0) {
            const [dir, base] = pending.pop();
            const entries = await fs.readdir(dir, { withFileTypes: true }).catch(() => []);
            for (const entry of entries) {
                const fullPath = path.join(dir, entry.name);
                // Tree keys always use forward slashes, even on Windows.
                const relPath = path.join(base, entry.name).replace(/\\/g, '/');
                if (entry.isDirectory()) {
                    pending.push([fullPath, relPath]);
                }
                else if (entry.name.endsWith('.sql')) {
                    tree[relPath] = await fs.readFile(fullPath, 'utf-8');
                }
            }
        }
    }
    catch {
        // Best-effort: on any unexpected read error, return what was collected.
    }
    return tree;
}
|
|
62
|
+
/**
 * Persist every blob in the tree map, then persist a tree object listing
 * {relPath -> blobHash}. Keys are sorted so identical trees always
 * serialize — and hash — identically.
 *
 * @returns {Promise<string>} Hash of the stored tree object.
 */
async function saveTree(tree) {
    const manifest = {};
    for (const [relPath, content] of Object.entries(tree)) {
        const blobHash = hashString(content);
        await saveObject(blobHash, content);
        manifest[relPath] = blobHash;
    }
    const orderedEntries = Object.entries(manifest).sort(([a], [b]) => a.localeCompare(b));
    const treeJson = JSON.stringify(Object.fromEntries(orderedEntries));
    const treeHash = hashString(treeJson);
    await saveObject(treeHash, treeJson);
    return treeHash;
}
|
|
76
|
+
/**
 * Replace the entire supabase/ directory with the contents of the tree
 * object identified by treeHash. NOTE: supabase/ is wiped first, so any
 * local files not captured in the tree are lost.
 */
async function restoreTree(treeHash) {
    const manifestRaw = await fs.readFile(path.join(OBJECTS_DIR, treeHash), 'utf-8');
    const manifest = JSON.parse(manifestRaw);
    await fs.rm('supabase', { recursive: true, force: true });
    for (const [relPath, blobHash] of Object.entries(manifest)) {
        const destination = path.join('supabase', relPath);
        await fs.mkdir(path.dirname(destination), { recursive: true });
        const blob = await fs.readFile(path.join(OBJECTS_DIR, blobHash), 'utf-8');
        await fs.writeFile(destination, blob, 'utf-8');
    }
}
|
|
88
|
+
/** Read the active branch name from .gitbase/config; 'unknown' on any failure. */
async function getCurrentBranch() {
    try {
        const configFile = path.join(GITBASE_DIR, 'config');
        const config = JSON.parse(await fs.readFile(configFile, 'utf-8'));
        return config.currentBranch ?? 'unknown';
    }
    catch {
        // Missing or corrupt config — fall back rather than crash.
        return 'unknown';
    }
}
|
|
97
|
+
// ---------------------------------------------------------------------------
|
|
98
|
+
// Subcommands
|
|
99
|
+
// ---------------------------------------------------------------------------
|
|
100
|
+
/**
 * Snapshot the current supabase/ tree as a new stash entry at stash@{0}.
 * Warns and does nothing when there is nothing to stash.
 */
async function stashPush(message) {
    const tree = await buildTree();
    const fileCount = Object.keys(tree).length;
    if (fileCount === 0) {
        console.log(chalk.yellow('Nothing to stash (supabase/ is empty or missing).'));
        return;
    }
    const branch = await getCurrentBranch();
    const treeHash = await saveTree(tree);
    const msg = message ?? `On ${branch}: WIP`;
    const entries = await readStash();
    // Newest entry goes first, matching git's stash@{0} convention.
    entries.unshift({
        tree: treeHash,
        message: msg,
        timestamp: new Date().toISOString(),
        branch
    });
    await writeStash(entries);
    console.log(chalk.green(`Saved working state as stash@{0}: ${msg}`));
    console.log(chalk.gray(` ${fileCount} file(s) stashed. Run 'gitb stash pop' to restore.`));
}
|
|
121
|
+
/** Print every stash entry, newest first, in git-like stash@{N} format. */
async function stashList() {
    const entries = await readStash();
    if (entries.length === 0) {
        console.log(chalk.yellow('No stash entries found.'));
        return;
    }
    console.log(chalk.cyan('Stash list:'));
    for (const [i, e] of entries.entries()) {
        const date = new Date(e.timestamp).toLocaleString();
        console.log(` stash@{${i}}: ${e.message} ${chalk.gray(`(${e.branch}, ${date})`)}`);
    }
}
|
|
133
|
+
/**
 * Restore the tree from stash entry N into supabase/.
 * With `remove` true (pop semantics) the entry is deleted afterwards;
 * otherwise it is kept for reuse.
 */
async function stashApply(index, remove) {
    const entries = await readStash();
    if (entries.length === 0) {
        console.log(chalk.yellow('No stash entries to apply.'));
        return;
    }
    const outOfRange = index < 0 || index >= entries.length;
    if (outOfRange) {
        console.error(chalk.red(`Invalid stash index ${index}. Use 'gitb stash list' to see available entries.`));
        return;
    }
    const entry = entries[index];
    console.log(chalk.blue(`Restoring stash@{${index}}: ${entry.message}`));
    await restoreTree(entry.tree);
    if (!remove) {
        console.log(chalk.green(`Stash@{${index}} applied (entry kept — use 'gitb stash drop ${index}' to remove).`));
        return;
    }
    entries.splice(index, 1);
    await writeStash(entries);
    console.log(chalk.green(`Stash@{${index}} applied and removed.`));
}
|
|
155
|
+
/** Delete stash entry N without applying it. */
async function stashDrop(index) {
    const entries = await readStash();
    if (index < 0 || index >= entries.length) {
        console.error(chalk.red(`Invalid stash index ${index}.`));
        return;
    }
    const [dropped] = entries.splice(index, 1);
    await writeStash(entries);
    console.log(chalk.red(`Dropped stash@{${index}}: ${dropped.message}`));
}
|
|
165
|
+
/** Remove every stash entry by resetting the index to an empty array. */
async function stashClear() {
    await writeStash([]);
    console.log(chalk.red('All stash entries cleared.'));
}
|
|
169
|
+
// ---------------------------------------------------------------------------
|
|
170
|
+
// Entry point — router
|
|
171
|
+
// ---------------------------------------------------------------------------
|
|
172
|
+
/**
 * Entry point for `gitb stash`. Verifies the repo is initialized, then
 * routes to the subcommand handlers above.
 *
 * @param {object} argv - Parsed yargs arguments. Recognized fields:
 *   subcommand (push|save|pop|apply|list|ls|drop|clear; default 'push'),
 *   message / m (stash message for push/save),
 *   index (entry number for apply/drop).
 */
export async function stash(argv) {
    // Verify gitbase is initialized
    try {
        await fs.access(path.join(GITBASE_DIR, 'config'));
    }
    catch {
        console.error(chalk.red('Not initialized. Run `gitb init` first.'));
        return;
    }
    // Shared parser for the apply/drop index argument; falls back to 0 for
    // anything non-numeric. (Previously duplicated in both case arms, and
    // used the coercing globals isNaN/parseInt instead of the strict
    // Number.isNaN/Number.parseInt.)
    const parseIndex = () => {
        const idx = typeof argv.index === 'number'
            ? argv.index
            : Number.parseInt(argv.index ?? '0', 10);
        return Number.isNaN(idx) ? 0 : idx;
    };
    const sub = argv.subcommand ?? argv._?.[1] ?? 'push';
    switch (sub) {
        case 'push':
        case 'save':
            await stashPush(argv.message ?? argv.m);
            break;
        case 'pop':
            // Pop is "apply top entry, then remove it".
            await stashApply(0, true);
            break;
        case 'apply':
            await stashApply(parseIndex(), false);
            break;
        case 'list':
        case 'ls':
            await stashList();
            break;
        case 'drop':
            await stashDrop(parseIndex());
            break;
        case 'clear':
            await stashClear();
            break;
        default:
            // Default bare 'gitb stash' saves
            await stashPush(argv.message ?? argv.m);
    }
}
|
package/dist/commands/status.js
CHANGED
|
@@ -3,32 +3,20 @@ import path from 'path';
|
|
|
3
3
|
import chalk from 'chalk';
|
|
4
4
|
import { extractSchema } from '../schema/extractor.js';
|
|
5
5
|
import { canonicalize } from '../utils/hashing.js';
|
|
6
|
+
import { createPool, endPool } from '../api/supabase.js';
|
|
6
7
|
const GITBASE_DIR = '.gitbase';
|
|
7
8
|
const CONFIG_FILE = path.join(GITBASE_DIR, 'config');
|
|
9
|
+
const ALL_TYPES = [
|
|
10
|
+
'extensions', 'types', 'sequences', 'tables', 'matviews',
|
|
11
|
+
'views', 'functions', 'triggers', 'policies', 'grants', 'publications'
|
|
12
|
+
];
|
|
8
13
|
export async function status() {
|
|
9
|
-
const changes = await getStatus();
|
|
14
|
+
const changes = await getStatus(false);
|
|
10
15
|
if (!changes)
|
|
11
16
|
return;
|
|
12
|
-
|
|
13
|
-
console.log(chalk.green('No changes detected. Working tree clean.'));
|
|
14
|
-
}
|
|
15
|
-
else {
|
|
16
|
-
console.log(chalk.yellow('Unsynced Changes (Database -> Local):'));
|
|
17
|
-
for (const change of changes) {
|
|
18
|
-
if (change.type === 'new') {
|
|
19
|
-
console.log(chalk.green(` [NEW] ${change.path}`));
|
|
20
|
-
}
|
|
21
|
-
else if (change.type === 'modified') {
|
|
22
|
-
console.log(chalk.yellow(` [MODIFIED] ${change.path}`));
|
|
23
|
-
}
|
|
24
|
-
else if (change.type === 'deleted') {
|
|
25
|
-
console.log(chalk.red(` [DELETED] ${change.path}`));
|
|
26
|
-
}
|
|
27
|
-
}
|
|
28
|
-
}
|
|
17
|
+
// printing is now handled by getStatus directly during the check to maintain structure.
|
|
29
18
|
}
|
|
30
|
-
export async function getStatus() {
|
|
31
|
-
// Check if initialized
|
|
19
|
+
export async function getStatus(silent = false) {
|
|
32
20
|
try {
|
|
33
21
|
await fs.access(CONFIG_FILE);
|
|
34
22
|
}
|
|
@@ -37,29 +25,52 @@ export async function getStatus() {
|
|
|
37
25
|
return null;
|
|
38
26
|
}
|
|
39
27
|
const config = JSON.parse(await fs.readFile(CONFIG_FILE, 'utf-8'));
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
28
|
+
// BUG FIX 1.5: support the current multi-branch config format
|
|
29
|
+
const branchData = config.branches?.[config.currentBranch];
|
|
30
|
+
const projectRef = branchData?.projectRef ?? config.projectRef;
|
|
31
|
+
if (!projectRef) {
|
|
32
|
+
console.error(chalk.red('Project ref not found in config. Run `gitb init --force` to re-initialize.'));
|
|
33
|
+
return null;
|
|
34
|
+
}
|
|
44
35
|
let liveSchema;
|
|
45
36
|
try {
|
|
37
|
+
await createPool(projectRef);
|
|
46
38
|
liveSchema = await extractSchema(projectRef);
|
|
47
39
|
}
|
|
48
40
|
catch (e) {
|
|
49
41
|
console.error(chalk.red(`Failed to fetch schema: ${e.message}`));
|
|
50
42
|
return null;
|
|
51
43
|
}
|
|
44
|
+
finally {
|
|
45
|
+
await endPool(projectRef);
|
|
46
|
+
}
|
|
52
47
|
const changes = [];
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
48
|
+
for (const type of ALL_TYPES) {
|
|
49
|
+
await checkType(liveSchema[type] ?? {}, type, changes);
|
|
50
|
+
}
|
|
51
|
+
if (!silent) {
|
|
52
|
+
if (changes.length === 0) {
|
|
53
|
+
console.log(chalk.green('No changes detected. Working tree clean.'));
|
|
54
|
+
}
|
|
55
|
+
else {
|
|
56
|
+
console.log(chalk.yellow('Unsynced Changes (Database → Local):'));
|
|
57
|
+
for (const change of changes) {
|
|
58
|
+
if (change.type === 'new') {
|
|
59
|
+
console.log(chalk.green(` [NEW] ${change.path}`));
|
|
60
|
+
}
|
|
61
|
+
else if (change.type === 'modified') {
|
|
62
|
+
console.log(chalk.yellow(` [MODIFIED] ${change.path}`));
|
|
63
|
+
}
|
|
64
|
+
else if (change.type === 'deleted') {
|
|
65
|
+
console.log(chalk.red(` [DELETED] ${change.path}`));
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
}
|
|
58
70
|
return changes;
|
|
59
71
|
}
|
|
60
72
|
async function checkType(liveObjects, type, changes) {
|
|
61
73
|
const dir = path.join('supabase', type);
|
|
62
|
-
// Ensure dir exists
|
|
63
74
|
try {
|
|
64
75
|
await fs.mkdir(dir, { recursive: true });
|
|
65
76
|
}
|
|
@@ -73,22 +84,19 @@ async function checkType(liveObjects, type, changes) {
|
|
|
73
84
|
const content = await fs.readFile(path.join(dir, file), 'utf-8');
|
|
74
85
|
localMap[name] = content;
|
|
75
86
|
}
|
|
76
|
-
//
|
|
87
|
+
// New or modified
|
|
77
88
|
for (const [name, content] of Object.entries(liveObjects)) {
|
|
78
|
-
// Sanitize name for filename
|
|
79
89
|
const safeName = name.replace(/\//g, '_');
|
|
80
90
|
if (!localMap[safeName]) {
|
|
81
91
|
changes.push({ type: 'new', path: `${type}/${safeName}.sql`, content, rawName: name });
|
|
82
92
|
}
|
|
83
|
-
else {
|
|
84
|
-
|
|
85
|
-
changes.push({ type: 'modified', path: `${type}/${safeName}.sql`, content, rawName: name });
|
|
86
|
-
}
|
|
93
|
+
else if (canonicalize(content) !== canonicalize(localMap[safeName])) {
|
|
94
|
+
changes.push({ type: 'modified', path: `${type}/${safeName}.sql`, content, rawName: name });
|
|
87
95
|
}
|
|
88
96
|
}
|
|
89
|
-
//
|
|
97
|
+
// Deleted (in local but gone from live DB)
|
|
98
|
+
const liveKeysSafe = Object.keys(liveObjects).map(k => k.replace(/\//g, '_'));
|
|
90
99
|
for (const name of Object.keys(localMap)) {
|
|
91
|
-
const liveKeysSafe = Object.keys(liveObjects).map(k => k.replace(/\//g, '_'));
|
|
92
100
|
if (!liveKeysSafe.includes(name)) {
|
|
93
101
|
changes.push({ type: 'deleted', path: `${type}/${name}.sql`, rawName: name });
|
|
94
102
|
}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import { getConfig, getConnectionString } from '../api/supabase.js';
|
|
5
|
+
import pg from 'pg';
|
|
6
|
+
const { Client } = pg;
|
|
7
|
+
/**
 * Dry-run every local SQL file against the live database inside a single
 * transaction that is always rolled back, so syntax/dependency errors are
 * reported without the database ever being modified.
 * Sets process.exitCode = 1 when verification fails.
 */
export async function verify() {
    console.log(chalk.blue('Verifying local schema against current database...'));
    const config = await getConfig();
    if (!config) {
        console.error(chalk.red('Not initialized. Run `gitb init` first.'));
        return;
    }
    // Support both the multi-branch and the legacy single-project config shapes.
    const currentBranch = config.branches ? config.branches[config.currentBranch] : null;
    const projectRef = currentBranch ? currentBranch.projectRef : config.projectRef;
    if (!projectRef) {
        console.error(chalk.red('Could not determine project ref. Check config.'));
        return;
    }
    // 1. Load all local SQL files
    const localFiles = await loadAllSqlFiles('supabase');
    if (localFiles.length === 0) {
        console.log(chalk.yellow('No local SQL files found to verify.'));
        return;
    }
    const connString = await getConnectionString(projectRef);
    const client = new Client({ connectionString: connString, ssl: { rejectUnauthorized: false } });
    try {
        await client.connect();
    }
    catch (e) {
        console.error(chalk.red(`Failed to connect to database: ${e.message}`));
        return;
    }
    let hasErrors = false;
    console.log(chalk.cyan(`Found ${localFiles.length} local SQL files. Validating syntax and dependencies...`));
    try {
        // We use a transaction so we can roll it back immediately.
        await client.query('BEGIN');
        // Apply files in rough dependency order so references exist:
        // Types -> Tables -> Views -> Functions -> Policies -> Triggers.
        // Not a full dependency graph, but good enough in practice.
        const getRank = (p) => {
            if (p.includes('/types/'))
                return 1;
            if (p.includes('/tables/'))
                return 2;
            if (p.includes('/views/'))
                return 3;
            if (p.includes('/functions/'))
                return 4;
            if (p.includes('/policies/'))
                return 5;
            if (p.includes('/triggers/'))
                return 6;
            return 7;
        };
        // BUG FIX: sort a copy — Array.prototype.sort mutates in place, and
        // the array returned by loadAllSqlFiles should not be reordered
        // behind the caller's back.
        const sortedFiles = [...localFiles].sort((a, b) => getRank(a.path) - getRank(b.path));
        for (const file of sortedFiles) {
            try {
                // Some Postgres DDL (like CREATE INDEX CONCURRENTLY or DROP DATABASE)
                // can't run in a transaction block; those are skipped below.
                await client.query(file.content);
            }
            catch (err) {
                if (err.message && err.message.includes('cannot run inside a transaction block')) {
                    console.log(chalk.yellow(` ⚠ Skipped ${file.path} (cannot be verified inside a transaction block)`));
                    continue;
                }
                hasErrors = true;
                console.error(chalk.red(`\n❌ Error in ${file.path}:`));
                console.error(chalk.red(` ${err.message}`));
                if (err.position) {
                    console.error(chalk.yellow(` Position: ${err.position}`));
                }
                // Once a transaction errors, subsequent queries in the same
                // transaction fail with "current transaction is aborted" —
                // stop checking further files.
                break;
            }
        }
    }
    finally {
        // ABSOLUTELY ESSENTIAL: roll back so we never modify the live database.
        // BUG FIX: if the connection died mid-run, ROLLBACK itself throws from
        // this finally block, masking the real error and skipping end() (a
        // leaked connection). Guard both cleanup steps so the client is
        // always released.
        try {
            await client.query('ROLLBACK');
        }
        catch {
            // Connection already gone — there is nothing left to roll back.
        }
        await client.end().catch(() => { });
    }
    if (!hasErrors) {
        console.log(chalk.green('\n✅ Schema is valid. Ready to push.'));
    }
    else {
        console.error(chalk.red('\nVerification failed. Please fix the SQL errors before pushing.'));
        process.exitCode = 1;
    }
}
|
|
99
|
+
/**
 * Recursively gather every .sql file under `dir`, returning
 * [{ path, content }] with paths relative to `baseDir` in posix form.
 * A missing or unreadable directory yields whatever was collected so far
 * (empty array in the common case) instead of throwing.
 */
async function loadAllSqlFiles(dir, baseDir = dir) {
    const results = [];
    try {
        const entries = await fs.readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
            const fullPath = path.join(dir, entry.name);
            if (entry.isDirectory()) {
                results.push(...await loadAllSqlFiles(fullPath, baseDir));
            }
            else if (entry.isFile() && fullPath.endsWith('.sql')) {
                const content = await fs.readFile(fullPath, 'utf-8');
                // Keep posix-style separators so paths compare consistently
                // across platforms.
                const relPath = path.relative(baseDir, fullPath).replace(/\\/g, '/');
                results.push({ path: relPath, content });
            }
        }
    }
    catch {
        // Directory doesn't exist (or became unreadable) — best effort.
    }
    return results;
}
|
package/dist/index.js
CHANGED
|
@@ -26,15 +26,25 @@ yargs(hideBin(process.argv))
|
|
|
26
26
|
const { status } = await import('./commands/status.js');
|
|
27
27
|
await status();
|
|
28
28
|
})
|
|
29
|
-
.command(['pull [files..]', 'add [files..]'], 'Pull
|
|
30
|
-
return yargs
|
|
31
|
-
|
|
29
|
+
.command(['pull [files..]', 'add [files..]'], 'Pull schema from database to local files', (yargs) => {
|
|
30
|
+
return yargs
|
|
31
|
+
.positional('files', {
|
|
32
|
+
describe: 'Specific files to pull',
|
|
32
33
|
type: 'string',
|
|
33
34
|
array: true
|
|
35
|
+
})
|
|
36
|
+
.option('auto-commit', {
|
|
37
|
+
type: 'boolean',
|
|
38
|
+
description: 'Automatically commit after pulling with an auto-generated message'
|
|
39
|
+
})
|
|
40
|
+
.option('message', {
|
|
41
|
+
alias: 'm',
|
|
42
|
+
type: 'string',
|
|
43
|
+
description: 'Commit message (implies auto-commit)'
|
|
34
44
|
});
|
|
35
45
|
}, async (argv) => {
|
|
36
|
-
const {
|
|
37
|
-
await
|
|
46
|
+
const { pull } = await import('./commands/pull.js');
|
|
47
|
+
await pull(argv);
|
|
38
48
|
})
|
|
39
49
|
.command('commit', 'Record changes to the repository', (yargs) => {
|
|
40
50
|
return yargs.option('message', {
|
|
@@ -60,14 +70,47 @@ yargs(hideBin(process.argv))
|
|
|
60
70
|
const { revert } = await import('./commands/revert.js');
|
|
61
71
|
await revert(argv);
|
|
62
72
|
})
|
|
63
|
-
.command('log', 'Show commit
|
|
73
|
+
.command('log [file]', 'Show commit history', (yargs) => {
|
|
74
|
+
return yargs
|
|
75
|
+
.positional('file', {
|
|
76
|
+
describe: 'Only show commits that touched this file (e.g. tables/users.sql)',
|
|
77
|
+
type: 'string'
|
|
78
|
+
})
|
|
79
|
+
.option('oneline', {
|
|
80
|
+
type: 'boolean',
|
|
81
|
+
description: 'Compact one-line format'
|
|
82
|
+
})
|
|
83
|
+
.option('since', {
|
|
84
|
+
type: 'string',
|
|
85
|
+
description: 'Only show commits after this date (ISO format, e.g. 2026-01-01)'
|
|
86
|
+
})
|
|
87
|
+
.option('n', {
|
|
88
|
+
alias: 'max-count',
|
|
89
|
+
type: 'number',
|
|
90
|
+
description: 'Limit to last N commits'
|
|
91
|
+
});
|
|
92
|
+
}, async (argv) => {
|
|
64
93
|
const { log } = await import('./commands/log.js');
|
|
65
|
-
await log();
|
|
94
|
+
await log(argv);
|
|
66
95
|
})
|
|
67
|
-
.command('diff [commit]', 'Show changes between commits or live database', (yargs) => {
|
|
68
|
-
return yargs
|
|
69
|
-
|
|
96
|
+
.command('diff [commit] [commit2]', 'Show changes between commits or live database', (yargs) => {
|
|
97
|
+
return yargs
|
|
98
|
+
.positional('commit', {
|
|
99
|
+
describe: 'First commit hash (or HEAD~N). Compared against local files by default.',
|
|
100
|
+
type: 'string'
|
|
101
|
+
})
|
|
102
|
+
.positional('commit2', {
|
|
103
|
+
describe: 'Second commit hash — compare two commits directly',
|
|
70
104
|
type: 'string'
|
|
105
|
+
})
|
|
106
|
+
.option('live', {
|
|
107
|
+
type: 'boolean',
|
|
108
|
+
description: 'Compare commit against live database instead of local files'
|
|
109
|
+
})
|
|
110
|
+
.option('files', {
|
|
111
|
+
type: 'string',
|
|
112
|
+
array: true,
|
|
113
|
+
description: 'Filter to specific files (e.g. tables/users.sql)'
|
|
71
114
|
});
|
|
72
115
|
}, async (argv) => {
|
|
73
116
|
const { diff } = await import('./commands/diff.js');
|
|
@@ -125,6 +168,43 @@ yargs(hideBin(process.argv))
|
|
|
125
168
|
}, async (argv) => {
|
|
126
169
|
const { remote } = await import('./commands/remote.js');
|
|
127
170
|
await remote(argv);
|
|
171
|
+
})
|
|
172
|
+
.command('stash [subcommand]', 'Stash and restore uncommitted local schema changes', (yargs) => {
|
|
173
|
+
return yargs
|
|
174
|
+
.positional('subcommand', {
|
|
175
|
+
describe: 'Stash subcommand: push (default), pop, apply, list, drop, clear',
|
|
176
|
+
type: 'string',
|
|
177
|
+
choices: ['push', 'save', 'pop', 'apply', 'list', 'ls', 'drop', 'clear'],
|
|
178
|
+
default: 'push'
|
|
179
|
+
})
|
|
180
|
+
.option('message', {
|
|
181
|
+
alias: 'm',
|
|
182
|
+
type: 'string',
|
|
183
|
+
description: 'Stash message (for push/save)'
|
|
184
|
+
})
|
|
185
|
+
.option('index', {
|
|
186
|
+
alias: 'n',
|
|
187
|
+
type: 'number',
|
|
188
|
+
description: 'Stash entry index (for apply/drop)',
|
|
189
|
+
default: 0
|
|
190
|
+
});
|
|
191
|
+
}, async (argv) => {
|
|
192
|
+
const { stash } = await import('./commands/stash.js');
|
|
193
|
+
await stash(argv);
|
|
194
|
+
})
|
|
195
|
+
.command('verify', 'Dry-run local schema files against the database to catch errors', {}, async (argv) => {
|
|
196
|
+
const { verify } = await import('./commands/verify.js');
|
|
197
|
+
await verify();
|
|
198
|
+
})
|
|
199
|
+
.command('snapshot', 'Start a background daemon that automatically commits remote changes', (yargs) => {
|
|
200
|
+
return yargs.option('interval', {
|
|
201
|
+
type: 'number',
|
|
202
|
+
description: 'Interval in minutes to check for changes',
|
|
203
|
+
default: 60
|
|
204
|
+
});
|
|
205
|
+
}, async (argv) => {
|
|
206
|
+
const { snapshot } = await import('./commands/snapshot.js');
|
|
207
|
+
await snapshot(argv);
|
|
128
208
|
})
|
|
129
209
|
.help()
|
|
130
210
|
.parse();
|