@coldge.com/gitbase 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +15 -0
- package/README.md +178 -0
- package/dist/api/supabase.js +166 -0
- package/dist/commands/add.js +34 -0
- package/dist/commands/branch.js +96 -0
- package/dist/commands/clone.js +65 -0
- package/dist/commands/commit.js +99 -0
- package/dist/commands/diff.js +125 -0
- package/dist/commands/fetch.js +49 -0
- package/dist/commands/init.js +90 -0
- package/dist/commands/log.js +30 -0
- package/dist/commands/login.js +29 -0
- package/dist/commands/merge.js +59 -0
- package/dist/commands/push.js +176 -0
- package/dist/commands/remote.js +142 -0
- package/dist/commands/revert.js +233 -0
- package/dist/commands/status.js +96 -0
- package/dist/commands/whoami.js +19 -0
- package/dist/index.js +130 -0
- package/dist/schema/extractor.js +113 -0
- package/dist/schema/queries.js +47 -0
- package/dist/storage/git.js +15 -0
- package/dist/utils/config.js +21 -0
- package/dist/utils/hashing.js +9 -0
- package/package.json +47 -0
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import * as diffLib from 'diff';
|
|
5
|
+
import { readCommit, readTree, readObject } from '../storage/git.js';
|
|
6
|
+
import { extractSchema } from '../schema/extractor.js';
|
|
7
|
+
import { canonicalize } from '../utils/hashing.js';
|
|
8
|
+
const GITBASE_DIR = '.gitbase';
|
|
9
|
+
const CONFIG_FILE = path.join(GITBASE_DIR, 'config');
|
|
10
|
+
/**
 * `gitb diff` — compare the live database schema against either a specific
 * commit (`--commit <hash>`) or the local supabase/ files (default), and
 * print a colorized unified diff per changed object.
 *
 * @param {{commit?: string, files?: string[]}} argv - optional commit hash and
 *   an optional list of schema file paths (e.g. "tables/users.sql") to
 *   restrict the diff to.
 */
export async function diff(argv) {
  // 1. Ensure the repo has been initialized (config file present).
  try {
    await fs.access(CONFIG_FILE);
  } catch {
    console.error(chalk.red('Not initialized. Run `gitb init` first.'));
    return;
  }
  const config = JSON.parse(await fs.readFile(CONFIG_FILE, 'utf-8'));
  // Support the new branch-based config format, falling back to the legacy
  // flat { projectRef } layout.
  const currentBranch = config.branches ? config.branches[config.currentBranch] : null;
  const projectRef = currentBranch ? currentBranch.projectRef : config.projectRef;
  if (!projectRef) {
    console.error(chalk.red('Project Ref not found in config.'));
    return;
  }

  console.log(chalk.blue(`Fetching current database state...`));
  const liveSchema = await extractSchema(projectRef);

  let targetSchema = { tables: {}, functions: {}, views: {}, triggers: {}, policies: {}, types: {} };
  let targetLabel = '';

  // Optional filter: only diff the listed "type/name.sql" paths.
  const filterFiles = argv.files || [];

  // 2. Load Target Schema (a specific commit, or the local working files).
  const commitHash = argv.commit;
  if (commitHash) {
    // Compare with a specific commit's tree.
    targetLabel = `Commit ${commitHash.substring(0, 7)}`;
    try {
      const commit = await readCommit(commitHash);
      const tree = await readTree(commit.tree);
      for (const [relPath, hash] of Object.entries(tree)) {
        const parts = relPath.split('/');
        const type = parts[0];
        const name = path.basename(relPath, '.sql');
        const content = await readObject(hash);
        // Ignore tree entries outside the known schema categories.
        if (targetSchema[type])
          targetSchema[type][name] = content;
      }
    } catch {
      console.error(chalk.red(`Commit ${commitHash} not found.`));
      return;
    }
  } else {
    // Compare with local files (equivalent to `git diff`).
    targetLabel = 'Local Files';
    const types = ['tables', 'functions', 'views', 'triggers', 'policies', 'types'];
    for (const type of types) {
      const dir = path.join('supabase', type);
      try {
        const files = await fs.readdir(dir);
        for (const file of files) {
          if (!file.endsWith('.sql'))
            continue;
          const name = path.basename(file, '.sql');
          // NOTE: the extractor replaces '/' with '_' when writing local
          // files; tables/functions don't normally contain '/', so no
          // un-sanitizing is done here.
          const content = await fs.readFile(path.join(dir, file), 'utf-8');
          targetSchema[type][name] = content;
        }
      } catch (e) {
        // Directory doesn't exist, ignore
      }
    }
  }

  // 3. Compare canonicalized SQL and print a unified diff for each object
  //    that differs.
  let hasDifferences = false;
  const types = ['tables', 'functions', 'views', 'triggers', 'policies', 'types'];
  for (const type of types) {
    const keys = Array.from(new Set([
      ...Object.keys(liveSchema[type]),
      ...Object.keys(targetSchema[type])
    ])).sort();
    for (const key of keys) {
      const filePath = `${type}/${key}.sql`;
      if (filterFiles.length > 0 && !filterFiles.includes(filePath))
        continue;
      const liveSql = liveSchema[type][key] || '';
      const targetSql = targetSchema[type][key] || '';
      if (canonicalize(liveSql) !== canonicalize(targetSql)) {
        hasDifferences = true;
        console.log(chalk.bold(`\ndiff --git a/${type}/${key}.sql b/${type}/${key}.sql`));
        const patch = diffLib.createTwoFilesPatch(`a/${type}/${key}.sql`, `b/${type}/${key}.sql`, targetSql, liveSql, targetLabel, 'Live Database');
        // Colorize by line prefix. createTwoFilesPatch (with distinct file
        // names) emits exactly three header lines ('===…', '--- …', '+++ …');
        // the previous fixed "skip the first 4 lines" approach mis-colored the
        // first '@@' hunk header white, so classify each line by prefix
        // instead. Header checks must come before the bare '+'/'-' checks.
        const lines = patch.split('\n');
        for (const line of lines) {
          if (line.startsWith('===') || line.startsWith('---') || line.startsWith('+++')) {
            console.log(chalk.white(line));
          } else if (line.startsWith('@@')) {
            console.log(chalk.cyan(line));
          } else if (line.startsWith('+')) {
            console.log(chalk.green(line));
          } else if (line.startsWith('-')) {
            console.log(chalk.red(line));
          } else {
            console.log(line);
          }
        }
      }
    }
  }
  if (!hasDifferences) {
    console.log(chalk.green(`\nNo differences found. Live database matches ${targetLabel} exactly.`));
  }
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import { getConfig, listStorageObjects, downloadFromStorage } from '../api/supabase.js';
|
|
5
|
+
const GITBASE_DIR = '.gitbase';
|
|
6
|
+
const OBJECTS_DIR = path.join(GITBASE_DIR, 'objects');
|
|
7
|
+
const BUCKET_NAME = 'gitbase-remotes';
|
|
8
|
+
/**
 * `gitb fetch` — download any commit/tree/blob objects present in the remote
 * Supabase storage bucket but missing from the local .gitbase/objects store.
 * Analogous to `git fetch`, except remote-tracking branch heads are not yet
 * updated (see the note at the bottom).
 */
export async function fetch() {
  const config = await getConfig();
  if (!config) {
    console.error(chalk.red('Not initialized.'));
    return;
  }
  const currentBranch = config.currentBranch;
  const projectRef = config.branches[currentBranch].projectRef;
  console.log(chalk.blue(`Fetching remote objects from Supabase for branch '${currentBranch}'...`));
  try {
    // List all objects in the bucket's objects/ prefix.
    const objects = await listStorageObjects(projectRef, BUCKET_NAME, 'objects/');
    // Ensure the local object store exists once up front, instead of
    // re-creating it inside the loop for every downloaded object.
    await fs.mkdir(OBJECTS_DIR, { recursive: true });
    let downloadCount = 0;
    for (const obj of objects) {
      const hash = obj.name;
      const localPath = path.join(OBJECTS_DIR, hash);
      try {
        // Objects are content-addressed by hash, so an existing local file
        // never needs re-downloading.
        await fs.access(localPath);
      } catch {
        // Missing locally — pull it down from the remote bucket.
        const content = await downloadFromStorage(projectRef, BUCKET_NAME, `objects/${hash}`);
        await fs.writeFile(localPath, content, 'utf-8');
        downloadCount++;
      }
    }
    // TODO(review): a real "git fetch" also updates remote-tracking branch
    // heads; for now the local config.json remains the source of branch state.
    if (downloadCount === 0) {
      console.log(chalk.green('Local history is up to date.'));
    } else {
      console.log(chalk.green(`Fetched ${downloadCount} new object(s) from remote.`));
    }
  } catch (e) {
    console.error(chalk.red(`Fetch failed: ${e.message}`));
  }
}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import readline from 'readline';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
const GITBASE_DIR = '.gitbase';
|
|
6
|
+
const CONFIG_FILE = path.join(GITBASE_DIR, 'config');
|
|
7
|
+
/**
 * `gitb init` — interactive setup: creates the .gitbase directory, prompts
 * for the Supabase project ref and database connection URI, writes the
 * branch-based config (single 'production' branch), scaffolds the supabase/
 * schema directories, and gitignores .gitbase/ so credentials stay local.
 *
 * @param {{force?: boolean}} [argv] - pass { force: true } to re-link an
 *   already-initialized project.
 */
export async function init(argv = {}) {
  try {
    await fs.mkdir(GITBASE_DIR);
    console.log(chalk.green('Initialized empty GitBase repository in .gitbase/'));
  } catch (error) {
    if (error.code === 'EEXIST') {
      // Repository directory already exists — proceed to the config check.
    } else {
      console.error(chalk.red('Failed to initialize repository:', error));
      return;
    }
  }

  // Refuse to overwrite an existing link unless --force was given.
  try {
    await fs.access(CONFIG_FILE);
    if (!argv.force) {
      console.log(chalk.yellow('Project is already linked. Use --force to re-initialize.'));
      return;
    }
  } catch {
    // No config yet — continue with first-time setup.
  }

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const ask = (q) => new Promise(r => rl.question(q, r));

  console.log(chalk.blue('Please enter your Supabase Project Reference ID:'));
  const ref = await ask('Project Ref (e.g., mxyzptlk): ');
  if (!ref) {
    console.log(chalk.red('Project Ref is required.'));
    rl.close();
    return;
  }

  console.log(chalk.blue('\nPlease enter your Full Database Connection URI:'));
  console.log(chalk.gray('(Click the "Connect" button in the top header of your Supabase dashboard)'));
  console.log(chalk.yellow('IMPORTANT: In case the connection fails later, change the connection type to "Session Pooler" in the dropdown and try again!'));
  console.log(chalk.gray('Make sure to replace [YOUR-PASSWORD] with your actual password!'));
  console.log(chalk.gray('Example: postgresql://postgres.mxyzptlk:mypassword@aws-0-us-east-1.pooler.supabase.com:5432/postgres'));
  const connString = await ask('\nConnection URI: ');
  if (!connString) {
    console.log(chalk.red('Connection URI is required to execute SQL.'));
    rl.close();
    return;
  }

  // Default layout: a single 'production' branch owning the project ref.
  const config = {
    currentBranch: 'production',
    branches: {
      production: {
        projectRef: ref.trim(),
        connectionString: connString.trim(),
        head: null
      }
    }
  };
  await fs.writeFile(CONFIG_FILE, JSON.stringify(config, null, 2));
  console.log(chalk.green(`\nLinked to project: ${ref.trim()} (Branch: production)`));

  // Scaffold the supabase/ schema directory structure.
  const schemaDirs = ['tables', 'functions', 'views', 'types', 'triggers', 'policies'];
  for (const dir of schemaDirs) {
    await fs.mkdir(path.join('supabase', dir), { recursive: true });
  }

  // Best-effort: add .gitbase/ to .gitignore so the connection string (which
  // contains the DB password) is never committed to git.
  try {
    const gitignorePath = '.gitignore';
    let gitignoreContent = '';
    try {
      gitignoreContent = await fs.readFile(gitignorePath, 'utf-8');
    } catch (e) {
      // No .gitignore yet — appendFile below will create it.
    }
    if (!gitignoreContent.includes(GITBASE_DIR)) {
      const entry = `\n# GitBase history & config\n${GITBASE_DIR}/\n`;
      await fs.appendFile(gitignorePath, entry);
      console.log(chalk.gray('Added .gitbase/ to .gitignore for security.'));
    }
  } catch (e) {
    // Ignore failures here — gitignoring is purely a convenience step.
  }
  rl.close();
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import { readCommit } from '../storage/git.js';
|
|
5
|
+
const GITBASE_DIR = '.gitbase';
|
|
6
|
+
const HEAD_FILE = path.join(GITBASE_DIR, 'HEAD');
|
|
7
|
+
/**
 * `gitb log` — print the commit history by walking parent links from the
 * HEAD commit back toward the root. Stops at the first unreadable commit.
 */
export async function log() {
  let cursor;
  try {
    cursor = await fs.readFile(HEAD_FILE, 'utf-8');
  } catch {
    console.log(chalk.red('No commits yet.'));
    return;
  }
  console.log(chalk.cyan('Commit History:'));
  while (cursor) {
    let commit;
    try {
      commit = await readCommit(cursor);
    } catch (e) {
      // Missing or corrupt object — stop walking the chain.
      break;
    }
    console.log(chalk.yellow(`commit ${cursor}`));
    console.log(`Author: ${commit.author}`);
    console.log(`Date: ${commit.timestamp}`);
    console.log(`\n ${commit.message}\n`);
    cursor = commit.parent;
  }
}
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { saveToken } from '../utils/config.js';
|
|
2
|
+
import readline from 'readline';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
/**
 * `gitb login` — prompt for a Supabase personal access token and persist it
 * via saveToken. Resolves once the prompt flow has finished, whether or not
 * a token was actually saved.
 */
export async function login() {
  console.log(chalk.blue('Please enter your Supabase Access Token (https://supabase.com/dashboard/account/tokens):'));
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const token = await new Promise((resolve) => rl.question('Token: ', resolve));
  if (!token) {
    console.log(chalk.red('Token is required.'));
    rl.close();
    return;
  }
  try {
    await saveToken(token.trim());
    console.log(chalk.green('Token saved successfully!'));
  } catch (error) {
    console.error(chalk.red('Failed to save token:', error));
  }
  rl.close();
}
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import { readCommit, readTree, readObject } from '../storage/git.js';
|
|
5
|
+
const GITBASE_DIR = '.gitbase';
|
|
6
|
+
/**
 * `gitb merge <name>` — bring another branch's committed files into the
 * local working tree. MVP semantics: no 3-way merge; the source branch's
 * tree fully replaces the contents of supabase/. The live database is not
 * touched until `gitb push`.
 *
 * @param {{name: string}} argv - argv.name is the source branch to merge from.
 */
export async function merge(argv) {
  const sourceBranch = argv.name;
  if (!sourceBranch) {
    console.error(chalk.red('Source branch name required.'));
    return;
  }
  // Guard config access like the other commands do, instead of crashing with
  // an unhandled ENOENT/JSON error when run before `gitb init`.
  let config;
  try {
    const configContent = await fs.readFile(path.join(GITBASE_DIR, 'config'), 'utf-8');
    config = JSON.parse(configContent);
  } catch {
    console.error(chalk.red('Not initialized. Run `gitb init` first.'));
    return;
  }
  if (!config.branches[sourceBranch]) {
    console.error(chalk.red(`Branch '${sourceBranch}' does not exist.`));
    return;
  }
  if (sourceBranch === config.currentBranch) {
    console.error(chalk.red('Cannot merge a branch into itself.'));
    return;
  }
  const sourceHead = config.branches[sourceBranch].head;
  if (!sourceHead) {
    console.error(chalk.red(`Branch '${sourceBranch}' has no commits to merge.`));
    return;
  }
  const currentBranch = config.currentBranch;
  const projectRef = config.branches[currentBranch].projectRef;
  // RBAC: merging into production requires owner/admin rights.
  if (currentBranch === 'production') {
    const { isProductionAdmin } = await import('../api/supabase.js');
    console.log(chalk.blue('Verifying production permissions...'));
    const isAdmin = await isProductionAdmin(projectRef);
    if (!isAdmin) {
      console.error(chalk.red('\n🔥 ACCESS DENIED: Only Owners or Administrators can merge into the production branch.'));
      return;
    }
    console.log(chalk.green('Permission verified.'));
  }
  console.log(chalk.blue(`Merging files from '${sourceBranch}' into '${currentBranch}'...`));
  // 1. Load the source branch's committed tree.
  const commit = await readCommit(sourceHead);
  const tree = await readTree(commit.tree);
  // 2. Overwrite strategy: wipe supabase/ and restore it from the source
  //    tree. (A real merge would do a 3-way merge; out of scope for the MVP.)
  try {
    await fs.rm('supabase', { recursive: true, force: true });
  } catch (e) {
    // Nothing to remove — fine.
  }
  for (const [relPath, hash] of Object.entries(tree)) {
    const fullPath = path.join('supabase', relPath);
    await fs.mkdir(path.dirname(fullPath), { recursive: true });
    const content = await readObject(hash);
    await fs.writeFile(fullPath, content, 'utf-8');
  }
  console.log(chalk.green(`\nSuccessfully merged branch '${sourceBranch}' into your local files.`));
  console.log(chalk.yellow(`Run 'gitb push' to apply these changes to the live '${config.currentBranch}' database.`));
}
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
import fs from 'fs/promises';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import chalk from 'chalk';
|
|
4
|
+
import readline from 'readline';
|
|
5
|
+
import { getConfig, runQuery } from '../api/supabase.js';
|
|
6
|
+
import { extractSchema } from '../schema/extractor.js';
|
|
7
|
+
import { canonicalize } from '../utils/hashing.js';
|
|
8
|
+
import toposort from 'toposort';
|
|
9
|
+
const GITBASE_DIR = '.gitbase';
|
|
10
|
+
/**
 * `gitb push` — apply the local supabase/ files to the live database.
 *
 * Execution order is deliberate and must not be reordered:
 * types → tables (topologically sorted by FK references) → views →
 * functions → triggers → policies. Objects whose canonicalized SQL already
 * matches the live schema are skipped.
 *
 * @param {{files?: string[]}} argv - optional list of "type/name.sql" paths
 *   to restrict the push to.
 */
export async function push(argv) {
  const config = await getConfig();
  if (!config) {
    console.error(chalk.red('Not initialized. Run `gitb init` first.'));
    return;
  }
  const currentBranch = config.currentBranch;
  const projectRef = config.branches[currentBranch].projectRef;
  // RBAC: Production Protection — only owners/admins may push to production.
  if (currentBranch === 'production') {
    const { isProductionAdmin } = await import('../api/supabase.js');
    console.log(chalk.blue('Verifying production permissions...'));
    const isAdmin = await isProductionAdmin(projectRef);
    if (!isAdmin) {
      console.error(chalk.red('\n🔥 ACCESS DENIED: Only Owners or Administrators can push to the production branch.'));
      console.error(chalk.yellow('Please work on a feature branch and request an administrator to sync your changes.'));
      return;
    }
    console.log(chalk.green('Permission verified.'));
  }
  const filterFiles = argv.files || [];
  console.log(chalk.blue(`Pushing local files to branch '${currentBranch}' (${projectRef})...`));
  // 1. Fetch current live state so unchanged objects can be skipped.
  console.log(chalk.blue(`Checking current database state...`));
  const liveSchema = await extractSchema(projectRef);
  // 2. Load local supabase/<type>/*.sql files as the "target schema".
  const localSchema = { tables: {}, functions: {}, views: {}, triggers: {}, policies: {}, types: {} };
  const types = ['tables', 'functions', 'views', 'triggers', 'policies', 'types'];
  for (const type of types) {
    const dir = path.join('supabase', type);
    try {
      const files = await fs.readdir(dir);
      for (const file of files) {
        if (!file.endsWith('.sql'))
          continue;
        const name = path.basename(file, '.sql');
        const content = await fs.readFile(path.join(dir, file), 'utf-8');
        localSchema[type][name] = content;
      }
    } catch (e) { } // directory for this type doesn't exist locally — skip
  }
  // Interactive prompt used to confirm destructive table replacements below.
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const ask = (q) => new Promise(r => rl.question(q, r));
  let madeDbChanges = false;
  // 3. Execution Plan (same logic as revert, but local files are the truth).
  // TYPES — drop-and-recreate; CASCADE removes dependents of the old type.
  for (const [name, sql] of Object.entries(localSchema.types)) {
    if (filterFiles.length > 0 && !filterFiles.includes(`types/${name}.sql`))
      continue;
    if (liveSchema.types[name] && canonicalize(sql) === canonicalize(liveSchema.types[name]))
      continue;
    madeDbChanges = true;
    console.log(chalk.cyan(`Pushing Type: ${name}`));
    await runQuery(projectRef, `DROP TYPE IF EXISTS public."${name}" CASCADE;`);
    await runQuery(projectRef, sql);
  }
  // TABLES — topologically sort by REFERENCES so referenced tables are
  // created before the tables that point at them.
  const tableEdges = [];
  const tables = localSchema.tables;
  const tableNames = Object.keys(tables);
  for (const [name, sql] of Object.entries(tables)) {
    const content = sql;
    // Scan CREATE TABLE SQL for FK targets (optionally schema-qualified).
    const regex = /REFERENCES\s+(?:public\.)?(\w+)/gi;
    let match;
    while ((match = regex.exec(content)) !== null) {
      const target = match[1];
      // Only add an edge when the target is also managed locally (and not a
      // self-reference), otherwise toposort would see unknown nodes.
      if (target !== name && tables[target])
        tableEdges.push([target, name]);
    }
  }
  let sortedTables = tableNames;
  try {
    sortedTables = toposort.array(tableNames, tableEdges);
  } catch (e) {
    // Cyclic references — fall back to a stable alphabetical order.
    sortedTables = tableNames.sort();
  }
  for (const name of sortedTables) {
    const sql = tables[name];
    if (filterFiles.length > 0 && !filterFiles.includes(`tables/${name}.sql`))
      continue;
    if (liveSchema.tables[name] && canonicalize(sql) === canonicalize(liveSchema.tables[name]))
      continue;
    madeDbChanges = true;
    console.log(chalk.yellow(`\nPushing Table: ${name}`));
    if (liveSchema.tables[name]) {
      // Replacing an existing table is destructive — offer to keep the old
      // table (and its data) under a timestamped backup name first.
      const answer = await ask(chalk.white(`? Table '${name}' schema changed. Keep existing as backup? (Y/n) > `));
      if (answer.toLowerCase() !== 'n') {
        const timestamp = Math.floor(Date.now() / 1000);
        const backupName = `${name}_backup_${timestamp}`;
        await runQuery(projectRef, `ALTER TABLE public."${name}" RENAME TO "${backupName}";`);
        console.log(chalk.green('Backup successful.'));
      } else {
        await runQuery(projectRef, `DROP TABLE IF EXISTS public."${name}" CASCADE;`);
      }
    }
    await runQuery(projectRef, sql);
  }
  // VIEWS — drop-and-recreate.
  for (const [name, sql] of Object.entries(localSchema.views)) {
    if (filterFiles.length > 0 && !filterFiles.includes(`views/${name}.sql`))
      continue;
    if (liveSchema.views[name] && canonicalize(sql) === canonicalize(liveSchema.views[name]))
      continue;
    madeDbChanges = true;
    console.log(chalk.cyan(`Pushing View: ${name}`));
    await runQuery(projectRef, `DROP VIEW IF EXISTS public."${name}" CASCADE;`);
    await runQuery(projectRef, sql);
  }
  // FUNCTIONS — assumed to use CREATE OR REPLACE, so no explicit DROP.
  for (const [name, sql] of Object.entries(localSchema.functions)) {
    if (filterFiles.length > 0 && !filterFiles.includes(`functions/${name}.sql`))
      continue;
    if (liveSchema.functions[name] && canonicalize(sql) === canonicalize(liveSchema.functions[name]))
      continue;
    madeDbChanges = true;
    console.log(chalk.cyan(`Pushing Function: ${name}`));
    await runQuery(projectRef, sql);
  }
  // TRIGGERS — a trigger belongs to a table, so parse the target table from
  // the "ON <table>" clause to drop the old trigger before recreating it.
  for (const [name, sql] of Object.entries(localSchema.triggers)) {
    if (filterFiles.length > 0 && !filterFiles.includes(`triggers/${name}.sql`))
      continue;
    if (liveSchema.triggers[name] && canonicalize(sql) === canonicalize(liveSchema.triggers[name]))
      continue;
    madeDbChanges = true;
    console.log(chalk.cyan(`Pushing Trigger: ${name}`));
    const match = sql.match(/ON\s+(public\.)?("?\w+"?)/i);
    if (match) {
      const tableName = match[2].replace(/"/g, '');
      await runQuery(projectRef, `DROP TRIGGER IF EXISTS "${name}" ON public."${tableName}";`);
    }
    await runQuery(projectRef, sql);
  }
  // POLICIES — parse policy and table names from the CREATE POLICY statement
  // itself (the local file name is a sanitized key, not the policy name).
  for (const [key, sql] of Object.entries(localSchema.policies)) {
    if (filterFiles.length > 0 && !filterFiles.includes(`policies/${key}.sql`))
      continue;
    if (liveSchema.policies[key] && canonicalize(sql) === canonicalize(liveSchema.policies[key]))
      continue;
    madeDbChanges = true;
    const match = sql.match(/CREATE POLICY "([^"]+)"\s+ON\s+(public\.)?("?\w+"?)/i);
    if (match) {
      const policyName = match[1];
      const tableName = match[3].replace(/"/g, '');
      console.log(chalk.cyan(`Pushing Policy: ${policyName} on ${tableName}`));
      try {
        // Policies are inert unless RLS is enabled on the table.
        await runQuery(projectRef, `ALTER TABLE public."${tableName}" ENABLE ROW LEVEL SECURITY;`);
      } catch (e) { } // best-effort: RLS may already be enabled
      await runQuery(projectRef, `DROP POLICY IF EXISTS "${policyName}" ON public."${tableName}";`);
      await runQuery(projectRef, sql);
    }
  }
  rl.close();
  if (!madeDbChanges) {
    console.log(chalk.green('\nEverything is already up to date. No push required.'));
  } else {
    console.log(chalk.green('\nPush complete! Live database updated.'));
  }
}
|