@coldge.com/gitbase 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,142 @@
1
+ import fs from 'fs/promises';
2
+ import path from 'path';
3
+ import chalk from 'chalk';
4
+ import { getConfig, ensureBucket, listObjects, uploadObject, downloadObject } from '../api/supabase.js';
5
+ const GITBASE_DIR = '.gitbase';
6
+ const CONFIG_FILE = path.join(GITBASE_DIR, 'config');
7
+ const OBJECTS_DIR = path.join(GITBASE_DIR, 'objects');
8
+ const REFS_DIR = path.join(GITBASE_DIR, 'refs');
9
/**
 * Entry point for the `gitb remote` command family.
 * Routes to the add/list/push/pull handler named by `argv._[1]`.
 * @param {object} argv - Parsed yargs arguments.
 */
export async function remote(argv) {
  const handlers = {
    add: () => addRemote(argv),
    list: () => listRemotes(),
    push: () => pushRemote(argv),
    pull: () => pullRemote(argv),
  };
  const handler = handlers[argv._[1]];
  if (handler) {
    await handler();
  } else {
    console.log(chalk.yellow('Usage: gitb remote <add|list|push|pull> [args]'));
  }
}
28
/**
 * Registers a remote (name -> storage bucket) in the gitbase config file.
 * Usage: gitb remote add <name> <bucket>
 * @param {object} argv - argv.name/argv._[2] is the remote name,
 *   argv.bucket/argv._[3] the bucket it points to.
 */
async function addRemote(argv) {
  const name = argv.name || argv._[2];
  const bucket = argv.bucket || argv._[3];
  if (!name || !bucket) {
    console.error(chalk.red('Usage: gitb remote add <name> <bucket>'));
    return;
  }
  const config = await getConfig();
  if (!config) {
    // getConfig is assumed to report its own failure — TODO confirm.
    return;
  }
  // Merge the new entry into any existing remotes map, then persist.
  config.remotes = { ...(config.remotes || {}), [name]: { bucket } };
  await fs.writeFile(CONFIG_FILE, JSON.stringify(config, null, 2));
  console.log(chalk.green(`Remote '${name}' added pointing to bucket '${bucket}'.`));
}
43
/** Prints every configured remote and the bucket it points to. */
async function listRemotes() {
  const config = await getConfig();
  const remotes = config?.remotes;
  if (!remotes) {
    console.log(chalk.yellow('No remotes configured.'));
    return;
  }
  console.log(chalk.cyan('Remotes:'));
  Object.entries(remotes).forEach(([name, data]) => {
    console.log(` ${name} -> ${data.bucket}`);
  });
}
54
/**
 * Pushes local gitbase history (objects + branch refs) to a remote bucket.
 * Objects the remote already has are skipped; branch refs are overwritten.
 * @param {object} argv - argv.name or argv._[2] selects the remote.
 */
async function pushRemote(argv) {
  const name = argv.name || argv._[2];
  if (!name) {
    console.error(chalk.red('Remote name required.'));
    return;
  }
  const config = await getConfig();
  const remoteEntry = config?.remotes?.[name];
  if (!remoteEntry) {
    console.error(chalk.red(`Remote '${name}' not found.`));
    return;
  }
  const { bucket } = remoteEntry;
  const projectRef = config.branches[config.currentBranch].projectRef;
  console.log(chalk.blue(`Pushing history to remote '${name}' (${bucket})...`));
  try {
    await ensureBucket(projectRef, bucket);

    // 1. Sync objects: upload each local object the remote lacks.
    console.log(chalk.blue('Syncing objects...'));
    const remoteObjects = await listObjects(projectRef, bucket, 'objects/');
    const remoteHashes = new Set(remoteObjects.map((o) => o.name));
    const localObjects = await fs.readdir(OBJECTS_DIR).catch(() => []);
    let uploadedCount = 0;
    for (const hash of localObjects) {
      if (remoteHashes.has(hash)) continue;
      const content = await fs.readFile(path.join(OBJECTS_DIR, hash));
      await uploadObject(projectRef, bucket, `objects/${hash}`, content);
      uploadedCount += 1;
    }
    console.log(chalk.green(`Uploaded ${uploadedCount} new objects.`));

    // 2. Sync refs (heads).
    console.log(chalk.blue('Syncing refs...'));
    for (const [branchName, branch] of Object.entries(config.branches)) {
      if (!branch.head) continue;
      // Check for conflict (non-fast-forward) would go here.
      // For MVP: overwrite the remote ref unconditionally.
      await uploadObject(projectRef, bucket, `refs/heads/${branchName}`, branch.head);
    }
    console.log(chalk.green('Refs updated.'));
  } catch (e) {
    console.error(chalk.red(`Push failed: ${e.message}`));
  }
}
100
/**
 * Pulls history from a remote bucket: downloads missing objects and
 * refreshes remote-tracking refs under `.gitbase/refs/remotes/<name>/`.
 * Does NOT merge — the user runs `gitb merge` afterwards.
 * @param {object} argv - argv.name or argv._[2] selects the remote.
 */
async function pullRemote(argv) {
  const name = argv.name || argv._[2];
  if (!name) {
    console.error(chalk.red('Remote name required.'));
    return;
  }
  const config = await getConfig();
  const remoteEntry = config?.remotes?.[name];
  if (!remoteEntry) {
    console.error(chalk.red(`Remote '${name}' not found.`));
    return;
  }
  const { bucket } = remoteEntry;
  const projectRef = config.branches[config.currentBranch].projectRef;
  console.log(chalk.blue(`Pulling history from remote '${name}'...`));
  try {
    // 1. Download every remote object missing locally.
    const remoteObjects = await listObjects(projectRef, bucket, 'objects/');
    const localObjects = new Set(await fs.readdir(OBJECTS_DIR).catch(() => []));
    let downloadedCount = 0;
    for (const obj of remoteObjects) {
      const hash = obj.name;
      if (localObjects.has(hash)) continue;
      const content = await downloadObject(projectRef, bucket, `objects/${hash}`);
      await fs.writeFile(path.join(OBJECTS_DIR, hash), content);
      downloadedCount += 1;
    }
    console.log(chalk.green(`Downloaded ${downloadedCount} new objects.`));

    // 2. Update remote-tracking branches.
    const remoteRefs = await listObjects(projectRef, bucket, 'refs/heads/');
    const trackingDir = path.join(REFS_DIR, 'remotes', name);
    await fs.mkdir(trackingDir, { recursive: true });
    for (const ref of remoteRefs) {
      const branchName = ref.name;
      const head = await downloadObject(projectRef, bucket, `refs/heads/${branchName}`);
      await fs.writeFile(path.join(trackingDir, branchName), head);
      console.log(chalk.cyan(` ${name}/${branchName} -> ${head.substring(0, 7)}`));
    }
    console.log(chalk.green('\nPull complete. Use `gitb merge` to incorporate changes.'));
  } catch (e) {
    console.error(chalk.red(`Pull failed: ${e.message}`));
  }
}
@@ -0,0 +1,233 @@
1
+ import fs from 'fs/promises';
2
+ import path from 'path';
3
+ import chalk from 'chalk';
4
+ import readline from 'readline';
5
+ import { readCommit, readTree, readObject } from '../storage/git.js';
6
+ import { runQuery } from '../api/supabase.js';
7
+ import toposort from 'toposort';
8
+ const GITBASE_DIR = '.gitbase';
9
+ const CONFIG_FILE = path.join(GITBASE_DIR, 'config');
10
/**
 * Reverts the live database and the local `supabase/` working tree to the
 * state captured by a commit.
 *
 * Destructive: may DROP/rename live types, tables, views, triggers and
 * policies, so each step is compared against the live schema first and
 * tables get an interactive backup prompt.
 *
 * @param {object} argv - argv.commit: commit hash (defaults to HEAD);
 *   argv.files: optional list of tree paths to restrict the revert to.
 */
export async function revert(argv) {
  // 1. Check Config — bail out if `gitb init` was never run here.
  try {
    await fs.access(CONFIG_FILE);
  }
  catch {
    console.error(chalk.red('Not initialized.'));
    return;
  }
  const config = JSON.parse(await fs.readFile(CONFIG_FILE, 'utf-8'));
  const currentBranch = config.currentBranch;
  const projectRef = config.branches[currentBranch].projectRef;
  // RBAC: Production Protection — only Owners/Admins may revert production.
  if (currentBranch === 'production') {
    // Lazy import keeps the API module off the startup path.
    const { isProductionAdmin } = await import('../api/supabase.js');
    console.log(chalk.blue('Verifying production permissions...'));
    const isAdmin = await isProductionAdmin(projectRef);
    if (!isAdmin) {
      console.error(chalk.red('\n🔥 ACCESS DENIED: Only Owners or Administrators can revert the production branch.'));
      return;
    }
    console.log(chalk.green('Permission verified.'));
  }
  // 2. Load Commit — explicit hash, or fall back to the HEAD file.
  let commitHash = argv.commit;
  if (!commitHash) {
    try {
      commitHash = await fs.readFile(path.join(GITBASE_DIR, 'HEAD'), 'utf-8');
      if (!commitHash)
        throw new Error(); // empty HEAD file counts as "no commits"
    }
    catch {
      console.error(chalk.red('No commits found or HEAD is missing.'));
      return;
    }
  }
  let commit;
  try {
    commit = await readCommit(commitHash);
  }
  catch {
    console.error(chalk.red(`Commit ${commitHash} not found.`));
    return;
  }
  console.log(chalk.blue(`Reverting to commit ${commitHash} (${commit.message})...`));
  // 3. Fetch current live state so unchanged objects can be skipped below.
  console.log(chalk.blue(`Checking current database state...`));
  const { extractSchema } = await import('../schema/extractor.js');
  const liveSchema = await extractSchema(projectRef);
  // 4. Load Tree & Parse — bucket each tree entry by its top-level folder.
  const tree = await readTree(commit.tree);
  const schema = { tables: {}, functions: {}, views: {}, triggers: {}, policies: {}, types: {} };
  const filterFiles = argv.files || [];
  for (const [relPath, hash] of Object.entries(tree)) {
    // Honour --files: only restore the explicitly-requested paths.
    if (filterFiles.length > 0 && !filterFiles.includes(relPath))
      continue;
    const parts = relPath.split('/');
    // relPath corresponds to file structure, e.g., "tables/users.sql"
    const type = parts[0];
    const name = path.basename(relPath, '.sql');
    const content = await readObject(hash);
    if (schema[type]) {
      schema[type][name] = content;
    }
  }
  // Interactive prompt helper for the table-backup question below.
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  const ask = (q) => new Promise(r => rl.question(q, r));
  // 5. Execution Plan — apply each object category in dependency order.
  // canonicalize normalizes SQL so cosmetic differences don't force a restore.
  const { canonicalize } = await import('../utils/hashing.js');
  let madeDbChanges = false;
  // TYPES — restored first since tables may depend on them.
  for (const [name, sql] of Object.entries(schema.types)) {
    if (liveSchema.types[name] && canonicalize(sql) === canonicalize(liveSchema.types[name])) {
      continue; // live type already matches the commit
    }
    madeDbChanges = true;
    console.log(chalk.cyan(`Restoring Type: ${name}`));
    await runQuery(projectRef, `DROP TYPE IF EXISTS public."${name}" CASCADE;`);
    await runQuery(projectRef, sql);
  }
  // TABLES (Topological Sort) — referenced tables must be created before
  // the tables whose FKs point at them.
  const tableEdges = [];
  const tables = schema.tables;
  const tableNames = Object.keys(tables);
  for (const [name, sql] of Object.entries(tables)) {
    const content = sql;
    // Crude FK detection via REFERENCES clauses; quoted/schema-qualified
    // names beyond `public.` are not matched — TODO confirm acceptable.
    const regex = /REFERENCES\s+(?:public\.)?(\w+)/gi;
    let match;
    while ((match = regex.exec(content)) !== null) {
      const target = match[1];
      if (target !== name && tables[target]) {
        tableEdges.push([target, name]); // target -> name
      }
    }
  }
  let sortedTables = tableNames;
  try {
    sortedTables = toposort.array(tableNames, tableEdges);
  }
  catch (e) {
    // toposort throws on cycles; creation may then fail on FK order.
    console.warn(chalk.yellow('Circular dependency detected in tables. Falling back to alphabetical order.'));
    sortedTables = tableNames.sort();
  }
  for (const name of sortedTables) {
    const sql = tables[name];
    if (!sql)
      continue;
    if (liveSchema.tables[name] && canonicalize(sql) === canonicalize(liveSchema.tables[name])) {
      continue; // live table already matches the commit
    }
    madeDbChanges = true;
    console.log(chalk.yellow(`\nRestoring Table: ${name}`));
    if (liveSchema.tables[name]) {
      // Table exists live: offer a rename-backup (default) or a hard drop.
      const answer = await ask(chalk.white(`? Keep existing '${name}' table as backup? (Y/n) > `));
      if (answer.toLowerCase() !== 'n') {
        const timestamp = Math.floor(Date.now() / 1000);
        const backupName = `${name}_backup_${timestamp}`;
        console.log(chalk.gray(`Backing up to ${backupName}...`));
        try {
          await runQuery(projectRef, `ALTER TABLE public."${name}" RENAME TO "${backupName}";`);
          console.log(chalk.green('Backup successful.'));
        }
        catch (e) {
          // Non-fatal: fall through and still attempt the CREATE below.
          console.log(chalk.red(`Backup failed: ${e.message}`));
        }
      }
      else {
        console.log(chalk.red(`Dropping table ${name}...`));
        await runQuery(projectRef, `DROP TABLE IF EXISTS public."${name}" CASCADE;`);
      }
    }
    else {
      console.log(chalk.gray(`Table '${name}' does not exist in live database. Skipping backup.`));
    }
    console.log(chalk.cyan(`Creating table ${name}...`));
    try {
      await runQuery(projectRef, sql);
    }
    catch (e) {
      console.error(chalk.red(`Failed to create table: ${e.message}`));
    }
  }
  // VIEWS — drop-and-recreate when different from the commit.
  for (const [name, sql] of Object.entries(schema.views)) {
    if (liveSchema.views[name] && canonicalize(sql) === canonicalize(liveSchema.views[name])) {
      continue;
    }
    madeDbChanges = true;
    console.log(chalk.cyan(`Restoring View: ${name}`));
    await runQuery(projectRef, `DROP VIEW IF EXISTS public."${name}" CASCADE;`);
    await runQuery(projectRef, sql);
  }
  // FUNCTIONS — no explicit DROP; presumably the stored SQL uses
  // CREATE OR REPLACE FUNCTION — TODO confirm.
  for (const [name, sql] of Object.entries(schema.functions)) {
    if (liveSchema.functions[name] && canonicalize(sql) === canonicalize(liveSchema.functions[name])) {
      continue;
    }
    madeDbChanges = true;
    console.log(chalk.cyan(`Restoring Function: ${name}`));
    await runQuery(projectRef, sql);
  }
  // TRIGGERS — triggers are dropped per-table, so parse the target table
  // out of the stored CREATE TRIGGER statement.
  for (const [name, sql] of Object.entries(schema.triggers)) {
    if (liveSchema.triggers[name] && canonicalize(sql) === canonicalize(liveSchema.triggers[name])) {
      continue;
    }
    madeDbChanges = true;
    console.log(chalk.cyan(`Restoring Trigger: ${name}`));
    const match = sql.match(/ON\s+(public\.)?("?\w+"?)/i);
    if (match) {
      const tableName = match[2].replace(/"/g, '');
      await runQuery(projectRef, `DROP TRIGGER IF EXISTS "${name}" ON public."${tableName}";`);
    }
    await runQuery(projectRef, sql);
  }
  // POLICIES — parse policy name and table from the CREATE POLICY text.
  for (const [key, sql] of Object.entries(schema.policies)) {
    if (liveSchema.policies[key] && canonicalize(sql) === canonicalize(liveSchema.policies[key])) {
      continue;
    }
    madeDbChanges = true;
    const match = sql.match(/CREATE POLICY "([^"]+)"\s+ON\s+(public\.)?("?\w+"?)/i);
    if (match) {
      const policyName = match[1];
      const tableName = match[3].replace(/"/g, '');
      console.log(chalk.cyan(`Restoring Policy: ${policyName} on ${tableName}`));
      // Auto-enable RLS just in case the table creation didn't include it (for backwards compatibility with older commits)
      try {
        await runQuery(projectRef, `ALTER TABLE public."${tableName}" ENABLE ROW LEVEL SECURITY;`);
      }
      catch (e) { } // best-effort: ignore if RLS is already enabled / not applicable
      await runQuery(projectRef, `DROP POLICY IF EXISTS "${policyName}" ON public."${tableName}";`);
      await runQuery(projectRef, sql);
    }
  }
  // 6. Update local working tree.
  // Clear existing supabase directory safely (only on a full revert).
  if (filterFiles.length === 0) {
    try {
      await fs.rm('supabase', { recursive: true, force: true });
    }
    catch (e) { } // force:true already tolerates a missing dir; extra safety
  }
  // Recreate files from tree.
  for (const [relPath, hash] of Object.entries(tree)) {
    if (filterFiles.length > 0 && !filterFiles.includes(relPath))
      continue;
    const fullPath = path.join('supabase', relPath);
    await fs.mkdir(path.dirname(fullPath), { recursive: true });
    const content = await readObject(hash);
    await fs.writeFile(fullPath, content, 'utf-8');
  }
  rl.close();
  if (!madeDbChanges) {
    console.log(chalk.green('\nNo database changes were required. The live database already matches this commit.'));
  }
  else {
    console.log(chalk.green('\nDatabase reset complete!'));
  }
  console.log(chalk.gray('Local files have been synchronized.'));
}
@@ -0,0 +1,96 @@
1
+ import fs from 'fs/promises';
2
+ import path from 'path';
3
+ import chalk from 'chalk';
4
+ import { extractSchema } from '../schema/extractor.js';
5
+ import { canonicalize } from '../utils/hashing.js';
6
+ const GITBASE_DIR = '.gitbase';
7
+ const CONFIG_FILE = path.join(GITBASE_DIR, 'config');
8
/**
 * Prints a human-readable summary of unsynced changes detected by
 * getStatus() (live database compared against the local files).
 */
export async function status() {
  const changes = await getStatus();
  if (!changes) return; // getStatus already reported the failure
  if (changes.length === 0) {
    console.log(chalk.green('No changes detected. Working tree clean.'));
    return;
  }
  console.log(chalk.yellow('Unsynced Changes (Database -> Local):'));
  const renderers = {
    new: (p) => chalk.green(` [NEW] ${p}`),
    modified: (p) => chalk.yellow(` [MODIFIED] ${p}`),
    deleted: (p) => chalk.red(` [DELETED] ${p}`),
  };
  for (const change of changes) {
    const render = renderers[change.type];
    if (render) {
      console.log(render(change.path));
    }
  }
}
30
/**
 * Computes the set of unsynced changes between the live database schema and
 * the local `supabase/` SQL files.
 * @returns {Promise<Array<object>|null>} change records
 *   ({type: 'new'|'modified'|'deleted', path, content?, rawName}), or null
 *   when not initialized or the schema fetch fails.
 */
export async function getStatus() {
  // Refuse to run outside an initialized gitbase directory.
  try {
    await fs.access(CONFIG_FILE);
  } catch {
    console.error(chalk.red('Not initialized. Run `gitb init` first.'));
    return null;
  }
  const config = JSON.parse(await fs.readFile(CONFIG_FILE, 'utf-8'));
  const projectRef = config.projectRef;
  let liveSchema;
  try {
    liveSchema = await extractSchema(projectRef);
  } catch (e) {
    console.error(chalk.red(`Failed to fetch schema: ${e.message}`));
    return null;
  }
  const changes = [];
  for (const type of ['tables', 'functions', 'views', 'triggers', 'policies']) {
    await checkType(liveSchema[type], type, changes);
  }
  return changes;
}
60
/**
 * Diffs one object category (tables, views, ...) between the live schema and
 * the local `supabase/<type>/*.sql` files, appending change records.
 * @param {Record<string,string>} liveObjects - object name -> SQL text from the live DB.
 * @param {string} type - Category directory name under `supabase/`.
 * @param {Array<object>} changes - Output array of change records; mutated in place.
 */
async function checkType(liveObjects, type, changes) {
  const dir = path.join('supabase', type);
  // Ensure dir exists so readdir below can't fail on a fresh checkout.
  try {
    await fs.mkdir(dir, { recursive: true });
  }
  catch { }
  const localFiles = await fs.readdir(dir).catch(() => []);
  const localMap = {};
  for (const file of localFiles) {
    if (!file.endsWith('.sql'))
      continue;
    // Strip exactly the trailing extension; the previous replace('.sql', '')
    // removed the FIRST '.sql' occurrence anywhere in the name.
    const name = file.slice(0, -4);
    const content = await fs.readFile(path.join(dir, file), 'utf-8');
    localMap[name] = content;
  }
  // Check for New or Modified
  for (const [name, content] of Object.entries(liveObjects)) {
    // Sanitize name for filename ('/' is illegal in a path component).
    const safeName = name.replace(/\//g, '_');
    if (!localMap[safeName]) {
      changes.push({ type: 'new', path: `${type}/${safeName}.sql`, content, rawName: name });
    }
    else if (canonicalize(content) !== canonicalize(localMap[safeName])) {
      changes.push({ type: 'modified', path: `${type}/${safeName}.sql`, content, rawName: name });
    }
  }
  // Check for Deleted. Build the sanitized live-key set ONCE: the original
  // recomputed the array and scanned it with .includes() inside the loop,
  // an accidental O(n^2).
  const liveKeysSafe = new Set(Object.keys(liveObjects).map((k) => k.replace(/\//g, '_')));
  for (const name of Object.keys(localMap)) {
    if (!liveKeysSafe.has(name)) {
      changes.push({ type: 'deleted', path: `${type}/${name}.sql`, rawName: name });
    }
  }
}
@@ -0,0 +1,19 @@
1
+ import chalk from 'chalk';
2
+ import { getToken } from '../utils/config.js';
3
+ import { getProjects } from '../api/supabase.js';
4
/**
 * Shows the current authentication status: a masked token preview plus a
 * live check that the token can list projects.
 */
export async function whoami() {
  const token = await getToken();
  if (!token) {
    console.log(chalk.red('Not logged in. Run `gitb login`.'));
    return;
  }
  // Never print the full token — first/last 4 characters only.
  const masked = `${token.substring(0, 4)}...${token.substring(token.length - 4)}`;
  console.log(chalk.blue(`Token: ${masked}`));
  try {
    const projects = await getProjects();
    console.log(chalk.green(`Logged in successfully. You have access to ${projects.length} project(s).`));
    console.log(chalk.gray('Manage your tokens and account at: https://supabase.com/dashboard/account/tokens'));
  } catch (e) {
    console.log(chalk.red(`Invalid or expired token: ${e.message}`));
  }
}
package/dist/index.js ADDED
@@ -0,0 +1,130 @@
1
#!/usr/bin/env node
// CLI entry point for gitb. Every command lazily imports its handler module
// so startup stays fast regardless of which subcommand runs.
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
yargs(hideBin(process.argv))
    .scriptName('gitb')
    .usage('$0 <cmd> [args]')
    .command('login', 'Authenticate with Supabase', {}, async (argv) => {
        const { login } = await import('./commands/login.js');
        await login();
    })
    .command('whoami', 'Show current authentication status', {}, async (argv) => {
        const { whoami } = await import('./commands/whoami.js');
        await whoami();
    })
    .command('init', 'Initialize gitbase in the current directory', (yargs) => {
        return yargs.option('force', {
            alias: 'f',
            type: 'boolean',
            description: 'Force re-initialization'
        });
    }, async (argv) => {
        const { init } = await import('./commands/init.js');
        await init(argv);
    })
    .command('status', 'Show the working tree status', {}, async (argv) => {
        const { status } = await import('./commands/status.js');
        await status();
    })
    // 'pull' and 'add' are aliases: both sync DB objects into local files.
    .command(['pull [files..]', 'add [files..]'], 'Pull changes from database to local files', (yargs) => {
        return yargs.positional('files', {
            describe: 'Files to pull',
            type: 'string',
            array: true
        });
    }, async (argv) => {
        const { add } = await import('./commands/add.js');
        await add(argv);
    })
    .command('commit', 'Record changes to the repository', (yargs) => {
        return yargs.option('message', {
            alias: 'm',
            type: 'string',
            description: 'Commit message',
            demandOption: true
        });
    }, async (argv) => {
        const { commit } = await import('./commands/commit.js');
        await commit(argv);
    })
    .command(['revert [commit]', 'reset [commit]'], 'Revert to a previous state', (yargs) => {
        return yargs.positional('commit', {
            describe: 'Commit hash to revert to (defaults to last commit)',
            type: 'string'
        }).option('files', {
            describe: 'Specific files to revert',
            type: 'string',
            array: true
        });
    }, async (argv) => {
        const { revert } = await import('./commands/revert.js');
        await revert(argv);
    })
    .command('log', 'Show commit logs', {}, async (argv) => {
        const { log } = await import('./commands/log.js');
        await log();
    })
    .command('diff [commit]', 'Show changes between commits or live database', (yargs) => {
        return yargs.positional('commit', {
            describe: 'Commit hash to compare with Live Database (defaults to HEAD)',
            type: 'string'
        });
    }, async (argv) => {
        const { diff } = await import('./commands/diff.js');
        await diff(argv);
    })
    .command('push [files..]', 'Push local changes to the live database', (yargs) => {
        return yargs.positional('files', {
            describe: 'Specific files to push',
            type: 'string',
            array: true
        });
    }, async (argv) => {
        const { push } = await import('./commands/push.js');
        await push(argv);
    })
    .command('branch [name]', 'List or create branches', (yargs) => {
        return yargs.positional('name', {
            describe: 'Name of the branch to create',
            type: 'string'
        }).option('delete', {
            alias: 'd',
            type: 'boolean',
            description: 'Delete a branch'
        });
    }, async (argv) => {
        const { branch } = await import('./commands/branch.js');
        await branch(argv);
    })
    .command('checkout <name>', 'Switch branches', (yargs) => {
        return yargs.positional('name', {
            describe: 'Name of the branch to switch to',
            type: 'string',
            demandOption: true
        });
    }, async (argv) => {
        const { checkout } = await import('./commands/branch.js');
        await checkout(argv);
    })
    .command('merge <name>', 'Merge another branch into the current one', (yargs) => {
        return yargs.positional('name', {
            describe: 'Name of the branch to merge from',
            type: 'string',
            demandOption: true
        });
    }, async (argv) => {
        const { merge } = await import('./commands/merge.js');
        await merge(argv);
    })
    .command('remote <subcommand> [args]', 'Manage remote history repositories', (yargs) => {
        return yargs.positional('subcommand', {
            // Fixed help text: previously read "Digital subcommand (...)".
            describe: 'Remote subcommand (add, list, push, pull)',
            type: 'string',
            choices: ['add', 'list', 'push', 'pull']
        });
    }, async (argv) => {
        const { remote } = await import('./commands/remote.js');
        await remote(argv);
    })
    .help()
    .parse();