@tmlmobilidade/env-sync 20260304.1625.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +150 -0
- package/dist/artifacts/upload.service.d.ts +17 -0
- package/dist/artifacts/upload.service.js +181 -0
- package/dist/cli/commands.d.ts +12 -0
- package/dist/cli/commands.js +111 -0
- package/dist/config/config-loader.d.ts +37 -0
- package/dist/config/config-loader.js +91 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +176 -0
- package/dist/mongodb/dump.service.d.ts +8 -0
- package/dist/mongodb/dump.service.js +43 -0
- package/dist/mongodb/restore.service.d.ts +6 -0
- package/dist/mongodb/restore.service.js +28 -0
- package/dist/mongodb/sync.service.d.ts +7 -0
- package/dist/mongodb/sync.service.js +106 -0
- package/dist/storage/rclone.service.d.ts +2 -0
- package/dist/storage/rclone.service.js +78 -0
- package/dist/storage/sync.service.d.ts +2 -0
- package/dist/storage/sync.service.js +11 -0
- package/dist/utils/exec.d.ts +12 -0
- package/dist/utils/exec.js +114 -0
- package/dist/utils/logger.d.ts +11 -0
- package/dist/utils/logger.js +37 -0
- package/dist/utils/metadata.d.ts +6 -0
- package/dist/utils/metadata.js +56 -0
- package/package.json +46 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { cancel, isCancel, multiselect, outro, spinner } from '@clack/prompts';
|
|
3
|
+
import { ASCII_TMLMOBILIDADE } from '@tmlmobilidade/consts';
|
|
4
|
+
import chalk from 'chalk';
|
|
5
|
+
import { uploadArtifacts } from './artifacts/upload.service.js';
|
|
6
|
+
import { parseArgs, showHelp } from './cli/commands.js';
|
|
7
|
+
import { loadConfig } from './config/config-loader.js';
|
|
8
|
+
import { syncMongoDB } from './mongodb/sync.service.js';
|
|
9
|
+
import { syncStorageService } from './storage/sync.service.js';
|
|
10
|
+
import { logger } from './utils/logger.js';
|
|
11
|
+
/* * */
|
|
12
|
+
/**
 * Print the TML Mobilidade ASCII banner to stdout, colourising the
 * block-drawing characters with chalk before logging.
 */
export const renderTitle = () => {
    const banner = ASCII_TMLMOBILIDADE
        .replace(/▓/g, chalk.dim(chalk.yellow('▓')))
        .replace(/ ▄▄▄ /g, chalk.yellow(' ▄▄▄ '))
        .replace(/ ▀▀▀ /g, chalk.yellow(' ▀▀▀ '))
        .replace(/▐▒▒▒▌/g, chalk.yellow('▐') + chalk.white('▒▒▒') + chalk.yellow('▌'));
    console.log(banner);
};
|
|
20
|
+
/**
 * Decide which operations to run, either from CLI flags (non-interactive)
 * or by prompting the user with a multiselect. Exits the process with
 * code 0 when the interactive prompt is cancelled.
 *
 * Returns { backupOnly, syncDb, syncStorage } booleans.
 */
async function determineSyncTargets(options) {
    // --backup-only: only dump the database, skip storage and restore.
    if (options.backupOnly) {
        return { backupOnly: true, syncDb: true, syncStorage: false };
    }
    // --upload-artifacts alone: skip all sync operations.
    if (options.uploadArtifacts && !options.dbOnly && !options.storageOnly) {
        return { backupOnly: false, syncDb: false, syncStorage: false };
    }
    // Explicit flags: non-interactive mode.
    if (options.dbOnly || options.storageOnly) {
        return {
            backupOnly: false,
            syncDb: Boolean(options.dbOnly),
            syncStorage: Boolean(options.storageOnly),
        };
    }
    // No flags at all: ask the user interactively.
    const choices = await multiselect({
        message: 'What would you like to do?',
        options: [
            { hint: 'Dump MongoDB database only (no restore to staging)', label: 'Backup only', value: 'backup' },
            { hint: 'Sync MongoDB database from production to staging', label: 'Database', value: 'database' },
            { hint: 'Sync OCI storage files from production to staging', label: 'Storage', value: 'storage' },
        ],
        required: true,
    });
    if (isCancel(choices)) {
        cancel('Operation cancelled');
        process.exit(0);
    }
    const wantsBackupOnly = choices.includes('backup');
    return {
        backupOnly: wantsBackupOnly,
        syncDb: wantsBackupOnly || choices.includes('database'),
        syncStorage: !wantsBackupOnly && choices.includes('storage'),
    };
}
|
|
79
|
+
/**
 * Run the storage sync behind a spinner. On failure the error is logged
 * and the process exits with code 1.
 */
async function runStorageSync(config) {
    const progress = spinner();
    progress.start('Syncing storage...');
    try {
        await syncStorageService(config);
        progress.stop('Storage sync completed successfully');
    }
    catch (error) {
        progress.stop('Storage sync failed');
        const reason = error instanceof Error ? error.message : String(error);
        logger.error(reason);
        process.exit(1);
    }
}
|
|
92
|
+
/**
 * Run the MongoDB sync (or backup-only dump) behind a spinner. On
 * failure the error is logged and the process exits with code 1.
 */
async function runDatabaseSync(config, options) {
    const backupOnly = options.backupOnly || false;
    const progress = spinner();
    progress.start(backupOnly ? 'Backing up database...' : 'Syncing database...');
    try {
        await syncMongoDB(config, {
            backupOnly,
            skipCleanup: options.noCleanup || false,
            useReplicaSet: options.replicaSet,
        });
        progress.stop(backupOnly ? 'Database backup completed successfully' : 'Database sync completed successfully');
    }
    catch (error) {
        progress.stop(backupOnly ? 'Database backup failed' : 'Database sync failed');
        const reason = error instanceof Error ? error.message : String(error);
        logger.error(reason);
        process.exit(1);
    }
}
|
|
110
|
+
/**
 * Upload the local backup directory to the configured OCI artifacts
 * bucket behind a spinner. Exits with code 1 when the bucket is not
 * configured or the upload fails.
 */
async function runArtifactUpload(config) {
    const progress = spinner();
    progress.start('Uploading artifacts to OCI...');
    try {
        const bucket = config.artifacts.bucket;
        if (!bucket) {
            throw new Error('ARTIFACTS_BUCKET is not configured. Please set it in your .env file.');
        }
        await uploadArtifacts({
            bucket,
            localPath: config.backupDir,
            prefix: config.artifacts.prefix,
            storageConfig: config.storage,
        });
        progress.stop('Artifacts uploaded successfully');
    }
    catch (error) {
        progress.stop('Artifact upload failed');
        const reason = error instanceof Error ? error.message : String(error);
        logger.error(reason);
        process.exit(1);
    }
}
|
|
131
|
+
/**
 * CLI entry point: parse flags, load configuration, determine which
 * operations to run, then execute them in order
 * (storage → database → artifact upload).
 */
async function main() {
    const options = parseArgs(process.argv.slice(2));
    if (options.help) {
        showHelp();
        process.exit(0);
    }
    try {
        renderTitle();
        // Enable verbose logging before anything else runs.
        if (options.verbose) {
            logger.setVerbose(true);
            logger.verbose('Verbose mode enabled');
        }
        // Load configuration from the environment.
        const config = loadConfig();
        logger.verbose('Configuration loaded successfully');
        const { backupOnly, syncDb, syncStorage } = await determineSyncTargets(options);
        if (syncStorage) {
            await runStorageSync(config);
        }
        if (syncDb) {
            await runDatabaseSync(config, { ...options, backupOnly });
        }
        if (options.uploadArtifacts) {
            await runArtifactUpload(config);
        }
        // Only show the outro when at least one operation actually ran.
        const ranSomething = syncDb || syncStorage || options.uploadArtifacts;
        if (ranSomething) {
            outro(backupOnly ? 'Database backup completed successfully!' : 'Environment sync completed successfully!');
        }
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        logger.error(reason);
        process.exit(1);
    }
}
|
|
173
|
+
// Top-level invocation: any error that escapes main() is logged and the
// process terminates with a non-zero exit code.
main().catch((error) => {
    const reason = error instanceof Error ? error.message : String(error);
    logger.error(`Unexpected error: ${reason}`);
    process.exit(1);
});
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import type { MongoConfig } from '../config/config-loader.js';
/** Options accepted by {@link dumpMongoDB}. */
export interface DumpOptions {
    /** Connection settings for the database being dumped. */
    config: MongoConfig;
    /** Collection names to exclude from the dump. */
    excludeCollections?: string[];
    /** Directory where the dump archive is written. */
    outputPath: string;
    /** When true, dump with secondary read preference (replica set). */
    useReplicaSet?: boolean;
}
/** Dump a MongoDB database to a gzip archive using the `mongodump` CLI. */
export declare function dumpMongoDB(options: DumpOptions): Promise<void>;
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import { existsSync, mkdirSync } from 'fs';
|
|
2
|
+
import { execCommand } from '../utils/exec.js';
|
|
3
|
+
import { logger } from '../utils/logger.js';
|
|
4
|
+
/**
 * Dump a MongoDB database to a gzip archive (`production.dump`) inside
 * `outputPath` using the `mongodump` CLI.
 *
 * Fix: the archive path previously used a backslash separator
 * (`${outputPath}\production.dump`). On POSIX systems the backslash is
 * an ordinary filename character, so the archive was written to a file
 * literally named `<dir>\production.dump` next to — not inside — the
 * output directory. A forward slash is accepted on every platform the
 * MongoDB tools support.
 *
 * @param options - { config, excludeCollections, outputPath, useReplicaSet }.
 * @throws Re-throws any failure from the underlying mongodump command.
 */
export async function dumpMongoDB(options) {
    const { config, excludeCollections = [], outputPath, useReplicaSet = false } = options;
    // Ensure the output directory exists before mongodump writes into it.
    if (!existsSync(outputPath)) {
        mkdirSync(outputPath, { recursive: true });
    }
    logger.info('Dumping production database...');
    logger.verbose(`Output path: ${outputPath}`);
    logger.verbose(`Host: ${config.host}`);
    logger.verbose(`Database: ${config.database}`);
    logger.verbose(`Auth database: ${config.authDatabase}`);
    if (excludeCollections.length > 0) {
        logger.verbose(`Excluding collections: ${excludeCollections.join(', ')}`);
    }
    // Build the mongodump command for a full gzip archive dump.
    let dumpCmd = `mongodump \
--host=${config.host} \
--username=${config.username} \
--password=${config.password} \
--authenticationDatabase=${config.authDatabase} \
--db=${config.database} \
--gzip \
--archive=${outputPath}/production.dump`;
    if (useReplicaSet) {
        dumpCmd += ' --readPreference=secondary';
        logger.verbose('Using replica set mode with secondary read preference');
    }
    // Append any configured collection exclusions.
    for (const collection of excludeCollections) {
        dumpCmd += ` --excludeCollection=${collection}`;
    }
    try {
        await execCommand(dumpCmd);
        logger.success('MongoDB dump completed successfully');
    }
    catch (error) {
        logger.error(`Failed to dump MongoDB: ${error instanceof Error ? error.message : String(error)}`);
        throw error;
    }
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { execCommand } from '../utils/exec.js';
|
|
2
|
+
import { logger } from '../utils/logger.js';
|
|
3
|
+
/**
 * Restore a gzip mongodump archive (`production.dump`) from `dumpPath`
 * into the target database, dropping existing collections first.
 *
 * Fix: the archive path previously used a backslash separator
 * (`${dumpPath}\production.dump`). On POSIX systems that points at a
 * file literally named `<dir>\production.dump` rather than the archive
 * inside the dump directory; the forward slash is portable (matching
 * the corrected dump path in dump.service.js).
 *
 * @param options - { config, dumpPath }: target connection settings and
 *   the directory containing the dump archive.
 * @throws Re-throws any failure from the underlying mongorestore command.
 */
export async function restoreMongoDB(options) {
    const { config, dumpPath } = options;
    logger.verbose(`Restore dump path: ${dumpPath}`);
    logger.verbose(`Host: ${config.host}`);
    logger.verbose(`Database: ${config.database}`);
    logger.verbose(`Auth database: ${config.authDatabase}`);
    // Full restore: drop the existing database content, then restore.
    const restoreCmd = `mongorestore \
--host=${config.host} \
--username=${config.username} \
--password=${config.password} \
--authenticationDatabase=${config.authDatabase} \
--db=${config.database} \
--drop \
--gzip \
--numParallelCollections=4 \
--archive=${dumpPath}/production.dump`;
    try {
        await execCommand(restoreCmd);
        logger.success('MongoDB restore completed successfully');
    }
    catch (error) {
        logger.error(`Failed to restore MongoDB: ${error instanceof Error ? error.message : String(error)}`);
        throw error;
    }
}
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { SyncConfig } from '../config/config-loader.js';
/** Options accepted by {@link syncMongoDB}. */
export interface MongoSyncOptions {
    /** Dump only; skip restoring to the staging database. */
    backupOnly?: boolean;
    /** Skip deletion of old backups after the run. */
    skipCleanup?: boolean;
    /** Dump with secondary read preference (replica-set source). */
    useReplicaSet?: boolean;
}
/** Dump the production MongoDB database and (unless backupOnly) restore it to staging. */
export declare function syncMongoDB(config: SyncConfig, options?: MongoSyncOptions): Promise<void>;
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
import { existsSync, mkdirSync, readdirSync, rmSync, statSync, unlinkSync } from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { checkCommandAvailable } from '../utils/exec.js';
|
|
4
|
+
import { logger } from '../utils/logger.js';
|
|
5
|
+
import { saveBackupMetadata } from '../utils/metadata.js';
|
|
6
|
+
import { dumpMongoDB } from './dump.service.js';
|
|
7
|
+
import { restoreMongoDB } from './restore.service.js';
|
|
8
|
+
/**
 * Dump the production MongoDB database into a timestamped directory and,
 * unless `backupOnly` is set, restore the dump into the staging database.
 * Records backup metadata afterwards and prunes old backups unless
 * `skipCleanup` is set.
 *
 * @param config - Loaded SyncConfig (databaseProduction, databaseStaging,
 *   backupDir, excludeCollections, backupRetentionDays).
 * @param options - { backupOnly, skipCleanup, useReplicaSet }, all optional.
 * @throws When the MongoDB CLI tools are missing or the dump/restore fails.
 */
export async function syncMongoDB(config, options = {}) {
    const { backupOnly = false, skipCleanup = false, useReplicaSet } = options;
    logger.info(backupOnly ? 'Starting MongoDB backup...' : 'Starting MongoDB sync...');
    // Fail fast if the required CLI tools are not installed.
    logger.verbose('Checking for MongoDB tools...');
    if (!(await checkCommandAvailable('mongodump'))) {
        throw new Error('mongodump not found. Please install MongoDB tools.');
    }
    logger.verbose('mongodump found');
    if (!backupOnly) {
        // mongorestore is only needed when we also restore to staging.
        if (!(await checkCommandAvailable('mongorestore'))) {
            throw new Error('mongorestore not found. Please install MongoDB tools.');
        }
        logger.verbose('mongorestore found');
    }
    // Determine backup type (replica-set dumps get a distinct directory prefix).
    const effectiveReplicaSet = useReplicaSet ?? false;
    const backupType = effectiveReplicaSet ? 'mongodb_rs' : 'mongodb';
    logger.verbose(`Backup type: ${backupType}`);
    logger.verbose(`Replica set mode: ${effectiveReplicaSet ? 'enabled' : 'disabled'}`);
    // Create backup directory named `<type>_<YYYY-MM-DD_HH-MM-SS>`.
    const backupTimestamp = new Date().toISOString().replace(/[:.]/g, '-').replace('T', '_').substring(0, 19);
    const dumpPath = path.join(config.backupDir, `${backupType}_${backupTimestamp}`);
    logger.verbose(`Backup directory: ${dumpPath}`);
    if (!existsSync(dumpPath)) {
        mkdirSync(dumpPath, { recursive: true });
    }
    // Dump production database
    logger.info('Dumping production database...');
    try {
        await dumpMongoDB({
            config: config.databaseProduction,
            excludeCollections: config.excludeCollections,
            outputPath: dumpPath,
            useReplicaSet: effectiveReplicaSet,
        });
    }
    catch (error) {
        logger.error(`Failed to dump production database: ${error instanceof Error ? error.message : String(error)}`);
        throw error;
    }
    if (!backupOnly) {
        // Restore the fresh dump into the staging database.
        logger.info('Restoring to staging database...');
        try {
            await restoreMongoDB({
                config: config.databaseStaging,
                dumpPath,
            });
        }
        catch (error) {
            logger.error(`Failed to restore to staging database: ${error instanceof Error ? error.message : String(error)}`);
            throw error;
        }
    }
    // Persist when this backup type last completed successfully.
    logger.verbose('Saving backup metadata...');
    saveBackupMetadata(config.backupDir, backupType, backupTimestamp);
    logger.verbose(`Backup metadata saved: ${backupType}_last_backup=${backupTimestamp}`);
    logger.success(backupOnly ? 'MongoDB backup completed successfully' : 'MongoDB sync completed successfully');
    // Cleanup old backups
    if (!skipCleanup) {
        logger.verbose(`Starting cleanup (retention: ${config.backupRetentionDays} days)...`);
        cleanupOldBackups(config.backupDir, config.backupRetentionDays);
    }
    else {
        logger.verbose('Cleanup skipped (--no-cleanup flag)');
    }
}
|
|
74
|
+
/**
 * Delete backup files/directories in `backupDir` whose modification time
 * is older than `retentionDays` days, always preserving the metadata
 * file. Failures are logged as warnings and never thrown.
 */
function cleanupOldBackups(backupDir, retentionDays) {
    logger.info(`Cleaning up old backups (keeping last ${retentionDays} days)...`);
    if (!existsSync(backupDir)) {
        return;
    }
    const startedAt = Date.now();
    const cutoffMs = retentionDays * 24 * 60 * 60 * 1000; // retention window in ms
    try {
        for (const entry of readdirSync(backupDir, { withFileTypes: true })) {
            // Never delete the metadata file.
            if (entry.name === METADATA_FILE) {
                continue;
            }
            const entryPath = path.join(backupDir, entry.name);
            const ageMs = startedAt - statSync(entryPath).mtimeMs;
            if (ageMs <= cutoffMs) {
                continue; // still within the retention window
            }
            if (entry.isDirectory()) {
                rmSync(entryPath, { force: true, recursive: true });
                logger.info(`Deleted old backup directory: ${entry.name}`);
            }
            else {
                unlinkSync(entryPath);
                logger.info(`Deleted old backup file: ${entry.name}`);
            }
        }
        logger.success('Cleanup completed');
    }
    catch (error) {
        logger.warn(`Cleanup failed: ${error instanceof Error ? error.message : String(error)}`);
    }
}
|
|
106
|
+
// Name of the per-directory metadata file that cleanup must never delete.
const METADATA_FILE = '.backup_metadata';
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { rmSync, writeFileSync } from 'fs';
|
|
2
|
+
import os from 'os';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import { checkCommandAvailable, execCommandStream } from '../utils/exec.js';
|
|
5
|
+
import { logger } from '../utils/logger.js';
|
|
6
|
+
/**
 * Render a minimal OCI CLI config file from the credentials in
 * StorageConfig, so rclone's "user_principal_auth" provider can be used
 * without a ~/.oci/config on the host.
 *
 * See: https://rclone.org/oracleobjectstorage/#authentication-providers
 */
function buildOciConfig(config, profileName) {
    const stanza = [
        `[${profileName}]`,
        `user=${config.user}`,
        `fingerprint=${config.fingerprint}`,
        `key_file=${config.keyFile}`,
        `tenancy=${config.tenancy}`,
        `region=${config.region}`,
    ];
    // Trailing newline matches the original trailing-empty-entry join.
    return `${stanza.join('\n')}\n`;
}
|
|
23
|
+
/**
 * Render the rclone backend stanza pointing at the temporary OCI config
 * file. Uses the "user_principal_auth" provider so credentials are read
 * from that file rather than from ~/.oci/config.
 */
function buildRcloneConfig(config, ociConfigPath, profileName) {
    const stanza = [
        `[${config.remoteName}]`,
        `type = ${config.type}`,
        `namespace = ${config.namespace}`,
        `compartment = ${config.compartment}`,
        `region = ${config.region}`,
        'provider = user_principal_auth',
        `config_file = ${ociConfigPath}`,
        `config_profile = ${profileName}`,
    ];
    // Trailing newline matches the original trailing-empty-entry join.
    return `${stanza.join('\n')}\n`;
}
|
|
40
|
+
/**
 * Sync OCI Object Storage files from production to staging using rclone
 * with throw-away credential config files written to the OS temp dir.
 *
 * Fix: the temporary OCI/rclone config files (which reference the user's
 * key file and identity) were only removed on success, leaking them in
 * the temp directory whenever the sync failed. Cleanup now runs in a
 * `finally` block with `force: true` so a missing file is not an error.
 *
 * @throws When rclone is not installed or the sync command fails.
 */
export async function syncStorage(config) {
    logger.info('Starting OCI file sync...');
    // Check if rclone is available
    if (!(await checkCommandAvailable('rclone'))) {
        throw new Error('rclone not found. Please install rclone.');
    }
    // Prepare temporary OCI config (no need for ~/.oci/config)
    logger.info('Building temporary OCI config...');
    const profileName = 'Default';
    const ociConfigFile = path.join(os.tmpdir(), `oci-env-sync-${Date.now()}.conf`);
    writeFileSync(ociConfigFile, buildOciConfig(config, profileName));
    // Build rclone config pointing at the temporary OCI config
    logger.info('Building rclone config...');
    const rcloneConfigFile = path.join(os.tmpdir(), `rclone-env-sync-${Date.now()}.conf`);
    writeFileSync(rcloneConfigFile, buildRcloneConfig(config, ociConfigFile, profileName));
    // Use the configured remote name so that rclone talks to OCI Object
    // Storage instead of treating source/dest as local filesystem paths.
    logger.info('Syncing files from production to staging...');
    const sourcePath = `${config.remoteName}:${config.source}`;
    const destPath = `${config.remoteName}:${config.dest}`;
    const baseCmd = `rclone sync "${sourcePath}" "${destPath}" --config ${rcloneConfigFile}`;
    const verboseFlags = logger.isVerbose() ? ' --progress --verbose' : '';
    const syncCmd = `${baseCmd}${verboseFlags}`;
    try {
        // Use streaming execution to show rclone progress in real-time
        await execCommandStream(syncCmd);
        logger.success('OCI file sync completed successfully');
    }
    catch (error) {
        logger.error(`OCI file sync failed: ${error instanceof Error ? error.message : String(error)}`);
        throw error;
    }
    finally {
        // Always remove the temporary credential files, even on failure.
        rmSync(ociConfigFile, { force: true });
        rmSync(rcloneConfigFile, { force: true });
    }
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { logger } from '../utils/logger.js';
|
|
2
|
+
import { syncStorage } from './rclone.service.js';
|
|
3
|
+
/**
 * Thin wrapper around syncStorage that logs the failure reason with
 * context before re-throwing to the caller.
 */
export async function syncStorageService(config) {
    try {
        await syncStorage(config.storage);
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        logger.error(`Storage sync failed: ${reason}`);
        throw error;
    }
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/** Options accepted by {@link execCommand} and {@link execCommandStream}. */
export interface ExecOptions {
    /** Working directory for the spawned process. */
    cwd?: string;
    /** Extra environment variables merged over process.env. */
    env?: NodeJS.ProcessEnv;
    /** Timeout in milliseconds; 0 or unset means no timeout. */
    timeout?: number;
}
/** Captured output of a completed command. */
export interface ExecResult {
    /** Everything the command wrote to stderr. */
    stderr: string;
    /** Everything the command wrote to stdout. */
    stdout: string;
}
/** Run a command and capture its output; rejects with a detailed Error on non-zero exit. */
export declare function execCommand(command: string, options?: ExecOptions): Promise<ExecResult>;
/** Run a command with stdio inherited so progress output streams to the terminal in real time. */
export declare function execCommandStream(command: string, options?: ExecOptions): Promise<void>;
/** Resolve true when `command` can be found on the PATH. */
export declare function checkCommandAvailable(command: string): Promise<boolean>;
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { exec as execCallback, spawn } from 'child_process';
|
|
2
|
+
import { promisify } from 'util';
|
|
3
|
+
import { logger } from './logger.js';
|
|
4
|
+
const exec = promisify(execCallback);
|
|
5
|
+
/**
 * Execute a shell command and capture stdout/stderr. Rejects with an
 * Error containing exit code and both output streams on failure.
 * Passwords (`--password=...`) are masked in verbose log output only —
 * the real command is still executed unmodified.
 */
export async function execCommand(command, options = {}) {
    // Mask sensitive information in verbose output
    const maskedCommand = command.replace(/--password=([^\s]+)/g, '--password=***');
    logger.verbose(`Executing: ${maskedCommand}`);
    try {
        const { stdout, stderr } = await exec(command, {
            cwd: options.cwd,
            env: { ...process.env, ...options.env },
            timeout: options.timeout || 0,
        });
        if (logger.isVerbose()) {
            if (stdout) {
                logger.verbose(`Stdout: ${stdout.toString().trim()}`);
            }
            if (stderr) {
                logger.verbose(`Stderr: ${stderr.toString().trim()}`);
            }
        }
        return {
            stderr: stderr.toString(),
            stdout: stdout.toString(),
        };
    }
    catch (error) {
        const execError = error;
        throw new Error(`Command failed: ${command}\nExit code: ${execError.code || 'unknown'}\nStderr: ${execError.stderr || 'none'}\nStdout: ${execError.stdout || 'none'}`);
    }
}
|
|
33
|
+
/**
 * Run a command with stdio inherited so the child's output (e.g. rclone
 * progress) streams directly to the terminal. Resolves on exit code 0,
 * rejects otherwise or when the process cannot be spawned.
 *
 * NOTE(review): the hand-rolled tokenizer treats every backslash as an
 * escape character, so literal backslashes in arguments (e.g. Windows
 * paths) do not survive parsing — confirm callers only pass
 * POSIX-style paths.
 */
export async function execCommandStream(command, options = {}) {
    return new Promise((resolve, reject) => {
        // Mask sensitive information in verbose output
        const maskedCommand = command.replace(/--password=([^\s]+)/g, '--password=***');
        logger.verbose(`Executing: ${maskedCommand}`);
        // Parse command properly handling quoted strings
        // This avoids the deprecation warning from using shell: true with arguments
        const parts = [];
        let current = '';     // token being accumulated
        let inQuotes = false; // currently inside a quoted region?
        let quoteChar = '';   // which quote character opened the region
        let escaped = false;  // previous character was a backslash
        for (const char of command) {
            if (escaped) {
                // Escaped character is taken literally, whatever it is.
                current += char;
                escaped = false;
                continue;
            }
            if (char === '\\') {
                escaped = true;
                continue;
            }
            if ((char === '"' || char === '\'') && !escaped) {
                if (!inQuotes) {
                    // Opening quote: start a quoted region.
                    inQuotes = true;
                    quoteChar = char;
                }
                else if (char === quoteChar) {
                    // Matching quote closes the region.
                    inQuotes = false;
                    quoteChar = '';
                }
                else {
                    // The other quote kind inside a region is a literal character.
                    current += char;
                }
            }
            else if (char === ' ' && !inQuotes) {
                // Unquoted space terminates the current token.
                if (current) {
                    parts.push(current);
                    current = '';
                }
            }
            else {
                current += char;
            }
        }
        // Flush the final token, if any.
        if (current) {
            parts.push(current);
        }
        if (parts.length === 0) {
            reject(new Error(`Invalid command: ${command}`));
            return;
        }
        const cmd = parts[0];
        const args = parts.slice(1);
        const childProcess = spawn(cmd, args, {
            cwd: options.cwd,
            env: { ...process.env, ...options.env },
            shell: false,
            stdio: 'inherit', // This allows rclone's progress output to be displayed in real-time
        });
        childProcess.on('close', (code) => {
            if (code === 0) {
                resolve();
            }
            else {
                reject(new Error(`Command failed with exit code ${code}: ${command}`));
            }
        });
        childProcess.on('error', (error) => {
            // 'error' fires when the executable cannot be spawned at all.
            reject(new Error(`Command failed: ${command}\nError: ${error.message}`));
        });
    });
}
|
|
106
|
+
/**
 * Probe the PATH for `command` via `which`; resolves true on success
 * and false when the lookup fails for any reason.
 */
export async function checkCommandAvailable(command) {
    return execCommand(`which ${command}`).then(
        () => true,
        () => false,
    );
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/** Shared console logger with timestamped output and an opt-in verbose mode. */
export declare const logger: {
    /** Clear the current stdout line and move the cursor to column 0. */
    clearPreviousLine(): void;
    /** Log to stderr with a red `[ERROR]` prefix. */
    error(...args: unknown[]): void;
    /** Log with a green timestamp prefix. */
    info(...args: unknown[]): void;
    /** Whether verbose mode is currently enabled. */
    isVerbose(): boolean;
    /** Plain console.log passthrough with no prefix. */
    log(...args: unknown[]): void;
    /** Enable or disable verbose output. */
    setVerbose(enabled: boolean): void;
    /** Log a success message with a green timestamp prefix. */
    success(...args: unknown[]): void;
    /** Log with a dim `[VERBOSE]` prefix, only when verbose mode is on. */
    verbose(...args: unknown[]): void;
    /** Log with a yellow `[WARNING]` prefix. */
    warn(...args: unknown[]): void;
};
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import chalk from 'chalk';
|
|
2
|
+
// Module-level flag toggled via logger.setVerbose().
let verboseMode = false;
/** Current time formatted as `YYYY-MM-DD HH:MM:SS` (derived from UTC ISO string). */
function getTimestamp() {
    const iso = new Date().toISOString();
    return iso.substring(0, 19).replace('T', ' ');
}
|
|
6
|
+
/**
 * Shared console logger. All output goes through console.*; verbose()
 * messages only appear after setVerbose(true).
 */
export const logger = {
    // Clear the current stdout line (used to overwrite progress output).
    // Fix: clearLine/cursorTo only exist on TTY streams, so the previous
    // unguarded calls crashed when stdout was piped or redirected.
    clearPreviousLine() {
        if (process.stdout.isTTY) {
            process.stdout.clearLine(0);
            process.stdout.cursorTo(0);
        }
    },
    // Errors go to stderr with a red prefix (no timestamp).
    error(...args) {
        console.error(chalk.red(`[ERROR]`), ...args);
    },
    // Timestamped informational output.
    info(...args) {
        console.log(chalk.green(`[${getTimestamp()}]`), ...args);
    },
    // Report whether verbose mode is on.
    isVerbose() {
        return verboseMode;
    },
    // Plain passthrough with no prefix.
    log(...args) {
        console.log(...args);
    },
    // Toggle verbose mode for subsequent verbose() calls.
    setVerbose(enabled) {
        verboseMode = enabled;
    },
    // Success currently renders identically to info (green timestamp).
    success(...args) {
        console.log(chalk.green(`[${getTimestamp()}]`), ...args);
    },
    // Emitted only when verbose mode is enabled.
    verbose(...args) {
        if (verboseMode) {
            console.log(chalk.dim(`[VERBOSE]`), ...args);
        }
    },
    // Warnings are written to stdout (not stderr) with a yellow prefix.
    warn(...args) {
        console.log(chalk.yellow(`[WARNING]`), ...args);
    },
};
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
/** Stored record of when a backup of a given type last completed. */
export interface BackupMetadata {
    /** Timestamp string recorded when the backup finished. */
    timestamp: string;
}
/** Path of the metadata file inside `backupDir`. */
export declare function getMetadataFilePath(backupDir: string): string;
/** Record `timestamp` as the last backup of `backupType` in `backupDir`. */
export declare function saveBackupMetadata(backupDir: string, backupType: string, timestamp: string): void;
/** Read the stored metadata for `backupType`; may return null (presumably when no record exists — confirm in implementation). */
export declare function loadBackupMetadata(backupDir: string, backupType: string): BackupMetadata | null;
|