@tamyla/clodo-framework 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +564 -0
- package/LICENSE +21 -0
- package/README.md +1393 -0
- package/bin/README.md +71 -0
- package/bin/clodo-service.js +416 -0
- package/bin/security/security-cli.js +96 -0
- package/bin/service-management/README.md +74 -0
- package/bin/service-management/create-service.js +129 -0
- package/bin/service-management/init-service.js +102 -0
- package/bin/service-management/init-service.js.backup +889 -0
- package/bin/shared/config/customer-cli.js +293 -0
- package/dist/config/ConfigurationManager.js +159 -0
- package/dist/config/CustomerConfigCLI.js +220 -0
- package/dist/config/FeatureManager.js +426 -0
- package/dist/config/customers.js +441 -0
- package/dist/config/domains.js +180 -0
- package/dist/config/features.js +225 -0
- package/dist/config/index.js +6 -0
- package/dist/database/database-orchestrator.js +730 -0
- package/dist/database/index.js +4 -0
- package/dist/deployment/auditor.js +971 -0
- package/dist/deployment/index.js +10 -0
- package/dist/deployment/rollback-manager.js +523 -0
- package/dist/deployment/testers/api-tester.js +80 -0
- package/dist/deployment/testers/auth-tester.js +129 -0
- package/dist/deployment/testers/core.js +217 -0
- package/dist/deployment/testers/database-tester.js +105 -0
- package/dist/deployment/testers/index.js +74 -0
- package/dist/deployment/testers/load-tester.js +120 -0
- package/dist/deployment/testers/performance-tester.js +105 -0
- package/dist/deployment/validator.js +558 -0
- package/dist/deployment/wrangler-deployer.js +574 -0
- package/dist/handlers/GenericRouteHandler.js +532 -0
- package/dist/index.js +39 -0
- package/dist/migration/MigrationAdapters.js +562 -0
- package/dist/modules/ModuleManager.js +668 -0
- package/dist/modules/security.js +98 -0
- package/dist/orchestration/cross-domain-coordinator.js +1083 -0
- package/dist/orchestration/index.js +5 -0
- package/dist/orchestration/modules/DeploymentCoordinator.js +258 -0
- package/dist/orchestration/modules/DomainResolver.js +196 -0
- package/dist/orchestration/modules/StateManager.js +332 -0
- package/dist/orchestration/multi-domain-orchestrator.js +255 -0
- package/dist/routing/EnhancedRouter.js +158 -0
- package/dist/schema/SchemaManager.js +778 -0
- package/dist/security/ConfigurationValidator.js +490 -0
- package/dist/security/DeploymentManager.js +208 -0
- package/dist/security/SecretGenerator.js +142 -0
- package/dist/security/SecurityCLI.js +228 -0
- package/dist/security/index.js +51 -0
- package/dist/security/patterns/environment-rules.js +66 -0
- package/dist/security/patterns/insecure-patterns.js +21 -0
- package/dist/service-management/ConfirmationEngine.js +411 -0
- package/dist/service-management/ErrorTracker.js +294 -0
- package/dist/service-management/GenerationEngine.js +3109 -0
- package/dist/service-management/InputCollector.js +237 -0
- package/dist/service-management/ServiceCreator.js +229 -0
- package/dist/service-management/ServiceInitializer.js +448 -0
- package/dist/service-management/ServiceOrchestrator.js +638 -0
- package/dist/service-management/handlers/ConfigMutator.js +130 -0
- package/dist/service-management/handlers/ConfirmationHandler.js +71 -0
- package/dist/service-management/handlers/GenerationHandler.js +80 -0
- package/dist/service-management/handlers/InputHandler.js +59 -0
- package/dist/service-management/handlers/ValidationHandler.js +203 -0
- package/dist/service-management/index.js +7 -0
- package/dist/services/GenericDataService.js +488 -0
- package/dist/shared/cloudflare/domain-discovery.js +562 -0
- package/dist/shared/cloudflare/domain-manager.js +912 -0
- package/dist/shared/cloudflare/index.js +8 -0
- package/dist/shared/cloudflare/ops.js +387 -0
- package/dist/shared/config/cache.js +1167 -0
- package/dist/shared/config/command-config-manager.js +174 -0
- package/dist/shared/config/customer-cli.js +258 -0
- package/dist/shared/config/index.js +9 -0
- package/dist/shared/config/manager.js +289 -0
- package/dist/shared/database/connection-manager.js +338 -0
- package/dist/shared/database/index.js +7 -0
- package/dist/shared/database/orchestrator.js +632 -0
- package/dist/shared/deployment/auditor.js +971 -0
- package/dist/shared/deployment/index.js +10 -0
- package/dist/shared/deployment/rollback-manager.js +523 -0
- package/dist/shared/deployment/validator.js +558 -0
- package/dist/shared/index.js +32 -0
- package/dist/shared/monitoring/health-checker.js +250 -0
- package/dist/shared/monitoring/index.js +8 -0
- package/dist/shared/monitoring/memory-manager.js +382 -0
- package/dist/shared/monitoring/production-monitor.js +390 -0
- package/dist/shared/production-tester/api-tester.js +80 -0
- package/dist/shared/production-tester/auth-tester.js +129 -0
- package/dist/shared/production-tester/core.js +217 -0
- package/dist/shared/production-tester/database-tester.js +105 -0
- package/dist/shared/production-tester/index.js +74 -0
- package/dist/shared/production-tester/load-tester.js +120 -0
- package/dist/shared/production-tester/performance-tester.js +105 -0
- package/dist/shared/security/api-token-manager.js +296 -0
- package/dist/shared/security/index.js +8 -0
- package/dist/shared/security/secret-generator.js +918 -0
- package/dist/shared/security/secure-token-manager.js +379 -0
- package/dist/shared/utils/error-recovery.js +240 -0
- package/dist/shared/utils/graceful-shutdown-manager.js +380 -0
- package/dist/shared/utils/index.js +9 -0
- package/dist/shared/utils/interactive-prompts.js +134 -0
- package/dist/shared/utils/rate-limiter.js +249 -0
- package/dist/utils/ErrorHandler.js +173 -0
- package/dist/utils/deployment/config-cache.js +1160 -0
- package/dist/utils/deployment/index.js +6 -0
- package/dist/utils/deployment/interactive-prompts.js +97 -0
- package/dist/utils/deployment/secret-generator.js +896 -0
- package/dist/utils/dirname-helper.js +35 -0
- package/dist/utils/domain-config.js +159 -0
- package/dist/utils/error-recovery.js +240 -0
- package/dist/utils/esm-helper.js +52 -0
- package/dist/utils/framework-config.js +481 -0
- package/dist/utils/graceful-shutdown-manager.js +379 -0
- package/dist/utils/health-checker.js +114 -0
- package/dist/utils/index.js +36 -0
- package/dist/utils/prompt-handler.js +98 -0
- package/dist/utils/usage-tracker.js +252 -0
- package/dist/utils/validation.js +112 -0
- package/dist/version/VersionDetector.js +723 -0
- package/dist/worker/index.js +4 -0
- package/dist/worker/integration.js +332 -0
- package/docs/FRAMEWORK-ARCHITECTURE-OVERVIEW.md +206 -0
- package/docs/INTEGRATION_GUIDE.md +2045 -0
- package/docs/README.md +82 -0
- package/docs/SECURITY.md +242 -0
- package/docs/deployment/deployment-guide.md +540 -0
- package/docs/overview.md +280 -0
- package/package.json +176 -0
- package/types/index.d.ts +575 -0
|
@@ -0,0 +1,730 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Database Orchestrator Module
|
|
5
|
+
* Enterprise-grade database management across multiple environments
|
|
6
|
+
*
|
|
7
|
+
* Extracted from manage-migrations.ps1 and manage-data-cleanup.ps1 with enhancements
|
|
8
|
+
*/
|
|
9
|
+
import { exec } from 'child_process';
|
|
10
|
+
import { writeFile, access, mkdir, stat, appendFile } from 'fs/promises';
|
|
11
|
+
import { existsSync } from 'fs';
|
|
12
|
+
import { join, dirname } from 'path';
|
|
13
|
+
import { fileURLToPath } from 'url';
|
|
14
|
+
import { promisify } from 'util';
|
|
15
|
+
const execAsync = promisify(exec);
|
|
16
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
17
|
+
const __dirname = dirname(__filename);
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Advanced Database Orchestrator
|
|
21
|
+
* Manages database operations across development, staging, and production environments
|
|
22
|
+
*/
|
|
23
|
+
export class DatabaseOrchestrator {
|
|
24
|
+
constructor(options = {}) {
|
|
25
|
+
// Enhanced project root detection
|
|
26
|
+
this.projectRoot = this.detectProjectRoot(options.projectRoot);
|
|
27
|
+
this.dryRun = options.dryRun || false;
|
|
28
|
+
this.options = options;
|
|
29
|
+
this.config = null;
|
|
30
|
+
|
|
31
|
+
// Environment configurations
|
|
32
|
+
this.environments = {
|
|
33
|
+
development: {
|
|
34
|
+
name: 'development',
|
|
35
|
+
isRemote: false,
|
|
36
|
+
description: 'Local development database',
|
|
37
|
+
defaultDatabase: 'local-db'
|
|
38
|
+
},
|
|
39
|
+
staging: {
|
|
40
|
+
name: 'staging',
|
|
41
|
+
isRemote: true,
|
|
42
|
+
description: 'Staging environment database',
|
|
43
|
+
requiresConfirmation: true
|
|
44
|
+
},
|
|
45
|
+
production: {
|
|
46
|
+
name: 'production',
|
|
47
|
+
isRemote: true,
|
|
48
|
+
description: 'PRODUCTION environment database - USE WITH EXTREME CAUTION',
|
|
49
|
+
requiresConfirmation: true,
|
|
50
|
+
requiresBackup: true
|
|
51
|
+
}
|
|
52
|
+
};
|
|
53
|
+
|
|
54
|
+
// Backup and audit configuration - only set paths if not running as dependency
|
|
55
|
+
if (this.projectRoot) {
|
|
56
|
+
// Use configurable paths from options or defaults
|
|
57
|
+
const baseLogsDir = options.logsDir || 'logs';
|
|
58
|
+
const baseBackupDir = options.backupDir || 'backups';
|
|
59
|
+
this.backupPaths = {
|
|
60
|
+
root: join(this.projectRoot, baseBackupDir, 'database'),
|
|
61
|
+
migrations: join(this.projectRoot, baseBackupDir, 'migrations'),
|
|
62
|
+
audit: join(this.projectRoot, baseLogsDir, 'database-audit.log')
|
|
63
|
+
};
|
|
64
|
+
this.migrationPaths = {
|
|
65
|
+
root: join(this.projectRoot, 'migrations'),
|
|
66
|
+
templates: join(this.projectRoot, 'migration-templates')
|
|
67
|
+
};
|
|
68
|
+
} else {
|
|
69
|
+
// When used as dependency, disable file-based logging
|
|
70
|
+
this.backupPaths = null;
|
|
71
|
+
this.migrationPaths = null;
|
|
72
|
+
console.log('đŚ Running as dependency - file logging disabled');
|
|
73
|
+
}
|
|
74
|
+
this.initializeOrchestrator();
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Initialize database orchestrator
|
|
79
|
+
*/
|
|
80
|
+
initializeOrchestrator() {
|
|
81
|
+
console.log('đď¸ Database Orchestrator v1.0');
|
|
82
|
+
console.log('==============================');
|
|
83
|
+
if (this.projectRoot) {
|
|
84
|
+
console.log(`đ Project Root: ${this.projectRoot}`);
|
|
85
|
+
} else {
|
|
86
|
+
console.log('đŚ Running as dependency - limited functionality');
|
|
87
|
+
}
|
|
88
|
+
console.log(`đ Mode: ${this.dryRun ? 'DRY RUN' : 'LIVE OPERATIONS'}`);
|
|
89
|
+
console.log(`đ Retry Attempts: ${this.config ? this.config.retryAttempts : 3}`);
|
|
90
|
+
console.log('');
|
|
91
|
+
|
|
92
|
+
// Create necessary directories (only if not running as dependency)
|
|
93
|
+
if (this.backupPaths && this.migrationPaths) {
|
|
94
|
+
Object.values(this.backupPaths).forEach(path => {
|
|
95
|
+
if (!path.endsWith('.log')) {
|
|
96
|
+
this.ensureDirectory(path);
|
|
97
|
+
}
|
|
98
|
+
});
|
|
99
|
+
this.ensureDirectory(this.migrationPaths.root);
|
|
100
|
+
this.ensureDirectory(dirname(this.backupPaths.audit));
|
|
101
|
+
}
|
|
102
|
+
this.logAuditEvent('ORCHESTRATOR_INITIALIZED', 'SYSTEM', {
|
|
103
|
+
mode: this.dryRun ? 'DRY_RUN' : 'LIVE',
|
|
104
|
+
environments: Object.keys(this.environments)
|
|
105
|
+
}).catch(err => console.warn('â ď¸ Audit logging failed:', err.message));
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/**
|
|
109
|
+
* Initialize with framework configuration
|
|
110
|
+
*/
|
|
111
|
+
async initialize() {
|
|
112
|
+
try {
|
|
113
|
+
// Import framework config for consistent timing and database settings
|
|
114
|
+
const {
|
|
115
|
+
frameworkConfig
|
|
116
|
+
} = await import('../utils/framework-config.js');
|
|
117
|
+
const timing = frameworkConfig.getTiming();
|
|
118
|
+
const database = frameworkConfig.getDatabaseConfig();
|
|
119
|
+
const configPaths = frameworkConfig.getPaths();
|
|
120
|
+
this.config = {
|
|
121
|
+
retryAttempts: this.options.retryAttempts || timing.retryAttempts,
|
|
122
|
+
retryDelay: this.options.retryDelay || timing.retryDelay,
|
|
123
|
+
executionTimeout: this.options.executionTimeout || database.executionTimeout,
|
|
124
|
+
...this.options
|
|
125
|
+
};
|
|
126
|
+
|
|
127
|
+
// Update paths if framework config is available and we have project root
|
|
128
|
+
if (this.projectRoot) {
|
|
129
|
+
this.backupPaths = {
|
|
130
|
+
root: join(this.projectRoot, configPaths.backups, 'database'),
|
|
131
|
+
migrations: join(this.projectRoot, configPaths.backups, 'migrations'),
|
|
132
|
+
audit: join(this.projectRoot, configPaths.logs, 'database-audit.log')
|
|
133
|
+
};
|
|
134
|
+
console.log(`đ Database orchestrator paths updated with framework config`);
|
|
135
|
+
console.log(` Backups: ${this.backupPaths.root}`);
|
|
136
|
+
console.log(` Audit: ${this.backupPaths.audit}`);
|
|
137
|
+
}
|
|
138
|
+
} catch (error) {
|
|
139
|
+
console.warn(`â ď¸ Could not load framework config: ${error.message}. Using existing paths.`);
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
// Ensure directories exist for logging and backups
|
|
143
|
+
if (this.projectRoot && this.backupPaths) {
|
|
144
|
+
await this.ensureDirectoryExists(this.backupPaths.root);
|
|
145
|
+
await this.ensureDirectoryExists(this.backupPaths.migrations);
|
|
146
|
+
await this.ensureDirectoryExists(dirname(this.backupPaths.audit));
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
/**
|
|
151
|
+
* Detect project root with enhanced logic
|
|
152
|
+
*/
|
|
153
|
+
detectProjectRoot(providedRoot) {
|
|
154
|
+
if (providedRoot) {
|
|
155
|
+
console.log(`đ Using provided project root: ${providedRoot}`);
|
|
156
|
+
return providedRoot;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
// Check if running as dependency (in node_modules)
|
|
160
|
+
const isDependency = __dirname.includes('node_modules');
|
|
161
|
+
if (isDependency) {
|
|
162
|
+
console.log('đŚ Running as dependency - limited functionality mode');
|
|
163
|
+
return null;
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
// Try multiple strategies to detect project root
|
|
167
|
+
let candidates = [
|
|
168
|
+
// Standard clodo-framework structure
|
|
169
|
+
join(__dirname, '..', '..'),
|
|
170
|
+
// Alternative if running from dist/
|
|
171
|
+
join(__dirname, '..', '..', '..'),
|
|
172
|
+
// Current working directory
|
|
173
|
+
process.cwd(),
|
|
174
|
+
// Environment variable override
|
|
175
|
+
process.env.FRAMEWORK_PROJECT_ROOT];
|
|
176
|
+
|
|
177
|
+
// Filter out null/undefined candidates
|
|
178
|
+
candidates = candidates.filter(Boolean);
|
|
179
|
+
for (const candidate of candidates) {
|
|
180
|
+
if (this.isValidProjectRoot(candidate)) {
|
|
181
|
+
console.log(`đ Detected project root: ${candidate}`);
|
|
182
|
+
return candidate;
|
|
183
|
+
}
|
|
184
|
+
}
|
|
185
|
+
console.warn('â ď¸ Could not detect project root. Some features may be limited.');
|
|
186
|
+
return null;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
/**
|
|
190
|
+
* Check if a directory appears to be a valid project root
|
|
191
|
+
*/
|
|
192
|
+
isValidProjectRoot(path) {
|
|
193
|
+
try {
|
|
194
|
+
// Check for common project indicators
|
|
195
|
+
const indicators = ['package.json', 'validation-config.json', 'src', 'bin'];
|
|
196
|
+
const hasIndicators = indicators.some(indicator => existsSync(join(path, indicator)));
|
|
197
|
+
return hasIndicators;
|
|
198
|
+
} catch (error) {
|
|
199
|
+
return false;
|
|
200
|
+
}
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
/**
|
|
204
|
+
* Ensure a directory exists, creating it if necessary
|
|
205
|
+
*/
|
|
206
|
+
async ensureDirectoryExists(dirPath) {
|
|
207
|
+
try {
|
|
208
|
+
await mkdir(dirPath, {
|
|
209
|
+
recursive: true
|
|
210
|
+
});
|
|
211
|
+
} catch (error) {
|
|
212
|
+
if (error.code !== 'EEXIST') {
|
|
213
|
+
console.error(`â Failed to create directory ${dirPath}: ${error.message}`);
|
|
214
|
+
throw error;
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
/**
|
|
220
|
+
* Apply migrations across multiple environments with coordination
|
|
221
|
+
* @param {Object} options - Migration options
|
|
222
|
+
* @returns {Promise<Object>} Migration results
|
|
223
|
+
*/
|
|
224
|
+
async applyMigrationsAcrossEnvironments(options = {}) {
|
|
225
|
+
const {
|
|
226
|
+
environments = ['development', 'staging', 'production'],
|
|
227
|
+
domainConfigs = [],
|
|
228
|
+
skipBackup = false,
|
|
229
|
+
continueOnError = false
|
|
230
|
+
} = options;
|
|
231
|
+
console.log('đ Cross-Environment Migration Orchestration');
|
|
232
|
+
console.log('===========================================');
|
|
233
|
+
console.log(`đ Environments: ${environments.join(', ')}`);
|
|
234
|
+
console.log(`đ Domains: ${domainConfigs.length} configured`);
|
|
235
|
+
console.log('');
|
|
236
|
+
const results = {
|
|
237
|
+
orchestrationId: this.generateOrchestrationId(),
|
|
238
|
+
environments: {},
|
|
239
|
+
summary: {
|
|
240
|
+
total: 0,
|
|
241
|
+
successful: 0,
|
|
242
|
+
failed: 0,
|
|
243
|
+
skipped: 0
|
|
244
|
+
},
|
|
245
|
+
startTime: new Date()
|
|
246
|
+
};
|
|
247
|
+
try {
|
|
248
|
+
for (const env of environments) {
|
|
249
|
+
if (!this.environments[env]) {
|
|
250
|
+
console.log(`â ď¸ Unknown environment: ${env}, skipping`);
|
|
251
|
+
continue;
|
|
252
|
+
}
|
|
253
|
+
console.log(`\nđ Processing ${env} environment...`);
|
|
254
|
+
results.summary.total++;
|
|
255
|
+
try {
|
|
256
|
+
// Create backup if required
|
|
257
|
+
if (this.environments[env].requiresBackup && !skipBackup) {
|
|
258
|
+
await this.createEnvironmentBackup(env, domainConfigs);
|
|
259
|
+
}
|
|
260
|
+
|
|
261
|
+
// Apply migrations for environment
|
|
262
|
+
const envResult = await this.applyEnvironmentMigrations(env, domainConfigs, options);
|
|
263
|
+
results.environments[env] = {
|
|
264
|
+
status: 'completed',
|
|
265
|
+
...envResult
|
|
266
|
+
};
|
|
267
|
+
results.summary.successful++;
|
|
268
|
+
console.log(`â
${env} environment completed successfully`);
|
|
269
|
+
} catch (error) {
|
|
270
|
+
console.error(`â ${env} environment failed: ${error.message}`);
|
|
271
|
+
results.environments[env] = {
|
|
272
|
+
status: 'failed',
|
|
273
|
+
error: error.message,
|
|
274
|
+
timestamp: new Date()
|
|
275
|
+
};
|
|
276
|
+
results.summary.failed++;
|
|
277
|
+
if (!continueOnError) {
|
|
278
|
+
throw new Error(`Migration failed in ${env} environment: ${error.message}`);
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
results.endTime = new Date();
|
|
283
|
+
results.summary.duration = (results.endTime - results.startTime) / 1000;
|
|
284
|
+
await this.logAuditEvent('MIGRATION_ORCHESTRATION_COMPLETED', 'ALL', results.summary);
|
|
285
|
+
console.log('\nđ MIGRATION ORCHESTRATION SUMMARY');
|
|
286
|
+
console.log('==================================');
|
|
287
|
+
console.log(`â
Successful: ${results.summary.successful}`);
|
|
288
|
+
console.log(`â Failed: ${results.summary.failed}`);
|
|
289
|
+
console.log(`â¸ď¸ Skipped: ${results.summary.skipped}`);
|
|
290
|
+
console.log(`âąď¸ Duration: ${results.summary.duration.toFixed(1)}s`);
|
|
291
|
+
return results;
|
|
292
|
+
} catch (error) {
|
|
293
|
+
await this.logAuditEvent('MIGRATION_ORCHESTRATION_FAILED', 'ALL', {
|
|
294
|
+
error: error.message
|
|
295
|
+
});
|
|
296
|
+
throw error;
|
|
297
|
+
}
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
/**
|
|
301
|
+
* Apply migrations to specific environment
|
|
302
|
+
* @param {string} environment - Environment name
|
|
303
|
+
* @param {Array} domainConfigs - Domain configurations
|
|
304
|
+
* @returns {Promise<Object>} Environment migration result
|
|
305
|
+
*/
|
|
306
|
+
async applyEnvironmentMigrations(environment, domainConfigs) {
|
|
307
|
+
const envConfig = this.environments[environment];
|
|
308
|
+
const results = {
|
|
309
|
+
environment,
|
|
310
|
+
databases: {},
|
|
311
|
+
migrationsApplied: 0,
|
|
312
|
+
startTime: new Date()
|
|
313
|
+
};
|
|
314
|
+
console.log(` đ Environment: ${envConfig.description}`);
|
|
315
|
+
console.log(` đ Remote: ${envConfig.isRemote ? 'Yes' : 'No'}`);
|
|
316
|
+
|
|
317
|
+
// Process each domain's databases
|
|
318
|
+
if (domainConfigs.length > 0) {
|
|
319
|
+
for (const domainConfig of domainConfigs) {
|
|
320
|
+
const dbConfig = domainConfig.databases?.[environment];
|
|
321
|
+
if (!dbConfig) {
|
|
322
|
+
console.log(` â ď¸ No ${environment} database config for ${domainConfig.name}`);
|
|
323
|
+
continue;
|
|
324
|
+
}
|
|
325
|
+
try {
|
|
326
|
+
const dbResult = await this.applyDatabaseMigrations(dbConfig.name, environment, envConfig.isRemote);
|
|
327
|
+
results.databases[dbConfig.name] = dbResult;
|
|
328
|
+
results.migrationsApplied += dbResult.migrationsApplied || 0;
|
|
329
|
+
} catch (error) {
|
|
330
|
+
console.error(` â Database ${dbConfig.name} migration failed: ${error.message}`);
|
|
331
|
+
results.databases[dbConfig.name] = {
|
|
332
|
+
status: 'failed',
|
|
333
|
+
error: error.message
|
|
334
|
+
};
|
|
335
|
+
throw error;
|
|
336
|
+
}
|
|
337
|
+
}
|
|
338
|
+
} else {
|
|
339
|
+
// Apply to default database
|
|
340
|
+
const defaultDb = envConfig.defaultDatabase || 'default-db';
|
|
341
|
+
const dbResult = await this.applyDatabaseMigrations(defaultDb, environment, envConfig.isRemote);
|
|
342
|
+
results.databases[defaultDb] = dbResult;
|
|
343
|
+
results.migrationsApplied = dbResult.migrationsApplied || 0;
|
|
344
|
+
}
|
|
345
|
+
results.endTime = new Date();
|
|
346
|
+
results.duration = (results.endTime - results.startTime) / 1000;
|
|
347
|
+
await this.logAuditEvent('ENVIRONMENT_MIGRATION_COMPLETED', environment, {
|
|
348
|
+
databases: Object.keys(results.databases),
|
|
349
|
+
migrationsApplied: results.migrationsApplied,
|
|
350
|
+
duration: results.duration
|
|
351
|
+
});
|
|
352
|
+
return results;
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
/**
|
|
356
|
+
* Apply migrations to specific database
|
|
357
|
+
* @param {string} databaseName - Database name
|
|
358
|
+
* @param {string} environment - Environment
|
|
359
|
+
* @param {boolean} isRemote - Whether database is remote
|
|
360
|
+
* @returns {Promise<Object>} Database migration result
|
|
361
|
+
*/
|
|
362
|
+
async applyDatabaseMigrations(databaseName, environment, isRemote) {
|
|
363
|
+
console.log(` đď¸ Applying migrations to ${databaseName}...`);
|
|
364
|
+
if (this.dryRun) {
|
|
365
|
+
console.log(` đ DRY RUN: Would apply migrations to ${databaseName}`);
|
|
366
|
+
return {
|
|
367
|
+
status: 'dry-run',
|
|
368
|
+
databaseName,
|
|
369
|
+
environment,
|
|
370
|
+
migrationsApplied: 0
|
|
371
|
+
};
|
|
372
|
+
}
|
|
373
|
+
try {
|
|
374
|
+
const command = this.buildMigrationCommand(databaseName, environment, isRemote);
|
|
375
|
+
const output = await this.executeWithRetry(command, 120000); // 2 minute timeout
|
|
376
|
+
|
|
377
|
+
// Parse migration output
|
|
378
|
+
const migrationsApplied = this.parseMigrationOutput(output);
|
|
379
|
+
console.log(` â
Applied ${migrationsApplied} migrations to ${databaseName}`);
|
|
380
|
+
await this.logAuditEvent('DATABASE_MIGRATION_APPLIED', environment, {
|
|
381
|
+
databaseName,
|
|
382
|
+
migrationsApplied,
|
|
383
|
+
isRemote
|
|
384
|
+
});
|
|
385
|
+
return {
|
|
386
|
+
status: 'completed',
|
|
387
|
+
databaseName,
|
|
388
|
+
environment,
|
|
389
|
+
migrationsApplied,
|
|
390
|
+
output: output.substring(0, 500) // Truncate for storage
|
|
391
|
+
};
|
|
392
|
+
} catch (error) {
|
|
393
|
+
await this.logAuditEvent('DATABASE_MIGRATION_FAILED', environment, {
|
|
394
|
+
databaseName,
|
|
395
|
+
error: error.message
|
|
396
|
+
});
|
|
397
|
+
throw new Error(`Migration failed for ${databaseName}: ${error.message}`);
|
|
398
|
+
}
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
/**
|
|
402
|
+
* Create comprehensive backup across environments
|
|
403
|
+
* @param {string} environment - Environment to backup
|
|
404
|
+
* @param {Array} domainConfigs - Domain configurations
|
|
405
|
+
* @returns {Promise<Object>} Backup results
|
|
406
|
+
*/
|
|
407
|
+
async createEnvironmentBackup(environment, domainConfigs) {
|
|
408
|
+
console.log(` đž Creating ${environment} environment backup...`);
|
|
409
|
+
const backupId = this.generateBackupId(environment);
|
|
410
|
+
const backupDir = join(this.backupPaths.root, environment, backupId);
|
|
411
|
+
this.ensureDirectory(backupDir);
|
|
412
|
+
const backupResults = {
|
|
413
|
+
backupId,
|
|
414
|
+
environment,
|
|
415
|
+
backupDir,
|
|
416
|
+
databases: {},
|
|
417
|
+
startTime: new Date()
|
|
418
|
+
};
|
|
419
|
+
try {
|
|
420
|
+
if (domainConfigs.length > 0) {
|
|
421
|
+
for (const domainConfig of domainConfigs) {
|
|
422
|
+
const dbConfig = domainConfig.databases?.[environment];
|
|
423
|
+
if (dbConfig && dbConfig.id) {
|
|
424
|
+
try {
|
|
425
|
+
const dbBackup = await this.createDatabaseBackup(dbConfig.name, environment, backupDir);
|
|
426
|
+
backupResults.databases[dbConfig.name] = dbBackup;
|
|
427
|
+
} catch (error) {
|
|
428
|
+
console.log(` â ď¸ Backup failed for ${dbConfig.name}: ${error.message}`);
|
|
429
|
+
backupResults.databases[dbConfig.name] = {
|
|
430
|
+
status: 'failed',
|
|
431
|
+
error: error.message
|
|
432
|
+
};
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
}
|
|
437
|
+
backupResults.endTime = new Date();
|
|
438
|
+
backupResults.duration = (backupResults.endTime - backupResults.startTime) / 1000;
|
|
439
|
+
|
|
440
|
+
// Save backup manifest
|
|
441
|
+
const manifestPath = join(backupDir, 'backup-manifest.json');
|
|
442
|
+
await writeFile(manifestPath, JSON.stringify(backupResults, null, 2));
|
|
443
|
+
await this.logAuditEvent('ENVIRONMENT_BACKUP_CREATED', environment, {
|
|
444
|
+
backupId,
|
|
445
|
+
databases: Object.keys(backupResults.databases),
|
|
446
|
+
duration: backupResults.duration
|
|
447
|
+
});
|
|
448
|
+
console.log(` â
Environment backup completed: ${backupId}`);
|
|
449
|
+
return backupResults;
|
|
450
|
+
} catch (error) {
|
|
451
|
+
await this.logAuditEvent('ENVIRONMENT_BACKUP_FAILED', environment, {
|
|
452
|
+
backupId,
|
|
453
|
+
error: error.message
|
|
454
|
+
});
|
|
455
|
+
throw error;
|
|
456
|
+
}
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
/**
|
|
460
|
+
* Create backup of specific database
|
|
461
|
+
* @param {string} databaseName - Database name
|
|
462
|
+
* @param {string} environment - Environment
|
|
463
|
+
* @param {string} backupDir - Backup directory
|
|
464
|
+
* @returns {Promise<Object>} Database backup result
|
|
465
|
+
*/
|
|
466
|
+
async createDatabaseBackup(databaseName, environment, backupDir) {
|
|
467
|
+
const backupFile = join(backupDir, `${databaseName}-${environment}.sql`);
|
|
468
|
+
if (this.dryRun) {
|
|
469
|
+
console.log(` đ DRY RUN: Would backup ${databaseName} to ${backupFile}`);
|
|
470
|
+
return {
|
|
471
|
+
status: 'dry-run',
|
|
472
|
+
backupFile
|
|
473
|
+
};
|
|
474
|
+
}
|
|
475
|
+
try {
|
|
476
|
+
const isRemote = this.environments[environment].isRemote;
|
|
477
|
+
const command = this.buildBackupCommand(databaseName, environment, backupFile, isRemote);
|
|
478
|
+
await this.executeWithRetry(command, 300000); // 5 minute timeout for backups
|
|
479
|
+
|
|
480
|
+
if (existsSync(backupFile)) {
|
|
481
|
+
const stats = await stat(backupFile);
|
|
482
|
+
console.log(` đž Backup created: ${backupFile} (${(stats.size / 1024).toFixed(1)}KB)`);
|
|
483
|
+
return {
|
|
484
|
+
status: 'completed',
|
|
485
|
+
backupFile,
|
|
486
|
+
sizeKB: (stats.size / 1024).toFixed(1),
|
|
487
|
+
timestamp: new Date()
|
|
488
|
+
};
|
|
489
|
+
} else {
|
|
490
|
+
throw new Error('Backup file was not created');
|
|
491
|
+
}
|
|
492
|
+
} catch (error) {
|
|
493
|
+
throw new Error(`Database backup failed: ${error.message}`);
|
|
494
|
+
}
|
|
495
|
+
}
|
|
496
|
+
|
|
497
|
+
/**
|
|
498
|
+
* Perform safe data cleanup with backup and confirmation
|
|
499
|
+
* @param {Object} options - Cleanup options
|
|
500
|
+
* @returns {Promise<Object>} Cleanup results
|
|
501
|
+
*/
|
|
502
|
+
async performSafeDataCleanup(options = {}) {
|
|
503
|
+
const {
|
|
504
|
+
environment = 'development',
|
|
505
|
+
domainConfigs = [],
|
|
506
|
+
cleanupType = 'partial',
|
|
507
|
+
// 'partial', 'full', 'logs-only'
|
|
508
|
+
skipBackup = false,
|
|
509
|
+
force = false
|
|
510
|
+
} = options;
|
|
511
|
+
console.log('đ§š Safe Data Cleanup Operation');
|
|
512
|
+
console.log('==============================');
|
|
513
|
+
console.log(`đ Environment: ${environment}`);
|
|
514
|
+
console.log(`đ§˝ Cleanup Type: ${cleanupType}`);
|
|
515
|
+
console.log(`đž Skip Backup: ${skipBackup}`);
|
|
516
|
+
console.log('');
|
|
517
|
+
const envConfig = this.environments[environment];
|
|
518
|
+
if (!envConfig) {
|
|
519
|
+
throw new Error(`Unknown environment: ${environment}`);
|
|
520
|
+
}
|
|
521
|
+
|
|
522
|
+
// Safety confirmation for production
|
|
523
|
+
if (environment === 'production' && !force) {
|
|
524
|
+
const confirmed = await this.confirmDangerousOperation(`${cleanupType} data cleanup in PRODUCTION`, 'This will permanently delete data and cannot be undone');
|
|
525
|
+
if (!confirmed) {
|
|
526
|
+
console.log('â Operation cancelled by user');
|
|
527
|
+
return {
|
|
528
|
+
status: 'cancelled',
|
|
529
|
+
reason: 'User declined confirmation'
|
|
530
|
+
};
|
|
531
|
+
}
|
|
532
|
+
}
|
|
533
|
+
const cleanupResults = {
|
|
534
|
+
cleanupId: this.generateCleanupId(environment),
|
|
535
|
+
environment,
|
|
536
|
+
cleanupType,
|
|
537
|
+
operations: {},
|
|
538
|
+
startTime: new Date()
|
|
539
|
+
};
|
|
540
|
+
try {
|
|
541
|
+
// Create backup if required
|
|
542
|
+
if (!skipBackup && envConfig.requiresBackup) {
|
|
543
|
+
const backupResult = await this.createEnvironmentBackup(environment, domainConfigs);
|
|
544
|
+
cleanupResults.backup = backupResult;
|
|
545
|
+
}
|
|
546
|
+
|
|
547
|
+
// Perform cleanup operations
|
|
548
|
+
for (const domainConfig of domainConfigs) {
|
|
549
|
+
const dbConfig = domainConfig.databases?.[environment];
|
|
550
|
+
if (dbConfig) {
|
|
551
|
+
try {
|
|
552
|
+
const cleanupResult = await this.performDatabaseCleanup(dbConfig.name, environment, cleanupType);
|
|
553
|
+
cleanupResults.operations[dbConfig.name] = cleanupResult;
|
|
554
|
+
} catch (error) {
|
|
555
|
+
console.error(`â Cleanup failed for ${dbConfig.name}: ${error.message}`);
|
|
556
|
+
cleanupResults.operations[dbConfig.name] = {
|
|
557
|
+
status: 'failed',
|
|
558
|
+
error: error.message
|
|
559
|
+
};
|
|
560
|
+
}
|
|
561
|
+
}
|
|
562
|
+
}
|
|
563
|
+
cleanupResults.endTime = new Date();
|
|
564
|
+
cleanupResults.duration = (cleanupResults.endTime - cleanupResults.startTime) / 1000;
|
|
565
|
+
await this.logAuditEvent('DATA_CLEANUP_COMPLETED', environment, {
|
|
566
|
+
cleanupId: cleanupResults.cleanupId,
|
|
567
|
+
cleanupType,
|
|
568
|
+
operations: Object.keys(cleanupResults.operations),
|
|
569
|
+
duration: cleanupResults.duration
|
|
570
|
+
});
|
|
571
|
+
console.log('\nâ
Data cleanup completed successfully');
|
|
572
|
+
return cleanupResults;
|
|
573
|
+
} catch (error) {
|
|
574
|
+
await this.logAuditEvent('DATA_CLEANUP_FAILED', environment, {
|
|
575
|
+
cleanupId: cleanupResults.cleanupId,
|
|
576
|
+
error: error.message
|
|
577
|
+
});
|
|
578
|
+
throw error;
|
|
579
|
+
}
|
|
580
|
+
}
|
|
581
|
+
|
|
582
|
+
/**
 * Perform cleanup on a specific database by executing the SQL statements
 * associated with the requested cleanup type, one statement at a time.
 *
 * When this.dryRun is set, nothing is executed: the method logs what it
 * would do and returns a synthetic { status: 'dry-run' } result.
 *
 * @param {string} databaseName - D1 database name to clean
 * @param {string} environment - Target environment key (passed through to the wrangler command)
 * @param {string} cleanupType - Type of cleanup ('logs-only' | 'partial' | 'full'; unknown types fall back to 'partial')
 * @returns {Promise<Object>} Cleanup result: { status, cleanupType } for dry runs,
 *   plus { operationsExecuted, timestamp } on real completed runs
 * @throws {Error} If any statement fails after retries. The message records how
 *   many statements had already run — i.e. the database may be partially cleaned.
 */
async performDatabaseCleanup(databaseName, environment, cleanupType) {
  console.log(`  đ§š Cleaning ${databaseName} (${cleanupType})...`);
  // Dry-run short-circuits before any command is built or executed.
  if (this.dryRun) {
    console.log(`  đ DRY RUN: Would perform ${cleanupType} cleanup on ${databaseName}`);
    return {
      status: 'dry-run',
      cleanupType
    };
  }
  // NOTE(review): getCleanupCommands only declares a single parameter, so the
  // extra `environment` argument is currently ignored — confirm intent.
  const commands = this.getCleanupCommands(cleanupType, environment);
  // Counts successfully executed statements so a failure can report progress.
  let executedCommands = 0;
  try {
    // Statements run sequentially; each gets a 60s timeout with retries.
    for (const command of commands) {
      const fullCommand = this.buildDatabaseCommand(command, databaseName, environment);
      await this.executeWithRetry(fullCommand, 60000);
      executedCommands++;
    }
    console.log(`  â
 Cleanup completed: ${executedCommands} operations`);
    return {
      status: 'completed',
      cleanupType,
      operationsExecuted: executedCommands,
      timestamp: new Date()
    };
  } catch (error) {
    // Wrap with progress info: callers need to know the cleanup was partial.
    throw new Error(`Database cleanup failed after ${executedCommands} operations: ${error.message}`);
  }
}
|
|
617
|
+
|
|
618
|
+
// Command builders and utility methods
|
|
619
|
+
|
|
620
|
+
buildMigrationCommand(databaseName, environment, isRemote) {
|
|
621
|
+
const remoteFlag = isRemote ? '--remote' : '--local';
|
|
622
|
+
return `npx wrangler d1 migrations apply ${databaseName} --env ${environment} ${remoteFlag}`;
|
|
623
|
+
}
|
|
624
|
+
buildBackupCommand(databaseName, environment, backupFile, isRemote) {
|
|
625
|
+
const remoteFlag = isRemote ? '--remote' : '--local';
|
|
626
|
+
return `npx wrangler d1 export ${databaseName} --env ${environment} ${remoteFlag} --output ${backupFile}`;
|
|
627
|
+
}
|
|
628
|
+
buildDatabaseCommand(sqlCommand, databaseName, environment) {
|
|
629
|
+
const envConfig = this.environments[environment];
|
|
630
|
+
const remoteFlag = envConfig.isRemote ? '--remote' : '--local';
|
|
631
|
+
return `npx wrangler d1 execute ${databaseName} --env ${environment} ${remoteFlag} --command "${sqlCommand}"`;
|
|
632
|
+
}
|
|
633
|
+
getCleanupCommands(cleanupType) {
|
|
634
|
+
const commands = {
|
|
635
|
+
'logs-only': ['DELETE FROM logs WHERE created_at < datetime("now", "-30 days");'],
|
|
636
|
+
'partial': ['DELETE FROM logs WHERE created_at < datetime("now", "-7 days");', 'DELETE FROM sessions WHERE expires_at < datetime("now");', 'UPDATE users SET last_cleanup = datetime("now") WHERE last_cleanup IS NULL;'],
|
|
637
|
+
'full': ['DELETE FROM logs;', 'DELETE FROM sessions;', 'DELETE FROM files;', 'DELETE FROM user_profiles;', 'DELETE FROM users;']
|
|
638
|
+
};
|
|
639
|
+
return commands[cleanupType] || commands['partial'];
|
|
640
|
+
}
|
|
641
|
+
parseMigrationOutput(output) {
|
|
642
|
+
// Parse wrangler migration output to count applied migrations
|
|
643
|
+
const matches = output.match(/Applied (\d+) migration/);
|
|
644
|
+
return matches ? parseInt(matches[1]) : 0;
|
|
645
|
+
}
|
|
646
|
+
async executeWithRetry(command, timeout = null) {
|
|
647
|
+
const actualTimeout = timeout || (this.config ? this.config.executionTimeout : 30000);
|
|
648
|
+
const maxAttempts = this.config ? this.config.retryAttempts : 3;
|
|
649
|
+
const retryDelay = this.config ? this.config.retryDelay : 1000;
|
|
650
|
+
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
|
651
|
+
try {
|
|
652
|
+
const {
|
|
653
|
+
stdout
|
|
654
|
+
} = await execAsync(command, {
|
|
655
|
+
encoding: 'utf8',
|
|
656
|
+
timeout: actualTimeout,
|
|
657
|
+
stdio: 'pipe'
|
|
658
|
+
});
|
|
659
|
+
return stdout;
|
|
660
|
+
} catch (error) {
|
|
661
|
+
if (attempt === maxAttempts) {
|
|
662
|
+
throw error;
|
|
663
|
+
}
|
|
664
|
+
console.log(` â ď¸ Attempt ${attempt} failed, retrying...`);
|
|
665
|
+
await new Promise(resolve => setTimeout(resolve, retryDelay));
|
|
666
|
+
}
|
|
667
|
+
}
|
|
668
|
+
}
|
|
669
|
+
async confirmDangerousOperation(operation, impact) {
|
|
670
|
+
// In a real implementation, this would use a proper input library
|
|
671
|
+
console.log(`\nâ ď¸ DANGER: ${operation}`);
|
|
672
|
+
console.log(`Impact: ${impact}`);
|
|
673
|
+
console.log('Type "YES" to confirm this operation:');
|
|
674
|
+
|
|
675
|
+
// For now, return false to prevent accidental execution
|
|
676
|
+
return false;
|
|
677
|
+
}
|
|
678
|
+
|
|
679
|
+
// Utility methods
|
|
680
|
+
|
|
681
|
+
generateOrchestrationId() {
|
|
682
|
+
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
683
|
+
return `orchestration-${timestamp}`;
|
|
684
|
+
}
|
|
685
|
+
generateBackupId(environment) {
|
|
686
|
+
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
687
|
+
return `backup-${environment}-${timestamp}`;
|
|
688
|
+
}
|
|
689
|
+
generateCleanupId(environment) {
|
|
690
|
+
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
691
|
+
return `cleanup-${environment}-${timestamp}`;
|
|
692
|
+
}
|
|
693
|
+
async ensureDirectory(path) {
|
|
694
|
+
try {
|
|
695
|
+
await access(path);
|
|
696
|
+
} catch {
|
|
697
|
+
await mkdir(path, {
|
|
698
|
+
recursive: true
|
|
699
|
+
});
|
|
700
|
+
}
|
|
701
|
+
}
|
|
702
|
+
async logAuditEvent(event, environment, details = {}) {
|
|
703
|
+
const logEntry = {
|
|
704
|
+
timestamp: new Date().toISOString(),
|
|
705
|
+
event,
|
|
706
|
+
environment,
|
|
707
|
+
details,
|
|
708
|
+
user: process.env.USER || process.env.USERNAME || 'system'
|
|
709
|
+
};
|
|
710
|
+
|
|
711
|
+
// Skip logging if running as dependency (no file access)
|
|
712
|
+
if (!this.backupPaths) {
|
|
713
|
+
console.log(`đ Audit: ${event} (${environment})`);
|
|
714
|
+
return;
|
|
715
|
+
}
|
|
716
|
+
try {
|
|
717
|
+
// Ensure the audit log directory exists
|
|
718
|
+
await this.ensureDirectory(dirname(this.backupPaths.audit));
|
|
719
|
+
const logLine = JSON.stringify(logEntry) + '\n';
|
|
720
|
+
if (existsSync(this.backupPaths.audit)) {
|
|
721
|
+
await appendFile(this.backupPaths.audit, logLine);
|
|
722
|
+
} else {
|
|
723
|
+
await writeFile(this.backupPaths.audit, logLine);
|
|
724
|
+
}
|
|
725
|
+
} catch (error) {
|
|
726
|
+
console.warn(`â ď¸ Failed to log audit event: ${error.message}`);
|
|
727
|
+
}
|
|
728
|
+
}
|
|
729
|
+
}
|
|
730
|
+
// Default export so consumers can `import DatabaseOrchestrator from '...'`.
export default DatabaseOrchestrator;
|