@tamyla/clodo-framework 3.0.12 → 3.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -597,7 +597,7 @@ export class MultiDomainOrchestrator {
597
597
  }
598
598
 
599
599
  /**
600
- * Validate domain deployment with real HTTP health check
600
+ * Validate domain deployment with real HTTP health check (with retries)
601
601
  */
602
602
  async validateDomainDeployment(domain) {
603
603
  console.log(` ✅ Validating deployment for ${domain}`);
@@ -614,54 +614,75 @@ export class MultiDomainOrchestrator {
614
614
  return true;
615
615
  }
616
616
  console.log(` 🔍 Running health check: ${deploymentUrl}/health`);
617
- try {
618
- const startTime = Date.now();
619
-
620
- // Perform actual HTTP health check
621
- const response = await fetch(`${deploymentUrl}/health`, {
622
- method: 'GET',
623
- headers: {
624
- 'User-Agent': 'Clodo-Orchestrator/2.0'
625
- },
626
- signal: AbortSignal.timeout(10000) // 10 second timeout
627
- });
628
- const responseTime = Date.now() - startTime;
629
- const status = response.status;
630
- if (status === 200) {
631
- console.log(` ✅ Health check passed (${status}) - Response time: ${responseTime}ms`);
632
-
633
- // Log successful health check
634
- this.stateManager.logAuditEvent('HEALTH_CHECK_PASSED', domain, {
635
- url: deploymentUrl,
636
- status,
637
- responseTime,
638
- environment: this.environment
639
- });
640
- return true;
641
- } else {
642
- console.log(` ⚠️ Health check returned ${status} - deployment may have issues`);
643
- this.stateManager.logAuditEvent('HEALTH_CHECK_WARNING', domain, {
644
- url: deploymentUrl,
645
- status,
646
- responseTime,
647
- environment: this.environment
617
+
618
+ // Retry logic for health checks
619
+ const maxRetries = 3;
620
+ const retryDelay = 5000; // 5 seconds between retries
621
+
622
+ for (let attempt = 1; attempt <= maxRetries; attempt++) {
623
+ try {
624
+ const startTime = Date.now();
625
+ console.log(` Attempt ${attempt}/${maxRetries}...`);
626
+
627
+ // Perform actual HTTP health check
628
+ const response = await fetch(`${deploymentUrl}/health`, {
629
+ method: 'GET',
630
+ headers: {
631
+ 'User-Agent': 'Clodo-Orchestrator/2.0'
632
+ },
633
+ signal: AbortSignal.timeout(15000) // 15 second timeout
648
634
  });
635
+ const responseTime = Date.now() - startTime;
636
+ const status = response.status;
637
+ if (status === 200) {
638
+ console.log(` ✅ Health check passed (${status}) - Response time: ${responseTime}ms`);
639
+
640
+ // Log successful health check
641
+ this.stateManager.logAuditEvent('HEALTH_CHECK_PASSED', domain, {
642
+ url: deploymentUrl,
643
+ status,
644
+ responseTime,
645
+ attempt,
646
+ environment: this.environment
647
+ });
648
+ return true;
649
+ } else {
650
+ const errorMsg = `Health check returned ${status} - deployment may have issues`;
651
+ console.log(` ⚠️ ${errorMsg}`);
652
+ this.stateManager.logAuditEvent('HEALTH_CHECK_WARNING', domain, {
653
+ url: deploymentUrl,
654
+ status,
655
+ responseTime,
656
+ attempt,
657
+ environment: this.environment
658
+ });
649
659
 
650
- // Don't fail deployment for non-200 status, just warn
651
- return true;
652
- }
653
- } catch (error) {
654
- console.log(` ⚠️ Health check failed: ${error.message}`);
655
- console.log(` 💡 This may be expected if the worker isn't fully propagated yet`);
656
- this.stateManager.logAuditEvent('HEALTH_CHECK_FAILED', domain, {
657
- url: deploymentUrl,
658
- error: error.message,
659
- environment: this.environment
660
- });
660
+ // Don't fail deployment for non-200 status, just warn
661
+ return true;
662
+ }
663
+ } catch (error) {
664
+ const isLastAttempt = attempt === maxRetries;
665
+ const errorMsg = `Health check failed: ${error.message}`;
666
+ if (isLastAttempt) {
667
+ console.log(` ❌ ${errorMsg} (final attempt)`);
668
+ console.log(` 💡 The service may still be deploying. Check manually: curl ${deploymentUrl}/health`);
669
+ this.stateManager.logAuditEvent('HEALTH_CHECK_FAILED', domain, {
670
+ url: deploymentUrl,
671
+ error: error.message,
672
+ attempts: maxRetries,
673
+ environment: this.environment
674
+ });
661
675
 
662
- // Don't fail deployment for health check failure - it might just need time
663
- return true;
676
+ // Don't fail deployment for health check failure - it might just need time
677
+ return true;
678
+ } else {
679
+ console.log(` ⚠️ ${errorMsg} (attempt ${attempt}/${maxRetries})`);
680
+ console.log(` ⏳ Retrying in ${retryDelay / 1000} seconds...`);
681
+ await new Promise(resolve => setTimeout(resolve, retryDelay));
682
+ }
683
+ }
664
684
  }
685
+ return true;
665
686
  }
666
687
 
667
688
  /**
@@ -0,0 +1,303 @@
1
+ /**
2
+ * Assessment Cache System
3
+ *
4
+ * Caches assessment results to avoid redundant analysis of project artifacts.
5
+ * Provides intelligent cache invalidation based on file changes and time-based expiration.
6
+ */
7
+
8
+ import crypto from 'crypto';
9
+ import fs from 'fs/promises';
10
+ import path from 'path';
11
+ import { existsSync } from 'fs';
12
export class AssessmentCache {
  /**
   * @param {object} [options]
   * @param {string}  [options.cacheDir='./.clodo-cache/assessment'] - Directory for the on-disk cache.
   * @param {number}  [options.ttl=300000] - Entry time-to-live in milliseconds (5 minutes default).
   * @param {number}  [options.maxEntries=50] - Maximum entries retained in the in-memory cache.
   * @param {boolean} [options.enableDiskCache=true] - Also persist entries to disk.
   */
  constructor(options = {}) {
    this.cacheDir = options.cacheDir || './.clodo-cache/assessment';
    // ?? (not ||) so explicit 0 is honored as "expire immediately" / "keep nothing"
    this.ttl = options.ttl ?? 5 * 60 * 1000; // 5 minutes default
    this.maxEntries = options.maxEntries ?? 50;
    this.enableDiskCache = options.enableDiskCache !== false;
    this.memoryCache = new Map();
    this.initialized = false;
  }

  /**
   * Initialize the cache system (idempotent). Creates the cache directory and
   * warms the memory cache from disk when disk caching is enabled.
   */
  async initialize() {
    if (this.initialized) return;
    if (this.enableDiskCache) {
      await this.ensureCacheDirectory();
      await this.loadFromDisk();
    }
    this.initialized = true;
  }

  /**
   * Generate a deterministic cache key from the project state and inputs.
   *
   * NOTE: a sorted-key *function* replacer is used so that key order never
   * affects the hash. (An array replacer would filter property names at every
   * nesting level and silently drop the file-hash and input contents.)
   *
   * @param {string} projectPath - Root of the project being assessed.
   * @param {object} [inputs] - Assessment inputs; sensitive values are masked.
   * @returns {Promise<string>} sha256 hex digest identifying this state.
   */
  async generateCacheKey(projectPath, inputs = {}) {
    const keyData = {
      projectPath: path.resolve(projectPath),
      inputs: this.sanitizeInputs(inputs),
      projectFiles: await this.getProjectFileHashes(projectPath)
    };
    // Deterministic serialization: sort object keys recursively, keep all values.
    const keyString = JSON.stringify(keyData, (key, value) => {
      if (value && typeof value === 'object' && !Array.isArray(value)) {
        return Object.keys(value).sort().reduce((sorted, k) => {
          sorted[k] = value[k];
          return sorted;
        }, {});
      }
      return value;
    });
    return crypto.createHash('sha256').update(keyString).digest('hex');
  }

  /**
   * Sanitize inputs for cache key generation (remove sensitive data).
   * Sensitive fields are replaced with the marker 'present' so their
   * presence/absence still participates in cache invalidation.
   *
   * @param {object} inputs
   * @returns {object} shallow copy with secret values masked
   */
  sanitizeInputs(inputs) {
    const sanitized = {
      ...inputs
    };

    // Remove sensitive fields but keep their presence for cache invalidation
    const sensitiveFields = ['apiToken', 'cloudflareToken', 'token', 'secret', 'password'];
    sensitiveFields.forEach(field => {
      if (sanitized[field]) {
        sanitized[field] = 'present'; // Just mark presence, not value
      }
    });
    return sanitized;
  }

  /**
   * Get mtime/size/content-hash fingerprints of the project files that should
   * invalidate the cache when they change. Missing files map to null so that
   * a file appearing or disappearing also changes the key.
   *
   * @param {string} projectPath
   * @returns {Promise<object>} map of relative file path -> fingerprint|null
   */
  async getProjectFileHashes(projectPath) {
    const relevantFiles = ['package.json', 'wrangler.toml', 'src/index.js', 'src/worker.js', 'dist/index.js'];
    const hashes = {};
    for (const file of relevantFiles) {
      const filePath = path.join(projectPath, file);
      try {
        const stats = await fs.stat(filePath);
        const content = await fs.readFile(filePath, 'utf8');
        hashes[file] = {
          mtime: stats.mtime.getTime(),
          size: stats.size,
          // md5 prefix is fine here: fingerprint for invalidation, not security
          hash: crypto.createHash('md5').update(content).digest('hex').substring(0, 8)
        };
      } catch (error) {
        // File doesn't exist, that's fine
        hashes[file] = null;
      }
    }
    return hashes;
  }

  /**
   * Get a cached assessment result, or null when absent/expired.
   * Memory cache is checked first; disk hits are promoted back into memory.
   *
   * @param {string} cacheKey
   * @returns {Promise<*>} cached data or null
   */
  async get(cacheKey) {
    await this.initialize();

    // Check memory cache first
    const memoryEntry = this.memoryCache.get(cacheKey);
    if (memoryEntry && !this.isExpired(memoryEntry)) {
      return memoryEntry.data;
    }

    // Check disk cache if enabled
    if (this.enableDiskCache) {
      const diskEntry = await this.loadFromDiskCache(cacheKey);
      if (diskEntry && !this.isExpired(diskEntry)) {
        // Restore to memory cache
        this.memoryCache.set(cacheKey, diskEntry);
        return diskEntry.data;
      }
    }
    return null;
  }

  /**
   * Store an assessment result in the cache (memory, and disk when enabled),
   * then enforce TTL/size limits.
   *
   * @param {string} cacheKey
   * @param {*} data - Result to cache; must be JSON-serializable for disk use.
   */
  async set(cacheKey, data) {
    await this.initialize();
    const entry = {
      data,
      timestamp: Date.now(),
      key: cacheKey
    };

    // Store in memory
    this.memoryCache.set(cacheKey, entry);

    // Store on disk if enabled
    if (this.enableDiskCache) {
      await this.saveToDiskCache(cacheKey, entry);
    }

    // Maintain cache size limits
    await this.cleanup();
  }

  /**
   * Check if a cache entry has outlived its TTL.
   * @param {{timestamp: number}} entry
   * @returns {boolean}
   */
  isExpired(entry) {
    return Date.now() - entry.timestamp > this.ttl;
  }

  /**
   * Clear expired entries and maintain size limits (oldest entries are
   * evicted first when over maxEntries).
   */
  async cleanup() {
    const validEntries = new Map();

    // Clean memory cache
    for (const [key, entry] of this.memoryCache) {
      if (!this.isExpired(entry)) {
        validEntries.set(key, entry);
      }
    }

    // If still too many entries, remove oldest
    if (validEntries.size > this.maxEntries) {
      const sortedEntries = Array.from(validEntries.entries()).sort((a, b) => a[1].timestamp - b[1].timestamp);
      const toKeep = sortedEntries.slice(-this.maxEntries);
      validEntries.clear();
      toKeep.forEach(([key, entry]) => validEntries.set(key, entry));
    }
    this.memoryCache = validEntries;

    // Clean disk cache
    if (this.enableDiskCache) {
      await this.cleanupDiskCache();
    }
  }

  /**
   * Clear all cache entries from memory (and disk when enabled).
   */
  async clear() {
    this.memoryCache.clear();
    if (this.enableDiskCache) {
      await this.clearDiskCache();
    }
  }

  /**
   * Get cache statistics (entry counts by validity, plus configuration).
   * @returns {Promise<object>}
   */
  async getStats() {
    const memoryEntries = Array.from(this.memoryCache.values());
    return {
      memory: {
        total: memoryEntries.length,
        valid: memoryEntries.filter(entry => !this.isExpired(entry)).length,
        expired: memoryEntries.filter(entry => this.isExpired(entry)).length
      },
      disk: this.enableDiskCache ? await this.getDiskStats() : null,
      ttl: this.ttl,
      maxEntries: this.maxEntries
    };
  }

  // Disk cache implementation

  /** Create the cache directory if it does not already exist. */
  async ensureCacheDirectory() {
    try {
      await fs.mkdir(this.cacheDir, {
        recursive: true
      });
    } catch (error) {
      // Directory might already exist, ignore
    }
  }

  /** Map a cache key to its on-disk JSON file path. */
  getCacheFilePath(key) {
    return path.join(this.cacheDir, `${key}.json`);
  }

  /** Persist one entry to disk; failures are logged but never thrown. */
  async saveToDiskCache(key, entry) {
    try {
      const filePath = this.getCacheFilePath(key);
      await fs.writeFile(filePath, JSON.stringify(entry, null, 2));
    } catch (error) {
      // Disk cache failure shouldn't break functionality
      console.warn('Failed to save to disk cache:', error.message);
    }
  }

  /** Load one entry from disk, or null when missing/unreadable/corrupt. */
  async loadFromDiskCache(key) {
    try {
      const filePath = this.getCacheFilePath(key);
      const content = await fs.readFile(filePath, 'utf8');
      return JSON.parse(content);
    } catch (error) {
      return null;
    }
  }

  /** Warm the memory cache with all non-expired entries found on disk. */
  async loadFromDisk() {
    try {
      const files = await fs.readdir(this.cacheDir);
      const cacheFiles = files.filter(file => file.endsWith('.json'));
      for (const file of cacheFiles) {
        const key = file.replace('.json', '');
        const entry = await this.loadFromDiskCache(key);
        if (entry && !this.isExpired(entry)) {
          this.memoryCache.set(key, entry);
        }
      }
    } catch (error) {
      // Disk cache loading failure is not critical
    }
  }

  /** Delete expired or unreadable entry files from the disk cache. */
  async cleanupDiskCache() {
    try {
      const files = await fs.readdir(this.cacheDir);
      const cacheFiles = files.filter(file => file.endsWith('.json'));
      for (const file of cacheFiles) {
        const key = file.replace('.json', '');
        const entry = await this.loadFromDiskCache(key);
        if (!entry || this.isExpired(entry)) {
          await fs.unlink(path.join(this.cacheDir, file));
        }
      }
    } catch (error) {
      // Cleanup failure is not critical
    }
  }

  /** Delete every entry file from the disk cache directory. */
  async clearDiskCache() {
    try {
      const files = await fs.readdir(this.cacheDir);
      for (const file of files) {
        if (file.endsWith('.json')) {
          await fs.unlink(path.join(this.cacheDir, file));
        }
      }
    } catch (error) {
      // Clear failure is not critical
    }
  }

  /** Count valid/expired entries on disk; returns zeros when unreadable. */
  async getDiskStats() {
    try {
      const files = await fs.readdir(this.cacheDir);
      const cacheFiles = files.filter(file => file.endsWith('.json'));
      let valid = 0;
      let expired = 0;
      for (const file of cacheFiles) {
        const key = file.replace('.json', '');
        const entry = await this.loadFromDiskCache(key);
        if (entry) {
          if (this.isExpired(entry)) {
            expired++;
          } else {
            valid++;
          }
        }
      }
      return {
        total: cacheFiles.length,
        valid,
        expired
      };
    } catch (error) {
      return {
        total: 0,
        valid: 0,
        expired: 0
      };
    }
  }
}