@tamyla/clodo-framework 3.0.11 → 3.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/bin/clodo-service.js +385 -184
- package/dist/orchestration/multi-domain-orchestrator.js +131 -63
- package/dist/service-management/AssessmentCache.js +303 -0
- package/dist/service-management/CapabilityAssessmentEngine.js +902 -0
- package/dist/service-management/ConfirmationEngine.js +4 -4
- package/dist/service-management/InputCollector.js +119 -4
- package/dist/service-management/ServiceAutoDiscovery.js +745 -0
- package/dist/service-management/ServiceCreator.js +1 -4
- package/dist/service-management/ServiceOrchestrator.js +269 -1
- package/dist/service-management/index.js +4 -1
- package/dist/utils/cloudflare/api.js +41 -1
- package/dist/utils/config/unified-config-manager.js +6 -2
- package/dist/utils/deployment/wrangler-config-manager.js +32 -7
- package/package.json +1 -1
- package/templates/generic/src/config/domains.js +3 -3
|
@@ -39,6 +39,13 @@ export class MultiDomainOrchestrator {
|
|
|
39
39
|
this.cloudflareToken = options.cloudflareToken;
|
|
40
40
|
this.cloudflareAccountId = options.cloudflareAccountId;
|
|
41
41
|
|
|
42
|
+
// Configure wrangler to use API token when available
|
|
43
|
+
// This ensures all wrangler operations use the same account as API operations
|
|
44
|
+
if (this.cloudflareToken) {
|
|
45
|
+
process.env.CLOUDFLARE_API_TOKEN = this.cloudflareToken;
|
|
46
|
+
console.log(`🔑 Configured wrangler to use API token authentication`);
|
|
47
|
+
}
|
|
48
|
+
|
|
42
49
|
// Initialize modular components
|
|
43
50
|
this.domainResolver = new DomainResolver({
|
|
44
51
|
environment: this.environment,
|
|
@@ -74,7 +81,8 @@ export class MultiDomainOrchestrator {
|
|
|
74
81
|
this.wranglerConfigManager = new WranglerConfigManager({
|
|
75
82
|
projectRoot: this.servicePath,
|
|
76
83
|
dryRun: this.dryRun,
|
|
77
|
-
verbose: options.verbose || false
|
|
84
|
+
verbose: options.verbose || false,
|
|
85
|
+
accountId: this.cloudflareAccountId
|
|
78
86
|
});
|
|
79
87
|
|
|
80
88
|
// ConfigurationValidator is a static class - don't instantiate
|
|
@@ -283,26 +291,60 @@ export class MultiDomainOrchestrator {
|
|
|
283
291
|
// Use API-based operations if credentials are available
|
|
284
292
|
if (this.cloudflareToken && this.cloudflareAccountId) {
|
|
285
293
|
console.log(` 🔑 Using API token authentication for account: ${this.cloudflareAccountId}`);
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
accountId: this.cloudflareAccountId
|
|
289
|
-
});
|
|
290
|
-
if (exists) {
|
|
291
|
-
console.log(` ✅ Database already exists: ${databaseName}`);
|
|
292
|
-
databaseId = await getDatabaseId(databaseName, {
|
|
294
|
+
try {
|
|
295
|
+
exists = await databaseExists(databaseName, {
|
|
293
296
|
apiToken: this.cloudflareToken,
|
|
294
297
|
accountId: this.cloudflareAccountId
|
|
295
298
|
});
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
299
|
+
if (exists) {
|
|
300
|
+
console.log(` ✅ Database already exists: ${databaseName}`);
|
|
301
|
+
databaseId = await getDatabaseId(databaseName, {
|
|
302
|
+
apiToken: this.cloudflareToken,
|
|
303
|
+
accountId: this.cloudflareAccountId
|
|
304
|
+
});
|
|
305
|
+
console.log(` 📊 Existing Database ID: ${databaseId}`);
|
|
306
|
+
} else {
|
|
307
|
+
console.log(` 📦 Creating database: ${databaseName}`);
|
|
308
|
+
databaseId = await createDatabase(databaseName, {
|
|
309
|
+
apiToken: this.cloudflareToken,
|
|
310
|
+
accountId: this.cloudflareAccountId
|
|
311
|
+
});
|
|
312
|
+
console.log(` ✅ Database created: ${databaseName}`);
|
|
313
|
+
console.log(` 📊 Database ID: ${databaseId}`);
|
|
314
|
+
created = true;
|
|
315
|
+
}
|
|
316
|
+
} catch (apiError) {
|
|
317
|
+
// Check if this is an authentication or permission error
|
|
318
|
+
if (apiError.message.includes('permission denied') || apiError.message.includes('403') || apiError.message.includes('authentication failed') || apiError.message.includes('401')) {
|
|
319
|
+
if (apiError.message.includes('401')) {
|
|
320
|
+
console.log(` ❌ API token authentication failed (invalid/expired token)`);
|
|
321
|
+
console.log(` 🔗 Check/create token at: https://dash.cloudflare.com/profile/api-tokens`);
|
|
322
|
+
} else {
|
|
323
|
+
console.log(` ⚠️ API token lacks D1 database permissions`);
|
|
324
|
+
console.log(` 💡 Required permission: 'Cloudflare D1:Edit'`);
|
|
325
|
+
console.log(` 🔗 Update token at: https://dash.cloudflare.com/profile/api-tokens`);
|
|
326
|
+
}
|
|
327
|
+
console.log(` 🔄 Falling back to OAuth authentication...`);
|
|
328
|
+
console.log(` ⚠️ WARNING: OAuth uses your personal account, not the API token account!`);
|
|
329
|
+
|
|
330
|
+
// Fall back to OAuth-based operations with warning
|
|
331
|
+
console.log(` 🔐 Using OAuth authentication (wrangler CLI)`);
|
|
332
|
+
exists = await databaseExists(databaseName);
|
|
333
|
+
if (exists) {
|
|
334
|
+
console.log(` ✅ Database already exists: ${databaseName}`);
|
|
335
|
+
databaseId = await getDatabaseId(databaseName);
|
|
336
|
+
console.log(` 📊 Existing Database ID: ${databaseId}`);
|
|
337
|
+
} else {
|
|
338
|
+
console.log(` 📦 Creating database: ${databaseName}`);
|
|
339
|
+
databaseId = await createDatabase(databaseName);
|
|
340
|
+
console.log(` ✅ Database created: ${databaseName}`);
|
|
341
|
+
console.log(` 📊 Database ID: ${databaseId}`);
|
|
342
|
+
created = true;
|
|
343
|
+
}
|
|
344
|
+
} else {
|
|
345
|
+
// Re-throw non-auth/permission errors
|
|
346
|
+
throw apiError;
|
|
347
|
+
}
|
|
306
348
|
}
|
|
307
349
|
} else {
|
|
308
350
|
// Fallback to CLI-based operations (OAuth)
|
|
@@ -333,6 +375,11 @@ export class MultiDomainOrchestrator {
|
|
|
333
375
|
console.log(` 📁 Service path: ${this.servicePath}`);
|
|
334
376
|
console.log(` 📁 Current working directory: ${process.cwd()}`);
|
|
335
377
|
try {
|
|
378
|
+
// Set account_id if API credentials are available
|
|
379
|
+
if (this.cloudflareAccountId) {
|
|
380
|
+
await this.wranglerConfigManager.setAccountId(this.cloudflareAccountId);
|
|
381
|
+
}
|
|
382
|
+
|
|
336
383
|
// Ensure environment section exists
|
|
337
384
|
await this.wranglerConfigManager.ensureEnvironment(this.environment);
|
|
338
385
|
|
|
@@ -550,7 +597,7 @@ export class MultiDomainOrchestrator {
|
|
|
550
597
|
}
|
|
551
598
|
|
|
552
599
|
/**
|
|
553
|
-
* Validate domain deployment with real HTTP health check
|
|
600
|
+
* Validate domain deployment with real HTTP health check (with retries)
|
|
554
601
|
*/
|
|
555
602
|
async validateDomainDeployment(domain) {
|
|
556
603
|
console.log(` ✅ Validating deployment for ${domain}`);
|
|
@@ -567,54 +614,75 @@ export class MultiDomainOrchestrator {
|
|
|
567
614
|
return true;
|
|
568
615
|
}
|
|
569
616
|
console.log(` 🔍 Running health check: ${deploymentUrl}/health`);
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
}
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
this.stateManager.logAuditEvent('HEALTH_CHECK_PASSED', domain, {
|
|
588
|
-
url: deploymentUrl,
|
|
589
|
-
status,
|
|
590
|
-
responseTime,
|
|
591
|
-
environment: this.environment
|
|
592
|
-
});
|
|
593
|
-
return true;
|
|
594
|
-
} else {
|
|
595
|
-
console.log(` ⚠️ Health check returned ${status} - deployment may have issues`);
|
|
596
|
-
this.stateManager.logAuditEvent('HEALTH_CHECK_WARNING', domain, {
|
|
597
|
-
url: deploymentUrl,
|
|
598
|
-
status,
|
|
599
|
-
responseTime,
|
|
600
|
-
environment: this.environment
|
|
617
|
+
|
|
618
|
+
// Retry logic for health checks
|
|
619
|
+
const maxRetries = 3;
|
|
620
|
+
const retryDelay = 5000; // 5 seconds between retries
|
|
621
|
+
|
|
622
|
+
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
|
623
|
+
try {
|
|
624
|
+
const startTime = Date.now();
|
|
625
|
+
console.log(` Attempt ${attempt}/${maxRetries}...`);
|
|
626
|
+
|
|
627
|
+
// Perform actual HTTP health check
|
|
628
|
+
const response = await fetch(`${deploymentUrl}/health`, {
|
|
629
|
+
method: 'GET',
|
|
630
|
+
headers: {
|
|
631
|
+
'User-Agent': 'Clodo-Orchestrator/2.0'
|
|
632
|
+
},
|
|
633
|
+
signal: AbortSignal.timeout(15000) // 15 second timeout
|
|
601
634
|
});
|
|
635
|
+
const responseTime = Date.now() - startTime;
|
|
636
|
+
const status = response.status;
|
|
637
|
+
if (status === 200) {
|
|
638
|
+
console.log(` ✅ Health check passed (${status}) - Response time: ${responseTime}ms`);
|
|
639
|
+
|
|
640
|
+
// Log successful health check
|
|
641
|
+
this.stateManager.logAuditEvent('HEALTH_CHECK_PASSED', domain, {
|
|
642
|
+
url: deploymentUrl,
|
|
643
|
+
status,
|
|
644
|
+
responseTime,
|
|
645
|
+
attempt,
|
|
646
|
+
environment: this.environment
|
|
647
|
+
});
|
|
648
|
+
return true;
|
|
649
|
+
} else {
|
|
650
|
+
const errorMsg = `Health check returned ${status} - deployment may have issues`;
|
|
651
|
+
console.log(` ⚠️ ${errorMsg}`);
|
|
652
|
+
this.stateManager.logAuditEvent('HEALTH_CHECK_WARNING', domain, {
|
|
653
|
+
url: deploymentUrl,
|
|
654
|
+
status,
|
|
655
|
+
responseTime,
|
|
656
|
+
attempt,
|
|
657
|
+
environment: this.environment
|
|
658
|
+
});
|
|
602
659
|
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
660
|
+
// Don't fail deployment for non-200 status, just warn
|
|
661
|
+
return true;
|
|
662
|
+
}
|
|
663
|
+
} catch (error) {
|
|
664
|
+
const isLastAttempt = attempt === maxRetries;
|
|
665
|
+
const errorMsg = `Health check failed: ${error.message}`;
|
|
666
|
+
if (isLastAttempt) {
|
|
667
|
+
console.log(` ❌ ${errorMsg} (final attempt)`);
|
|
668
|
+
console.log(` 💡 The service may still be deploying. Check manually: curl ${deploymentUrl}/health`);
|
|
669
|
+
this.stateManager.logAuditEvent('HEALTH_CHECK_FAILED', domain, {
|
|
670
|
+
url: deploymentUrl,
|
|
671
|
+
error: error.message,
|
|
672
|
+
attempts: maxRetries,
|
|
673
|
+
environment: this.environment
|
|
674
|
+
});
|
|
614
675
|
|
|
615
|
-
|
|
616
|
-
|
|
676
|
+
// Don't fail deployment for health check failure - it might just need time
|
|
677
|
+
return true;
|
|
678
|
+
} else {
|
|
679
|
+
console.log(` ⚠️ ${errorMsg} (attempt ${attempt}/${maxRetries})`);
|
|
680
|
+
console.log(` ⏳ Retrying in ${retryDelay / 1000} seconds...`);
|
|
681
|
+
await new Promise(resolve => setTimeout(resolve, retryDelay));
|
|
682
|
+
}
|
|
683
|
+
}
|
|
617
684
|
}
|
|
685
|
+
return true;
|
|
618
686
|
}
|
|
619
687
|
|
|
620
688
|
/**
|
|
/**
 * Assessment Cache System
 *
 * Caches assessment results to avoid redundant analysis of project artifacts.
 * Provides intelligent cache invalidation based on file changes and time-based expiration.
 */

import crypto from 'crypto';
import fs from 'fs/promises';
import path from 'path';
import { existsSync } from 'fs'; // NOTE(review): currently unused in this module — confirm before removing

export class AssessmentCache {
  /**
   * @param {object} [options]
   * @param {string} [options.cacheDir='./.clodo-cache/assessment'] - Directory for on-disk cache entries.
   * @param {number} [options.ttl=300000] - Entry time-to-live in milliseconds (default 5 minutes).
   * @param {number} [options.maxEntries=50] - Maximum number of in-memory entries kept after cleanup.
   * @param {boolean} [options.enableDiskCache=true] - Persist entries to disk in addition to memory.
   */
  constructor(options = {}) {
    this.cacheDir = options.cacheDir || './.clodo-cache/assessment';
    this.ttl = options.ttl || 5 * 60 * 1000; // 5 minutes default
    this.maxEntries = options.maxEntries || 50;
    this.enableDiskCache = options.enableDiskCache !== false;
    this.memoryCache = new Map();
    this.initialized = false;
  }

  /**
   * Initialize the cache system (idempotent). Creates the cache directory and
   * warms the memory cache from disk when disk caching is enabled.
   */
  async initialize() {
    if (this.initialized) return;
    if (this.enableDiskCache) {
      await this.ensureCacheDirectory();
      await this.loadFromDisk();
    }
    this.initialized = true;
  }

  /**
   * Generate a deterministic cache key from project state and inputs.
   *
   * BUGFIX: the previous implementation passed a sorted array of the TOP-LEVEL
   * key names as the JSON.stringify replacer. An array replacer whitelists
   * property names at EVERY nesting level, so all nested keys of `inputs` and
   * `projectFiles` were dropped and the key depended only on `projectPath`.
   * A recursive key-sorted serialization is used instead so nested data
   * (and therefore file-change invalidation) actually affects the key.
   *
   * @param {string} projectPath - Root directory of the project being assessed.
   * @param {object} [inputs] - Assessment inputs; sensitive fields are masked.
   * @returns {Promise<string>} SHA-256 hex digest identifying this project state.
   */
  async generateCacheKey(projectPath, inputs = {}) {
    const keyData = {
      projectPath: path.resolve(projectPath),
      inputs: this.sanitizeInputs(inputs),
      projectFiles: await this.getProjectFileHashes(projectPath)
    };
    const keyString = this.stableStringify(keyData);
    return crypto.createHash('sha256').update(keyString).digest('hex');
  }

  /**
   * Deterministically serialize a value with object keys sorted at every depth,
   * so semantically-equal objects always produce the same string.
   * @param {*} value - Any JSON-serializable value.
   * @returns {string} Canonical JSON-like string.
   */
  stableStringify(value) {
    if (value === null || typeof value !== 'object') {
      return JSON.stringify(value);
    }
    if (Array.isArray(value)) {
      return `[${value.map(item => this.stableStringify(item)).join(',')}]`;
    }
    const sortedKeys = Object.keys(value).sort();
    const parts = sortedKeys.map(key => `${JSON.stringify(key)}:${this.stableStringify(value[key])}`);
    return `{${parts.join(',')}}`;
  }

  /**
   * Sanitize inputs for cache key generation: secret values are replaced with
   * the literal 'present' so their presence (but not their content) still
   * participates in cache invalidation.
   * @param {object} inputs
   * @returns {object} Shallow copy with sensitive fields masked.
   */
  sanitizeInputs(inputs) {
    const sanitized = {
      ...inputs
    };

    // Remove sensitive fields but keep their presence for cache invalidation
    const sensitiveFields = ['apiToken', 'cloudflareToken', 'token', 'secret', 'password'];
    sensitiveFields.forEach(field => {
      if (sanitized[field]) {
        sanitized[field] = 'present'; // Just mark presence, not value
      }
    });
    return sanitized;
  }

  /**
   * Get mtime/size/content-hash fingerprints of the project files that matter
   * for assessment, used to invalidate the cache when any of them changes.
   * Missing files are recorded as null (their absence is itself significant).
   * @param {string} projectPath
   * @returns {Promise<object>} Map of relative file path -> fingerprint or null.
   */
  async getProjectFileHashes(projectPath) {
    const relevantFiles = ['package.json', 'wrangler.toml', 'src/index.js', 'src/worker.js', 'dist/index.js'];
    const hashes = {};
    for (const file of relevantFiles) {
      const filePath = path.join(projectPath, file);
      try {
        const stats = await fs.stat(filePath);
        const content = await fs.readFile(filePath, 'utf8');
        hashes[file] = {
          mtime: stats.mtime.getTime(),
          size: stats.size,
          // md5 is fine here: this is change detection, not security
          hash: crypto.createHash('md5').update(content).digest('hex').substring(0, 8)
        };
      } catch (error) {
        // File doesn't exist, that's fine
        hashes[file] = null;
      }
    }
    return hashes;
  }

  /**
   * Get a cached assessment result, consulting memory first and then disk.
   * @param {string} cacheKey
   * @returns {Promise<*>} Cached data, or null on miss/expiry.
   */
  async get(cacheKey) {
    await this.initialize();

    // Check memory cache first
    const memoryEntry = this.memoryCache.get(cacheKey);
    if (memoryEntry && !this.isExpired(memoryEntry)) {
      return memoryEntry.data;
    }

    // Check disk cache if enabled
    if (this.enableDiskCache) {
      const diskEntry = await this.loadFromDiskCache(cacheKey);
      if (diskEntry && !this.isExpired(diskEntry)) {
        // Restore to memory cache
        this.memoryCache.set(cacheKey, diskEntry);
        return diskEntry.data;
      }
    }
    return null;
  }

  /**
   * Store an assessment result in the cache (memory and, if enabled, disk),
   * then enforce expiry/size limits.
   * @param {string} cacheKey
   * @param {*} data - JSON-serializable assessment result.
   */
  async set(cacheKey, data) {
    await this.initialize();
    const entry = {
      data,
      timestamp: Date.now(),
      key: cacheKey
    };

    // Store in memory
    this.memoryCache.set(cacheKey, entry);

    // Store on disk if enabled
    if (this.enableDiskCache) {
      await this.saveToDiskCache(cacheKey, entry);
    }

    // Maintain cache size limits
    await this.cleanup();
  }

  /**
   * Check whether a cache entry has outlived the configured TTL.
   * @param {{timestamp: number}} entry
   * @returns {boolean}
   */
  isExpired(entry) {
    return Date.now() - entry.timestamp > this.ttl;
  }

  /**
   * Drop expired entries and, if the memory cache still exceeds maxEntries,
   * evict the oldest entries (by timestamp) until it fits.
   */
  async cleanup() {
    const validEntries = new Map();

    // Clean memory cache
    for (const [key, entry] of this.memoryCache) {
      if (!this.isExpired(entry)) {
        validEntries.set(key, entry);
      }
    }

    // If still too many entries, remove oldest
    if (validEntries.size > this.maxEntries) {
      const sortedEntries = Array.from(validEntries.entries()).sort((a, b) => a[1].timestamp - b[1].timestamp);
      const toKeep = sortedEntries.slice(-this.maxEntries);
      validEntries.clear();
      toKeep.forEach(([key, entry]) => validEntries.set(key, entry));
    }
    this.memoryCache = validEntries;

    // Clean disk cache
    if (this.enableDiskCache) {
      await this.cleanupDiskCache();
    }
  }

  /**
   * Clear all cache entries from memory and (if enabled) disk.
   */
  async clear() {
    this.memoryCache.clear();
    if (this.enableDiskCache) {
      await this.clearDiskCache();
    }
  }

  /**
   * Get cache statistics: memory/disk entry counts split into valid vs expired,
   * plus the configured TTL and size limit.
   * @returns {Promise<object>}
   */
  async getStats() {
    const memoryEntries = Array.from(this.memoryCache.values());
    return {
      memory: {
        total: memoryEntries.length,
        valid: memoryEntries.filter(entry => !this.isExpired(entry)).length,
        expired: memoryEntries.filter(entry => this.isExpired(entry)).length
      },
      disk: this.enableDiskCache ? await this.getDiskStats() : null,
      ttl: this.ttl,
      maxEntries: this.maxEntries
    };
  }

  // Disk cache implementation

  /** Create the cache directory if it does not already exist (best-effort). */
  async ensureCacheDirectory() {
    try {
      await fs.mkdir(this.cacheDir, {
        recursive: true
      });
    } catch (error) {
      // Directory might already exist, ignore
    }
  }

  /** @param {string} key @returns {string} Absolute/relative path of the entry's JSON file. */
  getCacheFilePath(key) {
    return path.join(this.cacheDir, `${key}.json`);
  }

  /** Persist one entry to disk; failures are logged but never thrown. */
  async saveToDiskCache(key, entry) {
    try {
      const filePath = this.getCacheFilePath(key);
      await fs.writeFile(filePath, JSON.stringify(entry, null, 2));
    } catch (error) {
      // Disk cache failure shouldn't break functionality
      console.warn('Failed to save to disk cache:', error.message);
    }
  }

  /** Load one entry from disk; returns null on any failure (missing, corrupt). */
  async loadFromDiskCache(key) {
    try {
      const filePath = this.getCacheFilePath(key);
      const content = await fs.readFile(filePath, 'utf8');
      return JSON.parse(content);
    } catch (error) {
      return null;
    }
  }

  /** Warm the memory cache with all non-expired entries found on disk (best-effort). */
  async loadFromDisk() {
    try {
      const files = await fs.readdir(this.cacheDir);
      const cacheFiles = files.filter(file => file.endsWith('.json'));
      for (const file of cacheFiles) {
        const key = file.replace('.json', '');
        const entry = await this.loadFromDiskCache(key);
        if (entry && !this.isExpired(entry)) {
          this.memoryCache.set(key, entry);
        }
      }
    } catch (error) {
      // Disk cache loading failure is not critical
    }
  }

  /** Delete expired or unreadable entry files from the cache directory (best-effort). */
  async cleanupDiskCache() {
    try {
      const files = await fs.readdir(this.cacheDir);
      const cacheFiles = files.filter(file => file.endsWith('.json'));
      for (const file of cacheFiles) {
        const key = file.replace('.json', '');
        const entry = await this.loadFromDiskCache(key);
        if (!entry || this.isExpired(entry)) {
          await fs.unlink(path.join(this.cacheDir, file));
        }
      }
    } catch (error) {
      // Cleanup failure is not critical
    }
  }

  /** Remove every cache entry file from the cache directory (best-effort). */
  async clearDiskCache() {
    try {
      const files = await fs.readdir(this.cacheDir);
      for (const file of files) {
        if (file.endsWith('.json')) {
          await fs.unlink(path.join(this.cacheDir, file));
        }
      }
    } catch (error) {
      // Clear failure is not critical
    }
  }

  /** Count valid vs expired entries on disk; returns zeros if the directory is unreadable. */
  async getDiskStats() {
    try {
      const files = await fs.readdir(this.cacheDir);
      const cacheFiles = files.filter(file => file.endsWith('.json'));
      let valid = 0;
      let expired = 0;
      for (const file of cacheFiles) {
        const key = file.replace('.json', '');
        const entry = await this.loadFromDiskCache(key);
        if (entry) {
          if (this.isExpired(entry)) {
            expired++;
          } else {
            valid++;
          }
        }
      }
      return {
        total: cacheFiles.length,
        valid,
        expired
      };
    } catch (error) {
      return {
        total: 0,
        valid: 0,
        expired: 0
      };
    }
  }
}