cloud-cost-cli 0.1.1 → 0.3.0-beta.1
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
- package/README.md +116 -122
- package/dist/bin/cloud-cost-cli.js +28 -3
- package/dist/src/analyzers/cost-estimator.d.ts +14 -0
- package/dist/src/analyzers/cost-estimator.js +74 -16
- package/dist/src/analyzers/pricing-service.d.ts +38 -0
- package/dist/src/analyzers/pricing-service.js +263 -0
- package/dist/src/commands/ask.d.ts +11 -0
- package/dist/src/commands/ask.js +164 -0
- package/dist/src/commands/config.d.ts +1 -0
- package/dist/src/commands/config.js +120 -0
- package/dist/src/commands/costs.d.ts +6 -0
- package/dist/src/commands/costs.js +54 -0
- package/dist/src/commands/scan.d.ts +6 -0
- package/dist/src/commands/scan.js +255 -85
- package/dist/src/commands/script.d.ts +8 -0
- package/dist/src/commands/script.js +27 -0
- package/dist/src/providers/azure/client.d.ts +20 -0
- package/dist/src/providers/azure/client.js +41 -0
- package/dist/src/providers/azure/disks.d.ts +4 -0
- package/dist/src/providers/azure/disks.js +87 -0
- package/dist/src/providers/azure/index.d.ts +6 -0
- package/dist/src/providers/azure/index.js +15 -0
- package/dist/src/providers/azure/public-ips.d.ts +3 -0
- package/dist/src/providers/azure/public-ips.js +47 -0
- package/dist/src/providers/azure/sql.d.ts +4 -0
- package/dist/src/providers/azure/sql.js +134 -0
- package/dist/src/providers/azure/storage.d.ts +8 -0
- package/dist/src/providers/azure/storage.js +100 -0
- package/dist/src/providers/azure/vms.d.ts +4 -0
- package/dist/src/providers/azure/vms.js +164 -0
- package/dist/src/reporters/table.d.ts +2 -1
- package/dist/src/reporters/table.js +69 -3
- package/dist/src/services/ai.d.ts +44 -0
- package/dist/src/services/ai.js +345 -0
- package/dist/src/services/script-generator.d.ts +21 -0
- package/dist/src/services/script-generator.js +245 -0
- package/dist/src/utils/cache.d.ts +25 -0
- package/dist/src/utils/cache.js +197 -0
- package/dist/src/utils/config.d.ts +37 -0
- package/dist/src/utils/config.js +175 -0
- package/dist/src/utils/cost-tracker.d.ts +33 -0
- package/dist/src/utils/cost-tracker.js +135 -0
- package/dist/src/utils/formatter.d.ts +2 -0
- package/dist/src/utils/formatter.js +29 -1
- package/docs/RELEASE.md +14 -25
- package/package.json +15 -3

package/dist/src/commands/scan.js

@@ -8,104 +8,274 @@ const rds_1 = require("../providers/aws/rds");
 const s3_1 = require("../providers/aws/s3");
 const elb_1 = require("../providers/aws/elb");
 const eip_1 = require("../providers/aws/eip");
+const client_2 = require("../providers/azure/client");
+const vms_1 = require("../providers/azure/vms");
+const disks_1 = require("../providers/azure/disks");
+const storage_1 = require("../providers/azure/storage");
+const sql_1 = require("../providers/azure/sql");
+const public_ips_1 = require("../providers/azure/public-ips");
 const table_1 = require("../reporters/table");
 const json_1 = require("../reporters/json");
 const logger_1 = require("../utils/logger");
+const ai_1 = require("../services/ai");
+const ask_1 = require("./ask");
+const config_1 = require("../utils/config");
 async function scanCommand(options) {
     try {
-        if (options.provider
-
-        process.exit(1);
+        if (options.provider === 'aws') {
+            await scanAWS(options);
         }
-
-
-            profile: options.profile,
-        });
-        (0, logger_1.info)(`Scanning AWS account (profile: ${options.profile || 'default'}, region: ${client.region})...`);
-        // Run analyzers in parallel
-        (0, logger_1.info)('Analyzing EC2 instances...');
-        const ec2Promise = (0, ec2_1.analyzeEC2Instances)(client);
-        (0, logger_1.info)('Analyzing EBS volumes...');
-        const ebsPromise = (0, ebs_1.analyzeEBSVolumes)(client);
-        (0, logger_1.info)('Analyzing RDS instances...');
-        const rdsPromise = (0, rds_1.analyzeRDSInstances)(client);
-        (0, logger_1.info)('Analyzing S3 buckets...');
-        const s3Promise = (0, s3_1.analyzeS3Buckets)(client);
-        (0, logger_1.info)('Analyzing Load Balancers...');
-        const elbPromise = (0, elb_1.analyzeELBs)(client);
-        (0, logger_1.info)('Analyzing Elastic IPs...');
-        const eipPromise = (0, eip_1.analyzeElasticIPs)(client);
-        // Wait for all analyzers to complete
-        const [ec2Opportunities, ebsOpportunities, rdsOpportunities, s3Opportunities, elbOpportunities, eipOpportunities,] = await Promise.all([
-            ec2Promise,
-            ebsPromise,
-            rdsPromise,
-            s3Promise,
-            elbPromise,
-            eipPromise,
-        ]);
-        (0, logger_1.success)(`Found ${ec2Opportunities.length} EC2 opportunities`);
-        (0, logger_1.success)(`Found ${ebsOpportunities.length} EBS opportunities`);
-        (0, logger_1.success)(`Found ${rdsOpportunities.length} RDS opportunities`);
-        (0, logger_1.success)(`Found ${s3Opportunities.length} S3 opportunities`);
-        (0, logger_1.success)(`Found ${elbOpportunities.length} ELB opportunities`);
-        (0, logger_1.success)(`Found ${eipOpportunities.length} EIP opportunities`);
-        // Combine opportunities
-        const allOpportunities = [
-            ...ec2Opportunities,
-            ...ebsOpportunities,
-            ...rdsOpportunities,
-            ...s3Opportunities,
-            ...elbOpportunities,
-            ...eipOpportunities,
-        ];
-        // Filter by minimum savings if specified
-        const minSavings = options.minSavings ? parseFloat(options.minSavings) : 0;
-        const filteredOpportunities = allOpportunities.filter((opp) => opp.estimatedSavings >= minSavings);
-        // Calculate totals
-        const totalPotentialSavings = filteredOpportunities.reduce((sum, opp) => sum + opp.estimatedSavings, 0);
-        const summary = {
-            totalResources: filteredOpportunities.length,
-            idleResources: filteredOpportunities.filter((o) => o.category === 'idle').length,
-            oversizedResources: filteredOpportunities.filter((o) => o.category === 'oversized').length,
-            unusedResources: filteredOpportunities.filter((o) => o.category === 'unused').length,
-        };
-        const report = {
-            provider: 'aws',
-            accountId: 'N/A', // Will fetch from STS in future
-            region: client.region,
-            scanPeriod: {
-                start: new Date(Date.now() - (parseInt(options.days || '30') * 24 * 60 * 60 * 1000)),
-                end: new Date(),
-            },
-            opportunities: filteredOpportunities,
-            totalPotentialSavings,
-            summary,
-        };
-        // Render output
-        const topN = parseInt(options.top || '5');
-        if (options.output === 'json') {
-            (0, json_1.renderJSON)(report);
+        else if (options.provider === 'azure') {
+            await scanAzure(options);
         }
         else {
-            (0,
+            (0, logger_1.error)(`Provider "${options.provider}" not yet supported. Use --provider aws or --provider azure`);
+            process.exit(1);
         }
     }
     catch (err) {
-
-
-        console.
-        console.log('1. Run: aws configure');
-        console.log('2. Or set environment variables: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY');
-        console.log('3. Or use --profile flag with a configured profile');
-        console.log('\nSee: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html');
+        (0, logger_1.error)(`Scan failed: ${err.message}`);
+        if (options.verbose) {
+            console.error(err);
         }
-
-
-
-
+        process.exit(1);
+    }
+}
+async function scanAWS(options) {
+    const client = new client_1.AWSClient({
+        region: options.region,
+        profile: options.profile,
+    });
+    (0, logger_1.info)(`Scanning AWS account (profile: ${options.profile || 'default'}, region: ${client.region})...`);
+    if (options.accurate) {
+        (0, logger_1.info)('Note: --accurate flag is not yet implemented. Using estimated pricing.');
+        (0, logger_1.info)('Real-time pricing will be available in a future release.');
+    }
+    // Run analyzers in parallel
+    (0, logger_1.info)('Analyzing EC2 instances...');
+    const ec2Promise = (0, ec2_1.analyzeEC2Instances)(client);
+    (0, logger_1.info)('Analyzing EBS volumes...');
+    const ebsPromise = (0, ebs_1.analyzeEBSVolumes)(client);
+    (0, logger_1.info)('Analyzing RDS instances...');
+    const rdsPromise = (0, rds_1.analyzeRDSInstances)(client);
+    (0, logger_1.info)('Analyzing S3 buckets...');
+    const s3Promise = (0, s3_1.analyzeS3Buckets)(client);
+    (0, logger_1.info)('Analyzing Load Balancers...');
+    const elbPromise = (0, elb_1.analyzeELBs)(client);
+    (0, logger_1.info)('Analyzing Elastic IPs...');
+    const eipPromise = (0, eip_1.analyzeElasticIPs)(client);
+    // Wait for all analyzers to complete
+    const [ec2Opportunities, ebsOpportunities, rdsOpportunities, s3Opportunities, elbOpportunities, eipOpportunities,] = await Promise.all([
+        ec2Promise,
+        ebsPromise,
+        rdsPromise,
+        s3Promise,
+        elbPromise,
+        eipPromise,
+    ]);
+    (0, logger_1.success)(`Found ${ec2Opportunities.length} EC2 opportunities`);
+    (0, logger_1.success)(`Found ${ebsOpportunities.length} EBS opportunities`);
+    (0, logger_1.success)(`Found ${rdsOpportunities.length} RDS opportunities`);
+    (0, logger_1.success)(`Found ${s3Opportunities.length} S3 opportunities`);
+    (0, logger_1.success)(`Found ${elbOpportunities.length} ELB opportunities`);
+    (0, logger_1.success)(`Found ${eipOpportunities.length} EIP opportunities`);
+    // Combine opportunities
+    const allOpportunities = [
+        ...ec2Opportunities,
+        ...ebsOpportunities,
+        ...rdsOpportunities,
+        ...s3Opportunities,
+        ...elbOpportunities,
+        ...eipOpportunities,
+    ];
+    // Filter by minimum savings if specified
+    const minSavings = options.minSavings ? parseFloat(options.minSavings) : 0;
+    const filteredOpportunities = allOpportunities.filter((opp) => opp.estimatedSavings >= minSavings);
+    // Calculate totals
+    const totalPotentialSavings = filteredOpportunities.reduce((sum, opp) => sum + opp.estimatedSavings, 0);
+    const summary = {
+        totalResources: filteredOpportunities.length,
+        idleResources: filteredOpportunities.filter((o) => o.category === 'idle').length,
+        oversizedResources: filteredOpportunities.filter((o) => o.category === 'oversized').length,
+        unusedResources: filteredOpportunities.filter((o) => o.category === 'unused').length,
+    };
+    const report = {
+        provider: 'aws',
+        accountId: 'N/A', // Will fetch from STS in future
+        region: client.region,
+        scanPeriod: {
+            start: new Date(Date.now() - (parseInt(options.days || '30') * 24 * 60 * 60 * 1000)),
+            end: new Date(),
+        },
+        opportunities: filteredOpportunities,
+        totalPotentialSavings,
+        summary,
+    };
+    // Render output
+    const topN = parseInt(options.top || '5');
+    let aiService;
+    if (options.explain) {
+        // Load config file to get defaults
+        const fileConfig = config_1.ConfigLoader.load();
+        // CLI flags override config file
+        const provider = options.aiProvider || fileConfig.ai?.provider || 'openai';
+        const model = options.aiModel || fileConfig.ai?.model;
+        const maxExplanations = fileConfig.ai?.maxExplanations;
+        // Debug logging
+        if (process.env.DEBUG) {
+            console.error('options.aiProvider:', options.aiProvider, '(type:', typeof options.aiProvider, ')');
+            console.error('fileConfig.ai?.provider:', fileConfig.ai?.provider);
+            console.error('Provider detected:', provider);
+            console.error('Has API key in config:', !!fileConfig.ai?.apiKey);
+            console.error('Has env API key:', !!process.env.OPENAI_API_KEY);
+        }
+        if (provider === 'openai' && !process.env.OPENAI_API_KEY && !fileConfig.ai?.apiKey) {
+            (0, logger_1.error)('--explain with OpenAI requires OPENAI_API_KEY environment variable or config file');
+            (0, logger_1.info)('Set it with: export OPENAI_API_KEY="sk-..."');
+            (0, logger_1.info)('Or use --ai-provider ollama for local AI (requires Ollama installed)');
+            process.exit(1);
+        }
+        try {
+            aiService = new ai_1.AIService({
+                provider,
+                apiKey: provider === 'openai' ? (process.env.OPENAI_API_KEY || fileConfig.ai?.apiKey) : undefined,
+                model,
+                maxExplanations,
+            });
+            if (provider === 'ollama') {
+                (0, logger_1.info)('Using local Ollama for AI explanations (privacy-first, no API costs)');
             }
         }
-
+        catch (error) {
+            error(`Failed to initialize AI service: ${error.message}`);
+            process.exit(1);
+        }
+    }
+    // Save scan cache for natural language queries
+    (0, ask_1.saveScanCache)(options.provider, options.region, report);
+    if (options.output === 'json') {
+        (0, json_1.renderJSON)(report);
+    }
+    else {
+        await (0, table_1.renderTable)(report, topN, aiService);
+    }
+}
+async function scanAzure(options) {
+    const client = new client_2.AzureClient({
+        subscriptionId: options.subscriptionId,
+        location: options.location,
+    });
+    (0, logger_1.info)(`Scanning Azure subscription (${client.subscriptionId})...`);
+    if (client.location) {
+        (0, logger_1.info)(`Filtering resources by location: ${client.location}`);
+    }
+    else {
+        (0, logger_1.info)('Scanning all locations (no filter specified)');
+    }
+    if (options.accurate) {
+        (0, logger_1.info)('Note: --accurate flag is not yet implemented. Using estimated pricing.');
+    }
+    // Run analyzers in parallel
+    (0, logger_1.info)('Analyzing Virtual Machines...');
+    const vmPromise = (0, vms_1.analyzeAzureVMs)(client);
+    (0, logger_1.info)('Analyzing Managed Disks...');
+    const diskPromise = (0, disks_1.analyzeAzureDisks)(client);
+    (0, logger_1.info)('Analyzing Storage Accounts...');
+    const storagePromise = (0, storage_1.analyzeAzureStorage)(client);
+    (0, logger_1.info)('Analyzing SQL Databases...');
+    const sqlPromise = (0, sql_1.analyzeAzureSQL)(client);
+    (0, logger_1.info)('Analyzing Public IP Addresses...');
+    const ipPromise = (0, public_ips_1.analyzeAzurePublicIPs)(client);
+    // Wait for all analyzers to complete
+    const [vmOpportunities, diskOpportunities, storageOpportunities, sqlOpportunities, ipOpportunities,] = await Promise.all([
+        vmPromise,
+        diskPromise,
+        storagePromise,
+        sqlPromise,
+        ipPromise,
+    ]);
+    (0, logger_1.success)(`Found ${vmOpportunities.length} VM opportunities`);
+    (0, logger_1.success)(`Found ${diskOpportunities.length} Disk opportunities`);
+    (0, logger_1.success)(`Found ${storageOpportunities.length} Storage opportunities`);
+    (0, logger_1.success)(`Found ${sqlOpportunities.length} SQL opportunities`);
+    (0, logger_1.success)(`Found ${ipOpportunities.length} Public IP opportunities`);
+    // Combine opportunities
+    const allOpportunities = [
+        ...vmOpportunities,
+        ...diskOpportunities,
+        ...storageOpportunities,
+        ...sqlOpportunities,
+        ...ipOpportunities,
+    ];
+    // Filter by minimum savings if specified
+    const minSavings = options.minSavings ? parseFloat(options.minSavings) : 0;
+    const filteredOpportunities = allOpportunities.filter((opp) => opp.estimatedSavings >= minSavings);
+    // Calculate totals
+    const totalPotentialSavings = filteredOpportunities.reduce((sum, opp) => sum + opp.estimatedSavings, 0);
+    const summary = {
+        totalResources: filteredOpportunities.length,
+        idleResources: filteredOpportunities.filter((o) => o.category === 'idle').length,
+        oversizedResources: filteredOpportunities.filter((o) => o.category === 'oversized').length,
+        unusedResources: filteredOpportunities.filter((o) => o.category === 'unused').length,
+    };
+    const report = {
+        provider: 'azure',
+        accountId: client.subscriptionId,
+        region: client.location || 'all',
+        scanPeriod: {
+            start: new Date(Date.now() - (parseInt(options.days || '7') * 24 * 60 * 60 * 1000)),
+            end: new Date(),
+        },
+        opportunities: filteredOpportunities,
+        totalPotentialSavings,
+        summary,
+    };
+    // Render output
+    const topN = parseInt(options.top || '5');
+    let aiService;
+    if (options.explain) {
+        // Load config file to get defaults
+        const fileConfig = config_1.ConfigLoader.load();
+        // CLI flags override config file
+        const provider = options.aiProvider || fileConfig.ai?.provider || 'openai';
+        const model = options.aiModel || fileConfig.ai?.model;
+        const maxExplanations = fileConfig.ai?.maxExplanations;
+        // Debug logging
+        if (process.env.DEBUG) {
+            console.error('options.aiProvider:', options.aiProvider, '(type:', typeof options.aiProvider, ')');
+            console.error('fileConfig.ai?.provider:', fileConfig.ai?.provider);
+            console.error('Provider detected:', provider);
+            console.error('Has API key in config:', !!fileConfig.ai?.apiKey);
+            console.error('Has env API key:', !!process.env.OPENAI_API_KEY);
+        }
+        if (provider === 'openai' && !process.env.OPENAI_API_KEY && !fileConfig.ai?.apiKey) {
+            (0, logger_1.error)('--explain with OpenAI requires OPENAI_API_KEY environment variable or config file');
+            (0, logger_1.info)('Set it with: export OPENAI_API_KEY="sk-..."');
+            (0, logger_1.info)('Or use --ai-provider ollama for local AI (requires Ollama installed)');
+            process.exit(1);
+        }
+        try {
+            aiService = new ai_1.AIService({
+                provider,
+                apiKey: provider === 'openai' ? (process.env.OPENAI_API_KEY || fileConfig.ai?.apiKey) : undefined,
+                model,
+                maxExplanations,
+            });
+            if (provider === 'ollama') {
+                (0, logger_1.info)('Using local Ollama for AI explanations (privacy-first, no API costs)');
+            }
+        }
+        catch (error) {
+            error(`Failed to initialize AI service: ${error.message}`);
+            process.exit(1);
+        }
+    }
+    // Save scan cache for natural language queries
+    (0, ask_1.saveScanCache)('azure', client.location, report);
+    if (options.output === 'json') {
+        (0, json_1.renderJSON)(report);
+    }
+    else {
+        await (0, table_1.renderTable)(report, topN, aiService);
     }
 }
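The rewritten scan command now dispatches on `options.provider` and only builds an AI service when explanations are requested. Below is a minimal sketch of driving it programmatically, assuming `scan.js` still exports `scanCommand` (the export statement sits above this hunk) and that deep requires into `dist/` are permitted by `package.json`, which this diff does not show; option names are taken from the fields the hunk reads.

```js
// Hypothetical usage sketch, not part of the package's documented API.
const { scanCommand } = require('cloud-cost-cli/dist/src/commands/scan');

scanCommand({
    provider: 'azure',                                    // anything other than 'aws' or 'azure' exits with an error
    subscriptionId: process.env.AZURE_SUBSCRIPTION_ID,    // or rely on the env var inside AzureClient
    location: 'eastus',                                   // optional Azure location filter
    minSavings: '10',                                     // parsed with parseFloat, so string values are fine
    top: '5',                                             // number of opportunities passed to renderTable
    output: 'table',                                      // 'json' switches to renderJSON
    explain: false,                                       // true requires OPENAI_API_KEY or an Ollama/ai config
}).catch(console.error);
```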
package/dist/src/commands/script.d.ts

@@ -0,0 +1,8 @@
+import { SavingsOpportunity } from '../types';
+interface ScriptCommandOptions {
+    opportunity: string;
+    output?: string;
+}
+export declare function scriptCommand(options: ScriptCommandOptions): Promise<void>;
+export declare function generateScriptForOpportunity(opportunity: SavingsOpportunity): string | null;
+export {};
package/dist/src/commands/script.js

@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.scriptCommand = scriptCommand;
+exports.generateScriptForOpportunity = generateScriptForOpportunity;
+const script_generator_1 = require("../services/script-generator");
+const logger_1 = require("../utils/logger");
+async function scriptCommand(options) {
+    (0, logger_1.info)('Script generation is currently only available after running a scan.');
+    (0, logger_1.info)('Usage: cloud-cost-cli scan --provider aws --region us-east-1');
+    (0, logger_1.info)('Then use the displayed resource IDs to generate scripts.');
+    // This is a placeholder - in a real implementation, we'd:
+    // 1. Load scan results from a cache/temp file
+    // 2. Find the opportunity by index or ID
+    // 3. Generate the script
+    // 4. Output to file or stdout
+    (0, logger_1.error)('Script generation requires a recent scan. Run "scan" first.');
+    process.exit(1);
+}
+// Helper function to generate script for a single opportunity
+function generateScriptForOpportunity(opportunity) {
+    const generator = new script_generator_1.ScriptGenerator();
+    const script = generator.generateRemediation(opportunity);
+    if (!script) {
+        return null;
+    }
+    return generator.renderScript(script);
+}
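`scriptCommand` itself is still a placeholder, but the exported helper can be called directly. A small sketch follows, assuming the opportunity shape used by the Azure analyzers later in this diff and that `ScriptGenerator` has a remediation template for the given resource type (if not, the helper returns `null`); the object values are illustrative only.

```js
// Hypothetical usage sketch; the opportunity object is made up for illustration.
const { generateScriptForOpportunity } = require('cloud-cost-cli/dist/src/commands/script');

const script = generateScriptForOpportunity({
    id: 'azure-disk-unattached-example-disk',
    provider: 'azure',
    resourceType: 'disk',
    resourceId: '/subscriptions/.../disks/example-disk', // placeholder ID
    resourceName: 'example-disk',
    category: 'unused',
    currentCost: 17.28,        // 128 GB Premium SSD at the flat rate used by disks.js
    estimatedSavings: 17.28,
    confidence: 'high',
    recommendation: 'Unattached disk (128 GB). Delete if no longer needed.',
    metadata: { sizeGB: 128, diskType: 'Premium_LRS' },
    detectedAt: new Date(),
});

// null means the generator has no remediation template for this resource type.
if (script) {
    console.log(script);
}
```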
package/dist/src/providers/azure/client.d.ts

@@ -0,0 +1,20 @@
+import { ComputeManagementClient } from '@azure/arm-compute';
+import { StorageManagementClient } from '@azure/arm-storage';
+import { SqlManagementClient } from '@azure/arm-sql';
+import { NetworkManagementClient } from '@azure/arm-network';
+import { MonitorClient } from '@azure/arm-monitor';
+export interface AzureClientConfig {
+    subscriptionId?: string;
+    location?: string;
+}
+export declare class AzureClient {
+    private credential;
+    subscriptionId: string;
+    location: string;
+    constructor(config?: AzureClientConfig);
+    getComputeClient(): ComputeManagementClient;
+    getStorageClient(): StorageManagementClient;
+    getSqlClient(): SqlManagementClient;
+    getNetworkClient(): NetworkManagementClient;
+    getMonitorClient(): MonitorClient;
+}
package/dist/src/providers/azure/client.js

@@ -0,0 +1,41 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AzureClient = void 0;
+const identity_1 = require("@azure/identity");
+const arm_compute_1 = require("@azure/arm-compute");
+const arm_storage_1 = require("@azure/arm-storage");
+const arm_sql_1 = require("@azure/arm-sql");
+const arm_network_1 = require("@azure/arm-network");
+const arm_monitor_1 = require("@azure/arm-monitor");
+class AzureClient {
+    credential;
+    subscriptionId;
+    location;
+    constructor(config = {}) {
+        // Use Azure SDK's default credential chain (env vars, CLI, managed identity)
+        this.credential = new identity_1.DefaultAzureCredential();
+        // Get subscription ID from env or config
+        this.subscriptionId = config.subscriptionId || process.env.AZURE_SUBSCRIPTION_ID || '';
+        if (!this.subscriptionId) {
+            throw new Error('Azure subscription ID not found. Set AZURE_SUBSCRIPTION_ID environment variable or use --subscription-id flag.');
+        }
+        // Default to East US if no location specified
+        this.location = config.location || '';
+    }
+    getComputeClient() {
+        return new arm_compute_1.ComputeManagementClient(this.credential, this.subscriptionId);
+    }
+    getStorageClient() {
+        return new arm_storage_1.StorageManagementClient(this.credential, this.subscriptionId);
+    }
+    getSqlClient() {
+        return new arm_sql_1.SqlManagementClient(this.credential, this.subscriptionId);
+    }
+    getNetworkClient() {
+        return new arm_network_1.NetworkManagementClient(this.credential, this.subscriptionId);
+    }
+    getMonitorClient() {
+        return new arm_monitor_1.MonitorClient(this.credential, this.subscriptionId);
+    }
+}
+exports.AzureClient = AzureClient;
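A minimal sketch of constructing the new `AzureClient` directly, based only on the class shown above; the deep require path into `dist/` is an assumption. Credentials come from `DefaultAzureCredential` (environment variables, Azure CLI login, or managed identity), and the constructor throws if no subscription ID is available.

```js
// Hypothetical usage sketch based on the AzureClient class in this hunk.
const { AzureClient } = require('cloud-cost-cli/dist/src/providers/azure/client');

// Throws unless config.subscriptionId or AZURE_SUBSCRIPTION_ID is set.
const client = new AzureClient({ location: 'eastus' });

const compute = client.getComputeClient();  // @azure/arm-compute ComputeManagementClient
const network = client.getNetworkClient();  // @azure/arm-network NetworkManagementClient
```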
package/dist/src/providers/azure/disks.js

@@ -0,0 +1,87 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AZURE_DISK_PRICING = void 0;
+exports.analyzeAzureDisks = analyzeAzureDisks;
+// Azure Managed Disk pricing (per GB/month, East US)
+exports.AZURE_DISK_PRICING = {
+    'Premium_LRS': 0.135, // Premium SSD
+    'StandardSSD_LRS': 0.075, // Standard SSD
+    'Standard_LRS': 0.045, // Standard HDD
+};
+function getDiskMonthlyCost(sizeGB, diskType) {
+    const pricePerGB = exports.AZURE_DISK_PRICING[diskType] || 0.075;
+    return sizeGB * pricePerGB;
+}
+async function analyzeAzureDisks(client) {
+    const computeClient = client.getComputeClient();
+    const opportunities = [];
+    try {
+        // List all managed disks
+        const disks = computeClient.disks.list();
+        for await (const disk of disks) {
+            if (!disk.id || !disk.name)
+                continue;
+            // Filter by location if specified
+            if (client.location && disk.location?.toLowerCase() !== client.location.toLowerCase()) {
+                continue;
+            }
+            const sizeGB = disk.diskSizeGB || 0;
+            const diskType = disk.sku?.name || 'Standard_LRS';
+            const currentCost = getDiskMonthlyCost(sizeGB, diskType);
+            // Opportunity 1: Unattached disk
+            if (disk.diskState === 'Unattached') {
+                opportunities.push({
+                    id: `azure-disk-unattached-${disk.name}`,
+                    provider: 'azure',
+                    resourceType: 'disk',
+                    resourceId: disk.id,
+                    resourceName: disk.name,
+                    category: 'unused',
+                    currentCost,
+                    estimatedSavings: currentCost,
+                    confidence: 'high',
+                    recommendation: `Unattached disk (${sizeGB} GB). Delete if no longer needed.`,
+                    metadata: {
+                        sizeGB,
+                        diskType,
+                        location: disk.location,
+                        diskState: disk.diskState,
+                    },
+                    detectedAt: new Date(),
+                });
+            }
+            // Opportunity 2: Premium disk that could be Standard SSD
+            else if (diskType === 'Premium_LRS' && sizeGB < 256) {
+                const newType = 'StandardSSD_LRS';
+                const newCost = getDiskMonthlyCost(sizeGB, newType);
+                const savings = currentCost - newCost;
+                if (savings > 5) {
+                    opportunities.push({
+                        id: `azure-disk-premium-${disk.name}`,
+                        provider: 'azure',
+                        resourceType: 'disk',
+                        resourceId: disk.id,
+                        resourceName: disk.name,
+                        category: 'oversized',
+                        currentCost,
+                        estimatedSavings: savings,
+                        confidence: 'medium',
+                        recommendation: `Consider switching from Premium SSD to Standard SSD for non-performance-critical workloads.`,
+                        metadata: {
+                            sizeGB,
+                            currentType: diskType,
+                            suggestedType: newType,
+                            location: disk.location,
+                        },
+                        detectedAt: new Date(),
+                    });
+                }
+            }
+        }
+        return opportunities;
+    }
+    catch (error) {
+        console.error('Error analyzing Azure disks:', error);
+        return opportunities;
+    }
+}
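The disk analyzer uses a flat per-GB price table rather than live pricing (consistent with the `--accurate` notes in scan.js). Below is a sketch that runs it standalone and reproduces the `getDiskMonthlyCost` arithmetic from the exported table; the deep require paths are assumptions about the published layout.

```js
// Hypothetical usage sketch; prices come from the exported AZURE_DISK_PRICING table above.
const { AzureClient } = require('cloud-cost-cli/dist/src/providers/azure/client');
const { analyzeAzureDisks, AZURE_DISK_PRICING } = require('cloud-cost-cli/dist/src/providers/azure/disks');

// Same arithmetic as getDiskMonthlyCost(): a 128 GB Premium SSD is 128 * 0.135 ≈ $17.28/month.
const sampleEstimate = 128 * AZURE_DISK_PRICING['Premium_LRS'];

async function main() {
    const client = new AzureClient({ location: 'eastus' });
    const opportunities = await analyzeAzureDisks(client);
    console.log(`Disk opportunities: ${opportunities.length} (sample estimate: $${sampleEstimate.toFixed(2)}/month)`);
}

main().catch(console.error);
```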
package/dist/src/providers/azure/index.d.ts

@@ -0,0 +1,6 @@
+export { AzureClient } from './client';
+export { analyzeAzureVMs } from './vms';
+export { analyzeAzureDisks } from './disks';
+export { analyzeAzureStorage } from './storage';
+export { analyzeAzureSQL } from './sql';
+export { analyzeAzurePublicIPs } from './public-ips';
package/dist/src/providers/azure/index.js

@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.analyzeAzurePublicIPs = exports.analyzeAzureSQL = exports.analyzeAzureStorage = exports.analyzeAzureDisks = exports.analyzeAzureVMs = exports.AzureClient = void 0;
+var client_1 = require("./client");
+Object.defineProperty(exports, "AzureClient", { enumerable: true, get: function () { return client_1.AzureClient; } });
+var vms_1 = require("./vms");
+Object.defineProperty(exports, "analyzeAzureVMs", { enumerable: true, get: function () { return vms_1.analyzeAzureVMs; } });
+var disks_1 = require("./disks");
+Object.defineProperty(exports, "analyzeAzureDisks", { enumerable: true, get: function () { return disks_1.analyzeAzureDisks; } });
+var storage_1 = require("./storage");
+Object.defineProperty(exports, "analyzeAzureStorage", { enumerable: true, get: function () { return storage_1.analyzeAzureStorage; } });
+var sql_1 = require("./sql");
+Object.defineProperty(exports, "analyzeAzureSQL", { enumerable: true, get: function () { return sql_1.analyzeAzureSQL; } });
+var public_ips_1 = require("./public-ips");
+Object.defineProperty(exports, "analyzeAzurePublicIPs", { enumerable: true, get: function () { return public_ips_1.analyzeAzurePublicIPs; } });
package/dist/src/providers/azure/public-ips.js

@@ -0,0 +1,47 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.analyzeAzurePublicIPs = analyzeAzurePublicIPs;
+// Azure Public IP pricing (per month, East US)
+const PUBLIC_IP_MONTHLY_COST = 3.65; // Static IP address
+async function analyzeAzurePublicIPs(client) {
+    const networkClient = client.getNetworkClient();
+    const opportunities = [];
+    try {
+        // List all public IP addresses
+        const publicIPs = networkClient.publicIPAddresses.listAll();
+        for await (const ip of publicIPs) {
+            if (!ip.id || !ip.name)
+                continue;
+            // Filter by location if specified
+            if (client.location && ip.location?.toLowerCase() !== client.location.toLowerCase()) {
+                continue;
+            }
+            // Opportunity: Unassociated public IP
+            if (!ip.ipConfiguration) {
+                opportunities.push({
+                    id: `azure-ip-unassociated-${ip.name}`,
+                    provider: 'azure',
+                    resourceType: 'public-ip',
+                    resourceId: ip.id,
+                    resourceName: ip.name,
+                    category: 'unused',
+                    currentCost: PUBLIC_IP_MONTHLY_COST,
+                    estimatedSavings: PUBLIC_IP_MONTHLY_COST,
+                    confidence: 'high',
+                    recommendation: 'Unassociated public IP address. Delete if not needed.',
+                    metadata: {
+                        ipAddress: ip.ipAddress,
+                        allocationMethod: ip.publicIPAllocationMethod,
+                        location: ip.location,
+                    },
+                    detectedAt: new Date(),
+                });
+            }
+        }
+        return opportunities;
+    }
+    catch (error) {
+        console.error('Error analyzing Azure public IPs:', error);
+        return opportunities;
+    }
+}
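The public IP analyzer follows the same pattern as the disk analyzer: list resources, filter by location, and flag unassociated ones at a flat $3.65/month. A closing sketch that wires two Azure analyzers together through the new barrel export and applies the same minimum-savings filter `scanAzure` applies to `options.minSavings`; the deep require path is an assumption.

```js
// Hypothetical usage sketch built from the barrel export (index.js) above.
const azure = require('cloud-cost-cli/dist/src/providers/azure');

async function main() {
    const client = new azure.AzureClient({ location: 'eastus' });
    const [ipOpportunities, diskOpportunities] = await Promise.all([
        azure.analyzeAzurePublicIPs(client),
        azure.analyzeAzureDisks(client),
    ]);
    // Same threshold filter scanAzure applies to options.minSavings.
    const minSavings = 5;
    const worthwhile = [...ipOpportunities, ...diskOpportunities]
        .filter((opp) => opp.estimatedSavings >= minSavings);
    console.log(`Found ${worthwhile.length} opportunities saving at least $${minSavings}/month`);
}

main().catch(console.error);
```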