gitgreen 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +23 -0
- package/dist/cli.js +2 -0
- package/dist/init.js +134 -15
- package/dist/lib/integrations/db-setup.js +122 -0
- package/dist/lib/integrations/output-integrations.js +163 -0
- package/dist/lib/integrations/types.js +2 -0
- package/package.json +4 -1
package/README.md
CHANGED
@@ -140,6 +140,29 @@ Pipeline starts → component script fetches CPU/RAM timeseries from GCP Monitor
   → optional MR note when CI_JOB_TOKEN is present
 ```
 
+## Output Integrations
+
+During `gitgreen init` you can opt into exporting GitGreen data to external systems. The wizard includes an integration step with two optional sinks:
+
+- **Per-job carbon data** – emissions, runtime, and runner tags for every CI job.
+- **Runner inventory** – the machine catalog that powers your GitLab runners, including machine type and scope 3 estimates.
+
+Built-in connectors today:
+- **MySQL** – populates `GITGREEN_JOB_MYSQL_*` / `GITGREEN_RUNNER_MYSQL_*` and inserts rows through a standard MySQL client.
+- **PostgreSQL** – captures host, port, credentials, schema, table, and SSL mode (`GITGREEN_JOB_POSTGRES_*` / `GITGREEN_RUNNER_POSTGRES_*`) for storage in Postgres.
+
+When you select either connector, the wizard captures host, port, username, password, database, and target table names and stores them in CI/CD variables. It immediately connects with those credentials to ensure the database, schema, and table exist (creating them with a generic `payload JSON/JSONB` + `ingested_at` structure when missing). The GitLab component then uses those variables to insert rows via the corresponding database driver.
+
+### Extending the interface
+
+Additional connectors can be added without touching the wizard logic. Each destination implements the `OutputIntegration` interface in `src/lib/integrations/output-integrations.ts`, which specifies:
+
+1. Display metadata (`id`, `name`, `description`)
+2. The data target it handles (`job` vs `runner`)
+3. Prompted credential fields (label, env var key, input type, default, mask flag)
+
+To add another sink (for example PostgreSQL or a webhook), create a new entry in that file with the fields your integration needs. Re-run `gitgreen init` and the option will automatically appear in the integration step.
+
 ## Adding a provider
 1. Extend `CloudProvider` and the provider guard in `src/index.ts` so the calculator accepts the new key.
 2. Add machine power data (`<provider>_machine_power_profiles.json`) and, if needed, CPU profiles to `data/`, then update `PowerProfileRepository.loadMachineData` to load it.
package/dist/cli.js
CHANGED
@@ -9,6 +9,7 @@ const commander_1 = require("commander");
 const kleur_1 = require("kleur");
 const asciichart_1 = __importDefault(require("asciichart"));
 const config_1 = require("./config");
+const package_json_1 = require("../package.json");
 const index_1 = require("./index");
 const init_1 = require("./init");
 const cloudwatch_1 = require("./lib/aws/cloudwatch");
@@ -279,6 +280,7 @@ const runCalculate = async (opts) => {
 };
 program
     .name('gitgreen')
+    .version(package_json_1.version)
     .description('GitGreen carbon calculator using real timeseries metrics')
     .option('-p, --provider <provider>', 'Cloud provider (gcp|aws)', config_1.config.defaultProvider)
     .option('-m, --machine <type>', 'Machine type (e.g., e2-standard-4)')
package/dist/init.js
CHANGED
@@ -11,6 +11,7 @@ const axios_1 = __importDefault(require("axios"));
 const prompts_1 = __importDefault(require("prompts"));
 const kleur_1 = require("kleur");
 const power_profile_repository_1 = require("./lib/carbon/power-profile-repository");
+const output_integrations_1 = require("./lib/integrations/output-integrations");
 const hasGlab = () => {
     try {
         (0, child_process_1.execSync)('glab --version', { stdio: 'ignore' });
@@ -81,6 +82,97 @@ const setVariable = async (auth, project, key, value, masked = false) => {
         return setVariableApi(auth.baseUrl, auth.pat, project, key, value, masked);
     }
 };
+const askIntegrationFieldValue = async (field) => {
+    const promptType = field.type === 'password' ? 'password' : field.type === 'number' ? 'number' : 'text';
+    if (field.helpText) {
+        console.log((0, kleur_1.gray)(field.helpText));
+    }
+    const envInitial = process.env[field.envKey];
+    const initialValue = envInitial ?? field.initial;
+    const initial = promptType === 'number' && initialValue !== undefined
+        ? (() => {
+            const parsed = Number(initialValue);
+            return Number.isFinite(parsed) ? parsed : undefined;
+        })()
+        : initialValue;
+    const answer = await (0, prompts_1.default)({
+        type: promptType,
+        name: 'value',
+        message: field.label,
+        initial
+    });
+    const rawValue = answer.value;
+    if (rawValue === undefined || rawValue === null || rawValue === '') {
+        if (field.required) {
+            console.log((0, kleur_1.red)(`${field.label} is required.`));
+            process.exit(1);
+        }
+        return undefined;
+    }
+    return String(rawValue);
+};
+const promptIntegrationFields = async (integration) => {
+    const collected = [];
+    const valuesByField = {};
+    for (const field of integration.fields) {
+        const value = await askIntegrationFieldValue(field);
+        if (value !== undefined) {
+            collected.push({ key: field.envKey, value, masked: Boolean(field.masked) });
+            valuesByField[field.id] = value;
+        }
+    }
+    return { variables: collected, values: valuesByField };
+};
+const selectIntegrationForTarget = async (target) => {
+    const available = output_integrations_1.outputIntegrations.filter(integration => integration.target === target);
+    if (!available.length)
+        return [];
+    const targetLabel = target === 'job' ? 'per-job carbon data' : 'runner inventory';
+    const { integrationId } = await (0, prompts_1.default)({
+        type: 'select',
+        name: 'integrationId',
+        message: `Output integration for ${targetLabel}`,
+        choices: [
+            { title: 'Skip', description: 'Do not export data', value: '_skip_' },
+            ...available.map(integration => ({
+                title: integration.name,
+                description: integration.description,
+                value: integration.id
+            }))
+        ]
+    });
+    if (!integrationId || integrationId === '_skip_') {
+        return [];
+    }
+    const integration = available.find(item => item.id === integrationId);
+    if (!integration)
+        return [];
+    console.log((0, kleur_1.gray)(`\n${integration.name}`));
+    if (integration.description) {
+        console.log((0, kleur_1.gray)(integration.description));
+    }
+    const { variables, values } = await promptIntegrationFields(integration);
+    if (integration.ensure) {
+        try {
+            await integration.ensure(values);
+        }
+        catch (errorState) {
+            console.log((0, kleur_1.red)(`Failed to verify ${integration.name}: ${errorState?.message || errorState}`));
+            process.exit(1);
+        }
+    }
+    return variables;
+};
+const promptOutputIntegrations = async (stepNumber) => {
+    if (!output_integrations_1.outputIntegrations.length)
+        return [];
+    console.log((0, kleur_1.gray)(`\nStep ${stepNumber}: Output Integrations`));
+    console.log((0, kleur_1.gray)('─'.repeat(40)));
+    console.log((0, kleur_1.gray)('Optionally send carbon job data or runner metadata to an external system.'));
+    const jobVars = await selectIntegrationForTarget('job');
+    const runnerVars = await selectIntegrationForTarget('runner');
+    return [...jobVars, ...runnerVars];
+};
 const generateCiJob = (opts) => {
     const { provider, runnerTag, carbonBudget, failOnBudget } = opts;
     let inputs = `  provider: ${provider}
@@ -259,6 +351,7 @@ const buildAwsUserData = (params) => {
     'dnf install -y gitlab-runner',
     'curl -fsSL https://rpm.nodesource.com/setup_20.x | bash -',
     'dnf install -y nodejs',
+    'npm install -g gitgreen@latest',
     '',
     'INSTANCE_ID=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)',
     '',
@@ -614,7 +707,9 @@ const runAwsInit = async (auth, projectPath) => {
     });
     failOnBudget = shouldFail;
     }
-
+    // Step 8: Output integrations
+    const integrationVariables = await promptOutputIntegrations(8);
+    console.log((0, kleur_1.gray)('\nStep 9: Setting CI/CD Variables'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     const variables = [
         { key: 'AWS_ACCESS_KEY_ID', value: accessKeyId, masked: true },
@@ -634,6 +729,9 @@ const runAwsInit = async (auth, projectPath) => {
     if (failOnBudget) {
         variables.push({ key: 'FAIL_ON_BUDGET', value: 'true', masked: false });
     }
+    if (integrationVariables.length) {
+        variables.push(...integrationVariables);
+    }
     for (const v of variables) {
         const ok = await setVariable(auth, projectPath, v.key, v.value, v.masked);
         if (ok) {
@@ -643,7 +741,7 @@ const runAwsInit = async (auth, projectPath) => {
         console.log((0, kleur_1.red)(' Failed: ' + v.key));
         }
     }
-    console.log((0, kleur_1.gray)('\nStep
+    console.log((0, kleur_1.gray)('\nStep 10: CI Configuration'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     let runnerTagForCi = runnerTag;
     if (!runnerTagForCi) {
@@ -889,11 +987,11 @@ const runInit = async (opts = {}) => {
     (0, child_process_1.execSync)(`gcloud projects add-iam-policy-binding ${gcpProjectId} --member="serviceAccount:${saEmail}" --role="roles/monitoring.viewer" --quiet 2>/dev/null`, { stdio: 'pipe' });
     }
     catch { }
-    // Check if VM exists
+    // Check if VM exists and is running
     let vmExists = false;
     try {
-    (0, child_process_1.execSync)(`gcloud compute instances describe ${vmName} --project=${gcpProjectId} --zone=${gcpZone} 2>/dev/null`, {
-    vmExists =
+    const status = (0, child_process_1.execSync)(`gcloud compute instances describe ${vmName} --project=${gcpProjectId} --zone=${gcpZone} --format="value(status)" 2>/dev/null`, { encoding: 'utf8' }).trim();
+    vmExists = status === 'RUNNING';
     }
     catch { }
     const startupScript = `#!/usr/bin/env bash
@@ -904,6 +1002,7 @@ apt-get install -y curl ca-certificates python3
 # Install Node.js 20.x
 curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
 apt-get install -y nodejs
+npm install -g gitgreen@latest
 
 # Install GitLab Runner
 curl -L https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh | bash
@@ -944,6 +1043,12 @@ systemctl start gitlab-runner
     try {
         (0, child_process_1.execSync)(`gcloud compute instances create ${vmName} --project=${gcpProjectId} --zone=${gcpZone} --machine-type=${vmMachineType} --service-account=${saEmail} --scopes=https://www.googleapis.com/auth/cloud-platform --metadata=runner-token="${runnerToken}" --metadata-from-file=startup-script=${tmpFile} --image-family=debian-12 --image-project=debian-cloud`, { stdio: 'inherit' });
         console.log((0, kleur_1.green)(`VM ${vmName} created. Runner will register in 2-3 minutes.`));
+        // Get the instance ID of the newly created VM
+        try {
+            const instanceInfo = (0, child_process_1.execSync)(`gcloud compute instances describe ${vmName} --project=${gcpProjectId} --zone=${gcpZone} --format="value(id)"`, { encoding: 'utf8' });
+            gcpInstanceId = instanceInfo.trim();
+        }
+        catch { }
     }
     catch (err) {
         console.log((0, kleur_1.red)('Failed to create VM: ' + err.message));
@@ -1104,7 +1209,7 @@ systemctl start gitlab-runner
     }
     } // end else (use existing runner)
     // Step 7: Service Account
-    console.log((0, kleur_1.gray)('\nStep
+    console.log((0, kleur_1.gray)('\nStep 7: Service Account'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     let saKeyBase64;
     const { keyMethod } = await (0, prompts_1.default)({
@@ -1144,7 +1249,17 @@ systemctl start gitlab-runner
     // Create key
     console.log((0, kleur_1.gray)('Creating key...'));
     const tmpKeyPath = `/tmp/gitgreen-sa-key-${Date.now()}.json`;
-
+    try {
+        (0, child_process_1.execSync)(`gcloud iam service-accounts keys create ${tmpKeyPath} --iam-account=${saEmail}`, { stdio: 'inherit' });
+    }
+    catch {
+        console.log((0, kleur_1.red)('\nFailed to create service account key.'));
+        console.log((0, kleur_1.gray)('This usually means the service account has too many keys (limit: 10).'));
+        console.log((0, kleur_1.gray)('\nTo fix, delete old keys:'));
+        console.log((0, kleur_1.green)(` gcloud iam service-accounts keys list --iam-account=${saEmail}`));
+        console.log((0, kleur_1.green)(` gcloud iam service-accounts keys delete KEY_ID --iam-account=${saEmail}`));
+        process.exit(1);
+    }
     saKeyBase64 = fs_1.default.readFileSync(tmpKeyPath).toString('base64');
     fs_1.default.unlinkSync(tmpKeyPath);
     console.log((0, kleur_1.green)('Service account key created'));
@@ -1161,8 +1276,8 @@ systemctl start gitlab-runner
     }
     saKeyBase64 = fs_1.default.readFileSync(saKeyPath).toString('base64');
     }
-    // Step
-    console.log((0, kleur_1.gray)('\nStep
+    // Step 8: Electricity Maps API
+    console.log((0, kleur_1.gray)('\nStep 8: Electricity Maps API'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     console.log((0, kleur_1.gray)('Get free key: https://api-portal.electricitymaps.com'));
     const { electricityMapsKey } = await (0, prompts_1.default)({
@@ -1174,8 +1289,8 @@ systemctl start gitlab-runner
     console.log((0, kleur_1.red)('API key required'));
     process.exit(1);
     }
-    // Step
-    console.log((0, kleur_1.gray)('\nStep
+    // Step 9: Optional
+    console.log((0, kleur_1.gray)('\nStep 9: Optional Settings'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     console.log((0, kleur_1.gray)('Set a carbon budget to track emissions against a limit.'));
     console.log((0, kleur_1.gray)('Example: 10 grams CO2e per job. Leave empty to skip.\n'));
@@ -1195,8 +1310,9 @@ systemctl start gitlab-runner
     });
     failOnBudget = shouldFail;
     }
-    // Step
-
+    // Step 10: Output integrations
+    const gcpIntegrationVariables = await promptOutputIntegrations(10);
+    console.log((0, kleur_1.gray)('\nStep 11: Setting CI/CD Variables'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     const variables = [
         { key: 'GCP_PROJECT_ID', value: gcpProjectId, masked: false },
@@ -1212,6 +1328,9 @@ systemctl start gitlab-runner
     if (failOnBudget) {
         variables.push({ key: 'FAIL_ON_BUDGET', value: 'true', masked: false });
     }
+    if (gcpIntegrationVariables.length) {
+        variables.push(...gcpIntegrationVariables);
+    }
     for (const v of variables) {
         const ok = await setVariable(auth, projectPath, v.key, v.value, v.masked);
         if (ok) {
@@ -1221,8 +1340,8 @@ systemctl start gitlab-runner
         console.log((0, kleur_1.red)(' Failed: ' + v.key));
         }
     }
-    // Step
-    console.log((0, kleur_1.gray)('\nStep
+    // Step 12: Generate CI job
+    console.log((0, kleur_1.gray)('\nStep 12: CI Configuration'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     // Only prompt for runner tag if not already set from provisioning
     if (!runnerTag) {
package/dist/lib/integrations/db-setup.js
ADDED
@@ -0,0 +1,122 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ensurePostgresResources = exports.ensureMysqlResources = void 0;
+const promise_1 = __importDefault(require("mysql2/promise"));
+const pg_1 = require("pg");
+const kleur_1 = require("kleur");
+const escapeMysqlIdentifier = (value) => {
+    return '`' + value.replace(/`/g, '``') + '`';
+};
+const escapePgIdentifier = (value) => {
+    return '"' + value.replace(/"/g, '""') + '"';
+};
+const getTableDefinition = (_target) => {
+    return `(
+        id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
+        ingested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+        payload JSON NOT NULL,
+        PRIMARY KEY (id)
+    ) ENGINE=InnoDB`;
+};
+const getPgTableDefinition = (_target) => {
+    return `(
+        id BIGSERIAL PRIMARY KEY,
+        ingested_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+        payload JSONB NOT NULL
+    )`;
+};
+const normalizeHost = (input) => {
+    if (!input)
+        return input;
+    const trimmed = input.trim();
+    const withoutProtocol = trimmed.replace(/^[a-zA-Z]+:\/\//, '');
+    return withoutProtocol.split(/[/?#]/)[0];
+};
+const buildPgSslConfig = (mode) => {
+    if (!mode || mode === 'disable') {
+        return undefined;
+    }
+    if (mode === 'verify-full') {
+        return { rejectUnauthorized: true };
+    }
+    // Modes like "require", "allow", "prefer" fall back to TLS without cert validation
+    return { rejectUnauthorized: false };
+};
+const ensureMysqlResources = async (params) => {
+    const host = normalizeHost(params.host);
+    console.log((0, kleur_1.gray)(`Verifying MySQL database "${params.database}"...`));
+    const serverConnection = await promise_1.default.createConnection({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password
+    });
+    try {
+        await serverConnection.query(`CREATE DATABASE IF NOT EXISTS ${escapeMysqlIdentifier(params.database)}`);
+    }
+    finally {
+        await serverConnection.end();
+    }
+    const dbConnection = await promise_1.default.createConnection({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password,
+        database: params.database
+    });
+    try {
+        const tableDefinition = getTableDefinition(params.target);
+        await dbConnection.query(`CREATE TABLE IF NOT EXISTS ${escapeMysqlIdentifier(params.table)} ${tableDefinition}`);
+    }
+    finally {
+        await dbConnection.end();
+    }
+    console.log((0, kleur_1.green)(`MySQL ready: ${params.database}.${params.table}`));
+};
+exports.ensureMysqlResources = ensureMysqlResources;
+const ensurePostgresResources = async (params) => {
+    const schemaName = params.schema || 'public';
+    const host = normalizeHost(params.host);
+    console.log((0, kleur_1.gray)(`Verifying PostgreSQL database "${params.database}"...`));
+    const adminClient = new pg_1.Client({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password,
+        database: 'postgres',
+        ssl: buildPgSslConfig(params.sslMode)
+    });
+    await adminClient.connect();
+    try {
+        const dbExists = await adminClient.query('SELECT 1 FROM pg_database WHERE datname = $1', [params.database]);
+        if (!dbExists.rowCount) {
+            await adminClient.query(`CREATE DATABASE ${escapePgIdentifier(params.database)}`);
+        }
+    }
+    finally {
+        await adminClient.end();
+    }
+    const dbClient = new pg_1.Client({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password,
+        database: params.database,
+        ssl: buildPgSslConfig(params.sslMode)
+    });
+    await dbClient.connect();
+    try {
+        await dbClient.query(`CREATE SCHEMA IF NOT EXISTS ${escapePgIdentifier(schemaName)}`);
+        const fullTable = `${escapePgIdentifier(schemaName)}.${escapePgIdentifier(params.table)}`;
+        const tableDefinition = getPgTableDefinition(params.target);
+        await dbClient.query(`CREATE TABLE IF NOT EXISTS ${fullTable} ${tableDefinition}`);
+    }
+    finally {
+        await dbClient.end();
+    }
+    console.log((0, kleur_1.green)(`PostgreSQL ready: ${params.database}.${schemaName}.${params.table}`));
+};
+exports.ensurePostgresResources = ensurePostgresResources;
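Editor's note: for orientation, here is a minimal sketch of how one of these helpers is invoked. It mirrors the Postgres integration's `ensure` callback shown in output-integrations.js below, which passes the wizard's prompted values; every connection value here is a placeholder, not real configuration.

```ts
// Sketch only: mirrors the `ensure` callback of the Postgres integration.
// All connection details below are placeholders for the wizard-collected values.
import { ensurePostgresResources } from './db-setup';

async function verifyJobSink(): Promise<void> {
    await ensurePostgresResources({
        host: 'db.example.internal',      // placeholder
        port: 5432,
        username: 'gitgreen',             // placeholder
        password: 'placeholder-password', // placeholder
        database: 'gitgreen',
        schema: 'public',
        sslMode: 'require',
        table: 'gitgreen_carbon_jobs',
        target: 'job'
    });
    // On success the database, schema, and table exist with the generic
    // id / ingested_at / payload (JSONB) layout defined above.
}
```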
package/dist/lib/integrations/output-integrations.js
ADDED
@@ -0,0 +1,163 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.outputIntegrations = void 0;
+const db_setup_1 = require("./db-setup");
+const buildMysqlIntegration = (target) => {
+    const scope = target === 'job' ? 'job' : 'runner';
+    const prefix = target === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER';
+    const tableDefault = target === 'job' ? 'gitgreen_carbon_jobs' : 'gitgreen_runner_inventory';
+    return {
+        id: `mysql-${scope}`,
+        name: `MySQL (${scope === 'job' ? 'per-job emissions' : 'runner inventory'})`,
+        description: target === 'job'
+            ? 'Store each CI job calculation (emissions, runtime, runner tags) inside a MySQL table.'
+            : 'Store runner metadata (machine type, scope 3 estimates, tags) alongside usage metrics.',
+        target,
+        fields: [
+            {
+                id: 'host',
+                label: 'MySQL host',
+                envKey: `${prefix}_MYSQL_HOST`,
+                initial: '127.0.0.1',
+                required: true
+            },
+            {
+                id: 'port',
+                label: 'MySQL port',
+                envKey: `${prefix}_MYSQL_PORT`,
+                type: 'number',
+                initial: '3306',
+                required: true
+            },
+            {
+                id: 'username',
+                label: 'MySQL username',
+                envKey: `${prefix}_MYSQL_USERNAME`,
+                required: true
+            },
+            {
+                id: 'password',
+                label: 'MySQL password',
+                envKey: `${prefix}_MYSQL_PASSWORD`,
+                type: 'password',
+                required: true,
+                masked: true
+            },
+            {
+                id: 'database',
+                label: 'Database name',
+                envKey: `${prefix}_MYSQL_DATABASE`,
+                required: true
+            },
+            {
+                id: 'table',
+                label: 'Target table',
+                envKey: `${prefix}_MYSQL_TABLE`,
+                initial: tableDefault,
+                required: true,
+                helpText: 'Specify the table that will receive inserts from the GitGreen component.'
+            }
+        ],
+        ensure: async (values) => {
+            await (0, db_setup_1.ensureMysqlResources)({
+                host: values.host,
+                port: values.port ? Number(values.port) : 3306,
+                username: values.username,
+                password: values.password,
+                database: values.database,
+                table: values.table,
+                target
+            });
+        }
+    };
+};
+const buildPostgresIntegration = (target) => {
+    const scope = target === 'job' ? 'job' : 'runner';
+    const prefix = target === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER';
+    const tableDefault = target === 'job' ? 'gitgreen_carbon_jobs' : 'gitgreen_runner_inventory';
+    return {
+        id: `postgres-${scope}`,
+        name: `PostgreSQL (${scope === 'job' ? 'per-job emissions' : 'runner inventory'})`,
+        description: target === 'job'
+            ? 'Insert each CI job calculation into a PostgreSQL table for downstream analytics.'
+            : 'Persist runner metadata in PostgreSQL so you can track each machine in your fleet.',
+        target,
+        fields: [
+            {
+                id: 'host',
+                label: 'PostgreSQL host',
+                envKey: `${prefix}_POSTGRES_HOST`,
+                initial: '127.0.0.1',
+                required: true
+            },
+            {
+                id: 'port',
+                label: 'PostgreSQL port',
+                envKey: `${prefix}_POSTGRES_PORT`,
+                type: 'number',
+                initial: '5432',
+                required: true
+            },
+            {
+                id: 'username',
+                label: 'PostgreSQL username',
+                envKey: `${prefix}_POSTGRES_USERNAME`,
+                required: true
+            },
+            {
+                id: 'password',
+                label: 'PostgreSQL password',
+                envKey: `${prefix}_POSTGRES_PASSWORD`,
+                type: 'password',
+                required: true,
+                masked: true
+            },
+            {
+                id: 'database',
+                label: 'Database name',
+                envKey: `${prefix}_POSTGRES_DATABASE`,
+                required: true
+            },
+            {
+                id: 'schema',
+                label: 'Schema',
+                envKey: `${prefix}_POSTGRES_SCHEMA`,
+                initial: 'public',
+                helpText: 'Leave as "public" unless you organize tables under a different schema.'
+            },
+            {
+                id: 'table',
+                label: 'Target table',
+                envKey: `${prefix}_POSTGRES_TABLE`,
+                initial: tableDefault,
+                required: true
+            },
+            {
+                id: 'sslmode',
+                label: 'SSL mode',
+                envKey: `${prefix}_POSTGRES_SSLMODE`,
+                initial: 'require',
+                helpText: 'Example: require, verify-full, disable'
+            }
+        ],
+        ensure: async (values) => {
+            await (0, db_setup_1.ensurePostgresResources)({
+                host: values.host,
+                port: values.port ? Number(values.port) : 5432,
+                username: values.username,
+                password: values.password,
+                database: values.database,
+                schema: values.schema,
+                sslMode: values.sslmode,
+                table: values.table,
+                target
+            });
+        }
+    };
+};
+exports.outputIntegrations = [
+    buildMysqlIntegration('job'),
+    buildMysqlIntegration('runner'),
+    buildPostgresIntegration('job'),
+    buildPostgresIntegration('runner')
+];
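Editor's note: neither new file shows the consumer side; per the README, the GitLab component performs the actual inserts using the CI/CD variables above, and it is outside this package diff. Purely as a hypothetical illustration of how the provisioned Postgres table is meant to be used, a job-data insert could look like the sketch below. The payload keys are invented for the example (the README only says per-job data covers emissions, runtime, and runner tags); the env var names are the ones defined by the Postgres job integration above, and the SSL handling is simplified.

```ts
// Hypothetical consumer sketch – the real GitLab component is not part of this diff.
import { Client } from 'pg';

async function recordJobRow(): Promise<void> {
    const client = new Client({
        host: process.env.GITGREEN_JOB_POSTGRES_HOST,
        port: Number(process.env.GITGREEN_JOB_POSTGRES_PORT ?? 5432),
        user: process.env.GITGREEN_JOB_POSTGRES_USERNAME,
        password: process.env.GITGREEN_JOB_POSTGRES_PASSWORD,
        database: process.env.GITGREEN_JOB_POSTGRES_DATABASE,
        ssl: process.env.GITGREEN_JOB_POSTGRES_SSLMODE === 'disable'
            ? undefined
            : { rejectUnauthorized: false }
    });
    await client.connect();
    try {
        // Invented example payload; the table only requires a JSON document per row.
        const payload = { emissions_g_co2e: 4.2, runtime_seconds: 310, runner_tags: ['gitgreen'] };
        await client.query(
            'INSERT INTO "public"."gitgreen_carbon_jobs" (payload) VALUES ($1)',
            [JSON.stringify(payload)]
        );
    }
    finally {
        await client.end();
    }
}
```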
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "gitgreen",
-  "version": "1.0.2",
+  "version": "1.0.4",
   "description": "GitGreen CLI for carbon reporting in GitLab pipelines (GCP/AWS)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -41,10 +41,13 @@
     "cubic-spline": "^3.0.3",
     "dotenv": "^16.5.0",
     "kleur": "^4.1.5",
+    "mysql2": "^3.11.3",
+    "pg": "^8.12.0",
     "prompts": "^2.4.2"
   },
   "devDependencies": {
     "@types/node": "^20.11.30",
+    "@types/pg": "^8.11.6",
     "@types/prompts": "^2.4.9",
     "ts-node": "^10.9.2",
     "typescript": "^5.4.0",