gitgreen 1.0.3 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +23 -0
- package/dist/init.js +114 -11
- package/dist/lib/integrations/db-setup.js +122 -0
- package/dist/lib/integrations/output-integrations.js +163 -0
- package/dist/lib/integrations/types.js +2 -0
- package/package.json +4 -1
package/README.md
CHANGED
@@ -140,6 +140,29 @@ Pipeline starts → component script fetches CPU/RAM timeseries from GCP Monitor
 → optional MR note when CI_JOB_TOKEN is present
 ```
 
+## Output Integrations
+
+During `gitgreen init` you can opt into exporting GitGreen data to external systems. The wizard includes an integration step with two optional sinks:
+
+- **Per-job carbon data** – emissions, runtime, and runner tags for every CI job.
+- **Runner inventory** – the machine catalog that powers your GitLab runners, including machine type and scope 3 estimates.
+
+Built-in connectors today:
+- **MySQL** – populates `GITGREEN_JOB_MYSQL_*` / `GITGREEN_RUNNER_MYSQL_*` and inserts rows through a standard MySQL client.
+- **PostgreSQL** – captures host, port, credentials, schema, table, and SSL mode (`GITGREEN_JOB_POSTGRES_*` / `GITGREEN_RUNNER_POSTGRES_*`) for storage in Postgres.
+
+When you select either connector, the wizard captures host, port, username, password, database, and target table names and stores them in CI/CD variables. It immediately connects with those credentials to ensure the database, schema, and table exist (creating them with a generic `payload JSON/JSONB` + `ingested_at` structure when missing). The GitLab component then uses those variables to insert rows via the corresponding database driver.
+
+### Extending the interface
+
+Additional connectors can be added without touching the wizard logic. Each destination implements the `OutputIntegration` interface in `src/lib/integrations/output-integrations.ts`, which specifies:
+
+1. Display metadata (`id`, `name`, `description`)
+2. The data target it handles (`job` vs `runner`)
+3. Prompted credential fields (label, env var key, input type, default, mask flag)
+
+To add another sink (for example PostgreSQL or a webhook), create a new entry in that file with the fields your integration needs. Re-run `gitgreen init` and the option will automatically appear in the integration step.
+
 ## Adding a provider
 1. Extend `CloudProvider` and the provider guard in `src/index.ts` so the calculator accepts the new key.
 2. Add machine power data (`<provider>_machine_power_profiles.json`) and, if needed, CPU profiles to `data/`, then update `PowerProfileRepository.loadMachineData` to load it.
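
The `OutputIntegration` shape that the README hunk above refers to can be read off the compiled connectors later in this diff: display metadata, a `target` of `'job'` or `'runner'`, a `fields` array (`id`, `label`, `envKey`, optional `type`, `initial`, `required`, `masked`, `helpText`), and an optional async `ensure` hook. As a rough sketch only (the webhook sink, its env keys, and the import path are illustrative assumptions, not part of this release), a new entry might look like:

```ts
// Hypothetical additional sink; field names mirror the built-in MySQL/PostgreSQL
// entries in dist/lib/integrations/output-integrations.js further down this diff.
import type { OutputIntegration } from './output-integrations';

export const webhookJobIntegration: OutputIntegration = {
    id: 'webhook-job',
    name: 'Webhook (per-job emissions)',
    description: 'POST each CI job calculation to an HTTP endpoint.',
    target: 'job',
    fields: [
        { id: 'url', label: 'Webhook URL', envKey: 'GITGREEN_JOB_WEBHOOK_URL', required: true },
        { id: 'token', label: 'Bearer token', envKey: 'GITGREEN_JOB_WEBHOOK_TOKEN', type: 'password', masked: true }
    ],
    // Optional pre-flight check; the wizard aborts if this throws.
    ensure: async (values) => {
        const response = await fetch(values.url ?? '', { method: 'HEAD' });
        if (!response.ok) {
            throw new Error(`Webhook endpoint answered ${response.status}`);
        }
    }
};
```

Registering such an entry in the `outputIntegrations` array (shown at the end of `output-integrations.js` below) is what makes it appear as a choice in the wizard's integration step.
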
package/dist/init.js
CHANGED
@@ -11,6 +11,7 @@ const axios_1 = __importDefault(require("axios"));
 const prompts_1 = __importDefault(require("prompts"));
 const kleur_1 = require("kleur");
 const power_profile_repository_1 = require("./lib/carbon/power-profile-repository");
+const output_integrations_1 = require("./lib/integrations/output-integrations");
 const hasGlab = () => {
     try {
         (0, child_process_1.execSync)('glab --version', { stdio: 'ignore' });
@@ -81,6 +82,97 @@ const setVariable = async (auth, project, key, value, masked = false) => {
         return setVariableApi(auth.baseUrl, auth.pat, project, key, value, masked);
     }
 };
+const askIntegrationFieldValue = async (field) => {
+    const promptType = field.type === 'password' ? 'password' : field.type === 'number' ? 'number' : 'text';
+    if (field.helpText) {
+        console.log((0, kleur_1.gray)(field.helpText));
+    }
+    const envInitial = process.env[field.envKey];
+    const initialValue = envInitial ?? field.initial;
+    const initial = promptType === 'number' && initialValue !== undefined
+        ? (() => {
+            const parsed = Number(initialValue);
+            return Number.isFinite(parsed) ? parsed : undefined;
+        })()
+        : initialValue;
+    const answer = await (0, prompts_1.default)({
+        type: promptType,
+        name: 'value',
+        message: field.label,
+        initial
+    });
+    const rawValue = answer.value;
+    if (rawValue === undefined || rawValue === null || rawValue === '') {
+        if (field.required) {
+            console.log((0, kleur_1.red)(`${field.label} is required.`));
+            process.exit(1);
+        }
+        return undefined;
+    }
+    return String(rawValue);
+};
+const promptIntegrationFields = async (integration) => {
+    const collected = [];
+    const valuesByField = {};
+    for (const field of integration.fields) {
+        const value = await askIntegrationFieldValue(field);
+        if (value !== undefined) {
+            collected.push({ key: field.envKey, value, masked: Boolean(field.masked) });
+            valuesByField[field.id] = value;
+        }
+    }
+    return { variables: collected, values: valuesByField };
+};
+const selectIntegrationForTarget = async (target) => {
+    const available = output_integrations_1.outputIntegrations.filter(integration => integration.target === target);
+    if (!available.length)
+        return [];
+    const targetLabel = target === 'job' ? 'per-job carbon data' : 'runner inventory';
+    const { integrationId } = await (0, prompts_1.default)({
+        type: 'select',
+        name: 'integrationId',
+        message: `Output integration for ${targetLabel}`,
+        choices: [
+            { title: 'Skip', description: 'Do not export data', value: '_skip_' },
+            ...available.map(integration => ({
+                title: integration.name,
+                description: integration.description,
+                value: integration.id
+            }))
+        ]
+    });
+    if (!integrationId || integrationId === '_skip_') {
+        return [];
+    }
+    const integration = available.find(item => item.id === integrationId);
+    if (!integration)
+        return [];
+    console.log((0, kleur_1.gray)(`\n${integration.name}`));
+    if (integration.description) {
+        console.log((0, kleur_1.gray)(integration.description));
+    }
+    const { variables, values } = await promptIntegrationFields(integration);
+    if (integration.ensure) {
+        try {
+            await integration.ensure(values);
+        }
+        catch (errorState) {
+            console.log((0, kleur_1.red)(`Failed to verify ${integration.name}: ${errorState?.message || errorState}`));
+            process.exit(1);
+        }
+    }
+    return variables;
+};
+const promptOutputIntegrations = async (stepNumber) => {
+    if (!output_integrations_1.outputIntegrations.length)
+        return [];
+    console.log((0, kleur_1.gray)(`\nStep ${stepNumber}: Output Integrations`));
+    console.log((0, kleur_1.gray)('─'.repeat(40)));
+    console.log((0, kleur_1.gray)('Optionally send carbon job data or runner metadata to an external system.'));
+    const jobVars = await selectIntegrationForTarget('job');
+    const runnerVars = await selectIntegrationForTarget('runner');
+    return [...jobVars, ...runnerVars];
+};
 const generateCiJob = (opts) => {
     const { provider, runnerTag, carbonBudget, failOnBudget } = opts;
     let inputs = ` provider: ${provider}
@@ -259,6 +351,7 @@ const buildAwsUserData = (params) => {
         'dnf install -y gitlab-runner',
         'curl -fsSL https://rpm.nodesource.com/setup_20.x | bash -',
         'dnf install -y nodejs',
+        'npm install -g gitgreen@latest',
         '',
         'INSTANCE_ID=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)',
         '',
@@ -614,7 +707,9 @@ const runAwsInit = async (auth, projectPath) => {
         });
         failOnBudget = shouldFail;
     }
-
+    // Step 8: Output integrations
+    const integrationVariables = await promptOutputIntegrations(8);
+    console.log((0, kleur_1.gray)('\nStep 9: Setting CI/CD Variables'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     const variables = [
         { key: 'AWS_ACCESS_KEY_ID', value: accessKeyId, masked: true },
@@ -634,6 +729,9 @@ const runAwsInit = async (auth, projectPath) => {
     if (failOnBudget) {
         variables.push({ key: 'FAIL_ON_BUDGET', value: 'true', masked: false });
     }
+    if (integrationVariables.length) {
+        variables.push(...integrationVariables);
+    }
     for (const v of variables) {
         const ok = await setVariable(auth, projectPath, v.key, v.value, v.masked);
         if (ok) {
@@ -643,7 +741,7 @@ const runAwsInit = async (auth, projectPath) => {
             console.log((0, kleur_1.red)(' Failed: ' + v.key));
         }
     }
-    console.log((0, kleur_1.gray)('\nStep
+    console.log((0, kleur_1.gray)('\nStep 10: CI Configuration'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     let runnerTagForCi = runnerTag;
     if (!runnerTagForCi) {
@@ -904,6 +1002,7 @@ apt-get install -y curl ca-certificates python3
 # Install Node.js 20.x
 curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
 apt-get install -y nodejs
+npm install -g gitgreen@latest
 
 # Install GitLab Runner
 curl -L https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh | bash
@@ -1110,7 +1209,7 @@ systemctl start gitlab-runner
         }
     } // end else (use existing runner)
     // Step 7: Service Account
-    console.log((0, kleur_1.gray)('\nStep
+    console.log((0, kleur_1.gray)('\nStep 7: Service Account'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     let saKeyBase64;
     const { keyMethod } = await (0, prompts_1.default)({
@@ -1177,8 +1276,8 @@ systemctl start gitlab-runner
         }
         saKeyBase64 = fs_1.default.readFileSync(saKeyPath).toString('base64');
     }
-    // Step
-    console.log((0, kleur_1.gray)('\nStep
+    // Step 8: Electricity Maps API
+    console.log((0, kleur_1.gray)('\nStep 8: Electricity Maps API'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     console.log((0, kleur_1.gray)('Get free key: https://api-portal.electricitymaps.com'));
     const { electricityMapsKey } = await (0, prompts_1.default)({
@@ -1190,8 +1289,8 @@ systemctl start gitlab-runner
         console.log((0, kleur_1.red)('API key required'));
         process.exit(1);
     }
-    // Step
-    console.log((0, kleur_1.gray)('\nStep
+    // Step 9: Optional
+    console.log((0, kleur_1.gray)('\nStep 9: Optional Settings'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     console.log((0, kleur_1.gray)('Set a carbon budget to track emissions against a limit.'));
     console.log((0, kleur_1.gray)('Example: 10 grams CO2e per job. Leave empty to skip.\n'));
@@ -1211,8 +1310,9 @@ systemctl start gitlab-runner
         });
         failOnBudget = shouldFail;
     }
-    // Step
-
+    // Step 10: Output integrations
+    const gcpIntegrationVariables = await promptOutputIntegrations(10);
+    console.log((0, kleur_1.gray)('\nStep 11: Setting CI/CD Variables'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     const variables = [
         { key: 'GCP_PROJECT_ID', value: gcpProjectId, masked: false },
@@ -1228,6 +1328,9 @@ systemctl start gitlab-runner
     if (failOnBudget) {
         variables.push({ key: 'FAIL_ON_BUDGET', value: 'true', masked: false });
     }
+    if (gcpIntegrationVariables.length) {
+        variables.push(...gcpIntegrationVariables);
+    }
     for (const v of variables) {
         const ok = await setVariable(auth, projectPath, v.key, v.value, v.masked);
         if (ok) {
@@ -1237,8 +1340,8 @@ systemctl start gitlab-runner
             console.log((0, kleur_1.red)(' Failed: ' + v.key));
         }
     }
-    // Step
-    console.log((0, kleur_1.gray)('\nStep
+    // Step 12: Generate CI job
+    console.log((0, kleur_1.gray)('\nStep 12: CI Configuration'));
     console.log((0, kleur_1.gray)('─'.repeat(40)));
     // Only prompt for runner tag if not already set from provisioning
     if (!runnerTag) {

package/dist/lib/integrations/db-setup.js
ADDED

@@ -0,0 +1,122 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ensurePostgresResources = exports.ensureMysqlResources = void 0;
+const promise_1 = __importDefault(require("mysql2/promise"));
+const pg_1 = require("pg");
+const kleur_1 = require("kleur");
+const escapeMysqlIdentifier = (value) => {
+    return '`' + value.replace(/`/g, '``') + '`';
+};
+const escapePgIdentifier = (value) => {
+    return '"' + value.replace(/"/g, '""') + '"';
+};
+const getTableDefinition = (_target) => {
+    return `(
+        id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
+        ingested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+        payload JSON NOT NULL,
+        PRIMARY KEY (id)
+    ) ENGINE=InnoDB`;
+};
+const getPgTableDefinition = (_target) => {
+    return `(
+        id BIGSERIAL PRIMARY KEY,
+        ingested_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+        payload JSONB NOT NULL
+    )`;
+};
+const normalizeHost = (input) => {
+    if (!input)
+        return input;
+    const trimmed = input.trim();
+    const withoutProtocol = trimmed.replace(/^[a-zA-Z]+:\/\//, '');
+    return withoutProtocol.split(/[/?#]/)[0];
+};
+const buildPgSslConfig = (mode) => {
+    if (!mode || mode === 'disable') {
+        return undefined;
+    }
+    if (mode === 'verify-full') {
+        return { rejectUnauthorized: true };
+    }
+    // Modes like "require", "allow", "prefer" fall back to TLS without cert validation
+    return { rejectUnauthorized: false };
+};
+const ensureMysqlResources = async (params) => {
+    const host = normalizeHost(params.host);
+    console.log((0, kleur_1.gray)(`Verifying MySQL database "${params.database}"...`));
+    const serverConnection = await promise_1.default.createConnection({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password
+    });
+    try {
+        await serverConnection.query(`CREATE DATABASE IF NOT EXISTS ${escapeMysqlIdentifier(params.database)}`);
+    }
+    finally {
+        await serverConnection.end();
+    }
+    const dbConnection = await promise_1.default.createConnection({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password,
+        database: params.database
+    });
+    try {
+        const tableDefinition = getTableDefinition(params.target);
+        await dbConnection.query(`CREATE TABLE IF NOT EXISTS ${escapeMysqlIdentifier(params.table)} ${tableDefinition}`);
+    }
+    finally {
+        await dbConnection.end();
+    }
+    console.log((0, kleur_1.green)(`MySQL ready: ${params.database}.${params.table}`));
+};
+exports.ensureMysqlResources = ensureMysqlResources;
+const ensurePostgresResources = async (params) => {
+    const schemaName = params.schema || 'public';
+    const host = normalizeHost(params.host);
+    console.log((0, kleur_1.gray)(`Verifying PostgreSQL database "${params.database}"...`));
+    const adminClient = new pg_1.Client({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password,
+        database: 'postgres',
+        ssl: buildPgSslConfig(params.sslMode)
+    });
+    await adminClient.connect();
+    try {
+        const dbExists = await adminClient.query('SELECT 1 FROM pg_database WHERE datname = $1', [params.database]);
+        if (!dbExists.rowCount) {
+            await adminClient.query(`CREATE DATABASE ${escapePgIdentifier(params.database)}`);
+        }
+    }
+    finally {
+        await adminClient.end();
+    }
+    const dbClient = new pg_1.Client({
+        host,
+        port: params.port,
+        user: params.username,
+        password: params.password,
+        database: params.database,
+        ssl: buildPgSslConfig(params.sslMode)
+    });
+    await dbClient.connect();
+    try {
+        await dbClient.query(`CREATE SCHEMA IF NOT EXISTS ${escapePgIdentifier(schemaName)}`);
+        const fullTable = `${escapePgIdentifier(schemaName)}.${escapePgIdentifier(params.table)}`;
+        const tableDefinition = getPgTableDefinition(params.target);
+        await dbClient.query(`CREATE TABLE IF NOT EXISTS ${fullTable} ${tableDefinition}`);
+    }
+    finally {
+        await dbClient.end();
+    }
+    console.log((0, kleur_1.green)(`PostgreSQL ready: ${params.database}.${schemaName}.${params.table}`));
+};
+exports.ensurePostgresResources = ensurePostgresResources;
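
For reference, `ensureMysqlResources` and `ensurePostgresResources` above are exactly what the wizard's `ensure` hooks call during the integration step. A minimal standalone sketch of the PostgreSQL path (the deep `dist/...` import path and all connection values are assumptions, not documented entry points):

```ts
// Sketch only: pre-creates the database, schema, and table the same way the wizard does.
// Assumes the compiled module can be imported from the published dist/ layout.
import { ensurePostgresResources } from 'gitgreen/dist/lib/integrations/db-setup';

await ensurePostgresResources({
    host: '127.0.0.1',
    port: 5432,
    username: 'gitgreen',
    password: process.env.PGPASSWORD ?? '',
    database: 'observability',
    schema: 'public',
    sslMode: 'disable',            // 'disable' skips TLS; 'verify-full' validates certs; other modes use TLS without validation
    table: 'gitgreen_carbon_jobs',
    target: 'job'                  // 'job' and 'runner' currently produce the same payload/ingested_at shape
});
```

On success it prints `PostgreSQL ready: observability.public.gitgreen_carbon_jobs`, matching the `kleur` output in the code above.
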

package/dist/lib/integrations/output-integrations.js
ADDED

@@ -0,0 +1,163 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.outputIntegrations = void 0;
+const db_setup_1 = require("./db-setup");
+const buildMysqlIntegration = (target) => {
+    const scope = target === 'job' ? 'job' : 'runner';
+    const prefix = target === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER';
+    const tableDefault = target === 'job' ? 'gitgreen_carbon_jobs' : 'gitgreen_runner_inventory';
+    return {
+        id: `mysql-${scope}`,
+        name: `MySQL (${scope === 'job' ? 'per-job emissions' : 'runner inventory'})`,
+        description: target === 'job'
+            ? 'Store each CI job calculation (emissions, runtime, runner tags) inside a MySQL table.'
+            : 'Store runner metadata (machine type, scope 3 estimates, tags) alongside usage metrics.',
+        target,
+        fields: [
+            {
+                id: 'host',
+                label: 'MySQL host',
+                envKey: `${prefix}_MYSQL_HOST`,
+                initial: '127.0.0.1',
+                required: true
+            },
+            {
+                id: 'port',
+                label: 'MySQL port',
+                envKey: `${prefix}_MYSQL_PORT`,
+                type: 'number',
+                initial: '3306',
+                required: true
+            },
+            {
+                id: 'username',
+                label: 'MySQL username',
+                envKey: `${prefix}_MYSQL_USERNAME`,
+                required: true
+            },
+            {
+                id: 'password',
+                label: 'MySQL password',
+                envKey: `${prefix}_MYSQL_PASSWORD`,
+                type: 'password',
+                required: true,
+                masked: true
+            },
+            {
+                id: 'database',
+                label: 'Database name',
+                envKey: `${prefix}_MYSQL_DATABASE`,
+                required: true
+            },
+            {
+                id: 'table',
+                label: 'Target table',
+                envKey: `${prefix}_MYSQL_TABLE`,
+                initial: tableDefault,
+                required: true,
+                helpText: 'Specify the table that will receive inserts from the GitGreen component.'
+            }
+        ],
+        ensure: async (values) => {
+            await (0, db_setup_1.ensureMysqlResources)({
+                host: values.host,
+                port: values.port ? Number(values.port) : 3306,
+                username: values.username,
+                password: values.password,
+                database: values.database,
+                table: values.table,
+                target
+            });
+        }
+    };
+};
+const buildPostgresIntegration = (target) => {
+    const scope = target === 'job' ? 'job' : 'runner';
+    const prefix = target === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER';
+    const tableDefault = target === 'job' ? 'gitgreen_carbon_jobs' : 'gitgreen_runner_inventory';
+    return {
+        id: `postgres-${scope}`,
+        name: `PostgreSQL (${scope === 'job' ? 'per-job emissions' : 'runner inventory'})`,
+        description: target === 'job'
+            ? 'Insert each CI job calculation into a PostgreSQL table for downstream analytics.'
+            : 'Persist runner metadata in PostgreSQL so you can track each machine in your fleet.',
+        target,
+        fields: [
+            {
+                id: 'host',
+                label: 'PostgreSQL host',
+                envKey: `${prefix}_POSTGRES_HOST`,
+                initial: '127.0.0.1',
+                required: true
+            },
+            {
+                id: 'port',
+                label: 'PostgreSQL port',
+                envKey: `${prefix}_POSTGRES_PORT`,
+                type: 'number',
+                initial: '5432',
+                required: true
+            },
+            {
+                id: 'username',
+                label: 'PostgreSQL username',
+                envKey: `${prefix}_POSTGRES_USERNAME`,
+                required: true
+            },
+            {
+                id: 'password',
+                label: 'PostgreSQL password',
+                envKey: `${prefix}_POSTGRES_PASSWORD`,
+                type: 'password',
+                required: true,
+                masked: true
+            },
+            {
+                id: 'database',
+                label: 'Database name',
+                envKey: `${prefix}_POSTGRES_DATABASE`,
+                required: true
+            },
+            {
+                id: 'schema',
+                label: 'Schema',
+                envKey: `${prefix}_POSTGRES_SCHEMA`,
+                initial: 'public',
+                helpText: 'Leave as "public" unless you organize tables under a different schema.'
+            },
+            {
+                id: 'table',
+                label: 'Target table',
+                envKey: `${prefix}_POSTGRES_TABLE`,
+                initial: tableDefault,
+                required: true
+            },
+            {
+                id: 'sslmode',
+                label: 'SSL mode',
+                envKey: `${prefix}_POSTGRES_SSLMODE`,
+                initial: 'require',
+                helpText: 'Example: require, verify-full, disable'
+            }
+        ],
+        ensure: async (values) => {
+            await (0, db_setup_1.ensurePostgresResources)({
+                host: values.host,
+                port: values.port ? Number(values.port) : 5432,
+                username: values.username,
+                password: values.password,
+                database: values.database,
+                schema: values.schema,
+                sslMode: values.sslmode,
+                table: values.table,
+                target
+            });
+        }
+    };
+};
+exports.outputIntegrations = [
+    buildMysqlIntegration('job'),
+    buildMysqlIntegration('runner'),
+    buildPostgresIntegration('job'),
+    buildPostgresIntegration('runner')
+];
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "gitgreen",
-  "version": "1.0.3",
+  "version": "1.0.4",
   "description": "GitGreen CLI for carbon reporting in GitLab pipelines (GCP/AWS)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -41,10 +41,13 @@
     "cubic-spline": "^3.0.3",
     "dotenv": "^16.5.0",
     "kleur": "^4.1.5",
+    "mysql2": "^3.11.3",
+    "pg": "^8.12.0",
     "prompts": "^2.4.2"
   },
   "devDependencies": {
     "@types/node": "^20.11.30",
+    "@types/pg": "^8.11.6",
     "@types/prompts": "^2.4.9",
     "ts-node": "^10.9.2",
     "typescript": "^5.4.0",