gitgreen 1.0.3 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -140,6 +140,44 @@ Pipeline starts → component script fetches CPU/RAM timeseries from GCP Monitor
140
140
  → optional MR note when CI_JOB_TOKEN is present
141
141
  ```
142
142
 
143
+ ## Output Integrations
144
+
145
+ During `gitgreen init` you can opt into exporting GitGreen data to external systems. The wizard includes an integration step with two optional export targets:
146
+
147
+ - **Per-job carbon data** – emissions, runtime, and runner tags for every CI job.
148
+ - **Runner inventory** – the machine catalog that powers your GitLab runners, including machine type and scope 3 estimates.
149
+
150
+ Built-in connectors today:
151
+ - **MySQL** – populates `GITGREEN_JOB_MYSQL_*` / `GITGREEN_RUNNER_MYSQL_*` and inserts rows through a standard MySQL client.
152
+ - **PostgreSQL** – captures host, port, credentials, schema, table, and SSL mode (`GITGREEN_JOB_POSTGRES_*` / `GITGREEN_RUNNER_POSTGRES_*`) for storage in Postgres.
153
+
154
+ When you select either connector, the wizard captures host, port, username, password, database, and target table names and stores them in CI/CD variables. It immediately connects with those credentials to ensure the database, schema, and table exist (job sinks also create a `<table>_timeseries` table linked via foreign key). During CI, the GitGreen CLI automatically detects those env vars and:
155
+
156
+ - runs `gitgreen migrate --scope job|runner` to apply any pending migrations (tracked per DB via `gitgreen_migrations`);
157
+ - writes each carbon calculation (typed summary columns plus CPU/RAM timeseries rows) and optional runner inventory snapshot into the configured sink.
158
+
159
+ ### Extending the interface
160
+
161
+ Additional connectors can be added without touching the wizard logic. Each destination implements the `OutputIntegration` interface in `src/lib/integrations/output-integrations.ts`, which specifies:
162
+
163
+ 1. Display metadata (`id`, `name`, `description`)
164
+ 2. The data target it handles (`job` vs `runner`)
165
+ 3. Prompted credential fields (label, env var key, input type, default, mask flag)
166
+
167
+ To add another sink (for example ClickHouse or a webhook), create a new entry in that file with the fields your integration needs. Re-run `gitgreen init` and the option will automatically appear in the integration step.
168
+
169
+ ### Database migrations
170
+
171
+ Structured sinks rely on migrations tracked in `gitgreen_migrations`. Run them whenever you update GitGreen or change table names:
172
+
173
+ ```bash
174
+ gitgreen migrate --scope job # apply job sink migrations (summary + timeseries)
175
+ gitgreen migrate --scope runner # apply runner inventory migrations
176
+ gitgreen migrate --scope all # convenience wrapper (used by the GitLab component)
177
+ ```
178
+
179
+ The GitLab component automatically runs `gitgreen migrate --scope job` and `--scope runner` before calculating emissions, so pipelines stay in sync even when you change versions.
180
+
143
181
  ## Adding a provider
144
182
  1. Extend `CloudProvider` and the provider guard in `src/index.ts` so the calculator accepts the new key.
145
183
  2. Add machine power data (`<provider>_machine_power_profiles.json`) and, if needed, CPU profiles to `data/`, then update `PowerProfileRepository.loadMachineData` to load it.
package/dist/cli.js CHANGED
@@ -14,6 +14,9 @@ const index_1 = require("./index");
14
14
  const init_1 = require("./init");
15
15
  const cloudwatch_1 = require("./lib/aws/cloudwatch");
16
16
  const power_profile_repository_1 = require("./lib/carbon/power-profile-repository");
17
+ const sink_writer_1 = require("./lib/integrations/sink-writer");
18
+ const migration_runner_1 = require("./lib/integrations/migration-runner");
19
+ const data_sink_1 = require("./lib/integrations/data-sink");
17
20
  const program = new commander_1.Command();
18
21
  const toIsoTimestamp = (input) => {
19
22
  if (input instanceof Date) {
@@ -209,6 +212,14 @@ const runCalculate = async (opts) => {
209
212
  ramSizeTimeseries = parseTimeseriesFile(opts.ramSizeTimeseries);
210
213
  }
211
214
  console.log((0, kleur_1.gray)(`Loaded ${cpuTimeseries.length} CPU points, ${ramUsedTimeseries.length} RAM points`));
215
+ const jobInput = {
216
+ provider,
217
+ machineType: opts.machine,
218
+ region: calculationRegion,
219
+ cpuTimeseries,
220
+ ramUsedTimeseries,
221
+ ramSizeTimeseries
222
+ };
212
223
  // Sort by timestamp for charts
213
224
  const cpuSorted = [...cpuTimeseries].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
214
225
  const ramSorted = [...ramUsedTimeseries].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
@@ -232,13 +243,13 @@ const runCalculate = async (opts) => {
232
243
  console.log(asciichart_1.default.plot(ramPercent, { height: 8, format: (x) => x.toFixed(1).padStart(6) }));
233
244
  }
234
245
  try {
235
- const { result, budget, markdown } = await (0, index_1.calculate)({
236
- provider,
237
- machineType: opts.machine,
238
- region: calculationRegion,
239
- cpuTimeseries,
240
- ramUsedTimeseries,
241
- ramSizeTimeseries,
246
+ const { result, budget, markdown, jsonReport } = await (0, index_1.calculate)({
247
+ provider: jobInput.provider,
248
+ machineType: jobInput.machineType,
249
+ region: jobInput.region,
250
+ cpuTimeseries: jobInput.cpuTimeseries,
251
+ ramUsedTimeseries: jobInput.ramUsedTimeseries,
252
+ ramSizeTimeseries: jobInput.ramSizeTimeseries,
242
253
  carbonBudgetGrams: opts.budget,
243
254
  failOnBreach: opts.failOnBudget,
244
255
  runnerTags: opts.runnerTags ? String(opts.runnerTags).split(/[,\s]+/).filter(Boolean) : (0, index_1.readRunnerTagsFromEnv)(),
@@ -269,6 +280,8 @@ const runCalculate = async (opts) => {
269
280
  if (!opts.outMd && !opts.outJson) {
270
281
  console.log('\n' + markdown);
271
282
  }
283
+ await (0, sink_writer_1.persistJobPayload)({ jobInput, result, budget, jsonReport });
284
+ await (0, sink_writer_1.persistRunnerPayload)({ jobInput, result });
272
285
  if (budget.overBudget && opts.failOnBudget) {
273
286
  process.exitCode = 1;
274
287
  }
@@ -312,4 +325,41 @@ program
312
325
  .action(async (cmdOpts) => {
313
326
  await (0, init_1.runInit)({ assumeYes: Boolean(cmdOpts.yes) });
314
327
  });
328
// `gitgreen migrate` — apply output-integration database migrations for the
// requested scope (job, runner, or all configured sinks).
program
    .command('migrate')
    .description('Run output-integration database migrations')
    .option('--scope <scope>', 'Scope to migrate (job|runner|all)', 'all')
    .action(async (opts) => {
    const rawScope = String(opts.scope || 'all').toLowerCase();
    let scopes;
    if (rawScope === 'all') {
        scopes = ['job', 'runner'];
    }
    else if (rawScope === 'job' || rawScope === 'runner') {
        scopes = [rawScope];
    }
    else {
        scopes = [];
    }
    if (!scopes.length) {
        console.error((0, kleur_1.red)('Invalid scope. Use job, runner, or all.'));
        process.exitCode = 1;
        return;
    }
    for (const scope of scopes) {
        await runMigrationsForScope(scope);
    }
});
315
344
  program.parseAsync(process.argv);
345
// Translate a resolved sink configuration into the input shape expected by
// the migration runner. Postgres-only settings (schema, sslMode) are
// stripped to undefined for every other driver.
const buildMigrationInput = (config) => {
    const isPostgres = config.driver === 'postgres';
    return {
        driver: config.driver,
        host: config.host,
        port: config.port,
        username: config.username,
        password: config.password,
        database: config.database,
        schema: isPostgres ? config.schema : undefined,
        sslMode: isPostgres ? config.sslMode : undefined,
        table: config.table,
        timeseriesTable: config.timeseriesTable,
        scope: config.scope
    };
};
358
// Run pending migrations against every sink configured for a scope.
// A no-op when no sink env vars are set (iterating an empty list).
const runMigrationsForScope = async (scope) => {
    for (const cfg of (0, data_sink_1.resolveAllSinkConfigs)(scope)) {
        await (0, migration_runner_1.runMigrations)(buildMigrationInput(cfg));
    }
};
package/dist/init.js CHANGED
@@ -11,6 +11,7 @@ const axios_1 = __importDefault(require("axios"));
11
11
  const prompts_1 = __importDefault(require("prompts"));
12
12
  const kleur_1 = require("kleur");
13
13
  const power_profile_repository_1 = require("./lib/carbon/power-profile-repository");
14
+ const output_integrations_1 = require("./lib/integrations/output-integrations");
14
15
  const hasGlab = () => {
15
16
  try {
16
17
  (0, child_process_1.execSync)('glab --version', { stdio: 'ignore' });
@@ -81,6 +82,97 @@ const setVariable = async (auth, project, key, value, masked = false) => {
81
82
  return setVariableApi(auth.baseUrl, auth.pat, project, key, value, masked);
82
83
  }
83
84
  };
85
// Prompt the user for one integration credential field. An existing env var
// value (if any) seeds the prompt default; numeric fields coerce the default
// to a finite number or drop it. A blank answer aborts the wizard for
// required fields and resolves to undefined for optional ones.
const askIntegrationFieldValue = async (field) => {
    let promptType = 'text';
    if (field.type === 'password') {
        promptType = 'password';
    }
    else if (field.type === 'number') {
        promptType = 'number';
    }
    if (field.helpText) {
        console.log((0, kleur_1.gray)(field.helpText));
    }
    const initialValue = process.env[field.envKey] ?? field.initial;
    let initial = initialValue;
    if (promptType === 'number' && initialValue !== undefined) {
        const parsed = Number(initialValue);
        initial = Number.isFinite(parsed) ? parsed : undefined;
    }
    const answer = await (0, prompts_1.default)({
        type: promptType,
        name: 'value',
        message: field.label,
        initial
    });
    const rawValue = answer.value;
    const isBlank = rawValue === undefined || rawValue === null || rawValue === '';
    if (!isBlank) {
        return String(rawValue);
    }
    if (field.required) {
        console.log((0, kleur_1.red)(`${field.label} is required.`));
        process.exit(1);
    }
    return undefined;
};
114
// Walk an integration's field list, prompting for each value in order.
// Returns the CI/CD variables to store (env key, value, mask flag) plus a
// field-id -> value map used by the integration's connectivity check.
const promptIntegrationFields = async (integration) => {
    const variables = [];
    const values = {};
    for (const field of integration.fields) {
        const value = await askIntegrationFieldValue(field);
        if (value === undefined) {
            continue;
        }
        variables.push({ key: field.envKey, value, masked: Boolean(field.masked) });
        values[field.id] = value;
    }
    return { variables, values };
};
126
// Offer the integrations available for one data target ('job' or 'runner').
// Returns the CI/CD variables collected for the chosen integration, or an
// empty list when none exist or the user skips. When the integration
// defines an `ensure` hook it is invoked to verify connectivity, and the
// wizard aborts if that check fails.
const selectIntegrationForTarget = async (target) => {
    const available = output_integrations_1.outputIntegrations.filter(integration => integration.target === target);
    if (!available.length) {
        return [];
    }
    const targetLabel = target === 'job' ? 'per-job carbon data' : 'runner inventory';
    const skipChoice = { title: 'Skip', description: 'Do not export data', value: '_skip_' };
    const integrationChoices = available.map(integration => ({
        title: integration.name,
        description: integration.description,
        value: integration.id
    }));
    const { integrationId } = await (0, prompts_1.default)({
        type: 'select',
        name: 'integrationId',
        message: `Output integration for ${targetLabel}`,
        choices: [skipChoice, ...integrationChoices]
    });
    if (!integrationId || integrationId === '_skip_') {
        return [];
    }
    const integration = available.find(item => item.id === integrationId);
    if (!integration) {
        return [];
    }
    console.log((0, kleur_1.gray)(`\n${integration.name}`));
    if (integration.description) {
        console.log((0, kleur_1.gray)(integration.description));
    }
    const { variables, values } = await promptIntegrationFields(integration);
    if (integration.ensure) {
        try {
            await integration.ensure(values);
        }
        catch (errorState) {
            console.log((0, kleur_1.red)(`Failed to verify ${integration.name}: ${errorState?.message || errorState}`));
            process.exit(1);
        }
    }
    return variables;
};
166
// Run the "Output Integrations" wizard step: optionally pick a sink for
// per-job carbon data and another for runner inventory, returning every
// CI/CD variable to be stored. Skipped entirely when no integrations exist.
const promptOutputIntegrations = async (stepNumber) => {
    if (!output_integrations_1.outputIntegrations.length) {
        return [];
    }
    console.log((0, kleur_1.gray)(`\nStep ${stepNumber}: Output Integrations`));
    console.log((0, kleur_1.gray)('─'.repeat(40)));
    console.log((0, kleur_1.gray)('Optionally send carbon job data or runner metadata to an external system.'));
    const collected = [];
    for (const target of ['job', 'runner']) {
        collected.push(...await selectIntegrationForTarget(target));
    }
    return collected;
};
84
176
  const generateCiJob = (opts) => {
85
177
  const { provider, runnerTag, carbonBudget, failOnBudget } = opts;
86
178
  let inputs = ` provider: ${provider}
@@ -259,6 +351,7 @@ const buildAwsUserData = (params) => {
259
351
  'dnf install -y gitlab-runner',
260
352
  'curl -fsSL https://rpm.nodesource.com/setup_20.x | bash -',
261
353
  'dnf install -y nodejs',
354
+ 'npm install -g gitgreen@latest',
262
355
  '',
263
356
  'INSTANCE_ID=$(curl -s http://169.254.169.254/latest/meta-data/instance-id)',
264
357
  '',
@@ -614,7 +707,9 @@ const runAwsInit = async (auth, projectPath) => {
614
707
  });
615
708
  failOnBudget = shouldFail;
616
709
  }
617
- console.log((0, kleur_1.gray)('\nStep 8: Setting CI/CD Variables'));
710
+ // Step 8: Output integrations
711
+ const integrationVariables = await promptOutputIntegrations(8);
712
+ console.log((0, kleur_1.gray)('\nStep 9: Setting CI/CD Variables'));
618
713
  console.log((0, kleur_1.gray)('─'.repeat(40)));
619
714
  const variables = [
620
715
  { key: 'AWS_ACCESS_KEY_ID', value: accessKeyId, masked: true },
@@ -634,6 +729,9 @@ const runAwsInit = async (auth, projectPath) => {
634
729
  if (failOnBudget) {
635
730
  variables.push({ key: 'FAIL_ON_BUDGET', value: 'true', masked: false });
636
731
  }
732
+ if (integrationVariables.length) {
733
+ variables.push(...integrationVariables);
734
+ }
637
735
  for (const v of variables) {
638
736
  const ok = await setVariable(auth, projectPath, v.key, v.value, v.masked);
639
737
  if (ok) {
@@ -643,7 +741,7 @@ const runAwsInit = async (auth, projectPath) => {
643
741
  console.log((0, kleur_1.red)(' Failed: ' + v.key));
644
742
  }
645
743
  }
646
- console.log((0, kleur_1.gray)('\nStep 9: CI Configuration'));
744
+ console.log((0, kleur_1.gray)('\nStep 10: CI Configuration'));
647
745
  console.log((0, kleur_1.gray)('─'.repeat(40)));
648
746
  let runnerTagForCi = runnerTag;
649
747
  if (!runnerTagForCi) {
@@ -904,6 +1002,7 @@ apt-get install -y curl ca-certificates python3
904
1002
  # Install Node.js 20.x
905
1003
  curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
906
1004
  apt-get install -y nodejs
1005
+ npm install -g gitgreen@latest
907
1006
 
908
1007
  # Install GitLab Runner
909
1008
  curl -L https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh | bash
@@ -1110,7 +1209,7 @@ systemctl start gitlab-runner
1110
1209
  }
1111
1210
  } // end else (use existing runner)
1112
1211
  // Step 7: Service Account
1113
- console.log((0, kleur_1.gray)('\nStep 5: Service Account'));
1212
+ console.log((0, kleur_1.gray)('\nStep 7: Service Account'));
1114
1213
  console.log((0, kleur_1.gray)('─'.repeat(40)));
1115
1214
  let saKeyBase64;
1116
1215
  const { keyMethod } = await (0, prompts_1.default)({
@@ -1177,8 +1276,8 @@ systemctl start gitlab-runner
1177
1276
  }
1178
1277
  saKeyBase64 = fs_1.default.readFileSync(saKeyPath).toString('base64');
1179
1278
  }
1180
- // Step 6: Electricity Maps API
1181
- console.log((0, kleur_1.gray)('\nStep 6: Electricity Maps API'));
1279
+ // Step 8: Electricity Maps API
1280
+ console.log((0, kleur_1.gray)('\nStep 8: Electricity Maps API'));
1182
1281
  console.log((0, kleur_1.gray)('─'.repeat(40)));
1183
1282
  console.log((0, kleur_1.gray)('Get free key: https://api-portal.electricitymaps.com'));
1184
1283
  const { electricityMapsKey } = await (0, prompts_1.default)({
@@ -1190,8 +1289,8 @@ systemctl start gitlab-runner
1190
1289
  console.log((0, kleur_1.red)('API key required'));
1191
1290
  process.exit(1);
1192
1291
  }
1193
- // Step 7: Optional
1194
- console.log((0, kleur_1.gray)('\nStep 7: Optional Settings'));
1292
+ // Step 9: Optional
1293
+ console.log((0, kleur_1.gray)('\nStep 9: Optional Settings'));
1195
1294
  console.log((0, kleur_1.gray)('─'.repeat(40)));
1196
1295
  console.log((0, kleur_1.gray)('Set a carbon budget to track emissions against a limit.'));
1197
1296
  console.log((0, kleur_1.gray)('Example: 10 grams CO2e per job. Leave empty to skip.\n'));
@@ -1211,8 +1310,9 @@ systemctl start gitlab-runner
1211
1310
  });
1212
1311
  failOnBudget = shouldFail;
1213
1312
  }
1214
- // Step 8: Set Variables
1215
- console.log((0, kleur_1.gray)('\nStep 8: Setting CI/CD Variables'));
1313
+ // Step 10: Output integrations
1314
+ const gcpIntegrationVariables = await promptOutputIntegrations(10);
1315
+ console.log((0, kleur_1.gray)('\nStep 11: Setting CI/CD Variables'));
1216
1316
  console.log((0, kleur_1.gray)('─'.repeat(40)));
1217
1317
  const variables = [
1218
1318
  { key: 'GCP_PROJECT_ID', value: gcpProjectId, masked: false },
@@ -1228,6 +1328,9 @@ systemctl start gitlab-runner
1228
1328
  if (failOnBudget) {
1229
1329
  variables.push({ key: 'FAIL_ON_BUDGET', value: 'true', masked: false });
1230
1330
  }
1331
+ if (gcpIntegrationVariables.length) {
1332
+ variables.push(...gcpIntegrationVariables);
1333
+ }
1231
1334
  for (const v of variables) {
1232
1335
  const ok = await setVariable(auth, projectPath, v.key, v.value, v.masked);
1233
1336
  if (ok) {
@@ -1237,8 +1340,8 @@ systemctl start gitlab-runner
1237
1340
  console.log((0, kleur_1.red)(' Failed: ' + v.key));
1238
1341
  }
1239
1342
  }
1240
- // Step 9: Generate CI job
1241
- console.log((0, kleur_1.gray)('\nStep 9: CI Configuration'));
1343
+ // Step 12: Generate CI job
1344
+ console.log((0, kleur_1.gray)('\nStep 12: CI Configuration'));
1242
1345
  console.log((0, kleur_1.gray)('─'.repeat(40)));
1243
1346
  // Only prompt for runner tag if not already set from provisioning
1244
1347
  if (!runnerTag) {
@@ -0,0 +1,250 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.insertRunnerRow = exports.insertJobRow = exports.resolveAllSinkConfigs = exports.resolveSinkConfig = void 0;
7
+ const promise_1 = __importDefault(require("mysql2/promise"));
8
+ const pg_1 = require("pg");
9
+ const kleur_1 = require("kleur");
10
// Reduce a user-supplied host value to a bare hostname: trims whitespace,
// drops a leading "<scheme>://" prefix, and cuts off any path, query, or
// fragment portion. Falsy inputs are passed through unchanged.
const normalizeHost = (input) => {
    if (!input) {
        return input;
    }
    let host = input.trim();
    host = host.replace(/^[a-zA-Z]+:\/\//, '');
    const cutAt = host.search(/[/?#]/);
    return cutAt === -1 ? host : host.slice(0, cutAt);
};
17
// Map a libpq-style sslmode string onto the `ssl` option accepted by pg:
// 'disable' (or unset) -> no TLS config, 'verify-full' -> full certificate
// verification, any other mode -> TLS without certificate verification.
const buildPgSslConfig = (mode) => {
    switch (mode) {
        case undefined:
        case null:
        case '':
        case 'disable':
            return undefined;
        case 'verify-full':
            return { rejectUnauthorized: true };
        default:
            return { rejectUnauthorized: false };
    }
};
24
// Read an environment variable, treating empty strings the same as unset.
const loadEnv = (key) => {
    const value = process.env[key];
    return value ? value : undefined;
};
30
// Assemble a MySQL sink configuration for the given scope ('job' or
// 'runner') from the GITGREEN_<SCOPE>_MYSQL_* environment variables.
// Returns undefined unless every required setting is present.
const buildMysqlConfig = (scope) => {
    const prefix = scope === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER';
    const read = (suffix) => loadEnv(`${prefix}_MYSQL_${suffix}`);
    const host = read('HOST');
    const username = read('USERNAME');
    const password = read('PASSWORD');
    const database = read('DATABASE');
    const table = read('TABLE');
    if ([host, username, password, database, table].some(value => !value)) {
        return undefined;
    }
    return {
        driver: 'mysql',
        host: normalizeHost(host),
        port: Number(read('PORT') || 3306),
        username,
        password,
        database,
        table,
        // Job sinks also get a companion timeseries table for CPU/RAM points.
        timeseriesTable: scope === 'job' ? `${table}_timeseries` : undefined,
        scope
    };
};
52
// Assemble a PostgreSQL sink configuration for the given scope ('job' or
// 'runner') from the GITGREEN_<SCOPE>_POSTGRES_* environment variables.
// Returns undefined unless every required setting is present; schema
// defaults to 'public' and sslMode stays optional.
const buildPostgresConfig = (scope) => {
    const prefix = scope === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER';
    const read = (suffix) => loadEnv(`${prefix}_POSTGRES_${suffix}`);
    const host = read('HOST');
    const username = read('USERNAME');
    const password = read('PASSWORD');
    const database = read('DATABASE');
    const table = read('TABLE');
    if ([host, username, password, database, table].some(value => !value)) {
        return undefined;
    }
    return {
        driver: 'postgres',
        host: normalizeHost(host),
        port: Number(read('PORT') || 5432),
        username,
        password,
        database,
        table,
        // Job sinks also get a companion timeseries table for CPU/RAM points.
        timeseriesTable: scope === 'job' ? `${table}_timeseries` : undefined,
        schema: read('SCHEMA') || 'public',
        sslMode: read('SSLMODE'),
        scope
    };
};
78
// Resolve the sink configuration for a scope. When a driver is named, only
// that driver's env vars are consulted; otherwise Postgres is preferred and
// MySQL acts as the fallback.
const resolveSinkConfig = (scope, driver) => {
    switch (driver) {
        case 'postgres':
            return buildPostgresConfig(scope);
        case 'mysql':
            return buildMysqlConfig(scope);
        default:
            return buildPostgresConfig(scope) || buildMysqlConfig(scope);
    }
};
exports.resolveSinkConfig = resolveSinkConfig;
86
// Collect every sink configuration available for a scope — Postgres first,
// then MySQL — returning an empty array when nothing is configured.
const resolveAllSinkConfigs = (scope) => {
    return [buildPostgresConfig(scope), buildMysqlConfig(scope)].filter(Boolean);
};
exports.resolveAllSinkConfigs = resolveAllSinkConfigs;
97
// Run `action`, retrying up to `attempts` times with a fixed pause of
// `delayMs` milliseconds between tries. The most recent failure is
// rethrown once the attempt budget is exhausted.
const insertWithRetry = async (action, attempts = 3, delayMs = 2000) => {
    let lastError;
    let remaining = attempts;
    while (remaining > 0) {
        try {
            return await action();
        }
        catch (err) {
            lastError = err;
            remaining -= 1;
            if (remaining > 0) {
                await new Promise(resolve => setTimeout(resolve, delayMs));
            }
        }
    }
    throw lastError;
};
112
// Column map for the per-job summary insert: [column_name, extractor].
// Extractors read the prepared job row object; absent values become SQL
// NULL. Array order defines the INSERT column order for both drivers.
const jobColumns = [
    ['provider', (row) => row.provider ?? null],
    ['region', (row) => row.region ?? null],
    ['machine_type', (row) => row.machineType ?? null],
    ['cpu_points', (row) => row.cpuPoints ?? null],
    ['ram_points', (row) => row.ramPoints ?? null],
    ['runtime_seconds', (row) => row.runtimeSeconds ?? null],
    ['total_emissions', (row) => row.totalEmissions ?? null],
    ['cpu_emissions', (row) => row.cpuEmissions ?? null],
    ['ram_emissions', (row) => row.ramEmissions ?? null],
    ['scope3_emissions', (row) => row.scope3Emissions ?? null],
    ['carbon_intensity', (row) => row.carbonIntensity ?? null],
    ['pue', (row) => row.pue ?? null],
    ['carbon_budget', (row) => row.carbonBudget ?? null],
    // Only a genuine boolean is stored; any other value is treated as unknown.
    ['over_budget', (row) => typeof row.overBudget === 'boolean' ? row.overBudget : null],
    ['gitlab_project_id', (row) => row.gitlabProjectId ?? null],
    ['gitlab_pipeline_id', (row) => row.gitlabPipelineId ?? null],
    ['gitlab_job_id', (row) => row.gitlabJobId ?? null],
    ['gitlab_job_name', (row) => row.gitlabJobName ?? null],
    ['runner_id', (row) => row.runnerId ?? null],
    ['runner_description', (row) => row.runnerDescription ?? null],
    ['runner_tags', (row) => row.runnerTags ?? null],
    ['runner_version', (row) => row.runnerVersion ?? null],
    ['runner_revision', (row) => row.runnerRevision ?? null],
    // Full report payload is serialized verbatim as JSON text.
    ['payload', (row) => JSON.stringify(row.payload)]
];
138
// Column map for the runner inventory insert: [column_name, extractor].
// Same convention as jobColumns: missing values become SQL NULL and the
// array order defines the INSERT column order.
const runnerColumns = [
    ['runner_id', (row) => row.runnerId ?? null],
    ['runner_description', (row) => row.runnerDescription ?? null],
    ['runner_version', (row) => row.runnerVersion ?? null],
    ['runner_revision', (row) => row.runnerRevision ?? null],
    ['runner_platform', (row) => row.runnerPlatform ?? null],
    ['runner_architecture', (row) => row.runnerArchitecture ?? null],
    ['runner_executor', (row) => row.runnerExecutor ?? null],
    ['runner_tags', (row) => row.runnerTags ?? null],
    ['machine_type', (row) => row.machineType ?? null],
    ['provider', (row) => row.provider ?? null],
    ['region', (row) => row.region ?? null],
    // Provider-specific identity fields; only the active provider's set is populated.
    ['gcp_project_id', (row) => row.gcpProjectId ?? null],
    ['gcp_instance_id', (row) => row.gcpInstanceId ?? null],
    ['gcp_zone', (row) => row.gcpZone ?? null],
    ['aws_region', (row) => row.awsRegion ?? null],
    ['aws_instance_id', (row) => row.awsInstanceId ?? null],
    // Snapshot of the most recent job observed on this runner.
    ['last_job_machine_type', (row) => row.lastJobMachineType ?? null],
    ['last_job_region', (row) => row.lastJobRegion ?? null],
    ['last_job_provider', (row) => row.lastJobProvider ?? null],
    ['last_job_runtime_seconds', (row) => row.lastJobRuntimeSeconds ?? null],
    ['last_job_total_emissions', (row) => row.lastJobTotalEmissions ?? null],
    ['last_job_recorded_at', (row) => row.lastJobRecordedAt ?? null],
    // Full snapshot payload is serialized verbatim as JSON text.
    ['payload', (row) => JSON.stringify(row.payload)]
];
163
// Insert one structured row (and optional timeseries points) into a MySQL
// sink inside a single transaction. The summary row is written first; its
// auto-increment id links the timeseries rows via job_id. Errors propagate
// to the caller after an explicit rollback; the connection always closes.
const runMysqlInsert = async (config, columns, row, timeseries) => {
    const connection = await promise_1.default.createConnection({
        host: config.host,
        port: config.port,
        user: config.username,
        password: config.password,
        database: config.database
    });
    try {
        await connection.beginTransaction();
        const colNames = columns.map(([name]) => `\`${name}\``).join(', ');
        const placeholders = columns.map(() => '?').join(', ');
        const values = columns.map(([, getter]) => getter(row));
        const [result] = await connection.query(`INSERT INTO \`${config.table}\` (${colNames}) VALUES (${placeholders})`, values);
        const insertedId = result.insertId;
        if (timeseries && timeseries.length > 0) {
            const tsTable = config.timeseriesTable || `${config.table}_timeseries`;
            // mysql2 bulk-insert form: `VALUES ?` with a nested array of rows.
            const tsValues = timeseries.map(point => [insertedId, point.metric, point.timestamp, point.value]);
            await connection.query(`INSERT INTO \`${tsTable}\` (job_id, metric, ts, value) VALUES ?`, [tsValues]);
        }
        await connection.commit();
    }
    catch (err) {
        // Mirror the Postgres path: roll back explicitly instead of relying
        // on the server discarding the open transaction at disconnect.
        await connection.rollback().catch(() => { });
        throw err;
    }
    finally {
        await connection.end();
    }
};
189
// Insert one structured row (and optional timeseries points) into a
// Postgres sink inside a single transaction. The summary insert RETURNING
// id supplies the job_id for the timeseries rows; if no id comes back the
// timeseries insert is skipped. Errors propagate after ROLLBACK; the
// client is always closed.
const runPostgresInsert = async (config, columns, row, timeseries) => {
    const schema = config.schema || 'public';
    const client = new pg_1.Client({
        host: config.host,
        port: config.port,
        user: config.username,
        password: config.password,
        database: config.database,
        ssl: buildPgSslConfig(config.sslMode)
    });
    await client.connect();
    try {
        await client.query('BEGIN');
        const colNames = columns.map(([name]) => `"${name}"`).join(', ');
        const placeholders = columns.map((_, idx) => `$${idx + 1}`).join(', ');
        const values = columns.map(([, getter]) => getter(row));
        const insertResult = await client.query(`INSERT INTO "${schema}"."${config.table}" (${colNames}) VALUES (${placeholders}) RETURNING id`, values);
        const jobId = insertResult.rows[0]?.id;
        if (jobId && timeseries && timeseries.length > 0) {
            const tsTableName = config.timeseriesTable || `${config.table}_timeseries`;
            const tsTable = `"${schema}"."${tsTableName}"`;
            // Each point binds 4 parameters and Postgres caps a single
            // statement at 65535 bind parameters, so insert in batches
            // (still within the one surrounding transaction).
            const batchSize = 1000;
            for (let offset = 0; offset < timeseries.length; offset += batchSize) {
                const batch = timeseries.slice(offset, offset + batchSize);
                const valueFragments = [];
                const params = [];
                batch.forEach((point, index) => {
                    const base = index * 4;
                    valueFragments.push(`($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4})`);
                    params.push(jobId, point.metric, point.timestamp, point.value);
                });
                await client.query(`INSERT INTO ${tsTable} (job_id, metric, ts, value) VALUES ${valueFragments.join(', ')}`, params);
            }
        }
        await client.query('COMMIT');
    }
    catch (err) {
        await client.query('ROLLBACK').catch(() => { });
        throw err;
    }
    finally {
        await client.end();
    }
};
229
// Dispatch a row to the configured sink driver, retrying transient
// failures. Write errors are reported on stdout but deliberately never
// fail the calling CI job.
const writeStructuredRow = async (config, columns, row, timeseries) => {
    const insert = config.driver === 'mysql' ? runMysqlInsert : runPostgresInsert;
    try {
        await insertWithRetry(() => insert(config, columns, row, timeseries));
        console.log((0, kleur_1.green)(`Saved ${config.scope} payload to ${config.driver} sink (${config.database}.${config.table})`));
    }
    catch (err) {
        console.log((0, kleur_1.red)(`Failed to write ${config.scope} payload: ${err?.message || err}`));
    }
};
243
// Persist a per-job summary row, plus optional CPU/RAM timeseries points.
const insertJobRow = (config, row, timeseries) => writeStructuredRow(config, jobColumns, row, timeseries);
exports.insertJobRow = insertJobRow;
// Persist a runner inventory snapshot row.
const insertRunnerRow = (config, row) => writeStructuredRow(config, runnerColumns, row);
exports.insertRunnerRow = insertRunnerRow;