gitgreen 1.0.4 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -1
- package/dist/cli.js +57 -7
- package/dist/lib/integrations/data-sink.js +250 -0
- package/dist/lib/integrations/migration-runner.js +214 -0
- package/dist/lib/integrations/sink-writer.js +181 -0
- package/migrations/job_mysql_001_init.sql +40 -0
- package/migrations/job_postgres_001_init.sql +36 -0
- package/migrations/runner_mysql_001_init.sql +28 -0
- package/migrations/runner_postgres_001_init.sql +27 -0
- package/package.json +2 -1
package/README.md
CHANGED
|
@@ -151,7 +151,10 @@ Built-in connectors today:
|
|
|
151
151
|
- **MySQL** – populates `GITGREEN_JOB_MYSQL_*` / `GITGREEN_RUNNER_MYSQL_*` and inserts rows through a standard MySQL client.
|
|
152
152
|
- **PostgreSQL** – captures host, port, credentials, schema, table, and SSL mode (`GITGREEN_JOB_POSTGRES_*` / `GITGREEN_RUNNER_POSTGRES_*`) for storage in Postgres.
|
|
153
153
|
|
|
154
|
-
When you select either connector, the wizard captures host, port, username, password, database, and target table names and stores them in CI/CD variables. It immediately connects with those credentials to ensure the database, schema, and table exist (
|
|
154
|
+
When you select either connector, the wizard captures host, port, username, password, database, and target table names and stores them in CI/CD variables. It immediately connects with those credentials to ensure the database, schema, and table exist (job sinks also create a `<table>_timeseries` table linked via foreign key). During CI, the GitGreen CLI automatically detects those env vars and:
|
|
155
|
+
|
|
156
|
+
- runs `gitgreen migrate --scope job|runner` to apply any pending migrations (tracked per DB via `gitgreen_migrations`);
|
|
157
|
+
- writes each carbon calculation (typed summary columns plus CPU/RAM timeseries rows) and optional runner inventory snapshot into the configured sink.
|
|
155
158
|
|
|
156
159
|
### Extending the interface
|
|
157
160
|
|
|
@@ -163,6 +166,18 @@ Additional connectors can be added without touching the wizard logic. Each desti
|
|
|
163
166
|
|
|
164
167
|
To add another sink (for example PostgreSQL or a webhook), create a new entry in that file with the fields your integration needs. Re-run `gitgreen init` and the option will automatically appear in the integration step.
|
|
165
168
|
|
|
169
|
+
### Database migrations
|
|
170
|
+
|
|
171
|
+
Structured sinks rely on migrations tracked in `gitgreen_migrations`. Run them whenever you update GitGreen or change table names:
|
|
172
|
+
|
|
173
|
+
```bash
|
|
174
|
+
gitgreen migrate --scope job # apply job sink migrations (summary + timeseries)
|
|
175
|
+
gitgreen migrate --scope runner # apply runner inventory migrations
|
|
176
|
+
gitgreen migrate --scope all # convenience wrapper (used by the GitLab component)
|
|
177
|
+
```
|
|
178
|
+
|
|
179
|
+
The GitLab component automatically runs `gitgreen migrate --scope job` and `--scope runner` before calculating emissions, so pipelines stay in sync even when you change versions.
|
|
180
|
+
|
|
166
181
|
## Adding a provider
|
|
167
182
|
1. Extend `CloudProvider` and the provider guard in `src/index.ts` so the calculator accepts the new key.
|
|
168
183
|
2. Add machine power data (`<provider>_machine_power_profiles.json`) and, if needed, CPU profiles to `data/`, then update `PowerProfileRepository.loadMachineData` to load it.
|
package/dist/cli.js
CHANGED
|
@@ -14,6 +14,30 @@ const index_1 = require("./index");
|
|
|
14
14
|
const init_1 = require("./init");
|
|
15
15
|
const cloudwatch_1 = require("./lib/aws/cloudwatch");
|
|
16
16
|
const power_profile_repository_1 = require("./lib/carbon/power-profile-repository");
|
|
17
|
+
const sink_writer_1 = require("./lib/integrations/sink-writer");
|
|
18
|
+
const migration_runner_1 = require("./lib/integrations/migration-runner");
|
|
19
|
+
const data_sink_1 = require("./lib/integrations/data-sink");
|
|
20
|
+
// Maps a resolved sink config onto the input shape expected by runMigrations.
// Postgres-only settings (schema, SSL mode) are blanked out for other drivers.
const buildMigrationInput = (config) => {
    const isPostgres = config.driver === 'postgres';
    const { driver, host, port, username, password, database, table, timeseriesTable, scope } = config;
    return {
        driver,
        host,
        port,
        username,
        password,
        database,
        schema: isPostgres ? config.schema : undefined,
        sslMode: isPostgres ? config.sslMode : undefined,
        table,
        timeseriesTable,
        scope
    };
};
|
|
33
|
+
// Applies pending migrations, sequentially, for every sink configured via
// environment variables for the given scope. No-op when no sink is configured.
const runMigrationsForScope = async (scope) => {
    const sinkConfigs = (0, data_sink_1.resolveAllSinkConfigs)(scope);
    if (sinkConfigs.length === 0) {
        return;
    }
    for (const sinkConfig of sinkConfigs) {
        await (0, migration_runner_1.runMigrations)(buildMigrationInput(sinkConfig));
    }
};
|
|
17
41
|
const program = new commander_1.Command();
|
|
18
42
|
const toIsoTimestamp = (input) => {
|
|
19
43
|
if (input instanceof Date) {
|
|
@@ -209,6 +233,14 @@ const runCalculate = async (opts) => {
|
|
|
209
233
|
ramSizeTimeseries = parseTimeseriesFile(opts.ramSizeTimeseries);
|
|
210
234
|
}
|
|
211
235
|
console.log((0, kleur_1.gray)(`Loaded ${cpuTimeseries.length} CPU points, ${ramUsedTimeseries.length} RAM points`));
|
|
236
|
+
const jobInput = {
|
|
237
|
+
provider,
|
|
238
|
+
machineType: opts.machine,
|
|
239
|
+
region: calculationRegion,
|
|
240
|
+
cpuTimeseries,
|
|
241
|
+
ramUsedTimeseries,
|
|
242
|
+
ramSizeTimeseries
|
|
243
|
+
};
|
|
212
244
|
// Sort by timestamp for charts
|
|
213
245
|
const cpuSorted = [...cpuTimeseries].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
|
|
214
246
|
const ramSorted = [...ramUsedTimeseries].sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
|
|
@@ -232,13 +264,13 @@ const runCalculate = async (opts) => {
|
|
|
232
264
|
console.log(asciichart_1.default.plot(ramPercent, { height: 8, format: (x) => x.toFixed(1).padStart(6) }));
|
|
233
265
|
}
|
|
234
266
|
try {
|
|
235
|
-
const { result, budget, markdown } = await (0, index_1.calculate)({
|
|
236
|
-
provider,
|
|
237
|
-
machineType:
|
|
238
|
-
region:
|
|
239
|
-
cpuTimeseries,
|
|
240
|
-
ramUsedTimeseries,
|
|
241
|
-
ramSizeTimeseries,
|
|
267
|
+
const { result, budget, markdown, jsonReport } = await (0, index_1.calculate)({
|
|
268
|
+
provider: jobInput.provider,
|
|
269
|
+
machineType: jobInput.machineType,
|
|
270
|
+
region: jobInput.region,
|
|
271
|
+
cpuTimeseries: jobInput.cpuTimeseries,
|
|
272
|
+
ramUsedTimeseries: jobInput.ramUsedTimeseries,
|
|
273
|
+
ramSizeTimeseries: jobInput.ramSizeTimeseries,
|
|
242
274
|
carbonBudgetGrams: opts.budget,
|
|
243
275
|
failOnBreach: opts.failOnBudget,
|
|
244
276
|
runnerTags: opts.runnerTags ? String(opts.runnerTags).split(/[,\s]+/).filter(Boolean) : (0, index_1.readRunnerTagsFromEnv)(),
|
|
@@ -269,6 +301,8 @@ const runCalculate = async (opts) => {
|
|
|
269
301
|
if (!opts.outMd && !opts.outJson) {
|
|
270
302
|
console.log('\n' + markdown);
|
|
271
303
|
}
|
|
304
|
+
await (0, sink_writer_1.persistJobPayload)({ jobInput, result, budget, jsonReport });
|
|
305
|
+
await (0, sink_writer_1.persistRunnerPayload)({ jobInput, result });
|
|
272
306
|
if (budget.overBudget && opts.failOnBudget) {
|
|
273
307
|
process.exitCode = 1;
|
|
274
308
|
}
|
|
@@ -312,4 +346,20 @@ program
|
|
|
312
346
|
.action(async (cmdOpts) => {
|
|
313
347
|
await (0, init_1.runInit)({ assumeYes: Boolean(cmdOpts.yes) });
|
|
314
348
|
});
|
|
349
|
+
// `gitgreen migrate` — applies output-integration database migrations.
// --scope accepts job, runner, or all (default); "all" fans out to both
// concrete scopes. Invalid scopes report an error and set a non-zero exit
// code without throwing.
program
    .command('migrate')
    .description('Run output-integration database migrations')
    .option('--scope <scope>', 'Scope to migrate (job|runner|all)', 'all')
    .action(async (opts) => {
    const rawScope = String(opts.scope || 'all').toLowerCase();
    // Expand "all"; anything other than job/runner yields an empty list,
    // which is treated as invalid input below.
    const scopes = rawScope === 'all' ? ['job', 'runner'] : (rawScope === 'job' || rawScope === 'runner') ? [rawScope] : [];
    if (!scopes.length) {
        console.error((0, kleur_1.red)('Invalid scope. Use job, runner, or all.'));
        process.exitCode = 1;
        return;
    }
    // Sequential on purpose: scopes may share a database target.
    for (const scope of scopes) {
        await runMigrationsForScope(scope);
    }
});
|
|
315
365
|
program.parseAsync(process.argv);
|
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.insertRunnerRow = exports.insertJobRow = exports.resolveAllSinkConfigs = exports.resolveSinkConfig = void 0;
|
|
7
|
+
const promise_1 = __importDefault(require("mysql2/promise"));
|
|
8
|
+
const pg_1 = require("pg");
|
|
9
|
+
const kleur_1 = require("kleur");
|
|
10
|
+
// Reduces a user-supplied host string to a bare host: trims whitespace,
// strips any leading "scheme://", and drops everything from the first
// path/query/fragment separator onward. Falsy input is returned untouched.
const normalizeHost = (input) => {
    if (!input) {
        return input;
    }
    const stripped = input.trim().replace(/^[a-zA-Z]+:\/\//, '');
    const [hostOnly] = stripped.split(/[/?#]/);
    return hostOnly;
};
|
|
17
|
+
// Translates a libpq-style sslmode into the `ssl` option understood by the
// `pg` client (ultimately forwarded to Node's tls.connect):
//  - unset / 'disable'  -> undefined (plain, non-TLS connection)
//  - 'verify-full'      -> verify the CA chain AND the hostname
//  - 'verify-ca'        -> verify the CA chain but skip the hostname check
//  - anything else ('require', 'prefer', ...) -> TLS without verification
// BUGFIX: 'verify-ca' previously fell through to { rejectUnauthorized: false },
// i.e. no certificate verification at all, contradicting libpq semantics.
const buildPgSslConfig = (mode) => {
    if (!mode || mode === 'disable')
        return undefined;
    if (mode === 'verify-full')
        return { rejectUnauthorized: true };
    if (mode === 'verify-ca')
        // Chain must validate; returning undefined from checkServerIdentity
        // is the documented way to accept any hostname.
        return { rejectUnauthorized: true, checkServerIdentity: () => undefined };
    return { rejectUnauthorized: false };
};
|
|
24
|
+
// Reads an environment variable, treating empty strings (and other falsy
// values) as "not set" by returning undefined.
const loadEnv = (key) => {
    const value = process.env[key];
    return value ? value : undefined;
};
|
|
30
|
+
// Builds a MySQL sink config from GITGREEN_{JOB|RUNNER}_MYSQL_* environment
// variables. Returns undefined unless every required variable (host,
// username, password, database, table) is present. Port defaults to 3306.
const buildMysqlConfig = (scope) => {
    const prefix = `${scope === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER'}_MYSQL`;
    const host = loadEnv(`${prefix}_HOST`);
    const username = loadEnv(`${prefix}_USERNAME`);
    const password = loadEnv(`${prefix}_PASSWORD`);
    const database = loadEnv(`${prefix}_DATABASE`);
    const table = loadEnv(`${prefix}_TABLE`);
    if ([host, username, password, database, table].some(value => !value)) {
        return undefined;
    }
    return {
        driver: 'mysql',
        host: normalizeHost(host),
        port: Number(loadEnv(`${prefix}_PORT`) || 3306),
        username,
        password,
        database,
        table,
        // Only job sinks carry a companion timeseries table.
        timeseriesTable: scope === 'job' ? `${table}_timeseries` : undefined,
        scope
    };
};
|
|
52
|
+
// Builds a Postgres sink config from GITGREEN_{JOB|RUNNER}_POSTGRES_* env
// vars. Returns undefined unless host, username, password, database, and
// table are all present. Port defaults to 5432, schema to 'public'; the SSL
// mode is passed through for later translation into a TLS config.
const buildPostgresConfig = (scope) => {
    const prefix = `${scope === 'job' ? 'GITGREEN_JOB' : 'GITGREEN_RUNNER'}_POSTGRES`;
    const host = loadEnv(`${prefix}_HOST`);
    const username = loadEnv(`${prefix}_USERNAME`);
    const password = loadEnv(`${prefix}_PASSWORD`);
    const database = loadEnv(`${prefix}_DATABASE`);
    const table = loadEnv(`${prefix}_TABLE`);
    if ([host, username, password, database, table].some(value => !value)) {
        return undefined;
    }
    return {
        driver: 'postgres',
        host: normalizeHost(host),
        port: Number(loadEnv(`${prefix}_PORT`) || 5432),
        username,
        password,
        database,
        table,
        // Only job sinks carry a companion timeseries table.
        timeseriesTable: scope === 'job' ? `${table}_timeseries` : undefined,
        schema: loadEnv(`${prefix}_SCHEMA`) || 'public',
        sslMode: loadEnv(`${prefix}_SSLMODE`),
        scope
    };
};
|
|
78
|
+
// Resolves the sink config for `scope`. With an explicit driver only that
// driver's env vars are considered; with no driver, Postgres wins over MySQL.
const resolveSinkConfig = (scope, driver) => {
    switch (driver) {
        case 'postgres':
            return buildPostgresConfig(scope);
        case 'mysql':
            return buildMysqlConfig(scope);
        default:
            return buildPostgresConfig(scope) || buildMysqlConfig(scope);
    }
};
exports.resolveSinkConfig = resolveSinkConfig;
|
|
86
|
+
// Returns every sink config available for `scope` (Postgres first, then
// MySQL). An empty array means no sink env vars are fully configured.
const resolveAllSinkConfigs = (scope) => {
    return [buildPostgresConfig(scope), buildMysqlConfig(scope)].filter(Boolean);
};
exports.resolveAllSinkConfigs = resolveAllSinkConfigs;
|
|
97
|
+
// Runs `action`, retrying up to `attempts` times with a fixed `delayMs`
// pause between tries. Returns the first successful result; rethrows the
// last error once all attempts are exhausted.
const insertWithRetry = async (action, attempts = 3, delayMs = 2000) => {
    let failure;
    let remaining = attempts;
    while (remaining > 0) {
        remaining -= 1;
        try {
            return await action();
        }
        catch (err) {
            failure = err;
            if (remaining > 0) {
                await new Promise(resolve => setTimeout(resolve, delayMs));
            }
        }
    }
    throw failure;
};
|
|
112
|
+
// Column map for the job summary table: [column_name, getter] pairs.
// Each getter pulls one typed value from the flattened job row; missing
// values become SQL NULL. The order here defines the INSERT column order.
const jobColumns = [
    // Where the job ran.
    ['provider', (row) => row.provider ?? null],
    ['region', (row) => row.region ?? null],
    ['machine_type', (row) => row.machineType ?? null],
    // Sample counts for the collected timeseries.
    ['cpu_points', (row) => row.cpuPoints ?? null],
    ['ram_points', (row) => row.ramPoints ?? null],
    ['runtime_seconds', (row) => row.runtimeSeconds ?? null],
    // Emissions breakdown produced by the calculator.
    ['total_emissions', (row) => row.totalEmissions ?? null],
    ['cpu_emissions', (row) => row.cpuEmissions ?? null],
    ['ram_emissions', (row) => row.ramEmissions ?? null],
    ['scope3_emissions', (row) => row.scope3Emissions ?? null],
    ['carbon_intensity', (row) => row.carbonIntensity ?? null],
    ['pue', (row) => row.pue ?? null],
    // Budget tracking; over_budget is stored only when it is a real boolean.
    ['carbon_budget', (row) => row.carbonBudget ?? null],
    ['over_budget', (row) => typeof row.overBudget === 'boolean' ? row.overBudget : null],
    // GitLab CI context for the job.
    ['gitlab_project_id', (row) => row.gitlabProjectId ?? null],
    ['gitlab_pipeline_id', (row) => row.gitlabPipelineId ?? null],
    ['gitlab_job_id', (row) => row.gitlabJobId ?? null],
    ['gitlab_job_name', (row) => row.gitlabJobName ?? null],
    // Runner identity.
    ['runner_id', (row) => row.runnerId ?? null],
    ['runner_description', (row) => row.runnerDescription ?? null],
    ['runner_tags', (row) => row.runnerTags ?? null],
    ['runner_version', (row) => row.runnerVersion ?? null],
    ['runner_revision', (row) => row.runnerRevision ?? null],
    // Full structured payload, serialized as JSON for later inspection.
    ['payload', (row) => JSON.stringify(row.payload)]
];
|
|
138
|
+
// Column map for the runner inventory table: [column_name, getter] pairs.
// Same convention as jobColumns — missing values map to SQL NULL and the
// array order defines the INSERT column order.
const runnerColumns = [
    // Runner identity and build info.
    ['runner_id', (row) => row.runnerId ?? null],
    ['runner_description', (row) => row.runnerDescription ?? null],
    ['runner_version', (row) => row.runnerVersion ?? null],
    ['runner_revision', (row) => row.runnerRevision ?? null],
    ['runner_platform', (row) => row.runnerPlatform ?? null],
    ['runner_architecture', (row) => row.runnerArchitecture ?? null],
    ['runner_executor', (row) => row.runnerExecutor ?? null],
    ['runner_tags', (row) => row.runnerTags ?? null],
    // Where the runner is hosted.
    ['machine_type', (row) => row.machineType ?? null],
    ['provider', (row) => row.provider ?? null],
    ['region', (row) => row.region ?? null],
    // Cloud-specific instance identifiers (GCP / AWS).
    ['gcp_project_id', (row) => row.gcpProjectId ?? null],
    ['gcp_instance_id', (row) => row.gcpInstanceId ?? null],
    ['gcp_zone', (row) => row.gcpZone ?? null],
    ['aws_region', (row) => row.awsRegion ?? null],
    ['aws_instance_id', (row) => row.awsInstanceId ?? null],
    // Snapshot of the most recent job this runner executed.
    ['last_job_machine_type', (row) => row.lastJobMachineType ?? null],
    ['last_job_region', (row) => row.lastJobRegion ?? null],
    ['last_job_provider', (row) => row.lastJobProvider ?? null],
    ['last_job_runtime_seconds', (row) => row.lastJobRuntimeSeconds ?? null],
    ['last_job_total_emissions', (row) => row.lastJobTotalEmissions ?? null],
    ['last_job_recorded_at', (row) => row.lastJobRecordedAt ?? null],
    // Full structured payload, serialized as JSON.
    ['payload', (row) => JSON.stringify(row.payload)]
];
|
|
163
|
+
// Inserts one summary row — and, for job sinks, its timeseries rows — into
// the MySQL sink within a single transaction. The connection is opened per
// call and always closed in the finally block.
const runMysqlInsert = async (config, columns, row, timeseries) => {
    const connection = await promise_1.default.createConnection({
        host: config.host,
        port: config.port,
        user: config.username,
        password: config.password,
        database: config.database
    });
    try {
        await connection.beginTransaction();
        // Build the parameterized INSERT from the column map; identifiers are
        // backtick-quoted, values go through '?' placeholders.
        const colNames = columns.map(([name]) => `\`${name}\``).join(', ');
        const placeholders = columns.map(() => '?').join(', ');
        const values = columns.map(([, getter]) => getter(row));
        const [result] = await connection.query(`INSERT INTO \`${config.table}\` (${colNames}) VALUES (${placeholders})`, values);
        // Auto-increment id of the summary row; timeseries rows reference it.
        const insertedId = result.insertId;
        if (timeseries && timeseries.length > 0) {
            const tsTable = config.timeseriesTable || `${config.table}_timeseries`;
            // mysql2 bulk-insert form: VALUES ? expands a nested array of rows.
            const tsValues = timeseries.map(point => [insertedId, point.metric, point.timestamp, point.value]);
            await connection.query(`INSERT INTO \`${tsTable}\` (job_id, metric, ts, value) VALUES ?`, [tsValues]);
        }
        await connection.commit();
    }
    finally {
        // NOTE(review): no explicit rollback on error — relies on connection
        // teardown discarding the uncommitted transaction; confirm intended.
        await connection.end();
    }
};
|
|
189
|
+
// Inserts one summary row — and, for job sinks, its timeseries rows — into
// the Postgres sink within a single transaction. Rolls back on any failure
// and always closes the client.
const runPostgresInsert = async (config, columns, row, timeseries) => {
    const schema = config.schema || 'public';
    const client = new pg_1.Client({
        host: config.host,
        port: config.port,
        user: config.username,
        password: config.password,
        database: config.database,
        ssl: buildPgSslConfig(config.sslMode)
    });
    await client.connect();
    try {
        await client.query('BEGIN');
        // Parameterized INSERT built from the column map; identifiers are
        // double-quoted, values use $1..$n placeholders.
        const colNames = columns.map(([name]) => `"${name}"`).join(', ');
        const placeholders = columns.map((_, idx) => `$${idx + 1}`).join(', ');
        const values = columns.map(([, getter]) => getter(row));
        // RETURNING id fetches the new summary row's key for the FK below.
        const insertResult = await client.query(`INSERT INTO "${schema}"."${config.table}" (${colNames}) VALUES (${placeholders}) RETURNING id`, values);
        const jobId = insertResult.rows[0]?.id;
        if (jobId && timeseries && timeseries.length > 0) {
            const tsTableName = config.timeseriesTable || `${config.table}_timeseries`;
            const tsTable = `"${schema}"."${tsTableName}"`;
            // Multi-row VALUES list: four positional params per point.
            const valueFragments = [];
            const params = [];
            timeseries.forEach((point, index) => {
                const base = index * 4;
                valueFragments.push(`($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4})`);
                params.push(jobId, point.metric, point.timestamp, point.value);
            });
            await client.query(`INSERT INTO ${tsTable} (job_id, metric, ts, value) VALUES ${valueFragments.join(', ')}`, params);
        }
        await client.query('COMMIT');
    }
    catch (err) {
        // Best-effort rollback; the original error is what matters.
        await client.query('ROLLBACK').catch(() => { });
        throw err;
    }
    finally {
        await client.end();
    }
};
|
|
229
|
+
// Best-effort write: dispatches to the driver-specific insert (with retries)
// and logs the outcome. Failures are reported but deliberately NOT rethrown
// so a sink outage can never fail the CI job itself.
const writeStructuredRow = async (config, columns, row, timeseries) => {
    try {
        if (config.driver === 'mysql') {
            await insertWithRetry(() => runMysqlInsert(config, columns, row, timeseries));
        }
        else {
            await insertWithRetry(() => runPostgresInsert(config, columns, row, timeseries));
        }
        console.log((0, kleur_1.green)(`Saved ${config.scope} payload to ${config.driver} sink (${config.database}.${config.table})`));
    }
    catch (err) {
        // Intentional swallow: log and continue (see note above).
        console.log((0, kleur_1.red)(`Failed to write ${config.scope} payload: ${err?.message || err}`));
    }
};
|
|
243
|
+
// Persists one job summary row (plus optional timeseries rows) into the sink.
const insertJobRow = (config, row, timeseries) => writeStructuredRow(config, jobColumns, row, timeseries);
exports.insertJobRow = insertJobRow;
// Persists one runner inventory snapshot into the sink.
const insertRunnerRow = (config, row) => writeStructuredRow(config, runnerColumns, row);
exports.insertRunnerRow = insertRunnerRow;
|
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.runMigrations = void 0;
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const promise_1 = __importDefault(require("mysql2/promise"));
|
|
10
|
+
const pg_1 = require("pg");
|
|
11
|
+
const kleur_1 = require("kleur");
|
|
12
|
+
// Strips any leading "scheme://" plus trailing path/query/fragment pieces
// from a host string so only the bare host (possibly with port) remains.
// Falsy input passes through unchanged.
const normalizeHost = (input) => {
    if (!input)
        return input;
    return input.trim().replace(/^[a-zA-Z]+:\/\//, '').split(/[/?#]/)[0];
};
|
|
19
|
+
// Translates a libpq-style sslmode into the `ssl` option for the `pg` client:
//  - unset / 'disable'  -> undefined (plain connection)
//  - 'verify-full'      -> verify CA chain AND hostname
//  - 'verify-ca'        -> verify CA chain, skip the hostname check
//  - anything else      -> TLS without certificate verification
// BUGFIX: 'verify-ca' previously mapped to { rejectUnauthorized: false },
// i.e. no certificate verification at all, contradicting libpq semantics.
const buildPgSslConfig = (mode) => {
    if (!mode || mode === 'disable')
        return undefined;
    if (mode === 'verify-full')
        return { rejectUnauthorized: true };
    if (mode === 'verify-ca')
        // Accept any hostname while still validating the certificate chain.
        return { rejectUnauthorized: true, checkServerIdentity: () => undefined };
    return { rejectUnauthorized: false };
};
|
|
26
|
+
// Directory shipped with the package holding the raw .sql migrations
// (file names follow `<scope>_<driver>_<id>_<label>.sql`,
//  e.g. job_postgres_001_init.sql).
const migrationsDir = path_1.default.join(__dirname, '..', '..', '..', 'migrations');
// Loads every migration for a scope/driver pair, sorted by file name, and
// returns { id, name, sql } records.
// BUGFIX: the id is the numeric segment immediately AFTER the
// `<scope>_<driver>_` prefix. The previous `Number(file.split('_')[0])`
// parsed the scope string (e.g. 'job') and produced NaN for every id,
// breaking both the applied-migration set lookup and the INT id insert
// into gitgreen_migrations.
const loadMigrations = (scope, driver) => {
    const prefix = `${scope}_${driver}_`;
    const files = fs_1.default.readdirSync(migrationsDir)
        .filter(file => file.endsWith('.sql') && file.startsWith(prefix))
        .sort();
    return files.map(file => {
        const id = Number(file.slice(prefix.length).split('_')[0]);
        if (!Number.isInteger(id)) {
            // Fail loudly rather than tracking a NaN id in the metadata table.
            throw new Error(`Cannot parse migration id from file name: ${file}`);
        }
        return {
            id,
            name: file,
            sql: fs_1.default.readFileSync(path_1.default.join(migrationsDir, file), 'utf8')
        };
    });
};
|
|
41
|
+
// Creates the gitgreen_migrations bookkeeping table (scope + id primary key,
// applied_at timestamp) if it does not already exist, using the appropriate
// DDL dialect for the config's driver. Opens and closes its own connection.
const ensureMetadataTable = async (config) => {
    if (config.driver === 'mysql') {
        const connection = await promise_1.default.createConnection({
            host: config.host,
            port: config.port,
            user: config.username,
            password: config.password,
            database: config.database
        });
        try {
            await connection.query(`CREATE TABLE IF NOT EXISTS gitgreen_migrations (
        scope VARCHAR(32) NOT NULL,
        id INT NOT NULL,
        name VARCHAR(255) NOT NULL,
        applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (scope, id)
      ) ENGINE=InnoDB`);
        }
        finally {
            await connection.end();
        }
    }
    else {
        // Postgres variant; SSL settings are honored here too.
        const client = new pg_1.Client({
            host: config.host,
            port: config.port,
            user: config.username,
            password: config.password,
            database: config.database,
            ssl: buildPgSslConfig(config.sslMode)
        });
        await client.connect();
        try {
            await client.query('CREATE TABLE IF NOT EXISTS gitgreen_migrations (scope TEXT NOT NULL, id INT NOT NULL, name TEXT NOT NULL, applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), PRIMARY KEY (scope, id))');
        }
        finally {
            await client.end();
        }
    }
};
|
|
81
|
+
// Returns the set of migration ids already recorded in gitgreen_migrations
// for this config's scope. Opens and closes its own connection; assumes
// ensureMetadataTable has already created the bookkeeping table.
const fetchAppliedMigrations = async (config) => {
    const applied = new Set();
    if (config.driver === 'mysql') {
        const connection = await promise_1.default.createConnection({
            host: config.host,
            port: config.port,
            user: config.username,
            password: config.password,
            database: config.database
        });
        try {
            const [rows] = await connection.query('SELECT id FROM gitgreen_migrations WHERE scope = ?', [config.scope]);
            for (const row of rows) {
                applied.add(row.id);
            }
        }
        finally {
            await connection.end();
        }
    }
    else {
        const client = new pg_1.Client({
            host: config.host,
            port: config.port,
            user: config.username,
            password: config.password,
            database: config.database,
            ssl: buildPgSslConfig(config.sslMode)
        });
        await client.connect();
        try {
            const result = await client.query('SELECT id FROM gitgreen_migrations WHERE scope = $1', [config.scope]);
            // pg may return INT columns as strings depending on type parsing,
            // so coerce to Number for consistent Set membership.
            result.rows.forEach(row => applied.add(Number(row.id)));
        }
        finally {
            await client.end();
        }
    }
    return applied;
};
|
|
121
|
+
// Substitutes the {{TABLE}} / {{TIMESERIES_TABLE}} placeholders in a
// migration's SQL with identifiers quoted for the target driver:
// schema-qualified double quotes for Postgres, backticks for MySQL.
const renderSql = (migration, config) => {
    const quoteIdentifier = (name) => {
        if (config.driver !== 'postgres') {
            return `\`${name}\``;
        }
        return config.schema ? `"${config.schema}"."${name}"` : `"${name}"`;
    };
    const qualifiedTable = quoteIdentifier(config.table);
    const qualifiedTimeseries = quoteIdentifier(config.timeseriesTable || `${config.table}_timeseries`);
    return migration.sql
        .replace(/\{\{TABLE\}\}/g, qualifiedTable)
        .replace(/\{\{TIMESERIES_TABLE\}\}/g, qualifiedTimeseries);
};
|
|
138
|
+
// Applies a single migration against MySQL: runs the rendered SQL (which may
// contain multiple statements) and records the migration in
// gitgreen_migrations, all in one transaction. Rolls back on failure and
// always closes the connection.
const runMysqlMigration = async (config, migration) => {
    const connection = await promise_1.default.createConnection({
        host: config.host,
        port: config.port,
        user: config.username,
        password: config.password,
        database: config.database,
        // Migration files can contain several statements separated by ';'.
        multipleStatements: true
    });
    try {
        await connection.beginTransaction();
        const sql = renderSql(migration, config);
        await connection.query(sql);
        await connection.query('INSERT INTO gitgreen_migrations (scope, id, name) VALUES (?, ?, ?)', [config.scope, migration.id, migration.name]);
        await connection.commit();
    }
    catch (err) {
        // NOTE(review): MySQL DDL commits implicitly, so a rollback may not
        // undo the schema change itself — only the bookkeeping insert.
        await connection.rollback().catch(() => { });
        throw err;
    }
    finally {
        await connection.end();
    }
};
|
|
162
|
+
// Applies a single migration against Postgres: runs the rendered SQL and
// records the migration in gitgreen_migrations inside one transaction
// (Postgres DDL is transactional, so a failure leaves no partial schema).
// Always closes the client.
const runPostgresMigration = async (config, migration) => {
    const client = new pg_1.Client({
        host: config.host,
        port: config.port,
        user: config.username,
        password: config.password,
        database: config.database,
        ssl: buildPgSslConfig(config.sslMode)
    });
    await client.connect();
    try {
        await client.query('BEGIN');
        const sql = renderSql(migration, config);
        await client.query(sql);
        await client.query('INSERT INTO gitgreen_migrations (scope, id, name) VALUES ($1, $2, $3)', [config.scope, migration.id, migration.name]);
        await client.query('COMMIT');
    }
    catch (err) {
        // Best-effort rollback; rethrow the original error.
        await client.query('ROLLBACK').catch(() => { });
        throw err;
    }
    finally {
        await client.end();
    }
};
|
|
187
|
+
// Entry point: ensures the bookkeeping table exists, determines which
// migrations for this config's scope/driver have not yet been applied, and
// applies them in file-name order. Stops (and rethrows) at the first
// failure so later migrations never run on a broken schema.
const runMigrations = async (config) => {
    // Normalize the host once so every connection below uses the same value.
    const normalizedHost = normalizeHost(config.host);
    const driver = config.driver;
    const finalConfig = { ...config, host: normalizedHost };
    await ensureMetadataTable(finalConfig);
    const applied = await fetchAppliedMigrations(finalConfig);
    const migrations = loadMigrations(finalConfig.scope, driver).filter(mig => !applied.has(mig.id));
    if (!migrations.length) {
        console.log((0, kleur_1.green)('No database migrations to run.'));
        return;
    }
    for (const migration of migrations) {
        try {
            if (driver === 'mysql') {
                await runMysqlMigration(finalConfig, migration);
            }
            else {
                await runPostgresMigration(finalConfig, migration);
            }
            console.log((0, kleur_1.green)(`Applied migration ${migration.name}`));
        }
        catch (err) {
            console.log((0, kleur_1.red)(`Failed to apply migration ${migration.name}: ${err?.message || err}`));
            throw err;
        }
    }
};
exports.runMigrations = runMigrations;
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.persistRunnerPayload = exports.persistJobPayload = void 0;
|
|
4
|
+
const data_sink_1 = require("./data-sink");
|
|
5
|
+
const migration_runner_1 = require("./migration-runner");
|
|
6
|
+
// Parses runner tags from CI_RUNNER_TAGS (falling back to RUNNER_TAGS),
// splitting on commas and/or whitespace and dropping empty entries.
const readRunnerTags = () => {
    const raw = process.env.CI_RUNNER_TAGS || process.env.RUNNER_TAGS || '';
    const tags = [];
    for (const piece of raw.split(/[,\s]+/)) {
        if (piece) {
            tags.push(piece);
        }
    }
    return tags;
};
|
|
10
|
+
// Tracks sink targets whose migrations have already run in this process.
const migrationsRan = new Set();
// Builds a stable cache key identifying one physical sink target.
// Schema participates only for Postgres (defaulting to 'public').
const keyForConfig = (config) => {
    const schemaPart = config.driver === 'postgres' ? (config.schema || 'public') : '';
    const { driver, host, port, database, table } = config;
    return `${driver}:${host}:${port}:${database}:${table}:${schemaPart}`;
};
|
|
22
|
+
// Runs migrations for a sink config at most once per process, keyed by the
// physical target (driver/host/port/database/table/schema). The key is only
// marked after runMigrations succeeds, so a failed attempt is retried on the
// next call.
const ensureMigrationsForConfig = async (config) => {
    const key = keyForConfig(config);
    if (migrationsRan.has(key))
        return;
    await (0, migration_runner_1.runMigrations)({
        driver: config.driver,
        host: config.host,
        port: config.port,
        username: config.username,
        password: config.password,
        database: config.database,
        // Postgres-only settings are dropped for other drivers.
        schema: config.driver === 'postgres' ? config.schema : undefined,
        sslMode: config.driver === 'postgres' ? config.sslMode : undefined,
        table: config.table,
        timeseriesTable: config.timeseriesTable,
        scope: config.scope
    });
    migrationsRan.add(key);
};
|
|
41
|
+
// Persists one job's carbon calculation into the configured job sink.
// No-op when no job sink env vars are set. Ensures migrations have run for
// the target, builds a full JSON payload (GitLab CI context + calculator
// output), flattens the typed summary columns, and writes the row plus the
// CPU/RAM timeseries via insertJobRow.
const persistJobPayload = async (options) => {
    const config = (0, data_sink_1.resolveSinkConfig)('job');
    if (!config)
        return;
    await ensureMigrationsForConfig(config);
    // Full structured payload stored verbatim in the `payload` JSON column.
    const payload = {
        ingestedAt: new Date().toISOString(),
        gitlab: {
            projectId: process.env.CI_PROJECT_ID,
            pipelineId: process.env.CI_PIPELINE_ID,
            jobId: process.env.CI_JOB_ID,
            jobName: process.env.CI_JOB_NAME,
            runnerId: process.env.CI_RUNNER_ID,
            runnerDescription: process.env.CI_RUNNER_DESCRIPTION,
            runnerTags: readRunnerTags(),
            runnerVersion: process.env.CI_RUNNER_VERSION,
            runnerRevision: process.env.CI_RUNNER_REVISION
        },
        job: {
            provider: options.jobInput.provider,
            region: options.jobInput.region,
            machineType: options.jobInput.machineType,
            cpuDataPoints: options.jobInput.cpuTimeseries.length,
            ramDataPoints: options.jobInput.ramUsedTimeseries.length
        },
        emissions: options.result,
        budget: options.budget,
        report: options.jsonReport
    };
    // One flat list of (metric, ts, value) rows for the timeseries table.
    // `toTimeseries` is defined later in this file.
    const timeseriesRows = [
        ...toTimeseries('cpu', options.jobInput.cpuTimeseries),
        ...toTimeseries('ram_used', options.jobInput.ramUsedTimeseries),
        ...toTimeseries('ram_size', options.jobInput.ramSizeTimeseries)
    ];
    // Flattened summary row matching jobColumns in data-sink.js.
    await (0, data_sink_1.insertJobRow)(config, {
        provider: options.jobInput.provider,
        region: options.jobInput.region,
        machineType: options.jobInput.machineType,
        cpuPoints: options.jobInput.cpuTimeseries.length,
        ramPoints: options.jobInput.ramUsedTimeseries.length,
        runtimeSeconds: Math.round(options.result.runtimeHours * 3600),
        totalEmissions: options.result.totalEmissions,
        cpuEmissions: options.result.cpuEmissions,
        ramEmissions: options.result.ramEmissions,
        scope3Emissions: options.result.scope3Emissions,
        carbonIntensity: options.result.carbonIntensity,
        pue: options.result.pue,
        carbonBudget: options.budget.limitGrams,
        overBudget: options.budget.overBudget,
        gitlabProjectId: payload.gitlab.projectId,
        gitlabPipelineId: payload.gitlab.pipelineId,
        gitlabJobId: payload.gitlab.jobId,
        gitlabJobName: payload.gitlab.jobName,
        runnerId: payload.gitlab.runnerId,
        runnerDescription: payload.gitlab.runnerDescription,
        // Tags are stored as a comma-joined string in the summary column.
        runnerTags: (payload.gitlab.runnerTags || []).join(','),
        runnerVersion: payload.gitlab.runnerVersion,
        runnerRevision: payload.gitlab.runnerRevision,
        payload
    }, timeseriesRows);
};
exports.persistJobPayload = persistJobPayload;
|
|
103
|
+
/**
 * Persists runner metadata plus a summary of the most recent job to the
 * configured runner sink. No-op when no runner sink is configured in the
 * environment.
 */
const persistRunnerPayload = async (options) => {
    const config = (0, data_sink_1.resolveSinkConfig)('runner');
    if (!config) {
        return;
    }
    await ensureMigrationsForConfig(config);
    const env = process.env;
    const { jobInput, result } = options;
    // An explicit MACHINE_TYPE env override wins over the job's detected type.
    const machineType = env.MACHINE_TYPE || jobInput.machineType;
    const runner = {
        id: env.CI_RUNNER_ID,
        description: env.CI_RUNNER_DESCRIPTION,
        version: env.CI_RUNNER_VERSION,
        revision: env.CI_RUNNER_REVISION,
        platform: env.CI_RUNNER_PLATFORM,
        architecture: env.CI_RUNNER_ARCH,
        executor: env.CI_RUNNER_EXECUTOR,
        tags: readRunnerTags(),
        machineType,
        provider: jobInput.provider,
        region: jobInput.region,
        gcp: {
            projectId: env.GCP_PROJECT_ID,
            instanceId: env.GCP_INSTANCE_ID,
            zone: env.GCP_ZONE
        },
        aws: {
            region: env.AWS_REGION,
            instanceId: env.AWS_INSTANCE_ID
        }
    };
    const lastJob = {
        machineType: jobInput.machineType,
        provider: jobInput.provider,
        region: jobInput.region,
        runtimeSeconds: Math.round(result.runtimeHours * 3600),
        totalEmissions: result.totalEmissions,
        recordedAt: new Date().toISOString()
    };
    const payload = {
        ingestedAt: new Date().toISOString(),
        runner,
        gitlab: {
            projectId: env.CI_PROJECT_ID,
            pipelineId: env.CI_PIPELINE_ID,
            jobId: env.CI_JOB_ID
        },
        lastJob
    };
    await (0, data_sink_1.insertRunnerRow)(config, {
        runnerId: runner.id,
        runnerDescription: runner.description,
        runnerVersion: runner.version,
        runnerRevision: runner.revision,
        runnerPlatform: runner.platform,
        runnerArchitecture: runner.architecture,
        runnerExecutor: runner.executor,
        // Tags are stored as a comma-joined string in the flat column.
        runnerTags: (runner.tags || []).join(','),
        machineType: runner.machineType,
        provider: runner.provider,
        region: runner.region,
        gcpProjectId: runner.gcp.projectId,
        gcpInstanceId: runner.gcp.instanceId,
        gcpZone: runner.gcp.zone,
        awsRegion: runner.aws.region,
        awsInstanceId: runner.aws.instanceId,
        lastJobMachineType: lastJob.machineType,
        lastJobRegion: lastJob.region,
        lastJobProvider: lastJob.provider,
        lastJobRuntimeSeconds: lastJob.runtimeSeconds,
        lastJobTotalEmissions: lastJob.totalEmissions,
        lastJobRecordedAt: lastJob.recordedAt,
        payload
    });
};
exports.persistRunnerPayload = persistRunnerPayload;
|
|
175
|
+
/**
 * Normalizes a raw metric series into sink timeseries rows: each row carries
 * the metric name, an ISO-8601 timestamp, and a numeric value (non-number
 * values are coerced with Number()).
 */
const toTimeseries = (metric, points) => points.map(({ timestamp, value }) => {
    const numericValue = typeof value === 'number' ? value : Number(value);
    return {
        metric,
        timestamp: new Date(timestamp).toISOString(),
        value: numericValue
    };
});
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
-- GitGreen job sink schema (MySQL).
-- {{TABLE}} and {{TIMESERIES_TABLE}} are placeholders substituted by the
-- migration runner before execution.

-- One row per CI job: emissions summary plus GitLab/runner metadata.
-- The full structured payload is also kept verbatim in the JSON column.
CREATE TABLE IF NOT EXISTS {{TABLE}} (
  id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
  ingested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
  provider VARCHAR(32),
  region VARCHAR(128),
  machine_type VARCHAR(128),
  cpu_points INT,
  ram_points INT,
  runtime_seconds INT,
  total_emissions DOUBLE,
  cpu_emissions DOUBLE,
  ram_emissions DOUBLE,
  scope3_emissions DOUBLE,
  carbon_intensity DOUBLE,
  pue DOUBLE,
  carbon_budget DOUBLE,
  over_budget BOOLEAN,
  gitlab_project_id BIGINT,
  gitlab_pipeline_id BIGINT,
  gitlab_job_id BIGINT,
  gitlab_job_name VARCHAR(255),
  runner_id VARCHAR(255),
  runner_description VARCHAR(255),
  runner_tags TEXT,
  runner_version VARCHAR(128),
  runner_revision VARCHAR(128),
  payload JSON NOT NULL,
  PRIMARY KEY (id)
) ENGINE=InnoDB;

-- Per-metric sample points (e.g. cpu / ram_used / ram_size) for a job,
-- linked to the parent row and removed with it via ON DELETE CASCADE.
CREATE TABLE IF NOT EXISTS {{TIMESERIES_TABLE}} (
  id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
  job_id BIGINT UNSIGNED NOT NULL,
  metric VARCHAR(64) NOT NULL,
  ts DATETIME NOT NULL,
  value DOUBLE NOT NULL,
  PRIMARY KEY (id),
  KEY job_id_idx (job_id),
  CONSTRAINT {{TIMESERIES_TABLE}}_fk FOREIGN KEY (job_id) REFERENCES {{TABLE}}(id) ON DELETE CASCADE
) ENGINE=InnoDB;
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
-- GitGreen job sink schema (PostgreSQL).
-- {{TABLE}} and {{TIMESERIES_TABLE}} are placeholders substituted by the
-- migration runner before execution.

-- One row per CI job: emissions summary plus GitLab/runner metadata.
-- The full structured payload is also kept verbatim in the JSONB column.
CREATE TABLE IF NOT EXISTS {{TABLE}} (
  id BIGSERIAL PRIMARY KEY,
  ingested_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  provider TEXT,
  region TEXT,
  machine_type TEXT,
  cpu_points INT,
  ram_points INT,
  runtime_seconds INT,
  total_emissions DOUBLE PRECISION,
  cpu_emissions DOUBLE PRECISION,
  ram_emissions DOUBLE PRECISION,
  scope3_emissions DOUBLE PRECISION,
  carbon_intensity DOUBLE PRECISION,
  pue DOUBLE PRECISION,
  carbon_budget DOUBLE PRECISION,
  over_budget BOOLEAN,
  gitlab_project_id BIGINT,
  gitlab_pipeline_id BIGINT,
  gitlab_job_id BIGINT,
  gitlab_job_name TEXT,
  runner_id TEXT,
  runner_description TEXT,
  runner_tags TEXT,
  runner_version TEXT,
  runner_revision TEXT,
  payload JSONB NOT NULL
);

-- Per-metric sample points (e.g. cpu / ram_used / ram_size) for a job,
-- linked to the parent row and removed with it via ON DELETE CASCADE.
CREATE TABLE IF NOT EXISTS {{TIMESERIES_TABLE}} (
  id BIGSERIAL PRIMARY KEY,
  job_id BIGINT NOT NULL REFERENCES {{TABLE}}(id) ON DELETE CASCADE,
  metric TEXT NOT NULL,
  ts TIMESTAMPTZ NOT NULL,
  value DOUBLE PRECISION NOT NULL
);
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
-- GitGreen runner sink schema (MySQL).
-- {{TABLE}} is a placeholder substituted by the migration runner before
-- execution.

-- One row per recorded runner snapshot: runner identity/host metadata,
-- cloud (GCP/AWS) instance details, and a summary of the last observed job.
-- The full structured payload is also kept verbatim in the JSON column.
CREATE TABLE IF NOT EXISTS {{TABLE}} (
  id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
  ingested_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
  runner_id VARCHAR(255),
  runner_description VARCHAR(255),
  runner_version VARCHAR(128),
  runner_revision VARCHAR(128),
  runner_platform VARCHAR(255),
  runner_architecture VARCHAR(128),
  runner_executor VARCHAR(128),
  runner_tags TEXT,
  machine_type VARCHAR(128),
  provider VARCHAR(32),
  region VARCHAR(128),
  gcp_project_id VARCHAR(128),
  gcp_instance_id VARCHAR(128),
  gcp_zone VARCHAR(64),
  aws_region VARCHAR(64),
  aws_instance_id VARCHAR(128),
  last_job_machine_type VARCHAR(128),
  last_job_region VARCHAR(128),
  last_job_provider VARCHAR(32),
  last_job_runtime_seconds INT,
  last_job_total_emissions DOUBLE,
  last_job_recorded_at TIMESTAMP NULL,
  payload JSON NOT NULL,
  PRIMARY KEY (id)
) ENGINE=InnoDB;
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
-- GitGreen runner sink schema (PostgreSQL).
-- {{TABLE}} is a placeholder substituted by the migration runner before
-- execution.

-- One row per recorded runner snapshot: runner identity/host metadata,
-- cloud (GCP/AWS) instance details, and a summary of the last observed job.
-- The full structured payload is also kept verbatim in the JSONB column.
CREATE TABLE IF NOT EXISTS {{TABLE}} (
  id BIGSERIAL PRIMARY KEY,
  ingested_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  runner_id TEXT,
  runner_description TEXT,
  runner_version TEXT,
  runner_revision TEXT,
  runner_platform TEXT,
  runner_architecture TEXT,
  runner_executor TEXT,
  runner_tags TEXT,
  machine_type TEXT,
  provider TEXT,
  region TEXT,
  gcp_project_id TEXT,
  gcp_instance_id TEXT,
  gcp_zone TEXT,
  aws_region TEXT,
  aws_instance_id TEXT,
  last_job_machine_type TEXT,
  last_job_region TEXT,
  last_job_provider TEXT,
  last_job_runtime_seconds INT,
  last_job_total_emissions DOUBLE PRECISION,
  last_job_recorded_at TIMESTAMPTZ,
  payload JSONB NOT NULL
);
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "gitgreen",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.1.1",
|
|
4
4
|
"description": "GitGreen CLI for carbon reporting in GitLab pipelines (GCP/AWS)",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"types": "dist/index.d.ts",
|
|
@@ -30,6 +30,7 @@
|
|
|
30
30
|
"files": [
|
|
31
31
|
"dist",
|
|
32
32
|
"data",
|
|
33
|
+
"migrations",
|
|
33
34
|
"README.md"
|
|
34
35
|
],
|
|
35
36
|
"dependencies": {
|