duoops 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +22 -0
- package/README.md +181 -0
- package/bin/dev.cmd +3 -0
- package/bin/dev.js +7 -0
- package/bin/run.cmd +3 -0
- package/bin/run.js +8 -0
- package/dist/commands/act.d.ts +12 -0
- package/dist/commands/act.js +61 -0
- package/dist/commands/ask.d.ts +8 -0
- package/dist/commands/ask.js +22 -0
- package/dist/commands/init.d.ts +5 -0
- package/dist/commands/init.js +97 -0
- package/dist/commands/job/logs.d.ts +13 -0
- package/dist/commands/job/logs.js +26 -0
- package/dist/commands/measure/calculate.d.ts +19 -0
- package/dist/commands/measure/calculate.js +208 -0
- package/dist/commands/measure/component.d.ts +5 -0
- package/dist/commands/measure/component.js +23 -0
- package/dist/commands/measure/seed.d.ts +5 -0
- package/dist/commands/measure/seed.js +62 -0
- package/dist/commands/pipelines/list.d.ts +14 -0
- package/dist/commands/pipelines/list.js +62 -0
- package/dist/commands/pipelines/show.d.ts +13 -0
- package/dist/commands/pipelines/show.js +68 -0
- package/dist/commands/portal.d.ts +8 -0
- package/dist/commands/portal.js +139 -0
- package/dist/commands/undo.d.ts +5 -0
- package/dist/commands/undo.js +35 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1 -0
- package/dist/lib/ai/agent.d.ts +6 -0
- package/dist/lib/ai/agent.js +139 -0
- package/dist/lib/ai/model.d.ts +2 -0
- package/dist/lib/ai/model.js +22 -0
- package/dist/lib/ai/tools/editing.d.ts +3 -0
- package/dist/lib/ai/tools/editing.js +61 -0
- package/dist/lib/ai/tools/filesystem.d.ts +4 -0
- package/dist/lib/ai/tools/filesystem.js +44 -0
- package/dist/lib/ai/tools/gitlab.d.ts +4 -0
- package/dist/lib/ai/tools/gitlab.js +81 -0
- package/dist/lib/ai/tools/measure.d.ts +3 -0
- package/dist/lib/ai/tools/measure.js +26 -0
- package/dist/lib/config.d.ts +18 -0
- package/dist/lib/config.js +72 -0
- package/dist/lib/gitlab/client.d.ts +6 -0
- package/dist/lib/gitlab/client.js +18 -0
- package/dist/lib/gitlab/index.d.ts +6 -0
- package/dist/lib/gitlab/index.js +49 -0
- package/dist/lib/gitlab/provider.d.ts +14 -0
- package/dist/lib/gitlab/provider.js +72 -0
- package/dist/lib/gitlab/types.d.ts +34 -0
- package/dist/lib/gitlab/types.js +5 -0
- package/dist/lib/integrations/bigquery-sink.d.ts +12 -0
- package/dist/lib/integrations/bigquery-sink.js +47 -0
- package/dist/lib/logger.d.ts +2 -0
- package/dist/lib/logger.js +11 -0
- package/dist/lib/measure/bigquery-service.d.ts +2 -0
- package/dist/lib/measure/bigquery-service.js +54 -0
- package/dist/lib/measure/carbon-calculator.d.ts +13 -0
- package/dist/lib/measure/carbon-calculator.js +125 -0
- package/dist/lib/measure/cli-utils.d.ts +2 -0
- package/dist/lib/measure/cli-utils.js +107 -0
- package/dist/lib/measure/intensity-provider.d.ts +6 -0
- package/dist/lib/measure/intensity-provider.js +34 -0
- package/dist/lib/measure/power-profile-repository.d.ts +19 -0
- package/dist/lib/measure/power-profile-repository.js +129 -0
- package/dist/lib/measure/types.d.ts +137 -0
- package/dist/lib/measure/types.js +1 -0
- package/dist/lib/measure/zone-mapper.d.ts +16 -0
- package/dist/lib/measure/zone-mapper.js +104 -0
- package/dist/lib/state.d.ts +4 -0
- package/dist/lib/state.js +21 -0
- package/dist/portal/assets/index-BP8FwWqA.css +1 -0
- package/dist/portal/assets/index-MU6EBerh.js +188 -0
- package/dist/portal/duoops.svg +4 -0
- package/dist/portal/index.html +24 -0
- package/dist/portal/vite.svg +1 -0
- package/oclif.manifest.json +415 -0
- package/package.json +103 -0
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { createGitlabClient } from './client.js';
|
|
2
|
+
/**
 * GitLab implementation of PipelineProvider.
 * Wraps @gitbeaker/rest and normalizes responses to our domain types.
 */
export class GitLabPipelineProvider {
    #client;

    constructor(client) {
        this.#client = client ?? createGitlabClient();
    }

    /** Some gitbeaker versions nest the payload under `.data`; unwrap it. */
    static #payload(raw) {
        return raw?.data ?? raw;
    }

    /** Coerce a possibly `.data`-wrapped response into an array of records. */
    static #records(raw) {
        const data = Array.isArray(raw) ? raw : raw.data ?? raw;
        return Array.isArray(data) ? data : [data];
    }

    async getJobTrace(projectId, jobId) {
        const raw = await this.#client.Jobs.showLog(projectId, jobId);
        const trace = GitLabPipelineProvider.#payload(raw);
        return typeof trace === 'string' ? trace : String(trace ?? '');
    }

    async getPipeline(projectId, pipelineId) {
        const raw = await this.#client.Pipelines.show(projectId, pipelineId);
        return normalizePipeline(GitLabPipelineProvider.#payload(raw));
    }

    async listJobs(projectId, pipelineId) {
        const raw = await this.#client.Jobs.all(projectId, { pipelineId });
        return GitLabPipelineProvider.#records(raw).map((item) => normalizeJob(item));
    }

    async listPipelines(projectId, options) {
        const raw = await this.#client.Pipelines.all(projectId, {
            perPage: options?.perPage ?? 20,
            ref: options?.ref,
            status: options?.status,
        });
        return GitLabPipelineProvider.#records(raw).map((item) => normalizePipeline(item));
    }
}
|
|
38
|
+
/**
 * Map a raw GitLab pipeline object (snake_case) to our Pipeline shape.
 * Required string fields default to ''; optional fields stay undefined
 * when the API omits them.
 */
function normalizePipeline(raw) {
    const text = (value) => String(value ?? '');
    const optional = (value, convert) => (value === undefined || value === null ? undefined : convert(value));
    return {
        createdAt: text(raw.created_at),
        duration: optional(raw.duration, Number),
        id: Number(raw.id),
        ref: text(raw.ref),
        sha: text(raw.sha),
        status: text(raw.status),
        updatedAt: optional(raw.updated_at, String),
        webUrl: optional(raw.web_url, String),
    };
}
|
|
56
|
+
/**
 * Map a raw GitLab job object (snake_case) to our Job shape.
 * Timestamps/duration stay undefined when absent; name/stage/status
 * default to ''.
 */
function normalizeJob(raw) {
    const optional = (value, convert) => (value === undefined || value === null ? undefined : convert(value));
    return {
        createdAt: optional(raw.created_at, String),
        duration: optional(raw.duration, Number),
        finishedAt: optional(raw.finished_at, String),
        id: Number(raw.id),
        name: String(raw.name ?? ''),
        stage: String(raw.stage ?? ''),
        status: String(raw.status ?? ''),
    };
}
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
/**
 * Shared types for pipeline and job data.
 * Keeps the data layer independent of GitLab API specifics.
 */
export interface ListOptions {
    /** Page size; the GitLab provider defaults to 20 when omitted. */
    perPage?: number;
    /** Filter pipelines by git ref (branch or tag name). */
    ref?: string;
    /** Filter pipelines by status string (provider-defined values). */
    status?: string;
}
/** A CI pipeline, normalized from the provider's raw API shape. */
export interface Pipeline {
    /** Creation timestamp as reported by the provider; '' when missing. */
    createdAt: string;
    /** Pipeline duration when reported (units as reported by the provider). */
    duration?: number;
    id: number;
    /** Git ref the pipeline ran for; '' when missing. */
    ref: string;
    /** Commit SHA the pipeline ran against; '' when missing. */
    sha: string;
    /** Pipeline status; '' when missing. */
    status: string;
    updatedAt?: string;
    /** Link to the pipeline in the provider's web UI, when available. */
    webUrl?: string;
}
/** A single CI job within a pipeline. */
export interface Job {
    createdAt?: string;
    duration?: number;
    finishedAt?: string;
    id: number;
    /** Job name; '' when missing. */
    name: string;
    /** Pipeline stage the job belongs to; '' when missing. */
    stage: string;
    status: string;
}
/**
 * Abstraction over a CI backend so callers stay provider-agnostic
 * (implemented by GitLabPipelineProvider).
 */
export interface PipelineProvider {
    /** Raw log text of a job; '' when the backend returns nothing. */
    getJobTrace(projectId: string, jobId: number): Promise<string>;
    getPipeline(projectId: string, pipelineId: number): Promise<Pipeline>;
    /** Jobs belonging to one pipeline. */
    listJobs(projectId: string, pipelineId: number): Promise<Job[]>;
    /** Recent pipelines, optionally filtered/paged via options. */
    listPipelines(projectId: string, options?: ListOptions): Promise<Pipeline[]>;
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { BudgetResult, EmissionsResult, JobInput } from '../measure/types.js';
/** Everything needed to persist one measured CI job. */
export interface JobPayloadOptions {
    /** Budget evaluation result. NOTE(review): not flattened into the row by the visible sink implementation — confirm intended. */
    budget: BudgetResult;
    /** Job description (provider, machine type, region, utilization timeseries). */
    jobInput: JobInput;
    /** Full report payload, stored verbatim as JSON alongside flattened columns. */
    jsonReport: Record<string, unknown>;
    /** Computed emissions figures for the job. */
    result: EmissionsResult;
}
/** Location of the BigQuery sink table. */
export interface BigQuerySinkConfig {
    dataset: string;
    table: string;
}
/**
 * Insert a single measured-job row into the configured BigQuery table.
 * Rejects (after logging) when the insert fails.
 */
export declare const persistJobToBigQuery: (options: JobPayloadOptions, config: BigQuerySinkConfig) => Promise<void>;
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { BigQuery } from '@google-cloud/bigquery';
|
|
2
|
+
// Read an environment variable; undefined when unset.
const getSafeEnv = (key) => {
    return process.env[key];
};
/** Arithmetic mean of `point.value` over a timeseries; 0 for an empty series. */
const calculateAverage = (points) => {
    if (points.length === 0) {
        return 0;
    }
    let total = 0;
    for (const point of points) {
        total += point.value;
    }
    return total / points.length;
};
|
|
9
|
+
/**
 * Persist one measured CI job into the configured BigQuery table.
 * The full JSON report is stored verbatim in `payload_json` next to the
 * flattened metric columns. Failures are logged and rethrown so the
 * caller can surface the failed sink.
 */
export const persistJobToBigQuery = async (options, config) => {
    const { jobInput, jsonReport, result } = options;
    const table = new BigQuery().dataset(config.dataset).table(config.table);
    // CI_* variables are provided by GitLab CI; leave the column undefined
    // (NULL) when running outside a pipeline.
    const numericEnv = (key) => {
        const value = getSafeEnv(key);
        return value ? Number(value) : undefined;
    };
    // BigQuery columns use snake_case by convention.
    /* eslint-disable camelcase */
    const row = {
        carbon_intensity: result.carbonIntensity,
        cpu_emissions_g: result.cpuEmissions,
        cpu_utilization_avg: calculateAverage(jobInput.cpuTimeseries),
        energy_kwh: result.cpuEnergyKwh + result.ramEnergyKwh,
        gitlab_job_id: numericEnv('CI_JOB_ID'),
        gitlab_job_name: getSafeEnv('CI_JOB_NAME'),
        gitlab_pipeline_id: numericEnv('CI_PIPELINE_ID'),
        gitlab_project_id: numericEnv('CI_PROJECT_ID'),
        ingested_at: BigQuery.timestamp(new Date()),
        machine_type: jobInput.machineType,
        payload_json: JSON.stringify(jsonReport),
        provider: jobInput.provider,
        pue: result.pue,
        ram_emissions_g: result.ramEmissions,
        ram_utilization_avg: calculateAverage(jobInput.ramUsedTimeseries),
        region: jobInput.region,
        runtime_seconds: Math.round(result.runtimeHours * 3600),
        scope3_emissions_g: result.scope3Emissions,
        total_emissions_g: result.totalEmissions,
    };
    /* eslint-enable camelcase */
    try {
        await table.insert(row);
    }
    catch (error) {
        console.error('Failed to insert into BigQuery:', error);
        // Sinks are best-effort, but callers still need to see the failure.
        throw error;
    }
};
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import pino from 'pino';
|
|
2
|
+
// Human-readable, colorized logs everywhere except production, where
// plain JSON output suits log aggregation.
const isDev = process.env.NODE_ENV !== 'production';
const devTransport = isDev
    ? {
        transport: {
            options: { colorize: true },
            target: 'pino-pretty',
        },
    }
    : {};
export const logger = pino({
    level: process.env.LOG_LEVEL ?? 'info',
    ...devTransport,
});
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { BigQuery } from '@google-cloud/bigquery';
|
|
2
|
+
import { configManager } from '../config.js';
|
|
3
|
+
/**
 * Fetch the most recent measurement rows for one GitLab project from the
 * configured BigQuery sink table, newest first.
 *
 * Dataset/table names come from local config and are interpolated into the
 * query text (BigQuery cannot parameterize identifiers); projectId and
 * limit are passed as query parameters.
 *
 * @param projectId GitLab project id; coerced with Number() for the query.
 * @param limit Maximum number of rows to return (default 10).
 * @returns Raw BigQuery row objects ordered by ingested_at descending.
 * @throws Error when the BigQuery sink is not configured.
 */
export async function fetchCarbonMetrics(projectId, limit = 10) {
    const config = configManager.get();
    if (!config.measure?.bigqueryDataset || !config.measure?.bigqueryTable) {
        throw new Error('BigQuery sink not configured. Run "duoops init" first.');
    }
    // Explicit config project wins over the ambient environment.
    const bigquery = new BigQuery({
        projectId: config.measure?.googleProjectId || process.env.GCP_PROJECT_ID,
    });
    // Columns chosen to drive the portal charts.
    // NOTE(review): gitlab_user_name is selected here but the visible sink
    // never writes it — confirm the table schema actually has this column.
    const query = `
    SELECT
      ingested_at,
      gitlab_job_id,
      gitlab_project_id,
      gitlab_job_name,
      gitlab_user_name,
      region,
      machine_type,
      total_emissions_g,
      energy_kwh,
      cpu_utilization_avg,
      ram_utilization_avg,
      runtime_seconds
    FROM \`${config.measure.bigqueryDataset}.${config.measure.bigqueryTable}\`
    WHERE gitlab_project_id = @projectId
    ORDER BY ingested_at DESC
    LIMIT @limit
  `;
    const [rows] = await bigquery.query({
        params: { limit, projectId: Number(projectId) },
        query,
    });
    return rows;
}
|
|
37
|
+
/**
 * List the distinct GitLab project ids that have measurement rows in the
 * configured BigQuery sink table, in ascending order.
 * @throws Error when the BigQuery sink is not configured.
 */
export async function fetchAvailableProjects() {
    const { measure } = configManager.get();
    if (!measure?.bigqueryDataset || !measure?.bigqueryTable) {
        throw new Error('BigQuery sink not configured. Run "duoops init" first.');
    }
    const bigquery = new BigQuery({
        projectId: measure?.googleProjectId || process.env.GCP_PROJECT_ID,
    });
    // Dataset/table come from local config; BigQuery cannot parameterize
    // identifiers, hence the interpolation.
    const query = `
    SELECT DISTINCT gitlab_project_id
    FROM \`${measure.bigqueryDataset}.${measure.bigqueryTable}\`
    ORDER BY gitlab_project_id
  `;
    const [rows] = await bigquery.query({ query });
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return rows.map((row) => row.gitlab_project_id);
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { IntensityProvider } from './intensity-provider.js';
import { PowerProfileRepository } from './power-profile-repository.js';
import { EmissionsResult, JobInput } from './types.js';
import { ZoneMapper } from './zone-mapper.js';
/**
 * Computes CI-job carbon emissions by integrating CPU/RAM power draw over
 * the job's utilization timeseries and applying regional grid carbon
 * intensity and datacenter PUE, plus hourly embodied (scope 3) emissions.
 */
export declare class CarbonCalculator {
    private readonly powerProfileRepository;
    private readonly zoneMapper;
    private readonly intensityProvider;
    constructor(powerProfileRepository: PowerProfileRepository, zoneMapper: ZoneMapper, intensityProvider: IntensityProvider);
    /**
     * Calculate emissions for one job.
     * Rejects when no machine profile or CPU power curve exists for the
     * job's provider/machine type.
     */
    calculate(job: JobInput): Promise<EmissionsResult>;
    private interpolatePower;
    private parseTimestamp;
}
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import CubicSpline from 'cubic-spline';
|
|
2
|
+
export class CarbonCalculator {
|
|
3
|
+
powerProfileRepository;
|
|
4
|
+
zoneMapper;
|
|
5
|
+
intensityProvider;
|
|
6
|
+
constructor(powerProfileRepository, zoneMapper, intensityProvider) {
|
|
7
|
+
this.powerProfileRepository = powerProfileRepository;
|
|
8
|
+
this.zoneMapper = zoneMapper;
|
|
9
|
+
this.intensityProvider = intensityProvider;
|
|
10
|
+
}
|
|
11
|
+
async calculate(job) {
|
|
12
|
+
const machineProfile = await this.powerProfileRepository.getMachineProfile(job.provider, job.machineType);
|
|
13
|
+
if (!machineProfile) {
|
|
14
|
+
throw new Error(`No machine profile found for ${job.machineType} on ${job.provider}`);
|
|
15
|
+
}
|
|
16
|
+
const cpuProfile = await this.powerProfileRepository.getCpuPowerProfile(job.provider, job.machineType);
|
|
17
|
+
if (!cpuProfile || cpuProfile.length === 0) {
|
|
18
|
+
throw new Error(`No CPU power profile for ${job.machineType}`);
|
|
19
|
+
}
|
|
20
|
+
const { pue, zone } = this.zoneMapper.resolve(job.provider, job.region);
|
|
21
|
+
const carbonIntensity = await this.intensityProvider.getCarbonIntensity(zone);
|
|
22
|
+
// Sort timeseries by timestamp
|
|
23
|
+
const cpuSorted = [...job.cpuTimeseries].sort((a, b) => this.parseTimestamp(a.timestamp) - this.parseTimestamp(b.timestamp));
|
|
24
|
+
const ramUsedSorted = [...job.ramUsedTimeseries].sort((a, b) => this.parseTimestamp(a.timestamp) - this.parseTimestamp(b.timestamp));
|
|
25
|
+
const DEFAULT_INTERVAL_SECONDS = 60;
|
|
26
|
+
// Integrate CPU energy over timeseries
|
|
27
|
+
let cpuEnergyKwh = 0;
|
|
28
|
+
for (let i = 0; i < cpuSorted.length; i++) {
|
|
29
|
+
const rawCpuValue = cpuSorted[i].value;
|
|
30
|
+
const cpuPercent = job.provider === 'aws'
|
|
31
|
+
? (rawCpuValue <= 1 ? rawCpuValue * 100 : rawCpuValue)
|
|
32
|
+
: rawCpuValue * 100;
|
|
33
|
+
const powerWatts = this.interpolatePower(cpuProfile, cpuPercent);
|
|
34
|
+
// Calculate interval from timestamps, or use default interval for single point
|
|
35
|
+
let intervalSeconds;
|
|
36
|
+
if (cpuSorted.length === 1) {
|
|
37
|
+
intervalSeconds = DEFAULT_INTERVAL_SECONDS;
|
|
38
|
+
}
|
|
39
|
+
else if (i < cpuSorted.length - 1) {
|
|
40
|
+
intervalSeconds = this.parseTimestamp(cpuSorted[i + 1].timestamp) - this.parseTimestamp(cpuSorted[i].timestamp);
|
|
41
|
+
}
|
|
42
|
+
else {
|
|
43
|
+
// Last point: assume same interval as previous
|
|
44
|
+
intervalSeconds = this.parseTimestamp(cpuSorted[i].timestamp) - this.parseTimestamp(cpuSorted[i - 1].timestamp);
|
|
45
|
+
}
|
|
46
|
+
cpuEnergyKwh += (powerWatts / 1000) * (intervalSeconds / 3600);
|
|
47
|
+
}
|
|
48
|
+
// Integrate RAM energy over timeseries
|
|
49
|
+
let ramEnergyKwh = 0;
|
|
50
|
+
const RAM_WATTS_PER_GB = 0.5; // Standard estimation
|
|
51
|
+
for (let i = 0; i < ramUsedSorted.length; i++) {
|
|
52
|
+
// Calculate RAM power based on total RAM of the machine (static consumption) + dynamic?
|
|
53
|
+
// Cloud Carbon Footprint methodology suggests ~0.392 Watts/GB for older DRAM, less for newer.
|
|
54
|
+
// GitGreen uses 0.5 Watts/GB as a conservative flat estimate for allocated memory.
|
|
55
|
+
// We'll stick to the GitGreen logic: Power = Total RAM GB * Watts/GB
|
|
56
|
+
// Note: GitGreen original code used ramUsedSorted values, but memory power is largely static based on allocation.
|
|
57
|
+
// However, looking at the original code:
|
|
58
|
+
// const ramUsedGb = ramUsedBytes / (1024 * 1024 * 1024);
|
|
59
|
+
// const powerWatts = ramUsedGb * RAM_WATTS_PER_GB;
|
|
60
|
+
// This implies it calculates power based on *used* memory, not *total* memory. We will replicate that.
|
|
61
|
+
const ramUsedBytes = ramUsedSorted[i].value;
|
|
62
|
+
const ramUsedGb = ramUsedBytes / (1024 * 1024 * 1024);
|
|
63
|
+
const powerWatts = ramUsedGb * RAM_WATTS_PER_GB;
|
|
64
|
+
let intervalSeconds;
|
|
65
|
+
if (ramUsedSorted.length === 1) {
|
|
66
|
+
intervalSeconds = DEFAULT_INTERVAL_SECONDS;
|
|
67
|
+
}
|
|
68
|
+
else if (i < ramUsedSorted.length - 1) {
|
|
69
|
+
intervalSeconds = this.parseTimestamp(ramUsedSorted[i + 1].timestamp) - this.parseTimestamp(ramUsedSorted[i].timestamp);
|
|
70
|
+
}
|
|
71
|
+
else {
|
|
72
|
+
intervalSeconds = this.parseTimestamp(ramUsedSorted[i].timestamp) - this.parseTimestamp(ramUsedSorted[i - 1].timestamp);
|
|
73
|
+
}
|
|
74
|
+
ramEnergyKwh += (powerWatts / 1000) * (intervalSeconds / 3600);
|
|
75
|
+
}
|
|
76
|
+
// Calculate total runtime from timeseries
|
|
77
|
+
// Find the min start time across all series
|
|
78
|
+
const startTimes = [];
|
|
79
|
+
if (cpuSorted.length > 0)
|
|
80
|
+
startTimes.push(this.parseTimestamp(cpuSorted[0].timestamp));
|
|
81
|
+
if (ramUsedSorted.length > 0)
|
|
82
|
+
startTimes.push(this.parseTimestamp(ramUsedSorted[0].timestamp));
|
|
83
|
+
const endTimes = [];
|
|
84
|
+
if (cpuSorted.length > 0)
|
|
85
|
+
endTimes.push(this.parseTimestamp(cpuSorted.at(-1).timestamp));
|
|
86
|
+
if (ramUsedSorted.length > 0)
|
|
87
|
+
endTimes.push(this.parseTimestamp(ramUsedSorted.at(-1).timestamp));
|
|
88
|
+
const firstTs = startTimes.length > 0 ? Math.min(...startTimes) : 0;
|
|
89
|
+
const lastTs = endTimes.length > 0 ? Math.max(...endTimes) : 0;
|
|
90
|
+
const durationSeconds = (lastTs > firstTs) ? (lastTs - firstTs) : DEFAULT_INTERVAL_SECONDS;
|
|
91
|
+
const runtimeHours = durationSeconds / 3600;
|
|
92
|
+
// Calculate emissions
|
|
93
|
+
const cpuEmissions = cpuEnergyKwh * pue * carbonIntensity;
|
|
94
|
+
const ramEmissions = ramEnergyKwh * pue * carbonIntensity;
|
|
95
|
+
const scope3Emissions = (machineProfile.scope3EmissionsHourly || 0) * runtimeHours;
|
|
96
|
+
const totalEmissions = cpuEmissions + ramEmissions + scope3Emissions;
|
|
97
|
+
return {
|
|
98
|
+
carbonIntensity,
|
|
99
|
+
cpuEmissions,
|
|
100
|
+
cpuEnergyKwh,
|
|
101
|
+
machineType: job.machineType,
|
|
102
|
+
provider: job.provider,
|
|
103
|
+
pue,
|
|
104
|
+
ramEmissions,
|
|
105
|
+
ramEnergyKwh,
|
|
106
|
+
region: job.region,
|
|
107
|
+
runtimeHours,
|
|
108
|
+
scope3Emissions,
|
|
109
|
+
totalEmissions,
|
|
110
|
+
};
|
|
111
|
+
}
|
|
112
|
+
interpolatePower(profile, utilization) {
|
|
113
|
+
const sorted = [...profile].sort((a, b) => a.percentage - b.percentage);
|
|
114
|
+
const exact = sorted.find((point) => point.percentage === utilization);
|
|
115
|
+
if (exact)
|
|
116
|
+
return exact.watts;
|
|
117
|
+
const percentages = sorted.map((p) => p.percentage);
|
|
118
|
+
const watts = sorted.map((p) => p.watts);
|
|
119
|
+
const spline = new CubicSpline(percentages, watts);
|
|
120
|
+
return spline.at(utilization);
|
|
121
|
+
}
|
|
122
|
+
parseTimestamp(ts) {
|
|
123
|
+
return new Date(ts).getTime() / 1000;
|
|
124
|
+
}
|
|
125
|
+
}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
/**
 * Best-effort conversion of a Date, epoch number, or date string into an
 * ISO-8601 string. Returns undefined for unparseable or unsupported input.
 */
const toIsoTimestamp = (input) => {
    let date;
    if (input instanceof Date) {
        date = input;
    }
    else if (typeof input === 'string' || typeof input === 'number') {
        date = new Date(input);
    }
    else {
        return undefined;
    }
    return Number.isNaN(date.getTime()) ? undefined : date.toISOString();
};
|
|
12
|
+
/**
 * Best-effort conversion of a number or numeric string into a finite
 * number. Returns undefined for NaN, infinities, and other types.
 */
const toNumericValue = (input) => {
    switch (typeof input) {
        case 'number': {
            return Number.isFinite(input) ? input : undefined;
        }
        case 'string': {
            const parsed = Number(input);
            return Number.isFinite(parsed) ? parsed : undefined;
        }
        default: {
            return undefined;
        }
    }
};
|
|
22
|
+
/**
 * Extract {timestamp, value} points from a GCP Cloud Monitoring
 * ListTimeSeries response. Prefers each point's interval end time and its
 * doubleValue (falling back to int64Value); skips points missing either.
 */
const parseGcpTimeseries = (raw) => {
    const { timeSeries } = raw;
    if (!Array.isArray(timeSeries)) {
        return [];
    }
    const points = [];
    for (const series of timeSeries) {
        if (!Array.isArray(series.points)) {
            continue;
        }
        for (const point of series.points) {
            const timestamp = toIsoTimestamp(point.interval?.endTime || point.interval?.startTime);
            const rawValue = point.value?.doubleValue === undefined
                ? point.value?.int64Value
                : point.value.doubleValue;
            const value = toNumericValue(rawValue);
            if (timestamp && value !== undefined) {
                points.push({ timestamp, value });
            }
        }
    }
    return points;
};
|
|
43
|
+
/**
 * Extract {timestamp, value} points from a CloudWatch GetMetricData
 * response. Only the first metric result is read; Timestamps/Values are
 * paired index-wise and unparseable pairs are skipped.
 */
const parseCloudWatchMetricData = (raw) => {
    const resultList = raw.MetricDataResults || raw.metricDataResults;
    if (!Array.isArray(resultList) || resultList.length === 0) {
        return [];
    }
    const [head] = resultList;
    const stamps = Array.isArray(head?.Timestamps) ? head.Timestamps : [];
    const samples = Array.isArray(head?.Values) ? head.Values : [];
    const count = Math.min(stamps.length, samples.length);
    const points = [];
    for (let index = 0; index < count; index++) {
        const timestamp = toIsoTimestamp(stamps[index]);
        const value = toNumericValue(samples[index]);
        if (timestamp && value !== undefined) {
            points.push({ timestamp, value });
        }
    }
    return points;
};
|
|
62
|
+
/**
 * Extract {timestamp, value} points from a flat array of entries using
 * either lowercase or capitalized field names; unparseable entries are
 * dropped.
 */
const parseLegacyTimeseries = (entries) => entries.flatMap((entry) => {
    const timestamp = toIsoTimestamp(entry.timestamp ?? entry.Timestamp);
    const value = toNumericValue(entry.value ?? entry.Value);
    return timestamp && value !== undefined ? [{ timestamp, value }] : [];
});
|
|
73
|
+
/**
 * Convert CloudWatch GetMetricStatistics `Datapoints` into
 * {timestamp, value} points, preferring Average over Maximum, Minimum,
 * then Sum; unparseable entries are dropped.
 */
const convertCloudWatchDatapoints = (entries) => entries.flatMap((entry) => {
    const timestamp = toIsoTimestamp(entry.Timestamp);
    const value = toNumericValue(entry.Average ?? entry.Maximum ?? entry.Minimum ?? entry.Sum);
    return timestamp && value !== undefined ? [{ timestamp, value }] : [];
});
|
|
84
|
+
/**
 * Read a JSON metrics export and normalize it to {timestamp, value}
 * points. Supported shapes, tried in order: GCP ListTimeSeries, CloudWatch
 * GetMetricData (wrapped or as a bare array), a flat legacy array, and
 * CloudWatch GetMetricStatistics Datapoints. Returns [] when nothing
 * matches.
 */
export const parseTimeseriesFile = (filePath) => {
    const raw = JSON.parse(fs.readFileSync(filePath, 'utf8'));
    const parsers = [
        () => parseGcpTimeseries(raw),
        () => parseCloudWatchMetricData(raw),
        () => (Array.isArray(raw) ? parseCloudWatchMetricData({ MetricDataResults: raw }) : []),
        () => (Array.isArray(raw) ? parseLegacyTimeseries(raw) : []),
        () => (Array.isArray(raw.Datapoints) ? convertCloudWatchDatapoints(raw.Datapoints) : []),
    ];
    for (const parse of parsers) {
        const points = parse();
        if (points.length > 0) {
            return points;
        }
    }
    return [];
};
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import axios from 'axios';
|
|
2
|
+
export class IntensityProvider {
|
|
3
|
+
baseUrl;
|
|
4
|
+
apiKey;
|
|
5
|
+
constructor(baseUrl = 'https://api.electricitymap.org/v3', apiKey = process.env.ELECTRICITY_MAPS_API_KEY || '') {
|
|
6
|
+
this.baseUrl = baseUrl;
|
|
7
|
+
this.apiKey = apiKey;
|
|
8
|
+
}
|
|
9
|
+
async getCarbonIntensity(zone) {
|
|
10
|
+
if (!this.apiKey) {
|
|
11
|
+
// Return global average as fallback if no API key
|
|
12
|
+
// Approx 475 gCO2/kWh global average, or use region-specific fallbacks if we had them
|
|
13
|
+
// For now, let's warn and return a static average to avoid breaking
|
|
14
|
+
console.warn('Warning: ELECTRICITY_MAPS_API_KEY not set. Using global average fallback (475 gCO2/kWh).');
|
|
15
|
+
return 475;
|
|
16
|
+
}
|
|
17
|
+
const url = `${this.baseUrl.replace(/\/$/, '')}/carbon-intensity/latest`;
|
|
18
|
+
try {
|
|
19
|
+
const response = await axios.get(url, {
|
|
20
|
+
headers: { 'auth-token': this.apiKey },
|
|
21
|
+
params: { zone },
|
|
22
|
+
});
|
|
23
|
+
const intensity = response.data.carbonIntensity;
|
|
24
|
+
if (typeof intensity !== 'number' || intensity < 0) {
|
|
25
|
+
throw new Error(`Invalid carbon intensity response for ${zone}`);
|
|
26
|
+
}
|
|
27
|
+
return intensity;
|
|
28
|
+
}
|
|
29
|
+
catch (error) {
|
|
30
|
+
console.warn(`Warning: Failed to fetch carbon intensity for ${zone}. Using global average fallback (475 gCO2/kWh). Error: ${error instanceof Error ? error.message : String(error)}`);
|
|
31
|
+
return 475;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import { CloudProvider, MachineProfile, PowerPoint } from './types.js';
/**
 * Lookup service for machine specs and CPU power curves, keyed by cloud
 * provider and machine type. Data is presumably loaded from JSON files
 * under `dataDir` (see loadJsonFile/loadMachineData) — implementation not
 * in view; confirm against the .js file.
 */
export declare class PowerProfileRepository {
    private readonly dataDir;
    private awsMachines;
    private cpuPhysicalSpecs;
    private cpuProfiles;
    private gcpMachines;
    constructor(dataDir: string);
    /** CPU utilization→watts curve for a machine type; null when unknown. */
    getCpuPowerProfile(provider: CloudProvider, machineType: string): Promise<null | PowerPoint[]>;
    /** Full machine profile for a machine type; null when unknown. */
    getMachineProfile(provider: CloudProvider, machineType: string): Promise<MachineProfile | null>;
    /** Machine type identifiers known for a provider. */
    listMachines(provider: CloudProvider): string[];
    private findPhysicalSpec;
    private loadCpuPhysicalSpecs;
    private loadCpuProfiles;
    private loadJsonFile;
    private loadMachineData;
    private normalizeNumber;
    private normalizePowerPoints;
}
|