greenmining 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. greenmining/__init__.py +11 -29
  2. greenmining/__main__.py +9 -3
  3. greenmining/__version__.py +2 -2
  4. greenmining/analyzers/__init__.py +3 -7
  5. greenmining/analyzers/code_diff_analyzer.py +151 -61
  6. greenmining/analyzers/qualitative_analyzer.py +15 -81
  7. greenmining/analyzers/statistical_analyzer.py +8 -69
  8. greenmining/analyzers/temporal_analyzer.py +16 -72
  9. greenmining/config.py +105 -58
  10. greenmining/controllers/__init__.py +1 -5
  11. greenmining/controllers/repository_controller.py +153 -94
  12. greenmining/energy/__init__.py +13 -0
  13. greenmining/energy/base.py +165 -0
  14. greenmining/energy/codecarbon_meter.py +146 -0
  15. greenmining/energy/rapl.py +157 -0
  16. greenmining/gsf_patterns.py +4 -26
  17. greenmining/models/__init__.py +1 -5
  18. greenmining/models/aggregated_stats.py +4 -4
  19. greenmining/models/analysis_result.py +4 -4
  20. greenmining/models/commit.py +5 -5
  21. greenmining/models/repository.py +5 -5
  22. greenmining/presenters/__init__.py +1 -5
  23. greenmining/presenters/console_presenter.py +24 -24
  24. greenmining/services/__init__.py +10 -6
  25. greenmining/services/commit_extractor.py +8 -152
  26. greenmining/services/data_aggregator.py +45 -175
  27. greenmining/services/data_analyzer.py +9 -202
  28. greenmining/services/github_fetcher.py +210 -323
  29. greenmining/services/github_graphql_fetcher.py +361 -0
  30. greenmining/services/local_repo_analyzer.py +387 -0
  31. greenmining/services/reports.py +33 -137
  32. greenmining/utils.py +21 -149
  33. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/METADATA +69 -173
  34. greenmining-1.0.5.dist-info/RECORD +37 -0
  35. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/WHEEL +1 -1
  36. greenmining/analyzers/ml_feature_extractor.py +0 -512
  37. greenmining/analyzers/nlp_analyzer.py +0 -365
  38. greenmining/cli.py +0 -471
  39. greenmining/main.py +0 -37
  40. greenmining-1.0.3.dist-info/RECORD +0 -36
  41. greenmining-1.0.3.dist-info/entry_points.txt +0 -2
  42. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/licenses/LICENSE +0 -0
  43. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/top_level.txt +0 -0
@@ -1,20 +1,115 @@
1
- """Repository Controller - Handles repository fetching operations."""
2
-
3
- from github import Github, GithubException
1
+ # Repository Controller - Handles repository fetching operations.
2
+
3
+ # ============================================================================
4
+ # OLD REST API IMPLEMENTATION (DEADCODE - REPLACED WITH GRAPHQL)
5
+ # ============================================================================
6
+ # from github import Github, GithubException
7
+ # from tqdm import tqdm
8
+ #
9
+ # from greenmining.config import Config
10
+ # from greenmining.models.repository import Repository
11
+ # from greenmining.utils import colored_print, load_json_file, save_json_file
12
+ #
13
+ #
14
+ # class RepositoryController:
15
+ # # Controller for GitHub repository operations.
16
+ #
17
+ # def __init__(self, config: Config):
18
+ # # Initialize controller with configuration.
19
+ # self.config = config
20
+ # self.github = Github(config.GITHUB_TOKEN)
21
+ # ============================================================================
22
+
23
+ # NEW GRAPHQL IMPLEMENTATION (5-10x faster)
4
24
  from tqdm import tqdm
5
25
 
6
26
  from greenmining.config import Config
7
27
  from greenmining.models.repository import Repository
28
+ from greenmining.services.github_graphql_fetcher import GitHubGraphQLFetcher
8
29
  from greenmining.utils import colored_print, load_json_file, save_json_file
9
30
 
10
31
 
11
32
  class RepositoryController:
12
- """Controller for GitHub repository operations."""
33
+ # Controller for GitHub repository operations using GraphQL API.
13
34
 
14
35
  def __init__(self, config: Config):
15
- """Initialize controller with configuration."""
36
+ # Initialize controller with configuration.
16
37
  self.config = config
17
- self.github = Github(config.GITHUB_TOKEN)
38
+ self.graphql_fetcher = GitHubGraphQLFetcher(config.GITHUB_TOKEN)
39
+
40
+ # ============================================================================
41
+ # OLD REST API METHOD (DEADCODE - 10x slower, high rate limit cost)
42
+ # ============================================================================
43
+ # def fetch_repositories(
44
+ # self,
45
+ # max_repos: int = None,
46
+ # min_stars: int = None,
47
+ # languages: list[str] = None,
48
+ # keywords: str = None,
49
+ # created_after: str = None,
50
+ # created_before: str = None,
51
+ # pushed_after: str = None,
52
+ # pushed_before: str = None,
53
+ # ) -> list[Repository]:
54
+ # # Fetch repositories from GitHub using REST API (slow).
55
+ # max_repos = max_repos or self.config.MAX_REPOS
56
+ # min_stars = min_stars or self.config.MIN_STARS
57
+ # languages = languages or self.config.SUPPORTED_LANGUAGES
58
+ # keywords = keywords or "microservices"
59
+ #
60
+ # colored_print(f" Fetching up to {max_repos} repositories...", "cyan")
61
+ # colored_print(f" Keywords: {keywords}", "cyan")
62
+ # colored_print(f" Filters: min_stars={min_stars}", "cyan")
63
+ #
64
+ # if created_after or created_before:
65
+ # colored_print(
66
+ # f" Created: {created_after or 'any'} to {created_before or 'any'}", "cyan"
67
+ # )
68
+ # if pushed_after or pushed_before:
69
+ # colored_print(f" Pushed: {pushed_after or 'any'} to {pushed_before or 'any'}", "cyan")
70
+ #
71
+ # # Build search query with temporal filters
72
+ # query = self._build_temporal_query(
73
+ # keywords, min_stars, created_after, created_before, pushed_after, pushed_before
74
+ # )
75
+ #
76
+ # try:
77
+ # # Execute search (REST API - many requests)
78
+ # search_results = self.github.search_repositories(
79
+ # query=query, sort="stars", order="desc"
80
+ # )
81
+ #
82
+ # total_found = search_results.totalCount
83
+ # colored_print(f" Found {total_found} repositories", "green")
84
+ #
85
+ # # Fetch repositories (1 request per repo = slow)
86
+ # repositories = []
87
+ # with tqdm(total=min(max_repos, total_found), desc="Fetching", unit="repo") as pbar:
88
+ # for idx, repo in enumerate(search_results):
89
+ # if idx >= max_repos:
90
+ # break
91
+ #
92
+ # try:
93
+ # repo_model = Repository.from_github_repo(repo, idx + 1)
94
+ # repositories.append(repo_model)
95
+ # pbar.update(1)
96
+ # except GithubException as e:
97
+ # colored_print(f" Error: {repo.full_name}: {e}", "yellow")
98
+ # continue
99
+ #
100
+ # # Save to file
101
+ # repo_dicts = [r.to_dict() for r in repositories]
102
+ # save_json_file(repo_dicts, self.config.REPOS_FILE)
103
+ #
104
+ # colored_print(f" Fetched {len(repositories)} repositories", "green")
105
+ # colored_print(f" Saved to: {self.config.REPOS_FILE}", "cyan")
106
+ #
107
+ # return repositories
108
+ #
109
+ # except Exception as e:
110
+ # colored_print(f" Error fetching repositories: {e}", "red")
111
+ # raise
112
+ # ============================================================================
18
113
 
19
114
  def fetch_repositories(
20
115
  self,
@@ -27,27 +122,13 @@ class RepositoryController:
27
122
  pushed_after: str = None,
28
123
  pushed_before: str = None,
29
124
  ) -> list[Repository]:
30
- """Fetch repositories from GitHub.
31
-
32
- Args:
33
- max_repos: Maximum number of repositories to fetch
34
- min_stars: Minimum stars filter
35
- languages: List of programming languages to filter
36
- keywords: Custom search keywords (default: "microservices")
37
- created_after: Repository created after date (YYYY-MM-DD)
38
- created_before: Repository created before date (YYYY-MM-DD)
39
- pushed_after: Repository pushed after date (YYYY-MM-DD)
40
- pushed_before: Repository pushed before date (YYYY-MM-DD)
41
-
42
- Returns:
43
- List of Repository model instances
44
- """
125
+ # Fetch repositories from GitHub using GraphQL API (5-10x faster).
45
126
  max_repos = max_repos or self.config.MAX_REPOS
46
127
  min_stars = min_stars or self.config.MIN_STARS
47
128
  languages = languages or self.config.SUPPORTED_LANGUAGES
48
129
  keywords = keywords or "microservices"
49
130
 
50
- colored_print(f"🔍 Fetching up to {max_repos} repositories...", "cyan")
131
+ colored_print(f"🚀 Fetching up to {max_repos} repositories (GraphQL API)...", "cyan")
51
132
  colored_print(f" Keywords: {keywords}", "cyan")
52
133
  colored_print(f" Filters: min_stars={min_stars}", "cyan")
53
134
 
@@ -58,85 +139,70 @@ class RepositoryController:
58
139
  if pushed_after or pushed_before:
59
140
  colored_print(f" Pushed: {pushed_after or 'any'} to {pushed_before or 'any'}", "cyan")
60
141
 
61
- # Build search query with temporal filters
62
- query = self._build_temporal_query(
63
- keywords, min_stars, created_after, created_before, pushed_after, pushed_before
64
- )
65
-
66
142
  try:
67
- # Execute search
68
- search_results = self.github.search_repositories(
69
- query=query, sort="stars", order="desc"
143
+ # Use GraphQL API (much faster!)
144
+ repositories = self.graphql_fetcher.search_repositories(
145
+ keywords=keywords,
146
+ max_repos=max_repos,
147
+ min_stars=min_stars,
148
+ languages=languages,
149
+ created_after=created_after,
150
+ created_before=created_before,
151
+ pushed_after=pushed_after,
152
+ pushed_before=pushed_before,
70
153
  )
71
154
 
72
- total_found = search_results.totalCount
73
- colored_print(f" Found {total_found} repositories", "green")
74
-
75
- # Fetch repositories
76
- repositories = []
77
- with tqdm(total=min(max_repos, total_found), desc="Fetching", unit="repo") as pbar:
78
- for idx, repo in enumerate(search_results):
79
- if idx >= max_repos:
80
- break
81
-
82
- try:
83
- repo_model = Repository.from_github_repo(repo, idx + 1)
84
- repositories.append(repo_model)
85
- pbar.update(1)
86
- except GithubException as e:
87
- colored_print(f" Error: {repo.full_name}: {e}", "yellow")
88
- continue
89
-
90
155
  # Save to file
91
156
  repo_dicts = [r.to_dict() for r in repositories]
92
157
  save_json_file(repo_dicts, self.config.REPOS_FILE)
93
158
 
94
- colored_print(f" Fetched {len(repositories)} repositories", "green")
159
+ colored_print(f" Fetched {len(repositories)} repositories", "green")
95
160
  colored_print(f" Saved to: {self.config.REPOS_FILE}", "cyan")
161
+ colored_print(f" API: GraphQL (5-10x faster than REST)", "green")
96
162
 
97
163
  return repositories
98
164
 
99
165
  except Exception as e:
100
- colored_print(f" Error fetching repositories: {e}", "red")
166
+ colored_print(f" Error fetching repositories: {e}", "red")
101
167
  raise
102
168
 
103
- def _build_temporal_query(
104
- self,
105
- keywords: str,
106
- min_stars: int,
107
- created_after: str = None,
108
- created_before: str = None,
109
- pushed_after: str = None,
110
- pushed_before: str = None,
111
- ) -> str:
112
- """Build GitHub search query with temporal constraints."""
113
- query_parts = [keywords, f"stars:>={min_stars}"]
114
-
115
- # Temporal filters
116
- if created_after and created_before:
117
- query_parts.append(f"created:{created_after}..{created_before}")
118
- elif created_after:
119
- query_parts.append(f"created:>={created_after}")
120
- elif created_before:
121
- query_parts.append(f"created:<={created_before}")
122
-
123
- if pushed_after and pushed_before:
124
- query_parts.append(f"pushed:{pushed_after}..{pushed_before}")
125
- elif pushed_after:
126
- query_parts.append(f"pushed:>={pushed_after}")
127
- elif pushed_before:
128
- query_parts.append(f"pushed:<={pushed_before}")
129
-
130
- query = " ".join(query_parts)
131
- colored_print(f" Query: {query}", "cyan")
132
- return query
169
+ # ============================================================================
170
+ # OLD REST API HELPER (DEADCODE - handled by GraphQL fetcher now)
171
+ # ============================================================================
172
+ # def _build_temporal_query(
173
+ # self,
174
+ # keywords: str,
175
+ # min_stars: int,
176
+ # created_after: str = None,
177
+ # created_before: str = None,
178
+ # pushed_after: str = None,
179
+ # pushed_before: str = None,
180
+ # ) -> str:
181
+ # # Build GitHub search query with temporal constraints.
182
+ # query_parts = [keywords, f"stars:>={min_stars}"]
183
+ #
184
+ # # Temporal filters
185
+ # if created_after and created_before:
186
+ # query_parts.append(f"created:{created_after}..{created_before}")
187
+ # elif created_after:
188
+ # query_parts.append(f"created:>={created_after}")
189
+ # elif created_before:
190
+ # query_parts.append(f"created:<={created_before}")
191
+ #
192
+ # if pushed_after and pushed_before:
193
+ # query_parts.append(f"pushed:{pushed_after}..{pushed_before}")
194
+ # elif pushed_after:
195
+ # query_parts.append(f"pushed:>={pushed_after}")
196
+ # elif pushed_before:
197
+ # query_parts.append(f"pushed:<={pushed_before}")
198
+ #
199
+ # query = " ".join(query_parts)
200
+ # colored_print(f" Query: {query}", "cyan")
201
+ # return query
202
+ # ============================================================================
133
203
 
134
204
  def load_repositories(self) -> list[Repository]:
135
- """Load repositories from file.
136
-
137
- Returns:
138
- List of Repository model instances
139
- """
205
+ # Load repositories from file.
140
206
  if not self.config.REPOS_FILE.exists():
141
207
  raise FileNotFoundError(f"No repositories file found at {self.config.REPOS_FILE}")
142
208
 
@@ -144,14 +210,7 @@ class RepositoryController:
144
210
  return [Repository.from_dict(r) for r in repo_dicts]
145
211
 
146
212
  def get_repository_stats(self, repositories: list[Repository]) -> dict:
147
- """Get statistics about fetched repositories.
148
-
149
- Args:
150
- repositories: List of Repository instances
151
-
152
- Returns:
153
- Dictionary with statistics
154
- """
213
+ # Get statistics about fetched repositories.
155
214
  if not repositories:
156
215
  return {}
157
216
 
@@ -164,7 +223,7 @@ class RepositoryController:
164
223
  }
165
224
 
166
225
  def _count_by_language(self, repositories: list[Repository]) -> dict:
167
- """Count repositories by language."""
226
+ # Count repositories by language.
168
227
  counts = {}
169
228
  for repo in repositories:
170
229
  lang = repo.language or "Unknown"
@@ -0,0 +1,13 @@
1
+ # Energy measurement module for GreenMining.
2
+
3
+ from .base import EnergyMeter, EnergyMetrics, EnergyBackend
4
+ from .rapl import RAPLEnergyMeter
5
+ from .codecarbon_meter import CodeCarbonMeter
6
+
7
+ __all__ = [
8
+ "EnergyMeter",
9
+ "EnergyMetrics",
10
+ "EnergyBackend",
11
+ "RAPLEnergyMeter",
12
+ "CodeCarbonMeter",
13
+ ]
@@ -0,0 +1,165 @@
1
+ # Base classes and interfaces for energy measurement.
2
+
3
+ from __future__ import annotations
4
+
5
+ from abc import ABC, abstractmethod
6
+ from dataclasses import dataclass, field
7
+ from datetime import datetime
8
+ from enum import Enum
9
+ from typing import Any, Dict, List, Optional, Callable
10
+ import time
11
+
12
+
13
+ class EnergyBackend(Enum):
14
+ # Supported energy measurement backends.
15
+
16
+ RAPL = "rapl" # Intel RAPL (Linux)
17
+ CODECARBON = "codecarbon" # CodeCarbon (cross-platform)
18
+ CPU_METER = "cpu_meter" # CPU Energy Meter
19
+
20
+
21
+ @dataclass
22
+ class EnergyMetrics:
23
+ # Energy measurement results from a profiling session.
24
+
25
+ # Core energy metrics
26
+ joules: float = 0.0 # Total energy consumed
27
+ watts_avg: float = 0.0 # Average power draw
28
+ watts_peak: float = 0.0 # Peak power draw
29
+ duration_seconds: float = 0.0 # Measurement duration
30
+
31
+ # Component-specific energy (if available)
32
+ cpu_energy_joules: float = 0.0 # CPU-specific energy
33
+ dram_energy_joules: float = 0.0 # Memory energy
34
+ gpu_energy_joules: Optional[float] = None # GPU energy if available
35
+
36
+ # Carbon footprint (if carbon tracking enabled)
37
+ carbon_grams: Optional[float] = None # CO2 equivalent in grams
38
+ carbon_intensity: Optional[float] = None # gCO2/kWh of grid
39
+
40
+ # Metadata
41
+ backend: str = ""
42
+ start_time: Optional[datetime] = None
43
+ end_time: Optional[datetime] = None
44
+
45
+ def to_dict(self) -> Dict[str, Any]:
46
+ # Convert to dictionary.
47
+ return {
48
+ "joules": self.joules,
49
+ "watts_avg": self.watts_avg,
50
+ "watts_peak": self.watts_peak,
51
+ "duration_seconds": self.duration_seconds,
52
+ "cpu_energy_joules": self.cpu_energy_joules,
53
+ "dram_energy_joules": self.dram_energy_joules,
54
+ "gpu_energy_joules": self.gpu_energy_joules,
55
+ "carbon_grams": self.carbon_grams,
56
+ "carbon_intensity": self.carbon_intensity,
57
+ "backend": self.backend,
58
+ "start_time": self.start_time.isoformat() if self.start_time else None,
59
+ "end_time": self.end_time.isoformat() if self.end_time else None,
60
+ }
61
+
62
+
63
+ @dataclass
64
+ class CommitEnergyProfile:
65
+ # Energy profile for a specific commit.
66
+
67
+ commit_hash: str
68
+ energy_before: Optional[EnergyMetrics] = None # Parent commit energy
69
+ energy_after: Optional[EnergyMetrics] = None # This commit energy
70
+ energy_delta: float = 0.0 # Change in joules
71
+ energy_regression: bool = False # True if energy increased
72
+ regression_percentage: float = 0.0 # % change
73
+
74
+ def to_dict(self) -> Dict[str, Any]:
75
+ # Convert to dictionary.
76
+ return {
77
+ "commit_hash": self.commit_hash,
78
+ "energy_before": self.energy_before.to_dict() if self.energy_before else None,
79
+ "energy_after": self.energy_after.to_dict() if self.energy_after else None,
80
+ "energy_delta": self.energy_delta,
81
+ "energy_regression": self.energy_regression,
82
+ "regression_percentage": self.regression_percentage,
83
+ }
84
+
85
+
86
+ class EnergyMeter(ABC):
87
+ # Abstract base class for energy measurement backends.
88
+
89
+ def __init__(self, backend: EnergyBackend):
90
+ # Initialize the energy meter.
91
+ self.backend = backend
92
+ self._is_measuring = False
93
+ self._start_time: Optional[float] = None
94
+ self._measurements: List[float] = []
95
+
96
+ @abstractmethod
97
+ def is_available(self) -> bool:
98
+ # Check if this energy measurement backend is available on the system.
99
+ pass
100
+
101
+ @abstractmethod
102
+ def start(self) -> None:
103
+ # Start energy measurement.
104
+ pass
105
+
106
+ @abstractmethod
107
+ def stop(self) -> EnergyMetrics:
108
+ # Stop energy measurement and return results.
109
+ pass
110
+
111
+ def measure(self, func: Callable, *args, **kwargs) -> tuple[Any, EnergyMetrics]:
112
+ # Measure energy consumption of a function call.
113
+ self.start()
114
+ try:
115
+ result = func(*args, **kwargs)
116
+ finally:
117
+ metrics = self.stop()
118
+ return result, metrics
119
+
120
+ def measure_command(self, command: str, timeout: Optional[int] = None) -> EnergyMetrics:
121
+ # Measure energy consumption of a shell command.
122
+ import subprocess
123
+
124
+ self.start()
125
+ try:
126
+ subprocess.run(
127
+ command,
128
+ shell=True,
129
+ timeout=timeout,
130
+ capture_output=True,
131
+ text=True,
132
+ )
133
+ finally:
134
+ metrics = self.stop()
135
+ return metrics
136
+
137
+ def __enter__(self):
138
+ # Context manager entry.
139
+ self.start()
140
+ return self
141
+
142
+ def __exit__(self, exc_type, exc_val, exc_tb):
143
+ # Context manager exit.
144
+ self.stop()
145
+ return False
146
+
147
+
148
+ def get_energy_meter(backend: str = "rapl") -> EnergyMeter:
149
+ # Factory function to get an energy meter instance.
150
+ from .rapl import RAPLEnergyMeter
151
+ from .codecarbon_meter import CodeCarbonMeter
152
+
153
+ backend_lower = backend.lower()
154
+
155
+ if backend_lower == "rapl":
156
+ meter = RAPLEnergyMeter()
157
+ elif backend_lower == "codecarbon":
158
+ meter = CodeCarbonMeter()
159
+ else:
160
+ raise ValueError(f"Unsupported energy backend: {backend}")
161
+
162
+ if not meter.is_available():
163
+ raise ValueError(f"Energy backend '{backend}' is not available on this system")
164
+
165
+ return meter
@@ -0,0 +1,146 @@
1
+ # CodeCarbon integration for carbon-aware energy measurement.
2
+
3
+ from __future__ import annotations
4
+
5
+ import time
6
+ from datetime import datetime
7
+ from typing import Optional
8
+
9
+ from .base import EnergyMeter, EnergyMetrics, EnergyBackend
10
+
11
+
12
+ class CodeCarbonMeter(EnergyMeter):
13
+ # Energy measurement using CodeCarbon library.
14
+
15
+ def __init__(
16
+ self,
17
+ project_name: str = "greenmining",
18
+ output_dir: Optional[str] = None,
19
+ save_to_file: bool = False,
20
+ ):
21
+ # Initialize CodeCarbon energy meter.
22
+ super().__init__(EnergyBackend.CODECARBON)
23
+ self.project_name = project_name
24
+ self.output_dir = output_dir
25
+ self.save_to_file = save_to_file
26
+ self._tracker = None
27
+ self._start_time: Optional[float] = None
28
+ self._codecarbon_available = self._check_codecarbon()
29
+
30
+ def _check_codecarbon(self) -> bool:
31
+ # Check if CodeCarbon is installed.
32
+ try:
33
+ from codecarbon import EmissionsTracker
34
+ return True
35
+ except ImportError:
36
+ return False
37
+
38
+ def is_available(self) -> bool:
39
+ # Check if CodeCarbon is available.
40
+ return self._codecarbon_available
41
+
42
+ def start(self) -> None:
43
+ # Start energy measurement.
44
+ if not self._codecarbon_available:
45
+ raise RuntimeError("CodeCarbon is not installed. Run: pip install codecarbon")
46
+
47
+ if self._is_measuring:
48
+ raise RuntimeError("Already measuring energy")
49
+
50
+ from codecarbon import EmissionsTracker
51
+
52
+ self._is_measuring = True
53
+ self._start_time = time.time()
54
+
55
+ # Create emissions tracker
56
+ tracker_kwargs = {
57
+ "project_name": self.project_name,
58
+ "measure_power_secs": 1,
59
+ "save_to_file": self.save_to_file,
60
+ "log_level": "error", # Suppress verbose output
61
+ }
62
+
63
+ if self.output_dir:
64
+ tracker_kwargs["output_dir"] = self.output_dir
65
+
66
+ self._tracker = EmissionsTracker(**tracker_kwargs)
67
+ self._tracker.start()
68
+
69
+ def stop(self) -> EnergyMetrics:
70
+ # Stop energy measurement and return results.
71
+ if not self._is_measuring:
72
+ raise RuntimeError("Not currently measuring energy")
73
+
74
+ end_time = time.time()
75
+ self._is_measuring = False
76
+
77
+ # Stop tracker and get emissions
78
+ emissions_kg = self._tracker.stop()
79
+
80
+ # Get detailed data from tracker
81
+ duration = end_time - self._start_time
82
+
83
+ # CodeCarbon stores data in tracker._total_energy (kWh)
84
+ # In v3.x it may return an Energy object, extract the value
85
+ energy_raw = getattr(self._tracker, "_total_energy", 0) or 0
86
+ if hasattr(energy_raw, "kWh"):
87
+ energy_kwh = float(energy_raw.kWh)
88
+ else:
89
+ energy_kwh = float(energy_raw) if energy_raw else 0.0
90
+
91
+ # Convert kWh to joules (1 kWh = 3,600,000 J)
92
+ energy_joules = energy_kwh * 3_600_000
93
+
94
+ # Calculate average power
95
+ watts_avg = (energy_joules / duration) if duration > 0 else 0
96
+
97
+ # Get carbon intensity if available
98
+ carbon_intensity = None
99
+ try:
100
+ carbon_intensity = getattr(self._tracker, "_carbon_intensity", None)
101
+ if hasattr(carbon_intensity, "value"):
102
+ carbon_intensity = float(carbon_intensity.value)
103
+ except Exception:
104
+ pass
105
+
106
+ # Convert emissions from kg to grams (handle Energy objects)
107
+ if hasattr(emissions_kg, "value"):
108
+ emissions_kg = float(emissions_kg.value)
109
+ carbon_grams = float(emissions_kg or 0) * 1000
110
+
111
+ return EnergyMetrics(
112
+ joules=energy_joules,
113
+ watts_avg=watts_avg,
114
+ watts_peak=watts_avg, # CodeCarbon doesn't provide peak
115
+ duration_seconds=duration,
116
+ cpu_energy_joules=energy_joules, # CodeCarbon aggregates all sources
117
+ dram_energy_joules=0,
118
+ gpu_energy_joules=None,
119
+ carbon_grams=carbon_grams,
120
+ carbon_intensity=carbon_intensity,
121
+ backend="codecarbon",
122
+ start_time=datetime.fromtimestamp(self._start_time),
123
+ end_time=datetime.fromtimestamp(end_time),
124
+ )
125
+
126
+ def get_carbon_intensity(self) -> Optional[float]:
127
+ # Get current carbon intensity for the configured region.
128
+ if not self._codecarbon_available:
129
+ return None
130
+
131
+ try:
132
+ from codecarbon import EmissionsTracker
133
+
134
+ # Create temporary tracker to get carbon intensity
135
+ tracker = EmissionsTracker(
136
+ project_name="carbon_check",
137
+ country_iso_code=self.country_iso_code,
138
+ save_to_file=False,
139
+ log_level="error",
140
+ )
141
+ tracker.start()
142
+ tracker.stop()
143
+
144
+ return getattr(tracker, "_carbon_intensity", None)
145
+ except Exception:
146
+ return None