greenmining 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (43)
  1. greenmining/__init__.py +11 -29
  2. greenmining/__main__.py +9 -3
  3. greenmining/__version__.py +2 -2
  4. greenmining/analyzers/__init__.py +3 -7
  5. greenmining/analyzers/code_diff_analyzer.py +151 -61
  6. greenmining/analyzers/qualitative_analyzer.py +15 -81
  7. greenmining/analyzers/statistical_analyzer.py +8 -69
  8. greenmining/analyzers/temporal_analyzer.py +16 -72
  9. greenmining/config.py +105 -58
  10. greenmining/controllers/__init__.py +1 -5
  11. greenmining/controllers/repository_controller.py +153 -94
  12. greenmining/energy/__init__.py +13 -0
  13. greenmining/energy/base.py +165 -0
  14. greenmining/energy/codecarbon_meter.py +146 -0
  15. greenmining/energy/rapl.py +157 -0
  16. greenmining/gsf_patterns.py +4 -26
  17. greenmining/models/__init__.py +1 -5
  18. greenmining/models/aggregated_stats.py +4 -4
  19. greenmining/models/analysis_result.py +4 -4
  20. greenmining/models/commit.py +5 -5
  21. greenmining/models/repository.py +5 -5
  22. greenmining/presenters/__init__.py +1 -5
  23. greenmining/presenters/console_presenter.py +24 -24
  24. greenmining/services/__init__.py +10 -6
  25. greenmining/services/commit_extractor.py +8 -152
  26. greenmining/services/data_aggregator.py +45 -175
  27. greenmining/services/data_analyzer.py +9 -202
  28. greenmining/services/github_fetcher.py +210 -323
  29. greenmining/services/github_graphql_fetcher.py +361 -0
  30. greenmining/services/local_repo_analyzer.py +387 -0
  31. greenmining/services/reports.py +33 -137
  32. greenmining/utils.py +21 -149
  33. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/METADATA +69 -173
  34. greenmining-1.0.5.dist-info/RECORD +37 -0
  35. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/WHEEL +1 -1
  36. greenmining/analyzers/ml_feature_extractor.py +0 -512
  37. greenmining/analyzers/nlp_analyzer.py +0 -365
  38. greenmining/cli.py +0 -471
  39. greenmining/main.py +0 -37
  40. greenmining-1.0.3.dist-info/RECORD +0 -36
  41. greenmining-1.0.3.dist-info/entry_points.txt +0 -2
  42. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/licenses/LICENSE +0 -0
  43. {greenmining-1.0.3.dist-info → greenmining-1.0.5.dist-info}/top_level.txt +0 -0
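Taken together, the list shows the 1.0.3 command-line layer (cli.py, main.py, and the console-script entry point in entry_points.txt) removed in 1.0.5, while __main__.py is reworked and a new greenmining.energy subpackage (base.py, codecarbon_meter.py, rapl.py) plus GraphQL and local-repository services are added. With the console script gone, module execution through greenmining/__main__.py is presumably the remaining entry point; a minimal sketch (the runpy call is illustrative, not taken from the package):

    import runpy

    # Equivalent of `python -m greenmining`; assumes greenmining/__main__.py
    # remains the supported entry point now that the console script is gone.
    runpy.run_module("greenmining", run_name="__main__")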
greenmining/cli.py DELETED
@@ -1,471 +0,0 @@
- """Green Microservices Mining CLI"""
-
- import click
-
- from greenmining.config import Config
- from greenmining.controllers.repository_controller import RepositoryController
- from greenmining.presenters.console_presenter import ConsolePresenter
- from greenmining.utils import colored_print, load_json_file
-
- # Initialize configuration
- config = Config()
-
- # Initialize presenter
- presenter = ConsolePresenter()
-
-
- @click.group()
- @click.option("--config-file", default=".env", help="Path to configuration file")
- @click.option("--verbose", is_flag=True, help="Enable verbose output")
- def cli(config_file, verbose):
-     """Green Microservices Mining"""
-     if verbose:
-         config.VERBOSE = True
-
-
- @cli.command()
- @click.option("--max-repos", default=100, type=int, help="Maximum repositories to fetch")
- @click.option("--min-stars", default=100, type=int, help="Minimum stars required")
- @click.option(
-     "--languages", default="Python,Java,Go,JavaScript,TypeScript", help="Comma-separated languages"
- )
- @click.option(
-     "--keywords",
-     default="microservices",
-     type=str,
-     help="Search keywords (e.g., 'kubernetes', 'docker', 'cloud-native')",
- )
- @click.option("--created-after", type=str, help="Repository created after (YYYY-MM-DD)")
- @click.option("--created-before", type=str, help="Repository created before (YYYY-MM-DD)")
- @click.option("--pushed-after", type=str, help="Repository pushed after (YYYY-MM-DD)")
- @click.option("--pushed-before", type=str, help="Repository pushed before (YYYY-MM-DD)")
- def fetch(
-     max_repos,
-     min_stars,
-     languages,
-     keywords,
-     created_after,
-     created_before,
-     pushed_after,
-     pushed_before,
- ):
-     """Fetch repositories from GitHub based on custom search keywords."""
-     presenter.show_banner()
-     colored_print(f"\n🎯 Target: {max_repos} repositories\n", "cyan")
-
-     controller = RepositoryController(config)
-     lang_list = [lang.strip() for lang in languages.split(",")]
-
-     try:
-         repositories = controller.fetch_repositories(
-             max_repos=max_repos,
-             min_stars=min_stars,
-             languages=lang_list,
-             keywords=keywords,
-             created_after=created_after,
-             created_before=created_before,
-             pushed_after=pushed_after,
-             pushed_before=pushed_before,
-         )
-
-         # Show results
-         repo_dicts = [r.to_dict() for r in repositories]
-         presenter.show_repositories(repo_dicts, limit=10)
-
-         stats = controller.get_repository_stats(repositories)
-         colored_print(f"\n📊 Total Stars: {stats.get('total_stars', 0):,}", "green")
-         colored_print(f"📈 Average Stars: {stats.get('avg_stars', 0):.0f}", "green")
-
-         presenter.show_success(f"Fetched {len(repositories)} repositories successfully!")
-
-     except Exception as e:
-         presenter.show_error(str(e))
-         raise click.Abort() from e
-
-
- @cli.command()
- @click.option("--max-commits", default=50, type=int, help="Max commits per repository")
- @click.option("--skip-merges", is_flag=True, default=True, help="Skip merge commits")
- @click.option("--days-back", default=730, type=int, help="Days to look back (default: 2 years)")
- @click.option("--timeout", default=60, type=int, help="Timeout per repo in seconds (default: 60)")
- def extract(max_commits, skip_merges, days_back, timeout):
-     """Extract commits from fetched repositories."""
-     presenter.show_banner()
-
-     from greenmining.services.commit_extractor import CommitExtractor
-
-     try:
-         # Load repositories
-         controller = RepositoryController(config)
-         repositories = controller.load_repositories()
-
-         colored_print(f"\n📝 Extracting commits from {len(repositories)} repositories...\n", "cyan")
-         colored_print(
-             f" Settings: max={max_commits}/repo, skip_merges={skip_merges}, days_back={days_back}\n",
-             "cyan",
-         )
-
-         # Extract commits
-         extractor = CommitExtractor(
-             max_commits=max_commits, skip_merges=skip_merges, days_back=days_back, timeout=timeout
-         )
-         commits = extractor.extract_from_repositories(
-             repositories=[r.to_dict() for r in repositories]
-         )
-
-         # Save commits
-         from greenmining.utils import save_json_file
-
-         save_json_file(commits, config.COMMITS_FILE)
-         colored_print(f" Saved to: {config.COMMITS_FILE}", "cyan")
-
-         # Show stats
-         stats = {
-             "total_commits": len(commits),
-             "total_repos": len(repositories),
-             "avg_per_repo": len(commits) / len(repositories) if repositories else 0,
-         }
-
-         presenter.show_commit_stats(stats)
-         presenter.show_success(f"Extracted {len(commits)} commits successfully!")
-
-     except FileNotFoundError as e:
-         presenter.show_error(str(e))
-         colored_print("💡 Run 'fetch' command first to get repositories", "yellow")
-         raise click.Abort() from e
-     except Exception as e:
-         presenter.show_error(str(e))
-         raise click.Abort() from e
-
-
- @cli.command()
- @click.option("--batch-size", default=10, type=int, help="Batch size for processing")
- @click.option("--enable-diff-analysis", is_flag=True, help="Enable code diff analysis (slower)")
- @click.option("--enable-nlp", is_flag=True, help="Enable NLP-enhanced pattern detection")
- @click.option("--enable-ml-features", is_flag=True, help="Enable ML feature extraction")
- def analyze(batch_size, enable_diff_analysis, enable_nlp, enable_ml_features):
-     """Analyze commits for green software patterns."""
-     presenter.show_banner()
-
-     from greenmining.services.data_analyzer import DataAnalyzer
-     from greenmining.utils import save_json_file
-
-     try:
-         # Load commits
-         if not config.COMMITS_FILE.exists():
-             raise FileNotFoundError("No commits file found. Run 'extract' first.")
-
-         commits = load_json_file(config.COMMITS_FILE)
-         colored_print(f"\n🔬 Analyzing {len(commits)} commits for green patterns...\n", "cyan")
-
-         # Show enabled methods
-         methods = ["Keyword"]
-         if enable_diff_analysis:
-             methods.append("Code Diff")
-         if enable_nlp:
-             methods.append("NLP")
-         if enable_ml_features:
-             methods.append("ML Features")
-
-         colored_print(f" Methods: {' + '.join(methods)}\n", "cyan")
-         colored_print(f" Batch size: {batch_size}\n", "cyan")
-
-         # Analyze
-         analyzer = DataAnalyzer(
-             batch_size=batch_size,
-             enable_diff_analysis=enable_diff_analysis,
-             enable_nlp=enable_nlp,
-             enable_ml_features=enable_ml_features,
-         )
-         results = analyzer.analyze_commits(commits)
-
-         # Save results
-         save_json_file(results, config.ANALYSIS_FILE)
-
-         # Show results
-         green_count = sum(1 for r in results if r.get("green_aware", False))
-         green_rate = (green_count / len(results)) if results else 0
-
-         results_dict = {
-             "summary": {
-                 "total_commits": len(results),
-                 "green_commits": green_count,
-                 "green_commit_rate": green_rate,
-             },
-             "known_patterns": {},
-         }
-
-         presenter.show_analysis_results(results_dict)
-         presenter.show_success(f"Analysis complete! Results saved to {config.ANALYSIS_FILE}")
-
-     except Exception as e:
-         presenter.show_error(str(e))
-         raise click.Abort() from e
-
-
- @cli.command()
- @click.option("--enable-enhanced-stats", is_flag=True, help="Enable enhanced statistical analysis")
- @click.option("--enable-temporal", is_flag=True, help="Enable temporal trend analysis")
- @click.option(
-     "--temporal-granularity",
-     default="quarter",
-     type=click.Choice(["day", "week", "month", "quarter", "year"]),
-     help="Temporal analysis granularity",
- )
- def aggregate(enable_enhanced_stats, enable_temporal, temporal_granularity):
-     """Aggregate analysis results and generate statistics."""
-     presenter.show_banner()
-
-     from greenmining.services.data_aggregator import DataAggregator
-     from greenmining.utils import save_json_file
-
-     try:
-         # Load data
-         if not config.ANALYSIS_FILE.exists():
-             raise FileNotFoundError("No analysis file found. Run 'analyze' first.")
-
-         results = load_json_file(config.ANALYSIS_FILE)
-         repos = load_json_file(config.REPOS_FILE) if config.REPOS_FILE.exists() else []
-
-         colored_print(f"\n📊 Aggregating results from {len(results)} commits...\n", "cyan")
-
-         # Show enabled features
-         if enable_enhanced_stats:
-             colored_print(" Enhanced statistics: Enabled\n", "cyan")
-         if enable_temporal:
-             colored_print(
-                 f" Temporal analysis: Enabled (granularity: {temporal_granularity})\n", "cyan"
-             )
-
-         # Aggregate
-         aggregator = DataAggregator(
-             enable_enhanced_stats=enable_enhanced_stats,
-             enable_temporal=enable_temporal,
-             temporal_granularity=temporal_granularity,
-         )
-         aggregated = aggregator.aggregate(results, repos)
-
-         # Save
-         save_json_file(aggregated, config.AGGREGATED_FILE)
-
-         # Show results
-         presenter.show_analysis_results(aggregated)
-
-         if aggregated.get("known_patterns"):
-             # Convert list format to dict format expected by presenter
-             patterns_dict = {}
-             for pattern in aggregated["known_patterns"]:
-                 patterns_dict[pattern["pattern_name"]] = {
-                     "count": pattern["count"],
-                     "percentage": pattern["percentage"],
-                     "confidence_distribution": pattern.get("confidence_breakdown", {}),
-                 }
-             presenter.show_pattern_distribution(patterns_dict, limit=10)
-
-         presenter.show_success(f"Aggregation complete! Results saved to {config.AGGREGATED_FILE}")
-
-     except Exception as e:
-         presenter.show_error(str(e))
-         raise click.Abort() from e
-
-
- @cli.command()
- @click.option("--output", default="green_microservices_analysis.md", help="Output filename")
- def report(output):
-     """Generate comprehensive markdown report."""
-     presenter.show_banner()
-
-     from greenmining.services.reports import ReportGenerator
-
-     try:
-         # Load aggregated data
-         if not config.AGGREGATED_FILE.exists():
-             raise FileNotFoundError("No aggregated data found. Run 'aggregate' first.")
-
-         # Load analysis results
-         if not config.ANALYSIS_FILE.exists():
-             raise FileNotFoundError("No analysis results found. Run 'analyze' first.")
-
-         # Load repository data
-         if not config.REPOS_FILE.exists():
-             raise FileNotFoundError("No repository data found. Run 'fetch' first.")
-
-         aggregated = load_json_file(config.AGGREGATED_FILE)
-         analysis_results = load_json_file(config.ANALYSIS_FILE)
-         repos_data = load_json_file(config.REPOS_FILE)
-
-         # Wrap analysis results if it's a list
-         if isinstance(analysis_results, list):
-             analysis = {"results": analysis_results, "total": len(analysis_results)}
-         else:
-             analysis = analysis_results
-
-         # Wrap repos data if it's a list
-         if isinstance(repos_data, list):
-             repos = {"repositories": repos_data, "total": len(repos_data)}
-         else:
-             repos = repos_data
-
-         colored_print("\n📄 Generating comprehensive report...\n", "cyan")
-
-         # Generate report
-         generator = ReportGenerator()
-         report_content = generator.generate_report(aggregated, analysis, repos)
-
-         # Save report
-         from pathlib import Path
-
-         report_path = Path(output)
-         report_path.write_text(report_content)
-
-         presenter.show_success(f"Report generated: {report_path}")
-         colored_print("\n📖 The report includes:", "cyan")
-         colored_print(" • Executive Summary", "white")
-         colored_print(" • Methodology", "white")
-         colored_print(" • Results & Statistics", "white")
-         colored_print(" • Pattern Analysis", "white")
-         colored_print(" • Per-Repository Breakdown", "white")
-         colored_print(" • Discussion & Conclusions", "white")
-
-     except Exception as e:
-         presenter.show_error(str(e))
-         raise click.Abort() from e
-
-
- @cli.command()
- def status():
-     """Show current pipeline status."""
-     presenter.show_banner()
-
-     phases = {
-         "1. Fetch Repositories": {
-             "file": str(config.REPOS_FILE),
-             "completed": config.REPOS_FILE.exists(),
-             "size": (
-                 f"{config.REPOS_FILE.stat().st_size / 1024:.1f} KB"
-                 if config.REPOS_FILE.exists()
-                 else "N/A"
-             ),
-         },
-         "2. Extract Commits": {
-             "file": str(config.COMMITS_FILE),
-             "completed": config.COMMITS_FILE.exists(),
-             "size": (
-                 f"{config.COMMITS_FILE.stat().st_size / 1024:.1f} KB"
-                 if config.COMMITS_FILE.exists()
-                 else "N/A"
-             ),
-         },
-         "3. Analyze Commits": {
-             "file": str(config.ANALYSIS_FILE),
-             "completed": config.ANALYSIS_FILE.exists(),
-             "size": (
-                 f"{config.ANALYSIS_FILE.stat().st_size / 1024:.1f} KB"
-                 if config.ANALYSIS_FILE.exists()
-                 else "N/A"
-             ),
-         },
-         "4. Aggregate Results": {
-             "file": str(config.AGGREGATED_FILE),
-             "completed": config.AGGREGATED_FILE.exists(),
-             "size": (
-                 f"{config.AGGREGATED_FILE.stat().st_size / 1024:.1f} KB"
-                 if config.AGGREGATED_FILE.exists()
-                 else "N/A"
-             ),
-         },
-         "5. Generate Report": {
-             "file": str(config.REPORT_FILE),
-             "completed": config.REPORT_FILE.exists(),
-             "size": (
-                 f"{config.REPORT_FILE.stat().st_size / 1024:.1f} KB"
-                 if config.REPORT_FILE.exists()
-                 else "N/A"
-             ),
-         },
-     }
-
-     presenter.show_pipeline_status(phases)
-
-     # Show next step
-     for phase_name, info in phases.items():
-         if not info["completed"]:
-             colored_print(f"\n💡 Next step: {phase_name}", "yellow")
-             break
-     else:
-         colored_print("\n✅ All phases complete!", "green")
-
-
- @cli.command()
- @click.option("--max-repos", default=100, type=int, help="Maximum repositories to analyze")
- @click.option("--skip-fetch", is_flag=True, help="Skip fetch phase if data exists")
- def pipeline(max_repos, skip_fetch):
-     """Run full pipeline: fetch → extract → analyze → aggregate → report."""
-     presenter.show_banner()
-
-     colored_print("\n🚀 Starting Full Pipeline...\n", "green")
-     colored_print(f" Target: {max_repos} repositories", "cyan")
-     colored_print(" Phases: fetch → extract → analyze → aggregate → report\n", "cyan")
-
-     try:
-         # Phase 1: Fetch
-         if not skip_fetch or not config.REPOS_FILE.exists():
-             colored_print("\n[1/5] 🔍 Fetching repositories...", "cyan")
-             controller = RepositoryController(config)
-             controller.fetch_repositories(max_repos=max_repos)
-         else:
-             colored_print("\n[1/5] ⏭️ Skipping fetch (using existing data)", "yellow")
-
-         # Phase 2: Extract
-         colored_print("\n[2/5] 📝 Extracting commits...", "cyan")
-         from greenmining.services.commit_extractor import CommitExtractor
-         from greenmining.utils import save_json_file
-
-         controller = RepositoryController(config)
-         repos = controller.load_repositories()
-         extractor = CommitExtractor()
-         commits = extractor.extract_from_repositories([r.to_dict() for r in repos])
-         save_json_file(commits, config.COMMITS_FILE)
-         colored_print(f" Saved {len(commits)} commits to: {config.COMMITS_FILE}", "green")
-
-         # Phase 3: Analyze
-         colored_print("\n[3/5] 🔬 Analyzing commits...", "cyan")
-         from greenmining.services.data_analyzer import DataAnalyzer
-
-         commits = load_json_file(config.COMMITS_FILE)
-         analyzer = DataAnalyzer()
-         results = analyzer.analyze_commits_batch(commits)
-         save_json_file(results, config.ANALYSIS_FILE)
-         colored_print(
-             f" Analyzed {len(results)} commits, saved to: {config.ANALYSIS_FILE}", "green"
-         )
-
-         # Phase 4: Aggregate
-         colored_print("\n[4/5] 📊 Aggregating results...", "cyan")
-         from greenmining.services.data_aggregator import DataAggregator
-
-         aggregator = DataAggregator()
-         aggregated = aggregator.aggregate(results, [r.to_dict() for r in repos])
-         save_json_file(aggregated, config.AGGREGATED_FILE)
-
-         # Phase 5: Report
-         colored_print("\n[5/5] 📄 Generating report...", "cyan")
-         from greenmining.services.reports import ReportGenerator
-
-         generator = ReportGenerator()
-         generator.generate_report(aggregated)
-
-         colored_print("\n" + "=" * 60, "green")
-         colored_print("✅ Pipeline Complete!", "green")
-         colored_print("=" * 60, "green")
-
-         presenter.show_success(f"All results saved to {config.OUTPUT_DIR}")
-         colored_print(f"\n📖 View report: {config.REPORT_FILE}", "cyan")
-
-     except Exception as e:
-         presenter.show_error(str(e))
-         raise click.Abort() from e
-
-
- if __name__ == "__main__":
-     cli()
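For context, the deleted module wired the whole pipeline into one click group, so under 1.0.3 every phase was reachable as a subcommand (fetch, extract, analyze, aggregate, report, status, pipeline). A sketch of driving it in-process with click's standard test runner (click.testing.CliRunner is click's public API; the option values here are arbitrary):

    from click.testing import CliRunner

    from greenmining.cli import cli  # module deleted in 1.0.5

    # Invoke the `fetch` subcommand in-process, as a test would.
    runner = CliRunner()
    result = runner.invoke(cli, ["fetch", "--max-repos", "10", "--min-stars", "500"])
    print(result.exit_code, result.output)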
greenmining/main.py DELETED
@@ -1,37 +0,0 @@
- """Main entry point for Green Microservices Mining CLI."""
-
- import sys
-
- from cli import cli
-
- from greenmining.utils import colored_print, print_banner
-
-
- def main():
-     """Main entry point with error handling."""
-     try:
-         print_banner("🌱 Green Microservices Mining Tool")
-         colored_print("Analyze GitHub repositories for sustainability practices\n", "cyan")
-
-         cli(obj={})
-
-     except KeyboardInterrupt:
-         colored_print("\n\n⚠️ Operation cancelled by user", "yellow")
-         sys.exit(130)
-
-     except Exception as e:
-         colored_print(f"\n❌ Unexpected error: {e}", "red")
-
-         if "--verbose" in sys.argv or "-v" in sys.argv:
-             import traceback
-
-             colored_print("\nFull traceback:", "red")
-             traceback.print_exc()
-         else:
-             colored_print("Run with --verbose for detailed error information", "yellow")
-
-         sys.exit(1)
-
-
- if __name__ == "__main__":
-     main()
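The sys.exit(130) above follows the shell convention that a process interrupted by Ctrl-C exits with 128 plus the signal number; a one-line check of that arithmetic:

    import signal

    # 130 = 128 + SIGINT (signal number 2): the conventional exit status after Ctrl-C.
    assert 128 + signal.SIGINT == 130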
greenmining-1.0.3.dist-info/RECORD DELETED
@@ -1,36 +0,0 @@
- greenmining/__init__.py,sha256=p_pk0TmyP34o97wTYVTHkXe7qpGtH43GUVD_iCadrYY,1763
- greenmining/__main__.py,sha256=1RwcSXcwdza6xJX5fRT8-HhZjlnKbkmGY_uxTm-NYZ4,138
- greenmining/__version__.py,sha256=3OgUZ5K2OXa9_-2kjlgye1N6G_QeQDeex2uw33Ja6Cs,66
- greenmining/cli.py,sha256=40eKDEZHNeDVb91xKBG70VfPk45mwb4YjuVCC2efVPA,17458
- greenmining/config.py,sha256=1_puT52zNS589hTxEZ3UCqRC_Qw5Jw2UupUPNbNz_hs,5195
- greenmining/gsf_patterns.py,sha256=Prsk_stnQrfOsk0x0zn-zdevbueAnPfGDM4XNA9PbdA,54664
- greenmining/main.py,sha256=h8J9OcwyGpVJ-gjSFUS2SZExQQlHV0eDMMjAoI_sgAo,952
- greenmining/utils.py,sha256=-pL8yznf1jSazBMk1ugjPQbtFOQI1E9wRI1NJbHl2xs,7941
- greenmining/analyzers/__init__.py,sha256=6emAyka8ifjNjEpqhWOGkWkTJU1SgJy8Xuva-b9XSNY,518
- greenmining/analyzers/code_diff_analyzer.py,sha256=mL8sCpnVo_m8vsgabe2t3gF0b_gNp3MIM-D4v31-zNQ,7682
- greenmining/analyzers/ml_feature_extractor.py,sha256=rbCPA12hD1Xda7CGkLA7vGZgDjZK0r4ev5crDcbg3Jc,17727
- greenmining/analyzers/nlp_analyzer.py,sha256=fBwkHqV0e4rnI-dz2DH2zmrnLZYpcRTkMrAY-zmMBTo,12616
- greenmining/analyzers/qualitative_analyzer.py,sha256=6HU_Rn-mAOBXiwroj7UjV13nmagSboz5rB4eYuiYs6U,17256
- greenmining/analyzers/statistical_analyzer.py,sha256=g_suZ6AAJzTft2kZH5dwSUZ8S06JyuaBy4MPSZidavY,8755
- greenmining/analyzers/temporal_analyzer.py,sha256=-1fmZdkGsNqmukoy8xxEG1v4AdJ5P6Y1C1Q8e-aI1cs,15976
- greenmining/controllers/__init__.py,sha256=y-W1Xnnhm4JnrY2QEo5osK8jQs7hpxXovVbHlE334F0,279
- greenmining/controllers/repository_controller.py,sha256=_DtX0OAm5VUEOPY8SxrvVWNujjQmkoyt-2PpL-R2sQ0,6453
- greenmining/models/__init__.py,sha256=K8udzQW2V5LqPowIm5aCiK07LxJZxCt_oW3gz5Qi-mc,397
- greenmining/models/aggregated_stats.py,sha256=eYyEcKfL8oqqE_hN0tzM7eyXFzc54by20N3-72vcJ7Y,1032
- greenmining/models/analysis_result.py,sha256=-6hwmickqncRXDGWM3aXBEaOGlddM5G6hnmRTyHFcMs,1525
- greenmining/models/commit.py,sha256=OT95QqVzU-0xbXB5l7m7V6J4FXSPIO80M2zYJHJdyOU,2459
- greenmining/models/repository.py,sha256=k1X9UYZYLl0RznohOHx_Y5wur-ZBvLcNyc9vPVArb7E,2876
- greenmining/presenters/__init__.py,sha256=-ukAvhNuTvy1Xpknps0faDZ78HKdPHPySzFpQHABzKM,203
- greenmining/presenters/console_presenter.py,sha256=ykJ9Hgors2dRTqQNaqCTxH4fd49F0AslQTgUOr_csI0,5347
- greenmining/services/__init__.py,sha256=7CJDjHMTrY0bBoqzx22AUzIwEvby0FbAUUKYbjSlNPQ,460
- greenmining/services/commit_extractor.py,sha256=FSgoHpMvoqjZ6b1UQYtwfUaLVX_GDfiR0BVd51y-gYk,13126
- greenmining/services/data_aggregator.py,sha256=OqJvQZp9xaZaSmbwWoiHAHECAghd8agbhVmStDvebOU,24054
- greenmining/services/data_analyzer.py,sha256=HZDQLFZDCwCUGIzRjypyXC09Fl_-zaxhly74n3siwQc,16325
- greenmining/services/github_fetcher.py,sha256=J47-plM_NKXwHDSWNBuSUZMnZnGP6wXiJyrVfeWT9ug,11360
- greenmining/services/reports.py,sha256=NCNI9SCTnSLeAO8WmkNIdkB0hr-XyVpuzV0sovOoUOM,27107
- greenmining-1.0.3.dist-info/licenses/LICENSE,sha256=M7ma3JHGeiIZIs3ea0HTcFl_wLFPX2NZElUliYs4bCA,1083
- greenmining-1.0.3.dist-info/METADATA,sha256=0Hj5qXVUkuJhIUBZBRhSysc3zx6L3py0HpZg9vKcl7Y,29260
- greenmining-1.0.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- greenmining-1.0.3.dist-info/entry_points.txt,sha256=oHvTWMzNFGf2W3CFEKVVPsG4exeMv0MaQu9YsUoQ9lw,53
- greenmining-1.0.3.dist-info/top_level.txt,sha256=nreXgXxZIWI-42yQknQ0HXtUrFnzZ8N1ra4Mdy2KcsI,12
- greenmining-1.0.3.dist-info/RECORD,,
greenmining-1.0.3.dist-info/entry_points.txt DELETED
@@ -1,2 +0,0 @@
- [console_scripts]
- greenmining = greenmining.cli:main