greenmining 0.1.12__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: greenmining
3
- Version: 0.1.12
3
+ Version: 1.0.1
4
4
  Summary: Analyze GitHub repositories to identify green software engineering patterns and energy-efficient practices
5
5
  Author-email: Adam Bouafia <a.bouafia@student.vu.nl>
6
6
  License: MIT
@@ -62,16 +62,7 @@ Green mining for microservices repositories.
62
62
 
63
63
  ## Overview
64
64
 
65
- `greenmining` is a Python library and CLI tool for analyzing GitHub repositories to identify green software engineering practices and energy-efficient patterns. It detects 122 sustainable software patterns across cloud, web, AI, database, networking, and general categories, including advanced patterns from VU Amsterdam 2024 research on green architectural tactics for ML systems.
66
-
67
- ## Features
68
-
69
- - 🔍 **122 Sustainability Patterns**: Detect energy-efficient and environmentally conscious coding practices across 15 categories (expanded from 76)
70
- - 📊 **Repository Mining**: Analyze 100+ microservices repositories from GitHub
71
- - 📈 **Green Awareness Detection**: Identify sustainability-focused commits
72
- - 📄 **Comprehensive Reports**: Generate analysis reports in multiple formats
73
- - 🐳 **Docker Support**: Run in containers for consistent environments
74
- - ⚡ **Fast Analysis**: Parallel processing and checkpoint system
65
+ `greenmining` is a Python library and CLI tool for analyzing GitHub repositories to identify green software engineering practices and energy-efficient patterns. It detects sustainable software patterns across cloud, web, AI, database, networking, and general categories.
75
66
 
76
67
  ## Installation
77
68
 
@@ -118,6 +109,12 @@ greenmining extract --max-commits 50
118
109
  # Analyze for green patterns
119
110
  greenmining analyze
120
111
 
112
+ # Analyze with advanced features
113
+ greenmining analyze --enable-nlp --enable-ml-features --enable-diff-analysis
114
+
115
+ # Aggregate results with temporal analysis
116
+ greenmining aggregate --enable-temporal --temporal-granularity quarter --enable-enhanced-stats
117
+
121
118
  # Generate report
122
119
  greenmining report
123
120
  ```
@@ -147,11 +144,15 @@ from greenmining import fetch_repositories
147
144
 
148
145
  # Fetch repositories with custom search keywords
149
146
  repos = fetch_repositories(
150
- github_token="your_github_token",
151
- max_repos=50,
152
- min_stars=500,
153
- keywords="kubernetes cloud-native",
154
- languages=["Python", "Go"]
147
+ github_token="your_github_token", # Required: GitHub personal access token
148
+ max_repos=50, # Maximum number of repositories to fetch
149
+ min_stars=500, # Minimum star count filter
150
+ keywords="kubernetes cloud-native", # Search keywords (space-separated)
151
+ languages=["Python", "Go"], # Programming language filters
152
+ created_after="2020-01-01", # Filter by creation date (YYYY-MM-DD)
153
+ created_before="2024-12-31", # Filter by creation date (YYYY-MM-DD)
154
+ pushed_after="2023-01-01", # Filter by last push date (YYYY-MM-DD)
155
+ pushed_before="2024-12-31" # Filter by last push date (YYYY-MM-DD)
155
156
  )
156
157
 
157
158
  print(f"Found {len(repos)} repositories")
@@ -159,11 +160,24 @@ for repo in repos[:5]:
159
160
  print(f"- {repo.full_name} ({repo.stars} stars)")
160
161
  ```
161
162
 
163
+ **Parameters:**
164
+ - `github_token` (str, required): GitHub personal access token for API authentication
165
+ - `max_repos` (int, default=100): Maximum number of repositories to fetch
166
+ - `min_stars` (int, default=100): Minimum GitHub stars filter
167
+ - `keywords` (str, default="microservices"): Space-separated search keywords
168
+ - `languages` (list[str], optional): Programming language filters (e.g., ["Python", "Go", "Java"])
169
+ - `created_after` (str, optional): Filter repos created after date (format: "YYYY-MM-DD")
170
+ - `created_before` (str, optional): Filter repos created before date (format: "YYYY-MM-DD")
171
+ - `pushed_after` (str, optional): Filter repos pushed after date (format: "YYYY-MM-DD")
172
+ - `pushed_before` (str, optional): Filter repos pushed before date (format: "YYYY-MM-DD")
173
+
162
174
  #### Analyze Repository Commits
163
175
 
164
176
  ```python
165
177
  from greenmining.services.commit_extractor import CommitExtractor
166
178
  from greenmining.services.data_analyzer import DataAnalyzer
179
+ from greenmining.analyzers.nlp_analyzer import NLPAnalyzer
180
+ from greenmining.analyzers.ml_feature_extractor import MLFeatureExtractor
167
181
  from greenmining import fetch_repositories
168
182
 
169
183
  # Fetch repositories with custom keywords
@@ -173,12 +187,59 @@ repos = fetch_repositories(
173
187
  keywords="serverless edge-computing"
174
188
  )
175
189
 
176
- # Initialize services
177
- extractor = CommitExtractor()
178
- analyzer = DataAnalyzer()
190
+ # Initialize commit extractor with parameters
191
+ extractor = CommitExtractor(
192
+ exclude_merge_commits=True, # Skip merge commits (default: True)
193
+ exclude_bot_commits=True, # Skip bot commits (default: True)
194
+ min_message_length=10 # Minimum commit message length (default: 10)
195
+ )
196
+
197
+ # Initialize analyzer with advanced features
198
+ analyzer = DataAnalyzer(
199
+ enable_diff_analysis=False, # Enable code diff analysis (slower but more accurate)
200
+ enable_nlp=True, # Enable NLP-enhanced pattern detection
201
+ enable_ml_features=True, # Enable ML feature extraction
202
+ patterns=None, # Custom pattern dict (default: GSF_PATTERNS)
203
+ batch_size=10 # Batch processing size (default: 10)
204
+ )
205
+
206
+ # Optional: Configure NLP analyzer separately
207
+ nlp_analyzer = NLPAnalyzer(
208
+ enable_stemming=True, # Enable morphological analysis (optimize→optimizing)
209
+ enable_synonyms=True # Enable semantic synonym matching (cache→buffer)
210
+ )
211
+
212
+ # Optional: Configure ML feature extractor
213
+ ml_extractor = MLFeatureExtractor(
214
+ green_keywords=None # Custom keyword list (default: built-in 19 keywords)
215
+ )
179
216
 
180
217
  # Extract commits from first repo
181
- commits = extractor.extract_commits(repos[0], max_commits=50)
218
+ commits = extractor.extract_commits(
219
+ repository=repos[0], # PyGithub Repository object
220
+ max_commits=50, # Maximum commits to extract per repository
221
+ since=None, # Start date filter (datetime object, optional)
222
+ until=None # End date filter (datetime object, optional)
223
+ )
224
+
225
+ # CommitExtractor Parameters:
226
+ # - `exclude_merge_commits` (bool, default=True): Skip merge commits during extraction
227
+ # - `exclude_bot_commits` (bool, default=True): Skip commits from bot accounts
228
+ # - `min_message_length` (int, default=10): Minimum length for commit message to be included
229
+
230
+ # DataAnalyzer Parameters:
231
+ # - `enable_diff_analysis` (bool, default=False): Enable code diff analysis (slower)
232
+ # - `enable_nlp` (bool, default=False): Enable NLP-enhanced pattern detection
233
+ # - `enable_ml_features` (bool, default=False): Enable ML feature extraction
234
+ # - `patterns` (dict, optional): Custom pattern dictionary (default: GSF_PATTERNS)
235
+ # - `batch_size` (int, default=10): Number of commits to process in each batch
236
+
237
+ # NLPAnalyzer Parameters:
238
+ # - `enable_stemming` (bool, default=True): Enable morphological variant matching
239
+ # - `enable_synonyms` (bool, default=True): Enable semantic synonym expansion
240
+
241
+ # MLFeatureExtractor Parameters:
242
+ # - `green_keywords` (list[str], optional): Custom green keywords list
182
243
 
183
244
  # Analyze commits for green patterns
184
245
  results = []
@@ -188,6 +249,18 @@ for commit in commits:
188
249
  results.append(result)
189
250
  print(f"Green commit found: {commit.message[:50]}...")
190
251
  print(f" Patterns: {result['known_pattern']}")
252
+
253
+ # Access NLP analysis results (NEW)
254
+ if 'nlp_analysis' in result:
255
+ nlp = result['nlp_analysis']
256
+ print(f" NLP: {nlp['morphological_count']} morphological matches, "
257
+ f"{nlp['semantic_count']} semantic matches")
258
+
259
+ # Access ML features (NEW)
260
+ if 'ml_features' in result:
261
+ ml = result['ml_features']['text']
262
+ print(f" ML Features: {ml['word_count']} words, "
263
+ f"keyword density: {ml['keyword_density']:.2f}")
191
264
  ```
192
265
 
193
266
  #### Access Sustainability Patterns Data
@@ -223,6 +296,66 @@ print(f"Available categories: {sorted(categories)}")
223
296
  # 'monitoring', 'network', 'networking', 'resource', 'web']
224
297
  ```
225
298
 
299
+ #### Advanced Analysis: Temporal Trends (NEW)
300
+
301
+ ```python
302
+ from greenmining.services.data_aggregator import DataAggregator
303
+ from greenmining.analyzers.temporal_analyzer import TemporalAnalyzer
304
+ from greenmining.analyzers.qualitative_analyzer import QualitativeAnalyzer
305
+
306
+ # Initialize aggregator with all advanced features
307
+ aggregator = DataAggregator(
308
+ config=None, # Config object (optional)
309
+ enable_enhanced_stats=True, # Enable statistical analysis (correlations, trends)
310
+ enable_temporal=True, # Enable temporal trend analysis
311
+ temporal_granularity="quarter" # Time granularity: day/week/month/quarter/year
312
+ )
313
+
314
+ # Optional: Configure temporal analyzer separately
315
+ temporal_analyzer = TemporalAnalyzer(
316
+ granularity="quarter" # Time period granularity for grouping commits
317
+ )
318
+
319
+ # Optional: Configure qualitative analyzer for validation sampling
320
+ qualitative_analyzer = QualitativeAnalyzer(
321
+ sample_size=30, # Number of samples for manual validation
322
+ stratify_by="pattern" # Stratification method: pattern/repository/time/random
323
+ )
324
+
325
+ # Aggregate results with temporal insights
326
+ aggregated = aggregator.aggregate(
327
+ analysis_results=analysis_results, # List of analysis result dictionaries
328
+ repositories=repositories # List of PyGithub repository objects
329
+ )
330
+
331
+ # DataAggregator Parameters:
332
+ # - `config` (Config, optional): Configuration object
333
+ # - `enable_enhanced_stats` (bool, default=False): Enable pattern correlations and effect size analysis
334
+ # - `enable_temporal` (bool, default=False): Enable temporal trend analysis over time
335
+ # - `temporal_granularity` (str, default="quarter"): Time granularity (day/week/month/quarter/year)
336
+
337
+ # TemporalAnalyzer Parameters:
338
+ # - `granularity` (str, default="quarter"): Time period for grouping (day/week/month/quarter/year)
339
+
340
+ # QualitativeAnalyzer Parameters:
341
+ # - `sample_size` (int, default=30): Number of commits to sample for validation
342
+ # - `stratify_by` (str, default="pattern"): Stratification method (pattern/repository/time/random)
343
+
344
+ # Access temporal analysis results
345
+ temporal = aggregated['temporal_analysis']
346
+ print(f"Time periods analyzed: {len(temporal['periods'])}")
347
+
348
+ # View pattern adoption trends over time
349
+ for period_data in temporal['periods']:
350
+ print(f"{period_data['period']}: {period_data['commit_count']} commits, "
351
+ f"{period_data['green_awareness_rate']:.1%} green awareness")
352
+
353
+ # Access pattern evolution insights
354
+ evolution = temporal.get('pattern_evolution', {})
355
+ print(f"Emerging patterns: {evolution.get('emerging', [])}")
356
+ print(f"Stable patterns: {evolution.get('stable', [])}")
357
+ ```
358
+
226
359
  #### Generate Custom Reports
227
360
 
228
361
  ```python
@@ -282,13 +415,81 @@ docker run -it adambouafia/greenmining:latest /bin/bash
282
415
 
283
416
  ## Configuration
284
417
 
418
+ ### Environment Variables
419
+
285
420
  Create a `.env` file or set environment variables:
286
421
 
287
422
  ```bash
423
+ # Required
288
424
  GITHUB_TOKEN=your_github_personal_access_token
425
+
426
+ # Optional - Repository Fetching
289
427
  MAX_REPOS=100
428
+ MIN_STARS=100
429
+ SUPPORTED_LANGUAGES=Python,Java,Go,JavaScript,TypeScript
430
+ SEARCH_KEYWORDS=microservices
431
+
432
+ # Optional - Commit Extraction
290
433
  COMMITS_PER_REPO=50
434
+ EXCLUDE_MERGE_COMMITS=true
435
+ EXCLUDE_BOT_COMMITS=true
436
+
437
+ # Optional - Analysis Features
438
+ ENABLE_DIFF_ANALYSIS=false
439
+ ENABLE_NLP=true
440
+ ENABLE_ML_FEATURES=true
441
+ BATCH_SIZE=10
442
+
443
+ # Optional - Temporal Analysis
444
+ ENABLE_TEMPORAL=true
445
+ TEMPORAL_GRANULARITY=quarter
446
+ ENABLE_ENHANCED_STATS=true
447
+
448
+ # Optional - Output
291
449
  OUTPUT_DIR=./data
450
+ REPORT_FORMAT=markdown
451
+ ```
452
+
453
+ ### Config Object Parameters
454
+
455
+ ```python
456
+ from greenmining.config import Config
457
+
458
+ config = Config(
459
+ # GitHub API
460
+ github_token="your_token", # GitHub personal access token (required)
461
+
462
+ # Repository Fetching
463
+ max_repos=100, # Maximum repositories to fetch
464
+ min_stars=100, # Minimum star threshold
465
+ supported_languages=["Python", "Go"], # Language filters
466
+ search_keywords="microservices", # Default search keywords
467
+
468
+ # Commit Extraction
469
+ max_commits=50, # Commits per repository
470
+ exclude_merge_commits=True, # Skip merge commits
471
+ exclude_bot_commits=True, # Skip bot commits
472
+ min_message_length=10, # Minimum commit message length
473
+
474
+ # Analysis Options
475
+ enable_diff_analysis=False, # Enable code diff analysis
476
+ enable_nlp=True, # Enable NLP features
477
+ enable_ml_features=True, # Enable ML feature extraction
478
+ batch_size=10, # Batch processing size
479
+
480
+ # Temporal Analysis
481
+ enable_temporal=True, # Enable temporal trend analysis
482
+ temporal_granularity="quarter", # day/week/month/quarter/year
483
+ enable_enhanced_stats=True, # Enable statistical analysis
484
+
485
+ # Output Configuration
486
+ output_dir="./data", # Output directory path
487
+ repos_file="repositories.json", # Repositories filename
488
+ commits_file="commits.json", # Commits filename
489
+ analysis_file="analysis_results.json", # Analysis results filename
490
+ stats_file="aggregated_statistics.json", # Statistics filename
491
+ report_file="green_analysis.md" # Report filename
492
+ )
292
493
  ```
293
494
 
294
495
  ## Features
@@ -375,8 +576,8 @@ Feature flags, incremental processing, precomputation, background jobs, workflow
375
576
  |---------|-------------|-------------|
376
577
  | `fetch` | Fetch repositories from GitHub with custom keywords | `--max-repos`, `--min-stars`, `--languages`, `--keywords` |
377
578
  | `extract` | Extract commit history from repositories | `--max-commits` per repository |
378
- | `analyze` | Analyze commits for green patterns | Auto-detects patterns from 122-pattern database |
379
- | `aggregate` | Aggregate analysis results | Generates statistics and summaries |
579
+ | `analyze` | Analyze commits for green patterns | `--enable-nlp`, `--enable-ml-features`, `--enable-diff-analysis` |
580
+ | `aggregate` | Aggregate analysis results | `--enable-temporal`, `--temporal-granularity`, `--enable-enhanced-stats` |
380
581
  | `report` | Generate comprehensive report | Creates Markdown and CSV outputs |
381
582
  | `pipeline` | Run complete analysis pipeline | `--max-repos`, `--max-commits` (all-in-one) |
382
583
  | `status` | Show current analysis status | Displays progress and file statistics |
@@ -404,6 +605,33 @@ greenmining extract --max-commits 50
404
605
  Options:
405
606
  - `--max-commits`: Maximum commits per repository (default: 50)
406
607
 
608
+ #### Analyze Commits (with Advanced Features)
609
+ ```bash
610
+ # Basic analysis
611
+ greenmining analyze
612
+
613
+ # Advanced analysis with all features
614
+ greenmining analyze --enable-nlp --enable-ml-features --enable-diff-analysis --batch-size 20
615
+ ```
616
+ Options:
617
+ - `--batch-size`: Batch size for processing (default: 10)
618
+ - `--enable-diff-analysis`: Enable code diff analysis (slower but more accurate)
619
+ - `--enable-nlp`: Enable NLP-enhanced pattern detection with morphological variants and synonyms
620
+ - `--enable-ml-features`: Enable ML feature extraction for model training
621
+
622
+ #### Aggregate Results (with Temporal Analysis)
623
+ ```bash
624
+ # Basic aggregation
625
+ greenmining aggregate
626
+
627
+ # Advanced aggregation with temporal trends
628
+ greenmining aggregate --enable-temporal --temporal-granularity quarter --enable-enhanced-stats
629
+ ```
630
+ Options:
631
+ - `--enable-enhanced-stats`: Enable enhanced statistical analysis (correlations, effect sizes)
632
+ - `--enable-temporal`: Enable temporal trend analysis
633
+ - `--temporal-granularity`: Time period granularity (choices: day, week, month, quarter, year)
634
+
407
635
  #### Run Pipeline
408
636
  ```bash
409
637
  greenmining pipeline --max-repos 50 --max-commits 100
@@ -0,0 +1,36 @@
1
+ greenmining/__init__.py,sha256=c_Vaq_WW6-SkI_es4cQKXpdEtXdfVEnGjIDxACF6bzk,1764
2
+ greenmining/__main__.py,sha256=1RwcSXcwdza6xJX5fRT8-HhZjlnKbkmGY_uxTm-NYZ4,138
3
+ greenmining/__version__.py,sha256=1jisBTMaMLSYH7jCobgcRNneQXzskU1YHLC5Za-5YsQ,66
4
+ greenmining/cli.py,sha256=40eKDEZHNeDVb91xKBG70VfPk45mwb4YjuVCC2efVPA,17458
5
+ greenmining/config.py,sha256=1_puT52zNS589hTxEZ3UCqRC_Qw5Jw2UupUPNbNz_hs,5195
6
+ greenmining/gsf_patterns.py,sha256=Prsk_stnQrfOsk0x0zn-zdevbueAnPfGDM4XNA9PbdA,54664
7
+ greenmining/main.py,sha256=h8J9OcwyGpVJ-gjSFUS2SZExQQlHV0eDMMjAoI_sgAo,952
8
+ greenmining/utils.py,sha256=-pL8yznf1jSazBMk1ugjPQbtFOQI1E9wRI1NJbHl2xs,7941
9
+ greenmining/analyzers/__init__.py,sha256=6emAyka8ifjNjEpqhWOGkWkTJU1SgJy8Xuva-b9XSNY,518
10
+ greenmining/analyzers/code_diff_analyzer.py,sha256=mL8sCpnVo_m8vsgabe2t3gF0b_gNp3MIM-D4v31-zNQ,7682
11
+ greenmining/analyzers/ml_feature_extractor.py,sha256=rbCPA12hD1Xda7CGkLA7vGZgDjZK0r4ev5crDcbg3Jc,17727
12
+ greenmining/analyzers/nlp_analyzer.py,sha256=fBwkHqV0e4rnI-dz2DH2zmrnLZYpcRTkMrAY-zmMBTo,12616
13
+ greenmining/analyzers/qualitative_analyzer.py,sha256=6HU_Rn-mAOBXiwroj7UjV13nmagSboz5rB4eYuiYs6U,17256
14
+ greenmining/analyzers/statistical_analyzer.py,sha256=g_suZ6AAJzTft2kZH5dwSUZ8S06JyuaBy4MPSZidavY,8755
15
+ greenmining/analyzers/temporal_analyzer.py,sha256=-1fmZdkGsNqmukoy8xxEG1v4AdJ5P6Y1C1Q8e-aI1cs,15976
16
+ greenmining/controllers/__init__.py,sha256=y-W1Xnnhm4JnrY2QEo5osK8jQs7hpxXovVbHlE334F0,279
17
+ greenmining/controllers/repository_controller.py,sha256=_DtX0OAm5VUEOPY8SxrvVWNujjQmkoyt-2PpL-R2sQ0,6453
18
+ greenmining/models/__init__.py,sha256=K8udzQW2V5LqPowIm5aCiK07LxJZxCt_oW3gz5Qi-mc,397
19
+ greenmining/models/aggregated_stats.py,sha256=eYyEcKfL8oqqE_hN0tzM7eyXFzc54by20N3-72vcJ7Y,1032
20
+ greenmining/models/analysis_result.py,sha256=-6hwmickqncRXDGWM3aXBEaOGlddM5G6hnmRTyHFcMs,1525
21
+ greenmining/models/commit.py,sha256=OT95QqVzU-0xbXB5l7m7V6J4FXSPIO80M2zYJHJdyOU,2459
22
+ greenmining/models/repository.py,sha256=k1X9UYZYLl0RznohOHx_Y5wur-ZBvLcNyc9vPVArb7E,2876
23
+ greenmining/presenters/__init__.py,sha256=-ukAvhNuTvy1Xpknps0faDZ78HKdPHPySzFpQHABzKM,203
24
+ greenmining/presenters/console_presenter.py,sha256=ykJ9Hgors2dRTqQNaqCTxH4fd49F0AslQTgUOr_csI0,5347
25
+ greenmining/services/__init__.py,sha256=7CJDjHMTrY0bBoqzx22AUzIwEvby0FbAUUKYbjSlNPQ,460
26
+ greenmining/services/commit_extractor.py,sha256=ldwfb6pNMPqaAXEYMIGYyo5yFx-tYcLlAiMpJdKc8Ek,12738
27
+ greenmining/services/data_aggregator.py,sha256=WRYmVoscX0kMyI0CRnYFPhYwOBVI73o573LhibZTcPA,23770
28
+ greenmining/services/data_analyzer.py,sha256=HZDQLFZDCwCUGIzRjypyXC09Fl_-zaxhly74n3siwQc,16325
29
+ greenmining/services/github_fetcher.py,sha256=J47-plM_NKXwHDSWNBuSUZMnZnGP6wXiJyrVfeWT9ug,11360
30
+ greenmining/services/reports.py,sha256=NCNI9SCTnSLeAO8WmkNIdkB0hr-XyVpuzV0sovOoUOM,27107
31
+ greenmining-1.0.1.dist-info/licenses/LICENSE,sha256=M7ma3JHGeiIZIs3ea0HTcFl_wLFPX2NZElUliYs4bCA,1083
32
+ greenmining-1.0.1.dist-info/METADATA,sha256=BxsHBbcm6_287X-8Cq2Phi-twkwXN7E8OrDDdYemhUw,25694
33
+ greenmining-1.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
34
+ greenmining-1.0.1.dist-info/entry_points.txt,sha256=oHvTWMzNFGf2W3CFEKVVPsG4exeMv0MaQu9YsUoQ9lw,53
35
+ greenmining-1.0.1.dist-info/top_level.txt,sha256=nreXgXxZIWI-42yQknQ0HXtUrFnzZ8N1ra4Mdy2KcsI,12
36
+ greenmining-1.0.1.dist-info/RECORD,,
@@ -1,29 +0,0 @@
1
- greenmining/__init__.py,sha256=c_Vaq_WW6-SkI_es4cQKXpdEtXdfVEnGjIDxACF6bzk,1764
2
- greenmining/__main__.py,sha256=1RwcSXcwdza6xJX5fRT8-HhZjlnKbkmGY_uxTm-NYZ4,138
3
- greenmining/__version__.py,sha256=Hry6u6QztktMYf7nqf0jPXFaA0b7lmr6pjdAaVRXDaE,66
4
- greenmining/cli.py,sha256=l85LaLimXR0UPloIqni7nmpbMtkdlk57Wy9eDhe-qyQ,13451
5
- greenmining/config.py,sha256=jTWEIHIwRiQmdoGV5iCILT_nPTka6ZX1DL5ltTIzWJ0,4004
6
- greenmining/gsf_patterns.py,sha256=aSyj6XNgfxJlqFAI8I5xd47PQlU9DHFlDkrBFwRt6pM,54516
7
- greenmining/main.py,sha256=h8J9OcwyGpVJ-gjSFUS2SZExQQlHV0eDMMjAoI_sgAo,952
8
- greenmining/utils.py,sha256=-pL8yznf1jSazBMk1ugjPQbtFOQI1E9wRI1NJbHl2xs,7941
9
- greenmining/controllers/__init__.py,sha256=y-W1Xnnhm4JnrY2QEo5osK8jQs7hpxXovVbHlE334F0,279
10
- greenmining/controllers/repository_controller.py,sha256=NooHPIU8wFK5ZYxMCHteb4nfdIBa56BbNVo-akvvZL0,4509
11
- greenmining/models/__init__.py,sha256=K8udzQW2V5LqPowIm5aCiK07LxJZxCt_oW3gz5Qi-mc,397
12
- greenmining/models/aggregated_stats.py,sha256=SysZD7ZeyvOG4Qsq9B_JdMjI5NpKCNKC63sJ6-s2k2M,984
13
- greenmining/models/analysis_result.py,sha256=-6hwmickqncRXDGWM3aXBEaOGlddM5G6hnmRTyHFcMs,1525
14
- greenmining/models/commit.py,sha256=9-PbZmHSyorD1ed13rFkrT9u5XOG7SBT5Nowlr82-PE,2399
15
- greenmining/models/repository.py,sha256=lpe9Pte6KPCcRvx0aOH16v2PiH3NwjPeQRJYxriKnns,2834
16
- greenmining/presenters/__init__.py,sha256=-ukAvhNuTvy1Xpknps0faDZ78HKdPHPySzFpQHABzKM,203
17
- greenmining/presenters/console_presenter.py,sha256=jK_8agdEz-_2mqoyMNht-mNA9hXWe9EA8VlAUT_XFxA,5299
18
- greenmining/services/__init__.py,sha256=7CJDjHMTrY0bBoqzx22AUzIwEvby0FbAUUKYbjSlNPQ,460
19
- greenmining/services/commit_extractor.py,sha256=XB7Y1HKeQ4OpgEz0yAjKDPdiQcq07QCQ5Xrx9AxGfrM,11814
20
- greenmining/services/data_aggregator.py,sha256=m3O9Gp1kxR6vZ6kWTBMxfZqhLVSP5K76tXg30fDf6nc,16865
21
- greenmining/services/data_analyzer.py,sha256=MOQ-soPZRqiuEONB_-9QmWA-2akx1H7T2IFRp3GIizg,12035
22
- greenmining/services/github_fetcher.py,sha256=9aHSbZoA8BWL1Cp0cCv2NltXf0Jr7W_mO5d_-7TuOvY,9294
23
- greenmining/services/reports.py,sha256=cE7XvB2ihD5KwrO4W1Uj_I1h5pELBPF85MjgGFzkgOQ,21829
24
- greenmining-0.1.12.dist-info/licenses/LICENSE,sha256=M7ma3JHGeiIZIs3ea0HTcFl_wLFPX2NZElUliYs4bCA,1083
25
- greenmining-0.1.12.dist-info/METADATA,sha256=EXz0u_yrTG-YPNoJjrWzDqxmy8mptD4axxv5leTi-uo,15599
26
- greenmining-0.1.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
27
- greenmining-0.1.12.dist-info/entry_points.txt,sha256=oHvTWMzNFGf2W3CFEKVVPsG4exeMv0MaQu9YsUoQ9lw,53
28
- greenmining-0.1.12.dist-info/top_level.txt,sha256=nreXgXxZIWI-42yQknQ0HXtUrFnzZ8N1ra4Mdy2KcsI,12
29
- greenmining-0.1.12.dist-info/RECORD,,