gitflow-analytics 3.12.6__py3-none-any.whl → 3.13.0__py3-none-any.whl

This diff shows the differences between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1938,12 +1938,14 @@ class NarrativeReportGenerator:
                 }
             )
 
-            # Contributor analysis
+            # Contributor analysis - use canonical_id as key for accurate lookup
+            # Store display name separately so we can show the correct name in reports
+            canonical_id = commit.get("canonical_id", "unknown")
             author = commit.get("author", "Unknown")
-            if author not in contributors:
-                contributors[author] = {"count": 0, "categories": set()}
-            contributors[author]["count"] += 1
-            contributors[author]["categories"].add(category)
+            if canonical_id not in contributors:
+                contributors[canonical_id] = {"count": 0, "categories": set(), "name": author}
+            contributors[canonical_id]["count"] += 1
+            contributors[canonical_id]["categories"].add(category)
 
             # Project analysis
             project = commit.get("project_key", "UNKNOWN")
@@ -1998,12 +2000,14 @@ class NarrativeReportGenerator:
             contributors.items(), key=lambda x: x[1]["count"], reverse=True
         )
 
-        for author, data in sorted_contributors[:5]:  # Show top 5
+        for canonical_id, data in sorted_contributors[:5]:  # Show top 5
             untracked_count = data["count"]
             pct_of_untracked = (untracked_count / total_untracked) * 100
+            # Get the display name from the stored data
+            author_name = data.get("name", "Unknown")
 
-            # Find developer's total commits to calculate percentage of their work that's untracked
-            dev_data = dev_lookup.get(author)
+            # Find developer's total commits using canonical_id
+            dev_data = dev_lookup.get(canonical_id)
             if dev_data:
                 total_dev_commits = dev_data["total_commits"]
                 pct_of_dev_work = (
@@ -2018,7 +2022,7 @@ class NarrativeReportGenerator:
             if len(categories_list) > 3:
                 categories_str += f" (+{len(categories_list) - 3} more)"
 
-            report.write(f"- **{author}**: {untracked_count} commits ")
+            report.write(f"- **{author_name}**: {untracked_count} commits ")
             report.write(f"({pct_of_untracked:.1f}% of untracked{dev_context}) - ")
             report.write(f"*{categories_str}*\n")
             report.write("\n")
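Net effect of the three hunks above: the untracked-commit rollup is keyed by the resolved identity rather than the raw author string, so commits made under different aliases collapse into a single contributor entry, while the stored `name` still gives the report a readable label. A minimal sketch of that behavior, using made-up commit dicts rather than the real analysis pipeline:

    commits = [
        {"canonical_id": "dev-001", "author": "Jane Doe", "category": "feature"},
        {"canonical_id": "dev-001", "author": "jdoe", "category": "bug_fix"},
        {"canonical_id": "dev-002", "author": "Sam Lee", "category": "docs"},
    ]

    contributors = {}
    for commit in commits:
        canonical_id = commit.get("canonical_id", "unknown")
        author = commit.get("author", "Unknown")
        if canonical_id not in contributors:
            # Display name is captured once; counts accumulate under the canonical key.
            contributors[canonical_id] = {"count": 0, "categories": set(), "name": author}
        contributors[canonical_id]["count"] += 1
        contributors[canonical_id]["categories"].add(commit["category"])

    # Keyed by the author string this would have produced two one-commit entries
    # for dev-001; keyed by canonical_id it yields one entry with count == 2.
    print(contributors["dev-001"]["count"])  # 2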
--- /dev/null
+++ b/gitflow_analytics/security/reports/__init__.py
@@ -0,0 +1,5 @@
+"""Security reporting module."""
+
+from .security_report import SecurityReportGenerator
+
+__all__ = ["SecurityReportGenerator"]
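The re-export above is what makes the generator importable directly from the new subpackage; for example (package path taken from the RECORD changes further down):

    from gitflow_analytics.security.reports import SecurityReportGenerator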
--- /dev/null
+++ b/gitflow_analytics/security/reports/security_report.py
@@ -0,0 +1,358 @@
+"""Generate security analysis reports."""
+
+import json
+import csv
+from typing import List, Dict, Any, Optional
+from pathlib import Path
+from datetime import datetime
+from ..security_analyzer import SecurityAnalysis
+
+
+class SecurityReportGenerator:
+    """Generate various format reports for security findings."""
+
+    def __init__(self, output_dir: Optional[Path] = None):
+        """Initialize report generator.
+
+        Args:
+            output_dir: Directory for report output
+        """
+        self.output_dir = output_dir or Path("reports")
+        self.output_dir.mkdir(parents=True, exist_ok=True)
+
+    def generate_reports(self, analyses: List[SecurityAnalysis], summary: Dict[str, Any]) -> Dict[str, Path]:
+        """Generate all report formats.
+
+        Args:
+            analyses: List of security analyses
+            summary: Summary statistics
+
+        Returns:
+            Dictionary of report type to file path
+        """
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        reports = {}
+
+        # Generate Markdown report
+        md_path = self.output_dir / f"security_report_{timestamp}.md"
+        self._generate_markdown_report(analyses, summary, md_path)
+        reports["markdown"] = md_path
+
+        # Generate JSON report
+        json_path = self.output_dir / f"security_findings_{timestamp}.json"
+        self._generate_json_report(analyses, summary, json_path)
+        reports["json"] = json_path
+
+        # Generate CSV report
+        csv_path = self.output_dir / f"security_issues_{timestamp}.csv"
+        self._generate_csv_report(analyses, csv_path)
+        reports["csv"] = csv_path
+
+        # Generate SARIF report if requested
+        if any(a.total_findings > 0 for a in analyses):
+            sarif_path = self.output_dir / f"security_sarif_{timestamp}.json"
+            self._generate_sarif_report(analyses, sarif_path)
+            reports["sarif"] = sarif_path
+
+        return reports
+
+    def _generate_markdown_report(self, analyses: List[SecurityAnalysis], summary: Dict, path: Path) -> None:
+        """Generate comprehensive Markdown security report."""
+        with open(path, 'w') as f:
+            # Header
+            f.write("# 🔒 Security Analysis Report\n\n")
+            f.write(f"**Generated**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n")
+
+            # Executive Summary
+            f.write("## 📊 Executive Summary\n\n")
+            f.write(f"- **Commits Analyzed**: {summary['total_commits']}\n")
+            f.write(f"- **Commits with Issues**: {summary['commits_with_issues']}\n")
+            f.write(f"- **Total Findings**: {summary['total_findings']}\n")
+            f.write(f"- **Risk Level**: **{summary['risk_level']}** (Score: {summary['average_risk_score']})\n\n")
+
+            # Risk Assessment
+            self._write_risk_assessment(f, summary)
+
+            # Severity Distribution
+            f.write("## 🎯 Severity Distribution\n\n")
+            severity = summary['severity_distribution']
+            if severity['critical'] > 0:
+                f.write(f"- 🔴 **Critical**: {severity['critical']}\n")
+            if severity['high'] > 0:
+                f.write(f"- 🟠 **High**: {severity['high']}\n")
+            if severity['medium'] > 0:
+                f.write(f"- 🟡 **Medium**: {severity['medium']}\n")
+            if severity['low'] > 0:
+                f.write(f"- 🟢 **Low**: {severity['low']}\n")
+            f.write("\n")
+
+            # Top Issues
+            if summary['top_issues']:
+                f.write("## 🔝 Top Security Issues\n\n")
+                f.write("| Issue Type | Severity | Occurrences | Affected Files |\n")
+                f.write("|------------|----------|-------------|----------------|\n")
+                for issue in summary['top_issues']:
+                    f.write(f"| {issue['type']} | {issue['severity'].upper()} | "
+                            f"{issue['occurrences']} | {issue['affected_files']} |\n")
+                f.write("\n")
+
+            # Detailed Findings by Category
+            self._write_detailed_findings(f, analyses)
+
+            # LLM Insights
+            if 'llm_insights' in summary and summary['llm_insights']:
+                f.write("## 🤖 AI Security Insights\n\n")
+                f.write(summary['llm_insights'])
+                f.write("\n\n")
+
+            # Recommendations
+            f.write("## 💡 Recommendations\n\n")
+            for rec in summary['recommendations']:
+                f.write(f"- {rec}\n")
+            f.write("\n")
+
+            # Appendix - All Findings
+            f.write("## 📋 Detailed Findings\n\n")
+            self._write_all_findings(f, analyses)
+
+    def _write_risk_assessment(self, f, summary: Dict) -> None:
+        """Write risk assessment section."""
+        risk_level = summary['risk_level']
+        score = summary['average_risk_score']
+
+        f.write("## ⚠️ Risk Assessment\n\n")
+
+        if risk_level == "CRITICAL":
+            f.write("### 🚨 CRITICAL RISK DETECTED\n\n")
+            f.write("Immediate action required. Critical security vulnerabilities have been identified "
+                    "that could lead to severe security breaches.\n\n")
+        elif risk_level == "HIGH":
+            f.write("### 🔴 High Risk\n\n")
+            f.write("Significant security issues detected that should be addressed urgently.\n\n")
+        elif risk_level == "MEDIUM":
+            f.write("### 🟡 Medium Risk\n\n")
+            f.write("Moderate security concerns identified that should be addressed in the near term.\n\n")
+        else:
+            f.write("### 🟢 Low Risk\n\n")
+            f.write("Minor security issues detected. Continue with regular security practices.\n\n")
+
+        # Risk score visualization
+        f.write("**Risk Score Breakdown**:\n")
+        f.write("```\n")
+        bar_length = 50
+        filled = int(score / 100 * bar_length)
+        bar = "█" * filled + "░" * (bar_length - filled)
+        f.write(f"[{bar}] {score:.1f}/100\n")
+        f.write("```\n\n")
+
+    def _write_detailed_findings(self, f, analyses: List[SecurityAnalysis]) -> None:
+        """Write detailed findings by category."""
+        # Aggregate findings
+        all_secrets = []
+        all_vulnerabilities = []
+        all_dependencies = []
+        all_llm = []
+
+        for analysis in analyses:
+            all_secrets.extend(analysis.secrets)
+            all_vulnerabilities.extend(analysis.vulnerabilities)
+            all_dependencies.extend(analysis.dependency_issues)
+            all_llm.extend(analysis.llm_findings)
+
+        # Secrets Section
+        if all_secrets:
+            f.write("## 🔑 Exposed Secrets\n\n")
+            f.write(f"**Total**: {len(all_secrets)} potential secrets detected\n\n")
+
+            # Group by secret type
+            by_type = {}
+            for secret in all_secrets:
+                secret_type = secret.get('secret_type', 'unknown')
+                if secret_type not in by_type:
+                    by_type[secret_type] = []
+                by_type[secret_type].append(secret)
+
+            for secret_type, secrets in sorted(by_type.items()):
+                f.write(f"### {secret_type.replace('_', ' ').title()}\n")
+                for s in secrets[:5]:  # Show first 5 of each type
+                    f.write(f"- **File**: `{s.get('file', 'unknown')}`\n")
+                    f.write(f"  - Line: {s.get('line', 'N/A')}\n")
+                    f.write(f"  - Pattern: `{s.get('match', 'N/A')}`\n")
+                if len(secrets) > 5:
+                    f.write(f"  - *... and {len(secrets) - 5} more*\n")
+                f.write("\n")
+
+        # Vulnerabilities Section
+        if all_vulnerabilities:
+            f.write("## 🛡️ Code Vulnerabilities\n\n")
+            f.write(f"**Total**: {len(all_vulnerabilities)} vulnerabilities detected\n\n")
+
+            # Group by vulnerability type
+            by_type = {}
+            for vuln in all_vulnerabilities:
+                vuln_type = vuln.get('vulnerability_type', 'unknown')
+                if vuln_type not in by_type:
+                    by_type[vuln_type] = []
+                by_type[vuln_type].append(vuln)
+
+            for vuln_type, vulns in sorted(by_type.items()):
+                f.write(f"### {vuln_type.replace('_', ' ').title()}\n")
+                for v in vulns[:5]:
+                    f.write(f"- **File**: `{v.get('file', 'unknown')}:{v.get('line', 'N/A')}`\n")
+                    f.write(f"  - Tool: {v.get('tool', 'N/A')}\n")
+                    f.write(f"  - Message: {v.get('message', 'N/A')}\n")
+                if len(vulns) > 5:
+                    f.write(f"  - *... and {len(vulns) - 5} more*\n")
+                f.write("\n")
+
+        # Dependencies Section
+        if all_dependencies:
+            f.write("## 📦 Vulnerable Dependencies\n\n")
+            f.write(f"**Total**: {len(all_dependencies)} vulnerable dependencies\n\n")
+
+            for dep in all_dependencies[:10]:
+                f.write(f"- **{dep.get('package', 'unknown')}** @ {dep.get('version', 'unknown')}\n")
+                f.write(f"  - File: `{dep.get('file', 'unknown')}`\n")
+                if dep.get('cve'):
+                    f.write(f"  - CVE: {dep['cve']}\n")
+                f.write(f"  - Message: {dep.get('message', 'N/A')}\n")
+            if len(all_dependencies) > 10:
+                f.write(f"\n*... and {len(all_dependencies) - 10} more vulnerable dependencies*\n")
+            f.write("\n")
+
+    def _write_all_findings(self, f, analyses: List[SecurityAnalysis]) -> None:
+        """Write all findings in detail."""
+        for analysis in analyses:
+            if analysis.total_findings == 0:
+                continue
+
+            f.write(f"### Commit: `{analysis.commit_hash[:8]}`\n")
+            f.write(f"**Time**: {analysis.timestamp.strftime('%Y-%m-%d %H:%M:%S')}\n")
+            f.write(f"**Files Changed**: {len(analysis.files_changed)}\n")
+            f.write(f"**Risk Score**: {analysis.risk_score:.1f}\n\n")
+
+            if analysis.secrets:
+                f.write("**Secrets**:\n")
+                for s in analysis.secrets:
+                    f.write(f"- {s.get('secret_type', 'unknown')}: {s.get('file', 'N/A')}\n")
+
+            if analysis.vulnerabilities:
+                f.write("**Vulnerabilities**:\n")
+                for v in analysis.vulnerabilities:
+                    f.write(f"- {v.get('vulnerability_type', 'unknown')}: {v.get('file', 'N/A')}\n")
+
+            f.write("\n---\n\n")
+
+    def _generate_json_report(self, analyses: List[SecurityAnalysis], summary: Dict, path: Path) -> None:
+        """Generate JSON report with all findings."""
+        report = {
+            "metadata": {
+                "generated": datetime.now().isoformat(),
+                "version": "1.0.0"
+            },
+            "summary": summary,
+            "analyses": []
+        }
+
+        for analysis in analyses:
+            report["analyses"].append({
+                "commit_hash": analysis.commit_hash,
+                "timestamp": analysis.timestamp.isoformat(),
+                "files_changed": analysis.files_changed,
+                "risk_score": analysis.risk_score,
+                "findings": {
+                    "secrets": analysis.secrets,
+                    "vulnerabilities": analysis.vulnerabilities,
+                    "dependency_issues": analysis.dependency_issues,
+                    "llm_findings": analysis.llm_findings
+                },
+                "metrics": {
+                    "total": analysis.total_findings,
+                    "critical": analysis.critical_count,
+                    "high": analysis.high_count,
+                    "medium": analysis.medium_count,
+                    "low": analysis.low_count
+                }
+            })
+
+        with open(path, 'w') as f:
+            json.dump(report, f, indent=2)
+
+    def _generate_csv_report(self, analyses: List[SecurityAnalysis], path: Path) -> None:
+        """Generate CSV report of all findings."""
+        with open(path, 'w', newline='') as f:
+            writer = csv.DictWriter(f, fieldnames=[
+                'commit_hash', 'timestamp', 'type', 'severity',
+                'category', 'file', 'line', 'message', 'tool', 'confidence'
+            ])
+            writer.writeheader()
+
+            for analysis in analyses:
+                # Write all findings
+                for finding in (analysis.secrets + analysis.vulnerabilities +
+                                analysis.dependency_issues + analysis.llm_findings):
+                    writer.writerow({
+                        'commit_hash': analysis.commit_hash[:8],
+                        'timestamp': analysis.timestamp.isoformat(),
+                        'type': finding.get('type', 'unknown'),
+                        'severity': finding.get('severity', 'medium'),
+                        'category': finding.get('vulnerability_type',
+                                                finding.get('secret_type', 'unknown')),
+                        'file': finding.get('file', ''),
+                        'line': finding.get('line', ''),
+                        'message': finding.get('message', ''),
+                        'tool': finding.get('tool', finding.get('source', '')),
+                        'confidence': finding.get('confidence', '')
+                    })
+
+    def _generate_sarif_report(self, analyses: List[SecurityAnalysis], path: Path) -> None:
+        """Generate SARIF format report for GitHub Security tab integration."""
+        sarif = {
+            "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
+            "version": "2.1.0",
+            "runs": [{
+                "tool": {
+                    "driver": {
+                        "name": "GitFlow Analytics Security",
+                        "version": "1.0.0",
+                        "informationUri": "https://github.com/yourusername/gitflow-analytics"
+                    }
+                },
+                "results": []
+            }]
+        }
+
+        for analysis in analyses:
+            for finding in (analysis.secrets + analysis.vulnerabilities):
+                result = {
+                    "ruleId": finding.get('vulnerability_type',
+                                          finding.get('secret_type', 'unknown')),
+                    "level": self._severity_to_sarif_level(finding.get('severity', 'medium')),
+                    "message": {
+                        "text": finding.get('message', 'Security issue detected')
+                    },
+                    "locations": [{
+                        "physicalLocation": {
+                            "artifactLocation": {
+                                "uri": finding.get('file', 'unknown')
+                            },
+                            "region": {
+                                "startLine": finding.get('line', 1)
+                            }
+                        }
+                    }]
+                }
+                sarif["runs"][0]["results"].append(result)
+
+        with open(path, 'w') as f:
+            json.dump(sarif, f, indent=2)
+
+    def _severity_to_sarif_level(self, severity: str) -> str:
+        """Convert severity to SARIF level."""
+        mapping = {
+            "critical": "error",
+            "high": "error",
+            "medium": "warning",
+            "low": "note"
+        }
+        return mapping.get(severity.lower(), "warning")
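The class is a self-contained writer: callers pass a list of `SecurityAnalysis` objects plus a pre-computed summary dict and get back a mapping of format name to output path (Markdown, JSON, CSV, and SARIF only when findings exist). A hedged usage sketch follows; the stand-in `analyses` and `summary` values are assumptions inferred from the keys the code above reads, since the real call site is not part of this diff:

    from pathlib import Path

    from gitflow_analytics.security.reports import SecurityReportGenerator

    # Stand-in inputs: an empty analysis list and a summary dict carrying the
    # keys the Markdown writer reads. Real values come from the security analyzer.
    analyses = []
    summary = {
        "total_commits": 0,
        "commits_with_issues": 0,
        "total_findings": 0,
        "risk_level": "LOW",
        "average_risk_score": 0.0,
        "severity_distribution": {"critical": 0, "high": 0, "medium": 0, "low": 0},
        "top_issues": [],
        "recommendations": ["No issues found; keep dependency scanning enabled."],
    }

    generator = SecurityReportGenerator(output_dir=Path("reports/security"))
    report_paths = generator.generate_reports(analyses, summary)
    for fmt, path in report_paths.items():
        print(f"{fmt}: {path}")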
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gitflow-analytics
-Version: 3.12.6
+Version: 3.13.0
 Summary: Analyze Git repositories for developer productivity insights
 Author-email: Bob Matyas <bobmatnyc@gmail.com>
 License: MIT
@@ -1,6 +1,6 @@
 gitflow_analytics/__init__.py,sha256=W3Jaey5wuT1nBPehVLTIRkVIyBa5jgYOlBKc_UFfh-4,773
-gitflow_analytics/_version.py,sha256=pewC7Cj-Xe3Qm1VHbqoeHBpNrY0jP-bgcVEirV9kWuU,138
-gitflow_analytics/cli.py,sha256=ElQH2MiFVeQuYv4fosF5KVgMLRwJ0ILz1n4Ezt32s0M,274948
+gitflow_analytics/_version.py,sha256=TklDtu9hDGukw4XEfy3LR-nKoAb-vOvLBNLNYEL_r1Y,138
+gitflow_analytics/cli.py,sha256=52-T-1jysBeCKkRG9y3qUbbZFzni4tTflO-uU2lj1VY,300203
 gitflow_analytics/config.py,sha256=XRuxvzLWyn_ML7mDCcuZ9-YFNAEsnt33vIuWxQQ_jxg,1033
 gitflow_analytics/constants.py,sha256=GXEncUJS9ijOI5KWtQCTANwdqxPfXpw-4lNjhaWTKC4,2488
 gitflow_analytics/verify_activity.py,sha256=aRQnmypf5NDasXudf2iz_WdJnCWtwlbAiJ5go0DJLSU,27050
@@ -10,8 +10,9 @@ gitflow_analytics/classification/classifier.py,sha256=U1vpdiMXqGdHR8iHWf_wPdrJxx
 gitflow_analytics/classification/feature_extractor.py,sha256=W82vztPQO8-MFw9Yt17K1kXrLZ5lNtuMSwC1NBsZPLQ,23804
 gitflow_analytics/classification/linguist_analyzer.py,sha256=HjLx9mM7hGXtrvMba6osovHJLAacTx9oDmN6CS5w0bE,17687
 gitflow_analytics/classification/model.py,sha256=2KbmFh9MpyvHMcNHbqwUTAAVLHHu3MiTfFIPyZSGa-8,16356
-gitflow_analytics/cli_wizards/__init__.py,sha256=D73D97cS1hZsB_fCQQaAiWtd_w2Lb8TtcGc9Pn2DIyE,343
+gitflow_analytics/cli_wizards/__init__.py,sha256=iSCVYkwAnyPweZixLtFnNa7pB8DRLAj_sJrUPYesdn8,432
 gitflow_analytics/cli_wizards/install_wizard.py,sha256=gz5c1NYeGLCzs-plL6ju7GXn7VldF7VyMw8MO4CzUGk,70345
+gitflow_analytics/cli_wizards/menu.py,sha256=hjKziABmFqvTLrVwhVUnrtwqCrqGEEbvNTqQY84ZXzQ,21330
 gitflow_analytics/cli_wizards/run_launcher.py,sha256=J6G_C7IqxPg7_GhAfbV99D1dIIWwb1s_qmHC7Iv2iGI,15038
 gitflow_analytics/config/__init__.py,sha256=KziRIbBJctB5LOLcKLzELWA1rXwjS6-C2_DeM_hT9rM,1133
 gitflow_analytics/config/aliases.py,sha256=z9F0X6qbbF544Tw7sHlOoBj5mpRSddMkCpoKLzvVzDU,10960
@@ -28,7 +29,7 @@ gitflow_analytics/core/cache.py,sha256=2SBzry3FoLCJyhu-I-AgNTSzN_MkA-DunzOAxq_ly
 gitflow_analytics/core/data_fetcher.py,sha256=Adj1EE2RaHxSC6xmcagQb_ak9M9RgUC2GgfPuSrITME,106946
 gitflow_analytics/core/git_auth.py,sha256=QP7U5_Mi9J-hEtoEhdjoMBl61nCukOGlL8PYXYSyN3g,6369
 gitflow_analytics/core/git_timeout_wrapper.py,sha256=14K8PHKSOonW4hJpLigB5XQNSWxmFbMFbrpu8cT1h-M,12534
-gitflow_analytics/core/identity.py,sha256=k7i-vcRJ2eiTU0_kYGY5QOhxcqnitibTTx7DVONW0kg,31237
+gitflow_analytics/core/identity.py,sha256=CTjxpM5BeeMyGQ8QbtSCsUmuzMmU7vhBwrdQctjI7Z0,31397
 gitflow_analytics/core/metrics_storage.py,sha256=2u4dxVHsCTEaVIO5udWCaHzuefRL7JVS8aN7wIwyMlc,21769
 gitflow_analytics/core/progress.py,sha256=KMXwZpJGlmUU8OehNRA7_PONpXUgSIxWl5ZN7INc108,20732
 gitflow_analytics/core/schema_version.py,sha256=fhYKxerCgPHJoX4SAoJQO38sQcKDNguMEUWSj367Ilc,10660
@@ -37,7 +38,7 @@ gitflow_analytics/extractors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
 gitflow_analytics/extractors/base.py,sha256=AKbYkFiMhNxVj7zfNzsJfh0rpyTdNr4Faea3bcZPPBo,1168
 gitflow_analytics/extractors/ml_tickets.py,sha256=js5OFmbZt9JHy5r_crehhuB1MxrkdfrPj2u_4B6K35c,43304
 gitflow_analytics/extractors/story_points.py,sha256=IggP-Ei832oV9aD08a3li08kmjF3BqyU9i8EgAZcpfs,5324
-gitflow_analytics/extractors/tickets.py,sha256=2s5Iu7eZXVi8yl9Yme5HKzrJo3mDjzsSOUr_iJGUeLM,43799
+gitflow_analytics/extractors/tickets.py,sha256=KDXOTCb6FsTIsA71gldbW58_WiuGDopC2tVYpUfCnUI,43829
 gitflow_analytics/identity_llm/__init__.py,sha256=tpWDwapm6zIyb8LxLO8A6pHlE3wNorT_fBL-Yp9-XnU,250
 gitflow_analytics/identity_llm/analysis_pass.py,sha256=FJF1BEGekHRY4i5jasgxxL_UWFGYP5kBkvn8hAtMorY,9728
 gitflow_analytics/identity_llm/analyzer.py,sha256=-a7lUJt_Dlgx9aNOH1YlFqPe7BSxtwY2RoGruIzwrzs,17932
@@ -104,7 +105,7 @@ gitflow_analytics/reports/formatters.py,sha256=RFocNTbDwrNCw4IoL5etemWOPeengvDJ-
 gitflow_analytics/reports/html_generator.py,sha256=gl4vhQZb8_oxvDonBP--v_wOXXgn32OCLwwyuzuKncE,62549
 gitflow_analytics/reports/interfaces.py,sha256=XJJxmwhQ3-4iJrDQ1n5r8vFgL4gRgw_3HMpW3RPYrAU,12194
 gitflow_analytics/reports/json_exporter.py,sha256=p-GpOBJsdmvXpwORTy7LLpipFFUzI8eGuNasmKn57kA,114323
-gitflow_analytics/reports/narrative_writer.py,sha256=4s-4VQiHqckvOEy53ueFA9_UZEtL1ehKTtObS2RBLMU,119419
+gitflow_analytics/reports/narrative_writer.py,sha256=9yZN_zQcjOlkpIJH7yhGXdBV8bxv-mRu5BB8waB0e1o,119756
 gitflow_analytics/reports/story_point_correlation.py,sha256=V9fnqNOxJxK00w0Mx69BMpcZgdgQJyja_Pu4qD-SWw0,51210
 gitflow_analytics/reports/weekly_trends_writer.py,sha256=m_TQ6ThSaa5rkAwfQpPRms2Jwgq3RReYwVBsts67cLk,15720
 gitflow_analytics/security/__init__.py,sha256=qocWBnb3xM_H43vsj6RWWOjHHTAZM7GDma5wEhdbWDc,432
@@ -115,6 +116,8 @@ gitflow_analytics/security/extractors/__init__.py,sha256=K64IAhS0k47J5oUFNmm1NcL
 gitflow_analytics/security/extractors/dependency_checker.py,sha256=Tb-Rb4G7jJkH_hsvH3dmEV2SyqjN8ycTHjgItObnCHg,14601
 gitflow_analytics/security/extractors/secret_detector.py,sha256=aa2YiKeR6SurWHMHU5fr71Q83stSmIRKvwWFpp0IVFs,6857
 gitflow_analytics/security/extractors/vulnerability_scanner.py,sha256=TWK1fPMN5s_EM8AwTFpkxRjXEsHIMwxYUZQqM2l5dV0,12682
+gitflow_analytics/security/reports/__init__.py,sha256=C6a4tHi-xCtTE5sSkQU7bjhRtEGE9l-ApUC13DVZqVQ,125
+gitflow_analytics/security/reports/security_report.py,sha256=W0DXpR2ddej0Di6X6YqI6U05M08oTPjW8VWWHdNrGCg,15466
 gitflow_analytics/training/__init__.py,sha256=YT5p7Wm4U8trzLnbS5FASJBWPMKhqp3rlAThjpxWnxo,143
 gitflow_analytics/training/model_loader.py,sha256=xGZLSopGxDhC--2XN6ytRgi2CyjOKY4zS4fZ-ZlO6lM,13245
 gitflow_analytics/training/pipeline.py,sha256=PQegTk_-OsPexVyRDfiy-3Df-7pcs25C4vPASr-HT9E,19951
@@ -124,9 +127,9 @@ gitflow_analytics/ui/__init__.py,sha256=UBhYhZMvwlSrCuGWjkIdoP2zNbiQxOHOli-I8mqI
 gitflow_analytics/ui/progress_display.py,sha256=3xJnCOSs1DRVAfS-rTu37EsLfWDFW5-mbv-bPS9NMm4,59182
 gitflow_analytics/utils/__init__.py,sha256=YE3E5Mx-LmVRqLIgUUwDmbstm6gkpeavYHrQmVjwR3o,197
 gitflow_analytics/utils/commit_utils.py,sha256=TBgrWW73EODGOegGCF79ch0L0e5R6gpydNWutiQOa14,1356
-gitflow_analytics-3.12.6.dist-info/licenses/LICENSE,sha256=xwvSwY1GYXpRpmbnFvvnbmMwpobnrdN9T821sGvjOY0,1066
-gitflow_analytics-3.12.6.dist-info/METADATA,sha256=xl10qO6jZm4fVUFzQlxNwas8F1KakCkc8W0jYhbwcR0,39531
-gitflow_analytics-3.12.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-gitflow_analytics-3.12.6.dist-info/entry_points.txt,sha256=ZOsX0GLsnMysp5FWPOfP_qyoS7WJ8IgcaDFDxWBYl1g,98
-gitflow_analytics-3.12.6.dist-info/top_level.txt,sha256=CQyxZXjKvpSB1kgqqtuE0PCRqfRsXZJL8JrYpJKtkrk,18
-gitflow_analytics-3.12.6.dist-info/RECORD,,
+gitflow_analytics-3.13.0.dist-info/licenses/LICENSE,sha256=xwvSwY1GYXpRpmbnFvvnbmMwpobnrdN9T821sGvjOY0,1066
+gitflow_analytics-3.13.0.dist-info/METADATA,sha256=_qRLGhFaiq52qsJIVwf-1hN6Nx3pRfB12zTPfhxCx-Y,39531
+gitflow_analytics-3.13.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+gitflow_analytics-3.13.0.dist-info/entry_points.txt,sha256=ZOsX0GLsnMysp5FWPOfP_qyoS7WJ8IgcaDFDxWBYl1g,98
+gitflow_analytics-3.13.0.dist-info/top_level.txt,sha256=CQyxZXjKvpSB1kgqqtuE0PCRqfRsXZJL8JrYpJKtkrk,18
+gitflow_analytics-3.13.0.dist-info/RECORD,,