gitflow-analytics 1.3.11__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. gitflow_analytics/_version.py +1 -1
  2. gitflow_analytics/classification/batch_classifier.py +156 -4
  3. gitflow_analytics/cli.py +803 -135
  4. gitflow_analytics/config/loader.py +39 -1
  5. gitflow_analytics/config/schema.py +1 -0
  6. gitflow_analytics/core/cache.py +20 -0
  7. gitflow_analytics/core/data_fetcher.py +1051 -117
  8. gitflow_analytics/core/git_auth.py +169 -0
  9. gitflow_analytics/core/git_timeout_wrapper.py +347 -0
  10. gitflow_analytics/core/metrics_storage.py +12 -3
  11. gitflow_analytics/core/progress.py +219 -18
  12. gitflow_analytics/core/subprocess_git.py +145 -0
  13. gitflow_analytics/extractors/ml_tickets.py +3 -2
  14. gitflow_analytics/extractors/tickets.py +93 -8
  15. gitflow_analytics/integrations/jira_integration.py +1 -1
  16. gitflow_analytics/integrations/orchestrator.py +47 -29
  17. gitflow_analytics/metrics/branch_health.py +3 -2
  18. gitflow_analytics/models/database.py +72 -1
  19. gitflow_analytics/pm_framework/adapters/jira_adapter.py +12 -5
  20. gitflow_analytics/pm_framework/orchestrator.py +8 -3
  21. gitflow_analytics/qualitative/classifiers/llm/openai_client.py +24 -4
  22. gitflow_analytics/qualitative/classifiers/llm_commit_classifier.py +3 -1
  23. gitflow_analytics/qualitative/core/llm_fallback.py +34 -2
  24. gitflow_analytics/reports/narrative_writer.py +118 -74
  25. gitflow_analytics/security/__init__.py +11 -0
  26. gitflow_analytics/security/config.py +189 -0
  27. gitflow_analytics/security/extractors/__init__.py +7 -0
  28. gitflow_analytics/security/extractors/dependency_checker.py +379 -0
  29. gitflow_analytics/security/extractors/secret_detector.py +197 -0
  30. gitflow_analytics/security/extractors/vulnerability_scanner.py +333 -0
  31. gitflow_analytics/security/llm_analyzer.py +347 -0
  32. gitflow_analytics/security/reports/__init__.py +5 -0
  33. gitflow_analytics/security/reports/security_report.py +358 -0
  34. gitflow_analytics/security/security_analyzer.py +414 -0
  35. gitflow_analytics/tui/app.py +3 -1
  36. gitflow_analytics/tui/progress_adapter.py +313 -0
  37. gitflow_analytics/tui/screens/analysis_progress_screen.py +407 -46
  38. gitflow_analytics/tui/screens/results_screen.py +219 -206
  39. gitflow_analytics/ui/__init__.py +21 -0
  40. gitflow_analytics/ui/progress_display.py +1477 -0
  41. gitflow_analytics/verify_activity.py +697 -0
  42. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/METADATA +2 -1
  43. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/RECORD +47 -31
  44. gitflow_analytics/cli_rich.py +0 -503
  45. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/WHEEL +0 -0
  46. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/entry_points.txt +0 -0
  47. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/licenses/LICENSE +0 -0
  48. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/top_level.txt +0 -0
@@ -6,6 +6,13 @@ from collections import defaultdict
 from datetime import timezone
 from typing import Any, Optional, cast
 
+try:
+    from tqdm import tqdm
+
+    TQDM_AVAILABLE = True
+except ImportError:
+    TQDM_AVAILABLE = False
+
 logger = logging.getLogger(__name__)
 
 
@@ -141,7 +148,7 @@ class TicketExtractor:
         # Commit categorization patterns
         self.category_patterns = {
             "bug_fix": [
-                r"^fix:",
+                r"^fix(\([^)]*\))?:",  # Conventional commits: fix: or fix(scope):
                 r"\b(fix|bug|error|issue|problem|crash|exception|failure)\b",
                 r"\b(resolve|solve|repair|correct|corrected|address)\b",
                 r"\b(hotfix|bugfix|patch|quickfix)\b",
@@ -152,9 +159,13 @@ class TicketExtractor:
                 r"\bmissing\s+(space|field|data|property)\b",
                 r"\b(counting|allowing|episodes)\s+(was|not|issue)\b",
                 r"^fixes\s+\b(beacon|beacons|combo|issue|problem)\b",
+                r"\bfixing\b(?!\s+test)",  # "fixing" but not "fixing tests"
+                r"\bfixed?\s+(issue|problem|bug|error)\b",
+                r"\bresolve[ds]?\s+(issue|problem|bug)\b",
+                r"\brepair\b",
             ],
             "feature": [
-                r"^(feat|feature):",
+                r"^(feat|feature)(\([^)]*\))?:",  # Conventional commits: feat: or feat(scope):
                 r"\b(add|new|feature|implement|create|build)\b",
                 r"\b(introduce|enhance|extend|expand)\b",
                 r"\b(functionality|capability|support|enable)\b",
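The widened prefix patterns above are the conventional-commit change in this release: the optional (\([^)]*\))? group accepts an optional scope after the type. A quick standalone check of that behaviour (illustrative only, not code from the package):

import re

pattern = re.compile(r"^fix(\([^)]*\))?:")
assert pattern.match("fix: handle empty input")          # bare type
assert pattern.match("fix(parser): handle empty input")  # type with scope
assert not pattern.match("prefix: unrelated change")     # must start with "fix"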
@@ -170,6 +181,7 @@ class TicketExtractor:
                 r"\b(localization)\s+(data|structure)\b",
             ],
             "refactor": [
+                r"^refactor(\([^)]*\))?:",  # Conventional commits: refactor: or refactor(scope):
                 r"\b(refactor|restructure|reorganize|cleanup|clean up)\b",
                 r"\b(optimize|improve|simplify|streamline)\b",
                 r"\b(rename|move|extract|consolidate)\b",
@@ -179,6 +191,13 @@ class TicketExtractor:
                 r"improves?\s+(performance|efficiency|structure)\b",
                 r"improves?\s+(combo|box|focus|behavior)\b",
                 r"using\s+\w+\s+instead\s+of\s+\w+\b",  # "using X instead of Y" pattern
+                r"\brenaming\b",
+                r"\brenamed?\b",
+                r"\breduce\s+code\b",
+                r"\bsimplify\b",
+                r"\bsimplified\b",
+                r"\bboilerplate\b",
+                r"\bcode\s+cleanup\b",
             ],
             "documentation": [
                 r"\b(doc|docs|documentation|readme|comment|comments)\b",
@@ -186,6 +205,9 @@ class TicketExtractor:
                 r"\b(manual|guide|tutorial|how-to|howto)\b",
                 r"\b(explain|clarify|describe)\b",
                 r"\b(changelog|notes|examples)\b",
+                r"\bupdating\s+readme\b",
+                r"\bdoc\s+update\b",
+                r"\bdocumentation\s+fix\b",
             ],
             "deployment": [
                 r"^deploy:",
@@ -283,6 +305,7 @@ class TicketExtractor:
                 r"\b(prepare\s+for\s+release|pre-release)\b",
             ],
             "maintenance": [
+                r"^chore(\([^)]*\))?:",  # Conventional commits: chore: or chore(scope):
                 r"\b(update|upgrade|bump|maintenance|maint)\b",
                 r"\b(dependency|dependencies|package|packages)\b",
                 r"\b(npm\s+update|pip\s+install|yarn\s+upgrade)\b",
@@ -301,6 +324,11 @@ class TicketExtractor:
                 r"\b(mock|stub|fixture|factory)\b",
                 r"\b(e2e|end-to-end|acceptance|smoke)\b",
                 r"\b(coverage|assert|expect|should)\b",
+                r"\bfixing\s+tests?\b",
+                r"\btest.*broke\b",
+                r"\bupdate.*test\b",
+                r"\bbroke.*test\b",
+                r"\btest\s+fix\b",
             ],
             "style": [
                 r"^style:",
@@ -380,9 +408,20 @@ class TicketExtractor:
         return dict(by_platform)
 
     def analyze_ticket_coverage(
-        self, commits: list[dict[str, Any]], prs: list[dict[str, Any]]
+        self, commits: list[dict[str, Any]], prs: list[dict[str, Any]], progress_display=None
     ) -> dict[str, Any]:
-        """Analyze ticket reference coverage across commits and PRs."""
+        """Analyze ticket reference coverage across commits and PRs.
+
+        Args:
+            commits: List of commit dictionaries to analyze
+            prs: List of PR dictionaries to analyze
+            progress_display: Optional progress display for showing analysis progress
+
+        Note:
+            This method re-extracts tickets from commit messages rather than using cached
+            'ticket_references' to ensure the analysis respects the current allowed_platforms
+            configuration. Cached data may contain tickets from all platforms from previous runs.
+        """
         ticket_platforms: defaultdict[str, int] = defaultdict(int)
         untracked_commits: list[dict[str, Any]] = []
         ticket_summary: defaultdict[str, set[str]] = defaultdict(set)
@@ -400,20 +439,36 @@ class TicketExtractor:
         # Analyze commits
         commits_analyzed = 0
         commits_with_ticket_refs = 0
+        tickets_found = 0
+
+        # Set up progress tracking for commits
+        commit_iterator = commits
+        if progress_display and hasattr(progress_display, "console"):
+            # Rich progress display available
+            commit_iterator = commits  # Rich will handle its own progress
+        elif TQDM_AVAILABLE:
+            # Fall back to tqdm for simple progress tracking
+            commit_iterator = tqdm(
+                commits, desc="🎫 Analyzing commits for tickets", unit="commits", leave=False
+            )
 
-        for commit in commits:
+        for commit in commit_iterator:
             # Debug: check if commit is actually a dictionary
             if not isinstance(commit, dict):
                 logger.error(f"Expected commit to be dict, got {type(commit)}: {commit}")
                 continue
 
             commits_analyzed += 1
-            ticket_refs = commit.get("ticket_references", [])
+            # IMPORTANT: Re-extract tickets using current allowed_platforms instead of cached values
+            # This ensures the analysis respects the current configuration
+            commit_message = commit.get("message", "")
+            ticket_refs = self.extract_from_text(commit_message)
 
             # Debug logging for the first few commits
             if commits_analyzed <= 5:
                 logger.debug(
-                    f"Commit {commits_analyzed}: hash={commit.get('hash', 'N/A')[:8]}, ticket_refs={ticket_refs}"
+                    f"Commit {commits_analyzed}: hash={commit.get('hash', 'N/A')[:8]}, "
+                    f"re-extracted ticket_refs={ticket_refs} (allowed_platforms={self.allowed_platforms})"
                 )
 
             if ticket_refs:
@@ -432,6 +487,7 @@ class TicketExtractor:
                     platform_count = ticket_platforms[platform]
                     ticket_platforms[platform] = platform_count + 1
                     ticket_summary[platform].add(ticket_id)
+                    tickets_found += 1
             else:
                 # Track untracked commits with configurable threshold and enhanced data
                 files_changed = self._get_files_changed_count(commit)
@@ -462,8 +518,30 @@ class TicketExtractor:
 
                 untracked_commits.append(commit_data)
 
+            # Update progress if using tqdm
+            if TQDM_AVAILABLE and hasattr(commit_iterator, "set_postfix"):
+                commit_iterator.set_postfix(
+                    {
+                        "tickets": tickets_found,
+                        "with_tickets": commits_with_ticket_refs,
+                        "untracked": len(untracked_commits),
+                    }
+                )
+
         # Analyze PRs
-        for pr in prs:
+        pr_tickets_found = 0
+
+        # Set up progress tracking for PRs (only if there are PRs to analyze)
+        pr_iterator = prs
+        if (
+            prs
+            and TQDM_AVAILABLE
+            and not (progress_display and hasattr(progress_display, "console"))
+        ):
+            # Only show PR progress if there are PRs and we're not using Rich
+            pr_iterator = tqdm(prs, desc="🎫 Analyzing PRs for tickets", unit="PRs", leave=False)
+
+        for pr in pr_iterator:
             # Extract tickets from PR title and description
             pr_text = f"{pr.get('title', '')} {pr.get('description', '')}"
             tickets = self.extract_from_text(pr_text)
@@ -476,6 +554,13 @@ class TicketExtractor:
                 platform_count = ticket_platforms[platform]
                 ticket_platforms[platform] = platform_count + 1
                 ticket_summary[platform].add(ticket["id"])
+                pr_tickets_found += 1
+
+            # Update PR progress if using tqdm
+            if TQDM_AVAILABLE and hasattr(pr_iterator, "set_postfix"):
+                pr_iterator.set_postfix(
+                    {"tickets": pr_tickets_found, "with_tickets": results["prs_with_tickets"]}
+                )
 
         # Calculate coverage percentages
         total_commits = cast(int, results["total_commits"])
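The commit and PR loops above fall back to a plain tqdm bar when the Rich progress display is unavailable. A minimal standalone sketch of that pattern (assumes tqdm is installed; the data is made up for illustration):

from tqdm import tqdm

commits = [{"message": "fix: typo"}, {"message": "feat: add report"}]
bar = tqdm(commits, desc="🎫 Analyzing commits for tickets", unit="commits", leave=False)
tickets_found = 0
for commit in bar:
    if commit["message"].startswith("fix"):
        tickets_found += 1
    bar.set_postfix({"tickets": tickets_found})  # live counters shown next to the bar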
@@ -202,7 +202,7 @@ class JIRAIntegration:
         try:
             print(f" 🔍 Fetching {len(batch)} JIRA tickets from API...")
             response = self._session.get(
-                f"{self.base_url}/rest/api/3/search",
+                f"{self.base_url}/rest/api/3/search/jql",
                 params={
                     "jql": jql,
                     "fields": "*all",  # Get all fields to find story points
@@ -1,6 +1,7 @@
 """Integration orchestrator for multiple platforms."""
 
 import json
+import os
 from datetime import datetime
 from typing import Any, Union
 
@@ -15,7 +16,9 @@ class IntegrationOrchestrator:
 
     def __init__(self, config: Any, cache: GitAnalysisCache):
         """Initialize integration orchestrator."""
-        print(" 🔍 IntegrationOrchestrator.__init__ called")
+        self.debug_mode = os.getenv("GITFLOW_DEBUG", "").lower() in ("1", "true", "yes")
+        if self.debug_mode:
+            print(" 🔍 IntegrationOrchestrator.__init__ called")
         self.config = config
         self.cache = cache
         self.integrations: dict[str, Union[GitHubIntegration, JIRAIntegration]] = {}
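Most of the chatty startup prints in this module are now gated behind a GITFLOW_DEBUG environment flag. A small sketch of the same truthy-value convention (the helper name is illustrative, not part of the package):

import os

def gitflow_debug_enabled() -> bool:
    # Mirrors the gate added above: "1", "true", or "yes" (any case) enables it.
    return os.getenv("GITFLOW_DEBUG", "").lower() in ("1", "true", "yes")

os.environ["GITFLOW_DEBUG"] = "1"  # or export GITFLOW_DEBUG=1 in the shell before running
assert gitflow_debug_enabled()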
@@ -59,7 +62,8 @@
             and config.pm_integration
             and config.pm_integration.enabled
         ):
-            print(" 🔍 PM Integration detected - building configuration...")
+            if self.debug_mode:
+                print(" 🔍 PM Integration detected - building configuration...")
             try:
                 # Create PM platform configuration for the orchestrator
                 pm_config = {
@@ -93,41 +97,48 @@
                     }
 
                     # Special handling for JIRA - use credentials from top-level JIRA config
-                    if platform_name == "jira" and hasattr(config, "jira"):
-                        platform_settings["username"] = config.jira.access_user
-                        platform_settings["api_token"] = config.jira.access_token
+                    if platform_name == "jira" and hasattr(config, "jira") and config.jira:
+                        # Safely access JIRA config attributes
+                        if hasattr(config.jira, "access_user") and config.jira.access_user:
+                            platform_settings["username"] = config.jira.access_user
+                        if hasattr(config.jira, "access_token") and config.jira.access_token:
+                            platform_settings["api_token"] = config.jira.access_token
                         # Also ensure base_url matches if not set
                         if (
                             not platform_settings.get("base_url")
                             or platform_settings["base_url"] == "will_be_set_at_runtime"
-                        ):
+                        ) and hasattr(config.jira, "base_url"):
                             platform_settings["base_url"] = config.jira.base_url
                         # Add cache directory for ticket caching (config file directory)
                         if hasattr(config, "cache") and hasattr(config.cache, "directory"):
                             platform_settings["cache_dir"] = config.cache.directory
                         # Debug output to check credentials
-                        print(
-                            f" 🔍 JIRA config: username={platform_settings['username']}, has_token={bool(platform_settings['api_token'])}, base_url={platform_settings['base_url']}, cache_dir={platform_settings.get('cache_dir', 'not_set')}"
-                        )
+                        if self.debug_mode:
+                            print(
+                                f" 🔍 JIRA config: username={platform_settings['username']}, has_token={bool(platform_settings['api_token'])}, base_url={platform_settings['base_url']}, cache_dir={platform_settings.get('cache_dir', 'not_set')}"
+                            )
 
                     pm_config["pm_platforms"][platform_name] = platform_settings
 
                 # Debug output - show final PM config
-                print(
-                    f" 🔍 Final PM config platforms: {list(pm_config.get('pm_platforms', {}).keys())}"
-                )
-                for plat_name, plat_config in pm_config.get("pm_platforms", {}).items():
+                if self.debug_mode:
                     print(
-                        f" 🔍 {plat_name}: enabled={plat_config.get('enabled')}, has_username={bool(plat_config.get('username'))}, has_token={bool(plat_config.get('api_token'))}"
+                        f" 🔍 Final PM config platforms: {list(pm_config.get('pm_platforms', {}).keys())}"
                     )
+                    for plat_name, plat_config in pm_config.get("pm_platforms", {}).items():
+                        print(
+                            f" 🔍 {plat_name}: enabled={plat_config.get('enabled')}, has_username={bool(plat_config.get('username'))}, has_token={bool(plat_config.get('api_token'))}"
+                        )
 
                 self.pm_orchestrator = PMFrameworkOrchestrator(pm_config)
-                print(
-                    f"📋 PM Framework initialized with {len(self.pm_orchestrator.get_active_platforms())} platforms"
-                )
+                if self.debug_mode:
+                    print(
+                        f"📋 PM Framework initialized with {len(self.pm_orchestrator.get_active_platforms())} platforms"
+                    )
 
             except Exception as e:
-                print(f"⚠️ Failed to initialize PM framework: {e}")
+                if self.debug_mode:
+                    print(f"⚠️ Failed to initialize PM framework: {e}")
                 self.pm_orchestrator = None
 
     def enrich_repository_data(
@@ -154,8 +165,11 @@
             except Exception as e:
                 import traceback
 
-                print(f" ⚠️ GitHub enrichment failed: {e}")
-                print(f" Debug traceback: {traceback.format_exc()}")
+                if self.debug_mode:
+                    print(f" ⚠️ GitHub enrichment failed: {e}")
+                    import traceback
+
+                    print(f" Debug traceback: {traceback.format_exc()}")
 
         # JIRA enrichment for story points
         if "jira" in self.integrations:
@@ -170,12 +184,14 @@
                     jira_integration.enrich_prs_with_jira_data(enrichment["prs"])
 
             except Exception as e:
-                print(f" ⚠️ JIRA enrichment failed: {e}")
+                if self.debug_mode:
+                    print(f" ⚠️ JIRA enrichment failed: {e}")
 
         # PM Framework enrichment
         if self.pm_orchestrator and self.pm_orchestrator.is_enabled():
             try:
-                print(" 📋 Collecting PM platform data...")
+                if self.debug_mode:
+                    print(" 📋 Collecting PM platform data...")
 
                 # Get all issues from PM platforms
                 pm_issues = self.pm_orchestrator.get_all_issues(since=since)
@@ -194,15 +210,17 @@
                     enrichment["pm_data"]["metrics"] = enhanced_metrics
 
                 # Only show correlations message if there are any correlations found
-                if correlations:
-                    print(
-                        f" ✅ PM correlations found: {len(correlations)} commits linked to issues"
-                    )
-                else:
-                    print(" 📋 PM data processed (no correlations found)")
+                if self.debug_mode:
+                    if correlations:
+                        print(
+                            f" ✅ PM correlations found: {len(correlations)} commits linked to issues"
+                        )
+                    else:
+                        print(" 📋 PM data processed (no correlations found)")
 
             except Exception as e:
-                print(f" ⚠️ PM framework enrichment failed: {e}")
+                if self.debug_mode:
+                    print(f" ⚠️ PM framework enrichment failed: {e}")
                 enrichment["pm_data"] = {"error": str(e)}
 
         return enrichment
@@ -186,8 +186,9 @@ class BranchHealthAnalyzer:
 
             # If branch tip is in main's history, it's merged
             branch_tip = branch.commit
-            main_commits = set(repo.iter_commits(main_branch))
-            return branch_tip in main_commits
+            # Use commit hashes instead of commit objects for hashability
+            main_commit_hashes = set(commit.hexsha for commit in repo.iter_commits(main_branch))
+            return branch_tip.hexsha in main_commit_hashes
 
         except Exception:
             return False
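The fix above compares hexsha strings rather than Commit objects when testing whether a branch tip is already contained in the main branch's history. A standalone illustration of the same check (assumes GitPython and a local clone; branch names are placeholders):

from git import Repo

repo = Repo(".")
main_hashes = {c.hexsha for c in repo.iter_commits("main")}   # hashes reachable from main
tip = repo.heads["feature/example"].commit                    # hypothetical branch name
print(tip.hexsha in main_hashes)                              # True if the branch is merged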
@@ -52,6 +52,9 @@ class CachedCommit(Base):
     files_changed = Column(Integer)
     insertions = Column(Integer)
     deletions = Column(Integer)
+    # Filtered metrics (after exclusions applied)
+    filtered_insertions = Column(Integer, default=0)
+    filtered_deletions = Column(Integer, default=0)
     complexity_delta = Column(Float)
 
     # Extracted data
@@ -936,6 +939,8 @@ class Database:
             # Test the connection and create tables
             Base.metadata.create_all(self.engine)
             self.SessionLocal = sessionmaker(bind=self.engine)
+            # Apply migrations for existing databases
+            self._apply_migrations()
 
             # Test that we can actually write to the database
             self._test_database_write()
@@ -972,6 +977,8 @@ class Database:
 
             Base.metadata.create_all(self.engine)
             self.SessionLocal = sessionmaker(bind=self.engine)
+            # Apply migrations for existing databases
+            self._apply_migrations()
 
             # Test write capability
             self._test_database_write()
@@ -1005,6 +1012,8 @@ class Database:
 
             Base.metadata.create_all(self.engine)
             self.SessionLocal = sessionmaker(bind=self.engine)
+            # Apply migrations for existing databases
+            self._apply_migrations()
 
             self.is_readonly_fallback = True
 
@@ -1094,5 +1103,67 @@ class Database:
         return self.SessionLocal()
 
     def init_db(self) -> None:
-        """Initialize database tables."""
+        """Initialize database tables and apply migrations."""
         Base.metadata.create_all(self.engine)
+        self._apply_migrations()
+
+    def _apply_migrations(self) -> None:
+        """Apply database migrations for backward compatibility.
+
+        This method adds new columns to existing tables without losing data.
+        """
+        try:
+            with self.engine.connect() as conn:
+                # Check if filtered columns exist in cached_commits table
+                result = conn.execute(text("PRAGMA table_info(cached_commits)"))
+                columns = {row[1] for row in result}
+
+                # Add filtered_insertions column if it doesn't exist
+                if "filtered_insertions" not in columns:
+                    logger.info("Adding filtered_insertions column to cached_commits table")
+                    try:
+                        conn.execute(
+                            text(
+                                "ALTER TABLE cached_commits ADD COLUMN filtered_insertions INTEGER DEFAULT 0"
+                            )
+                        )
+                        conn.commit()
+                    except Exception as e:
+                        logger.debug(f"Column may already exist or database is readonly: {e}")
+
+                # Add filtered_deletions column if it doesn't exist
+                if "filtered_deletions" not in columns:
+                    logger.info("Adding filtered_deletions column to cached_commits table")
+                    try:
+                        conn.execute(
+                            text(
+                                "ALTER TABLE cached_commits ADD COLUMN filtered_deletions INTEGER DEFAULT 0"
+                            )
+                        )
+                        conn.commit()
+                    except Exception as e:
+                        logger.debug(f"Column may already exist or database is readonly: {e}")
+
+                # Initialize filtered columns with existing values for backward compatibility
+                if "filtered_insertions" not in columns or "filtered_deletions" not in columns:
+                    logger.info("Initializing filtered columns with existing values")
+                    try:
+                        conn.execute(
+                            text(
+                                """
+                                UPDATE cached_commits
+                                SET filtered_insertions = COALESCE(filtered_insertions, insertions),
+                                    filtered_deletions = COALESCE(filtered_deletions, deletions)
+                                WHERE filtered_insertions IS NULL OR filtered_deletions IS NULL
+                                """
+                            )
+                        )
+                        conn.commit()
+                    except Exception as e:
+                        logger.debug(f"Could not initialize filtered columns: {e}")
+
+        except Exception as e:
+            # Don't fail if migrations can't be applied (e.g., in-memory database)
+            logger.debug(
+                f"Could not apply migrations (may be normal for new/memory databases): {e}"
+            )
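The migration helper keys off PRAGMA table_info, where each returned row is (cid, name, type, notnull, dflt_value, pk), hence the row[1] lookups above. A tiny standalone check of that shape (plain sqlite3, separate from the package):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE cached_commits (commit_hash TEXT, insertions INTEGER)")
columns = {row[1] for row in conn.execute("PRAGMA table_info(cached_commits)")}
assert columns == {"commit_hash", "insertions"}  # row[1] is the column name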
@@ -545,7 +545,13 @@ class JIRAAdapter(BasePlatformAdapter):
            - cache_dir: Directory for ticket cache (optional, defaults to current directory)
            - cache_ttl_hours: Cache TTL in hours (optional, default: 168 = 7 days)
         """
-        print(f" 🔍 JIRA adapter __init__ called with config keys: {list(config.keys())}")
+        import os
+
+        # Check debug mode
+        debug_mode = os.getenv("GITFLOW_DEBUG", "").lower() in ("1", "true", "yes")
+        if debug_mode:
+            print(f" 🔍 JIRA adapter __init__ called with config keys: {list(config.keys())}")
+
         super().__init__(config)
 
         # Required configuration (use defaults for capability checking)
@@ -557,9 +563,10 @@ class JIRAAdapter(BasePlatformAdapter):
         logger.info(
             f"JIRA adapter init: base_url={self.base_url}, username={self.username}, has_token={bool(self.api_token and self.api_token != 'dummy-token')}"
         )
-        print(
-            f" 🔍 JIRA adapter received: username={self.username}, has_token={bool(self.api_token and self.api_token != 'dummy-token')}, base_url={self.base_url}"
-        )
+        if debug_mode:
+            print(
+                f" 🔍 JIRA adapter received: username={self.username}, has_token={bool(self.api_token and self.api_token != 'dummy-token')}, base_url={self.base_url}"
+            )
 
         # Optional configuration with defaults
         self.story_point_fields = config.get(
@@ -945,7 +952,7 @@ class JIRAAdapter(BasePlatformAdapter):
 
         session = self._ensure_session()
         response = session.get(
-            f"{self.base_url}/rest/api/3/search",
+            f"{self.base_url}/rest/api/3/search/jql",
            params={
                "jql": jql,
                "startAt": start_at,
@@ -80,12 +80,17 @@ class PMFrameworkOrchestrator:
         self.temporal_window_hours = correlation_config.get("temporal_window_hours", 72)
         self.confidence_threshold = correlation_config.get("confidence_threshold", 0.8)
 
+        import os
         import traceback
 
         logger.info("PM Framework Orchestrator initialized")
-        print(" 🔍 PM Framework init stack trace:")
-        for line in traceback.format_stack()[-5:-1]:
-            print(" " + line.strip())
+
+        # Only show debug messages when GITFLOW_DEBUG is set
+        debug_mode = os.getenv("GITFLOW_DEBUG", "").lower() in ("1", "true", "yes")
+        if debug_mode:
+            print(" 🔍 PM Framework init stack trace:")
+            for line in traceback.format_stack()[-5:-1]:
+                print(" " + line.strip())
 
         # Initialize configured platforms if PM integration is enabled
         if self.pm_integration_enabled:
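Both Jira search calls changed in this release (in JIRAIntegration and in the JIRAAdapter hunk above) now target /rest/api/3/search/jql instead of /rest/api/3/search. A hedged sketch of such a request, passing the same params the diff uses; the site URL, JQL, and credentials are placeholders:

import requests

base_url = "https://your-site.atlassian.net"  # placeholder Jira Cloud site
response = requests.get(
    f"{base_url}/rest/api/3/search/jql",
    params={"jql": "project = ABC ORDER BY updated DESC", "fields": "*all"},
    auth=("user@example.com", "api-token"),  # placeholder basic-auth credentials
    timeout=30,
)
issues = response.json().get("issues", [])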
@@ -312,11 +312,31 @@ class OpenAIClassifier(BaseLLMClassifier):
             "temperature": self.config.temperature,
         }
 
-        # Make request
+        # Make request with proper timeout handling
         url = f"{self.config.api_base_url}/chat/completions"
-        response = requests.post(
-            url, headers=headers, json=payload, timeout=self.config.timeout_seconds
-        )
+
+        # Log request details for debugging
+        logger.debug(f"Making API request to {url} with model {self.config.model}")
+        logger.debug(f"Timeout set to {self.config.timeout_seconds} seconds")
+
+        try:
+            # Use a more conservative timeout and handle both connection and read timeouts
+            # connection timeout = 10s, read timeout = config timeout
+            response = requests.post(
+                url,
+                headers=headers,
+                json=payload,
+                timeout=(10.0, self.config.timeout_seconds),  # (connection, read) timeouts
+            )
+        except requests.exceptions.Timeout as e:
+            logger.error(f"API request timed out after {self.config.timeout_seconds}s: {e}")
+            raise Exception(f"API request timed out after {self.config.timeout_seconds} seconds")
+        except requests.exceptions.ConnectionError as e:
+            logger.error(f"Connection error during API request: {e}")
+            raise Exception(f"Connection error: Unable to reach API at {url}")
+        except requests.exceptions.RequestException as e:
+            logger.error(f"Request failed: {e}")
+            raise Exception(f"Request failed: {str(e)}")
 
         # Check response
         if response.status_code != 200:
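The timeout argument above uses the requests (connect, read) tuple form: the first value bounds establishing the connection, the second bounds waiting for response data. A minimal sketch of the same idea against a placeholder endpoint (URL and payload are illustrative only):

import requests

try:
    response = requests.post(
        "https://api.example.com/v1/chat/completions",  # placeholder endpoint
        json={"model": "placeholder", "messages": []},
        timeout=(10.0, 5.0),  # 10s to connect, 5s to read, mirroring the new defaults
    )
except requests.exceptions.Timeout:
    # Raised when either phase exceeds its limit; ConnectTimeout and ReadTimeout are subclasses
    pass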
@@ -45,7 +45,7 @@ class LLMConfig:
     confidence_threshold: float = 0.7  # Minimum confidence for LLM predictions
     max_tokens: int = 50  # Keep responses short
     temperature: float = 0.1  # Low temperature for consistent results
-    timeout_seconds: float = 30.0  # API timeout
+    timeout_seconds: float = 5.0  # API timeout - reduced to fail fast on unresponsive APIs
 
     # Caching configuration
     cache_duration_days: int = 90  # Long cache duration for cost optimization
@@ -54,6 +54,7 @@ class LLMConfig:
     # Cost optimization
     batch_size: int = 1  # Process one at a time for simplicity
     max_daily_requests: int = 1000  # Rate limiting
+    max_retries: int = 1  # Reduce retries to fail faster on unresponsive APIs
 
     # Domain-specific terms for organization
     domain_terms: dict[str, list[str]] = None
@@ -163,6 +164,7 @@ class LLMCommitClassifier:
                max_tokens=self.config.max_tokens,
                timeout_seconds=self.config.timeout_seconds,
                max_daily_requests=self.config.max_daily_requests,
+               max_retries=getattr(self.config, "max_retries", 2),  # Use config or default to 2
                use_openrouter=True,  # Default to OpenRouter
            )