gitflow-analytics 3.6.2__py3-none-any.whl → 3.7.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. gitflow_analytics/__init__.py +8 -12
  2. gitflow_analytics/_version.py +1 -1
  3. gitflow_analytics/cli.py +323 -203
  4. gitflow_analytics/cli_wizards/install_wizard.py +5 -5
  5. gitflow_analytics/config/repository.py +9 -1
  6. gitflow_analytics/config/schema.py +39 -0
  7. gitflow_analytics/identity_llm/analysis_pass.py +7 -2
  8. gitflow_analytics/models/database.py +229 -8
  9. {gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/METADATA +2 -4
  10. {gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/RECORD +14 -27
  11. gitflow_analytics/tui/__init__.py +0 -5
  12. gitflow_analytics/tui/app.py +0 -726
  13. gitflow_analytics/tui/progress_adapter.py +0 -313
  14. gitflow_analytics/tui/screens/__init__.py +0 -8
  15. gitflow_analytics/tui/screens/analysis_progress_screen.py +0 -857
  16. gitflow_analytics/tui/screens/configuration_screen.py +0 -523
  17. gitflow_analytics/tui/screens/loading_screen.py +0 -348
  18. gitflow_analytics/tui/screens/main_screen.py +0 -321
  19. gitflow_analytics/tui/screens/results_screen.py +0 -735
  20. gitflow_analytics/tui/widgets/__init__.py +0 -7
  21. gitflow_analytics/tui/widgets/data_table.py +0 -255
  22. gitflow_analytics/tui/widgets/export_modal.py +0 -301
  23. gitflow_analytics/tui/widgets/progress_widget.py +0 -187
  24. {gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/WHEEL +0 -0
  25. {gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/entry_points.txt +0 -0
  26. {gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/licenses/LICENSE +0 -0
  27. {gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/top_level.txt +0 -0
gitflow_analytics/cli_wizards/install_wizard.py
@@ -117,7 +117,7 @@ class InstallWizard:
             return getpass.getpass(prompt)
         else:
             click.echo(f"⚠️ Non-interactive mode detected - {field_name} will be visible", err=True)
-            return click.prompt(prompt, hide_input=False)
+            return click.prompt(prompt, hide_input=False).strip()

     def _select_profile(self) -> dict:
         """Let user select installation profile."""
@@ -484,7 +484,7 @@ class InstallWizard:
             if not click.confirm("Add anyway?", default=False):
                 continue

-            repo_name = click.prompt("Repository name", default=path_obj.name)
+            repo_name = click.prompt("Repository name", default=path_obj.name).strip()

             repositories.append({"name": repo_name, "path": str(path_obj)})
             click.echo(f"Added repository #{len(repositories)}\n")
@@ -850,7 +850,7 @@ class InstallWizard:
                 "Output directory for reports",
                 type=str,
                 default="./reports",
-            )
+            ).strip()
            output_path = self._validate_directory_path(output_dir, "Output directory")
            if output_path is not None:
                output_dir = str(output_path)
@@ -863,7 +863,7 @@ class InstallWizard:
                 "Cache directory",
                 type=str,
                 default="./.gitflow-cache",
-            )
+            ).strip()
            cache_path = self._validate_directory_path(cache_dir, "Cache directory")
            if cache_path is not None:
                cache_dir = str(cache_path)
@@ -928,7 +928,7 @@ class InstallWizard:
             # Use existing file
             aliases_path = click.prompt(
                 "Path to aliases.yaml (relative to config)", default="../shared/aliases.yaml"
-            )
+            ).strip()

             # Ensure analysis.identity section exists
             if "identity" not in self.config_data.get("analysis", {}):

gitflow_analytics/config/repository.py
@@ -20,12 +20,13 @@ class RepositoryManager:
         self.github_config = github_config

     def discover_organization_repositories(
-        self, clone_base_path: Optional[Path] = None
+        self, clone_base_path: Optional[Path] = None, progress_callback=None
     ) -> list[RepositoryConfig]:
         """Discover repositories from GitHub organization.

         Args:
             clone_base_path: Base directory where repos should be cloned/found.
+            progress_callback: Optional callback function(repo_name, count) for progress updates.

         Returns:
             List of discovered repository configurations.
@@ -42,7 +43,14 @@ class RepositoryManager:
         if clone_base_path is None:
             raise ValueError("No base path available for repository cloning")

+        repo_count = 0
         for repo in org.get_repos():
+            repo_count += 1
+
+            # Call progress callback if provided
+            if progress_callback:
+                progress_callback(repo.name, repo_count)
+
             # Skip archived repositories
             if repo.archived:
                 continue
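
A hedged usage sketch of the new progress_callback hook in discover_organization_repositories; the callback signature follows the docstring above, while the RepositoryManager instance and the clone path are assumed for illustration:

    from pathlib import Path

    def on_repo_discovered(repo_name: str, count: int) -> None:
        # Lightweight console progress; a richer progress display could hook in here instead.
        print(f"[{count}] discovered {repo_name}")

    # `manager` is assumed to be an already-configured RepositoryManager instance.
    repos = manager.discover_organization_repositories(
        clone_base_path=Path("./repos"),
        progress_callback=on_repo_discovered,
    )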

gitflow_analytics/config/schema.py
@@ -456,3 +456,42 @@ class Config:
             raise ValueError(
                 f"Failed to discover repositories from organization {self.github.organization}: {e}"
             ) from e
+
+    def get_effective_ticket_platforms(self) -> list[str]:
+        """Get the effective list of ticket platforms to extract.
+
+        If ticket_platforms is explicitly configured in analysis config, use that.
+        Otherwise, infer from which PM platforms are actually configured.
+
+        Returns:
+            List of ticket platform names to extract (e.g., ['jira', 'github'])
+        """
+        # If explicitly configured, use that
+        if self.analysis.ticket_platforms is not None:
+            return self.analysis.ticket_platforms
+
+        # Otherwise, infer from configured PM platforms
+        platforms = []
+
+        # Check modern PM framework config
+        if self.pm:
+            if hasattr(self.pm, "jira") and self.pm.jira:
+                platforms.append("jira")
+            if hasattr(self.pm, "linear") and self.pm.linear:
+                platforms.append("linear")
+            if hasattr(self.pm, "clickup") and self.pm.clickup:
+                platforms.append("clickup")
+
+        # Check legacy JIRA config
+        if (self.jira or self.jira_integration) and "jira" not in platforms:
+            platforms.append("jira")
+
+        # Always include GitHub if we have GitHub configured (for issue tracking)
+        if self.github.token:
+            platforms.append("github")
+
+        # If nothing configured, fall back to common platforms
+        if not platforms:
+            platforms = ["jira", "github", "clickup", "linear"]
+
+        return platforms
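
Reading the new method, the resolution order is: an explicit analysis.ticket_platforms list wins, otherwise platforms are inferred from the configured PM integrations (plus GitHub when a token is present), and an all-platforms fallback applies when nothing is configured. A small illustration, assuming `config` is a loaded Config instance; the concrete values in the comments are made up:

    # Explicit configuration always wins:
    #   analysis.ticket_platforms = ["jira"]      -> ["jira"]
    # Inferred from configured integrations:
    #   pm.jira set and github.token present      -> ["jira", "github"]
    # Nothing configured at all:
    #   -> ["jira", "github", "clickup", "linear"]
    platforms = config.get_effective_ticket_platforms()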

gitflow_analytics/identity_llm/analysis_pass.py
@@ -155,8 +155,13 @@ class IdentityAnalysisPass:
             existing_emails.add(email.lower())

         for new_mapping in new_mappings:
-            # New mappings use primary_email
-            canonical_email = new_mapping["primary_email"].lower()
+            # New mappings use primary_email, but support canonical_email for backward compat
+            canonical_email = (
+                new_mapping.get("primary_email") or new_mapping.get("canonical_email", "")
+            ).lower()
+            if not canonical_email:
+                logger.warning(f"Skipping mapping with no email: {new_mapping}")
+                continue
             if canonical_email not in existing_emails:
                 existing_mappings.append(new_mapping)
                 logger.info(f"Added identity mapping for: {canonical_email}")

gitflow_analytics/models/database.py
@@ -5,7 +5,7 @@ import os
 import tempfile
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Any
+from typing import Any, Optional

 from sqlalchemy import (
     JSON,
@@ -869,9 +869,30 @@ class WeeklyTrends(Base):
     )


+class SchemaVersion(Base):
+    """Track database schema versions for automatic migrations.
+
+    WHY: Schema changes (like timezone-aware timestamps) require migration
+    to ensure old cache databases work correctly without user intervention.
+    This table tracks the current schema version to trigger automatic upgrades.
+    """
+
+    __tablename__ = "schema_version"
+
+    id = Column(Integer, primary_key=True)
+    version = Column(String, nullable=False)  # e.g., "2.0"
+    upgraded_at = Column(DateTime(timezone=True), default=utcnow_tz_aware)
+    previous_version = Column(String, nullable=True)
+    migration_notes = Column(String, nullable=True)
+
+
 class Database:
     """Database connection manager with robust permission handling."""

+    # Schema version constants
+    CURRENT_SCHEMA_VERSION = "2.0"  # Timezone-aware timestamps
+    LEGACY_SCHEMA_VERSION = "1.0"  # Timezone-naive timestamps
+
     def __init__(self, db_path: Path):
         """
         Initialize database connection with proper error handling.
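
A small sketch of inspecting the new table; SchemaVersion and the SessionLocal factory come from this diff, while the usage below assumes SQLAlchemy 1.4+ context-manager sessions and an already-initialized Database instance named `db`:

    with db.SessionLocal() as session:
        # Most recent schema record wins, matching the ORDER BY id DESC lookup in the migration check.
        latest = session.query(SchemaVersion).order_by(SchemaVersion.id.desc()).first()
        if latest is not None:
            print(latest.version, latest.previous_version, latest.migration_notes)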
@@ -949,10 +970,21 @@ class Database:
                 },
             )

-            # Test the connection and create tables
-            Base.metadata.create_all(self.engine)
+            # Check schema version BEFORE creating tables to detect legacy databases
             self.SessionLocal = sessionmaker(bind=self.engine)
-            # Apply migrations for existing databases
+            needs_migration = self._check_schema_version_before_create()
+
+            # Create/update tables
+            Base.metadata.create_all(self.engine)
+
+            # Perform migration if needed (after tables are created/updated)
+            if needs_migration:
+                self._perform_schema_migration()
+            else:
+                # No migration needed - record current schema version if not already recorded
+                self._ensure_schema_version_recorded()
+
+            # Apply other migrations for existing databases
             self._apply_migrations()

             # Test that we can actually write to the database
@@ -988,9 +1020,21 @@ class Database:
                 },
             )

-            Base.metadata.create_all(self.engine)
+            # Check schema version BEFORE creating tables to detect legacy databases
             self.SessionLocal = sessionmaker(bind=self.engine)
-            # Apply migrations for existing databases
+            needs_migration = self._check_schema_version_before_create()
+
+            # Create/update tables
+            Base.metadata.create_all(self.engine)
+
+            # Perform migration if needed (after tables are created/updated)
+            if needs_migration:
+                self._perform_schema_migration()
+            else:
+                # No migration needed - record current schema version if not already recorded
+                self._ensure_schema_version_recorded()
+
+            # Apply other migrations for existing databases
             self._apply_migrations()

             # Test write capability
@@ -1023,9 +1067,21 @@ class Database:
                 "sqlite:///:memory:", connect_args={"check_same_thread": False}
             )

-            Base.metadata.create_all(self.engine)
+            # Check schema version BEFORE creating tables to detect legacy databases
             self.SessionLocal = sessionmaker(bind=self.engine)
-            # Apply migrations for existing databases
+            needs_migration = self._check_schema_version_before_create()
+
+            # Create/update tables
+            Base.metadata.create_all(self.engine)
+
+            # Perform migration if needed (after tables are created/updated)
+            if needs_migration:
+                self._perform_schema_migration()
+            else:
+                # No migration needed - record current schema version if not already recorded
+                self._ensure_schema_version_recorded()
+
+            # Apply other migrations for existing databases
             self._apply_migrations()

             self.is_readonly_fallback = True
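
All three initialization paths (normal, fallback location, and in-memory) now share the same detect → create_all → migrate-or-record → apply-migrations sequence; the helper methods themselves appear in the next hunk. A condensed sketch of that control flow, paraphrasing the diff rather than reproducing any one code path:

    def initialize(db: "Database") -> None:
        # 1. Inspect sqlite_master before create_all so a legacy v1.0 cache is still detectable.
        needs_migration = db._check_schema_version_before_create()
        # 2. Create or update tables (including schema_version itself).
        Base.metadata.create_all(db.engine)
        # 3. Either clear incompatible v1.0 cache data and stamp v2.0, or stamp a fresh database.
        if needs_migration:
            db._perform_schema_migration()
        else:
            db._ensure_schema_version_recorded()
        # 4. Remaining column-level migrations for older databases.
        db._apply_migrations()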
@@ -1117,9 +1173,174 @@ class Database:

     def init_db(self) -> None:
         """Initialize database tables and apply migrations."""
+        needs_migration = self._check_schema_version_before_create()
         Base.metadata.create_all(self.engine)
+        if needs_migration:
+            self._perform_schema_migration()
+        else:
+            self._ensure_schema_version_recorded()
         self._apply_migrations()

+    def _check_schema_version_before_create(self) -> bool:
+        """Check if database needs migration BEFORE create_all is called.
+
+        WHY: We need to check for legacy databases BEFORE creating new tables,
+        otherwise we can't distinguish between a fresh database and a legacy one.
+
+        Returns:
+            True if migration is needed, False otherwise
+        """
+        try:
+            with self.engine.connect() as conn:
+                # Check if schema_version table exists
+                result = conn.execute(
+                    text(
+                        "SELECT name FROM sqlite_master WHERE type='table' AND name='schema_version'"
+                    )
+                )
+                schema_table_exists = result.fetchone() is not None
+
+                if schema_table_exists:
+                    # Check current version
+                    result = conn.execute(
+                        text("SELECT version FROM schema_version ORDER BY id DESC LIMIT 1")
+                    )
+                    row = result.fetchone()
+
+                    if row and row[0] != self.CURRENT_SCHEMA_VERSION:
+                        # Version mismatch - needs migration
+                        logger.warning(
+                            f"⚠️ Schema version mismatch: {row[0]} → {self.CURRENT_SCHEMA_VERSION}"
+                        )
+                        return True
+                    # else: Already at current version or no version record yet
+                    return False
+                else:
+                    # No schema_version table - check if this is legacy or new
+                    result = conn.execute(
+                        text(
+                            "SELECT name FROM sqlite_master WHERE type='table' AND name='cached_commits'"
+                        )
+                    )
+                    has_cached_commits = result.fetchone() is not None
+
+                    if has_cached_commits:
+                        # Check if table has data
+                        result = conn.execute(text("SELECT COUNT(*) FROM cached_commits"))
+                        commit_count = result.fetchone()[0]
+
+                        if commit_count > 0:
+                            # Legacy database with data - needs migration
+                            logger.warning("⚠️ Old cache schema detected (v1.0 → v2.0)")
+                            logger.info(" This is a one-time operation due to timezone fix")
+                            return True
+
+                    # New database or empty legacy database - no migration needed
+                    return False
+
+        except Exception as e:
+            # Don't fail initialization due to schema check issues
+            logger.debug(f"Schema version check failed: {e}")
+            return False
+
+    def _perform_schema_migration(self) -> None:
+        """Perform the actual schema migration after tables are created.
+
+        WHY: Separating migration from detection allows us to update table schemas
+        via create_all before clearing/migrating data.
+        """
+        try:
+            with self.engine.connect() as conn:
+                logger.info("🔄 Automatically upgrading cache database...")
+                logger.info(" Clearing old cache data (timezone schema incompatible)...")
+
+                # Clear cached data tables
+                conn.execute(text("DELETE FROM cached_commits"))
+                conn.execute(text("DELETE FROM pull_request_cache"))
+                conn.execute(text("DELETE FROM issue_cache"))
+                conn.execute(text("DELETE FROM repository_analysis_status"))
+
+                # Also clear qualitative analysis data if it exists
+                try:
+                    conn.execute(text("DELETE FROM qualitative_commits"))
+                    conn.execute(text("DELETE FROM pattern_cache"))
+                except Exception:
+                    # These tables might not exist in all databases
+                    pass
+
+                conn.commit()
+
+                # Record the schema upgrade
+                self._record_schema_version(
+                    conn,
+                    self.CURRENT_SCHEMA_VERSION,
+                    self.LEGACY_SCHEMA_VERSION,
+                    "Migrated to timezone-aware timestamps (v2.0)",
+                )
+
+                logger.info(" Migration complete - cache will be rebuilt on next analysis")
+                logger.info("✅ Cache database upgraded successfully")
+
+        except Exception as e:
+            logger.error(f"Migration failed: {e}")
+            # Don't raise - let the system continue and rebuild cache from scratch
+
+    def _ensure_schema_version_recorded(self) -> None:
+        """Ensure schema version is recorded for databases that didn't need migration.
+
+        WHY: Fresh databases and already-migrated databases need to have their
+        schema version recorded for future migration detection.
+        """
+        try:
+            with self.engine.connect() as conn:
+                # Check if version is already recorded
+                result = conn.execute(text("SELECT COUNT(*) FROM schema_version"))
+                count = result.fetchone()[0]
+
+                if count == 0:
+                    # No version recorded - this is a fresh database
+                    self._record_schema_version(
+                        conn, self.CURRENT_SCHEMA_VERSION, None, "Initial schema creation"
+                    )
+                    logger.debug(f"Recorded initial schema version: {self.CURRENT_SCHEMA_VERSION}")
+
+        except Exception as e:
+            # Don't fail if we can't record version
+            logger.debug(f"Could not ensure schema version recorded: {e}")
+
+    def _record_schema_version(
+        self, conn, version: str, previous_version: Optional[str], notes: Optional[str]
+    ) -> None:
+        """Record schema version in the database.
+
+        Args:
+            conn: Database connection
+            version: New schema version
+            previous_version: Previous schema version (None for initial)
+            notes: Migration notes
+        """
+        try:
+            from datetime import datetime, timezone
+
+            # Insert new schema version record
+            conn.execute(
+                text(
+                    """
+                    INSERT INTO schema_version (version, upgraded_at, previous_version, migration_notes)
+                    VALUES (:version, :upgraded_at, :previous_version, :notes)
+                    """
+                ),
+                {
+                    "version": version,
+                    "upgraded_at": datetime.now(timezone.utc),
+                    "previous_version": previous_version,
+                    "notes": notes,
+                },
+            )
+            conn.commit()
+        except Exception as e:
+            logger.debug(f"Could not record schema version: {e}")
+
     def _apply_migrations(self) -> None:
         """Apply database migrations for backward compatibility.


{gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gitflow-analytics
-Version: 3.6.2
+Version: 3.7.4
 Summary: Analyze Git repositories for developer productivity insights
 Author-email: Bob Matyas <bobmatnyc@gmail.com>
 License: MIT
@@ -52,10 +52,8 @@ Requires-Dist: types-PyYAML>=6.0; extra == "dev"
 Requires-Dist: types-requests>=2.28; extra == "dev"
 Provides-Extra: github
 Requires-Dist: pygithub>=1.58; extra == "github"
-Provides-Extra: tui
-Requires-Dist: textual>=0.41.0; extra == "tui"
 Provides-Extra: all
-Requires-Dist: gitflow-analytics[github,tui]; extra == "all"
+Requires-Dist: gitflow-analytics[github]; extra == "all"
 Dynamic: license-file

 # GitFlow Analytics

{gitflow_analytics-3.6.2.dist-info → gitflow_analytics-3.7.4.dist-info}/RECORD
@@ -1,6 +1,6 @@
-gitflow_analytics/__init__.py,sha256=yN1dyAUu4l9qX-YNAGRItEf4RFFe-5GQiOntXPIfdxo,683
-gitflow_analytics/_version.py,sha256=Yo0R3JqQQalk8q_bjgTlzcf-eHgi2oUhbEiu0U4YCzQ,137
-gitflow_analytics/cli.py,sha256=fFgVKLAMlk0Dx-ZzB4b-0qR-veJxEQ6H1T6fv6-egjI,262752
+gitflow_analytics/__init__.py,sha256=W3Jaey5wuT1nBPehVLTIRkVIyBa5jgYOlBKc_UFfh-4,773
+gitflow_analytics/_version.py,sha256=xZ8QK9HyXEtX-sea8S5xFccBqQR_dn4tQAInEXd1CqY,137
+gitflow_analytics/cli.py,sha256=pYW6V0b6SRa3-NyOmXGQhf5emcKHUHgOVL2PFOAS8LQ,273331
 gitflow_analytics/config.py,sha256=XRuxvzLWyn_ML7mDCcuZ9-YFNAEsnt33vIuWxQQ_jxg,1033
 gitflow_analytics/constants.py,sha256=GXEncUJS9ijOI5KWtQCTANwdqxPfXpw-4lNjhaWTKC4,2488
 gitflow_analytics/verify_activity.py,sha256=aRQnmypf5NDasXudf2iz_WdJnCWtwlbAiJ5go0DJLSU,27050
@@ -11,15 +11,15 @@ gitflow_analytics/classification/feature_extractor.py,sha256=W82vztPQO8-MFw9Yt17
 gitflow_analytics/classification/linguist_analyzer.py,sha256=HjLx9mM7hGXtrvMba6osovHJLAacTx9oDmN6CS5w0bE,17687
 gitflow_analytics/classification/model.py,sha256=2KbmFh9MpyvHMcNHbqwUTAAVLHHu3MiTfFIPyZSGa-8,16356
 gitflow_analytics/cli_wizards/__init__.py,sha256=D73D97cS1hZsB_fCQQaAiWtd_w2Lb8TtcGc9Pn2DIyE,343
-gitflow_analytics/cli_wizards/install_wizard.py,sha256=x5GaYqAgnFwLYyuHnjbuRHL-yw4Mxfi3OzJcQJszeM4,45005
+gitflow_analytics/cli_wizards/install_wizard.py,sha256=ib2H1JOaV2ts9iXN0nhHIxtITWhe22XCiT2gqpZa0HI,45045
 gitflow_analytics/cli_wizards/run_launcher.py,sha256=J6G_C7IqxPg7_GhAfbV99D1dIIWwb1s_qmHC7Iv2iGI,15038
 gitflow_analytics/config/__init__.py,sha256=KziRIbBJctB5LOLcKLzELWA1rXwjS6-C2_DeM_hT9rM,1133
 gitflow_analytics/config/aliases.py,sha256=z9F0X6qbbF544Tw7sHlOoBj5mpRSddMkCpoKLzvVzDU,10960
 gitflow_analytics/config/errors.py,sha256=IBKhAIwJ4gscZFnLDyE3jEp03wn2stPR7JQJXNSIfok,10386
 gitflow_analytics/config/loader.py,sha256=EiksTB1Uqey63FxIvuud_kMdab3sNDfuICE_RwMLYFA,37290
 gitflow_analytics/config/profiles.py,sha256=yUjFAWW6uzOUdi5qlPE-QV9681HigyrLiSJFpL8X9A0,7967
-gitflow_analytics/config/repository.py,sha256=maptMAdCKDsuMAfoTAaTrMPVfVd_tKNLRenvuPe1-t4,4350
-gitflow_analytics/config/schema.py,sha256=lFN80-YcSqu33UwiJryFHn8F5_zX619AaJXSuJ3aht8,15271
+gitflow_analytics/config/repository.py,sha256=u7JHcKvqmXOl3i7EmNUfJ6wtjzElxPMyXRkATnVyQ0I,4685
+gitflow_analytics/config/schema.py,sha256=XP2VTpMzGIZ-dn0OcCNIgmTFe6_sIFyJarLcy-zGg2A,16753
 gitflow_analytics/config/validator.py,sha256=l7AHjXYJ8wEmyA1rn2WiItZXtAiRb9YBLjFCDl53qKM,5907
 gitflow_analytics/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 gitflow_analytics/core/analyzer.py,sha256=59kGObzjziOb8geFyZMKCUvWmo3hcXE0eTgrjYEc1XA,58736
@@ -39,7 +39,7 @@ gitflow_analytics/extractors/ml_tickets.py,sha256=js5OFmbZt9JHy5r_crehhuB1Mxrkdf
 gitflow_analytics/extractors/story_points.py,sha256=IggP-Ei832oV9aD08a3li08kmjF3BqyU9i8EgAZcpfs,5324
 gitflow_analytics/extractors/tickets.py,sha256=2s5Iu7eZXVi8yl9Yme5HKzrJo3mDjzsSOUr_iJGUeLM,43799
 gitflow_analytics/identity_llm/__init__.py,sha256=tpWDwapm6zIyb8LxLO8A6pHlE3wNorT_fBL-Yp9-XnU,250
-gitflow_analytics/identity_llm/analysis_pass.py,sha256=lYfjM6f82agXstTrUBsS0R9-ipfRnviIqe8ExkjKnvc,9459
+gitflow_analytics/identity_llm/analysis_pass.py,sha256=FJF1BEGekHRY4i5jasgxxL_UWFGYP5kBkvn8hAtMorY,9728
 gitflow_analytics/identity_llm/analyzer.py,sha256=-a7lUJt_Dlgx9aNOH1YlFqPe7BSxtwY2RoGruIzwrzs,17932
 gitflow_analytics/identity_llm/models.py,sha256=F1RN6g8og9esj-m4TPY_928Ci9TA43G9NFNHYf4zHHQ,2677
 gitflow_analytics/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -51,7 +51,7 @@ gitflow_analytics/metrics/activity_scoring.py,sha256=h1uj_6dTKpCwNJfsimfaY0TB3Qa
 gitflow_analytics/metrics/branch_health.py,sha256=MkfyiUc1nHEakKBJ_uTlvxmofX1QX_s4hm4XBTYKVLM,17522
 gitflow_analytics/metrics/dora.py,sha256=U4Xk0tr7kPcpR7r-PevYBUDtZPkDIG-w_yS2DJOlTrk,27549
 gitflow_analytics/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-gitflow_analytics/models/database.py,sha256=DuVfDoyWUlzzQmf1JFQW7gKbPf2ylNO5gHTzUUl0hTU,44372
+gitflow_analytics/models/database.py,sha256=m2WRdMhM3e42rB2akf3sPWYckYc7aSMS9UVZJNvDQHA,53818
 gitflow_analytics/pm_framework/__init__.py,sha256=2wEfO1uF6TlsymnKeMJimdLYay3K9rigGThhpGVBA3A,3698
 gitflow_analytics/pm_framework/base.py,sha256=3fXjekfscGy_WslIbyVXdAhWDIIElei9okSyleUJgqU,15247
 gitflow_analytics/pm_framework/models.py,sha256=uikCapq6KGe_zbWymzvNFvJaN38Nld9i9gBJFKTVtNo,7115
@@ -118,24 +118,11 @@ gitflow_analytics/security/extractors/vulnerability_scanner.py,sha256=TWK1fPMN5s
 gitflow_analytics/training/__init__.py,sha256=YT5p7Wm4U8trzLnbS5FASJBWPMKhqp3rlAThjpxWnxo,143
 gitflow_analytics/training/model_loader.py,sha256=xGZLSopGxDhC--2XN6ytRgi2CyjOKY4zS4fZ-ZlO6lM,13245
 gitflow_analytics/training/pipeline.py,sha256=PQegTk_-OsPexVyRDfiy-3Df-7pcs25C4vPASr-HT9E,19951
-gitflow_analytics/tui/__init__.py,sha256=1liMpth2RvUkmKfNUEnYEZkYi2RpYITFMmGKtBBwiUk,126
-gitflow_analytics/tui/app.py,sha256=rD0UTQqu9u3yrnJ7akcUNffZbJu-mfAFG5PTTLEcRo0,22200
-gitflow_analytics/tui/progress_adapter.py,sha256=fb6z2N87VAiQo0IyIpho23XTSHvQ-ydU6rVzHRzzQu8,11300
-gitflow_analytics/tui/screens/__init__.py,sha256=JVnPy-o4V6D2jehliXAbRET9x8zWmHR7PPk2is-l9OM,327
-gitflow_analytics/tui/screens/analysis_progress_screen.py,sha256=j1Mg5WZOKPGMtQoKxLUjy4UM9tKllcl3Ktqr0WJgOaY,36660
-gitflow_analytics/tui/screens/configuration_screen.py,sha256=QLtTz8xFAGdIxYsRmUyBr4m-1PteAk3_kxfE1UexqgA,19345
-gitflow_analytics/tui/screens/loading_screen.py,sha256=5kh0kKKCa6-NMlZyPfu2fE4ROgmjU8_jA2xCUX3z5iY,14451
-gitflow_analytics/tui/screens/main_screen.py,sha256=6aIzJrDtgXDlgGcW--wQUqncBBa_hcUytiLu76fMALw,11482
-gitflow_analytics/tui/screens/results_screen.py,sha256=3RHLZ53s9TrcyyE6IfzaaS3QL7uqjm_9fFZCvsRiZUc,28875
-gitflow_analytics/tui/widgets/__init__.py,sha256=85l6vkJuRGJNvej-nUZZoNg562zl_1JFOlewVer1mLI,259
-gitflow_analytics/tui/widgets/data_table.py,sha256=8fGNG4m7H41vCid3QwCHJa7bd8qu_DKrDf22iCks3XA,8722
-gitflow_analytics/tui/widgets/export_modal.py,sha256=L-XKPOc6u-fow2TudPgDnC0kXZM1WZuGd_jahtV8lhg,10737
-gitflow_analytics/tui/widgets/progress_widget.py,sha256=Qny6Q1nU0Pr3aj4aHfXLaRjya9MH3rldR2HWYiaQyGE,6167
 gitflow_analytics/ui/__init__.py,sha256=UBhYhZMvwlSrCuGWjkIdoP2zNbiQxOHOli-I8mqIZUE,441
 gitflow_analytics/ui/progress_display.py,sha256=3xJnCOSs1DRVAfS-rTu37EsLfWDFW5-mbv-bPS9NMm4,59182
-gitflow_analytics-3.6.2.dist-info/licenses/LICENSE,sha256=xwvSwY1GYXpRpmbnFvvnbmMwpobnrdN9T821sGvjOY0,1066
-gitflow_analytics-3.6.2.dist-info/METADATA,sha256=1yfOh855xYsJjdZPCzTNM3JODbzJZ3WRB65uW9wdp6g,34122
-gitflow_analytics-3.6.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-gitflow_analytics-3.6.2.dist-info/entry_points.txt,sha256=a3y8HnfLOvK1QVOgAkDY6VQXXm3o9ZSQRZrpiaS3hEM,65
-gitflow_analytics-3.6.2.dist-info/top_level.txt,sha256=CQyxZXjKvpSB1kgqqtuE0PCRqfRsXZJL8JrYpJKtkrk,18
-gitflow_analytics-3.6.2.dist-info/RECORD,,
+gitflow_analytics-3.7.4.dist-info/licenses/LICENSE,sha256=xwvSwY1GYXpRpmbnFvvnbmMwpobnrdN9T821sGvjOY0,1066
+gitflow_analytics-3.7.4.dist-info/METADATA,sha256=lfxt6pRpjIjcpNDvEpbZxur80jVaUDVoCS7taWt8BO8,34051
+gitflow_analytics-3.7.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+gitflow_analytics-3.7.4.dist-info/entry_points.txt,sha256=a3y8HnfLOvK1QVOgAkDY6VQXXm3o9ZSQRZrpiaS3hEM,65
+gitflow_analytics-3.7.4.dist-info/top_level.txt,sha256=CQyxZXjKvpSB1kgqqtuE0PCRqfRsXZJL8JrYpJKtkrk,18
+gitflow_analytics-3.7.4.dist-info/RECORD,,

gitflow_analytics/tui/__init__.py
@@ -1,5 +0,0 @@
-"""Terminal User Interface for GitFlow Analytics."""
-
-from .app import GitFlowAnalyticsApp
-
-__all__ = ["GitFlowAnalyticsApp"]