gitflow-analytics 1.0.1__py3-none-any.whl → 1.3.6__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- gitflow_analytics/__init__.py +11 -11
- gitflow_analytics/_version.py +2 -2
- gitflow_analytics/classification/__init__.py +31 -0
- gitflow_analytics/classification/batch_classifier.py +752 -0
- gitflow_analytics/classification/classifier.py +464 -0
- gitflow_analytics/classification/feature_extractor.py +725 -0
- gitflow_analytics/classification/linguist_analyzer.py +574 -0
- gitflow_analytics/classification/model.py +455 -0
- gitflow_analytics/cli.py +4490 -378
- gitflow_analytics/cli_rich.py +503 -0
- gitflow_analytics/config/__init__.py +43 -0
- gitflow_analytics/config/errors.py +261 -0
- gitflow_analytics/config/loader.py +904 -0
- gitflow_analytics/config/profiles.py +264 -0
- gitflow_analytics/config/repository.py +124 -0
- gitflow_analytics/config/schema.py +441 -0
- gitflow_analytics/config/validator.py +154 -0
- gitflow_analytics/config.py +44 -398
- gitflow_analytics/core/analyzer.py +1320 -172
- gitflow_analytics/core/branch_mapper.py +132 -132
- gitflow_analytics/core/cache.py +1554 -175
- gitflow_analytics/core/data_fetcher.py +1193 -0
- gitflow_analytics/core/identity.py +571 -185
- gitflow_analytics/core/metrics_storage.py +526 -0
- gitflow_analytics/core/progress.py +372 -0
- gitflow_analytics/core/schema_version.py +269 -0
- gitflow_analytics/extractors/base.py +13 -11
- gitflow_analytics/extractors/ml_tickets.py +1100 -0
- gitflow_analytics/extractors/story_points.py +77 -59
- gitflow_analytics/extractors/tickets.py +841 -89
- gitflow_analytics/identity_llm/__init__.py +6 -0
- gitflow_analytics/identity_llm/analysis_pass.py +231 -0
- gitflow_analytics/identity_llm/analyzer.py +464 -0
- gitflow_analytics/identity_llm/models.py +76 -0
- gitflow_analytics/integrations/github_integration.py +258 -87
- gitflow_analytics/integrations/jira_integration.py +572 -123
- gitflow_analytics/integrations/orchestrator.py +206 -82
- gitflow_analytics/metrics/activity_scoring.py +322 -0
- gitflow_analytics/metrics/branch_health.py +470 -0
- gitflow_analytics/metrics/dora.py +542 -179
- gitflow_analytics/models/database.py +986 -59
- gitflow_analytics/pm_framework/__init__.py +115 -0
- gitflow_analytics/pm_framework/adapters/__init__.py +50 -0
- gitflow_analytics/pm_framework/adapters/jira_adapter.py +1845 -0
- gitflow_analytics/pm_framework/base.py +406 -0
- gitflow_analytics/pm_framework/models.py +211 -0
- gitflow_analytics/pm_framework/orchestrator.py +652 -0
- gitflow_analytics/pm_framework/registry.py +333 -0
- gitflow_analytics/qualitative/__init__.py +29 -0
- gitflow_analytics/qualitative/chatgpt_analyzer.py +259 -0
- gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
- gitflow_analytics/qualitative/classifiers/change_type.py +742 -0
- gitflow_analytics/qualitative/classifiers/domain_classifier.py +506 -0
- gitflow_analytics/qualitative/classifiers/intent_analyzer.py +535 -0
- gitflow_analytics/qualitative/classifiers/llm/__init__.py +35 -0
- gitflow_analytics/qualitative/classifiers/llm/base.py +193 -0
- gitflow_analytics/qualitative/classifiers/llm/batch_processor.py +383 -0
- gitflow_analytics/qualitative/classifiers/llm/cache.py +479 -0
- gitflow_analytics/qualitative/classifiers/llm/cost_tracker.py +435 -0
- gitflow_analytics/qualitative/classifiers/llm/openai_client.py +403 -0
- gitflow_analytics/qualitative/classifiers/llm/prompts.py +373 -0
- gitflow_analytics/qualitative/classifiers/llm/response_parser.py +287 -0
- gitflow_analytics/qualitative/classifiers/llm_commit_classifier.py +607 -0
- gitflow_analytics/qualitative/classifiers/risk_analyzer.py +438 -0
- gitflow_analytics/qualitative/core/__init__.py +13 -0
- gitflow_analytics/qualitative/core/llm_fallback.py +657 -0
- gitflow_analytics/qualitative/core/nlp_engine.py +382 -0
- gitflow_analytics/qualitative/core/pattern_cache.py +479 -0
- gitflow_analytics/qualitative/core/processor.py +673 -0
- gitflow_analytics/qualitative/enhanced_analyzer.py +2236 -0
- gitflow_analytics/qualitative/example_enhanced_usage.py +420 -0
- gitflow_analytics/qualitative/models/__init__.py +25 -0
- gitflow_analytics/qualitative/models/schemas.py +306 -0
- gitflow_analytics/qualitative/utils/__init__.py +13 -0
- gitflow_analytics/qualitative/utils/batch_processor.py +339 -0
- gitflow_analytics/qualitative/utils/cost_tracker.py +345 -0
- gitflow_analytics/qualitative/utils/metrics.py +361 -0
- gitflow_analytics/qualitative/utils/text_processing.py +285 -0
- gitflow_analytics/reports/__init__.py +100 -0
- gitflow_analytics/reports/analytics_writer.py +550 -18
- gitflow_analytics/reports/base.py +648 -0
- gitflow_analytics/reports/branch_health_writer.py +322 -0
- gitflow_analytics/reports/classification_writer.py +924 -0
- gitflow_analytics/reports/cli_integration.py +427 -0
- gitflow_analytics/reports/csv_writer.py +1700 -216
- gitflow_analytics/reports/data_models.py +504 -0
- gitflow_analytics/reports/database_report_generator.py +427 -0
- gitflow_analytics/reports/example_usage.py +344 -0
- gitflow_analytics/reports/factory.py +499 -0
- gitflow_analytics/reports/formatters.py +698 -0
- gitflow_analytics/reports/html_generator.py +1116 -0
- gitflow_analytics/reports/interfaces.py +489 -0
- gitflow_analytics/reports/json_exporter.py +2770 -0
- gitflow_analytics/reports/narrative_writer.py +2289 -158
- gitflow_analytics/reports/story_point_correlation.py +1144 -0
- gitflow_analytics/reports/weekly_trends_writer.py +389 -0
- gitflow_analytics/training/__init__.py +5 -0
- gitflow_analytics/training/model_loader.py +377 -0
- gitflow_analytics/training/pipeline.py +550 -0
- gitflow_analytics/tui/__init__.py +5 -0
- gitflow_analytics/tui/app.py +724 -0
- gitflow_analytics/tui/screens/__init__.py +8 -0
- gitflow_analytics/tui/screens/analysis_progress_screen.py +496 -0
- gitflow_analytics/tui/screens/configuration_screen.py +523 -0
- gitflow_analytics/tui/screens/loading_screen.py +348 -0
- gitflow_analytics/tui/screens/main_screen.py +321 -0
- gitflow_analytics/tui/screens/results_screen.py +722 -0
- gitflow_analytics/tui/widgets/__init__.py +7 -0
- gitflow_analytics/tui/widgets/data_table.py +255 -0
- gitflow_analytics/tui/widgets/export_modal.py +301 -0
- gitflow_analytics/tui/widgets/progress_widget.py +187 -0
- gitflow_analytics-1.3.6.dist-info/METADATA +1015 -0
- gitflow_analytics-1.3.6.dist-info/RECORD +122 -0
- gitflow_analytics-1.0.1.dist-info/METADATA +0 -463
- gitflow_analytics-1.0.1.dist-info/RECORD +0 -31
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/WHEEL +0 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.3.6.dist-info}/top_level.txt +0 -0
--- a/gitflow_analytics/integrations/jira_integration.py
+++ b/gitflow_analytics/integrations/jira_integration.py
@@ -1,182 +1,253 @@
 """JIRA API integration for story point and ticket enrichment."""
+
 import base64
-
+import socket
+import time
+from datetime import datetime
+from typing import Any, Optional

 import requests
-from requests.
+from requests.adapters import HTTPAdapter
+from requests.exceptions import ConnectionError, RequestException, Timeout
+from urllib3.util.retry import Retry

 from ..core.cache import GitAnalysisCache


 class JIRAIntegration:
     """Integrate with JIRA API for ticket and story point data."""
-
-    def __init__(
-
-
+
+    def __init__(
+        self,
+        base_url: str,
+        username: str,
+        api_token: str,
+        cache: GitAnalysisCache,
+        story_point_fields: Optional[list[str]] = None,
+        dns_timeout: int = 10,
+        connection_timeout: int = 30,
+        max_retries: int = 3,
+        backoff_factor: float = 1.0,
+        enable_proxy: bool = False,
+        proxy_url: Optional[str] = None,
+    ):
         """Initialize JIRA integration.
-
+
         Args:
             base_url: JIRA instance base URL (e.g., https://company.atlassian.net)
             username: JIRA username/email
             api_token: JIRA API token
             cache: Git analysis cache for storing JIRA data
             story_point_fields: List of custom field IDs for story points
+            dns_timeout: DNS resolution timeout in seconds (default: 10)
+            connection_timeout: HTTP connection timeout in seconds (default: 30)
+            max_retries: Maximum number of retry attempts (default: 3)
+            backoff_factor: Exponential backoff factor for retries (default: 1.0)
+            enable_proxy: Whether to use proxy settings (default: False)
+            proxy_url: Proxy URL if proxy is enabled (default: None)
         """
-        self.base_url = base_url.rstrip(
+        self.base_url = base_url.rstrip("/")
         self.cache = cache
-
+        self.dns_timeout = dns_timeout
+        self.connection_timeout = connection_timeout
+        self.max_retries = max_retries
+        self.backoff_factor = backoff_factor
+        self.enable_proxy = enable_proxy
+        self.proxy_url = proxy_url
+
+        # Network connectivity status
+        self._connection_validated = False
+        self._last_dns_check = 0
+        self._dns_check_interval = 300  # 5 minutes
+
         # Set up authentication
         credentials = base64.b64encode(f"{username}:{api_token}".encode()).decode()
         self.headers = {
             "Authorization": f"Basic {credentials}",
             "Accept": "application/json",
-            "Content-Type": "application/json"
+            "Content-Type": "application/json",
+            "User-Agent": "GitFlow-Analytics-JIRA/1.0",
         }
-
+
         # Default story point field names/IDs
         self.story_point_fields = story_point_fields or [
             "customfield_10016",  # Common story points field
             "customfield_10021",  # Alternative field
-            "Story Points",
-            "storypoints",
+            "Story Points",  # Field name
+            "storypoints",  # Alternative name
             "customfield_10002",  # Another common ID
         ]
-
+
         # Cache for field mapping
         self._field_mapping = None
-
-
+
+        # Initialize HTTP session with enhanced error handling
+        self._session = self._create_resilient_session()
+
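Review note: the constructor now takes network-resilience parameters and builds a single shared `requests` session up front. A minimal sketch of wiring it up — the `GitAnalysisCache(...)` arguments below are placeholders; its actual signature lives in `gitflow_analytics/core/cache.py`:

```python
from gitflow_analytics.core.cache import GitAnalysisCache
from gitflow_analytics.integrations.jira_integration import JIRAIntegration

cache = GitAnalysisCache(".gitflow-cache")  # hypothetical constructor arguments
jira = JIRAIntegration(
    base_url="https://company.atlassian.net",
    username="bot@company.com",
    api_token="<token>",
    cache=cache,
    dns_timeout=10,         # seconds to wait on DNS resolution
    connection_timeout=30,  # per-request HTTP timeout
    max_retries=3,          # urllib3 retry budget
    backoff_factor=1.0,     # sleeps roughly 1s, 2s, 4s between retries
)
```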
+    def enrich_commits_with_jira_data(self, commits: list[dict[str, Any]]) -> None:
         """Enrich commits with JIRA story points by looking up ticket references.
-
+
         Args:
             commits: List of commit dictionaries to enrich
         """
+        # Validate network connectivity before attempting JIRA operations
+        if not self._validate_network_connectivity():
+            print(" ⚠️ JIRA network connectivity issues detected, skipping commit enrichment")
+            return
+
         # Collect all unique JIRA tickets from commits
         jira_tickets = set()
         for commit in commits:
-            ticket_refs = commit.get(
+            ticket_refs = commit.get("ticket_references", [])
             for ref in ticket_refs:
-                if isinstance(ref, dict) and ref.get(
-                    jira_tickets.add(ref[
+                if isinstance(ref, dict) and ref.get("platform") == "jira":
+                    jira_tickets.add(ref["id"])
                 elif isinstance(ref, str) and self._is_jira_ticket(ref):
                     jira_tickets.add(ref)
-
+
         if not jira_tickets:
             return
-
-        # Fetch ticket data from JIRA
+
+        # Fetch ticket data from JIRA with enhanced error handling
         ticket_data = self._fetch_tickets_batch(list(jira_tickets))
-
+
         # Enrich commits with story points
         for commit in commits:
             commit_story_points = 0
-            ticket_refs = commit.get(
-
+            ticket_refs = commit.get("ticket_references", [])
+
             for ref in ticket_refs:
                 ticket_id = None
-                if isinstance(ref, dict) and ref.get(
-                    ticket_id = ref[
+                if isinstance(ref, dict) and ref.get("platform") == "jira":
+                    ticket_id = ref["id"]
                 elif isinstance(ref, str) and self._is_jira_ticket(ref):
                     ticket_id = ref
-
+
                 if ticket_id and ticket_id in ticket_data:
-                    points = ticket_data[ticket_id].get(
+                    points = ticket_data[ticket_id].get("story_points", 0)
                     if points:
                         commit_story_points = max(commit_story_points, points)
-
+
             if commit_story_points > 0:
-                commit[
-
-    def enrich_prs_with_jira_data(self, prs:
+                commit["story_points"] = commit_story_points
+
+    def enrich_prs_with_jira_data(self, prs: list[dict[str, Any]]) -> None:
         """Enrich PRs with JIRA story points.
-
+
         Args:
             prs: List of PR dictionaries to enrich
         """
+        # Validate network connectivity before attempting JIRA operations
+        if not self._validate_network_connectivity():
+            print(" ⚠️ JIRA network connectivity issues detected, skipping PR enrichment")
+            return
+
         # Similar to commits, extract JIRA tickets from PR titles/descriptions
         for pr in prs:
             pr_text = f"{pr.get('title', '')} {pr.get('description', '')}"
             jira_tickets = self._extract_jira_tickets(pr_text)
-
+
             if jira_tickets:
                 ticket_data = self._fetch_tickets_batch(list(jira_tickets))
-
+
                 # Use the highest story point value found
                 max_points = 0
                 for ticket_id in jira_tickets:
                     if ticket_id in ticket_data:
-                        points = ticket_data[ticket_id].get(
+                        points = ticket_data[ticket_id].get("story_points", 0)
                         max_points = max(max_points, points)
-
+
                 if max_points > 0:
-                    pr[
-
-    def _fetch_tickets_batch(self, ticket_ids:
-        """Fetch multiple tickets from JIRA API.
-
+                    pr["story_points"] = max_points
+
+    def _fetch_tickets_batch(self, ticket_ids: list[str]) -> dict[str, dict[str, Any]]:
+        """Fetch multiple tickets from JIRA API with optimized caching.
+
+        WHY: This method implements comprehensive caching to minimize JIRA API calls,
+        which are often the slowest part of the analysis. It uses bulk cache lookups
+        and provides detailed cache hit/miss metrics.
+
         Args:
             ticket_ids: List of JIRA ticket IDs
-
+
         Returns:
             Dictionary mapping ticket ID to ticket data
         """
         if not ticket_ids:
             return {}
-
-        #
-        cached_tickets =
-        tickets_to_fetch = []
-
-
-
-
-
-
-
-
+
+        # Bulk cache lookup for better performance
+        cached_tickets = self._get_cached_tickets_bulk(ticket_ids)
+        tickets_to_fetch = [tid for tid in ticket_ids if tid not in cached_tickets]
+
+        # Track cache performance
+        cache_hits = len(cached_tickets)
+        cache_misses = len(tickets_to_fetch)
+
+        if cache_hits > 0 or cache_misses > 0:
+            print(
+                f" 📊 JIRA cache: {cache_hits} hits, {cache_misses} misses ({cache_hits/(cache_hits+cache_misses)*100:.1f}% hit rate)"
+            )
+
         # Fetch missing tickets from JIRA
         if tickets_to_fetch:
             # JIRA JQL has a limit, so batch the requests
             batch_size = 50
+            new_tickets = []  # Collect new tickets for bulk caching
+
             for i in range(0, len(tickets_to_fetch), batch_size):
-                batch = tickets_to_fetch[i:i + batch_size]
+                batch = tickets_to_fetch[i : i + batch_size]
                 jql = f"key in ({','.join(batch)})"
-
+
                 try:
-
+                    print(f" 🔍 Fetching {len(batch)} JIRA tickets from API...")
+                    response = self._session.get(
                         f"{self.base_url}/rest/api/3/search",
-                        headers=self.headers,
                         params={
                             "jql": jql,
                             "fields": "*all",  # Get all fields to find story points
-                            "maxResults": batch_size
-                        }
+                            "maxResults": batch_size,
+                        },
+                        timeout=self.connection_timeout,
                     )
                     response.raise_for_status()
-
+
                     data = response.json()
-                    for issue in data.get(
+                    for issue in data.get("issues", []):
                         ticket_data = self._extract_ticket_data(issue)
-                        cached_tickets[ticket_data[
-
-
+                        cached_tickets[ticket_data["id"]] = ticket_data
+                        new_tickets.append(ticket_data)
+
+                except ConnectionError as e:
+                    print(f" ❌ JIRA DNS/connection error: {self._format_network_error(e)}")
+                    print(
+                        f" Troubleshooting: Check network connectivity and DNS resolution for {self.base_url}"
+                    )
+                    break  # Stop processing batches on network errors
+                except Timeout as e:
+                    print(f" ⏱️ JIRA request timeout: {e}")
+                    print(" Consider increasing timeout settings or checking network latency")
                 except RequestException as e:
                     print(f" ⚠️ Failed to fetch JIRA tickets: {e}")
-
+
+            # Bulk cache all new tickets
+            if new_tickets:
+                self._cache_tickets_bulk(new_tickets)
+                print(f" 💾 Cached {len(new_tickets)} new JIRA tickets")
+
         return cached_tickets
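Review note: `_fetch_tickets_batch` folds up to 50 ticket keys into one JQL `key in (...)` query per request. The batching logic in isolation, as a runnable sketch:

```python
def jql_batches(ticket_ids: list[str], batch_size: int = 50) -> list[str]:
    """Build one `key in (...)` JQL query per batch of ticket keys."""
    return [
        f"key in ({','.join(ticket_ids[i : i + batch_size])})"
        for i in range(0, len(ticket_ids), batch_size)
    ]

print(jql_batches(["PROJ-1", "PROJ-2", "PROJ-3"], batch_size=2))
# ['key in (PROJ-1,PROJ-2)', 'key in (PROJ-3)']
```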
-
-    def _extract_ticket_data(self, issue:
+
+    def _extract_ticket_data(self, issue: dict[str, Any]) -> dict[str, Any]:
         """Extract relevant data from JIRA issue.
-
+
         Args:
             issue: JIRA issue data from API
-
+
         Returns:
             Dictionary with extracted ticket data
         """
-        fields = issue.get(
-
+        fields = issue.get("fields", {})
+
         # Extract story points from various possible fields
         story_points = 0
         for field_id in self.story_point_fields:
@@ -186,87 +257,465 @@ class JIRAIntegration:
                     break
             except (ValueError, TypeError):
                 continue
-
+
         return {
-
-
-
-
-
-
-
+            "id": issue["key"],
+            "summary": fields.get("summary", ""),
+            "status": fields.get("status", {}).get("name", ""),
+            "story_points": int(story_points) if story_points else 0,
+            "assignee": (
+                fields.get("assignee", {}).get("displayName", "") if fields.get("assignee") else ""
+            ),
+            "created": fields.get("created", ""),
+            "updated": fields.get("updated", ""),
         }
-
+
     def _is_jira_ticket(self, text: str) -> bool:
         """Check if text matches JIRA ticket pattern."""
         import re
-
-
-
+
+        return bool(re.match(r"^[A-Z]{2,10}-\d+$", text))
+
+    def _extract_jira_tickets(self, text: str) -> set[str]:
         """Extract JIRA ticket IDs from text."""
         import re
-
+
+        pattern = r"([A-Z]{2,10}-\d+)"
         matches = re.findall(pattern, text)
         return set(matches)
-
-    def _get_cached_ticket(self, ticket_id: str) -> Optional[
-        """Get ticket data from cache.
-
-
-
-
-
-
-
-
-
+
+    def _get_cached_ticket(self, ticket_id: str) -> Optional[dict[str, Any]]:
+        """Get ticket data from cache.
+
+        WHY: JIRA API calls are expensive and slow. Caching ticket data
+        significantly improves performance on repeated runs over the same
+        time period, especially when analyzing multiple repositories.
+
+        Args:
+            ticket_id: JIRA ticket ID (e.g., "PROJ-123")
+
+        Returns:
+            Cached ticket data or None if not found/stale
+        """
+        with self.cache.get_session() as session:
+            from ..models.database import IssueCache
+
+            cached_ticket = (
+                session.query(IssueCache)
+                .filter(IssueCache.platform == "jira", IssueCache.issue_id == ticket_id)
+                .first()
+            )
+
+            if cached_ticket and not self._is_ticket_stale(cached_ticket.cached_at):
+                self.cache.cache_hits += 1
+                if self.cache.debug_mode:
+                    print(f"DEBUG: JIRA cache HIT for ticket {ticket_id}")
+
+                return {
+                    "id": cached_ticket.issue_id,
+                    "summary": cached_ticket.title or "",
+                    "status": cached_ticket.status or "",
+                    "story_points": cached_ticket.story_points or 0,
+                    "assignee": cached_ticket.assignee or "",
+                    "created": (
+                        cached_ticket.created_at.isoformat() if cached_ticket.created_at else ""
+                    ),
+                    "updated": (
+                        cached_ticket.updated_at.isoformat() if cached_ticket.updated_at else ""
+                    ),
+                    "platform_data": cached_ticket.platform_data or {},
+                }
+
+            self.cache.cache_misses += 1
+            if self.cache.debug_mode:
+                print(f"DEBUG: JIRA cache MISS for ticket {ticket_id}")
+            return None
+
+    def _cache_ticket(self, ticket_id: str, ticket_data: dict[str, Any]) -> None:
+        """Cache ticket data.
+
+        WHY: Caching JIRA ticket data prevents redundant API calls and
+        significantly improves performance on subsequent runs. The cache
+        respects TTL settings to ensure data freshness.
+
+        Args:
+            ticket_id: JIRA ticket ID
+            ticket_data: Ticket data from JIRA API
+        """
+        # Use the existing cache_issue method which handles JIRA tickets
+        cache_data = {
+            "id": ticket_id,
+            "project_key": self._extract_project_key(ticket_id),
+            "title": ticket_data.get("summary", ""),
+            "description": "",  # Not typically needed for analytics
+            "status": ticket_data.get("status", ""),
+            "assignee": ticket_data.get("assignee", ""),
+            "created_at": self._parse_jira_date(ticket_data.get("created")),
+            "updated_at": self._parse_jira_date(ticket_data.get("updated")),
+            "story_points": ticket_data.get("story_points", 0),
+            "labels": [],  # Could extract from JIRA data if needed
+            "platform_data": ticket_data,  # Store full JIRA response for future use
+        }
+
+        self.cache.cache_issue("jira", cache_data)
+
+    def _is_ticket_stale(self, cached_at: datetime) -> bool:
+        """Check if cached ticket data is stale based on cache TTL.
+
+        Args:
+            cached_at: When the ticket was cached
+
+        Returns:
+            True if stale and should be refreshed, False if still fresh
+        """
+        from datetime import timedelta
+
+        if self.cache.ttl_hours == 0:  # No expiration
+            return False
+
+        stale_threshold = datetime.utcnow() - timedelta(hours=self.cache.ttl_hours)
+        return cached_at < stale_threshold
+
+    def _extract_project_key(self, ticket_id: str) -> str:
+        """Extract project key from JIRA ticket ID.
+
+        Args:
+            ticket_id: JIRA ticket ID (e.g., "PROJ-123")
+
+        Returns:
+            Project key (e.g., "PROJ")
+        """
+        return ticket_id.split("-")[0] if "-" in ticket_id else ticket_id
+
+    def _parse_jira_date(self, date_str: Optional[str]) -> Optional[datetime]:
+        """Parse JIRA date string to datetime object.
+
+        Args:
+            date_str: JIRA date string or None
+
+        Returns:
+            Parsed datetime object or None
+        """
+        if not date_str:
+            return None
+
+        try:
+            # JIRA typically returns ISO format dates
+            from dateutil import parser
+
+            return parser.parse(date_str).replace(tzinfo=None)  # Store as naive UTC
+        except (ValueError, ImportError):
+            # Fallback for basic ISO format
+            try:
+                return datetime.fromisoformat(date_str.replace("Z", "+00:00")).replace(tzinfo=None)
+            except ValueError:
+                return None
+
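Review note: `_parse_jira_date` tries `dateutil` first and falls back to `datetime.fromisoformat`, normalizing to naive UTC. The same two-step parse as a standalone sketch:

```python
from datetime import datetime
from typing import Optional

def parse_jira_date(date_str: Optional[str]) -> Optional[datetime]:
    """Parse an ISO-8601 timestamp, preferring dateutil, as in the diff above."""
    if not date_str:
        return None
    try:
        from dateutil import parser  # optional dependency
        return parser.parse(date_str).replace(tzinfo=None)
    except (ValueError, ImportError):
        try:
            return datetime.fromisoformat(date_str.replace("Z", "+00:00")).replace(tzinfo=None)
        except ValueError:
            return None

print(parse_jira_date("2024-05-01T12:30:00Z"))  # 2024-05-01 12:30:00
```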
+    def _get_cached_tickets_bulk(self, ticket_ids: list[str]) -> dict[str, dict[str, Any]]:
+        """Get multiple tickets from cache in a single query.
+
+        WHY: Bulk cache lookups are much more efficient than individual lookups
+        when checking many tickets, reducing database overhead significantly.
+
+        Args:
+            ticket_ids: List of JIRA ticket IDs to look up
+
+        Returns:
+            Dictionary mapping ticket ID to cached data (only non-stale entries)
+        """
+        if not ticket_ids:
+            return {}
+
+        cached_tickets = {}
+        with self.cache.get_session() as session:
+            from ..models.database import IssueCache
+
+            cached_results = (
+                session.query(IssueCache)
+                .filter(IssueCache.platform == "jira", IssueCache.issue_id.in_(ticket_ids))
+                .all()
+            )
+
+            for cached in cached_results:
+                if not self._is_ticket_stale(cached.cached_at):
+                    ticket_data = {
+                        "id": cached.issue_id,
+                        "summary": cached.title or "",
+                        "status": cached.status or "",
+                        "story_points": cached.story_points or 0,
+                        "assignee": cached.assignee or "",
+                        "created": cached.created_at.isoformat() if cached.created_at else "",
+                        "updated": cached.updated_at.isoformat() if cached.updated_at else "",
+                        "platform_data": cached.platform_data or {},
+                    }
+                    cached_tickets[cached.issue_id] = ticket_data
+
+        return cached_tickets
+
+    def _cache_tickets_bulk(self, tickets: list[dict[str, Any]]) -> None:
+        """Cache multiple tickets in a single transaction.
+
+        WHY: Bulk caching is more efficient than individual cache operations,
+        reducing database overhead and improving performance when caching
+        many tickets from JIRA API responses.
+
+        Args:
+            tickets: List of ticket data dictionaries to cache
+        """
+        if not tickets:
+            return
+
+        for ticket_data in tickets:
+            # Use individual cache method which handles upserts properly
+            self._cache_ticket(ticket_data["id"], ticket_data)
+
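Review note: entries returned by the bulk lookup are filtered through `_is_ticket_stale`, a plain TTL comparison where `ttl_hours == 0` disables expiry. Sketched standalone:

```python
from datetime import datetime, timedelta

def is_stale(cached_at: datetime, ttl_hours: int) -> bool:
    """Mirror of _is_ticket_stale: ttl_hours == 0 means entries never expire."""
    if ttl_hours == 0:
        return False
    return cached_at < datetime.utcnow() - timedelta(hours=ttl_hours)

print(is_stale(datetime.utcnow() - timedelta(hours=48), ttl_hours=24))  # True
print(is_stale(datetime.utcnow() - timedelta(hours=48), ttl_hours=0))   # False
```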
     def validate_connection(self) -> bool:
         """Validate JIRA connection and credentials.
-
+
         Returns:
             True if connection is valid
         """
         try:
-
-
-
+            # First validate network connectivity
+            if not self._validate_network_connectivity():
+                return False
+
+            response = self._session.get(
+                f"{self.base_url}/rest/api/3/myself", timeout=self.connection_timeout
             )
             response.raise_for_status()
+            self._connection_validated = True
             return True
+        except ConnectionError as e:
+            print(f" ❌ JIRA DNS/connection error: {self._format_network_error(e)}")
+            print(
+                f" Troubleshooting: Check network connectivity and DNS resolution for {self.base_url}"
+            )
+            return False
+        except Timeout as e:
+            print(f" ⏱️ JIRA connection timeout: {e}")
+            print(" Consider increasing timeout settings or checking network latency")
+            return False
         except RequestException as e:
             print(f" ❌ JIRA connection failed: {e}")
             return False
-
-    def discover_fields(self) ->
+
+    def discover_fields(self) -> dict[str, dict[str, str]]:
         """Discover all available fields in JIRA instance.
-
+
         Returns:
             Dictionary mapping field IDs to their names and types
         """
         try:
-
-
-
+            # Validate network connectivity first
+            if not self._validate_network_connectivity():
+                return {}
+
+            response = self._session.get(
+                f"{self.base_url}/rest/api/3/field", timeout=self.connection_timeout
             )
             response.raise_for_status()
-
+
             fields = {}
             for field in response.json():
-                field_id = field.get(
-                field_name = field.get(
-                field_type =
-
+                field_id = field.get("id", "")
+                field_name = field.get("name", "")
+                field_type = (
+                    field.get("schema", {}).get("type", "unknown")
+                    if field.get("schema")
+                    else "unknown"
+                )
+
                 # Look for potential story point fields
-                if any(
+                if any(
+                    term in field_name.lower() for term in ["story", "point", "estimate", "size"]
+                ):
                     fields[field_id] = {
-
-
-
+                        "name": field_name,
+                        "type": field_type,
+                        "is_custom": field.get("custom", False),
                     }
-                    print(
-
+                    print(
+                        f" 📊 Potential story point field: {field_id} = '{field_name}' (type: {field_type})"
+                    )
+
             return fields
-
+
+        except ConnectionError as e:
+            print(
+                f" ❌ JIRA DNS/connection error during field discovery: {self._format_network_error(e)}"
+            )
+            print(
+                f" Troubleshooting: Check network connectivity and DNS resolution for {self.base_url}"
+            )
+            return {}
+        except Timeout as e:
+            print(f" ⏱️ JIRA field discovery timeout: {e}")
+            print(" Consider increasing timeout settings or checking network latency")
+            return {}
         except RequestException as e:
             print(f" ⚠️ Failed to discover JIRA fields: {e}")
-            return {}
+            return {}
+
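Review note: field discovery keeps any field whose name contains a story-point-ish term. The heuristic in isolation:

```python
def looks_like_story_points(field_name: str) -> bool:
    """Name heuristic used by discover_fields above."""
    return any(term in field_name.lower() for term in ["story", "point", "estimate", "size"])

print(looks_like_story_points("Story Points"))  # True
print(looks_like_story_points("T-Shirt Size"))  # True
print(looks_like_story_points("Sprint"))        # False
```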
+    def _create_resilient_session(self) -> requests.Session:
+        """Create HTTP session with enhanced retry logic and DNS error handling.
+
+        WHY: DNS resolution failures and network issues are common when connecting
+        to external JIRA instances. This session provides resilient connections
+        with exponential backoff and comprehensive error handling.
+
+        Returns:
+            Configured requests session with retry strategy and network resilience.
+        """
+        session = requests.Session()
+
+        # Configure retry strategy for network resilience
+        retry_strategy = Retry(
+            total=self.max_retries,
+            backoff_factor=self.backoff_factor,
+            status_forcelist=[429, 500, 502, 503, 504],
+            allowed_methods=["HEAD", "GET", "OPTIONS", "POST"],
+            raise_on_status=False,  # Let us handle status codes
+        )
+
+        adapter = HTTPAdapter(max_retries=retry_strategy)
+        session.mount("http://", adapter)
+        session.mount("https://", adapter)
+
+        # Set default headers
+        session.headers.update(self.headers)
+
+        # Configure proxy if enabled
+        if self.enable_proxy and self.proxy_url:
+            session.proxies = {
+                "http": self.proxy_url,
+                "https": self.proxy_url,
+            }
+            print(f" 🌐 Using proxy: {self.proxy_url}")
+
+        # Set default timeout
+        session.timeout = self.connection_timeout
+
+        return session
+
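Review note: the retry setup is the standard urllib3 `Retry` + `HTTPAdapter` pattern (`allowed_methods` requires urllib3 >= 1.26; older releases spell it `method_whitelist`). A minimal standalone version:

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
retry = Retry(
    total=3,
    backoff_factor=1.0,  # sleeps ~1s, 2s, 4s between attempts
    status_forcelist=[429, 500, 502, 503, 504],
    allowed_methods=["HEAD", "GET", "OPTIONS", "POST"],
    raise_on_status=False,
)
adapter = HTTPAdapter(max_retries=retry)
session.mount("http://", adapter)
session.mount("https://", adapter)

resp = session.get("https://example.com", timeout=30)
```

One caveat: `requests.Session` has no supported `timeout` attribute, so the `session.timeout = self.connection_timeout` line above is effectively a no-op; the explicit `timeout=self.connection_timeout` passed to each `.get()` call elsewhere in this diff is what actually enforces the limit.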
+    def _validate_network_connectivity(self) -> bool:
+        """Validate network connectivity to JIRA instance.
+
+        WHY: DNS resolution errors are a common cause of JIRA integration failures.
+        This method performs proactive network validation to detect issues early
+        and provide better error messages for troubleshooting.
+
+        Returns:
+            True if network connectivity is available, False otherwise.
+        """
+        current_time = time.time()
+
+        # Skip check if recently validated (within interval)
+        if (
+            self._connection_validated
+            and current_time - self._last_dns_check < self._dns_check_interval
+        ):
+            return True
+
+        try:
+            # Extract hostname from base URL
+            from urllib.parse import urlparse
+
+            parsed_url = urlparse(self.base_url)
+            hostname = parsed_url.hostname
+            port = parsed_url.port or (443 if parsed_url.scheme == "https" else 80)
+
+            if not hostname:
+                print(f" ❌ Invalid JIRA URL format: {self.base_url}")
+                return False
+
+            # Test DNS resolution
+            print(f" 🔍 Validating DNS resolution for {hostname}...")
+            socket.setdefaulttimeout(self.dns_timeout)
+
+            # Attempt to resolve hostname
+            addr_info = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
+            if not addr_info:
+                print(f" ❌ DNS resolution failed: No addresses found for {hostname}")
+                return False
+
+            # Test basic connectivity
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            sock.settimeout(self.dns_timeout)
+            try:
+                result = sock.connect_ex((addr_info[0][4][0], port))
+                if result == 0:
+                    print(f" ✅ Network connectivity confirmed to {hostname}:{port}")
+                    self._connection_validated = True
+                    self._last_dns_check = current_time
+                    return True
+                else:
+                    print(f" ❌ Connection failed to {hostname}:{port} (error code: {result})")
+                    return False
+            finally:
+                sock.close()
+
+        except socket.gaierror as e:
+            print(f" ❌ DNS resolution error: {self._format_dns_error(e)}")
+            print(f" Hostname: {hostname}")
+            print(" Troubleshooting:")
+            print(f" 1. Verify the hostname is correct: {hostname}")
+            print(" 2. Check your internet connection")
+            print(f" 3. Verify DNS settings (try: nslookup {hostname})")
+            print(" 4. Check if behind corporate firewall/proxy")
+            print(" 5. Verify JIRA instance is accessible externally")
+            return False
+        except socket.timeout:
+            print(f" ⏱️ DNS resolution timeout for {hostname} (>{self.dns_timeout}s)")
+            print(" Consider increasing dns_timeout or checking network latency")
+            return False
+        except Exception as e:
+            print(f" ❌ Network validation error: {e}")
+            return False
+        finally:
+            socket.setdefaulttimeout(None)  # Reset to default
+
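Review note: the connectivity probe is a DNS lookup plus a raw TCP connect. Condensed sketch:

```python
import socket
from urllib.parse import urlparse

def can_reach(base_url: str, timeout: int = 10) -> bool:
    """DNS + TCP reachability probe, condensed from the method above."""
    parsed = urlparse(base_url)
    host = parsed.hostname
    port = parsed.port or (443 if parsed.scheme == "https" else 80)
    if not host:
        return False
    try:
        infos = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
    except socket.gaierror:
        return False  # DNS resolution failed
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(timeout)
        return sock.connect_ex((infos[0][4][0], port)) == 0
```

As in the diff, the probe socket is IPv4-only (`AF_INET`) while `getaddrinfo` with `AF_UNSPEC` may return an IPv6 address first, so the connect can fail on IPv6-first hosts; a hardened version would create the socket from the returned address family.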
+    def _format_network_error(self, error: Exception) -> str:
+        """Format network errors with helpful context.
+
+        Args:
+            error: The network exception that occurred.
+
+        Returns:
+            Formatted error message with troubleshooting context.
+        """
+        error_str = str(error)
+
+        if "nodename nor servname provided" in error_str or "[Errno 8]" in error_str:
+            return f"DNS resolution failed - hostname not found ({error_str})"
+        elif "Name or service not known" in error_str or "[Errno -2]" in error_str:
+            return f"DNS resolution failed - service not known ({error_str})"
+        elif "Connection refused" in error_str or "[Errno 111]" in error_str:
+            return f"Connection refused - service not running ({error_str})"
+        elif "Network is unreachable" in error_str or "[Errno 101]" in error_str:
+            return f"Network unreachable - check internet connection ({error_str})"
+        elif "timeout" in error_str.lower():
+            return f"Network timeout - slow connection or high latency ({error_str})"
+        else:
+            return f"Network error ({error_str})"
+
+    def _format_dns_error(self, error: socket.gaierror) -> str:
+        """Format DNS resolution errors with specific guidance.
+
+        Args:
+            error: The DNS resolution error that occurred.
+
+        Returns:
+            Formatted DNS error message with troubleshooting guidance.
+        """
+        error_code = error.errno if hasattr(error, "errno") else "unknown"
+        error_msg = str(error)
+
+        if error_code == 8 or "nodename nor servname provided" in error_msg:
+            return f"Hostname not found in DNS (error code: {error_code})"
+        elif error_code == -2 or "Name or service not known" in error_msg:
+            return f"DNS name resolution failed (error code: {error_code})"
+        elif error_code == -3 or "Temporary failure in name resolution" in error_msg:
+            return f"Temporary DNS failure - try again later (error code: {error_code})"
+        else:
+            return f"DNS error (code: {error_code}, message: {error_msg})"