mcp-ticketer 0.1.12__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mcp-ticketer has been flagged as potentially problematic.
- mcp_ticketer/__version__.py +1 -1
- mcp_ticketer/adapters/linear.py +427 -3
- mcp_ticketer/cli/discover.py +402 -0
- mcp_ticketer/cli/main.py +4 -0
- mcp_ticketer/core/__init__.py +2 -1
- mcp_ticketer/core/adapter.py +155 -2
- mcp_ticketer/core/env_discovery.py +555 -0
- mcp_ticketer/core/models.py +58 -6
- mcp_ticketer/core/project_config.py +62 -9
- {mcp_ticketer-0.1.12.dist-info → mcp_ticketer-0.1.13.dist-info}/METADATA +1 -1
- {mcp_ticketer-0.1.12.dist-info → mcp_ticketer-0.1.13.dist-info}/RECORD +15 -13
- {mcp_ticketer-0.1.12.dist-info → mcp_ticketer-0.1.13.dist-info}/WHEEL +0 -0
- {mcp_ticketer-0.1.12.dist-info → mcp_ticketer-0.1.13.dist-info}/entry_points.txt +0 -0
- {mcp_ticketer-0.1.12.dist-info → mcp_ticketer-0.1.13.dist-info}/licenses/LICENSE +0 -0
- {mcp_ticketer-0.1.12.dist-info → mcp_ticketer-0.1.13.dist-info}/top_level.txt +0 -0
mcp_ticketer/core/env_discovery.py
ADDED

@@ -0,0 +1,555 @@
+"""Auto-discovery of configuration from .env and .env.local files.
+
+This module provides intelligent detection of adapter configurations from
+environment files, including:
+- Automatic adapter type detection from available keys
+- Support for multiple naming conventions
+- Project information extraction
+- Security validation
+"""
+
+import os
+import logging
+from pathlib import Path
+from typing import Dict, Any, Optional, List, Tuple
+from dataclasses import dataclass, field
+from dotenv import dotenv_values
+
+from .project_config import AdapterType, AdapterConfig
+
+logger = logging.getLogger(__name__)
+
+
+# Key patterns for each adapter type
+LINEAR_KEY_PATTERNS = [
+    "LINEAR_API_KEY",
+    "LINEAR_TOKEN",
+    "LINEAR_KEY",
+    "MCP_TICKETER_LINEAR_API_KEY",
+]
+
+LINEAR_TEAM_PATTERNS = [
+    "LINEAR_TEAM_ID",
+    "LINEAR_TEAM",
+    "MCP_TICKETER_LINEAR_TEAM_ID",
+]
+
+LINEAR_PROJECT_PATTERNS = [
+    "LINEAR_PROJECT_ID",
+    "LINEAR_PROJECT",
+    "MCP_TICKETER_LINEAR_PROJECT_ID",
+]
+
+GITHUB_TOKEN_PATTERNS = [
+    "GITHUB_TOKEN",
+    "GH_TOKEN",
+    "GITHUB_PAT",
+    "GH_PAT",
+    "MCP_TICKETER_GITHUB_TOKEN",
+]
+
+GITHUB_REPO_PATTERNS = [
+    "GITHUB_REPOSITORY",  # Format: "owner/repo"
+    "GH_REPO",
+    "MCP_TICKETER_GITHUB_REPOSITORY",
+]
+
+GITHUB_OWNER_PATTERNS = [
+    "GITHUB_OWNER",
+    "GH_OWNER",
+    "MCP_TICKETER_GITHUB_OWNER",
+]
+
+GITHUB_REPO_NAME_PATTERNS = [
+    "GITHUB_REPO",
+    "GH_REPO_NAME",
+    "MCP_TICKETER_GITHUB_REPO",
+]
+
+JIRA_TOKEN_PATTERNS = [
+    "JIRA_API_TOKEN",
+    "JIRA_TOKEN",
+    "JIRA_PAT",
+    "MCP_TICKETER_JIRA_TOKEN",
+]
+
+JIRA_SERVER_PATTERNS = [
+    "JIRA_SERVER",
+    "JIRA_URL",
+    "JIRA_HOST",
+    "MCP_TICKETER_JIRA_SERVER",
+]
+
+JIRA_EMAIL_PATTERNS = [
+    "JIRA_EMAIL",
+    "JIRA_USER",
+    "JIRA_USERNAME",
+    "MCP_TICKETER_JIRA_EMAIL",
+]
+
+JIRA_PROJECT_PATTERNS = [
+    "JIRA_PROJECT_KEY",
+    "JIRA_PROJECT",
+    "MCP_TICKETER_JIRA_PROJECT_KEY",
+]
+
+AITRACKDOWN_PATH_PATTERNS = [
+    "AITRACKDOWN_PATH",
+    "AITRACKDOWN_BASE_PATH",
+    "MCP_TICKETER_AITRACKDOWN_BASE_PATH",
+]
+
+
+@dataclass
+class DiscoveredAdapter:
+    """Information about a discovered adapter configuration."""
+
+    adapter_type: str
+    config: Dict[str, Any]
+    confidence: float  # 0.0-1.0 how complete the configuration is
+    missing_fields: List[str] = field(default_factory=list)
+    found_in: str = ".env"  # Which file it was found in
+
+    def is_complete(self) -> bool:
+        """Check if configuration has all required fields."""
+        return len(self.missing_fields) == 0
+
+
+@dataclass
+class DiscoveryResult:
+    """Result of environment file discovery."""
+
+    adapters: List[DiscoveredAdapter] = field(default_factory=list)
+    warnings: List[str] = field(default_factory=list)
+    env_files_found: List[str] = field(default_factory=list)
+
+    def get_primary_adapter(self) -> Optional[DiscoveredAdapter]:
+        """Get the adapter with highest confidence and completeness."""
+        if not self.adapters:
+            return None
+
+        # Sort by: complete configs first, then by confidence
+        sorted_adapters = sorted(
+            self.adapters,
+            key=lambda a: (a.is_complete(), a.confidence),
+            reverse=True
+        )
+        return sorted_adapters[0]
+
+    def get_adapter_by_type(self, adapter_type: str) -> Optional[DiscoveredAdapter]:
+        """Get discovered adapter by type."""
+        for adapter in self.adapters:
+            if adapter.adapter_type == adapter_type:
+                return adapter
+        return None
+
+
+class EnvDiscovery:
+    """Auto-discovery of adapter configurations from environment files."""
+
+    # File search order (highest priority first)
+    ENV_FILE_ORDER = [
+        ".env.local",
+        ".env",
+        ".env.production",
+        ".env.development",
+    ]
+
+    def __init__(self, project_path: Optional[Path] = None):
+        """Initialize discovery.
+
+        Args:
+            project_path: Path to project root (defaults to cwd)
+        """
+        self.project_path = project_path or Path.cwd()
+
+    def discover(self) -> DiscoveryResult:
+        """Discover adapter configurations from environment files.
+
+        Returns:
+            DiscoveryResult with found adapters and warnings
+        """
+        result = DiscoveryResult()
+
+        # Load environment files
+        env_vars = self._load_env_files(result)
+
+        if not env_vars:
+            result.warnings.append("No .env files found in project directory")
+            return result
+
+        # Detect adapters
+        linear_adapter = self._detect_linear(env_vars, result.env_files_found[0] if result.env_files_found else ".env")
+        if linear_adapter:
+            result.adapters.append(linear_adapter)
+
+        github_adapter = self._detect_github(env_vars, result.env_files_found[0] if result.env_files_found else ".env")
+        if github_adapter:
+            result.adapters.append(github_adapter)
+
+        jira_adapter = self._detect_jira(env_vars, result.env_files_found[0] if result.env_files_found else ".env")
+        if jira_adapter:
+            result.adapters.append(jira_adapter)
+
+        aitrackdown_adapter = self._detect_aitrackdown(env_vars, result.env_files_found[0] if result.env_files_found else ".env")
+        if aitrackdown_adapter:
+            result.adapters.append(aitrackdown_adapter)
+
+        # Validate security
+        security_warnings = self._validate_security()
+        result.warnings.extend(security_warnings)
+
+        return result
+
+    def _load_env_files(self, result: DiscoveryResult) -> Dict[str, str]:
+        """Load environment variables from files.
+
+        Args:
+            result: DiscoveryResult to update with found files
+
+        Returns:
+            Merged dictionary of environment variables
+        """
+        merged_env: Dict[str, str] = {}
+
+        # Load files in reverse order (lowest priority first)
+        for env_file in reversed(self.ENV_FILE_ORDER):
+            file_path = self.project_path / env_file
+            if file_path.exists():
+                try:
+                    env_vars = dotenv_values(file_path)
+                    # Filter out None values
+                    env_vars = {k: v for k, v in env_vars.items() if v is not None}
+                    merged_env.update(env_vars)
+                    result.env_files_found.insert(0, env_file)
+                    logger.debug(f"Loaded {len(env_vars)} variables from {env_file}")
+                except Exception as e:
+                    logger.warning(f"Failed to load {env_file}: {e}")
+                    result.warnings.append(f"Failed to parse {env_file}: {e}")
+
+        return merged_env
+
+    def _find_key_value(self, env_vars: Dict[str, str], patterns: List[str]) -> Optional[str]:
+        """Find first matching key value from patterns.
+
+        Args:
+            env_vars: Environment variables dictionary
+            patterns: List of key patterns to try
+
+        Returns:
+            Value if found, None otherwise
+        """
+        for pattern in patterns:
+            if pattern in env_vars and env_vars[pattern]:
+                return env_vars[pattern]
+        return None
+
+    def _detect_linear(self, env_vars: Dict[str, str], found_in: str) -> Optional[DiscoveredAdapter]:
+        """Detect Linear adapter configuration.
+
+        Args:
+            env_vars: Environment variables
+            found_in: Which file the config was found in
+
+        Returns:
+            DiscoveredAdapter if Linear config detected, None otherwise
+        """
+        api_key = self._find_key_value(env_vars, LINEAR_KEY_PATTERNS)
+
+        if not api_key:
+            return None
+
+        config: Dict[str, Any] = {
+            "api_key": api_key,
+            "adapter": AdapterType.LINEAR.value,
+        }
+
+        missing_fields: List[str] = []
+        confidence = 0.6  # Has API key
+
+        # Extract team ID (recommended but not required)
+        team_id = self._find_key_value(env_vars, LINEAR_TEAM_PATTERNS)
+        if team_id:
+            config["team_id"] = team_id
+            confidence += 0.3
+        else:
+            missing_fields.append("team_id (recommended)")
+
+        # Extract project ID (optional)
+        project_id = self._find_key_value(env_vars, LINEAR_PROJECT_PATTERNS)
+        if project_id:
+            config["project_id"] = project_id
+            confidence += 0.1
+
+        return DiscoveredAdapter(
+            adapter_type=AdapterType.LINEAR.value,
+            config=config,
+            confidence=min(confidence, 1.0),
+            missing_fields=missing_fields,
+            found_in=found_in,
+        )
+
+    def _detect_github(self, env_vars: Dict[str, str], found_in: str) -> Optional[DiscoveredAdapter]:
+        """Detect GitHub adapter configuration.
+
+        Args:
+            env_vars: Environment variables
+            found_in: Which file the config was found in
+
+        Returns:
+            DiscoveredAdapter if GitHub config detected, None otherwise
+        """
+        token = self._find_key_value(env_vars, GITHUB_TOKEN_PATTERNS)
+
+        if not token:
+            return None
+
+        config: Dict[str, Any] = {
+            "token": token,
+            "adapter": AdapterType.GITHUB.value,
+        }
+
+        missing_fields: List[str] = []
+        confidence = 0.4  # Has token
+
+        # Try to extract owner/repo from combined field
+        repo_full = self._find_key_value(env_vars, GITHUB_REPO_PATTERNS)
+        if repo_full and "/" in repo_full:
+            parts = repo_full.split("/", 1)
+            if len(parts) == 2:
+                config["owner"] = parts[0]
+                config["repo"] = parts[1]
+                confidence += 0.6
+        else:
+            # Try separate fields
+            owner = self._find_key_value(env_vars, GITHUB_OWNER_PATTERNS)
+            repo = self._find_key_value(env_vars, GITHUB_REPO_NAME_PATTERNS)
+
+            if owner:
+                config["owner"] = owner
+                confidence += 0.3
+            else:
+                missing_fields.append("owner")
+
+            if repo:
+                config["repo"] = repo
+                confidence += 0.3
+            else:
+                missing_fields.append("repo")
+
+        return DiscoveredAdapter(
+            adapter_type=AdapterType.GITHUB.value,
+            config=config,
+            confidence=min(confidence, 1.0),
+            missing_fields=missing_fields,
+            found_in=found_in,
+        )
+
+    def _detect_jira(self, env_vars: Dict[str, str], found_in: str) -> Optional[DiscoveredAdapter]:
+        """Detect JIRA adapter configuration.
+
+        Args:
+            env_vars: Environment variables
+            found_in: Which file the config was found in
+
+        Returns:
+            DiscoveredAdapter if JIRA config detected, None otherwise
+        """
+        api_token = self._find_key_value(env_vars, JIRA_TOKEN_PATTERNS)
+
+        if not api_token:
+            return None
+
+        config: Dict[str, Any] = {
+            "api_token": api_token,
+            "adapter": AdapterType.JIRA.value,
+        }
+
+        missing_fields: List[str] = []
+        confidence = 0.3  # Has token
+
+        # Extract server (required)
+        server = self._find_key_value(env_vars, JIRA_SERVER_PATTERNS)
+        if server:
+            config["server"] = server
+            confidence += 0.35
+        else:
+            missing_fields.append("server")
+
+        # Extract email (required)
+        email = self._find_key_value(env_vars, JIRA_EMAIL_PATTERNS)
+        if email:
+            config["email"] = email
+            confidence += 0.35
+        else:
+            missing_fields.append("email")
+
+        # Extract project key (optional)
+        project_key = self._find_key_value(env_vars, JIRA_PROJECT_PATTERNS)
+        if project_key:
+            config["project_key"] = project_key
+            confidence += 0.1
+
+        return DiscoveredAdapter(
+            adapter_type=AdapterType.JIRA.value,
+            config=config,
+            confidence=min(confidence, 1.0),
+            missing_fields=missing_fields,
+            found_in=found_in,
+        )
+
+    def _detect_aitrackdown(self, env_vars: Dict[str, str], found_in: str) -> Optional[DiscoveredAdapter]:
+        """Detect AITrackdown adapter configuration.
+
+        Args:
+            env_vars: Environment variables
+            found_in: Which file the config was found in
+
+        Returns:
+            DiscoveredAdapter if AITrackdown config detected, None otherwise
+        """
+        base_path = self._find_key_value(env_vars, AITRACKDOWN_PATH_PATTERNS)
+
+        # Also check if .aitrackdown directory exists
+        aitrackdown_dir = self.project_path / ".aitrackdown"
+        if not base_path and not aitrackdown_dir.exists():
+            return None
+
+        config: Dict[str, Any] = {
+            "adapter": AdapterType.AITRACKDOWN.value,
+        }
+
+        if base_path:
+            config["base_path"] = base_path
+        else:
+            config["base_path"] = ".aitrackdown"
+
+        # AITrackdown has no required external credentials
+        confidence = 1.0 if aitrackdown_dir.exists() else 0.7
+
+        return DiscoveredAdapter(
+            adapter_type=AdapterType.AITRACKDOWN.value,
+            config=config,
+            confidence=confidence,
+            missing_fields=[],
+            found_in=found_in,
+        )
+
+    def _validate_security(self) -> List[str]:
+        """Validate security of environment files.
+
+        Returns:
+            List of security warnings
+        """
+        warnings: List[str] = []
+
+        # Check if .env files are tracked in git
+        gitignore_path = self.project_path / ".gitignore"
+
+        for env_file in self.ENV_FILE_ORDER:
+            file_path = self.project_path / env_file
+            if not file_path.exists():
+                continue
+
+            # Check if file is tracked in git
+            if self._is_tracked_in_git(env_file):
+                warnings.append(
+                    f"⚠️ {env_file} is tracked in git (security risk - should be in .gitignore)"
+                )
+
+        # Check if .gitignore exists and has .env patterns
+        if gitignore_path.exists():
+            try:
+                with open(gitignore_path, 'r') as f:
+                    gitignore_content = f.read()
+                if ".env" not in gitignore_content:
+                    warnings.append(
+                        "⚠️ .gitignore doesn't contain .env pattern - credentials may be exposed"
+                    )
+            except Exception as e:
+                logger.debug(f"Failed to read .gitignore: {e}")
+
+        return warnings
+
+    def _is_tracked_in_git(self, file_name: str) -> bool:
+        """Check if file is tracked in git.
+
+        Args:
+            file_name: Name of file to check
+
+        Returns:
+            True if file is tracked in git, False otherwise
+        """
+        import subprocess
+
+        try:
+            # Run git ls-files to check if file is tracked
+            result = subprocess.run(
+                ["git", "ls-files", "--error-unmatch", file_name],
+                cwd=self.project_path,
+                capture_output=True,
+                timeout=5,
+            )
+            return result.returncode == 0
+        except (subprocess.TimeoutExpired, FileNotFoundError):
+            # Git not available or timeout
+            return False
+        except Exception as e:
+            logger.debug(f"Git check failed: {e}")
+            return False
+
+    def validate_discovered_config(self, adapter: DiscoveredAdapter) -> List[str]:
+        """Validate a discovered adapter configuration.
+
+        Args:
+            adapter: Discovered adapter to validate
+
+        Returns:
+            List of validation warnings
+        """
+        warnings: List[str] = []
+
+        # Check API key/token length (basic sanity check)
+        if adapter.adapter_type == AdapterType.LINEAR.value:
+            api_key = adapter.config.get("api_key", "")
+            if len(api_key) < 20:
+                warnings.append("⚠️ Linear API key looks suspiciously short")
+
+        elif adapter.adapter_type == AdapterType.GITHUB.value:
+            token = adapter.config.get("token", "")
+            if len(token) < 20:
+                warnings.append("⚠️ GitHub token looks suspiciously short")
+
+            # Validate token prefix
+            if token and not token.startswith(("ghp_", "gho_", "ghu_", "ghs_", "ghr_")):
+                warnings.append("⚠️ GitHub token doesn't match expected format (should start with ghp_, gho_, etc.)")
+
+        elif adapter.adapter_type == AdapterType.JIRA.value:
+            server = adapter.config.get("server", "")
+            if server and not server.startswith(("http://", "https://")):
+                warnings.append("⚠️ JIRA server URL should start with http:// or https://")
+
+            email = adapter.config.get("email", "")
+            if email and "@" not in email:
+                warnings.append("⚠️ JIRA email doesn't look like a valid email address")
+
+        # Check for missing fields
+        if adapter.missing_fields:
+            warnings.append(
+                f"⚠️ Incomplete configuration - missing: {', '.join(adapter.missing_fields)}"
+            )
+
+        return warnings
+
+
+def discover_config(project_path: Optional[Path] = None) -> DiscoveryResult:
+    """Convenience function to discover configuration.
+
+    Args:
+        project_path: Path to project root (defaults to cwd)
+
+    Returns:
+        DiscoveryResult with found adapters and warnings
+    """
+    discovery = EnvDiscovery(project_path)
+    return discovery.discover()
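To make the new module's surface easier to scan, here is a minimal usage sketch against the code added above. The import path, the discover_config() signature, and the DiscoveryResult / DiscoveredAdapter fields come from this diff; the project path and the specific fields printed are illustrative only, not part of the release.

from pathlib import Path

from mcp_ticketer.core.env_discovery import discover_config

# Scan .env.local, .env, .env.production and .env.development in the project root.
result = discover_config(Path("."))

# Security and parsing warnings collected during discovery.
for warning in result.warnings:
    print(warning)

# Complete configurations sort first, then highest confidence.
primary = result.get_primary_adapter()
if primary is not None:
    print(primary.adapter_type, primary.confidence, primary.found_in)
    print("missing:", primary.missing_fields)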
mcp_ticketer/core/models.py
CHANGED
@@ -14,6 +14,14 @@ class Priority(str, Enum):
     CRITICAL = "critical"
 
 
+class TicketType(str, Enum):
+    """Ticket type hierarchy."""
+    EPIC = "epic"        # Strategic level (Projects in Linear, Milestones in GitHub)
+    ISSUE = "issue"      # Work item level (standard issues/tasks)
+    TASK = "task"        # Sub-task level (sub-issues, checkboxes)
+    SUBTASK = "subtask"  # Alias for task (for clarity)
+
+
 class TicketState(str, Enum):
     """Universal ticket states with state machine abstraction."""
     OPEN = "open"
@@ -65,25 +73,69 @@ class BaseTicket(BaseModel):
 
 
 class Epic(BaseTicket):
-    """Epic - highest level container for work."""
-    ticket_type:
+    """Epic - highest level container for work (Projects in Linear, Milestones in GitHub)."""
+    ticket_type: TicketType = Field(default=TicketType.EPIC, frozen=True, description="Always EPIC type")
     child_issues: List[str] = Field(
         default_factory=list,
         description="IDs of child issues"
     )
 
+    def validate_hierarchy(self) -> List[str]:
+        """Validate epic hierarchy rules.
+
+        Returns:
+            List of validation errors (empty if valid)
+        """
+        # Epics don't have parents in our hierarchy
+        return []
+
 
 class Task(BaseTicket):
-    """Task - individual work item."""
-    ticket_type:
-    parent_issue: Optional[str] = Field(None, description="Parent issue ID")
-    parent_epic: Optional[str] = Field(None, description="Parent epic ID")
+    """Task - individual work item (can be ISSUE or TASK type)."""
+    ticket_type: TicketType = Field(default=TicketType.ISSUE, description="Ticket type in hierarchy")
+    parent_issue: Optional[str] = Field(None, description="Parent issue ID (for tasks)")
+    parent_epic: Optional[str] = Field(None, description="Parent epic ID (for issues)")
     assignee: Optional[str] = Field(None, description="Assigned user")
+    children: List[str] = Field(default_factory=list, description="Child task IDs")
 
     # Additional fields common across systems
     estimated_hours: Optional[float] = Field(None, description="Time estimate")
     actual_hours: Optional[float] = Field(None, description="Actual time spent")
 
+    def is_epic(self) -> bool:
+        """Check if this is an epic (should use Epic class instead)."""
+        return self.ticket_type == TicketType.EPIC
+
+    def is_issue(self) -> bool:
+        """Check if this is a standard issue."""
+        return self.ticket_type == TicketType.ISSUE
+
+    def is_task(self) -> bool:
+        """Check if this is a sub-task."""
+        return self.ticket_type in (TicketType.TASK, TicketType.SUBTASK)
+
+    def validate_hierarchy(self) -> List[str]:
+        """Validate ticket hierarchy rules.
+
+        Returns:
+            List of validation errors (empty if valid)
+        """
+        errors = []
+
+        # Tasks must have parent issue
+        if self.is_task() and not self.parent_issue:
+            errors.append("Tasks must have a parent_issue (issue)")
+
+        # Issues should not have parent_issue (use epic_id instead)
+        if self.is_issue() and self.parent_issue:
+            errors.append("Issues should use parent_epic, not parent_issue")
+
+        # Tasks should not have both parent_issue and parent_epic
+        if self.is_task() and self.parent_epic:
+            errors.append("Tasks should only have parent_issue, not parent_epic (epic comes from parent issue)")
+
+        return errors
+
 
 class Comment(BaseModel):
     """Comment on a ticket."""