mcp-ticketer 0.1.30__py3-none-any.whl → 1.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mcp-ticketer might be problematic.

Files changed (109)
  1. mcp_ticketer/__init__.py +10 -10
  2. mcp_ticketer/__version__.py +3 -3
  3. mcp_ticketer/adapters/__init__.py +2 -0
  4. mcp_ticketer/adapters/aitrackdown.py +796 -46
  5. mcp_ticketer/adapters/asana/__init__.py +15 -0
  6. mcp_ticketer/adapters/asana/adapter.py +1416 -0
  7. mcp_ticketer/adapters/asana/client.py +292 -0
  8. mcp_ticketer/adapters/asana/mappers.py +348 -0
  9. mcp_ticketer/adapters/asana/types.py +146 -0
  10. mcp_ticketer/adapters/github.py +879 -129
  11. mcp_ticketer/adapters/hybrid.py +11 -11
  12. mcp_ticketer/adapters/jira.py +973 -73
  13. mcp_ticketer/adapters/linear/__init__.py +24 -0
  14. mcp_ticketer/adapters/linear/adapter.py +2732 -0
  15. mcp_ticketer/adapters/linear/client.py +344 -0
  16. mcp_ticketer/adapters/linear/mappers.py +420 -0
  17. mcp_ticketer/adapters/linear/queries.py +479 -0
  18. mcp_ticketer/adapters/linear/types.py +360 -0
  19. mcp_ticketer/adapters/linear.py +10 -2315
  20. mcp_ticketer/analysis/__init__.py +23 -0
  21. mcp_ticketer/analysis/orphaned.py +218 -0
  22. mcp_ticketer/analysis/similarity.py +224 -0
  23. mcp_ticketer/analysis/staleness.py +266 -0
  24. mcp_ticketer/cache/memory.py +9 -8
  25. mcp_ticketer/cli/adapter_diagnostics.py +421 -0
  26. mcp_ticketer/cli/auggie_configure.py +116 -15
  27. mcp_ticketer/cli/codex_configure.py +274 -82
  28. mcp_ticketer/cli/configure.py +888 -151
  29. mcp_ticketer/cli/diagnostics.py +400 -157
  30. mcp_ticketer/cli/discover.py +297 -26
  31. mcp_ticketer/cli/gemini_configure.py +119 -26
  32. mcp_ticketer/cli/init_command.py +880 -0
  33. mcp_ticketer/cli/instruction_commands.py +435 -0
  34. mcp_ticketer/cli/linear_commands.py +616 -0
  35. mcp_ticketer/cli/main.py +203 -1165
  36. mcp_ticketer/cli/mcp_configure.py +474 -90
  37. mcp_ticketer/cli/mcp_server_commands.py +415 -0
  38. mcp_ticketer/cli/migrate_config.py +12 -8
  39. mcp_ticketer/cli/platform_commands.py +123 -0
  40. mcp_ticketer/cli/platform_detection.py +418 -0
  41. mcp_ticketer/cli/platform_installer.py +513 -0
  42. mcp_ticketer/cli/python_detection.py +126 -0
  43. mcp_ticketer/cli/queue_commands.py +15 -15
  44. mcp_ticketer/cli/setup_command.py +639 -0
  45. mcp_ticketer/cli/simple_health.py +90 -65
  46. mcp_ticketer/cli/ticket_commands.py +1013 -0
  47. mcp_ticketer/cli/update_checker.py +313 -0
  48. mcp_ticketer/cli/utils.py +114 -66
  49. mcp_ticketer/core/__init__.py +24 -1
  50. mcp_ticketer/core/adapter.py +250 -16
  51. mcp_ticketer/core/config.py +145 -37
  52. mcp_ticketer/core/env_discovery.py +101 -22
  53. mcp_ticketer/core/env_loader.py +349 -0
  54. mcp_ticketer/core/exceptions.py +160 -0
  55. mcp_ticketer/core/http_client.py +26 -26
  56. mcp_ticketer/core/instructions.py +405 -0
  57. mcp_ticketer/core/label_manager.py +732 -0
  58. mcp_ticketer/core/mappers.py +42 -30
  59. mcp_ticketer/core/models.py +280 -28
  60. mcp_ticketer/core/onepassword_secrets.py +379 -0
  61. mcp_ticketer/core/project_config.py +183 -49
  62. mcp_ticketer/core/registry.py +3 -3
  63. mcp_ticketer/core/session_state.py +171 -0
  64. mcp_ticketer/core/state_matcher.py +592 -0
  65. mcp_ticketer/core/url_parser.py +425 -0
  66. mcp_ticketer/core/validators.py +69 -0
  67. mcp_ticketer/defaults/ticket_instructions.md +644 -0
  68. mcp_ticketer/mcp/__init__.py +29 -1
  69. mcp_ticketer/mcp/__main__.py +60 -0
  70. mcp_ticketer/mcp/server/__init__.py +25 -0
  71. mcp_ticketer/mcp/server/__main__.py +60 -0
  72. mcp_ticketer/mcp/server/constants.py +58 -0
  73. mcp_ticketer/mcp/server/diagnostic_helper.py +175 -0
  74. mcp_ticketer/mcp/server/dto.py +195 -0
  75. mcp_ticketer/mcp/server/main.py +1343 -0
  76. mcp_ticketer/mcp/server/response_builder.py +206 -0
  77. mcp_ticketer/mcp/server/routing.py +655 -0
  78. mcp_ticketer/mcp/server/server_sdk.py +151 -0
  79. mcp_ticketer/mcp/server/tools/__init__.py +56 -0
  80. mcp_ticketer/mcp/server/tools/analysis_tools.py +495 -0
  81. mcp_ticketer/mcp/server/tools/attachment_tools.py +226 -0
  82. mcp_ticketer/mcp/server/tools/bulk_tools.py +273 -0
  83. mcp_ticketer/mcp/server/tools/comment_tools.py +152 -0
  84. mcp_ticketer/mcp/server/tools/config_tools.py +1439 -0
  85. mcp_ticketer/mcp/server/tools/diagnostic_tools.py +211 -0
  86. mcp_ticketer/mcp/server/tools/hierarchy_tools.py +921 -0
  87. mcp_ticketer/mcp/server/tools/instruction_tools.py +300 -0
  88. mcp_ticketer/mcp/server/tools/label_tools.py +948 -0
  89. mcp_ticketer/mcp/server/tools/pr_tools.py +152 -0
  90. mcp_ticketer/mcp/server/tools/search_tools.py +215 -0
  91. mcp_ticketer/mcp/server/tools/session_tools.py +170 -0
  92. mcp_ticketer/mcp/server/tools/ticket_tools.py +1268 -0
  93. mcp_ticketer/mcp/server/tools/user_ticket_tools.py +547 -0
  94. mcp_ticketer/queue/__init__.py +1 -0
  95. mcp_ticketer/queue/health_monitor.py +168 -136
  96. mcp_ticketer/queue/manager.py +95 -25
  97. mcp_ticketer/queue/queue.py +40 -21
  98. mcp_ticketer/queue/run_worker.py +6 -1
  99. mcp_ticketer/queue/ticket_registry.py +213 -155
  100. mcp_ticketer/queue/worker.py +109 -49
  101. mcp_ticketer-1.2.11.dist-info/METADATA +792 -0
  102. mcp_ticketer-1.2.11.dist-info/RECORD +110 -0
  103. mcp_ticketer/mcp/server.py +0 -1895
  104. mcp_ticketer-0.1.30.dist-info/METADATA +0 -413
  105. mcp_ticketer-0.1.30.dist-info/RECORD +0 -49
  106. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/WHEEL +0 -0
  107. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/entry_points.txt +0 -0
  108. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/licenses/LICENSE +0 -0
  109. {mcp_ticketer-0.1.30.dist-info → mcp_ticketer-1.2.11.dist-info}/top_level.txt +0 -0
mcp_ticketer/queue/worker.py
@@ -7,17 +7,17 @@ import threading
 import time
 from datetime import datetime
 from pathlib import Path
-from typing import Any, Optional
+from typing import Any

 from dotenv import load_dotenv

+# Import adapters module to trigger registration
+import mcp_ticketer.adapters  # noqa: F401
+
 from ..core import AdapterRegistry, Task
 from .queue import Queue, QueueItem, QueueStatus
 from .ticket_registry import TicketRegistry

-# Import adapters module to trigger registration
-import mcp_ticketer.adapters  # noqa: F401
-
 # Load environment variables from .env.local
 env_path = Path.cwd() / ".env.local"
 if env_path.exists():
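The new import order pulls in `mcp_ticketer.adapters` before anything asks `AdapterRegistry` for an adapter, so importing the package registers each adapter as a side effect. A minimal sketch of that register-on-import pattern, with hypothetical names (the real `AdapterRegistry` API may differ):

```python
# Hypothetical sketch of register-on-import; mcp_ticketer's real registry may differ.
class AdapterRegistry:
    _adapters: dict[str, type] = {}

    @classmethod
    def register(cls, name: str):
        def decorator(adapter_cls: type) -> type:
            cls._adapters[name] = adapter_cls  # registration happens at import time
            return adapter_cls

        return decorator

    @classmethod
    def get_adapter(cls, name: str, config: dict):
        return cls._adapters[name](config)


# Importing the module containing this class runs the decorator, which is why
# the worker imports mcp_ticketer.adapters purely for its side effect.
@AdapterRegistry.register("linear")
class LinearAdapter:
    def __init__(self, config: dict) -> None:
        self.config = config


print(AdapterRegistry.get_adapter("linear", {"api_key": "test"}))
```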
@@ -58,7 +58,7 @@ class Worker:

     def __init__(
         self,
-        queue: Optional[Queue] = None,
+        queue: Queue | None = None,
         batch_size: int = DEFAULT_BATCH_SIZE,
         max_concurrent: int = DEFAULT_MAX_CONCURRENT,
     ):
@@ -97,12 +97,12 @@ class Worker:
             f"Worker initialized with batch_size={batch_size}, max_concurrent={max_concurrent}"
         )

-    def _signal_handler(self, signum, frame):
+    def _signal_handler(self, signum: int, frame: Any) -> None:
         """Handle shutdown signals."""
         logger.info(f"Received signal {signum}, shutting down...")
         self.stop()

-    def start(self, daemon: bool = True):
+    def start(self, daemon: bool = True) -> None:
         """Start the worker.

         Args:
@@ -126,14 +126,14 @@ class Worker:
             # Run in main thread
             self._run_loop()

-    def stop(self):
+    def stop(self) -> None:
         """Stop the worker."""
         logger.info("Stopping worker...")
         self.running = False
         self.stop_event.set()

-    def _run_loop(self):
-        """Main worker loop with batch processing."""
+    def _run_loop(self) -> None:
+        """Run main worker loop with batch processing."""
         logger.info("Worker loop started")

         # Reset any stuck items on startup
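The `start(daemon=True)`/`stop()` signatures and the `stop_event` seen here suggest the usual thread-plus-event shutdown shape. A rough sketch under that assumption; the actual `_run_loop` polls the queue and resets stuck items, which is omitted here:

```python
import threading

# Illustrative start/stop skeleton only; timings and the real loop body are assumptions.
class Worker:
    def __init__(self) -> None:
        self.running = False
        self.stop_event = threading.Event()

    def start(self, daemon: bool = True) -> None:
        self.running = True
        if daemon:
            # Daemon thread so the process can exit even if the loop is blocked.
            threading.Thread(target=self._run_loop, daemon=True).start()
        else:
            # Run in main thread
            self._run_loop()

    def stop(self) -> None:
        self.running = False
        self.stop_event.set()

    def _run_loop(self) -> None:
        # Event.wait doubles as an interruptible sleep between polls.
        while self.running and not self.stop_event.wait(timeout=1.0):
            pass  # fetch and process a batch here


w = Worker()
w.start(daemon=True)
w.stop()
```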
@@ -174,7 +174,7 @@ class Worker:
                 break
         return batch

-    async def _process_batch(self, batch: list[QueueItem]):
+    async def _process_batch(self, batch: list[QueueItem]) -> None:
         """Process a batch of queue items with concurrency control.

         Args:
@@ -184,7 +184,7 @@ class Worker:
         logger.info(f"Processing batch of {len(batch)} items")

         # Group items by adapter for concurrent processing
-        adapter_groups = {}
+        adapter_groups: dict[str, list[Any]] = {}
         for item in batch:
             if item.adapter not in adapter_groups:
                 adapter_groups[item.adapter] = []
@@ -199,7 +199,9 @@ class Worker:
         # Wait for all adapter groups to complete
         await asyncio.gather(*tasks, return_exceptions=True)

-    async def _process_adapter_group(self, adapter: str, items: list[QueueItem]):
+    async def _process_adapter_group(
+        self, adapter: str, items: list[QueueItem]
+    ) -> None:
         """Process items for a specific adapter with concurrency control.

         Args:
@@ -216,7 +218,7 @@ class Worker:
         semaphore = self.adapter_semaphores[adapter]

         # Process items with concurrency control
-        async def process_with_semaphore(item):
+        async def process_with_semaphore(item: QueueItem) -> None:
             async with semaphore:
                 await self._process_item(item)

@@ -226,7 +228,7 @@ class Worker:
         # Process with concurrency control
         await asyncio.gather(*tasks, return_exceptions=True)

-    async def _process_item(self, item: QueueItem):
+    async def _process_item(self, item: QueueItem) -> None:
         """Process a single queue item.

         Args:
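In the preceding hunks, `process_with_semaphore` wraps each queue item in a per-adapter `asyncio.Semaphore`, so at most `max_concurrent` items per adapter run at once while `asyncio.gather` drives them. A self-contained sketch of that pattern; the sleep stands in for the real adapter call:

```python
import asyncio

# Sketch of a per-adapter concurrency cap; max_concurrent mirrors DEFAULT_MAX_CONCURRENT.
async def process_group(items: list[str], max_concurrent: int = 3) -> None:
    semaphore = asyncio.Semaphore(max_concurrent)

    async def process_with_semaphore(item: str) -> None:
        async with semaphore:  # at most max_concurrent items in flight
            await asyncio.sleep(0.1)  # stand-in for the adapter call
            print(f"processed {item}")

    # return_exceptions=True keeps one failing item from cancelling the rest.
    await asyncio.gather(
        *(process_with_semaphore(i) for i in items), return_exceptions=True
    )


asyncio.run(process_group([f"item-{n}" for n in range(10)]))
```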
@@ -263,15 +265,19 @@ class Worker:

             # Mark as completed in both queue and registry (atomic)
             success = self.queue.update_status(
-                item.id, QueueStatus.COMPLETED, result=result,
-                expected_status=QueueStatus.PROCESSING
+                item.id,
+                QueueStatus.COMPLETED,
+                result=result,
+                expected_status=QueueStatus.PROCESSING,
             )
             if success:
                 self.ticket_registry.update_ticket_status(
                     item.id, "completed", ticket_id=ticket_id, result_data=result
                 )
             else:
-                logger.warning(f"Failed to update status for {item.id} - item may have been processed by another worker")
+                logger.warning(
+                    f"Failed to update status for {item.id} - item may have been processed by another worker"
+                )

             self.stats["items_processed"] += 1
             logger.info(f"Successfully processed {item.id}, ticket ID: {ticket_id}")
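Passing `expected_status=QueueStatus.PROCESSING` turns the status change into a compare-and-set: the update only succeeds if no other worker has already moved the item on, and the boolean result gates the registry update. A minimal sketch of that guard using SQLite; the project's actual queue storage and `update_status` implementation may differ:

```python
import sqlite3

# Illustrative expected_status guard, not mcp_ticketer's real implementation.
def update_status(
    conn: sqlite3.Connection, item_id: str, new_status: str, expected_status: str
) -> bool:
    cur = conn.execute(
        # The WHERE clause only matches while the item is still in the expected
        # state, so a concurrent worker that already claimed it makes this a no-op.
        "UPDATE queue SET status = ? WHERE id = ? AND status = ?",
        (new_status, item_id, expected_status),
    )
    conn.commit()
    return cur.rowcount == 1


conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE queue (id TEXT PRIMARY KEY, status TEXT)")
conn.execute("INSERT INTO queue VALUES ('q-1', 'processing')")
print(update_status(conn, "q-1", "completed", "processing"))  # True
print(update_status(conn, "q-1", "failed", "processing"))     # False: already completed
```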
@@ -301,26 +307,35 @@ class Worker:
                         item.id, "queued", retry_count=new_retry_count
                     )
                 else:
-                    logger.warning(f"Failed to increment retry for {item.id} - item may have been processed by another worker")
+                    logger.warning(
+                        f"Failed to increment retry for {item.id} - item may have been processed by another worker"
+                    )

                 # Wait before retry
                 await asyncio.sleep(retry_delay)
             else:
                 # Max retries exceeded, mark as failed (atomic)
                 success = self.queue.update_status(
-                    item.id, QueueStatus.FAILED, error_message=str(e),
-                    expected_status=QueueStatus.PROCESSING
+                    item.id,
+                    QueueStatus.FAILED,
+                    error_message=str(e),
+                    expected_status=QueueStatus.PROCESSING,
                 )
                 if success:
                     self.ticket_registry.update_ticket_status(
-                        item.id, "failed", error_message=str(e), retry_count=item.retry_count
+                        item.id,
+                        "failed",
+                        error_message=str(e),
+                        retry_count=item.retry_count,
                     )
                 else:
-                    logger.warning(f"Failed to mark {item.id} as failed - item may have been processed by another worker")
+                    logger.warning(
+                        f"Failed to mark {item.id} as failed - item may have been processed by another worker"
+                    )
                 self.stats["items_failed"] += 1
                 logger.error(f"Max retries exceeded for {item.id}, marking as failed")

-    async def _check_rate_limit(self, adapter: str):
+    async def _check_rate_limit(self, adapter: str) -> None:
         """Check and enforce rate limits.

         Args:
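The failure path caps retries and sleeps `retry_delay` between attempts before giving up and marking the item failed. The worker itself requeues items and persists `retry_count` on them; the in-process loop below only illustrates the cap-and-delay idea under assumed constants:

```python
import asyncio

MAX_RETRIES = 3
RETRY_DELAY = 0.1  # shortened for illustration; the worker's delay is configurable


async def run_with_retries(do_work) -> bool:
    """Illustrative cap-and-delay loop, not the worker's requeue-based model."""
    attempts = 0
    while True:
        try:
            await do_work()
            return True
        except Exception:
            attempts += 1
            if attempts >= MAX_RETRIES:
                return False  # caller marks the item as failed
            await asyncio.sleep(RETRY_DELAY)  # wait before retry


async def flaky() -> None:
    raise RuntimeError("adapter unavailable")


print(asyncio.run(run_with_retries(flaky)))  # False after MAX_RETRIES attempts
```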
@@ -344,7 +359,7 @@ class Worker:

         self.last_request_times[adapter] = datetime.now()

-    def _get_adapter(self, item: QueueItem):
+    def _get_adapter(self, item: QueueItem) -> Any:
         """Get adapter instance for item.

         Args:
@@ -360,19 +375,31 @@ class Worker:

         from ..cli.main import load_config

-        # Use item's project_dir if available, otherwise use current directory
-        project_path = Path(item.project_dir) if item.project_dir else None
-
-        # Load environment variables from project directory's .env.local if it exists
-        if project_path:
-            env_file = project_path / ".env.local"
-            if env_file.exists():
-                logger.debug(f"Loading environment from {env_file}")
-                load_dotenv(env_file)
-
-        config = load_config(project_dir=project_path)
-        adapters_config = config.get("adapters", {})
-        adapter_config = adapters_config.get(item.adapter, {})
+        # PRIORITY 1: Use adapter_config from queue item if available (explicit config)
+        if item.adapter_config:
+            logger.info("Worker using explicit adapter_config from queue item")
+            adapter_config = item.adapter_config
+            logger.info(f"Worker adapter config for {item.adapter}: {adapter_config}")
+        else:
+            # PRIORITY 2: Load from project config file
+            # Use item's project_dir if available, otherwise use current directory
+            project_path = Path(item.project_dir) if item.project_dir else None
+
+            # Load environment variables from project directory's .env.local if it exists
+            if project_path:
+                env_file = project_path / ".env.local"
+                if env_file.exists():
+                    logger.info(f"Worker loading environment from {env_file}")
+                    load_dotenv(env_file)
+
+            logger.info(f"Worker project_path: {project_path}")
+            logger.info(f"Worker current working directory: {os.getcwd()}")
+
+            config = load_config(project_dir=project_path)
+            logger.info(f"Worker loaded config: {config}")
+            adapters_config = config.get("adapters", {})
+            adapter_config = adapters_config.get(item.adapter, {})
+            logger.info(f"Worker adapter config for {item.adapter}: {adapter_config}")

         # Add environment variables for authentication
         if item.adapter == "linear":
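`_get_adapter` now resolves configuration in two tiers: an explicit `adapter_config` carried on the queue item wins, otherwise the worker falls back to the project config file (loading `.env.local` from the item's `project_dir` first). A condensed sketch of that resolution order, with a hypothetical `load_config` callable and the `.env.local` handling omitted:

```python
from pathlib import Path
from typing import Any, Callable

# Condensed, illustrative version of the two-tier lookup shown in the diff.
def resolve_adapter_config(
    item_config: dict[str, Any] | None,
    project_dir: str | None,
    adapter_name: str,
    load_config: Callable[..., dict[str, Any]],
) -> dict[str, Any]:
    # PRIORITY 1: explicit config carried on the queue item wins outright.
    if item_config:
        return item_config
    # PRIORITY 2: fall back to the per-adapter section of the project config.
    project_path = Path(project_dir) if project_dir else None
    config = load_config(project_dir=project_path)
    return config.get("adapters", {}).get(adapter_name, {})


fake_loader = lambda project_dir=None: {"adapters": {"linear": {"team_key": "ENG"}}}
print(resolve_adapter_config(None, None, "linear", fake_loader))              # from project config
print(resolve_adapter_config({"api_key": "x"}, None, "linear", fake_loader))  # explicit wins
```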
@@ -387,9 +414,37 @@ class Worker:
             if not adapter_config.get("email"):
                 adapter_config["email"] = os.getenv("JIRA_ACCESS_USER")

-        return AdapterRegistry.get_adapter(item.adapter, adapter_config)
+        logger.info(f"Worker final adapter config: {adapter_config}")
+
+        # Add debugging for Linear adapter specifically
+        if item.adapter == "linear":
+            import os
+
+            linear_api_key = os.getenv("LINEAR_API_KEY", "Not set")
+            logger.info(
+                f"Worker LINEAR_API_KEY: {linear_api_key[:20] if linear_api_key != 'Not set' else 'Not set'}..."
+            )
+            logger.info(
+                f"Worker adapter_config api_key: {adapter_config.get('api_key', 'Not set')[:20] if adapter_config.get('api_key') else 'Not set'}..."
+            )
+
+        adapter = AdapterRegistry.get_adapter(item.adapter, adapter_config)
+        logger.info(
+            f"Worker created adapter: {type(adapter)} with team_id: {getattr(adapter, 'team_id_config', 'Not set')}"
+        )
+
+        # Add more debugging for Linear adapter
+        if item.adapter == "linear":
+            logger.info(
+                f"Worker Linear adapter api_key: {getattr(adapter, 'api_key', 'Not set')[:20] if getattr(adapter, 'api_key', None) else 'Not set'}..."
+            )
+            logger.info(
+                f"Worker Linear adapter team_key: {getattr(adapter, 'team_key', 'Not set')}"
+            )
+
+        return adapter

-    async def _execute_operation(self, adapter, item: QueueItem) -> dict[str, Any]:
+    async def _execute_operation(self, adapter: Any, item: QueueItem) -> dict[str, Any]:
         """Execute the queued operation.

         Args:
@@ -439,14 +494,13 @@ class Worker:
             result = await adapter.create_epic(
                 title=data["title"],
                 description=data.get("description"),
-                **{k: v for k, v in data.items()
-                   if k not in ["title", "description"]}
+                **{k: v for k, v in data.items() if k not in ["title", "description"]},
             )
             return {
                 "id": result.id if result else None,
                 "title": result.title if result else None,
                 "type": "epic",
-                "success": bool(result)
+                "success": bool(result),
             }

         elif operation == "create_issue":
@@ -454,15 +508,18 @@ class Worker:
                 title=data["title"],
                 description=data.get("description"),
                 epic_id=data.get("epic_id"),
-                **{k: v for k, v in data.items()
-                   if k not in ["title", "description", "epic_id"]}
+                **{
+                    k: v
+                    for k, v in data.items()
+                    if k not in ["title", "description", "epic_id"]
+                },
             )
             return {
                 "id": result.id if result else None,
                 "title": result.title if result else None,
                 "type": "issue",
                 "epic_id": data.get("epic_id"),
-                "success": bool(result)
+                "success": bool(result),
             }

         elif operation == "create_task":
@@ -470,15 +527,18 @@ class Worker:
                 title=data["title"],
                 parent_id=data["parent_id"],
                 description=data.get("description"),
-                **{k: v for k, v in data.items()
-                   if k not in ["title", "parent_id", "description"]}
+                **{
+                    k: v
+                    for k, v in data.items()
+                    if k not in ["title", "parent_id", "description"]
+                },
             )
             return {
                 "id": result.id if result else None,
                 "title": result.title if result else None,
                 "type": "task",
                 "parent_id": data["parent_id"],
-                "success": bool(result)
+                "success": bool(result),
             }

         else:
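Each `create_*` branch forwards any extra fields with a dict comprehension that excludes the keys already passed explicitly. A small illustration of that passthrough filter; the field names below are examples, not the package's actual ticket schema:

```python
from typing import Any

# Illustrative passthrough filter matching the **{...} pattern in the diff.
def extra_kwargs(data: dict[str, Any], consumed: tuple[str, ...]) -> dict[str, Any]:
    # Forward everything the caller supplied except keys already passed explicitly.
    return {k: v for k, v in data.items() if k not in consumed}


payload = {"title": "Fix login", "description": "Session expires early", "priority": "high"}
# Roughly what create_issue(title=..., description=..., **extra_kwargs(...)) would forward:
print(extra_kwargs(payload, ("title", "description")))  # {'priority': 'high'}
```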