monoco-toolkit 0.3.12__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120) hide show
  1. monoco/core/automation/__init__.py +0 -11
  2. monoco/core/automation/handlers.py +108 -26
  3. monoco/core/config.py +28 -10
  4. monoco/core/daemon/__init__.py +5 -0
  5. monoco/core/daemon/pid.py +290 -0
  6. monoco/core/injection.py +86 -8
  7. monoco/core/integrations.py +0 -24
  8. monoco/core/router/__init__.py +1 -39
  9. monoco/core/router/action.py +3 -142
  10. monoco/core/scheduler/events.py +28 -2
  11. monoco/core/setup.py +9 -0
  12. monoco/core/sync.py +199 -4
  13. monoco/core/watcher/__init__.py +6 -0
  14. monoco/core/watcher/base.py +18 -1
  15. monoco/core/watcher/im.py +460 -0
  16. monoco/core/watcher/memo.py +40 -48
  17. monoco/daemon/app.py +3 -60
  18. monoco/daemon/commands.py +459 -25
  19. monoco/daemon/scheduler.py +1 -16
  20. monoco/daemon/services.py +15 -0
  21. monoco/features/agent/resources/en/AGENTS.md +14 -14
  22. monoco/features/agent/resources/en/skills/monoco_role_engineer/SKILL.md +101 -0
  23. monoco/features/agent/resources/en/skills/monoco_role_manager/SKILL.md +95 -0
  24. monoco/features/agent/resources/en/skills/monoco_role_planner/SKILL.md +177 -0
  25. monoco/features/agent/resources/en/skills/monoco_role_reviewer/SKILL.md +139 -0
  26. monoco/features/agent/resources/zh/skills/monoco_role_engineer/SKILL.md +101 -0
  27. monoco/features/agent/resources/zh/skills/monoco_role_manager/SKILL.md +95 -0
  28. monoco/features/agent/resources/zh/skills/monoco_role_planner/SKILL.md +177 -0
  29. monoco/features/agent/resources/zh/skills/monoco_role_reviewer/SKILL.md +139 -0
  30. monoco/features/hooks/__init__.py +61 -6
  31. monoco/features/hooks/commands.py +281 -271
  32. monoco/features/hooks/dispatchers/__init__.py +23 -0
  33. monoco/features/hooks/dispatchers/agent_dispatcher.py +486 -0
  34. monoco/features/hooks/dispatchers/git_dispatcher.py +478 -0
  35. monoco/features/hooks/manager.py +357 -0
  36. monoco/features/hooks/models.py +262 -0
  37. monoco/features/hooks/parser.py +322 -0
  38. monoco/features/hooks/universal_interceptor.py +503 -0
  39. monoco/features/im/__init__.py +67 -0
  40. monoco/features/im/core.py +782 -0
  41. monoco/features/im/models.py +311 -0
  42. monoco/features/issue/commands.py +65 -50
  43. monoco/features/issue/core.py +199 -99
  44. monoco/features/issue/domain_commands.py +0 -19
  45. monoco/features/issue/resources/en/AGENTS.md +17 -122
  46. monoco/features/issue/resources/hooks/agent/before-tool.sh +102 -0
  47. monoco/features/issue/resources/hooks/agent/session-start.sh +88 -0
  48. monoco/features/issue/resources/hooks/{post-checkout.sh → git/git-post-checkout.sh} +10 -9
  49. monoco/features/issue/resources/hooks/git/git-pre-commit.sh +31 -0
  50. monoco/features/issue/resources/hooks/{pre-push.sh → git/git-pre-push.sh} +7 -13
  51. monoco/features/issue/resources/zh/AGENTS.md +18 -123
  52. monoco/features/memo/cli.py +15 -64
  53. monoco/features/memo/core.py +6 -34
  54. monoco/features/memo/models.py +24 -15
  55. monoco/features/memo/resources/en/AGENTS.md +31 -0
  56. monoco/features/memo/resources/zh/AGENTS.md +28 -5
  57. monoco/main.py +5 -3
  58. {monoco_toolkit-0.3.12.dist-info → monoco_toolkit-0.4.0.dist-info}/METADATA +1 -1
  59. monoco_toolkit-0.4.0.dist-info/RECORD +170 -0
  60. monoco/core/automation/config.py +0 -338
  61. monoco/core/execution.py +0 -67
  62. monoco/core/executor/__init__.py +0 -38
  63. monoco/core/executor/agent_action.py +0 -254
  64. monoco/core/executor/git_action.py +0 -303
  65. monoco/core/executor/im_action.py +0 -309
  66. monoco/core/executor/pytest_action.py +0 -218
  67. monoco/core/router/router.py +0 -392
  68. monoco/features/agent/resources/atoms/atom-code-dev.yaml +0 -61
  69. monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +0 -73
  70. monoco/features/agent/resources/atoms/atom-knowledge.yaml +0 -55
  71. monoco/features/agent/resources/atoms/atom-review.yaml +0 -60
  72. monoco/features/agent/resources/en/skills/monoco_atom_core/SKILL.md +0 -99
  73. monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
  74. monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +0 -93
  75. monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +0 -85
  76. monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -114
  77. monoco/features/agent/resources/workflows/workflow-dev.yaml +0 -83
  78. monoco/features/agent/resources/workflows/workflow-issue-create.yaml +0 -72
  79. monoco/features/agent/resources/workflows/workflow-review.yaml +0 -94
  80. monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +0 -49
  81. monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +0 -46
  82. monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +0 -46
  83. monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +0 -47
  84. monoco/features/agent/resources/zh/skills/monoco_atom_core/SKILL.md +0 -99
  85. monoco/features/agent/resources/zh/skills/monoco_workflow_agent_engineer/SKILL.md +0 -94
  86. monoco/features/agent/resources/zh/skills/monoco_workflow_agent_manager/SKILL.md +0 -88
  87. monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +0 -259
  88. monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +0 -137
  89. monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +0 -278
  90. monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +0 -35
  91. monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +0 -35
  92. monoco/features/hooks/adapter.py +0 -67
  93. monoco/features/hooks/core.py +0 -441
  94. monoco/features/i18n/resources/en/skills/monoco_atom_i18n/SKILL.md +0 -96
  95. monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
  96. monoco/features/i18n/resources/zh/skills/monoco_atom_i18n/SKILL.md +0 -96
  97. monoco/features/i18n/resources/zh/skills/monoco_workflow_i18n_scan/SKILL.md +0 -105
  98. monoco/features/issue/resources/en/skills/monoco_atom_issue/SKILL.md +0 -165
  99. monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
  100. monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +0 -224
  101. monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +0 -159
  102. monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
  103. monoco/features/issue/resources/hooks/pre-commit.sh +0 -41
  104. monoco/features/issue/resources/zh/skills/monoco_atom_issue_lifecycle/SKILL.md +0 -190
  105. monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +0 -167
  106. monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +0 -224
  107. monoco/features/issue/resources/zh/skills/monoco_workflow_issue_management/SKILL.md +0 -159
  108. monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +0 -203
  109. monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +0 -77
  110. monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +0 -140
  111. monoco/features/memo/resources/zh/skills/monoco_atom_memo/SKILL.md +0 -77
  112. monoco/features/memo/resources/zh/skills/monoco_workflow_note_processing/SKILL.md +0 -140
  113. monoco/features/spike/resources/en/skills/monoco_atom_spike/SKILL.md +0 -76
  114. monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +0 -121
  115. monoco/features/spike/resources/zh/skills/monoco_atom_spike/SKILL.md +0 -76
  116. monoco/features/spike/resources/zh/skills/monoco_workflow_research/SKILL.md +0 -121
  117. monoco_toolkit-0.3.12.dist-info/RECORD +0 -202
  118. {monoco_toolkit-0.3.12.dist-info → monoco_toolkit-0.4.0.dist-info}/WHEEL +0 -0
  119. {monoco_toolkit-0.3.12.dist-info → monoco_toolkit-0.4.0.dist-info}/entry_points.txt +0 -0
  120. {monoco_toolkit-0.3.12.dist-info → monoco_toolkit-0.4.0.dist-info}/licenses/LICENSE +0 -0
@@ -2,8 +2,6 @@
2
2
  Automation Module - Event-driven automation framework.
3
3
 
4
4
  This module provides:
5
- - YAML/JSON configuration parsing
6
- - Trigger configuration management
7
5
  - Field change detection
8
6
  - Independent Event Handlers for Agent collaboration (FEAT-0162)
9
7
 
@@ -12,11 +10,6 @@ stateless microservice that responds to specific events. Workflow emerges
12
10
  from the natural interaction of handlers.
13
11
  """
14
12
 
15
- from .config import (
16
- TriggerConfig,
17
- AutomationConfig,
18
- load_automation_config,
19
- )
20
13
  from .field_watcher import (
21
14
  YAMLFrontMatterExtractor,
22
15
  FieldWatcher,
@@ -32,10 +25,6 @@ from .handlers import (
32
25
  )
33
26
 
34
27
  __all__ = [
35
- # Config
36
- "TriggerConfig",
37
- "AutomationConfig",
38
- "load_automation_config",
39
28
  # Field watching
40
29
  "YAMLFrontMatterExtractor",
41
30
  "FieldWatcher",
@@ -21,7 +21,7 @@ from __future__ import annotations
21
21
 
22
22
  import logging
23
23
  from pathlib import Path
24
- from typing import Any, Dict, Optional, Set
24
+ from typing import Any, Dict, List, Optional, Set
25
25
 
26
26
  from monoco.core.scheduler import (
27
27
  AgentEvent,
@@ -31,6 +31,8 @@ from monoco.core.scheduler import (
31
31
  event_bus,
32
32
  )
33
33
  from monoco.core.router import ActionResult
34
+ from monoco.features.memo.models import Memo
35
+ from monoco.features.memo.core import load_memos, get_inbox_path
34
36
 
35
37
  logger = logging.getLogger(__name__)
36
38
 
@@ -361,6 +363,12 @@ class MemoThresholdHandler:
361
363
  Condition: Pending memo count exceeds threshold
362
364
  Action: Spawn Architect agent to analyze and create Issues
363
365
 
366
+ Signal Queue Model (FEAT-0165):
367
+ - Memos are signals, not assets
368
+ - File existence = signal pending
369
+ - File cleared = signal consumed
370
+ - Git is the archive, not app state
371
+
364
372
  Emergent Workflow: Memos (threshold) → Architect → Issues
365
373
 
366
374
  This handler is stateless and self-contained.
@@ -384,7 +392,6 @@ class MemoThresholdHandler:
384
392
  self.name = name
385
393
  self.threshold = threshold
386
394
  self._subscribed = False
387
- self._last_processed_count = 0
388
395
 
389
396
  def _should_handle(self, event: AgentEvent) -> bool:
390
397
  """
@@ -400,65 +407,110 @@ class MemoThresholdHandler:
400
407
  logger.debug(f"Pending count {pending_count} below threshold {self.threshold}")
401
408
  return False
402
409
 
403
- if pending_count <= self._last_processed_count:
404
- logger.debug(f"Already processed {self._last_processed_count} memos, skipping")
405
- return False
406
-
407
410
  return True
408
411
 
409
412
  async def _handle(self, event: AgentEvent) -> Optional[ActionResult]:
410
413
  """
411
414
  Handle the event by spawning Architect agent.
412
415
 
413
- The Architect will:
414
- 1. Read the Memos/inbox.md file
415
- 2. Analyze accumulated ideas
416
- 3. Create appropriate Issue tickets
417
- 4. Clear or organize processed memos
416
+ Signal Queue Semantics:
417
+ 1. Atomically load and clear inbox BEFORE scheduling
418
+ 2. Memos are embedded in prompt, not read from file
419
+ 3. File cleared = consumed, no state needed
420
+
421
+ This ensures:
422
+ - Natural idempotency (deleted memos won't be reprocessed)
423
+ - No dependency on memory state across restarts
424
+ - Architect always has data even if file is cleared
418
425
  """
419
- file_path = event.payload.get("path", "Memos/inbox.md")
426
+ file_path_str = event.payload.get("path", "Memos/inbox.md")
427
+ file_path = Path(file_path_str)
420
428
  pending_count = event.payload.get("pending_count", 0)
421
429
 
422
- logger.info(f"MemoThresholdHandler: Spawning Architect for {pending_count} memos")
430
+ logger.info(f"MemoThresholdHandler: Processing {pending_count} memos")
423
431
 
424
- self._last_processed_count = pending_count
432
+ # Phase 1: Atomically load and clear inbox
433
+ try:
434
+ # Load memos before clearing
435
+ memos = self._load_and_clear_memos(file_path)
436
+ if not memos:
437
+ logger.warning("Inbox was empty after locking, skipping")
438
+ return None
439
+ except Exception as e:
440
+ logger.error(f"Failed to load and clear inbox: {e}")
441
+ return ActionResult.failure_result(
442
+ error=f"Failed to consume memos: {e}",
443
+ metadata={"file_path": file_path_str},
444
+ )
425
445
 
446
+ # Phase 2: Schedule Architect with embedded memos
426
447
  task = AgentTask(
427
448
  task_id=f"architect-memo-{event.timestamp.timestamp()}",
428
449
  role_name="Architect",
429
450
  issue_id="memo-analysis",
430
- prompt=self._build_prompt(file_path, pending_count),
451
+ prompt=self._build_prompt(file_path_str, memos),
431
452
  engine="gemini",
432
453
  timeout=900,
433
454
  metadata={
434
455
  "trigger": "memo_threshold",
435
- "file_path": file_path,
456
+ "file_path": file_path_str,
436
457
  "pending_count": pending_count,
437
458
  "threshold": self.threshold,
459
+ "memo_count": len(memos),
438
460
  },
439
461
  )
440
462
 
441
463
  try:
442
464
  session_id = await self.scheduler.schedule(task)
443
- logger.info(f"Architect scheduled: session={session_id}")
465
+ logger.info(f"Architect scheduled: session={session_id} with {len(memos)} memos")
444
466
 
445
467
  return ActionResult.success_result(
446
468
  output={
447
469
  "session_id": session_id,
448
470
  "role": "Architect",
449
471
  "trigger": "memo_threshold",
450
- "pending_count": pending_count,
472
+ "memo_count": len(memos),
451
473
  },
452
- metadata={"file_path": file_path},
474
+ metadata={"file_path": file_path_str},
453
475
  )
454
476
 
455
477
  except Exception as e:
456
478
  logger.error(f"Failed to spawn Architect: {e}")
479
+ # Note: At this point memos are already cleared from inbox
480
+ # This is intentional - we trade "at-least-once" for "at-most-once" semantics
481
+ # If Architect fails, the memos are in git history
457
482
  return ActionResult.failure_result(
458
483
  error=f"Failed to schedule Architect: {e}",
459
- metadata={"file_path": file_path},
484
+ metadata={"file_path": file_path_str, "memos_consumed": len(memos)},
460
485
  )
461
486
 
487
+ def _load_and_clear_memos(self, inbox_path: Path) -> List[Memo]:
488
+ """
489
+ Atomically load all memos and clear the inbox file.
490
+
491
+ This implements the "consume" operation in signal queue model.
492
+ File existence is the state - clearing the file marks all signals consumed.
493
+ """
494
+ # Resolve path relative to project root if needed
495
+ if not inbox_path.is_absolute():
496
+ from monoco.core.config import find_monoco_root
497
+ project_root = find_monoco_root()
498
+ inbox_path = project_root / inbox_path
499
+
500
+ if not inbox_path.exists():
501
+ return []
502
+
503
+ # Load memos directly from inbox path
504
+ # inbox_path is Memos/inbox.md, issues_root is sibling: Issues/
505
+ issues_root = inbox_path.parent.parent / "Issues"
506
+ memos = load_memos(issues_root)
507
+
508
+ # Clear inbox (atomic write)
509
+ inbox_path.write_text("# Monoco Memos Inbox\n\n", encoding="utf-8")
510
+ logger.info(f"Inbox cleared after consuming {len(memos)} memos")
511
+
512
+ return memos
513
+
462
514
  async def __call__(self, event: AgentEvent) -> Optional[ActionResult]:
463
515
  """Make handler callable - used as EventBus callback."""
464
516
  try:
@@ -486,18 +538,48 @@ class MemoThresholdHandler:
486
538
  self._subscribed = False
487
539
  logger.info(f"{self.name} stopped")
488
540
 
489
- def _build_prompt(self, file_path: str, pending_count: int) -> str:
490
- """Build the prompt for the Architect agent."""
491
- return f"""You are the Architect. {pending_count} memos have accumulated in {file_path}.
541
+ def _build_prompt(self, file_path: str, memos: List[Memo]) -> str:
542
+ """Build the prompt for the Architect agent with embedded memos."""
543
+ # Format memos for prompt
544
+ memo_sections = []
545
+ for i, memo in enumerate(memos, 1):
546
+ section = f"""### Memo {i} (ID: {memo.uid})
547
+ - **Time**: {memo.timestamp.strftime("%Y-%m-%d %H:%M:%S")}
548
+ - **Type**: {memo.type}
549
+ - **Source**: {memo.source}
550
+ - **Author**: {memo.author}
551
+ {'' if not memo.context else f'- **Context**: `{memo.context}`'}
552
+
553
+ {memo.content}
554
+ """
555
+ memo_sections.append(section)
556
+
557
+ memos_text = "\n".join(memo_sections)
558
+
559
+ return f"""You are the Architect. {len(memos)} memos have been consumed from {file_path}.
492
560
 
493
- Your task:
494
- 1. Read and analyze the accumulated memos
561
+ ## Consumed Memos (Signal Queue Model)
562
+
563
+ The following memos have been atomically consumed from the inbox.
564
+ They are provided here for your analysis - do NOT read the inbox file as it has been cleared.
565
+
566
+ {memos_text}
567
+
568
+ ## Your Task
569
+
570
+ 1. Analyze the accumulated memos above
495
571
  2. Categorize and prioritize the ideas
496
572
  3. Create Issue tickets for actionable items:
497
573
  - Use `monoco issue create` command
498
574
  - Set appropriate type (feature, fix, chore)
499
575
  - Set stage to 'draft' for review
500
- 4. Organize or clear processed memos
576
+ 4. Link related memos to created issues via `source_memo` field if applicable
577
+
578
+ ## Signal Queue Semantics
579
+
580
+ - Memos are signals, not assets - they are consumed (deleted) upon processing
581
+ - No need to "resolve" or "link" memos - just create Issues from them
582
+ - Historical memos can be found in git history if needed
501
583
 
502
584
  Focus on turning raw ideas into structured, actionable work items."""
503
585
 
monoco/core/config.py CHANGED
@@ -462,8 +462,13 @@ class ConfigMonitor:
462
462
  self.config_path = config_path
463
463
  self.on_change = on_change
464
464
  self.observer = Observer()
465
+ self._started = False
465
466
 
466
467
  async def start(self):
468
+ if self._started:
469
+ logger.warning(f"Config Monitor already started for {self.config_path}")
470
+ return
471
+
467
472
  loop = asyncio.get_running_loop()
468
473
  event_handler = ConfigEventHandler(loop, self.on_change, self.config_path)
469
474
 
@@ -471,15 +476,28 @@ class ConfigMonitor:
471
476
  # Ensure parent exists at least
472
477
  self.config_path.parent.mkdir(parents=True, exist_ok=True)
473
478
 
474
- # We watch the parent directory for the specific file
475
- self.observer.schedule(
476
- event_handler, str(self.config_path.parent), recursive=False
477
- )
478
- self.observer.start()
479
- logger.info(f"Config Monitor started for {self.config_path}")
479
+ # Watch the specific file, not the parent directory
480
+ # This avoids "already scheduled" errors when multiple files are in the same directory
481
+ try:
482
+ self.observer.schedule(
483
+ event_handler, str(self.config_path), recursive=False
484
+ )
485
+ self.observer.start()
486
+ self._started = True
487
+ logger.info(f"Config Monitor started for {self.config_path}")
488
+ except RuntimeError as e:
489
+ logger.error(f"Failed to start Config Monitor for {self.config_path}: {e}")
490
+ raise
480
491
 
481
492
  def stop(self):
482
- if self.observer.is_alive():
483
- self.observer.stop()
484
- self.observer.join()
485
- logger.info(f"Config Monitor stopped for {self.config_path}")
493
+ if not self._started:
494
+ return
495
+ try:
496
+ if self.observer.is_alive():
497
+ self.observer.stop()
498
+ self.observer.join()
499
+ logger.info(f"Config Monitor stopped for {self.config_path}")
500
+ except Exception as e:
501
+ logger.warning(f"Error stopping Config Monitor: {e}")
502
+ finally:
503
+ self._started = False
@@ -0,0 +1,5 @@
1
+ """Monoco Daemon core components."""
2
+
3
+ from monoco.core.daemon.pid import PIDManager, PIDFileError, PortManager
4
+
5
+ __all__ = ["PIDManager", "PIDFileError", "PortManager"]
@@ -0,0 +1,290 @@
1
+ """PID file management and port utilities for Monoco Daemon."""
2
+
3
+ import json
4
+ import os
5
+ import signal
6
+ import socket
7
+ from datetime import datetime
8
+ from pathlib import Path
9
+ from typing import Optional
10
+
11
+
12
class PIDFileError(Exception):
    """Raised when a PID-file or port-management operation fails."""
16
+
17
+
18
class PIDManager:
    """Manages the PID file for a workspace-scoped daemon process.

    PID file format (JSON):
        {
            "pid": 12345,
            "host": "127.0.0.1",
            "port": 8642,
            "started_at": "2026-02-03T12:00:00",
            "version": "0.3.12"
        }
    """

    PID_FILENAME = "monoco.pid"
    PID_DIR = "run"

    def __init__(self, workspace_root: Path):
        self.workspace_root = Path(workspace_root)
        self.pid_file = self._get_pid_file_path()

    def _get_pid_file_path(self) -> Path:
        """Return the workspace PID file path, creating its directory if needed."""
        pid_dir = self.workspace_root / ".monoco" / self.PID_DIR
        pid_dir.mkdir(parents=True, exist_ok=True)
        return pid_dir / self.PID_FILENAME

    def create_pid_file(
        self, host: str, port: int, version: str = "0.3.12"
    ) -> Path:
        """Create a PID file with process metadata.

        Args:
            host: The host address the daemon is listening on
            port: The port the daemon is listening on
            version: Monoco toolkit version
                NOTE(review): this default looks stale for the 0.4.0
                release; callers should pass the real package version.

        Returns:
            Path to the created PID file

        Raises:
            PIDFileError: If a PID file exists and its process is still
                alive, or if the file cannot be written.
        """
        # Check for existing PID file; read_pid_file() already guarantees
        # a dict with an integer "pid" (malformed files return None).
        existing = self.read_pid_file()
        if existing and self.is_process_alive(existing["pid"]):
            raise PIDFileError(
                f"Daemon already running (PID: {existing['pid']}, "
                f"port: {existing.get('port', 'unknown')})"
            )

        # Clean up stale PID file if exists
        if self.pid_file.exists():
            self.remove_pid_file()

        pid_data = {
            "pid": os.getpid(),
            "host": host,
            "port": port,
            "started_at": datetime.now().isoformat(),
            "version": version,
        }

        # Write atomically: dump to a temp file, then rename into place so
        # a concurrent reader never observes a partially written PID file.
        temp_file = self.pid_file.with_suffix(".tmp")
        try:
            with open(temp_file, "w", encoding="utf-8") as f:
                json.dump(pid_data, f, indent=2)
            temp_file.rename(self.pid_file)
        except Exception as e:
            if temp_file.exists():
                temp_file.unlink()
            raise PIDFileError(f"Failed to create PID file: {e}") from e

        return self.pid_file

    def read_pid_file(self) -> Optional[dict]:
        """Read and validate the PID file.

        Returns:
            Dict with pid, host, port, started_at, version — or None when
            the file is missing, unreadable, or malformed (non-dict JSON,
            or a missing/non-integer "pid" entry).
        """
        try:
            with open(self.pid_file, "r", encoding="utf-8") as f:
                data = json.load(f)
        except (FileNotFoundError, json.JSONDecodeError, OSError):
            return None

        # Guard against hand-edited or truncated files so callers can
        # safely index data["pid"]. bool is an int subclass — exclude it.
        pid = data.get("pid") if isinstance(data, dict) else None
        if not isinstance(pid, int) or isinstance(pid, bool):
            return None
        return data

    def remove_pid_file(self) -> bool:
        """Remove the PID file.

        Returns:
            True if file was removed, False if it didn't exist
        """
        try:
            self.pid_file.unlink()
            return True
        except FileNotFoundError:
            return False

    @staticmethod
    def is_process_alive(pid: int) -> bool:
        """Check if a process with given PID is still running.

        Args:
            pid: Process ID to check

        Returns:
            True if process exists and is running
        """
        # pid <= 0 addresses a process *group* (and os.kill(0, 0) always
        # "succeeds"), so a corrupt or zero PID must never read as alive.
        if pid <= 0:
            return False
        try:
            os.kill(pid, 0)
        except PermissionError:
            # EPERM: the process exists but belongs to another user —
            # it is alive even though we may not signal it.
            return True
        except OSError:
            return False
        return True

    def get_daemon_info(self) -> Optional[dict]:
        """Get daemon info if it's running.

        Returns:
            Daemon info dict if running, None otherwise. A stale PID file
            (recorded process no longer alive) is removed as a side effect.
        """
        pid_data = self.read_pid_file()
        if not pid_data:
            return None

        if not self.is_process_alive(pid_data["pid"]):
            # Stale PID file, clean it up
            self.remove_pid_file()
            return None

        return pid_data

    def send_signal(self, sig: int) -> bool:
        """Send a signal to the daemon process.

        Args:
            sig: Signal to send (e.g., signal.SIGTERM)

        Returns:
            True if signal was sent successfully
        """
        pid_data = self.read_pid_file()
        if not pid_data:
            return False

        try:
            os.kill(pid_data["pid"], sig)
            return True
        except OSError:
            # Covers ProcessLookupError (gone) and PermissionError.
            return False

    def terminate(self, timeout: int = 5) -> bool:
        """Gracefully terminate the daemon process.

        Sends SIGTERM, polls up to ``timeout`` seconds for exit, then
        escalates to SIGKILL. The PID file is removed in every successful
        path.

        Args:
            timeout: Seconds to wait for graceful shutdown

        Returns:
            True if process was terminated (or already gone); False when
            there is no usable PID file to act on.
        """
        import time

        pid_data = self.read_pid_file()
        if not pid_data:
            return False

        pid = pid_data["pid"]

        # Phase 1: polite request.
        try:
            os.kill(pid, signal.SIGTERM)
        except OSError:
            # Process already gone
            self.remove_pid_file()
            return True

        # Phase 2: poll in 100 ms steps for up to `timeout` seconds.
        for _ in range(timeout * 10):
            if not self.is_process_alive(pid):
                self.remove_pid_file()
                return True
            time.sleep(0.1)

        # Phase 3: force kill if still running.
        try:
            os.kill(pid, signal.SIGKILL)
        except OSError:
            pass

        self.remove_pid_file()
        return True
213
+
214
+
215
class PortManager:
    """Port management utilities for daemon."""

    DEFAULT_PORT = 8642
    MAX_PORT_RETRY = 100

    @staticmethod
    def is_port_in_use(port: int, host: str = "127.0.0.1") -> bool:
        """Probe ``host:port`` by attempting a bind.

        Args:
            port: Port number to check
            host: Host address to check

        Returns:
            True if the port could not be bound (i.e. it is in use).
        """
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            probe.bind((host, port))
        except OSError:
            return True
        else:
            return False
        finally:
            probe.close()

    @classmethod
    def find_available_port(
        cls, start_port: int = DEFAULT_PORT, host: str = "127.0.0.1", max_retry: int = MAX_PORT_RETRY
    ) -> int:
        """Scan upward from ``start_port`` for the first free port.

        Args:
            start_port: Starting port number
            host: Host address to bind
            max_retry: Maximum number of ports to try

        Returns:
            The first available port number.

        Raises:
            PIDFileError: If every candidate in the range is occupied.
        """
        candidate = start_port
        limit = start_port + max_retry
        while candidate < limit:
            if not cls.is_port_in_use(candidate, host):
                return candidate
            candidate += 1

        raise PIDFileError(
            f"No available port found in range {start_port}-{start_port + max_retry - 1}"
        )

    @classmethod
    def get_port_with_fallback(
        cls, preferred_port: int = DEFAULT_PORT, host: str = "127.0.0.1", auto_increment: bool = True
    ) -> int:
        """Return the preferred port when free, otherwise a fallback.

        Args:
            preferred_port: Preferred port to use
            host: Host address
            auto_increment: If True, find next available port; if False, raise error

        Returns:
            Port number to use

        Raises:
            PIDFileError: If preferred port is in use and auto_increment is False
        """
        if cls.is_port_in_use(preferred_port, host):
            if not auto_increment:
                raise PIDFileError(
                    f"Port {preferred_port} is already in use. "
                    f"Use --port to specify a different port."
                )
            # Preferred slot is taken — search upward starting just past it.
            return cls.find_available_port(preferred_port + 1, host)

        return preferred_port