monoco-toolkit 0.2.8__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. monoco/cli/project.py +35 -31
  2. monoco/cli/workspace.py +26 -16
  3. monoco/core/agent/__init__.py +0 -2
  4. monoco/core/agent/action.py +44 -20
  5. monoco/core/agent/adapters.py +20 -16
  6. monoco/core/agent/protocol.py +5 -4
  7. monoco/core/agent/state.py +21 -21
  8. monoco/core/config.py +90 -33
  9. monoco/core/execution.py +21 -16
  10. monoco/core/feature.py +8 -5
  11. monoco/core/git.py +61 -30
  12. monoco/core/hooks.py +57 -0
  13. monoco/core/injection.py +47 -44
  14. monoco/core/integrations.py +50 -35
  15. monoco/core/lsp.py +12 -1
  16. monoco/core/output.py +35 -16
  17. monoco/core/registry.py +3 -2
  18. monoco/core/setup.py +190 -124
  19. monoco/core/skills.py +121 -107
  20. monoco/core/state.py +12 -10
  21. monoco/core/sync.py +85 -56
  22. monoco/core/telemetry.py +10 -6
  23. monoco/core/workspace.py +26 -19
  24. monoco/daemon/app.py +123 -79
  25. monoco/daemon/commands.py +14 -13
  26. monoco/daemon/models.py +11 -3
  27. monoco/daemon/reproduce_stats.py +8 -8
  28. monoco/daemon/services.py +32 -33
  29. monoco/daemon/stats.py +59 -40
  30. monoco/features/config/commands.py +38 -25
  31. monoco/features/i18n/adapter.py +4 -5
  32. monoco/features/i18n/commands.py +83 -49
  33. monoco/features/i18n/core.py +94 -54
  34. monoco/features/issue/adapter.py +6 -7
  35. monoco/features/issue/commands.py +468 -272
  36. monoco/features/issue/core.py +419 -312
  37. monoco/features/issue/domain/lifecycle.py +33 -23
  38. monoco/features/issue/domain/models.py +71 -38
  39. monoco/features/issue/domain/parser.py +92 -69
  40. monoco/features/issue/domain/workspace.py +19 -16
  41. monoco/features/issue/engine/__init__.py +3 -3
  42. monoco/features/issue/engine/config.py +18 -25
  43. monoco/features/issue/engine/machine.py +72 -39
  44. monoco/features/issue/engine/models.py +4 -2
  45. monoco/features/issue/linter.py +287 -157
  46. monoco/features/issue/lsp/definition.py +26 -19
  47. monoco/features/issue/migration.py +45 -34
  48. monoco/features/issue/models.py +29 -13
  49. monoco/features/issue/monitor.py +24 -8
  50. monoco/features/issue/resources/en/SKILL.md +6 -2
  51. monoco/features/issue/validator.py +395 -208
  52. monoco/features/skills/__init__.py +0 -1
  53. monoco/features/skills/core.py +24 -18
  54. monoco/features/spike/adapter.py +4 -5
  55. monoco/features/spike/commands.py +51 -38
  56. monoco/features/spike/core.py +24 -16
  57. monoco/main.py +34 -21
  58. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.1.dist-info}/METADATA +1 -1
  59. monoco_toolkit-0.3.1.dist-info/RECORD +84 -0
  60. monoco_toolkit-0.2.8.dist-info/RECORD +0 -83
  61. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.1.dist-info}/WHEEL +0 -0
  62. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.1.dist-info}/entry_points.txt +0 -0
  63. {monoco_toolkit-0.2.8.dist-info → monoco_toolkit-0.3.1.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/lsp/definition.py
@@ -2,7 +2,8 @@ from pathlib import Path
 from typing import Optional, List
 from monoco.core.lsp import Location, Position, Range
 from ..domain.parser import MarkdownParser
-from ..domain.workspace import WorkspaceSymbolIndex, IssueLocation
+from ..domain.workspace import WorkspaceSymbolIndex
+
 
 class DefinitionProvider:
     def __init__(self, workspace_root: Path):
@@ -18,25 +19,29 @@ class DefinitionProvider:
             return []
 
         content = file_path.read_text()
-
+
         # 1. Parse the document to find spans
         # We only need to find the span at the specific line
         issue = MarkdownParser.parse(content, path=str(file_path))
-
+
         target_span = None
         for block in issue.body.blocks:
             # Check if position is within block
-            # Note: block.line_start is inclusive, line_end is exclusive for content
+            # Note: block.line_start is inclusive, line_end is exclusive for content
            if block.line_start <= position.line < block.line_end:
-                for span in block.spans:
-                    if span.range.start.line == position.line:
-                        # Check character range
-                        if span.range.start.character <= position.character <= span.range.end.character:
-                            target_span = span
-                            break
+                for span in block.spans:
+                    if span.range.start.line == position.line:
+                        # Check character range
+                        if (
+                            span.range.start.character
+                            <= position.character
+                            <= span.range.end.character
+                        ):
+                            target_span = span
+                            break
             if target_span:
                 break
-
+
         if not target_span:
             return []
 
@@ -45,28 +50,30 @@ class DefinitionProvider:
         issue_id = target_span.metadata.get("issue_id")
         if issue_id:
             # Resolve using Workspace Index
-            location = self.index.resolve(issue_id, context_project=self._get_context_project(file_path))
+            location = self.index.resolve(
+                issue_id, context_project=self._get_context_project(file_path)
+            )
             if location:
                 return [
                     Location(
                         uri=f"file://{location.file_path}",
                         range=Range(
                             start=Position(line=0, character=0),
-                            end=Position(line=0, character=0)
-                        )
+                            end=Position(line=0, character=0),
+                        ),
                     )
                 ]
-
+
         return []
 
     def _get_context_project(self, file_path: Path) -> Optional[str]:
         # Simple heuristic: look for parent directory name if it's a known project structure?
-        # Or rely on configuration.
+        # Or rely on configuration.
         # For now, let's assume the index handles context if passed, or we pass None.
         # Actually resolving context project from file path is tricky without config loaded for that specific root.
         # Let's try to deduce from path relative to workspace root.
         try:
-            rel = file_path.relative_to(self.workspace_root)
-            return rel.parts[0]  # First dir is likely project name in a workspace
+            rel = file_path.relative_to(self.workspace_root)
+            return rel.parts[0]  # First dir is likely project name in a workspace
         except ValueError:
-            return "local"
+            return "local"
monoco/features/issue/migration.py
@@ -1,11 +1,7 @@
 import os
 import re
 import yaml
-import hashlib
-import secrets
 from pathlib import Path
-from typing import List, Dict, Any
-from datetime import datetime
 from .models import generate_uid
 
 # Migration Mappings
@@ -21,20 +17,13 @@ DIR_MAP = {
     "features": "Features",
     "chores": "Chores",
     "fixes": "Fixes",
-    "epics": "Epics"
+    "epics": "Epics",
 }
 
-TYPE_MAP = {
-    "story": "feature",
-    "task": "chore",
-    "bug": "fix"
-}
+TYPE_MAP = {"story": "feature", "task": "chore", "bug": "fix"}
+
+ID_PREFIX_MAP = {"STORY": "FEAT", "TASK": "CHORE", "BUG": "FIX"}
 
-ID_PREFIX_MAP = {
-    "STORY": "FEAT",
-    "TASK": "CHORE",
-    "BUG": "FIX"
-}
 
 def migrate_issues_directory(issues_dir: Path):
     """
@@ -48,7 +37,7 @@ def migrate_issues_directory(issues_dir: Path):
         old_path = issues_dir / old_name
         if old_path.exists():
             new_path = issues_dir / new_name
-
+
             # Case sensitivity check for some filesystems
             same_inode = False
             try:
@@ -64,6 +53,7 @@ def migrate_issues_directory(issues_dir: Path):
 
             if new_path.exists():
                 import shutil
+
                 for item in old_path.iterdir():
                     dest = new_path / item.name
                     if dest.exists() and item.is_dir():
@@ -81,20 +71,37 @@ def migrate_issues_directory(issues_dir: Path):
         subdir = issues_dir / subdir_name
         if not subdir.exists():
             continue
-
+
         for file_path in subdir.rglob("*.md"):
             content = file_path.read_text(encoding="utf-8")
             new_content = content
-
+
             # Replace Type in Frontmatter
             for old_type, new_type in TYPE_MAP.items():
-                new_content = re.sub(rf"^type:\s*{old_type}", f"type: {new_type}", new_content, flags=re.IGNORECASE | re.MULTILINE)
-
+                new_content = re.sub(
+                    rf"^type:\s*{old_type}",
+                    f"type: {new_type}",
+                    new_content,
+                    flags=re.IGNORECASE | re.MULTILINE,
+                )
+
             # Replace ID Prefixes
             for old_prefix, new_prefix in ID_PREFIX_MAP.items():
-                new_content = new_content.replace(f"[[{old_prefix}-", f"[[{new_prefix}-")
-                new_content = re.sub(rf"^id: {old_prefix}-", f"id: {new_prefix}-", new_content, flags=re.MULTILINE)
-                new_content = re.sub(rf"^parent: {old_prefix}-", f"parent: {new_prefix}-", new_content, flags=re.MULTILINE)
+                new_content = new_content.replace(
+                    f"[[{old_prefix}-", f"[[{new_prefix}-"
+                )
+                new_content = re.sub(
+                    rf"^id: {old_prefix}-",
+                    f"id: {new_prefix}-",
+                    new_content,
+                    flags=re.MULTILINE,
+                )
+                new_content = re.sub(
+                    rf"^parent: {old_prefix}-",
+                    f"parent: {new_prefix}-",
+                    new_content,
+                    flags=re.MULTILINE,
+                )
                 new_content = new_content.replace(f"{old_prefix}-", f"{new_prefix}-")
 
             # Structural Updates (UID, Stage)
@@ -104,18 +111,20 @@ def migrate_issues_directory(issues_dir: Path):
             try:
                 data = yaml.safe_load(yaml_str) or {}
                 changed = False
-
-                if 'uid' not in data:
-                    data['uid'] = generate_uid()
+
+                if "uid" not in data:
+                    data["uid"] = generate_uid()
                     changed = True
-
-                if 'stage' in data and data['stage'] == 'todo':
-                    data['stage'] = 'draft'
+
+                if "stage" in data and data["stage"] == "todo":
+                    data["stage"] = "draft"
                     changed = True
-
+
                 if changed:
-                    new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
-                    new_content = new_content.replace(match.group(1), "\n" + new_yaml)
+                    new_yaml = yaml.dump(data, sort_keys=False, allow_unicode=True)
+                    new_content = new_content.replace(
+                        match.group(1), "\n" + new_yaml
+                    )
             except yaml.YAMLError:
                 pass
 
@@ -127,8 +136,10 @@ def migrate_issues_directory(issues_dir: Path):
             new_filename = filename
             for old_prefix, new_prefix in ID_PREFIX_MAP.items():
                 if filename.startswith(f"{old_prefix}-"):
-                    new_filename = filename.replace(f"{old_prefix}-", f"{new_prefix}-", 1)
+                    new_filename = filename.replace(
+                        f"{old_prefix}-", f"{new_prefix}-", 1
+                    )
                     break
-
+
             if new_filename != filename:
                 file_path.rename(file_path.parent / new_filename)
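To see what these migration hunks do to an issue file, here is a dry run of the same TYPE_MAP substitution and the final catch-all prefix replacement on a made-up v0.2-era issue. The targeted ^id:/^parent: passes are omitted here because the broad prefix replacement already covers them in this sample:

```python
import re

TYPE_MAP = {"story": "feature", "task": "chore", "bug": "fix"}
ID_PREFIX_MAP = {"STORY": "FEAT", "TASK": "CHORE", "BUG": "FIX"}

# Hypothetical pre-migration issue file (illustrative, not from a real project).
sample = """---
id: STORY-0042
type: Story
parent: STORY-0007
---
Blocked by [[STORY-0001]].
"""

new_content = sample
# Frontmatter type rename: case-insensitive, anchored to line starts.
for old_type, new_type in TYPE_MAP.items():
    new_content = re.sub(
        rf"^type:\s*{old_type}",
        f"type: {new_type}",
        new_content,
        flags=re.IGNORECASE | re.MULTILINE,
    )
# Catch-all prefix rename (the real code also runs targeted ^id:/^parent: passes first).
for old_prefix, new_prefix in ID_PREFIX_MAP.items():
    new_content = new_content.replace(f"{old_prefix}-", f"{new_prefix}-")

assert "id: FEAT-0042" in new_content
assert "type: feature" in new_content
assert "[[FEAT-0001]]" in new_content
```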
monoco/features/issue/domain/models.py
@@ -10,6 +10,7 @@ class IssueID:
     """
     Helper for parsing Issue IDs that might be namespaced (e.g. 'toolkit::FEAT-0001').
     """
+
     def __init__(self, raw: str):
         self.raw = raw
         if "::" in raw:
@@ -22,7 +23,7 @@ class IssueID:
         if self.namespace:
             return f"{self.namespace}::{self.local_id}"
         return self.local_id
-
+
     def __repr__(self):
         return f"IssueID({self.raw})"
 
@@ -34,9 +35,11 @@ class IssueID:
         """Check if this ID matches another ID string."""
         return str(self) == other_id or (self.is_local and self.local_id == other_id)
 
+
 def current_time() -> datetime:
     return datetime.now().replace(microsecond=0)
 
+
 def generate_uid() -> str:
     """
     Generate a globally unique 6-character short hash for issue identity.
@@ -55,11 +58,13 @@ class IssueType(str, Enum):
     CHORE = "chore"
     FIX = "fix"
 
+
 class IssueStatus(str, Enum):
     OPEN = "open"
     CLOSED = "closed"
     BACKLOG = "backlog"
 
+
 class IssueStage(str, Enum):
     DRAFT = "draft"
     DOING = "doing"
@@ -67,43 +72,48 @@ class IssueStage(str, Enum):
     DONE = "done"
     FREEZED = "freezed"
 
+
 class IssueSolution(str, Enum):
     IMPLEMENTED = "implemented"
     CANCELLED = "cancelled"
     WONTFIX = "wontfix"
     DUPLICATE = "duplicate"
 
+
 class IsolationType(str, Enum):
     BRANCH = "branch"
     WORKTREE = "worktree"
 
+
 class IssueIsolation(BaseModel):
     type: str
     ref: str  # Git branch name
     path: Optional[str] = None  # Worktree path (relative to repo root or absolute)
     created_at: datetime = Field(default_factory=current_time)
 
+
 class IssueAction(BaseModel):
     label: str
     target_status: Optional[str] = None
     target_stage: Optional[str] = None
     target_solution: Optional[str] = None
     icon: Optional[str] = None
-
+
     # Generic execution extensions
     command: Optional[str] = None
     params: Dict[str, Any] = {}
 
+
 class IssueMetadata(BaseModel):
     model_config = {"extra": "allow"}
-
+
     id: str
     uid: Optional[str] = None  # Global unique identifier for cross-project identity
     type: str
     status: str = "open"
     stage: Optional[str] = None
     title: str
-
+
     # Time Anchors
     created_at: datetime = Field(default_factory=current_time)
     opened_at: Optional[datetime] = None
@@ -116,16 +126,16 @@ class IssueMetadata(BaseModel):
     isolation: Optional[IssueIsolation] = None
     dependencies: List[str] = []
     related: List[str] = []
+    domains: List[str] = []
     tags: List[str] = []
     files: List[str] = []
     path: Optional[str] = None  # Absolute path to the issue file
-
+
     # Proxy UI Actions (Excluded from file persistence)
     # Modified: Remove exclude=True to allow API/CLI inspection. Must be manually excluded during YAML Dump.
     actions: List[IssueAction] = Field(default=[])
 
-
-    @model_validator(mode='before')
+    @model_validator(mode="before")
     @classmethod
     def normalize_fields(cls, v: Any) -> Any:
         if isinstance(v, dict):
@@ -139,10 +149,13 @@ class IssueMetadata(BaseModel):
                 "Parent": "parent",
                 "Solution": "solution",
                 "Sprint": "sprint",
+                "Domains": "domains",
             }
             for old_k, new_k in field_map.items():
                 if old_k in v and new_k not in v:
-                    v[new_k] = v[old_k] # Don't pop yet to avoid mutation issues if used elsewhere, or pop if safe.
+                    v[new_k] = v[
+                        old_k
+                    ]  # Don't pop yet to avoid mutation issues if used elsewhere, or pop if safe.
             # Pydantic v2 mode='before' is usually a copy if we want to be safe, but let's just add it.
 
             # Normalize type and status to lowercase for compatibility
@@ -159,18 +172,21 @@ class IssueMetadata(BaseModel):
             v["stage"] = "draft"
         return v
 
-    @model_validator(mode='after')
-    def validate_lifecycle(self) -> 'IssueMetadata':
+    @model_validator(mode="after")
+    def validate_lifecycle(self) -> "IssueMetadata":
         # Logic Definition:
         # status: backlog -> stage: freezed
         # status: closed -> stage: done
         # status: open -> stage: draft | doing | review | done (default draft)
-
+
         # NOTE: We do NOT auto-correct state here anymore to allow Linter to detect inconsistencies.
         # Auto-correction should be applied explicitly by 'create' or 'update' commands via core logic.
-
+
         return self
 
+
 class IssueDetail(IssueMetadata):
     body: str = ""
-    raw_content: Optional[str] = None # Full file content including frontmatter for editing
+    raw_content: Optional[
+        str
+    ] = None  # Full file content including frontmatter for editing
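The formatting churn around IssueID does not change its parsing contract. A trimmed-down reconstruction of that contract, for illustration only: the split-on-`::` detail and the is_local helper are assumed from the methods visible in the hunks, not taken from the full source.

```python
from typing import Optional


class IssueID:
    """Sketch of the namespaced-ID contract: 'toolkit::FEAT-0001' vs plain 'FEAT-0001'."""

    def __init__(self, raw: str):
        self.raw = raw
        if "::" in raw:
            # Assumed: split on the first '::' into namespace and local id.
            self.namespace, self.local_id = raw.split("::", 1)
        else:
            self.namespace: Optional[str] = None
            self.local_id = raw

    @property
    def is_local(self) -> bool:
        # Assumed helper; the diff references self.is_local in matches().
        return self.namespace is None

    def __str__(self) -> str:
        if self.namespace:
            return f"{self.namespace}::{self.local_id}"
        return self.local_id

    def matches(self, other_id: str) -> bool:
        """Check if this ID matches another ID string (as in the hunk above)."""
        return str(self) == other_id or (self.is_local and self.local_id == other_id)


assert IssueID("toolkit::FEAT-0001").namespace == "toolkit"
assert IssueID("toolkit::FEAT-0001").local_id == "FEAT-0001"
assert IssueID("FEAT-0001").matches("FEAT-0001")
```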
monoco/features/issue/monitor.py
@@ -2,15 +2,21 @@ import re
 import asyncio
 import logging
 from pathlib import Path
-from typing import Callable, Awaitable, Any, Optional
+from typing import Callable, Awaitable
 
 from watchdog.observers import Observer
 from watchdog.events import FileSystemEventHandler
 
 logger = logging.getLogger("monoco.features.issue.monitor")
 
+
 class IssueEventHandler(FileSystemEventHandler):
-    def __init__(self, loop, on_upsert: Callable[[dict], Awaitable[None]], on_delete: Callable[[dict], Awaitable[None]]):
+    def __init__(
+        self,
+        loop,
+        on_upsert: Callable[[dict], Awaitable[None]],
+        on_delete: Callable[[dict], Awaitable[None]],
+    ):
         self.loop = loop
         self.on_upsert = on_upsert
         self.on_delete = on_delete
@@ -19,16 +25,17 @@ class IssueEventHandler(FileSystemEventHandler):
         if not path_str.endswith(".md"):
             return
         asyncio.run_coroutine_threadsafe(self._handle_upsert(path_str), self.loop)
-
+
     async def _handle_upsert(self, path_str: str):
         try:
             from monoco.features.issue.core import parse_issue
+
             path = Path(path_str)
             if not path.exists():
                 return
             issue = parse_issue(path)
             if issue:
-                await self.on_upsert(issue.model_dump(mode='json'))
+                await self.on_upsert(issue.model_dump(mode="json"))
         except Exception as e:
             logger.error(f"Error handling upsert for {path_str}: {e}")
 
@@ -54,7 +61,7 @@ class IssueEventHandler(FileSystemEventHandler):
     def on_modified(self, event):
         if not event.is_directory:
             self._process_upsert(event.src_path)
-
+
     def on_deleted(self, event):
         if not event.is_directory:
             self._process_delete(event.src_path)
@@ -64,11 +71,18 @@ class IssueEventHandler(FileSystemEventHandler):
         self._process_delete(event.src_path)
         self._process_upsert(event.dest_path)
 
+
 class IssueMonitor:
     """
     Monitor the Issues directory for changes using Watchdog and trigger callbacks.
     """
-    def __init__(self, issues_root: Path, on_upsert: Callable[[dict], Awaitable[None]], on_delete: Callable[[dict], Awaitable[None]]):
+
+    def __init__(
+        self,
+        issues_root: Path,
+        on_upsert: Callable[[dict], Awaitable[None]],
+        on_delete: Callable[[dict], Awaitable[None]],
+    ):
         self.issues_root = issues_root
         self.on_upsert = on_upsert
         self.on_delete = on_delete
@@ -78,9 +92,11 @@ class IssueMonitor:
     async def start(self):
         self.loop = asyncio.get_running_loop()
         event_handler = IssueEventHandler(self.loop, self.on_upsert, self.on_delete)
-
+
         if not self.issues_root.exists():
-            logger.warning(f"Issues root {self.issues_root} does not exist. creating...")
+            logger.warning(
+                f"Issues root {self.issues_root} does not exist. creating..."
+            )
             self.issues_root.mkdir(parents=True, exist_ok=True)
 
         self.observer.schedule(event_handler, str(self.issues_root), recursive=True)
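The pattern this file wraps is worth spelling out: watchdog delivers events on its own observer thread, so the handler must hand coroutines to the running asyncio loop via asyncio.run_coroutine_threadsafe rather than awaiting them directly. A self-contained sketch of just that bridge, with an illustrative path and callback:

```python
import asyncio
from pathlib import Path

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer


class Handler(FileSystemEventHandler):
    def __init__(self, loop: asyncio.AbstractEventLoop):
        self.loop = loop

    def on_modified(self, event):
        # Runs on the watchdog observer thread, not the asyncio loop.
        if not event.is_directory and str(event.src_path).endswith(".md"):
            asyncio.run_coroutine_threadsafe(self.notify(event.src_path), self.loop)

    async def notify(self, path):
        print(f"changed: {path}")


async def main():
    root = Path("Issues")  # illustrative directory to watch
    root.mkdir(exist_ok=True)
    observer = Observer()
    observer.schedule(Handler(asyncio.get_running_loop()), str(root), recursive=True)
    observer.start()
    try:
        await asyncio.sleep(10)  # watch for ten seconds, then shut down
    finally:
        observer.stop()
        observer.join()


if __name__ == "__main__":
    asyncio.run(main())
```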
monoco/features/issue/resources/en/SKILL.md
@@ -29,9 +29,13 @@ Use this skill to create and manage **Issues** (Universal Atoms) in Monoco proje
 
 Monoco enforces a **Feature Branch** model.
 
-- **Start**: Must use `monoco issue start <ID> --branch` to start working. This creates a `feat/<ID>-<slug>` branch.
+- **Start**: Agents **MUST** use `monoco issue start <ID> --branch` to start working.
+  - This creates and switches to a standard `feat/<ID>-<slug>` branch.
+  - **Do NOT** manually create branches using `git checkout -b`.
 - **Protected Main**: **NO** direct modification on `main`, `master`, or `production` branches. Linter will block this.
-- **Submit**: Run `monoco issue submit <ID>` before PR to clean up and validate.
+- **Submit**: Run `monoco issue submit <ID>` when work is ready for review.
+  - This moves the issue to `Review` stage and generates a Delivery Report.
+  - **Note**: This does **not** merge the code. You (or the user) must handle the Merge/PR process.
 
 ### 2. File Tracking
 
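Taken together, the updated skill text prescribes a start/submit lifecycle with the merge left to a human. A hypothetical driver script showing that sequence; the issue ID is invented, and only the two commands quoted in the skill text above are assumed to exist:

```python
import subprocess


def monoco(*args: str) -> None:
    subprocess.run(["monoco", *args], check=True)


monoco("issue", "start", "FEAT-0001", "--branch")  # creates/switches to feat/FEAT-0001-<slug>
# ... implement, commit on the feature branch ...
monoco("issue", "submit", "FEAT-0001")  # moves the issue to Review, generates a Delivery Report
# Merging / opening the PR stays a manual step, per the note above.
```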