monoco-toolkit 0.3.1__py3-none-any.whl → 0.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. monoco/core/config.py +35 -0
  2. monoco/core/integrations.py +0 -6
  3. monoco/core/resources/en/AGENTS.md +25 -0
  4. monoco/core/resources/en/SKILL.md +32 -1
  5. monoco/core/resources/zh/AGENTS.md +25 -0
  6. monoco/core/resources/zh/SKILL.md +32 -0
  7. monoco/core/sync.py +6 -19
  8. monoco/features/i18n/core.py +31 -11
  9. monoco/features/issue/commands.py +24 -1
  10. monoco/features/issue/core.py +90 -39
  11. monoco/features/issue/domain/models.py +1 -0
  12. monoco/features/issue/domain_commands.py +47 -0
  13. monoco/features/issue/domain_service.py +69 -0
  14. monoco/features/issue/linter.py +119 -11
  15. monoco/features/issue/validator.py +47 -0
  16. monoco/features/scheduler/__init__.py +19 -0
  17. monoco/features/scheduler/cli.py +204 -0
  18. monoco/features/scheduler/config.py +32 -0
  19. monoco/features/scheduler/defaults.py +54 -0
  20. monoco/features/scheduler/manager.py +49 -0
  21. monoco/features/scheduler/models.py +24 -0
  22. monoco/features/scheduler/reliability.py +99 -0
  23. monoco/features/scheduler/session.py +87 -0
  24. monoco/features/scheduler/worker.py +129 -0
  25. monoco/main.py +4 -0
  26. {monoco_toolkit-0.3.1.dist-info → monoco_toolkit-0.3.3.dist-info}/METADATA +1 -1
  27. {monoco_toolkit-0.3.1.dist-info → monoco_toolkit-0.3.3.dist-info}/RECORD +30 -24
  28. monoco/core/agent/__init__.py +0 -3
  29. monoco/core/agent/action.py +0 -168
  30. monoco/core/agent/adapters.py +0 -133
  31. monoco/core/agent/protocol.py +0 -32
  32. monoco/core/agent/state.py +0 -106
  33. {monoco_toolkit-0.3.1.dist-info → monoco_toolkit-0.3.3.dist-info}/WHEEL +0 -0
  34. {monoco_toolkit-0.3.1.dist-info → monoco_toolkit-0.3.3.dist-info}/entry_points.txt +0 -0
  35. {monoco_toolkit-0.3.1.dist-info → monoco_toolkit-0.3.3.dist-info}/licenses/LICENSE +0 -0
monoco/core/config.py CHANGED
@@ -133,6 +133,40 @@ class IssueSchemaConfig(BaseModel):
  return self
 
 
+ class DomainItem(BaseModel):
+ name: str = Field(..., description="Canonical domain name (e.g. backend.auth)")
+ description: Optional[str] = Field(None, description="Description of the domain")
+ aliases: List[str] = Field(default_factory=list, description="List of aliases")
+
+
+ class DomainConfig(BaseModel):
+ items: List[DomainItem] = Field(
+ default_factory=list, description="List of defined domains"
+ )
+ strict: bool = Field(
+ default=False, description="If True, only allow defined domains"
+ )
+
+ def merge(self, other: "DomainConfig") -> "DomainConfig":
+ if not other:
+ return self
+
+ # Merge items by name
+ if other.items:
+ item_map = {item.name: item for item in self.items}
+ for item in other.items:
+ # Overwrite or merge aliases? Let's overwrite for simplicity/consistency
+ item_map[item.name] = item
+ self.items = list(item_map.values())
+
+ # Strict mode: logic? maybe strict overrides?
+ # Let's say if ANY config asks for strict, it is strict? Or last one wins (project)?
+ # Default merge is usually override.
+ self.strict = other.strict
+
+ return self
+
+
  class StateMachineConfig(BaseModel):
  transitions: List[TransitionConfig]
 
@@ -155,6 +189,7 @@ class MonocoConfig(BaseModel):
  )
 
  issue: IssueSchemaConfig = Field(default_factory=IssueSchemaConfig)
+ domains: DomainConfig = Field(default_factory=DomainConfig)
 
  @staticmethod
  def _deep_merge(base: Dict[str, Any], update: Dict[str, Any]) -> Dict[str, Any]:
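The merge above is last-writer-wins, keyed by canonical domain name, and the incoming config also decides `strict`. A minimal sketch of that behavior, using trimmed-down copies of the new models purely for illustration (the real classes live in `monoco/core/config.py` and carry full `Field` descriptions):

```python
from typing import List, Optional
from pydantic import BaseModel, Field

# Trimmed-down copies of the models added in 0.3.3, for illustration only.
class DomainItem(BaseModel):
    name: str
    description: Optional[str] = None
    aliases: List[str] = Field(default_factory=list)

class DomainConfig(BaseModel):
    items: List[DomainItem] = Field(default_factory=list)
    strict: bool = False

    def merge(self, other: "DomainConfig") -> "DomainConfig":
        if not other:
            return self
        if other.items:
            item_map = {item.name: item for item in self.items}
            for item in other.items:
                item_map[item.name] = item  # same-named incoming item replaces the base item wholesale
            self.items = list(item_map.values())
        self.strict = other.strict  # incoming (project-level) config wins on strict
        return self

base = DomainConfig(items=[DomainItem(name="backend.auth", aliases=["auth"])])
project = DomainConfig(items=[DomainItem(name="backend.auth", description="Auth & sessions")], strict=True)
merged = base.merge(project)
print([i.model_dump() for i in merged.items], merged.strict)
# The base item's aliases are dropped, because items are overwritten by name rather than deep-merged.
```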
monoco/core/integrations.py CHANGED
@@ -134,12 +134,6 @@ DEFAULT_INTEGRATIONS: Dict[str, AgentIntegration] = {
  bin_name="kimi",
  version_cmd="--version",
  ),
- "agent": AgentIntegration(
- key="agent",
- name="Antigravity",
- system_prompt_file="GEMINI.md",
- skill_root_dir=".agent/skills/",
- ),
  }
 
 
monoco/core/resources/en/AGENTS.md CHANGED
@@ -6,3 +6,28 @@ Core toolkit commands for project management.
  - **Config**: `monoco config get|set <key> [value]` (Manage configuration)
  - **Sync**: `monoco sync` (Synchronize with agent environments)
  - **Uninstall**: `monoco uninstall` (Clean up agent integrations)
+
+ ---
+
+ ## ⚠️ Agent Must-Read: Git Workflow
+
+ Before modifying any code, **MUST** follow these steps:
+
+ ### Standard Process
+
+ 1. **Create Issue**: `monoco issue create feature -t "Feature Title"`
+ 2. **🔒 Start Isolation**: `monoco issue start FEAT-XXX --branch`
+ - ⚠️ **Required** `--branch` flag
+ - ❌ Never modify code directly on `main`/`master` branches
+ 3. **Implement**: Code and test normally
+ 4. **Sync Files**: `monoco issue sync-files` (must run before commit)
+ 5. **Submit Review**: `monoco issue submit FEAT-XXX`
+ 6. **Close Issue**: `monoco issue close FEAT-XXX --solution implemented`
+
+ ### Quality Gates
+
+ - Git Hooks auto-run `monoco issue lint` and tests
+ - Don't bypass with `git commit --no-verify`
+ - Linter blocks direct modifications on protected branches
+
+ > 📖 See `monoco-issue` skill for complete workflow documentation.
monoco/core/resources/en/SKILL.md CHANGED
@@ -34,7 +34,6 @@ Monoco is a developer productivity toolkit that provides:
  ### Agent Integration
 
  - **`monoco sync`**: Synchronize with agent environments
-
  - Injects system prompts into agent configuration files (GEMINI.md, CLAUDE.md, etc.)
  - Distributes skills to agent framework directories
  - Respects language configuration from `i18n.source_lang`
@@ -43,6 +42,24 @@ Monoco is a developer productivity toolkit that provides:
  - Removes managed blocks from agent configuration files
  - Cleans up distributed skills
 
+ ### Git Workflow Integration
+
+ Monoco enforces a **Feature Branch Workflow** to ensure code isolation and quality:
+
+ - **`monoco init`**: Automatically installs Git Hooks
+ - **pre-commit**: Runs Issue Linter and code formatting checks
+ - **pre-push**: Executes test suite and integrity validation
+ - All hooks configurable via `.monoco/config.yaml`
+
+ - **Branch Isolation Strategy**:
+ - ⚠️ **Required**: Use `monoco issue start <ID> --branch` to create isolated environment
+ - Auto-generates normalized branch names: `feat/<id>-<slug>`
+ - **Main Protection**: Linter blocks direct code modifications on `main`/`master` branches
+
+ - **File Tracking**: `monoco issue sync-files` auto-syncs Git changes to Issue metadata
+
+ > 📖 **Detailed Workflow**: See `monoco-issue` skill for complete Issue lifecycle management guide.
+
  ## Configuration Structure
 
  Configuration is stored in YAML format at:
@@ -60,7 +77,21 @@ Key configuration sections:
 
  ## Best Practices
 
+ ### Basic Operations
+
  1. **Use CLI commands** instead of manual file editing when possible
  2. **Run `monoco sync`** after configuration changes to update agent environments
  3. **Commit `.monoco/config.yaml`** to version control for team consistency
  4. **Keep global config minimal** - most settings should be project-specific
+
+ ### Git Workflow (⚠️ CRITICAL for Agents)
+
+ 5. **Strictly follow branch isolation**:
+ - ✅ Always use: `monoco issue start <ID> --branch`
+ - ❌ Never modify code directly on `main`/`master` branches
+ - 📝 Before commit: Run `monoco issue sync-files` to update file tracking
+
+ 6. **Quality Gates**:
+ - Git Hooks will auto-run checks, don't bypass (`--no-verify`)
+ - Ensure `monoco issue lint` passes before committing
+ - Use `monoco issue submit` to generate delivery reports
monoco/core/resources/zh/AGENTS.md CHANGED
@@ -6,3 +6,28 @@
  - **配置**: `monoco config get|set <key> [value]` (管理配置)
  - **同步**: `monoco sync` (与 agent 环境同步)
  - **卸载**: `monoco uninstall` (清理 agent 集成)
+
+ ---
+
+ ## ⚠️ Agent 必读: Git 工作流
+
+ 在修改任何代码前,**必须**遵循以下步骤:
+
+ ### 标准流程
+
+ 1. **创建 Issue**: `monoco issue create feature -t "功能标题"`
+ 2. **🔒 启动隔离环境**: `monoco issue start FEAT-XXX --branch`
+ - ⚠️ **强制要求** `--branch` 参数
+ - ❌ 禁止在 `main`/`master` 分支直接修改代码
+ 3. **实现功能**: 正常编码和测试
+ 4. **同步文件**: `monoco issue sync-files` (提交前必须运行)
+ 5. **提交审查**: `monoco issue submit FEAT-XXX`
+ 6. **关闭 Issue**: `monoco issue close FEAT-XXX --solution implemented`
+
+ ### 质量门禁
+
+ - Git Hooks 会自动运行 `monoco issue lint` 和测试
+ - 不要使用 `git commit --no-verify` 绕过检查
+ - Linter 会阻止在受保护分支上的直接修改
+
+ > 📖 详见 `monoco-issue` skill 获取完整工作流文档。
monoco/core/resources/zh/SKILL.md CHANGED
@@ -42,6 +42,24 @@ Monoco 是一个开发者生产力工具包,提供:
  - 从 agent 配置文件中移除托管块
  - 清理已分发的 skills
 
+ ### Git 工作流集成
+
+ Monoco 强制执行 **Feature Branch 工作流**以确保代码隔离和质量:
+
+ - **`monoco init`**: 自动安装 Git Hooks
+ - **pre-commit**: 运行 Issue Linter 和代码格式检查
+ - **pre-push**: 执行测试套件和完整性验证
+ - 所有 Hooks 可通过 `.monoco/config.yaml` 配置
+
+ - **分支隔离策略**:
+ - ⚠️ **强制要求**: 使用 `monoco issue start <ID> --branch` 创建隔离环境
+ - 自动创建规范化分支名: `feat/<id>-<slug>`
+ - **主干保护**: Linter 会阻止在 `main`/`master` 分支上的直接代码修改
+
+ - **文件追踪**: `monoco issue sync-files` 自动同步 Git 变更到 Issue 元数据
+
+ > 📖 **详细工作流**: 参见 `monoco-issue` skill 获取完整的 Issue 生命周期管理指南。
+
  ## 配置结构
 
  配置以 YAML 格式存储在:
@@ -59,7 +77,21 @@ Monoco 是一个开发者生产力工具包,提供:
 
  ## 最佳实践
 
+ ### 基础操作
+
  1. **优先使用 CLI 命令**,而不是手动编辑文件
  2. **配置更改后运行 `monoco sync`** 以更新 agent 环境
  3. **将 `.monoco/config.yaml` 提交到版本控制**,保持团队一致性
  4. **保持全局配置最小化** - 大多数设置应该是项目特定的
+
+ ### Git 工作流 (⚠️ CRITICAL for Agents)
+
+ 5. **严格遵循分支隔离**:
+ - ✅ 始终使用: `monoco issue start <ID> --branch`
+ - ❌ 禁止在 `main`/`master` 分支直接修改代码
+ - 📝 提交前运行: `monoco issue sync-files` 更新文件追踪
+
+ 6. **质量门禁**:
+ - Git Hooks 会自动运行检查,不要尝试绕过 (`--no-verify`)
+ - 提交前确保 `monoco issue lint` 通过
+ - 使用 `monoco issue submit` 生成交付报告
monoco/core/sync.py CHANGED
@@ -20,15 +20,9 @@ def _get_targets(root: Path, config, cli_target: Optional[Path]) -> List[Path]:
  targets.append(cli_target)
  return targets
 
- # 2. Config Targets
- if config.agent.targets:
- for t in config.agent.targets:
- targets.append(root / t)
- return targets
-
- # 3. Registry Defaults (Dynamic Detection)
+ # 2. Registry Defaults (Dynamic Detection)
  integrations = get_active_integrations(
- root, config_overrides=config.agent.integrations, auto_detect=True
+ root, config_overrides=None, auto_detect=True
  )
 
  if integrations:
@@ -69,16 +63,9 @@ def sync_command(
  # 2. Collect Data
  collected_prompts = {}
 
- # Filter features based on config if specified
+ # Filter features based on config if specified (Deprecated: agent config removed)
  all_features = registry.get_features()
- active_features = []
-
- if config.agent.includes:
- for f in all_features:
- if f.name in config.agent.includes:
- active_features.append(f)
- else:
- active_features = all_features
+ active_features = all_features
 
  with console.status("[bold green]Collecting feature integration data...") as status:
  for feature in active_features:
@@ -109,7 +96,7 @@ def sync_command(
 
  # Get active integrations
  integrations = get_active_integrations(
- root, config_overrides=config.agent.integrations, auto_detect=True
+ root, config_overrides=None, auto_detect=True
  )
 
  if integrations:
@@ -231,7 +218,7 @@ def uninstall_command(
 
  # Get active integrations
  integrations = get_active_integrations(
- root, config_overrides=config.agent.integrations, auto_detect=True
+ root, config_overrides=None, auto_detect=True
  )
 
  if integrations:
monoco/features/i18n/core.py CHANGED
@@ -24,6 +24,13 @@ DEFAULT_EXCLUDES = [
  ".vscode",
  ".idea",
  ".fleet",
+ ".vscode-test",
+ ".cache",
+ ".mypy_cache",
+ ".pytest_cache",
+ ".ruff_cache",
+ ".tox",
+ ".nox",
  # System Prompts & Agent Configs
  "AGENTS.md",
  "CLAUDE.md",
@@ -55,17 +62,16 @@ def load_gitignore_patterns(root: Path) -> List[str]:
 
 
  def is_excluded(
- path: Path, root: Path, patterns: List[str], excludes: Optional[List[str]] = None
+ path: Path, root: Path, patterns: List[str], excludes_set: Optional[set] = None
  ) -> bool:
  """Check if a path should be excluded based on patterns and defaults."""
  rel_path = str(path.relative_to(root))
 
- final_excludes = excludes if excludes is not None else DEFAULT_EXCLUDES
-
  # 1. Check default excludes (exact match for any path component, case-insensitive)
- for part in path.parts:
- if part.lower() in [e.lower() for e in final_excludes]:
- return True
+ if excludes_set:
+ for part in path.parts:
+ if part.lower() in excludes_set:
+ return True
 
  # 2. Check gitignore patterns
  for pattern in patterns:
@@ -98,11 +104,25 @@ def discover_markdown_files(root: Path, include_issues: bool = False) -> List[Pa
  if not include_issues:
  excludes.append("Issues")
 
- # We walk to ensure we can skip directories early if needed,
- # but for now rglob + filter is simpler.
- for p in root.rglob("*.md"):
- if p.is_file() and not is_excluded(p, root, patterns, excludes=excludes):
- all_md_files.append(p)
+ # Pre-calculate lowercase set for performance
+ excludes_set = {e.lower() for e in excludes}
+
+ # Use walk to skip excluded directories early
+ for current_root, dirs, files in root.walk():
+ # Filter directories in-place to skip excluded ones
+ dirs[:] = [
+ d
+ for d in dirs
+ if not is_excluded(
+ current_root / d, root, patterns, excludes_set=excludes_set
+ )
+ ]
+
+ for file in files:
+ if file.endswith(".md"):
+ p = current_root / file
+ if not is_excluded(p, root, patterns, excludes_set=excludes_set):
+ all_md_files.append(p)
 
  return sorted(all_md_files)
 
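A side note on the rewritten discovery loop: `Path.walk()` only exists on Python 3.12+, so this hunk assumes a recent interpreter. The same early-pruning idea can be expressed with `os.walk` on older versions; a rough, self-contained sketch (the exclude set and matching here are simplified stand-ins, while the real `is_excluded` also honors `.gitignore` patterns):

```python
import os
from pathlib import Path

# Simplified stand-in for the package's exclude handling, for illustration only.
EXCLUDES = {".git", ".venv", "node_modules", ".pytest_cache", ".ruff_cache"}

def discover_markdown(root: Path) -> list[Path]:
    found: list[Path] = []
    for current_root, dirs, files in os.walk(root):
        # Prune excluded directories in place so os.walk never descends into them,
        # mirroring the dirs[:] filtering in the hunk above.
        dirs[:] = [d for d in dirs if d.lower() not in EXCLUDES]
        for name in files:
            if name.endswith(".md"):
                found.append(Path(current_root) / name)
    return sorted(found)

if __name__ == "__main__":
    for path in discover_markdown(Path(".")):
        print(path)
```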
monoco/features/issue/commands.py CHANGED
@@ -17,6 +17,9 @@ backlog_app = typer.Typer(help="Manage backlog operations.")
  lsp_app = typer.Typer(help="LSP Server commands.")
  app.add_typer(backlog_app, name="backlog")
  app.add_typer(lsp_app, name="lsp")
+ from . import domain_commands
+
+ app.add_typer(domain_commands.app, name="domain")
  console = Console()
 
 
@@ -45,12 +48,12 @@ def create(
  ),
  sprint: Optional[str] = typer.Option(None, "--sprint", help="Sprint ID"),
  tags: List[str] = typer.Option([], "--tag", help="Tags"),
+ domains: List[str] = typer.Option([], "--domain", help="Domains"),
  root: Optional[str] = typer.Option(
  None, "--root", help="Override issues root directory"
  ),
  json: AgentOutput = False,
  ):
- """Create a new issue."""
  """Create a new issue."""
  config = get_config()
  issues_root = _resolve_issues_root(config, root)
@@ -79,6 +82,7 @@ def create(
  stage=stage,
  dependencies=dependencies,
  related=related,
+ domains=domains,
  subdir=subdir,
  sprint=sprint,
  tags=tags,
@@ -99,6 +103,25 @@ def create(
  )
  console.print(f"Path: {rel_path}")
 
+ # Prompt for Language
+ target_langs = config.i18n.target_langs
+ primary_lang = target_langs[0] if target_langs else "en"
+
+ # Simple mapping for display
+ lang_display = {
+ "zh": "Chinese (Simplified)",
+ "en": "English",
+ "ja": "Japanese",
+ }.get(primary_lang, primary_lang)
+
+ console.print(
+ f"\n[bold yellow]Agent Hint:[/bold yellow] Please fill the ticket content in [bold cyan]{lang_display}[/bold cyan]."
+ )
+
+ except ValueError as e:
+ OutputManager.error(str(e))
+ raise typer.Exit(code=1)
+
  except ValueError as e:
  OutputManager.error(str(e))
  raise typer.Exit(code=1)
monoco/features/issue/core.py CHANGED
@@ -53,28 +53,108 @@ def _get_slug(title: str) -> str:
  return slug
 
 
- def parse_issue(file_path: Path) -> Optional[IssueMetadata]:
+ def parse_issue(file_path: Path, raise_error: bool = False) -> Optional[IssueMetadata]:
  if not file_path.suffix == ".md":
  return None
 
  content = file_path.read_text()
  match = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
  if not match:
+ if raise_error:
+ raise ValueError(f"No frontmatter found in {file_path.name}")
  return None
 
  try:
  data = yaml.safe_load(match.group(1))
  if not isinstance(data, dict):
+ if raise_error:
+ raise ValueError(f"Frontmatter is not a dictionary in {file_path.name}")
  return None
 
  data["path"] = str(file_path.absolute())
  meta = IssueMetadata(**data)
  meta.actions = get_available_actions(meta)
  return meta
- except Exception:
+ except Exception as e:
+ if raise_error:
+ raise e
  return None
 
 
+ def _serialize_metadata(metadata: IssueMetadata) -> str:
+ """
+ Centralized serialization logic to ensure explicit fields and correct ordering.
+ """
+ # Serialize metadata
+ # We want explicit fields even if None/Empty to enforce schema awareness
+ data = metadata.model_dump(
+ exclude_none=True, mode="json", exclude={"actions", "path"}
+ )
+
+ # Force explicit keys if missing (due to exclude_none or defaults)
+ if "parent" not in data:
+ data["parent"] = None
+ if "dependencies" not in data:
+ data["dependencies"] = []
+ if "related" not in data:
+ data["related"] = []
+ if "domains" not in data:
+ data["domains"] = []
+ if "files" not in data:
+ data["files"] = []
+
+ # Custom YAML Dumper to preserve None as 'null' and order
+ # Helper to order keys: id, uid, type, status, stage, title, ... graph ...
+ # Simple sort isn't enough, we rely on insertion order (Python 3.7+)
+ ordered_data = {
+ k: data[k]
+ for k in [
+ "id",
+ "uid",
+ "type",
+ "status",
+ "stage",
+ "title",
+ "created_at",
+ "updated_at",
+ ]
+ if k in data
+ }
+ # Add graph fields
+ for k in [
+ "priority",
+ "parent",
+ "dependencies",
+ "related",
+ "domains",
+ "tags",
+ "files",
+ ]:
+ if k in data:
+ ordered_data[k] = data[k]
+ elif k in ["dependencies", "related", "domains", "tags", "files"]:
+ ordered_data[k] = []
+ elif k == "parent":
+ ordered_data[k] = None
+
+ # Add remaining
+ for k, v in data.items():
+ if k not in ordered_data:
+ ordered_data[k] = v
+
+ yaml_header = yaml.dump(
+ ordered_data, sort_keys=False, allow_unicode=True, default_flow_style=False
+ )
+
+ # Inject Comments for guidance (replace keys with key+comment)
+ if "parent" in ordered_data and ordered_data["parent"] is None:
+ yaml_header = yaml_header.replace(
+ "parent: null", "parent: null # <EPIC-ID> Optional"
+ )
+
+ return yaml_header
+
+
  def parse_issue_detail(file_path: Path) -> Optional[IssueDetail]:
  if not file_path.suffix == ".md":
  return None
@@ -127,6 +207,7 @@ def create_issue_file(
  stage: Optional[IssueStage] = None,
  dependencies: List[str] = [],
  related: List[str] = [],
+ domains: List[str] = [],
  subdir: Optional[str] = None,
  sprint: Optional[str] = None,
  tags: List[str] = [],
@@ -149,8 +230,7 @@ def create_issue_file(
 
 
  target_dir.mkdir(parents=True, exist_ok=True)
- # Auto-Populate Tags with required IDs (Requirement: Maintain tags field with parent/deps/related/self IDs)
- # Ensure they are prefixed with '#' for tagging convention if not present (usually tags are just strings, but user asked for #ID)
+ # Auto-Populate Tags with required IDs
  auto_tags = set(tags) if tags else set()
 
  # 1. Add Parent
@@ -165,15 +245,14 @@ def create_issue_file(
  for rel in related:
  auto_tags.add(f"#{rel}")
 
- # 4. Add Self (as per instruction "auto add this issue... number")
- # Note: issue_id is generated just above
+ # 4. Add Self
  auto_tags.add(f"#{issue_id}")
 
  final_tags = sorted(list(auto_tags))
 
  metadata = IssueMetadata(
  id=issue_id,
- uid=generate_uid(), # Generate global unique identifier
+ uid=generate_uid(),
  type=issue_type,
  status=status,
  stage=stage,
@@ -181,43 +260,23 @@ def create_issue_file(
  parent=parent,
  dependencies=dependencies,
  related=related,
+ domains=domains,
  sprint=sprint,
  tags=final_tags,
  opened_at=current_time() if status == IssueStatus.OPEN else None,
  )
 
- # Enforce lifecycle policies (defaults, auto-corrections)
+ # Enforce lifecycle policies
  from .engine import get_engine
 
  get_engine().enforce_policy(metadata)
 
  # Serialize metadata
- # Explicitly exclude actions and path from file persistence
- yaml_header = yaml.dump(
- metadata.model_dump(
- exclude_none=True, mode="json", exclude={"actions", "path"}
- ),
- sort_keys=False,
- allow_unicode=True,
- )
-
- # Inject Self-Documenting Hints (Interactive Frontmatter)
- if "parent:" not in yaml_header:
- yaml_header += "# parent: <EPIC-ID> # Optional: Parent Issue ID\n"
- if "solution:" not in yaml_header:
- yaml_header += "# solution: null # Required for Closed state (implemented, cancelled, etc.)\n"
-
- if "dependencies:" not in yaml_header:
- yaml_header += "# dependencies: [] # List of dependency IDs\n"
- if "related:" not in yaml_header:
- yaml_header += "# related: [] # List of related issue IDs\n"
- if "files:" not in yaml_header:
- yaml_header += "# files: [] # List of modified files\n"
+ yaml_header = _serialize_metadata(metadata)
 
  slug = _get_slug(title)
  filename = f"{issue_id}-{slug}.md"
 
- # Enhanced Template with Instructional Comments
  file_content = f"""---
  {yaml_header}---
 
@@ -249,7 +308,6 @@ def create_issue_file(
  file_path = target_dir / filename
  file_path.write_text(file_content)
 
- # Inject path into returned metadata
  metadata.path = str(file_path.absolute())
 
  return metadata, file_path
@@ -512,14 +570,7 @@ def update_issue(
  raise ValueError(f"Failed to validate updated metadata: {e}")
 
  # Serialize back
- # Explicitly exclude actions and path from file persistence
- new_yaml = yaml.dump(
- updated_meta.model_dump(
- exclude_none=True, mode="json", exclude={"actions", "path"}
- ),
- sort_keys=False,
- allow_unicode=True,
- )
+ new_yaml = _serialize_metadata(updated_meta)
 
  # Reconstruct File
  match_header = re.search(r"^---(.*?)---", content, re.DOTALL | re.MULTILINE)
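The practical effect of `_serialize_metadata` is easier to see on a plain dict: `yaml.dump(..., sort_keys=False)` preserves insertion order, so building `ordered_data` by hand is what pins the frontmatter layout and keeps `parent`, `dependencies`, `domains`, and `files` visible even when empty. A rough illustration with made-up values (a plain dict stands in for the real `IssueMetadata` model):

```python
import yaml

# Plain-dict stand-in for the ordered payload that _serialize_metadata builds; IDs and values are invented.
ordered_data = {
    "id": "FEAT-0042",
    "type": "feature",
    "status": "open",
    "title": "Add domain ontology",
    "parent": None,            # kept explicit instead of being dropped by exclude_none
    "dependencies": [],
    "related": [],
    "domains": ["backend.auth"],
    "tags": ["#FEAT-0042"],
    "files": [],
}

yaml_header = yaml.dump(ordered_data, sort_keys=False, allow_unicode=True, default_flow_style=False)
# Mirror the guidance comment injected when parent is empty.
yaml_header = yaml_header.replace("parent: null", "parent: null # <EPIC-ID> Optional")
print(f"---\n{yaml_header}---")
```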
monoco/features/issue/domain/models.py CHANGED
@@ -113,6 +113,7 @@ class IssueFrontmatter(BaseModel):
  parent: Optional[str] = None
  dependencies: List[str] = Field(default_factory=list)
  related: List[str] = Field(default_factory=list)
+ domains: List[str] = Field(default_factory=list)
  tags: List[str] = Field(default_factory=list)
  solution: Optional[IssueSolution] = None
  isolation: Optional[IssueIsolation] = None
monoco/features/issue/domain_commands.py ADDED
@@ -0,0 +1,47 @@
+ import typer
+ from rich.table import Table
+ from rich.console import Console
+ from monoco.features.issue.domain_service import DomainService
+
+ app = typer.Typer(help="Manage domain ontology.")
+ console = Console()
+
+
+ @app.command("list")
+ def list_domains():
+ """List defined domains and aliases."""
+ service = DomainService()
+ config = service.config
+
+ table = Table(title=f"Domain Ontology (Strict: {config.strict})")
+ table.add_column("Canonical Name", style="bold cyan")
+ table.add_column("Description", style="white")
+ table.add_column("Aliases", style="yellow")
+
+ for item in config.items:
+ table.add_row(
+ item.name,
+ item.description or "",
+ ", ".join(item.aliases) if item.aliases else "-",
+ )
+
+ console.print(table)
+
+
+ @app.command("check")
+ def check_domain(domain: str = typer.Argument(..., help="Domain name to check")):
+ """Check if a domain is valid and resolve it."""
+ service = DomainService()
+
+ if service.is_canonical(domain):
+ console.print(f"[green]✔ '{domain}' is a canonical domain.[/green]")
+ elif service.is_alias(domain):
+ canonical = service.get_canonical(domain)
+ console.print(f"[yellow]➜ '{domain}' is an alias for '{canonical}'.[/yellow]")
+ else:
+ if service.config.strict:
+ console.print(f"[red]✘ '{domain}' is NOT a valid domain.[/red]")
+ else:
+ console.print(
+ f"[yellow]⚠ '{domain}' is undefined (Strict Mode: OFF).[/yellow]"
+ )