monoco-toolkit 0.3.9__py3-none-any.whl → 0.3.11__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/config.py +38 -4
- monoco/core/git.py +23 -0
- monoco/core/hooks/builtin/git_cleanup.py +1 -1
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/injection.py +63 -29
- monoco/core/integrations.py +2 -2
- monoco/core/loader.py +633 -0
- monoco/core/output.py +5 -5
- monoco/core/registry.py +34 -19
- monoco/core/resource/__init__.py +5 -0
- monoco/core/resource/finder.py +98 -0
- monoco/core/resource/manager.py +91 -0
- monoco/core/resource/models.py +35 -0
- monoco/core/skill_framework.py +292 -0
- monoco/core/skills.py +524 -385
- monoco/core/sync.py +73 -1
- monoco/core/workflow_converter.py +420 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +236 -0
- monoco/daemon/services.py +185 -0
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +2 -2
- monoco/features/agent/adapter.py +41 -0
- monoco/features/agent/apoptosis.py +44 -0
- monoco/features/agent/cli.py +101 -144
- monoco/features/agent/config.py +35 -21
- monoco/features/agent/defaults.py +6 -49
- monoco/features/agent/engines.py +32 -6
- monoco/features/agent/manager.py +47 -6
- monoco/features/agent/models.py +2 -2
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +61 -0
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +73 -0
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +55 -0
- monoco/features/agent/resources/atoms/atom-review.yaml +60 -0
- monoco/{core/resources/en → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +3 -1
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_engineer/SKILL.md +94 -0
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_manager/SKILL.md +93 -0
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_planner/SKILL.md +85 -0
- monoco/features/agent/resources/en/skills/monoco_workflow_agent_reviewer/SKILL.md +114 -0
- monoco/features/agent/resources/workflows/workflow-dev.yaml +83 -0
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +72 -0
- monoco/features/agent/resources/workflows/workflow-review.yaml +94 -0
- monoco/features/agent/resources/zh/roles/monoco_role_engineer.yaml +49 -0
- monoco/features/agent/resources/zh/roles/monoco_role_manager.yaml +46 -0
- monoco/features/agent/resources/zh/roles/monoco_role_planner.yaml +46 -0
- monoco/features/agent/resources/zh/roles/monoco_role_reviewer.yaml +47 -0
- monoco/{core/resources/zh → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +3 -1
- monoco/features/agent/resources/{skills/flow_engineer → zh/skills/monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/{skills/flow_manager → zh/skills/monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_planner/SKILL.md +259 -0
- monoco/features/agent/resources/zh/skills/monoco_workflow_agent_reviewer/SKILL.md +137 -0
- monoco/features/agent/session.py +59 -11
- monoco/features/agent/worker.py +38 -2
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/__init__.py +0 -0
- monoco/features/glossary/adapter.py +42 -0
- monoco/features/glossary/config.py +5 -0
- monoco/features/glossary/resources/en/AGENTS.md +29 -0
- monoco/features/glossary/resources/en/skills/monoco_atom_glossary/SKILL.md +35 -0
- monoco/features/glossary/resources/zh/AGENTS.md +29 -0
- monoco/features/glossary/resources/zh/skills/monoco_atom_glossary/SKILL.md +35 -0
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/{SKILL.md → skills/monoco_atom_i18n/SKILL.md} +3 -1
- monoco/features/i18n/resources/en/skills/monoco_workflow_i18n_scan/SKILL.md +105 -0
- monoco/features/i18n/resources/zh/{SKILL.md → skills/monoco_atom_i18n/SKILL.md} +3 -1
- monoco/features/i18n/resources/{skills/i18n_scan_workflow → zh/skills/monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +281 -7
- monoco/features/issue/core.py +272 -19
- monoco/features/issue/engine/machine.py +118 -5
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +3 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/{SKILL.md → skills/monoco_atom_issue/SKILL.md} +3 -1
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_creation/SKILL.md +167 -0
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_development/SKILL.md +224 -0
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_management/SKILL.md +159 -0
- monoco/features/issue/resources/en/skills/monoco_workflow_issue_refinement/SKILL.md +203 -0
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/{SKILL.md → skills/monoco_atom_issue_lifecycle/SKILL.md} +3 -1
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_creation/SKILL.md +167 -0
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_development/SKILL.md +224 -0
- monoco/features/issue/resources/{skills/issue_lifecycle_workflow → zh/skills/monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/monoco_workflow_issue_refinement/SKILL.md +203 -0
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/monoco_atom_memo/SKILL.md +77 -0
- monoco/features/memo/resources/en/skills/monoco_workflow_note_processing/SKILL.md +140 -0
- monoco/features/memo/resources/zh/{SKILL.md → skills/monoco_atom_memo/SKILL.md} +3 -1
- monoco/features/memo/resources/{skills/note_processing_workflow → zh/skills/monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/resources/en/{SKILL.md → skills/monoco_atom_spike/SKILL.md} +3 -1
- monoco/features/spike/resources/en/skills/monoco_workflow_research/SKILL.md +121 -0
- monoco/features/spike/resources/zh/{SKILL.md → skills/monoco_atom_spike/SKILL.md} +3 -1
- monoco/features/spike/resources/{skills/research_workflow → zh/skills/monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- monoco_toolkit-0.3.11.dist-info/METADATA +130 -0
- monoco_toolkit-0.3.11.dist-info/RECORD +181 -0
- monoco/features/agent/reliability.py +0 -106
- monoco/features/agent/resources/skills/flow_reviewer/SKILL.md +0 -114
- monoco_toolkit-0.3.9.dist-info/METADATA +0 -127
- monoco_toolkit-0.3.9.dist-info/RECORD +0 -115
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.11.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.11.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.11.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/validator.py CHANGED

@@ -28,9 +28,19 @@ class IssueValidator:
         current_project: Optional[str] = None,
         workspace_root: Optional[str] = None,
         valid_domains: Set[str] = set(),
+        all_issues: List[IssueMetadata] = None,
     ) -> List[Diagnostic]:
         """
         Validate an issue and return diagnostics.
+
+        Args:
+            meta: Issue metadata to validate
+            content: Full content of the issue file
+            all_issue_ids: Set of all issue IDs in the project (for reference validation)
+            current_project: Current project name
+            workspace_root: Workspace root project name
+            valid_domains: Set of valid domain names
+            all_issues: List of all IssueMetadata objects (for domain governance checks, FEAT-0136)
         """
         diagnostics = []
         self._current_project = current_project
@@ -65,6 +75,9 @@
         # 1. State Matrix Validation
         diagnostics.extend(self._validate_state_matrix(meta, content))

+        # 1.5 Directory Consistency (FEAT-0144)
+        diagnostics.extend(self._validate_directory_consistency(meta, content))
+
         # 2. State Requirements (Strict Verification)
         diagnostics.extend(self._validate_state_requirements(meta, blocks))

@@ -80,7 +93,7 @@
         # 5.5 Domain Integrity
         diagnostics.extend(
             self._validate_domains(
-                meta, content, all_issue_ids, valid_domains=valid_domains
+                meta, content, all_issue_ids, valid_domains=valid_domains, all_issues=all_issues
             )
         )

@@ -151,6 +164,62 @@
                 return i
         return 0

+    def _validate_directory_consistency(
+        self, meta: IssueMetadata, content: str
+    ) -> List[Diagnostic]:
+        """
+        Check if the issue status matches its physical directory.
+        Checks for illegal directory names (like 'done' or 'freezed').
+        """
+        diagnostics = []
+        if not meta.path:
+            return diagnostics
+
+        path = Path(meta.path)
+        parent_dir_name = path.parent.name.lower()
+
+        # 1. Status/Directory Mismatch
+        if meta.status == "open" and parent_dir_name == "closed":
+            diagnostics.append(
+                self._create_diagnostic(
+                    f"Status/Directory Mismatch: Issue '{meta.id}' has status 'open' but is in 'closed/' directory.",
+                    DiagnosticSeverity.Error,
+                )
+            )
+        elif meta.status == "closed" and parent_dir_name == "open":
+            diagnostics.append(
+                self._create_diagnostic(
+                    f"Status/Directory Mismatch: Issue '{meta.id}' has status 'closed' but is in 'open/' directory.",
+                    DiagnosticSeverity.Error,
+                )
+            )
+
+        # 2. Illegal Directory Names
+        if parent_dir_name == "done":
+            diagnostics.append(
+                self._create_diagnostic(
+                    "Illegal Directory: Issues should be in 'closed/' directory, not 'done/'.",
+                    DiagnosticSeverity.Error,
+                )
+            )
+        elif parent_dir_name == "freezed":
+            diagnostics.append(
+                self._create_diagnostic(
+                    "Illegal Directory: Issues should be in 'backlog/' directory, not 'freezed/'.",
+                    DiagnosticSeverity.Error,
+                )
+            )
+
+        return diagnostics
+
+    def _validate_status_enum(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
+        """Legacy helper for enum validation (mostly handled by Pydantic now)."""
+        return []
+
+    def _validate_stage_enum(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
+        """Legacy helper for enum validation (mostly handled by Pydantic now)."""
+        return []
+
     def _validate_state_matrix(
         self, meta: IssueMetadata, content: str
     ) -> List[Diagnostic]:
@@ -629,6 +698,7 @@
         content: str,
         all_ids: Set[str] = set(),
         valid_domains: Set[str] = set(),
+        all_issues: List[IssueMetadata] = None,
     ) -> List[Diagnostic]:
         diagnostics = []
         # Check if 'domains' field exists in frontmatter text
@@ -674,6 +744,36 @@
             )
         )

+        # FEAT-0136: Scale-Aware Domain Governance
+        # Rule: If Total Issues > 128 or Total Epics > 32, enforce strict Domain coverage
+        is_large_scale = num_issues > 128 or num_epics > 32
+
+        # FEAT-0136: Domain Auto-Inheritance Logic
+        # If child issue has no domains but parent has, treat as inherited (logical inheritance)
+        if meta.type != "epic" and not meta.domains and meta.parent and all_issues and is_large_scale:
+            parent_issue = None
+            for issue in all_issues:
+                if issue.id == meta.parent:
+                    parent_issue = issue
+                    break
+
+            if parent_issue and parent_issue.domains:
+                # Parent has domains, child inherits them logically
+                # This is not an error - it's valid inheritance
+                pass
+            elif parent_issue and not parent_issue.domains:
+                # Parent also has no domains in large scale mode - this is an error
+                line = self._get_field_line(content, "domains")
+                diagnostics.append(
+                    self._create_diagnostic(
+                        f"Domain Governance: Issue '{meta.id}' has no domains assigned. "
+                        f"Parent '{meta.parent}' also has no domains. In large-scale projects, "
+                        f"at least 75% of Epics must have domains for children to inherit.",
+                        DiagnosticSeverity.Error,
+                        line=line if line > 0 else field_line,
+                    )
+                )
+
         # Domain Content Validation
         # If valid_domains is provided (from file scan), use it as strict source of truth
         if hasattr(meta, "domains") and meta.domains:
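As a plain restatement of the FEAT-0144 rule introduced above, the self-contained sketch below reproduces the directory check outside the validator. It is an illustration only, assuming nothing about the package beyond the rule itself; the helper name and return type are invented for the example.

```python
# Illustration of the FEAT-0144 directory rule; not the package's API.
from pathlib import Path

def directory_problems(issue_path: str, status: str) -> list[str]:
    """Return the problems the validator above would flag for this path/status pair."""
    parent = Path(issue_path).parent.name.lower()
    problems = []
    if status == "open" and parent == "closed":
        problems.append("status 'open' but file is under closed/")
    elif status == "closed" and parent == "open":
        problems.append("status 'closed' but file is under open/")
    if parent == "done":
        problems.append("use closed/, not done/")
    elif parent == "freezed":
        problems.append("use backlog/, not freezed/")
    return problems

print(directory_problems("Issues/open/FEAT-0144.md", "closed"))
# -> ["status 'closed' but file is under open/"]
```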
monoco/features/memo/adapter.py CHANGED

@@ -1,19 +1,32 @@
 from pathlib import Path
 from typing import Dict
-from monoco.core.
+from monoco.core.loader import FeatureModule, FeatureMetadata
+from monoco.core.feature import IntegrationData


-class MemoFeature(
-
-    def name(self) -> str:
-        return "memo"
+class MemoFeature(FeatureModule):
+    """Memo (fleeting notes) feature module with unified lifecycle support."""

-
-
-
+    @property
+    def metadata(self) -> FeatureMetadata:
+        return FeatureMetadata(
+            name="memo",
+            version="1.0.0",
+            description="Fleeting notes and quick idea capture",
+            dependencies=["core"],
+            priority=40,
+            lazy=True,  # Can be lazy loaded
+        )
+
+    def _on_mount(self, context: "FeatureContext") -> None:  # type: ignore
+        """Memo feature doesn't require explicit initialization.
+
+        The inbox is created on first use.
+        """
         pass

     def integrate(self, root: Path, config: Dict) -> IntegrationData:
+        """Provide integration data for agent environment."""
         # Determine language from config, default to 'en'
         lang = config.get("i18n", {}).get("source_lang", "en")

monoco/features/memo/cli.py CHANGED

@@ -4,7 +4,7 @@ from typing import Optional
 from rich.console import Console
 from rich.table import Table
 from monoco.core.config import get_config
-from .core import add_memo,
+from .core import add_memo, load_memos, delete_memo, update_memo, get_inbox_path, validate_content_language

 app = typer.Typer(help="Manage memos (fleeting notes).")
 console = Console()
@@ -26,6 +26,12 @@ def add_command(
     context: Optional[str] = typer.Option(
         None, "--context", "-c", help="Context reference (e.g. file:line)."
     ),
+    type: str = typer.Option(
+        "insight", "--type", "-t", help="Type of memo (insight, bug, feature, task)."
+    ),
+    source: str = typer.Option(
+        "cli", "--source", "-s", help="Source of the memo."
+    ),
     force: bool = typer.Option(
         False, "--force", "-f", help="Bypass i18n language validation."
     ),
@@ -47,36 +53,75 @@ def add_command(
         )
         raise typer.Exit(code=1)

-
+    # TODO: Get actual user name if possible
+    author = "User"
+
+    uid = add_memo(
+        issues_root,
+        content,
+        context=context,
+        author=author,
+        source=source,
+        memo_type=type
+    )

     console.print(f"[green]✔ Memo recorded.[/green] ID: [bold]{uid}[/bold]")


 @app.command("list")
-def list_command(
+def list_command(
+    status: Optional[str] = typer.Option(None, "--status", help="Filter by status (pending, tracked, resolved)."),
+    limit: int = typer.Option(None, "--limit", "-n", help="Limit number of memos shown.")
+):
     """
     List all memos in the inbox.
     """
     issues_root = get_issues_root()

-    memos =
+    memos = load_memos(issues_root)
+
+    if status:
+        memos = [m for m in memos if m.status == status]

     if not memos:
-        console.print("No memos found.
+        console.print("No memos found.")
         return
+
+    # Reverse sort by timestamp (newest first) usually?
+    # But file is appended. Let's show newest at bottom (log style) or newest at top?
+    # Usually list shows content. Newest at bottom is standard for logs, but for "Inbox" maybe newest top?
+    # Let's keep file order (oldest first) unless user asks otherwise, or maybe reverse it for "Inbox" feel?
+    # Let's reverse it to see latest first.
+    memos.reverse()
+
+    if limit:
+        memos = memos[:limit]

     table = Table(title="Memo Inbox")
     table.add_column("ID", style="cyan", no_wrap=True)
-    table.add_column("
+    table.add_column("Stat", style="yellow", width=4)
+    table.add_column("Type", style="magenta", width=8)
+    table.add_column("Ref", style="blue")
     table.add_column("Content")

     for memo in memos:
         # Truncate content for list view
-        content_preview = memo
-        if len(
+        content_preview = memo.content.split("\n")[0]
+        if len(content_preview) > 50:
             content_preview = content_preview[:47] + "..."
-
-
+
+        status_icon = " "
+        if memo.status == "pending": status_icon = "P"
+        elif memo.status == "tracked": status_icon = "T"
+        elif memo.status == "resolved": status_icon = "✔"
+
+        table.add_row(
+            memo.uid,
+            status_icon,
+            memo.type,
+            memo.ref or "",
+            content_preview
+        )

     console.print(table)

@@ -110,3 +155,51 @@ def delete_command(
     else:
         console.print(f"[red]Error: Memo with ID [bold]{memo_id}[/bold] not found.[/red]")
         raise typer.Exit(code=1)
+
+
+@app.command("link")
+def link_command(
+    memo_id: str = typer.Argument(..., help="Memo ID"),
+    issue_id: str = typer.Argument(..., help="Issue ID to link to")
+):
+    """
+    Link a memo to an issue (Traceability).
+    Sets status to 'tracked'.
+    """
+    issues_root = get_issues_root()
+
+    updates = {
+        "status": "tracked",
+        "ref": issue_id
+    }
+
+    if update_memo(issues_root, memo_id, updates):
+        console.print(f"[green]✔ Memo {memo_id} linked to {issue_id}.[/green]")
+    else:
+        console.print(f"[red]Error: Memo {memo_id} not found.[/red]")
+        raise typer.Exit(code=1)
+
+
+@app.command("resolve")
+def resolve_command(
+    memo_id: str = typer.Argument(..., help="Memo ID")
+):
+    """
+    Mark a memo as resolved.
+    """
+    issues_root = get_issues_root()
+
+    updates = {
+        "status": "resolved"
+    }
+
+    if update_memo(issues_root, memo_id, updates):
+        console.print(f"[green]✔ Memo {memo_id} resolved.[/green]")
+    else:
+        console.print(f"[red]Error: Memo {memo_id} not found.[/red]")
+        raise typer.Exit(code=1)
+
+
+
+
+
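Read together, the new options and commands above can be exercised as in the sketch below, which drives the Typer `app` directly with `typer.testing.CliRunner`. The subcommand names and flags come from the diff; passing the memo content as a positional argument to `add`, and the way this app is mounted into the top-level `monoco` CLI, are assumptions not shown in this hunk.

```python
# Hedged usage sketch: flags and subcommands mirror the diff above; the
# positional content argument of `add` is assumed, not visible in the hunk.
from typer.testing import CliRunner
from monoco.features.memo.cli import app

runner = CliRunner()
runner.invoke(app, ["add", "Cache invalidation looks racy", "--type", "bug", "--context", "src/cache.py:42"])
runner.invoke(app, ["list", "--status", "pending", "--limit", "5"])
runner.invoke(app, ["link", "a1b2c3", "FEAT-0136"])  # marks the memo 'tracked' and stores the issue ref
runner.invoke(app, ["resolve", "a1b2c3"])
```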
monoco/features/memo/core.py CHANGED

@@ -1,9 +1,10 @@
 import re
 from pathlib import Path
-from
-from typing import List, Dict, Optional
+from typing import List, Optional, Any
 import secrets
+from datetime import datetime

+from .models import Memo

 def is_chinese(text: str) -> bool:
     """Check if the text contains at least one Chinese character."""
@@ -18,7 +19,6 @@ def validate_content_language(content: str, source_lang: str) -> bool:
     if source_lang == "zh":
         return is_chinese(content)
     # For 'en', we generally allow everything but could be more strict.
-    # Requirement is mainly about enforcing 'zh' when configured.
     return True


@@ -27,120 +27,206 @@ def get_memos_dir(issues_root: Path) -> Path:
     Get the directory for memos.
     Convention: Sibling of Issues directory.
     """
-    # issues_root is usually ".../Issues"
     return issues_root.parent / "Memos"

-
 def get_inbox_path(issues_root: Path) -> Path:
     return get_memos_dir(issues_root) / "inbox.md"

-
 def generate_memo_id() -> str:
     """Generate a short 6-char ID."""
     return secrets.token_hex(3)

-
-def format_memo(uid: str, content: str, context: Optional[str] = None) -> str:
-    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-    header = f"## [{uid}] {timestamp}"
-
-    body = content.strip()
-
-    if context:
-        body = f"> **Context**: `{context}`\n\n{body}"
-
-    return f"\n{header}\n{body}\n"
-
-
-def add_memo(issues_root: Path, content: str, context: Optional[str] = None) -> str:
+def parse_memo_block(block: str) -> Optional[Memo]:
     """
-
-
+    Parse a text block into a Memo object.
+    Block format:
+    ## [uid] YYYY-MM-DD HH:MM:SS
+    - **Key**: Value
+    ...
+    Content
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    lines = block.strip().split("\n")
+    if not lines:
+        return None
+
+    header = lines[0]
+    match = re.match(r"^## \[([a-f0-9]+)\] (.*?)$", header)
+    if not match:
+        return None
+
+    uid = match.group(1)
+    ts_str = match.group(2)
+    try:
+        timestamp = datetime.strptime(ts_str, "%Y-%m-%d %H:%M:%S")
+    except ValueError:
+        timestamp = datetime.now()  # Fallback
+
+    content_lines = []
+    metadata = {}
+
+    # Simple state machine
+    # 0: Header (done)
+    # 1: Metadata
+    # 2: Content
+
+    state = 1
+
+    for line in lines[1:]:
+        stripped = line.strip()
+        if state == 1:
+            if not stripped:
+                continue
+            # Check for metadata line: - **Key**: Value
+            meta_match = re.match(r"^\- \*\*([a-zA-Z]+)\*\*: (.*)$", stripped)
+            if meta_match:
+                key = meta_match.group(1).lower()
+                val = meta_match.group(2).strip()
+                metadata[key] = val
+            else:
+                # First non-metadata line marks start of content
+                state = 2
+                content_lines.append(line)
+        elif state == 2:
+            content_lines.append(line)
+
+    content = "\n".join(content_lines).strip()
+
+    # Map metadata to model fields
+    # Status map reverse
+    status_raw = metadata.get("status", "[ ] Pending")
+    status = "pending"
+    if "[x] Tracked" in status_raw:
+        status = "tracked"
+    elif "[x] Resolved" in status_raw:
+        status = "resolved"
+    elif "[-] Dismissed" in status_raw:
+        status = "dismissed"
+
+    return Memo(
+        uid=uid,
+        timestamp=timestamp,
+        content=content,
+        author=metadata.get("from", "User"),
+        source=metadata.get("source", "cli"),
+        type=metadata.get("type", "insight"),
+        status=status,
+        ref=metadata.get("ref"),
+        context=metadata.get("context")  # Note: context might need cleanup if it was wrapped in code blocks
+    )
+
+def load_memos(issues_root: Path) -> List[Memo]:
     """
-    Parse memos from inbox.
+    Parse all memos from inbox.
     """
     inbox_path = get_inbox_path(issues_root)
     if not inbox_path.exists():
         return []

     content = inbox_path.read_text(encoding="utf-8")
-
-    #
-    # We split
-
-
-
+
+    # Split by headers: ## [uid]
+    # We use a lookahead or just standard split carefully
+    parts = re.split(r"(^## \[)", content, flags=re.MULTILINE)[1:]  # Skip preamble
+
+    # parts will be like: ['## [', 'abc] 2023...\n...', '## [', 'def] ...']
+    # Reassemble pairs
+    blocks = []
+    for i in range(0, len(parts), 2):
+        if i+1 < len(parts):
+            blocks.append(parts[i] + parts[i+1])
+
     memos = []
-
-
-
-
-
-
-            start = match.end()
-            end = matches[i + 1].start() if i + 1 < len(matches) else len(content)
-
-            body = content[start:end].strip()
-
-            memos.append({"id": uid, "timestamp": timestamp, "content": body})
-
+    for block in blocks:
+        memo = parse_memo_block(block)
+        if memo:
+            memos.append(memo)
+
+    # Sort by timestamp desc? Or keep file order? File order is usually append (time asc).
     return memos

-
-def delete_memo(issues_root: Path, memo_id: str) -> bool:
+def save_memos(issues_root: Path, memos: List[Memo]) -> None:
     """
-
-
+    Rewrite the inbox file with the given list of memos.
+    """
+    inbox_path = get_inbox_path(issues_root)
+
+    # Header
+    lines = ["# Monoco Memos Inbox", ""]
+
+    for memo in memos:
+        lines.append(memo.to_markdown().strip())
+        lines.append("")  # Spacer
+
+    inbox_path.write_text("\n".join(lines), encoding="utf-8")
+
+
+def add_memo(
+    issues_root: Path,
+    content: str,
+    context: Optional[str] = None,
+    author: str = "User",
+    source: str = "cli",
+    memo_type: str = "insight"
+) -> str:
+    """
+    Append a memo to the inbox.
+    Returns the generated UID.
     """
+    uid = generate_memo_id()
+    memo = Memo(
+        uid=uid,
+        content=content,
+        context=context,
+        author=author,
+        source=source,
+        type=memo_type
+    )
+
+    # Append mode is more robust against concurrent reads than rewrite,
+    # but for consistent formatting we might want to just append string.
     inbox_path = get_inbox_path(issues_root)
+
     if not inbox_path.exists():
-
-
-
-
+        inbox_path.parent.mkdir(parents=True, exist_ok=True)
+        inbox_path.write_text("# Monoco Memos Inbox\n\n", encoding="utf-8")
+
+    with inbox_path.open("a", encoding="utf-8") as f:
+        f.write("\n" + memo.to_markdown().strip() + "\n")
+
+    return uid

-
-
-
-
-
+def update_memo(issues_root: Path, memo_id: str, updates: dict) -> bool:
+    """
+    Update a memo's fields.
+    """
+    memos = load_memos(issues_root)
+    found = False
+    for i, m in enumerate(memos):
+        if m.uid == memo_id:
+            # Apply updates
+            updated_data = m.model_dump()
+            updated_data.update(updates)
+            memos[i] = Memo(**updated_data)  # Re-validate
+            found = True
             break
+
+    if found:
+        save_memos(issues_root, memos)
+
+    return found

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        else:
-            end = len(content)
-
-    new_content = content[:start] + content[end:]
-    inbox_path.write_text(new_content, encoding="utf-8")
-    return True
+def delete_memo(issues_root: Path, memo_id: str) -> bool:
+    """
+    Delete a memo by its ID.
+    """
+    memos = load_memos(issues_root)
+    initial_count = len(memos)
+    memos = [m for m in memos if m.uid != memo_id]
+
+    if len(memos) < initial_count:
+        save_memos(issues_root, memos)
+        return True
+    return False
+
+# Compatibility shim
+list_memos = load_memos