monoco-toolkit 0.3.10-py3-none-any.whl → 0.3.12-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- monoco/__main__.py +8 -0
- monoco/core/artifacts/__init__.py +16 -0
- monoco/core/artifacts/manager.py +575 -0
- monoco/core/artifacts/models.py +161 -0
- monoco/core/automation/__init__.py +51 -0
- monoco/core/automation/config.py +338 -0
- monoco/core/automation/field_watcher.py +296 -0
- monoco/core/automation/handlers.py +723 -0
- monoco/core/config.py +31 -4
- monoco/core/executor/__init__.py +38 -0
- monoco/core/executor/agent_action.py +254 -0
- monoco/core/executor/git_action.py +303 -0
- monoco/core/executor/im_action.py +309 -0
- monoco/core/executor/pytest_action.py +218 -0
- monoco/core/git.py +38 -0
- monoco/core/hooks/context.py +74 -13
- monoco/core/ingestion/__init__.py +20 -0
- monoco/core/ingestion/discovery.py +248 -0
- monoco/core/ingestion/watcher.py +343 -0
- monoco/core/ingestion/worker.py +436 -0
- monoco/core/loader.py +633 -0
- monoco/core/registry.py +34 -25
- monoco/core/router/__init__.py +55 -0
- monoco/core/router/action.py +341 -0
- monoco/core/router/router.py +392 -0
- monoco/core/scheduler/__init__.py +63 -0
- monoco/core/scheduler/base.py +152 -0
- monoco/core/scheduler/engines.py +175 -0
- monoco/core/scheduler/events.py +171 -0
- monoco/core/scheduler/local.py +377 -0
- monoco/core/skills.py +119 -80
- monoco/core/watcher/__init__.py +57 -0
- monoco/core/watcher/base.py +365 -0
- monoco/core/watcher/dropzone.py +152 -0
- monoco/core/watcher/issue.py +303 -0
- monoco/core/watcher/memo.py +200 -0
- monoco/core/watcher/task.py +238 -0
- monoco/daemon/app.py +77 -1
- monoco/daemon/commands.py +10 -0
- monoco/daemon/events.py +34 -0
- monoco/daemon/mailroom_service.py +196 -0
- monoco/daemon/models.py +1 -0
- monoco/daemon/scheduler.py +207 -0
- monoco/daemon/services.py +27 -58
- monoco/daemon/triggers.py +55 -0
- monoco/features/agent/__init__.py +25 -7
- monoco/features/agent/adapter.py +17 -7
- monoco/features/agent/cli.py +91 -57
- monoco/features/agent/engines.py +31 -170
- monoco/{core/resources/en/skills/monoco_core → features/agent/resources/en/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/en/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/resources/{roles/role-engineer.yaml → zh/roles/monoco_role_engineer.yaml} +3 -3
- monoco/features/agent/resources/{roles/role-manager.yaml → zh/roles/monoco_role_manager.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-planner.yaml → zh/roles/monoco_role_planner.yaml} +8 -8
- monoco/features/agent/resources/{roles/role-reviewer.yaml → zh/roles/monoco_role_reviewer.yaml} +8 -8
- monoco/{core/resources/zh/skills/monoco_core → features/agent/resources/zh/skills/monoco_atom_core}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_engineer → monoco_workflow_agent_engineer}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_manager → monoco_workflow_agent_manager}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_planner → monoco_workflow_agent_planner}/SKILL.md +2 -2
- monoco/features/agent/resources/zh/skills/{flow_reviewer → monoco_workflow_agent_reviewer}/SKILL.md +2 -2
- monoco/features/agent/worker.py +1 -1
- monoco/features/artifact/__init__.py +0 -0
- monoco/features/artifact/adapter.py +33 -0
- monoco/features/artifact/resources/zh/AGENTS.md +14 -0
- monoco/features/artifact/resources/zh/skills/monoco_atom_artifact/SKILL.md +278 -0
- monoco/features/glossary/adapter.py +18 -7
- monoco/features/glossary/resources/en/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/glossary/resources/zh/skills/{monoco_glossary → monoco_atom_glossary}/SKILL.md +2 -2
- monoco/features/hooks/__init__.py +11 -0
- monoco/features/hooks/adapter.py +67 -0
- monoco/features/hooks/commands.py +309 -0
- monoco/features/hooks/core.py +441 -0
- monoco/features/hooks/resources/ADDING_HOOKS.md +234 -0
- monoco/features/i18n/adapter.py +18 -5
- monoco/features/i18n/core.py +482 -17
- monoco/features/i18n/resources/en/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/en/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{monoco_i18n → monoco_atom_i18n}/SKILL.md +2 -2
- monoco/features/i18n/resources/zh/skills/{i18n_scan_workflow → monoco_workflow_i18n_scan}/SKILL.md +2 -2
- monoco/features/issue/adapter.py +19 -6
- monoco/features/issue/commands.py +352 -20
- monoco/features/issue/core.py +475 -16
- monoco/features/issue/engine/machine.py +114 -4
- monoco/features/issue/linter.py +60 -5
- monoco/features/issue/models.py +2 -2
- monoco/features/issue/resources/en/AGENTS.md +109 -0
- monoco/features/issue/resources/en/skills/{monoco_issue → monoco_atom_issue}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/en/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/resources/hooks/post-checkout.sh +39 -0
- monoco/features/issue/resources/hooks/pre-commit.sh +41 -0
- monoco/features/issue/resources/hooks/pre-push.sh +35 -0
- monoco/features/issue/resources/zh/AGENTS.md +109 -0
- monoco/features/issue/resources/zh/skills/{monoco_issue → monoco_atom_issue_lifecycle}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_create_workflow → monoco_workflow_issue_creation}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_develop_workflow → monoco_workflow_issue_development}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_lifecycle_workflow → monoco_workflow_issue_management}/SKILL.md +2 -2
- monoco/features/issue/resources/zh/skills/{issue_refine_workflow → monoco_workflow_issue_refinement}/SKILL.md +2 -2
- monoco/features/issue/validator.py +101 -1
- monoco/features/memo/adapter.py +21 -8
- monoco/features/memo/cli.py +103 -10
- monoco/features/memo/core.py +178 -92
- monoco/features/memo/models.py +53 -0
- monoco/features/memo/resources/en/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/en/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{monoco_memo → monoco_atom_memo}/SKILL.md +2 -2
- monoco/features/memo/resources/zh/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md +2 -2
- monoco/features/spike/adapter.py +18 -5
- monoco/features/spike/commands.py +5 -3
- monoco/features/spike/resources/en/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{monoco_spike → monoco_atom_spike}/SKILL.md +2 -2
- monoco/features/spike/resources/zh/skills/{research_workflow → monoco_workflow_research}/SKILL.md +2 -2
- monoco/main.py +38 -1
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA +7 -1
- monoco_toolkit-0.3.12.dist-info/RECORD +202 -0
- monoco/features/agent/apoptosis.py +0 -44
- monoco/features/agent/manager.py +0 -91
- monoco/features/agent/session.py +0 -121
- monoco_toolkit-0.3.10.dist-info/RECORD +0 -156
- /monoco/{core → features/agent}/resources/en/AGENTS.md +0 -0
- /monoco/{core → features/agent}/resources/zh/AGENTS.md +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/licenses/LICENSE +0 -0
monoco/features/memo/core.py
CHANGED
@@ -1,9 +1,10 @@
 import re
 from pathlib import Path
-from
-from typing import List, Dict, Optional
+from typing import List, Optional, Any
 import secrets
+from datetime import datetime
 
+from .models import Memo
 
 def is_chinese(text: str) -> bool:
     """Check if the text contains at least one Chinese character."""
@@ -18,7 +19,6 @@ def validate_content_language(content: str, source_lang: str) -> bool:
     if source_lang == "zh":
         return is_chinese(content)
     # For 'en', we generally allow everything but could be more strict.
-    # Requirement is mainly about enforcing 'zh' when configured.
     return True
 
 
@@ -27,120 +27,206 @@ def get_memos_dir(issues_root: Path) -> Path:
     Get the directory for memos.
     Convention: Sibling of Issues directory.
     """
-    # issues_root is usually ".../Issues"
     return issues_root.parent / "Memos"
 
-
 def get_inbox_path(issues_root: Path) -> Path:
     return get_memos_dir(issues_root) / "inbox.md"
 
-
 def generate_memo_id() -> str:
     """Generate a short 6-char ID."""
     return secrets.token_hex(3)
 
-
-def format_memo(uid: str, content: str, context: Optional[str] = None) -> str:
-    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-    header = f"## [{uid}] {timestamp}"
-
-    body = content.strip()
-
-    if context:
-        body = f"> **Context**: `{context}`\n\n{body}"
-
-    return f"\n{header}\n{body}\n"
-
-
-def add_memo(issues_root: Path, content: str, context: Optional[str] = None) -> str:
+def parse_memo_block(block: str) -> Optional[Memo]:
     """
-
-
+    Parse a text block into a Memo object.
+    Block format:
+    ## [uid] YYYY-MM-DD HH:MM:SS
+    - **Key**: Value
+    ...
+    Content
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    lines = block.strip().split("\n")
+    if not lines:
+        return None
+
+    header = lines[0]
+    match = re.match(r"^## \[([a-f0-9]+)\] (.*?)$", header)
+    if not match:
+        return None
+
+    uid = match.group(1)
+    ts_str = match.group(2)
+    try:
+        timestamp = datetime.strptime(ts_str, "%Y-%m-%d %H:%M:%S")
+    except ValueError:
+        timestamp = datetime.now()  # Fallback
+
+    content_lines = []
+    metadata = {}
+
+    # Simple state machine
+    # 0: Header (done)
+    # 1: Metadata
+    # 2: Content
+
+    state = 1
+
+    for line in lines[1:]:
+        stripped = line.strip()
+        if state == 1:
+            if not stripped:
+                continue
+            # Check for metadata line: - **Key**: Value
+            meta_match = re.match(r"^\- \*\*([a-zA-Z]+)\*\*: (.*)$", stripped)
+            if meta_match:
+                key = meta_match.group(1).lower()
+                val = meta_match.group(2).strip()
+                metadata[key] = val
+            else:
+                # First non-metadata line marks start of content
+                state = 2
+                content_lines.append(line)
+        elif state == 2:
+            content_lines.append(line)
+
+    content = "\n".join(content_lines).strip()
+
+    # Map metadata to model fields
+    # Status map reverse
+    status_raw = metadata.get("status", "[ ] Pending")
+    status = "pending"
+    if "[x] Tracked" in status_raw:
+        status = "tracked"
+    elif "[x] Resolved" in status_raw:
+        status = "resolved"
+    elif "[-] Dismissed" in status_raw:
+        status = "dismissed"
+
+    return Memo(
+        uid=uid,
+        timestamp=timestamp,
+        content=content,
+        author=metadata.get("from", "User"),
+        source=metadata.get("source", "cli"),
+        type=metadata.get("type", "insight"),
+        status=status,
+        ref=metadata.get("ref"),
+        context=metadata.get("context")  # Note: context might need cleanup if it was wrapped in code blocks
+    )
+
+def load_memos(issues_root: Path) -> List[Memo]:
     """
-    Parse memos from inbox.
+    Parse all memos from inbox.
     """
     inbox_path = get_inbox_path(issues_root)
     if not inbox_path.exists():
         return []
 
     content = inbox_path.read_text(encoding="utf-8")
-
-    #
-    # We split
-
-
-
+
+    # Split by headers: ## [uid]
+    # We use a lookahead or just standard split carefully
+    parts = re.split(r"(^## \[)", content, flags=re.MULTILINE)[1:]  # Skip preamble
+
+    # parts will be like: ['## [', 'abc] 2023...\n...', '## [', 'def] ...']
+    # Reassemble pairs
+    blocks = []
+    for i in range(0, len(parts), 2):
+        if i+1 < len(parts):
+            blocks.append(parts[i] + parts[i+1])
+
     memos = []
-
-
-
-
-
-
-        start = match.end()
-        end = matches[i + 1].start() if i + 1 < len(matches) else len(content)
-
-        body = content[start:end].strip()
-
-        memos.append({"id": uid, "timestamp": timestamp, "content": body})
-
+    for block in blocks:
+        memo = parse_memo_block(block)
+        if memo:
+            memos.append(memo)
+
+    # Sort by timestamp desc? Or keep file order? File order is usually append (time asc).
     return memos
 
-
-def delete_memo(issues_root: Path, memo_id: str) -> bool:
+def save_memos(issues_root: Path, memos: List[Memo]) -> None:
     """
-
-
+    Rewrite the inbox file with the given list of memos.
+    """
+    inbox_path = get_inbox_path(issues_root)
+
+    # Header
+    lines = ["# Monoco Memos Inbox", ""]
+
+    for memo in memos:
+        lines.append(memo.to_markdown().strip())
+        lines.append("")  # Spacer
+
+    inbox_path.write_text("\n".join(lines), encoding="utf-8")
+
+
+def add_memo(
+    issues_root: Path,
+    content: str,
+    context: Optional[str] = None,
+    author: str = "User",
+    source: str = "cli",
+    memo_type: str = "insight"
+) -> str:
+    """
+    Append a memo to the inbox.
+    Returns the generated UID.
     """
+    uid = generate_memo_id()
+    memo = Memo(
+        uid=uid,
+        content=content,
+        context=context,
+        author=author,
+        source=source,
+        type=memo_type
+    )
+
+    # Append mode is more robust against concurrent reads than rewrite,
+    # but for consistent formatting we might want to just append string.
     inbox_path = get_inbox_path(issues_root)
+
     if not inbox_path.exists():
-
-
-
-
+        inbox_path.parent.mkdir(parents=True, exist_ok=True)
+        inbox_path.write_text("# Monoco Memos Inbox\n\n", encoding="utf-8")
+
+    with inbox_path.open("a", encoding="utf-8") as f:
+        f.write("\n" + memo.to_markdown().strip() + "\n")
+
+    return uid
 
-
-
-
-
-
+def update_memo(issues_root: Path, memo_id: str, updates: dict) -> bool:
+    """
+    Update a memo's fields.
+    """
+    memos = load_memos(issues_root)
+    found = False
+    for i, m in enumerate(memos):
+        if m.uid == memo_id:
+            # Apply updates
+            updated_data = m.model_dump()
+            updated_data.update(updates)
+            memos[i] = Memo(**updated_data)  # Re-validate
+            found = True
            break
+
+    if found:
+        save_memos(issues_root, memos)
+
+    return found
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        else:
-            end = len(content)
-
-    new_content = content[:start] + content[end:]
-    inbox_path.write_text(new_content, encoding="utf-8")
-    return True
+def delete_memo(issues_root: Path, memo_id: str) -> bool:
+    """
+    Delete a memo by its ID.
+    """
+    memos = load_memos(issues_root)
+    initial_count = len(memos)
+    memos = [m for m in memos if m.uid != memo_id]
+
+    if len(memos) < initial_count:
+        save_memos(issues_root, memos)
+        return True
+    return False
+
+# Compatibility shim
+list_memos = load_memos
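The rewritten core.py replaces the old dict-based inbox parsing with a small CRUD API over `Memos/inbox.md`, backed by the new `Memo` model. A minimal usage sketch, assuming monoco-toolkit 0.3.12 is importable; the workspace path and the issue reference below are hypothetical:

```python
from pathlib import Path

from monoco.features.memo import core

# add_memo() resolves Memos/ as a sibling of the Issues directory and
# creates Memos/inbox.md on first use.
issues_root = Path("workspace/Issues")  # hypothetical workspace layout

uid = core.add_memo(
    issues_root,
    "Flaky retry behaviour in the pytest action",
    context="monoco/core/executor/pytest_action.py",
    author="Assistant",
    source="agent",
    memo_type="bug",
)

memos = core.load_memos(issues_root)  # List[Memo], parsed from inbox.md
core.update_memo(issues_root, uid, {"status": "tracked", "ref": "ISSUE-0042"})  # hypothetical issue id
core.delete_memo(issues_root, uid)    # rewrites inbox.md without the entry
```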
monoco/features/memo/models.py
ADDED
@@ -0,0 +1,53 @@
+from datetime import datetime
+from typing import Optional, Literal
+from pydantic import BaseModel, Field
+
+class Memo(BaseModel):
+    uid: str
+    content: str
+    timestamp: datetime = Field(default_factory=datetime.now)
+
+    # Optional Context
+    context: Optional[str] = None
+
+    # New Metadata Fields
+    author: str = "User"  # User, Assistant, or specific Agent Name
+    source: str = "cli"  # cli, agent, mailroom, etc.
+    status: Literal["pending", "tracked", "resolved", "dismissed"] = "pending"
+    ref: Optional[str] = None  # Linked Issue ID or other reference
+    type: Literal["insight", "bug", "feature", "task"] = "insight"
+
+    def to_markdown(self) -> str:
+        """
+        Render the memo to Markdown format.
+        """
+        ts_str = self.timestamp.strftime("%Y-%m-%d %H:%M:%S")
+        header = f"## [{self.uid}] {ts_str}"
+
+        # Metadata block
+        meta = []
+        if self.author != "User":
+            meta.append(f"- **From**: {self.author}")
+        if self.source != "cli":
+            meta.append(f"- **Source**: {self.source}")
+        if self.type != "insight":
+            meta.append(f"- **Type**: {self.type}")
+
+        # Status line with checkbox simulation
+        status_map = {
+            "pending": "[ ] Pending",
+            "tracked": "[x] Tracked",
+            "resolved": "[x] Resolved",
+            "dismissed": "[-] Dismissed"
+        }
+        meta.append(f"- **Status**: {status_map.get(self.status, '[ ] Pending')}")
+
+        if self.ref:
+            meta.append(f"- **Ref**: {self.ref}")
+
+        if self.context:
+            meta.append(f"- **Context**: `{self.context}`")
+
+        meta_block = "\n".join(meta)
+
+        return f"\n{header}\n{meta_block}\n\n{self.content.strip()}\n"
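The `Memo` model and `parse_memo_block` are two halves of the same contract: `to_markdown()` renders an inbox block, and `parse_memo_block()` reads it back. A round-trip sketch, assuming the 0.3.12 package is importable (the uid, ref, and content are illustrative):

```python
from monoco.features.memo.models import Memo
from monoco.features.memo.core import parse_memo_block

memo = Memo(
    uid="a1b2c3",  # 6-char hex id, as produced by generate_memo_id()
    content="Capture the retry edge case before closing the issue.",
    author="Assistant",
    source="agent",
    type="bug",
    ref="ISSUE-0042",  # hypothetical linked issue
)

block = memo.to_markdown()
# Renders roughly as:
#   ## [a1b2c3] 2025-01-01 12:00:00
#   - **From**: Assistant
#   - **Source**: agent
#   - **Type**: bug
#   - **Status**: [ ] Pending
#   - **Ref**: ISSUE-0042
#
#   Capture the retry edge case before closing the issue.

parsed = parse_memo_block(block)
assert parsed is not None
assert parsed.uid == "a1b2c3" and parsed.status == "pending" and parsed.ref == "ISSUE-0042"
```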
monoco/features/memo/resources/…/skills/{note_processing_workflow → monoco_workflow_note_processing}/SKILL.md
RENAMED
@@ -1,7 +1,7 @@
 ---
-name:
+name: monoco_workflow_note_processing
 description: Memo Note Processing Workflow (Flow Skill). Defines the standard operational process from capturing fleeting notes to organizing and archiving, ensuring effective management of ideas.
-type:
+type: workflow
 domain: memo
 version: 1.0.0
 ---
monoco/features/spike/adapter.py
CHANGED
@@ -1,19 +1,32 @@
 from pathlib import Path
 from typing import Dict
-from monoco.core.
+from monoco.core.loader import FeatureModule, FeatureMetadata
+from monoco.core.feature import IntegrationData
 from monoco.features.spike import core
 
 
-class SpikeFeature(
+class SpikeFeature(FeatureModule):
+    """Spike (research) feature module with unified lifecycle support."""
+
     @property
-    def
-        return
+    def metadata(self) -> FeatureMetadata:
+        return FeatureMetadata(
+            name="spike",
+            version="1.0.0",
+            description="Research spike management for external references",
+            dependencies=["core"],
+            priority=30,
+        )
 
-    def
+    def _on_mount(self, context: "FeatureContext") -> None:  # type: ignore
+        """Initialize spike feature with workspace context."""
+        root = context.root
+        config = context.config
         spikes_name = config.get("paths", {}).get("spikes", ".references")
         core.init(root, spikes_name)
 
     def integrate(self, root: Path, config: Dict) -> IntegrationData:
+        """Provide integration data for agent environment."""
         # Determine language from config, default to 'en'
         lang = config.get("i18n", {}).get("source_lang", "en")
         base_dir = Path(__file__).parent / "resources"

monoco/features/spike/commands.py
CHANGED
@@ -81,13 +81,15 @@ def remove_repo(
     target_path = spikes_dir / name
     deleted = False
     if target_path.exists():
-        if force
-            f"Do you want to delete the directory {target_path}?", default=False
-        ):
+        if force:
             core.remove_repo_dir(spikes_dir, name)
             deleted = True
         else:
             deleted = False
+            if not OutputManager.is_agent_mode():
+                from rich.console import Console
+                console = Console()
+                console.print(f"[yellow]Skipping physical deletion of {target_path}. Use --force to delete.[/yellow]")
 
     OutputManager.print(
         {"status": "removed", "name": name, "directory_deleted": deleted}
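SpikeFeature now follows the `FeatureModule` contract from `monoco/core/loader.py`: declare static `metadata`, do workspace-dependent setup in `_on_mount`, and expose `integrate()` for the agent environment. A bare-bones skeleton following the same shape (the class and its metadata values are illustrative; the `FeatureMetadata` fields are taken from this diff and may not be exhaustive):

```python
from pathlib import Path

from monoco.core.loader import FeatureModule, FeatureMetadata

class ExampleFeature(FeatureModule):
    """Hypothetical feature module mirroring the SpikeFeature lifecycle."""

    @property
    def metadata(self) -> FeatureMetadata:
        return FeatureMetadata(
            name="example",
            version="0.1.0",
            description="Illustrative feature used to show the lifecycle hooks",
            dependencies=["core"],  # mounted alongside its declared dependencies
            priority=50,            # relative ordering; exact semantics assumed
        )

    def _on_mount(self, context) -> None:
        # The loader passes a FeatureContext carrying the workspace root and merged config.
        root: Path = context.root
        config: dict = context.config
        # ... feature-specific initialization (paths, caches, resources) ...
```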
monoco/features/spike/resources/en/skills/{research_workflow → monoco_workflow_research}/SKILL.md
RENAMED
@@ -1,7 +1,7 @@
 ---
-name:
+name: monoco_workflow_research
 description: Spike Research Workflow (Flow Skill). Defines the standard operational process from adding external repositories to knowledge extraction and archiving, ensuring effective management of external knowledge.
-type:
+type: workflow
 domain: spike
 version: 1.0.0
 ---
monoco/main.py
CHANGED
@@ -1,7 +1,23 @@
 import os
 import typer
 from typing import Optional
+from pathlib import Path
 from monoco.core.output import print_output
+from monoco.core.loader import FeatureLoader, FeatureContext
+
+# Global feature loader for CLI lifecycle management
+_feature_loader: Optional[FeatureLoader] = None
+
+
+def get_feature_loader() -> FeatureLoader:
+    """Get or initialize the global feature loader."""
+    global _feature_loader
+    if _feature_loader is None:
+        _feature_loader = FeatureLoader()
+        # Discover features but defer loading until needed
+        _feature_loader.discover()
+    return _feature_loader
+
 
 app = typer.Typer(
     name="monoco",
@@ -89,7 +105,26 @@ def main(
            if (discovered / ".monoco").exists():
                config_root = str(discovered)
 
-        get_config(project_root=config_root, require_project=require_workspace)
+        config = get_config(project_root=config_root, require_project=require_workspace)
+
+        # Initialize FeatureLoader and mount features when workspace is available
+        if require_workspace and config_root:
+            loader = get_feature_loader()
+            # Load all features (with lazy loading for non-critical features)
+            loader.load_all(lazy=True)
+            # Create feature context and mount all features
+            feature_context = FeatureContext(
+                root=Path(config_root),
+                config=config.model_dump(),
+                registry=loader.registry,
+            )
+            errors = loader.mount_all(feature_context)
+            if errors:
+                from rich.console import Console
+                console = Console()
+                for name, error in errors.items():
+                    console.print(f"[yellow]Warning: Failed to mount feature '{name}': {error}[/yellow]")
+
     except FileNotFoundError as e:
         # Graceful exit for workspace errors
         from rich.console import Console
@@ -156,6 +191,7 @@ from monoco.features.issue import commands as issue_cmd
 from monoco.features.spike import commands as spike_cmd
 from monoco.features.i18n import commands as i18n_cmd
 from monoco.features.config import commands as config_cmd
+from monoco.features.hooks import commands as hooks_cmd
 from monoco.cli import project as project_cmd
 from monoco.cli import workspace as workspace_cmd
 
@@ -163,6 +199,7 @@ app.add_typer(issue_cmd.app, name="issue", help="Manage development issues")
 app.add_typer(spike_cmd.app, name="spike", help="Manage research spikes")
 app.add_typer(i18n_cmd.app, name="i18n", help="Manage documentation i18n")
 app.add_typer(config_cmd.app, name="config", help="Manage configuration")
+app.add_typer(hooks_cmd.app, name="hooks", help="Manage git hooks for development workflow")
 app.add_typer(project_cmd.app, name="project", help="Manage projects")
 app.add_typer(workspace_cmd.app, name="workspace", help="Manage workspace")
 
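The CLI now drives an explicit feature lifecycle: discover modules, load them (lazily), build a `FeatureContext`, and mount everything, surfacing per-feature mount failures as warnings. A condensed sketch of the same flow outside Typer, mirroring the calls shown in this diff (the root path and empty config are placeholders):

```python
from pathlib import Path

from monoco.core.loader import FeatureLoader, FeatureContext

loader = FeatureLoader()
loader.discover()           # find available feature modules
loader.load_all(lazy=True)  # defer heavy imports where possible

context = FeatureContext(
    root=Path("."),         # workspace root; main.py uses the discovered .monoco root
    config={},              # main.py passes get_config(...).model_dump()
    registry=loader.registry,
)

errors = loader.mount_all(context)  # mapping of feature name -> mount error
for name, error in errors.items():
    print(f"Warning: failed to mount feature '{name}': {error}")
```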
{monoco_toolkit-0.3.10.dist-info → monoco_toolkit-0.3.12.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: monoco-toolkit
-Version: 0.3.
+Version: 0.3.12
 Summary: Agent Native Toolkit for Monoco - Task Management & Kanban for AI Agents
 Project-URL: Homepage, https://monoco.io
 Project-URL: Repository, https://github.com/IndenScale/Monoco
@@ -115,6 +115,12 @@ monoco session start
 - **Protocols**: LSP / ACP (for IDE integration)
 - **Storage**: Local Filesystem (Markdown/YAML)
 
+## 📁 Project Structure
+
+Monoco follows a "Distro" architecture that separates **State** (`Issues/`), **Config** (`.monoco/`), and **Logic** (`monoco/`).
+
+For a detailed breakdown of the directory structure, see **[TREE.md](./TREE.md)**.
+
 ## 🤝 Contributing
 
 Monoco is open-source. We are building the standard distribution for the Agentic era.