monoco-toolkit 0.3.3__py3-none-any.whl → 0.3.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,83 @@
+ from monoco.features.issue.resolver import ReferenceResolver, ResolutionContext
+
+
+ def test_resolve_explicit_namespace():
+     context = ResolutionContext(
+         current_project="toolkit",
+         workspace_root="monoco",
+         available_ids={"toolkit::FEAT-0001", "monoco::FEAT-0001", "EPIC-0001"},
+     )
+     resolver = ReferenceResolver(context)
+
+     # Explicit project references
+     assert resolver.resolve("toolkit::FEAT-0001") == "toolkit::FEAT-0001"
+     assert resolver.resolve("monoco::FEAT-0001") == "monoco::FEAT-0001"
+
+     # Non-existent namespace
+     assert resolver.resolve("other::FEAT-0001") is None
+
+
+ def test_resolve_proximity_current_project():
+     context = ResolutionContext(
+         current_project="toolkit",
+         workspace_root="monoco",
+         available_ids={
+             "toolkit::FEAT-0001",
+             "monoco::FEAT-0001",
+         },
+     )
+     resolver = ReferenceResolver(context)
+
+     # Should prefer the current project
+     assert resolver.resolve("FEAT-0001") == "toolkit::FEAT-0001"
+
+
+ def test_resolve_root_fallback():
+     context = ResolutionContext(
+         current_project="toolkit",
+         workspace_root="monoco",
+         available_ids={
+             "monoco::EPIC-0000",
+             "toolkit::FEAT-0001",
+         },
+     )
+     resolver = ReferenceResolver(context)
+
+     # Should fall back to the workspace root if not found in the current project
+     assert resolver.resolve("EPIC-0000") == "monoco::EPIC-0000"
+
+
+ def test_resolve_local_ids():
+     context = ResolutionContext(
+         current_project="toolkit",
+         workspace_root="monoco",
+         available_ids={
+             "EPIC-9999",
+         },
+     )
+     resolver = ReferenceResolver(context)
+
+     # Should resolve plain local IDs
+     assert resolver.resolve("EPIC-9999") == "EPIC-9999"
+
+
+ def test_priority_order():
+     context = ResolutionContext(
+         current_project="toolkit",
+         workspace_root="monoco",
+         available_ids={"toolkit::FEAT-0001", "monoco::FEAT-0001", "FEAT-0001"},
+     )
+     resolver = ReferenceResolver(context)
+
+     # Priority: toolkit::FEAT-0001 > monoco::FEAT-0001 > FEAT-0001
+     assert resolver.resolve("FEAT-0001") == "toolkit::FEAT-0001"
+
+     # After removing the current-project candidate
+     context.available_ids.remove("toolkit::FEAT-0001")
+     resolver = ReferenceResolver(context)
+     assert resolver.resolve("FEAT-0001") == "monoco::FEAT-0001"
+
+     # After removing the workspace-root candidate
+     context.available_ids.remove("monoco::FEAT-0001")
+     resolver = ReferenceResolver(context)
+     assert resolver.resolve("FEAT-0001") == "FEAT-0001"
@@ -8,6 +8,7 @@ from monoco.features.i18n.core import detect_language
  from .models import IssueMetadata
  from .domain.parser import MarkdownParser
  from .domain.models import ContentBlock
+ from .resolver import ReferenceResolver, ResolutionContext


  class IssueValidator:
@@ -20,9 +21,19 @@ class IssueValidator:
          self.issue_root = issue_root

      def validate(
-         self, meta: IssueMetadata, content: str, all_issue_ids: Set[str] = set()
+         self,
+         meta: IssueMetadata,
+         content: str,
+         all_issue_ids: Set[str] = set(),
+         current_project: Optional[str] = None,
+         workspace_root: Optional[str] = None,
      ) -> List[Diagnostic]:
+         """
+         Validate an issue and return diagnostics.
+         """
          diagnostics = []
+         self._current_project = current_project
+         self._workspace_root = workspace_root

          # Parse Content into Blocks (Domain Layer)
          # Handle case where content might be just body (from update_issue) or full file
@@ -311,7 +322,11 @@ class IssueValidator:
              if stripped == expected_header:
                  header_found = True

-             if stripped == "## Review Comments":
+             # Flexible matching for the Review Comments header
+             if any(
+                 kw in stripped
+                 for kw in ["Review Comments", "评审备注", "评审记录", "Review"]
+             ):
                  review_header_found = True
                  review_header_index = i
@@ -406,6 +421,16 @@ class IssueValidator:
      ) -> List[Diagnostic]:
          diagnostics = []

+         # Initialize Resolver
+         resolver = None
+         if all_ids:
+             context = ResolutionContext(
+                 current_project=self._current_project or "local",
+                 workspace_root=self._workspace_root,
+                 available_ids=all_ids,
+             )
+             resolver = ReferenceResolver(context)
+
          # Malformed ID Check
          if meta.parent and meta.parent.startswith("#"):
              line = self._get_field_line(content, "parent")
@@ -441,7 +466,7 @@ class IssueValidator:
              )
          )

-         if not all_ids:
+         if not all_ids or not resolver:
              return diagnostics

          # Logic: Epics must have a parent (unless it is the Sink Root EPIC-0000)
@@ -457,8 +482,9 @@ class IssueValidator:

          if (
              meta.parent
-             and meta.parent not in all_ids
+             and meta.parent != "EPIC-0000"
              and not meta.parent.startswith("#")
+             and not resolver.is_valid_reference(meta.parent)
          ):
              line = self._get_field_line(content, "parent")
              diagnostics.append(
@@ -470,7 +496,7 @@ class IssueValidator:
                  )
              )
          for dep in meta.dependencies:
-             if dep not in all_ids:
+             if not resolver.is_valid_reference(dep):
                  line = self._get_field_line(content, "dependencies")
                  diagnostics.append(
                      self._create_diagnostic(
@@ -503,24 +529,27 @@ class IssueValidator:
              matches = re.finditer(r"\b((?:EPIC|FEAT|CHORE|FIX)-\d{4})\b", line)
              for match in matches:
                  ref_id = match.group(1)
-                 if ref_id != meta.id and ref_id not in all_ids:
-                     # Check if it's a namespaced ID? The regex only catches local IDs.
-                     # If users use MON::FEAT-0001, the regex might catch FEAT-0001.
-                     # But all_ids contains full IDs (potentially namespaced).
-                     # Simple logic: if ref_id isn't in all_ids, check if any id ENDS with ref_id
-
-                     found_namespaced = any(
-                         known.endswith(f"::{ref_id}") for known in all_ids
-                     )
+                 # Check for a namespaced ID before this match?
+                 # The regex above only catches the ID part.
+                 # Adjust the regex to optionally capture the namespace:: prefix.
+                 full_match = re.search(
+                     r"\b(?:([a-z0-9_-]+)::)?(" + re.escape(ref_id) + r")\b",
+                     line[max(0, match.start() - 50) : match.end()],
+                 )

-                     if not found_namespaced:
-                         diagnostics.append(
-                             self._create_diagnostic(
-                                 f"Broken Reference: Issue '{ref_id}' not found.",
-                                 DiagnosticSeverity.Warning,
-                                 line=i,
-                             )
+                 check_id = ref_id
+                 if full_match and full_match.group(1):
+                     check_id = f"{full_match.group(1)}::{ref_id}"
+
+                 if ref_id != meta.id and not resolver.is_valid_reference(check_id):
+                     diagnostics.append(
+                         self._create_diagnostic(
+                             f"Broken Reference: Issue '{check_id}' not found.",
+                             DiagnosticSeverity.Warning,
+                             line=i,
                          )
+                     )
+         return diagnostics
          return diagnostics

      def _validate_time_consistency(
@@ -0,0 +1,3 @@
+ from .cli import app
+
+ __all__ = ["app"]
@@ -0,0 +1,90 @@
+ import typer
+ from pathlib import Path
+ from typing import Optional
+ from rich.console import Console
+ from rich.table import Table
+ from monoco.core.config import get_config
+ from .core import add_memo, list_memos, get_inbox_path
+
+ app = typer.Typer(help="Manage memos (fleeting notes).")
+ console = Console()
+
+
+ def get_issues_root() -> Path:
+     config = get_config()
+     # Resolve an absolute path for the issues directory.
+     root = Path(config.paths.root).resolve()
+     # config.paths.root defaults to "." and the ConfigLoader does not rewrite it
+     # relative to where the config file was found, so resolving it against the
+     # current working directory is not reliable on its own.
+     # Safer approach: use find_monoco_root() to locate the project root, then
+     # append the configured issues path.
+     from monoco.core.config import find_monoco_root
+
+     project_root = find_monoco_root()
+     return project_root / config.paths.issues
+
+
+ @app.command("add")
+ def add_command(
+     content: str = typer.Argument(..., help="The content of the memo."),
+     context: Optional[str] = typer.Option(
+         None, "--context", "-c", help="Context reference (e.g. file:line)."
+     ),
+ ):
+     """
+     Capture a new idea or thought into the Memo Inbox.
+     """
+     issues_root = get_issues_root()
+
+     uid = add_memo(issues_root, content, context)
+
+     console.print(f"[green]✔ Memo recorded.[/green] ID: [bold]{uid}[/bold]")
+
+
+ @app.command("list")
+ def list_command():
+     """
+     List all memos in the inbox.
+     """
+     issues_root = get_issues_root()
+
+     memos = list_memos(issues_root)
+
+     if not memos:
+         console.print("No memos found. Use `monoco memo add` to create one.")
+         return
+
+     table = Table(title="Memo Inbox")
+     table.add_column("ID", style="cyan", no_wrap=True)
+     table.add_column("Timestamp", style="magenta")
+     table.add_column("Content")
+
+     for memo in memos:
+         # Truncate content to the first line for the list view
+         content_preview = memo["content"].split("\n")[0]
+         if len(memo["content"]) > 50:
+             content_preview = content_preview[:47] + "..."
+
+         table.add_row(memo["id"], memo["timestamp"], content_preview)
+
+     console.print(table)
+
+
+ @app.command("open")
+ def open_command():
+     """
+     Open the inbox file in the default editor.
+     """
+     issues_root = get_issues_root()
+     inbox_path = get_inbox_path(issues_root)
+
+     if not inbox_path.exists():
+         console.print("[yellow]Inbox does not exist yet.[/yellow]")
+         return
+
+     typer.launch(str(inbox_path))
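Given the `monoco memo add` hint printed by `list_command`, typical usage of this sub-app would look roughly like the following (the memo text and context value are made-up examples):

monoco memo add "Cache the resolver context between validations" --context "validator.py:425"
monoco memo list
monoco memo open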
@@ -0,0 +1,87 @@
+ import re
+ from pathlib import Path
+ from datetime import datetime
+ from typing import List, Dict, Optional
+ import secrets
+
+
+ def get_memos_dir(issues_root: Path) -> Path:
+     """
+     Get the directory for memos.
+     Convention: sibling of the Issues directory.
+     """
+     # issues_root is usually ".../Issues"
+     return issues_root.parent / "Memos"
+
+
+ def get_inbox_path(issues_root: Path) -> Path:
+     return get_memos_dir(issues_root) / "inbox.md"
+
+
+ def generate_memo_id() -> str:
+     """Generate a short 6-char ID."""
+     return secrets.token_hex(3)
+
+
+ def format_memo(uid: str, content: str, context: Optional[str] = None) -> str:
+     timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+     header = f"## [{uid}] {timestamp}"
+
+     body = content.strip()
+
+     if context:
+         body = f"> **Context**: `{context}`\n\n{body}"
+
+     return f"\n{header}\n{body}\n"
+
+
+ def add_memo(issues_root: Path, content: str, context: Optional[str] = None) -> str:
+     """
+     Append a memo to the inbox.
+     Returns the generated UID.
+     """
+     inbox_path = get_inbox_path(issues_root)
+
+     if not inbox_path.exists():
+         inbox_path.parent.mkdir(parents=True, exist_ok=True)
+         inbox_path.write_text("# Monoco Memos Inbox\n", encoding="utf-8")
+
+     uid = generate_memo_id()
+     entry = format_memo(uid, content, context)
+
+     with inbox_path.open("a", encoding="utf-8") as f:
+         f.write(entry)
+
+     return uid
+
+
+ def list_memos(issues_root: Path) -> List[Dict[str, str]]:
+     """
+     Parse memos from the inbox.
+     """
+     inbox_path = get_inbox_path(issues_root)
+     if not inbox_path.exists():
+         return []
+
+     content = inbox_path.read_text(encoding="utf-8")
+
+     # Find headers of the form "## [uid] timestamp" and split the file on them.
+     pattern = re.compile(r"^## \[([a-f0-9]+)\] (.*?)$", re.MULTILINE)
+
+     memos = []
+     matches = list(pattern.finditer(content))
+
+     for i, match in enumerate(matches):
+         uid = match.group(1)
+         timestamp = match.group(2)
+
+         start = match.end()
+         end = matches[i + 1].start() if i + 1 < len(matches) else len(content)
+
+         body = content[start:end].strip()
+
+         memos.append({"id": uid, "timestamp": timestamp, "content": body})
+
+     return memos
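For orientation, `format_memo` appends entries shaped like the block below, and `list_memos` splits the file back apart on the `## [uid] timestamp` headers via the multiline regex above (the UID, timestamp, and text here are illustrative):

# Monoco Memos Inbox

## [a3f9c1] 2025-01-15 09:30:00
> **Context**: `validator.py:425`

Cache the resolver context between validations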
@@ -7,6 +7,90 @@ from monoco.core.config import get_config
  from monoco.features.scheduler import SessionManager, load_scheduler_config

  app = typer.Typer(name="agent", help="Manage agent sessions")
+ role_app = typer.Typer(name="role", help="Manage agent roles")
+
+
+ @role_app.command(name="list")
+ def list_roles():
+     """
+     List available agent roles and their sources.
+     """
+     from monoco.features.scheduler.config import RoleLoader
+
+     settings = get_config()
+     project_root = Path(settings.paths.root).resolve()
+
+     loader = RoleLoader(project_root)
+     roles = loader.load_all()
+
+     output = []
+     for name, role in roles.items():
+         output.append(
+             {
+                 "role": name,
+                 "engine": role.engine,
+                 "source": loader.sources.get(name, "unknown"),
+                 "description": role.description,
+             }
+         )
+
+     print_output(output, title="Agent Roles")
+
+
+ @app.command()
+ def draft(
+     desc: str = typer.Option(..., "--desc", "-d", help="Description of the task"),
+     type: str = typer.Option(
+         "feature", "--type", "-t", help="Issue type (feature/chore/fix)"
+     ),
+ ):
+     """
+     Draft a new issue based on a natural language description.
+     This creates a temporary 'drafter' agent session.
+     """
+     from monoco.core.output import print_error
+
+     settings = get_config()
+     project_root = Path(settings.paths.root).resolve()
+
+     # Load Roles
+     roles = load_scheduler_config(project_root)
+     # Use 'crafter' as the role for drafting (it handles new tasks)
+     role_name = "crafter"
+     selected_role = roles.get(role_name)
+
+     if not selected_role:
+         print_error(f"Role '{role_name}' not found.")
+         raise typer.Exit(code=1)
+
+     print_output(
+         f"Drafting {type} from description: '{desc}'",
+         title="Agent Drafter",
+     )
+
+     manager = SessionManager()
+     # We use a placeholder ID as we don't know the ID yet.
+     # The agent is expected to create the file, so the ID will be generated then.
+     session = manager.create_session("NEW_TASK", selected_role)
+
+     context = {"description": desc, "type": type}
+
+     try:
+         session.start(context=context)
+
+         # Monitoring Loop
+         while session.refresh_status() == "running":
+             time.sleep(1)
+
+         if session.model.status == "failed":
+             print_error("Drafting failed.")
+         else:
+             print_output("Drafting completed.", title="Agent Drafter")
+
+     except KeyboardInterrupt:
+         print("\nStopping...")
+         session.terminate()
+         print_output("Drafting cancelled.")


  @app.command()
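Assuming `role_app` is registered on the agent app (the corresponding `add_typer` call is not part of this hunk) and the agent app is mounted as `monoco agent`, the new commands would be invoked roughly like this, with a made-up description:

monoco agent role list
monoco agent draft --desc "Add a --json flag to memo list" --type feature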
@@ -43,6 +127,7 @@ def run(
          role_name = role or "builder"
          description = None
      else:
+         # Implicit Draft Mode via run command
          issue_id = "NEW_TASK"
          role_name = role or "crafter"
          description = target
@@ -84,7 +169,7 @@ def run(
      session.start(context=context)

      # Monitoring Loop
-     while session.model.status == "running":
+     while session.refresh_status() == "running":
          time.sleep(1)

      if session.model.status == "failed":
@@ -141,9 +226,15 @@ def autopsy(
      if re.match(r"^[a-zA-Z]+-\d+$", target):
          print_output(f"Session not in memory. Analyzing Issue {target} directly.")
          # We create a transient session just to trigger the coroner
-         from .defaults import DEFAULT_ROLES
+         settings = get_config()
+         project_root = Path(settings.paths.root).resolve()
+         roles = load_scheduler_config(project_root)
+         builder_role = roles.get("builder")
+
+         if not builder_role:
+             print_output("Builder role not found.", style="red")
+             raise typer.Exit(code=1)

-         builder_role = next(r for r in DEFAULT_ROLES if r.name == "builder")
          session = manager.create_session(target.upper(), builder_role)
          session.model.status = "failed"
      else:
@@ -192,13 +283,3 @@ def logs(session_id: str):
      # Placeholder
      print("[12:00:00] Session started")
      print("[12:00:01] Worker initialized")
-
-
- @app.command()
- def kill(session_id: str):
-     """
-     Terminate a session.
-     """
-     print_output(f"Killing session {session_id}...", title="Kill Session")
-     # Placeholder
-     print("Signal sent.")
@@ -1,32 +1,68 @@
+ from typing import Dict, Optional
  import yaml
  from pathlib import Path
- from typing import Dict
  from .models import RoleTemplate, SchedulerConfig
  from .defaults import DEFAULT_ROLES


- def load_scheduler_config(project_root: Path) -> Dict[str, RoleTemplate]:
+ class RoleLoader:
      """
-     Load scheduler configuration from .monoco/scheduler.yaml
-     Merges with default roles.
+     Tiered configuration loader for Agent Roles.
+     Level 1: Builtin Fallback
+     Level 2: Global (~/.monoco/roles.yaml)
+     Level 3: Project (./.monoco/roles.yaml)
      """
-     roles = {role.name: role for role in DEFAULT_ROLES}

-     config_path = project_root / ".monoco" / "scheduler.yaml"
-     if config_path.exists():
+     def __init__(self, project_root: Optional[Path] = None):
+         self.project_root = project_root
+         self.user_home = Path.home()
+         self.roles: Dict[str, RoleTemplate] = {}
+         self.sources: Dict[str, str] = {}  # role_name -> source description
+
+     def load_all(self) -> Dict[str, RoleTemplate]:
+         # Level 1: Defaults
+         for role in DEFAULT_ROLES:
+             self.roles[role.name] = role
+             self.sources[role.name] = "builtin"
+
+         # Level 2: Global
+         global_path = self.user_home / ".monoco" / "roles.yaml"
+         self._load_from_path(global_path, "global")
+
+         # Level 3: Project
+         if self.project_root:
+             project_path = self.project_root / ".monoco" / "roles.yaml"
+             self._load_from_path(project_path, "project")
+
+         return self.roles
+
+     def _load_from_path(self, path: Path, source_label: str):
+         if not path.exists():
+             return
+
          try:
-             with open(config_path, "r") as f:
+             with open(path, "r") as f:
                  data = yaml.safe_load(f) or {}

-             # Use Pydantic to validate the whole config if possible, or just the roles list
-             # Depending on file structure. Assuming the file has a 'roles' key.
              if "roles" in data:
-                 # We can validate using SchedulerConfig
+                 # Validate using SchedulerConfig
                  config = SchedulerConfig(roles=data["roles"])
                  for role in config.roles:
-                     roles[role.name] = role
+                     # Level 3 > Level 2 > Level 1 (roles with the same name override earlier tiers)
+                     # Currently we do a total replacement for same-named roles
+                     self.roles[role.name] = role
+                     self.sources[role.name] = str(path)
          except Exception as e:
-             # For now, just log or print. Ideally use a logger.
-             print(f"Warning: Failed to load scheduler config: {e}")
+             # We don't want to crash the whole tool if a config is malformed,
+             # but we should probably warn.
+             import sys
+
+             print(f"Warning: Failed to load roles from {path}: {e}", file=sys.stderr)
+

-     return roles
+ def load_scheduler_config(project_root: Path) -> Dict[str, RoleTemplate]:
+     """
+     Legacy compatibility wrapper for functional access.
+     """
+     loader = RoleLoader(project_root)
+     return loader.load_all()
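A minimal sketch of how the tiered loader is meant to be consumed, based on the `RoleLoader` API shown above (the printed output is illustrative):

from pathlib import Path
from monoco.features.scheduler.config import RoleLoader

loader = RoleLoader(project_root=Path("."))
roles = loader.load_all()  # builtin -> global -> project; later tiers win on name clashes
for name in roles:
    print(name, loader.sources.get(name, "unknown"))  # e.g. "builder builtin"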