monoco-toolkit 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. monoco/cli/project.py +15 -7
  2. monoco/cli/workspace.py +11 -3
  3. monoco/core/agent/adapters.py +24 -1
  4. monoco/core/config.py +81 -3
  5. monoco/core/integrations.py +8 -0
  6. monoco/core/lsp.py +7 -0
  7. monoco/core/output.py +8 -1
  8. monoco/core/resources/en/SKILL.md +1 -1
  9. monoco/core/setup.py +8 -1
  10. monoco/daemon/app.py +18 -12
  11. monoco/features/agent/commands.py +94 -17
  12. monoco/features/agent/core.py +48 -0
  13. monoco/features/agent/resources/en/critique.prompty +16 -0
  14. monoco/features/agent/resources/en/develop.prompty +16 -0
  15. monoco/features/agent/resources/en/investigate.prompty +16 -0
  16. monoco/features/agent/resources/en/refine.prompty +14 -0
  17. monoco/features/agent/resources/en/verify.prompty +16 -0
  18. monoco/features/agent/resources/zh/critique.prompty +18 -0
  19. monoco/features/agent/resources/zh/develop.prompty +18 -0
  20. monoco/features/agent/resources/zh/investigate.prompty +18 -0
  21. monoco/features/agent/resources/zh/refine.prompty +16 -0
  22. monoco/features/agent/resources/zh/verify.prompty +18 -0
  23. monoco/features/config/commands.py +35 -14
  24. monoco/features/i18n/commands.py +89 -10
  25. monoco/features/i18n/core.py +112 -16
  26. monoco/features/issue/commands.py +254 -85
  27. monoco/features/issue/core.py +142 -119
  28. monoco/features/issue/domain/__init__.py +0 -0
  29. monoco/features/issue/domain/lifecycle.py +126 -0
  30. monoco/features/issue/domain/models.py +170 -0
  31. monoco/features/issue/domain/parser.py +223 -0
  32. monoco/features/issue/domain/workspace.py +104 -0
  33. monoco/features/issue/engine/__init__.py +22 -0
  34. monoco/features/issue/engine/config.py +189 -0
  35. monoco/features/issue/engine/machine.py +185 -0
  36. monoco/features/issue/engine/models.py +18 -0
  37. monoco/features/issue/linter.py +32 -11
  38. monoco/features/issue/lsp/__init__.py +3 -0
  39. monoco/features/issue/lsp/definition.py +72 -0
  40. monoco/features/issue/models.py +8 -8
  41. monoco/features/issue/validator.py +204 -65
  42. monoco/features/spike/commands.py +45 -24
  43. monoco/features/spike/core.py +5 -22
  44. monoco/main.py +11 -17
  45. {monoco_toolkit-0.2.4.dist-info → monoco_toolkit-0.2.6.dist-info}/METADATA +1 -1
  46. monoco_toolkit-0.2.6.dist-info/RECORD +96 -0
  47. monoco/features/issue/executions/refine.md +0 -26
  48. monoco/features/pty/core.py +0 -185
  49. monoco/features/pty/router.py +0 -138
  50. monoco/features/pty/server.py +0 -56
  51. monoco_toolkit-0.2.4.dist-info/RECORD +0 -78
  52. {monoco_toolkit-0.2.4.dist-info → monoco_toolkit-0.2.6.dist-info}/WHEEL +0 -0
  53. {monoco_toolkit-0.2.4.dist-info → monoco_toolkit-0.2.6.dist-info}/entry_points.txt +0 -0
  54. {monoco_toolkit-0.2.4.dist-info → monoco_toolkit-0.2.6.dist-info}/licenses/LICENSE +0 -0
monoco/features/issue/validator.py CHANGED
@@ -4,7 +4,11 @@ from typing import List, Set, Optional, Dict
  from pathlib import Path

  from monoco.core.lsp import Diagnostic, DiagnosticSeverity, Range, Position
- from .models import IssueMetadata, IssueStatus, IssueStage, IssueType
+ from monoco.core.config import get_config
+ from monoco.features.i18n.core import detect_language
+ from .models import IssueMetadata, IssueType
+ from .domain.parser import MarkdownParser
+ from .domain.models import ContentBlock

  class IssueValidator:
  """
@@ -18,14 +22,41 @@ class IssueValidator:
  def validate(self, meta: IssueMetadata, content: str, all_issue_ids: Set[str] = set()) -> List[Diagnostic]:
  diagnostics = []

+ # Parse Content into Blocks (Domain Layer)
+ # Handle case where content might be just body (from update_issue) or full file
+ if content.startswith("---"):
+ try:
+ issue_domain = MarkdownParser.parse(content)
+ blocks = issue_domain.body.blocks
+ has_frontmatter = True
+ except Exception:
+ # Fallback if parser fails (e.g. invalid YAML)
+ # We continue with empty blocks or try partial parsing?
+ # For now, let's try to parse blocks ignoring FM
+ lines = content.splitlines()
+ # Find end of FM
+ start_line = 0
+ if lines[0].strip() == "---":
+ for i in range(1, len(lines)):
+ if lines[i].strip() == "---":
+ start_line = i + 1
+ break
+ blocks = MarkdownParser._parse_blocks(lines[start_line:], start_line_offset=start_line)
+ has_frontmatter = True
+ else:
+ # Assume content is just body
+ lines = content.splitlines()
+ blocks = MarkdownParser._parse_blocks(lines, start_line_offset=0)
+ has_frontmatter = False
+
  # 1. State Matrix Validation
  diagnostics.extend(self._validate_state_matrix(meta, content))

- # 2. Content Completeness (Checkbox check)
- diagnostics.extend(self._validate_content_completeness(meta, content))
+ # 2. State Requirements (Strict Verification)
+ diagnostics.extend(self._validate_state_requirements(meta, blocks))

- # 3. Structure Consistency (Headings)
- diagnostics.extend(self._validate_structure(meta, content))
+ # 3. Structure Consistency (Headings) - Using Blocks
+ diagnostics.extend(self._validate_structure_blocks(meta, blocks))

  # 4. Lifecycle/Integrity (Solution, etc.)
  diagnostics.extend(self._validate_integrity(meta, content))
@@ -36,9 +67,30 @@ class IssueValidator:
  # 6. Time Consistency
  diagnostics.extend(self._validate_time_consistency(meta, content))

- # 7. Checkbox Syntax
- diagnostics.extend(self._validate_checkbox_logic(content))
+ # 7. Checkbox Syntax - Using Blocks
+ diagnostics.extend(self._validate_checkbox_logic_blocks(blocks))

+ # 8. Language Consistency
+ diagnostics.extend(self._validate_language_consistency(meta, content))
+
+ return diagnostics
+
+ def _validate_language_consistency(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
+ diagnostics = []
+ try:
+ config = get_config()
+ source_lang = config.i18n.source_lang
+
+ # Check for language mismatch (specifically zh vs en)
+ if source_lang.lower() == 'zh':
+ detected = detect_language(content)
+ if detected == 'en':
+ diagnostics.append(self._create_diagnostic(
+ "Language Mismatch: Project source language is 'zh' but content appears to be 'en'.",
+ DiagnosticSeverity.Warning
+ ))
+ except Exception:
+ pass
  return diagnostics

  def _create_diagnostic(self, message: str, severity: DiagnosticSeverity, line: int = 0) -> Diagnostic:
@@ -74,56 +126,102 @@ class IssueValidator:
  diagnostics = []

  # Check based on parsed metadata (now that auto-correction is disabled)
- if meta.status == IssueStatus.CLOSED and meta.stage != IssueStage.DONE:
+ if meta.status == "closed" and meta.stage != "done":
  line = self._get_field_line(content, "status")
  diagnostics.append(self._create_diagnostic(
- f"State Mismatch: Closed issues must be in 'Done' stage (found: {meta.stage.value if meta.stage else 'None'})",
+ f"State Mismatch: Closed issues must be in 'Done' stage (found: {meta.stage if meta.stage else 'None'})",
  DiagnosticSeverity.Error,
  line=line
  ))

- if meta.status == IssueStatus.BACKLOG and meta.stage != IssueStage.FREEZED:
+ if meta.status == "backlog" and meta.stage != "freezed":
  line = self._get_field_line(content, "status")
  diagnostics.append(self._create_diagnostic(
- f"State Mismatch: Backlog issues must be in 'Freezed' stage (found: {meta.stage.value if meta.stage else 'None'})",
+ f"State Mismatch: Backlog issues must be in 'Freezed' stage (found: {meta.stage if meta.stage else 'None'})",
  DiagnosticSeverity.Error,
  line=line
  ))

  return diagnostics

- def _validate_content_completeness(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
+ def _validate_state_requirements(self, meta: IssueMetadata, blocks: List[ContentBlock]) -> List[Diagnostic]:
  diagnostics = []
- # Checkbox regex: - [ ] or - [x] or - [-] or - [/]
- checkboxes = re.findall(r"-\s*\[([ x\-/])\]", content)

- if len(checkboxes) < 2:
- diagnostics.append(self._create_diagnostic(
- "Content Incomplete: Ticket must contain at least 2 checkboxes (AC & Tasks).",
- DiagnosticSeverity.Warning
- ))
-
- if meta.stage in [IssueStage.REVIEW, IssueStage.DONE]:
- # No empty checkboxes allowed
- if ' ' in checkboxes:
- # Find the first occurrence line
- lines = content.split('\n')
- first_line = 0
- for i, line in enumerate(lines):
- if re.search(r"-\s*\[ \]", line):
- first_line = i
- break
-
- diagnostics.append(self._create_diagnostic(
- f"Incomplete Tasks: Issue in {meta.stage} cannot have unchecked boxes.",
- DiagnosticSeverity.Error,
- line=first_line
- ))
+ # 1. Map Blocks to Sections
+ sections = {"tasks": [], "ac": [], "review": []}
+ current_section = None
+
+ for block in blocks:
+ if block.type == "heading":
+ title = block.content.strip().lower()
+ if "technical tasks" in title:
+ current_section = "tasks"
+ elif "acceptance criteria" in title:
+ current_section = "ac"
+ elif "review comments" in title:
+ current_section = "review"
+ else:
+ current_section = None
+ elif block.type == "task_item":
+ if current_section and current_section in sections:
+ sections[current_section].append(block)
+
+ # 2. Logic: DOING -> Must have defined tasks
+ if meta.stage in ["doing", "review", "done"]:
+ if not sections["tasks"]:
+ # We can't strictly point to a line if section missing, but we can point to top/bottom
+ # Or just a general error.
+ diagnostics.append(self._create_diagnostic(
+ "State Requirement (DOING+): Must define 'Technical Tasks' (at least 1 checkbox).",
+ DiagnosticSeverity.Warning
+ ))
+
+ # 3. Logic: REVIEW -> Tasks must be Completed ([x]) or Cancelled ([~], [+])
+ # No [ ] (ToDo) or [-]/[/] (Doing) allowed.
+ if meta.stage in ["review", "done"]:
+ for block in sections["tasks"]:
+ content = block.content.strip()
+ # Check for explicit illegal states
+ if re.search(r"-\s*\[\s+\]", content):
+ diagnostics.append(self._create_diagnostic(
+ f"State Requirement ({meta.stage.upper()}): Technical Tasks must be resolved. Found Todo [ ]: '{content}'",
+ DiagnosticSeverity.Error,
+ line=block.line_start
+ ))
+ elif re.search(r"-\s*\[[-\/]]", content):
+ diagnostics.append(self._create_diagnostic(
+ f"State Requirement ({meta.stage.upper()}): Technical Tasks must be finished (not Doing). Found Doing [-]: '{content}'",
+ DiagnosticSeverity.Error,
+ line=block.line_start
+ ))
+
+ # 4. Logic: DONE -> AC must be Verified ([x])
+ if meta.stage == "done":
+ for block in sections["ac"]:
+ content = block.content.strip()
+ if not re.search(r"-\s*\[[xX]\]", content):
+ diagnostics.append(self._create_diagnostic(
+ f"State Requirement (DONE): Acceptance Criteria must be passed ([x]). Found: '{content}'",
+ DiagnosticSeverity.Error,
+ line=block.line_start
+ ))
+
+ # 5. Logic: DONE -> Review Checkboxes (if any) must be Resolved ([x] or [~])
+ for block in sections["review"]:
+ content = block.content.strip()
+ # Must be [x], [X], [~], [+]
+ # Therefore [ ], [-], [/] are invalid blocking states
+ if re.search(r"-\s*\[[\s\-\/]\]", content):
+ diagnostics.append(self._create_diagnostic(
+ f"State Requirement (DONE): Actionable Review Comments must be resolved ([x] or [~]). Found: '{content}'",
+ DiagnosticSeverity.Error,
+ line=block.line_start
+ ))
+
  return diagnostics

- def _validate_structure(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
+ def _validate_structure_blocks(self, meta: IssueMetadata, blocks: List[ContentBlock]) -> List[Diagnostic]:
  diagnostics = []
- lines = content.split('\n')

  # 1. Heading check: ## {issue-id}: {issue-title}
  expected_header = f"## {meta.id}: {meta.title}"
@@ -133,19 +231,25 @@
  review_header_found = False
  review_content_found = False

- for i, line in enumerate(lines):
- line_stripped = line.strip()
- if line_stripped == expected_header:
- header_found = True
-
- if line_stripped == "## Review Comments":
- review_header_found = True
- # Check near lines for content
- # This is a naive check (next line is not empty)
- if i + 1 < len(lines) and lines[i+1].strip():
- review_content_found = True
- elif i + 2 < len(lines) and lines[i+2].strip():
- review_content_found = True
+ review_header_index = -1
+
+ for i, block in enumerate(blocks):
+ if block.type == 'heading':
+ stripped = block.content.strip()
+ if stripped == expected_header:
+ header_found = True
+
+ if stripped == "## Review Comments":
+ review_header_found = True
+ review_header_index = i
+
+ # Check content after review header
+ if review_header_found:
+ # Check if there are blocks after review_header_index that are NOT empty
+ for j in range(review_header_index + 1, len(blocks)):
+ if blocks[j].type != 'empty':
+ review_content_found = True
+ break

  if not header_found:
  diagnostics.append(self._create_diagnostic(
@@ -153,7 +257,7 @@
  DiagnosticSeverity.Warning
  ))

- if meta.stage in [IssueStage.REVIEW, IssueStage.DONE]:
+ if meta.stage in ["review", "done"]:
  if not review_header_found:
  diagnostics.append(self._create_diagnostic(
  "Review Requirement: Missing '## Review Comments' section.",
@@ -168,7 +272,7 @@

  def _validate_integrity(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
  diagnostics = []
- if meta.status == IssueStatus.CLOSED and not meta.solution:
+ if meta.status == "closed" and not meta.solution:
  line = self._get_field_line(content, "status")
  diagnostics.append(self._create_diagnostic(
  f"Data Integrity: Closed issue {meta.id} missing 'solution' field.",
@@ -198,6 +302,42 @@
  DiagnosticSeverity.Error,
  line=line
  ))
+
+ # Body Reference Check
+ # Regex for generic issue ID: (EPIC|FEAT|CHORE|FIX)-\d{4}
+ # We scan line by line to get line numbers
+ lines = content.split('\n')
+ # Skip frontmatter for body check to avoid double counting (handled above)
+ in_fm = False
+ fm_end = 0
+ for i, line in enumerate(lines):
+ if line.strip() == '---':
+ if not in_fm: in_fm = True
+ else:
+ fm_end = i
+ break
+
+ for i, line in enumerate(lines):
+ if i <= fm_end: continue # Skip frontmatter
+
+ # Find all matches
+ matches = re.finditer(r"\b((?:EPIC|FEAT|CHORE|FIX)-\d{4})\b", line)
+ for match in matches:
+ ref_id = match.group(1)
+ if ref_id != meta.id and ref_id not in all_ids:
+ # Check if it's a namespaced ID? The regex only catches local IDs.
+ # If users use MON::FEAT-0001, the regex might catch FEAT-0001.
+ # But all_ids contains full IDs (potentially namespaced).
+ # Simple logic: if ref_id isn't in all_ids, check if any id ENDS with ref_id
+
+ found_namespaced = any(known.endswith(f"::{ref_id}") for known in all_ids)
+
+ if not found_namespaced:
+ diagnostics.append(self._create_diagnostic(
+ f"Broken Reference: Issue '{ref_id}' not found.",
+ DiagnosticSeverity.Warning,
+ line=i
+ ))
  return diagnostics

  def _validate_time_consistency(self, meta: IssueMetadata, content: str) -> List[Diagnostic]:
@@ -226,21 +366,20 @@

  return diagnostics

- def _validate_checkbox_logic(self, content: str) -> List[Diagnostic]:
+ def _validate_checkbox_logic_blocks(self, blocks: List[ContentBlock]) -> List[Diagnostic]:
  diagnostics = []
- lines = content.split('\n')

- for i, line in enumerate(lines):
- stripped = line.lstrip()
-
- # Syntax Check: - [?]
- if stripped.startswith("- ["):
- match = re.match(r"- \[([ x\-/])\]", stripped)
+ for block in blocks:
+ if block.type == 'task_item':
+ content = block.content.strip()
+ # Syntax Check: - [?]
+ # Added supported chars: /, ~, +
+ match = re.match(r"- \[([ x\-/~+])\]", content)
  if not match:
  # Check for Common errors
- if re.match(r"- \[.{2,}\]", stripped): # [xx] or [ ]
- diagnostics.append(self._create_diagnostic("Invalid Checkbox: Use single character [ ], [x], [-], [/]", DiagnosticSeverity.Error, i))
- elif re.match(r"- \[([^ x\-/])\]", stripped): # [v], [o]
- diagnostics.append(self._create_diagnostic("Invalid Checkbox Status: Use [ ], [x], [-], [/]", DiagnosticSeverity.Error, i))
+ if re.match(r"- \[.{2,}\]", content): # [xx] or [ ]
+ diagnostics.append(self._create_diagnostic("Invalid Checkbox: Use single character [ ], [x], [-], [/]", DiagnosticSeverity.Error, block.line_start))
+ elif re.match(r"- \[([^ x\-/~+])\]", content): # [v], [o]
+ diagnostics.append(self._create_diagnostic("Invalid Checkbox Status: Use [ ], [x], [/], [~]", DiagnosticSeverity.Error, block.line_start))

  return diagnostics
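Note on the new checkbox semantics: the block-based validator above distinguishes more marker characters than 0.2.4 did ([ ], [x], [-], [/], plus the newly accepted [~] and [+]). A minimal standalone sketch of that classification, inferred from the regexes in the diff rather than taken from any Monoco API:

```python
import re

# Marker semantics inferred from the validator regexes above:
#   [ ]        -> todo        (blocks REVIEW/DONE for Technical Tasks)
#   [-] / [/]  -> doing       (blocks REVIEW/DONE for Technical Tasks)
#   [x] / [X]  -> done
#   [~] / [+]  -> resolved without action (newly accepted in 0.2.6)
CHECKBOX_RE = re.compile(r"^- \[([ xX\-/~+])\]\s*(.*)$")

def classify(line: str) -> str:
    """Classify a single markdown task line into a coarse state."""
    match = CHECKBOX_RE.match(line.strip())
    if not match:
        return "invalid"
    marker = match.group(1)
    if marker == " ":
        return "todo"
    if marker in "-/":
        return "doing"
    if marker in "xX":
        return "done"
    return "resolved"  # '~' or '+'

if __name__ == "__main__":
    for sample in ["- [ ] write tests", "- [/] refactor parser",
                   "- [x] ship it", "- [~] dropped", "- [v] wrong marker"]:
        print(f"{sample!r:30} -> {classify(sample)}")
```

Stage gating in `_validate_state_requirements` then reduces to checking which of these classes still appear under a given heading (for example, no "todo" or "doing" tasks once the issue reaches review).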
monoco/features/spike/commands.py CHANGED
@@ -1,15 +1,18 @@
  import typer
  from pathlib import Path
  from rich.console import Console
+ from typing import Annotated

  from monoco.core.config import get_config
+ from monoco.core.output import AgentOutput, OutputManager
  from . import core

  app = typer.Typer(help="Spike & Repo Management.")
- console = Console()

  @app.command("init")
- def init():
+ def init(
+ json: AgentOutput = False,
+ ):
  """Initialize the Spike environment (gitignore setup)."""
  config = get_config()
  root_dir = Path(config.paths.root)
@@ -20,11 +23,16 @@ def init():
  # Create the directory
  (root_dir / spikes_dir_name).mkdir(exist_ok=True)

- console.print(f"[green]✔[/green] Initialized Spike environment. Added '{spikes_dir_name}/' to .gitignore.")
+ OutputManager.print({
+ "status": "initialized",
+ "directory": spikes_dir_name,
+ "gitignore_updated": True
+ })

  @app.command("add")
  def add_repo(
  url: str = typer.Argument(..., help="Git Repository URL"),
+ json: AgentOutput = False,
  ):
  """Add a new research repository."""
  config = get_config()
@@ -38,13 +46,18 @@ def add_repo(
  name = name[:-4]

  core.update_config_repos(root_dir, name, url)
- console.print(f"[green]✔[/green] Added repo [bold]{name}[/bold] ({url}) to configuration.")
- console.print("Run [bold]monoco spike sync[/bold] to download content.")
+ OutputManager.print({
+ "status": "added",
+ "name": name,
+ "url": url,
+ "message": f"Run 'monoco spike sync' to download content."
+ })

  @app.command("remove")
  def remove_repo(
  name: str = typer.Argument(..., help="Repository Name"),
  force: bool = typer.Option(False, "--force", "-f", help="Force delete physical directory without asking"),
+ json: AgentOutput = False,
  ):
  """Remove a repository from configuration."""
  config = get_config()
@@ -52,30 +65,34 @@
  spikes_dir = root_dir / config.paths.spikes

  if name not in config.project.spike_repos:
- console.print(f"[yellow]![/yellow] Repo [bold]{name}[/bold] not found in configuration.")
+ OutputManager.error(f"Repo {name} not found in configuration.")
  return

  # Remove from config
  core.update_config_repos(root_dir, name, "", remove=True)
- console.print(f"[green]✔[/green] Removed [bold]{name}[/bold] from configuration.")

  target_path = spikes_dir / name
+ deleted = False
  if target_path.exists():
  if force or typer.confirm(f"Do you want to delete the directory {target_path}?", default=False):
  core.remove_repo_dir(spikes_dir, name)
- console.print(f"[gray]✔[/gray] Deleted directory {target_path}.")
+ deleted = True
  else:
- console.print(f"[gray]ℹ[/gray] Directory {target_path} kept.")
+ deleted = False
+
+ OutputManager.print({
+ "status": "removed",
+ "name": name,
+ "directory_deleted": deleted
+ })

  @app.command("sync")
- def sync_repos():
+ def sync_repos(
+ json: AgentOutput = False,
+ ):
  """Sync (Clone/Pull) all configured repositories."""
  # Force reload config to get latest updates
  config = get_config()
- # Note: get_config is a singleton, so for 'add' then 'sync' in same process,
- # we rely on 'add' writing to disk and us reading from memory?
- # Actually, if we run standard CLI "monoco spike add" then "monoco spike sync",
- # they are separate processes, so config loads fresh.

  root_dir = Path(config.paths.root)
  spikes_dir = root_dir / config.paths.spikes
@@ -84,27 +101,31 @@
  repos = config.project.spike_repos

  if not repos:
- console.print("[yellow]No repositories configured.[/yellow] Use 'monoco spike add <url>' first.")
+ OutputManager.print({"status": "empty", "message": "No repositories configured."}, title="Sync")
  return

- console.print(f"Syncing {len(repos)} repositories...")
+ results = []

  for name, url in repos.items():
- core.sync_repo(root_dir, spikes_dir, name, url)
+ try:
+ core.sync_repo(root_dir, spikes_dir, name, url)
+ results.append({"name": name, "status": "synced", "url": url})
+ except Exception as e:
+ results.append({"name": name, "status": "failed", "error": str(e), "url": url})

- console.print("[green]✔[/green] Sync complete.")
+ OutputManager.print(results, title="Sync Results")

- # Alias for list (showing configured repos) could be useful but not strictly asked for.
- # Let's add a simple list command to see what we have.
  @app.command("list")
- def list_repos():
+ def list_repos(
+ json: AgentOutput = False,
+ ):
  """List configured repositories."""
  config = get_config()
  repos = config.project.spike_repos

  if not repos:
- console.print("[yellow]No repositories configured.[/yellow]")
+ OutputManager.print([], title="Repositories")
  return

- for name, url in repos.items():
- console.print(f"- [bold]{name}[/bold]: {url}")
+ data = [{"name": name, "url": url} for name, url in repos.items()]
+ OutputManager.print(data, title="Repositories")
monoco/features/spike/core.py CHANGED
@@ -1,12 +1,12 @@
  import os
  import shutil
  import subprocess
- import yaml
+
  from pathlib import Path
  from typing import Dict, Optional, List, Any
  from rich.console import Console

- from monoco.core.config import get_config
+ from monoco.core.config import get_config, load_raw_config, save_raw_config, ConfigScope

  console = Console()

@@ -29,26 +29,10 @@ def run_git_command(cmd: List[str], cwd: Path) -> bool:
  console.print("[red]Error:[/red] git command not found.")
  return False

- def get_config_file_path(root: Path) -> Path:
- """Determine the config file to update."""
- # Standard: .monoco/config.yaml
- hidden = root / ".monoco" / "config.yaml"
-
- # Ensure parent exists
- hidden.parent.mkdir(exist_ok=True)
- return hidden
-
  def update_config_repos(root: Path, repo_name: str, repo_url: str, remove: bool = False):
  """Update the repos list in the config file."""
- config_path = get_config_file_path(root)
-
- data = {}
- if config_path.exists():
- try:
- with open(config_path, "r") as f:
- data = yaml.safe_load(f) or {}
- except Exception:
- data = {}
+ # Use core config utils
+ data = load_raw_config(ConfigScope.PROJECT, project_root=str(root))

  # Ensure structure exists
  if "project" not in data:
@@ -62,8 +46,7 @@ def update_config_repos(root: Path, repo_name: str, repo_url: str, remove: bool
  else:
  data["project"]["spike_repos"][repo_name] = repo_url

- with open(config_path, "w") as f:
- yaml.dump(data, f, sort_keys=False, default_flow_style=False)
+ save_raw_config(ConfigScope.PROJECT, data, project_root=str(root))

  def ensure_gitignore(root: Path, target_dir_name: str):
  """Ensure the target directory is in .gitignore."""
monoco/main.py CHANGED
@@ -63,7 +63,7 @@ def main(
  NO_WORKSPACE_COMMANDS = ["init", "clone"]

  # Initialize Config
- from monoco.core.config import get_config
+ from monoco.core.config import get_config, find_monoco_root
  from pathlib import Path

  # If subcommand is not in whitelist, we enforce workspace
@@ -74,7 +74,16 @@
  try:
  # We pass root if provided. If require_workspace is True, get_config will throw if not found.
  # Note: If root is None, it defaults to CWD in get_config.
- get_config(project_root=root, require_project=require_workspace)
+
+ # Auto-discover root if not provided
+ config_root = root
+ if config_root is None:
+ discovered = find_monoco_root()
+ # Only use discovered root if it actually has .monoco
+ if (discovered / ".monoco").exists():
+ config_root = str(discovered)
+
+ get_config(project_root=config_root, require_project=require_workspace)
  except FileNotFoundError as e:
  # Graceful exit for workspace errors
  from rich.console import Console
@@ -158,20 +167,5 @@ app.add_typer(agent_cmd.app, name="agent", help="Delegate tasks to Agent CLIs")
  from monoco.daemon.commands import serve
  app.command(name="serve")(serve)

- @app.command()
- def pty(
- host: str = "127.0.0.1",
- port: int = 3124,
- cwd: Optional[str] = None
- ):
- """
- Start the Monoco PTY Daemon (WebSocket).
- """
- from monoco.features.pty.server import run_pty_server
- from pathlib import Path
-
- path = Path(cwd) if cwd else None
- run_pty_server(host, port, path)
-
  if __name__ == "__main__":
  app()
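`find_monoco_root` itself is not part of this diff. A plausible upward-walk sketch that matches how the caller uses it (note the caller still re-checks for `.monoco` before trusting the result) might look like the following; this is illustrative, not the shipped implementation:

```python
from pathlib import Path
from typing import Optional

def find_monoco_root(start: Optional[Path] = None) -> Path:
    """Walk upward from `start` (default: CWD) looking for a `.monoco` directory."""
    current = (start or Path.cwd()).resolve()
    for candidate in (current, *current.parents):
        if (candidate / ".monoco").is_dir():
            return candidate
    # No marker found: fall back to the starting directory, which is why the
    # caller above re-checks `(discovered / ".monoco").exists()` before using it.
    return current
```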
{monoco_toolkit-0.2.4.dist-info → monoco_toolkit-0.2.6.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: monoco-toolkit
- Version: 0.2.4
+ Version: 0.2.6
  Summary: Agent Native Toolkit for Monoco - Task Management & Kanban for AI Agents
  Project-URL: Homepage, https://monoco.io
  Project-URL: Repository, https://github.com/IndenScale/Monoco