monoco-toolkit 0.3.9-py3-none-any.whl → 0.3.10-py3-none-any.whl
This diff compares the contents of publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
- monoco/core/config.py +7 -0
- monoco/core/hooks/builtin/git_cleanup.py +1 -1
- monoco/core/injection.py +63 -29
- monoco/core/integrations.py +2 -2
- monoco/core/output.py +5 -5
- monoco/core/registry.py +7 -1
- monoco/core/resource/__init__.py +5 -0
- monoco/core/resource/finder.py +98 -0
- monoco/core/resource/manager.py +91 -0
- monoco/core/resource/models.py +35 -0
- monoco/core/resources/en/{SKILL.md → skills/monoco_core/SKILL.md} +2 -0
- monoco/core/resources/zh/{SKILL.md → skills/monoco_core/SKILL.md} +2 -0
- monoco/core/skill_framework.py +292 -0
- monoco/core/skills.py +471 -371
- monoco/core/sync.py +73 -1
- monoco/core/workflow_converter.py +420 -0
- monoco/features/agent/__init__.py +2 -2
- monoco/features/agent/adapter.py +31 -0
- monoco/features/agent/apoptosis.py +44 -0
- monoco/features/agent/cli.py +101 -144
- monoco/features/agent/config.py +35 -21
- monoco/features/agent/defaults.py +6 -49
- monoco/features/agent/engines.py +32 -6
- monoco/features/agent/manager.py +6 -1
- monoco/features/agent/models.py +2 -2
- monoco/features/agent/resources/atoms/atom-code-dev.yaml +61 -0
- monoco/features/agent/resources/atoms/atom-issue-lifecycle.yaml +73 -0
- monoco/features/agent/resources/atoms/atom-knowledge.yaml +55 -0
- monoco/features/agent/resources/atoms/atom-review.yaml +60 -0
- monoco/features/agent/resources/en/skills/flow_engineer/SKILL.md +94 -0
- monoco/features/agent/resources/en/skills/flow_manager/SKILL.md +93 -0
- monoco/features/agent/resources/en/skills/flow_planner/SKILL.md +85 -0
- monoco/features/agent/resources/en/skills/flow_reviewer/SKILL.md +114 -0
- monoco/features/agent/resources/roles/role-engineer.yaml +49 -0
- monoco/features/agent/resources/roles/role-manager.yaml +46 -0
- monoco/features/agent/resources/roles/role-planner.yaml +46 -0
- monoco/features/agent/resources/roles/role-reviewer.yaml +47 -0
- monoco/features/agent/resources/workflows/workflow-dev.yaml +83 -0
- monoco/features/agent/resources/workflows/workflow-issue-create.yaml +72 -0
- monoco/features/agent/resources/workflows/workflow-review.yaml +94 -0
- monoco/features/agent/resources/zh/skills/flow_planner/SKILL.md +259 -0
- monoco/features/agent/resources/zh/skills/flow_reviewer/SKILL.md +137 -0
- monoco/features/agent/worker.py +38 -2
- monoco/features/glossary/__init__.py +0 -0
- monoco/features/glossary/adapter.py +31 -0
- monoco/features/glossary/config.py +5 -0
- monoco/features/glossary/resources/en/AGENTS.md +29 -0
- monoco/features/glossary/resources/en/skills/monoco_glossary/SKILL.md +35 -0
- monoco/features/glossary/resources/zh/AGENTS.md +29 -0
- monoco/features/glossary/resources/zh/skills/monoco_glossary/SKILL.md +35 -0
- monoco/features/i18n/resources/en/skills/i18n_scan_workflow/SKILL.md +105 -0
- monoco/features/i18n/resources/en/{SKILL.md → skills/monoco_i18n/SKILL.md} +2 -0
- monoco/features/i18n/resources/zh/{SKILL.md → skills/monoco_i18n/SKILL.md} +2 -0
- monoco/features/issue/core.py +45 -6
- monoco/features/issue/engine/machine.py +5 -2
- monoco/features/issue/models.py +1 -0
- monoco/features/issue/resources/en/skills/issue_create_workflow/SKILL.md +167 -0
- monoco/features/issue/resources/en/skills/issue_develop_workflow/SKILL.md +224 -0
- monoco/features/issue/resources/en/skills/issue_lifecycle_workflow/SKILL.md +159 -0
- monoco/features/issue/resources/en/skills/issue_refine_workflow/SKILL.md +203 -0
- monoco/features/issue/resources/en/{SKILL.md → skills/monoco_issue/SKILL.md} +2 -0
- monoco/features/issue/resources/zh/skills/issue_create_workflow/SKILL.md +167 -0
- monoco/features/issue/resources/zh/skills/issue_develop_workflow/SKILL.md +224 -0
- monoco/features/issue/resources/zh/skills/issue_refine_workflow/SKILL.md +203 -0
- monoco/features/issue/resources/zh/{SKILL.md → skills/monoco_issue/SKILL.md} +2 -0
- monoco/features/memo/resources/en/skills/monoco_memo/SKILL.md +77 -0
- monoco/features/memo/resources/en/skills/note_processing_workflow/SKILL.md +140 -0
- monoco/features/memo/resources/zh/{SKILL.md → skills/monoco_memo/SKILL.md} +2 -0
- monoco/features/spike/resources/en/{SKILL.md → skills/monoco_spike/SKILL.md} +2 -0
- monoco/features/spike/resources/en/skills/research_workflow/SKILL.md +121 -0
- monoco/features/spike/resources/zh/{SKILL.md → skills/monoco_spike/SKILL.md} +2 -0
- monoco_toolkit-0.3.10.dist-info/METADATA +124 -0
- monoco_toolkit-0.3.10.dist-info/RECORD +156 -0
- monoco/features/agent/reliability.py +0 -106
- monoco/features/agent/resources/skills/flow_reviewer/SKILL.md +0 -114
- monoco_toolkit-0.3.9.dist-info/METADATA +0 -127
- monoco_toolkit-0.3.9.dist-info/RECORD +0 -115
- /monoco/features/agent/resources/{skills → zh/skills}/flow_engineer/SKILL.md +0 -0
- /monoco/features/agent/resources/{skills → zh/skills}/flow_manager/SKILL.md +0 -0
- /monoco/features/i18n/resources/{skills → zh/skills}/i18n_scan_workflow/SKILL.md +0 -0
- /monoco/features/issue/resources/{skills → zh/skills}/issue_lifecycle_workflow/SKILL.md +0 -0
- /monoco/features/memo/resources/{skills → zh/skills}/note_processing_workflow/SKILL.md +0 -0
- /monoco/features/spike/resources/{skills → zh/skills}/research_workflow/SKILL.md +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.10.dist-info}/WHEEL +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.10.dist-info}/entry_points.txt +0 -0
- {monoco_toolkit-0.3.9.dist-info → monoco_toolkit-0.3.10.dist-info}/licenses/LICENSE +0 -0
monoco/core/config.py
CHANGED
@@ -91,6 +91,7 @@ class TransitionConfig(BaseModel):
     required_solution: Optional[str] = None
     description: str = ""
     command_template: Optional[str] = None
+    post_actions: List[str] = Field(default_factory=list)


 class CriticalityRuleConfig(BaseModel):
@@ -135,6 +136,11 @@ class CriticalityConfig(BaseModel):
         return self


+class AgentConfig(BaseModel):
+    """Configuration for AI Agents."""
+    timeout_seconds: int = Field(default=900, description="Global timeout for agent sessions")
+
+
 class IssueSchemaConfig(BaseModel):
     types: List[IssueTypeConfig] = Field(default_factory=list)
     statuses: List[str] = Field(default_factory=list)
@@ -241,6 +247,7 @@ class MonocoConfig(BaseModel):

     issue: IssueSchemaConfig = Field(default_factory=IssueSchemaConfig)
     domains: DomainConfig = Field(default_factory=DomainConfig)
+    agent: AgentConfig = Field(default_factory=AgentConfig)

     @staticmethod
     def _deep_merge(base: Dict[str, Any], update: Dict[str, Any]) -> Dict[str, Any]:
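config.py adds an AgentConfig section (a global agent session timeout) and wires it into MonocoConfig, alongside a new post_actions list on TransitionConfig. A standalone sketch of the added fields, re-declared here with pydantic for illustration rather than imported from the package:

```python
# Standalone sketch of the fields added in this diff; re-declared for illustration,
# not imported from the monoco package.
from typing import List, Optional
from pydantic import BaseModel, Field

class AgentConfig(BaseModel):
    """Configuration for AI Agents."""
    timeout_seconds: int = Field(default=900, description="Global timeout for agent sessions")

class TransitionConfig(BaseModel):
    required_solution: Optional[str] = None
    description: str = ""
    command_template: Optional[str] = None
    post_actions: List[str] = Field(default_factory=list)  # new in 0.3.10

print(AgentConfig().timeout_seconds)                     # 900 (default)
print(AgentConfig(timeout_seconds=300).timeout_seconds)  # 300
print(TransitionConfig().post_actions)                   # []
```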
monoco/core/hooks/builtin/git_cleanup.py
CHANGED
@@ -31,7 +31,7 @@ class GitCleanupHook(SessionLifecycleHook):

         # Configuration with defaults
         self.auto_switch_to_main = self.config.get("auto_switch_to_main", True)
-        self.auto_delete_merged_branches = self.config.get("auto_delete_merged_branches",
+        self.auto_delete_merged_branches = self.config.get("auto_delete_merged_branches", False)
         self.main_branch = self.config.get("main_branch", "main")
         self.require_clean_worktree = self.config.get("require_clean_worktree", True)

monoco/core/injection.py
CHANGED
@@ -10,6 +10,8 @@ class PromptInjector:
     """

     MANAGED_HEADER = "## Monoco Toolkit"
+    MANAGED_START = "<!-- MONOCO_GENERATED_START -->"
+    MANAGED_END = "<!-- MONOCO_GENERATED_END -->"

     def __init__(self, target_file: Path):
         self.target_file = target_file
@@ -52,19 +54,40 @@ class PromptInjector:
         # Sanitize content: remove leading header if it matches the title
         clean_content = content.strip()
         # Regex to match optional leading hash header matching the title (case insensitive)
-        # e.g. "### Issue Management" or "# Issue Management"
         pattern = r"^(#+\s*)" + re.escape(title) + r"\s*\n"
         match = re.match(pattern, clean_content, re.IGNORECASE)

         if match:
             clean_content = clean_content[match.end() :].strip()
-
-
+
+        # Demote headers in content to be below ### (so start at ####)
+        # We assume the content headers start at # or ##.
+        # We map # -> ####, ## -> #####, etc. (+3 offset)
+        demoted_content = []
+        for line in clean_content.splitlines():
+            if line.lstrip().startswith("#"):
+                demoted_content.append("###" + line)
+            else:
+                demoted_content.append(line)
+
+        managed_block.append("\n".join(demoted_content))
         managed_block.append("")  # Blank line after section

         managed_block_str = "\n".join(managed_block).strip() + "\n"
+        managed_block_str = f"{self.MANAGED_START}\n{managed_block_str}\n{self.MANAGED_END}\n"

         # 2. Find and replace/append in the original content
+        # Check for delimiters first
+        if self.MANAGED_START in original and self.MANAGED_END in original:
+            try:
+                pre = original.split(self.MANAGED_START)[0]
+                post = original.split(self.MANAGED_END)[1]
+                # Reconstruct
+                return pre + managed_block_str.strip() + post
+            except IndexError:
+                # Fallback to header detection if delimiters malformed
+                pass
+
         lines = original.splitlines()
         start_idx = -1
         end_idx = -1
@@ -74,31 +97,29 @@ class PromptInjector:
             if line.strip() == self.MANAGED_HEADER:
                 start_idx = i
                 break
+
+        if start_idx == -1:
+            # Check if we have delimiters even if header is missing/changed?
+            # Handled above.
+            pass

         if start_idx == -1:
             # Block not found, append to end
             if original and not original.endswith("\n"):
-                return original + "\n\n" + managed_block_str
+                return original + "\n\n" + managed_block_str.strip()
             elif original:
-                return original + "\n" + managed_block_str
+                return original + "\n" + managed_block_str.strip()
             else:
-                return managed_block_str
-
-        # Find end: Look for next header of level 1 (assuming Managed Header is H1)
-        # Or EOF
-        # Note: If MANAGED_HEADER is "# ...", we look for next "# ..."
-        # But allow "## ..." as children.
+                return managed_block_str.strip() + "\n"

+        # Find end: Look for next header of level 1 or 2 (siblings or parents)
         header_level_match = re.match(r"^(#+)\s", self.MANAGED_HEADER)
-        header_level_prefix = header_level_match.group(1) if header_level_match else "
+        header_level_prefix = header_level_match.group(1) if header_level_match else "##"

         for i in range(start_idx + 1, len(lines)):
             line = lines[i]
             # Check if this line is a header of the same level or higher (fewer #s)
-            # e.g. if Managed is "###", then "#" and "##" are higher/parents, "###" is sibling.
-            # We treat siblings as end of block too.
             if line.startswith("#"):
-                # Match regex to get level
                 match = re.match(r"^(#+)\s", line)
                 if match:
                     level = match.group(1)
@@ -146,26 +167,39 @@ class PromptInjector:

         # Find start
         for i, line in enumerate(lines):
-            if
+            if self.MANAGED_START in line:
                 start_idx = i
+                # Look for end from here
+                for j in range(i, len(lines)):
+                    if self.MANAGED_END in lines[j]:
+                        end_idx = j + 1  # Include the end line
+                        break
                 break
+
+        if start_idx == -1:
+            # Fallback to header logic
+            for i, line in enumerate(lines):
+                if line.strip() == self.MANAGED_HEADER:
+                    start_idx = i
+                    break

         if start_idx == -1:
             return False

-
-
-
-
-
-
-
-
-
-
-
-
-
+        if end_idx == -1:
+            # Find end: exact logic as in _merge_content
+            header_level_match = re.match(r"^(#+)\s", self.MANAGED_HEADER)
+            header_level_prefix = header_level_match.group(1) if header_level_match else "##"
+
+            for i in range(start_idx + 1, len(lines)):
+                line = lines[i]
+                if line.startswith("#"):
+                    match = re.match(r"^(#+)\s", line)
+                    if match:
+                        level = match.group(1)
+                        if len(level) <= len(header_level_prefix):
+                            end_idx = i
+                            break

         if end_idx == -1:
             end_idx = len(lines)
monoco/core/integrations.py
CHANGED
@@ -129,8 +129,8 @@ DEFAULT_INTEGRATIONS: Dict[str, AgentIntegration] = {
     "kimi": AgentIntegration(
         key="kimi",
         name="Kimi CLI",
-        system_prompt_file="
-        skill_root_dir=".
+        system_prompt_file="AGENTS.md",
+        skill_root_dir=".agent/skills/",
         bin_name="kimi",
         version_cmd="--version",
     ),
monoco/core/output.py
CHANGED
@@ -1,7 +1,7 @@
 import os
 import json
 import typer
-from typing import Any, List, Union, Annotated
+from typing import Any, List, Union, Annotated, Optional
 from pydantic import BaseModel
 from rich.console import Console
 from rich.table import Table
@@ -41,7 +41,7 @@ class OutputManager:

     @staticmethod
     def print(
-        data: Union[BaseModel, List[BaseModel], dict, list, str], title: str = ""
+        data: Union[BaseModel, List[BaseModel], dict, list, str], title: str = "", style: Optional[str] = None
     ):
         """
         Dual frontend dispatcher.
@@ -49,7 +49,7 @@ class OutputManager:
         if OutputManager.is_agent_mode():
             OutputManager._render_agent(data)
         else:
-            OutputManager._render_human(data, title)
+            OutputManager._render_human(data, title, style=style)

     @staticmethod
     def error(message: str):
@@ -94,7 +94,7 @@ class OutputManager:
             print(str(data))

     @staticmethod
-    def _render_human(data: Any, title: str):
+    def _render_human(data: Any, title: str, style: Optional[str] = None):
         """
         Human channel: Visual priority.
         """
@@ -104,7 +104,7 @@ class OutputManager:
             console.rule(f"[bold blue]{title}[/bold blue]")

         if isinstance(data, str):
-            console.print(data)
+            console.print(data, style=style)
             return

         # Special handling for Lists of Pydantic Models -> Table
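OutputManager.print() now takes an optional style string that is forwarded to rich's console.print() in human mode and ignored in agent mode. A usage sketch, assuming the class is importable from monoco.core.output as the file path suggests:

```python
# Usage sketch only; the import path is inferred from the file path in this diff.
from monoco.core.output import OutputManager

# In human mode the string is rendered by rich with the given style;
# in agent mode the data is emitted as before and the style is ignored.
OutputManager.print("Sync complete", title="Monoco", style="bold green")
OutputManager.print({"synced": 12, "skipped": 3}, title="Summary")
```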
monoco/core/registry.py
CHANGED
@@ -36,4 +36,10 @@ class FeatureRegistry:
         cls.register(SpikeFeature())
         cls.register(I18nFeature())
         cls.register(MemoFeature())
-
+
+
+        from monoco.features.glossary.adapter import GlossaryFeature
+        cls.register(GlossaryFeature())
+
+        from monoco.features.agent.adapter import AgentFeature
+        cls.register(AgentFeature())
monoco/core/resource/finder.py
ADDED
@@ -0,0 +1,98 @@
+import sys
+from pathlib import Path
+from typing import List, Generator, Union
+import importlib.util
+
+# Use standard importlib.resources for Python 3.9+
+if sys.version_info < (3, 9):
+    # Fallback or error - for now assume 3.9+ as this is a modern toolkit
+    raise RuntimeError("Monoco requires Python 3.9+")
+from importlib.resources import files, as_file
+
+from .models import ResourceNode, ResourceType
+
+class ResourceFinder:
+    """
+    Scans Python packages for Monoco standard resources.
+    Standard Layout: <package>/resources/<lang>/<type>/<file>
+    """
+
+    def scan_package(self, package_name: str) -> List[ResourceNode]:
+        """
+        Traverses the 'resources' directory of a given package.
+        Returns a flat list of ResourceNode objects.
+        """
+        nodes = []
+
+        # Check if package exists
+        if not importlib.util.find_spec(package_name):
+            return []
+
+        try:
+            pkg_root = files(package_name)
+            resources_root = pkg_root.joinpath("resources")
+
+            if not resources_root.is_dir():
+                return []
+
+            # Iterate over languages (direct children of resources/)
+            for lang_dir in resources_root.iterdir():
+                if not lang_dir.is_dir() or lang_dir.name.startswith("_"):
+                    continue
+
+                lang = lang_dir.name
+
+                # Iterate over resource types (children of lang/)
+                for type_dir in lang_dir.iterdir():
+                    if not type_dir.is_dir() or type_dir.name.startswith("_"):
+                        continue
+
+                    try:
+                        res_type = ResourceType(type_dir.name)
+                    except ValueError:
+                        res_type = ResourceType.OTHER
+
+                    # Iterate over files (children of type/)
+                    # Note: This effectively supports shallow structure.
+                    # For recursive (like skills folders), we might need recursion.
+                    # For now, let's assume flat files or folders treated as units (like flow skill dirs).
+
+                    for item in type_dir.iterdir():
+                        # For skills, the item might be a directory (Flow Skill)
+                        # We treat the directory path as the resource path in that case?
+                        # Or we recursively scan?
+                        # ResourceNode expects a path.
+
+                        # Use as_file to ensure we have a filesystem path (needed for symlinks/copy)
+                        with as_file(item) as item_path:
+                            # Note: as_file context manager keeps the temporary file alive if extracted from zip.
+                            # But here we probably want the path to persist?
+                            # if it's a real file system, item_path is the real path.
+
+                            if item.is_dir():
+                                # Flow skills are directories
+                                # We add the directory itself as a node?
+                                if res_type == ResourceType.SKILLS:
+                                    nodes.append(ResourceNode(
+                                        name=item.name,
+                                        path=item_path,
+                                        type=res_type,
+                                        language=lang
+                                    ))
+                            elif item.is_file():
+                                if item.name.startswith("."):
+                                    continue
+
+                                nodes.append(ResourceNode(
+                                    name=item.name,
+                                    path=item_path,
+                                    type=res_type,
+                                    language=lang
+                                ))
+
+        except Exception as e:
+            # gracefully handle errors, maybe log?
+            print(f"Warning: Error scanning resources in {package_name}: {e}")
+            return []
+
+        return nodes
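A usage sketch of the new finder, assuming it is importable from monoco.core.resource.finder (matching the module paths above) and pointed at a feature package that ships a resources/ directory:

```python
# Usage sketch; the import paths and package name are inferred from this diff's file layout.
from monoco.core.resource.finder import ResourceFinder
from monoco.core.resource.models import ResourceType

finder = ResourceFinder()
nodes = finder.scan_package("monoco.features.agent")  # walks resources/<lang>/<type>/...
for node in nodes:
    if node.type == ResourceType.SKILLS and node.language == "en":
        print(node.name, node.path)
```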
monoco/core/resource/manager.py
ADDED
@@ -0,0 +1,91 @@
+from typing import List, Optional, Dict
+from pathlib import Path
+import shutil
+import os
+
+from .models import ResourceNode, ResourceType
+from .finder import ResourceFinder
+
+class ResourceManager:
+    def __init__(self, source_lang: str = "en"):
+        self.finder = ResourceFinder()
+        self.source_lang = source_lang
+
+    def list_resources(self, package: str, type: Optional[ResourceType] = None, lang: Optional[str] = None) -> List[ResourceNode]:
+        """
+        Low-level listing of resources with optional exact filtering.
+        """
+        all_nodes = self.finder.scan_package(package)
+        filtered = []
+        for node in all_nodes:
+            if type and node.type != type:
+                continue
+            if lang and node.language != lang:
+                continue
+            filtered.append(node)
+        return filtered
+
+    def get_merged_resources(self, package: str, type: ResourceType, target_lang: str) -> List[ResourceNode]:
+        """
+        Get resources of a specific type, merging source language defaults with target language overrides.
+        Returns a list of unique resources (by name), prioritizing target_lang.
+        """
+        all_nodes = self.finder.scan_package(package)
+        type_nodes = [n for n in all_nodes if n.type == type]
+
+        # Dictionary to hold the best match for each filename: name -> ResourceNode
+        best_matches: Dict[str, ResourceNode] = {}
+
+        # 1. Populate with source language (Default Base)
+        for node in type_nodes:
+            if node.language == self.source_lang:
+                best_matches[node.name] = node
+
+        # 2. Override with target language if different
+        if target_lang != self.source_lang:
+            for node in type_nodes:
+                if node.language == target_lang:
+                    best_matches[node.name] = node
+
+        return list(best_matches.values())
+
+    def extract_to(self, nodes: List[ResourceNode], destination: Path, symlink: bool = False, force: bool = True) -> int:
+        """
+        Extracts (copy or symlink) resources to the destination directory.
+        Returns count of extracted items.
+
+        Args:
+            nodes: List of resources to extract
+            destination: Target directory
+            symlink: If True, create symbolic links instead of copying
+            force: If True, overwrite existing files/symlinks
+        """
+        destination.mkdir(parents=True, exist_ok=True)
+        count = 0
+
+        for node in nodes:
+            dest_path = destination / node.name
+
+            if dest_path.exists():
+                if not force:
+                    continue
+                # Remove existing
+                if dest_path.is_symlink() or dest_path.is_file():
+                    dest_path.unlink()
+                elif dest_path.is_dir():
+                    shutil.rmtree(dest_path)
+
+            try:
+                if symlink:
+                    # Symlink target must be absolute
+                    dest_path.symlink_to(node.path)
+                else:
+                    if node.path.is_dir():
+                        shutil.copytree(node.path, dest_path)
+                    else:
+                        shutil.copy2(node.path, dest_path)
+                count += 1
+            except Exception as e:
+                print(f"Error extracting {node.name}: {e}")
+
+        return count
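A usage sketch combining the manager's language merge with extraction into an agent skill directory (the .agent/skills/ destination mirrors the integrations.py change above; import paths are inferred from this diff):

```python
# Usage sketch; paths, package names, and import paths are inferred from this diff.
from pathlib import Path
from monoco.core.resource.manager import ResourceManager
from monoco.core.resource.models import ResourceType

manager = ResourceManager(source_lang="en")
# English defaults, overridden by zh variants where they exist:
skills = manager.get_merged_resources("monoco.features.issue", ResourceType.SKILLS, target_lang="zh")
# Copy (or symlink) the winning set into an agent's skill directory:
copied = manager.extract_to(skills, Path(".agent/skills"), symlink=False, force=True)
print(f"extracted {copied} skill(s)")
```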
monoco/core/resource/models.py
ADDED
@@ -0,0 +1,35 @@
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Optional, List
+from enum import Enum
+
+class ResourceType(str, Enum):
+    PROMPTS = "prompts"
+    RULES = "rules"
+    SKILLS = "skills"
+    ROLES = "roles"
+    GLOSSARY = "glossary"
+    TEMPLATES = "templates"
+    DOCS = "docs"
+    OTHER = "other"
+
+@dataclass
+class ResourceNode:
+    """
+    Represents a discovered resource file in a Python package.
+    """
+    name: str
+    path: Path  # Absolute path to the source file
+    type: ResourceType
+    language: str  # "en", "zh", etc.
+    content: Optional[str] = None  # Lazy loaded content
+
+    @property
+    def key(self) -> str:
+        """Unique identifier for the resource (e.g. 'agent.prompts.system')"""
+        return f"{self.type.value}.{self.name}"
+
+    def read_text(self) -> str:
+        if self.content:
+            return self.content
+        return self.path.read_text(encoding="utf-8")