luckyd-code 1.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127) hide show
  1. luckyd_code/__init__.py +54 -0
  2. luckyd_code/__main__.py +5 -0
  3. luckyd_code/_agent_loop.py +551 -0
  4. luckyd_code/_data_dir.py +73 -0
  5. luckyd_code/agent.py +38 -0
  6. luckyd_code/analytics/__init__.py +18 -0
  7. luckyd_code/analytics/reporter.py +195 -0
  8. luckyd_code/analytics/scanner.py +443 -0
  9. luckyd_code/analytics/smells.py +316 -0
  10. luckyd_code/analytics/trends.py +303 -0
  11. luckyd_code/api.py +473 -0
  12. luckyd_code/audit_daemon.py +845 -0
  13. luckyd_code/autonomous_fixer.py +473 -0
  14. luckyd_code/background.py +159 -0
  15. luckyd_code/backup.py +237 -0
  16. luckyd_code/brain/__init__.py +84 -0
  17. luckyd_code/brain/assembler.py +100 -0
  18. luckyd_code/brain/chunker.py +345 -0
  19. luckyd_code/brain/constants.py +73 -0
  20. luckyd_code/brain/embedder.py +163 -0
  21. luckyd_code/brain/graph.py +311 -0
  22. luckyd_code/brain/indexer.py +316 -0
  23. luckyd_code/brain/parser.py +140 -0
  24. luckyd_code/brain/retriever.py +234 -0
  25. luckyd_code/cli.py +894 -0
  26. luckyd_code/cli_commands/__init__.py +1 -0
  27. luckyd_code/cli_commands/audit.py +120 -0
  28. luckyd_code/cli_commands/background.py +83 -0
  29. luckyd_code/cli_commands/brain.py +87 -0
  30. luckyd_code/cli_commands/config.py +75 -0
  31. luckyd_code/cli_commands/dispatcher.py +695 -0
  32. luckyd_code/cli_commands/sessions.py +41 -0
  33. luckyd_code/cli_entry.py +147 -0
  34. luckyd_code/cli_utils.py +112 -0
  35. luckyd_code/config.py +205 -0
  36. luckyd_code/context.py +214 -0
  37. luckyd_code/cost_tracker.py +209 -0
  38. luckyd_code/error_reporter.py +508 -0
  39. luckyd_code/exceptions.py +39 -0
  40. luckyd_code/export.py +126 -0
  41. luckyd_code/feedback_analyzer.py +290 -0
  42. luckyd_code/file_watcher.py +258 -0
  43. luckyd_code/git/__init__.py +11 -0
  44. luckyd_code/git/auto_commit.py +157 -0
  45. luckyd_code/git/tools.py +85 -0
  46. luckyd_code/hooks.py +236 -0
  47. luckyd_code/indexer.py +280 -0
  48. luckyd_code/init.py +39 -0
  49. luckyd_code/keybindings.py +77 -0
  50. luckyd_code/log.py +55 -0
  51. luckyd_code/mcp/__init__.py +6 -0
  52. luckyd_code/mcp/client.py +184 -0
  53. luckyd_code/memory/__init__.py +19 -0
  54. luckyd_code/memory/manager.py +339 -0
  55. luckyd_code/metrics/__init__.py +5 -0
  56. luckyd_code/model_registry.py +131 -0
  57. luckyd_code/orchestrator.py +204 -0
  58. luckyd_code/permissions/__init__.py +1 -0
  59. luckyd_code/permissions/manager.py +103 -0
  60. luckyd_code/planner.py +361 -0
  61. luckyd_code/plugins.py +91 -0
  62. luckyd_code/py.typed +0 -0
  63. luckyd_code/retry.py +57 -0
  64. luckyd_code/router.py +417 -0
  65. luckyd_code/sandbox.py +156 -0
  66. luckyd_code/self_critique.py +2 -0
  67. luckyd_code/self_improve.py +274 -0
  68. luckyd_code/sessions.py +114 -0
  69. luckyd_code/settings.py +72 -0
  70. luckyd_code/skills/__init__.py +8 -0
  71. luckyd_code/skills/review.py +22 -0
  72. luckyd_code/skills/security.py +17 -0
  73. luckyd_code/tasks/__init__.py +1 -0
  74. luckyd_code/tasks/manager.py +102 -0
  75. luckyd_code/templates/icon-192.png +0 -0
  76. luckyd_code/templates/icon-512.png +0 -0
  77. luckyd_code/templates/index.html +1965 -0
  78. luckyd_code/templates/manifest.json +14 -0
  79. luckyd_code/templates/src/app.js +694 -0
  80. luckyd_code/templates/src/body.html +767 -0
  81. luckyd_code/templates/src/cdn.txt +2 -0
  82. luckyd_code/templates/src/style.css +474 -0
  83. luckyd_code/templates/sw.js +31 -0
  84. luckyd_code/templates/test.html +6 -0
  85. luckyd_code/themes.py +48 -0
  86. luckyd_code/tools/__init__.py +97 -0
  87. luckyd_code/tools/agent_tools.py +65 -0
  88. luckyd_code/tools/bash.py +360 -0
  89. luckyd_code/tools/brain_tools.py +137 -0
  90. luckyd_code/tools/browser.py +369 -0
  91. luckyd_code/tools/datetime_tool.py +34 -0
  92. luckyd_code/tools/dockerfile_gen.py +212 -0
  93. luckyd_code/tools/file_ops.py +381 -0
  94. luckyd_code/tools/game_gen.py +360 -0
  95. luckyd_code/tools/git_tools.py +130 -0
  96. luckyd_code/tools/git_worktree.py +63 -0
  97. luckyd_code/tools/path_validate.py +64 -0
  98. luckyd_code/tools/project_gen.py +187 -0
  99. luckyd_code/tools/readme_gen.py +227 -0
  100. luckyd_code/tools/registry.py +157 -0
  101. luckyd_code/tools/shell_detect.py +109 -0
  102. luckyd_code/tools/web.py +89 -0
  103. luckyd_code/tools/youtube.py +187 -0
  104. luckyd_code/tools_bridge.py +144 -0
  105. luckyd_code/undo.py +126 -0
  106. luckyd_code/update.py +60 -0
  107. luckyd_code/verify.py +360 -0
  108. luckyd_code/web_app.py +176 -0
  109. luckyd_code/web_routes/__init__.py +23 -0
  110. luckyd_code/web_routes/background.py +73 -0
  111. luckyd_code/web_routes/brain.py +109 -0
  112. luckyd_code/web_routes/cost.py +12 -0
  113. luckyd_code/web_routes/files.py +133 -0
  114. luckyd_code/web_routes/memories.py +94 -0
  115. luckyd_code/web_routes/misc.py +67 -0
  116. luckyd_code/web_routes/project.py +48 -0
  117. luckyd_code/web_routes/review.py +20 -0
  118. luckyd_code/web_routes/sessions.py +44 -0
  119. luckyd_code/web_routes/settings.py +43 -0
  120. luckyd_code/web_routes/static.py +70 -0
  121. luckyd_code/web_routes/update.py +19 -0
  122. luckyd_code/web_routes/ws.py +237 -0
  123. luckyd_code-1.2.2.dist-info/METADATA +297 -0
  124. luckyd_code-1.2.2.dist-info/RECORD +127 -0
  125. luckyd_code-1.2.2.dist-info/WHEEL +4 -0
  126. luckyd_code-1.2.2.dist-info/entry_points.txt +3 -0
  127. luckyd_code-1.2.2.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,187 @@
1
+ """Project scaffold generator tool.
2
+
3
+ Generates complete, ready-to-run project scaffolds from a plain-English
4
+ description using the DeepSeek model. Writes every file directly to disk.
5
+
6
+ Any project type is supported — web app, CLI tool, REST API, Discord bot,
7
+ data pipeline, etc. Just describe what you want.
8
+ """
9
+
10
+ from __future__ import annotations
11
+
12
+ import json
13
+ import urllib.request
14
+ from pathlib import Path
15
+
16
+ from .registry import Tool
17
+
18
# ---------------------------------------------------------------------------
# Prompt for the scaffold generator
# ---------------------------------------------------------------------------

# Sent verbatim as the system message by ProjectGenTool._call_model_direct.
# It demands a strict JSON-only reply so _call_model can json.loads() the
# response; if you change the wording, re-check that the documented schema
# (project_name/description/stack/files/install/run/notes) still matches
# what ProjectGenTool.run() reads out of the parsed dict.
_SYSTEM_PROMPT = """\
You are an expert software architect and developer.

Your job is to generate a COMPLETE project scaffold from a plain-English
description. The scaffold must be immediately usable: clone, install deps, run.

OUTPUT FORMAT — respond with a single JSON object and nothing else:
{
"project_name": "<slug, lowercase, hyphens only>",
"description": "<one sentence>",
"stack": "<comma-separated key tech>",
"files": [
{ "path": "relative/path/to/file.ext", "content": "<full file content>" },
...
],
"install": "<shell command to install dependencies>",
"run": "<shell command to start the project>",
"notes": "<optional short notes for the developer>"
}

RULES:
1. Output ONLY the JSON — no markdown fences, no prose before or after.
2. Every file must have COMPLETE content — no placeholders, no TODOs.
3. Always include: README.md, .gitignore, a dependency manifest
(requirements.txt, package.json, pyproject.toml, go.mod, etc.).
4. Include at least one working entry-point file and one basic test file.
5. Keep the scaffold focused and minimal — just enough to be genuinely useful.
6. Use modern, idiomatic conventions for the chosen language/framework.
7. All secrets/API keys must use environment variables loaded from a .env file;
include a .env.example but never a real .env.
"""
53
+
54
+
55
+ # ---------------------------------------------------------------------------
56
+ # Tool
57
+ # ---------------------------------------------------------------------------
58
+
59
class ProjectGenTool(Tool):
    """Generate a complete project scaffold from a plain-English description.

    Use this tool when the user asks to:
    - Start a new project ("create a FastAPI CRUD app")
    - Generate boilerplate for any language or framework
    - Scaffold a CLI tool, web API, Discord bot, data pipeline, etc.
    """

    name = "ProjectGen"
    description = (
        "Generate a complete, ready-to-run project scaffold from a plain-English "
        "description. Writes all files to disk. Any project type is supported — "
        "web app, CLI tool, REST API, Discord bot, data pipeline, browser extension, etc."
    )
    parameters = {
        "type": "object",
        "properties": {
            "description": {
                "type": "string",
                "description": (
                    "Plain-English description of the project to generate. "
                    "Include the language/framework if you have a preference. "
                    "Examples: 'a FastAPI CRUD API with SQLite', "
                    "'a React todo app with Tailwind', "
                    "'a Python CLI that converts Markdown to PDF'."
                ),
            },
            "output_dir": {
                "type": "string",
                "description": (
                    "Parent directory to create the project folder inside. "
                    "Defaults to the current working directory."
                ),
                "default": ".",
            },
        },
        "required": ["description"],
    }
    permission_risk = "medium"

    def _call_model(self, description: str) -> dict[str, Any]:
        """Ask the model for the scaffold and parse its reply as JSON.

        Despite the prompt's JSON-only rule, models occasionally wrap the
        payload in a markdown fence; all fence lines are stripped first.

        Raises:
            json.JSONDecodeError: if the (de-fenced) reply is not valid JSON.
        """
        raw = self._call_model_direct(description).strip()
        if raw.startswith("```"):
            raw = "\n".join(
                ln for ln in raw.splitlines() if not ln.startswith("```")
            ).strip()

        return json.loads(raw)

    def _call_model_direct(self, description: str) -> str:
        """Low-level chat-completions call; returns the raw message content."""
        from ..config import get_api_key, get_base_url  # noqa: PLC0415
        payload = {
            "model": "deepseek-chat",
            "max_tokens": 8192,
            "temperature": 0.2,
            "messages": [
                {"role": "system", "content": _SYSTEM_PROMPT},
                {"role": "user", "content": f"Project description: {description}"},
            ],
        }
        req = urllib.request.Request(
            f"{get_base_url()}/chat/completions",
            data=json.dumps(payload).encode(),
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {get_api_key()}",
            },
            method="POST",
        )
        with urllib.request.urlopen(req, timeout=120) as resp:
            data = json.loads(resp.read())
        return data["choices"][0]["message"]["content"]

    def run(self, description: str, output_dir: str = ".") -> str:  # type: ignore[override]
        """Generate the scaffold and write every file under *output_dir*.

        Returns a human-readable summary string. This tool never raises to
        the caller; all failures come back as 'Error: ...' strings.
        """
        parent = Path(output_dir).expanduser().resolve()
        try:
            parent.mkdir(parents=True, exist_ok=True)
        except OSError as e:
            return f"Error: cannot create output directory: {e}"

        try:
            scaffold = self._call_model(description)
        except json.JSONDecodeError as e:
            return f"Error: model returned invalid JSON — {e}"
        except Exception as e:
            return f"Error: model call failed — {e}"

        project_name = scaffold.get("project_name", "generated-project")
        files: list[dict] = scaffold.get("files", [])
        if not files:
            return "Error: model returned no files."

        project_dir = (parent / project_name).resolve()
        written: list[str] = []
        errors: list[str] = []

        for f in files:
            rel = f.get("path", "").lstrip("/")
            content = f.get("content", "")
            if not rel:
                continue
            # Security: model output is untrusted. lstrip("/") alone does not
            # stop "../"-style components, so resolve the target and refuse
            # anything that escapes the project directory.
            try:
                target = (project_dir / rel).resolve()
            except OSError as e:
                errors.append(f"{rel}: {e}")
                continue
            if not target.is_relative_to(project_dir):
                errors.append(f"{rel}: path escapes project directory — skipped")
                continue
            try:
                target.parent.mkdir(parents=True, exist_ok=True)
                target.write_text(content, encoding="utf-8")
                written.append(rel)
            except OSError as e:
                errors.append(f"{rel}: {e}")

        lines = [
            f"Project '{project_name}' created at {project_dir}",
            f"Stack : {scaffold.get('stack', 'n/a')}",
            f"Files : {len(written)} written",
        ]
        if errors:
            lines.append(f"Errors : {len(errors)}")
            lines.extend(f"  {e}" for e in errors)
        lines += [
            "",
            f"Install : {scaffold.get('install', 'n/a')}",
            f"Run : {scaffold.get('run', 'n/a')}",
        ]
        if scaffold.get("notes"):
            lines += ["", f"Notes : {scaffold['notes']}"]

        return "\n".join(lines)
@@ -0,0 +1,227 @@
1
+ """README generator tool.
2
+
3
+ Scans the current project directory and generates a polished, professional
4
+ README.md using the DeepSeek model. Understands any language or framework.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ import urllib.request
11
+ from pathlib import Path
12
+
13
+ from .registry import Tool
14
+
15
# ---------------------------------------------------------------------------
# Prompt
# ---------------------------------------------------------------------------

# Sent verbatim as the system message by ReadmeGenTool._call_model_direct.
# Rule 1 asks for bare Markdown, but ReadmeGenTool.run() still strips a
# wrapping code fence defensively, since models do not always comply.
_SYSTEM_PROMPT = """\
You are a senior technical writer. Your job is to write a polished, professional
README.md for a software project based on the source files you are given.

RULES:
1. Output ONLY valid Markdown — no preamble, no code fences around the whole output.
2. Structure: title, one-line description, badges (if applicable), Features,
Prerequisites, Installation, Usage, Configuration (.env vars if any),
Contributing, License.
3. Be concise but complete. Real examples over generic placeholders.
4. Infer the tech stack, purpose, and usage from the files provided.
5. Use proper Markdown code blocks with language tags for all code/commands.
6. If a LICENSE file is present, reference the licence type in the badge/section.
"""
33
+
34
+ _MAX_FILE_CHARS = 3000
35
+ _MAX_FILES = 20
36
+
37
+ # ---------------------------------------------------------------------------
38
+ # Helpers
39
+ # ---------------------------------------------------------------------------
40
+
41
+ _SKIP_DIRS = {
42
+ ".git", ".venv", "venv", "env", "__pycache__", "node_modules",
43
+ ".mypy_cache", ".pytest_cache", "dist", "build", ".claude", ".deepseek-code",
44
+ }
45
+
46
+ _SKIP_EXTS = {
47
+ ".pyc", ".pyo", ".pyd", ".so", ".dll", ".exe", ".egg",
48
+ ".png", ".jpg", ".jpeg", ".gif", ".ico", ".svg",
49
+ ".lock", ".min.js", ".min.css",
50
+ }
51
+
52
+ _PRIORITY_FILES = {
53
+ "main.py", "app.py", "cli.py", "server.py", "index.js", "index.ts",
54
+ "main.go", "main.rs", "Cargo.toml", "go.mod", "pyproject.toml",
55
+ "package.json", "requirements.txt", "setup.py", "Makefile",
56
+ "Dockerfile", ".env.example", "LICENSE",
57
+ }
58
+
59
+
60
+ def _collect_files(root: Path) -> list[tuple[str, str]]:
61
+ """Return list of (relative_path, content_snippet) for key project files."""
62
+ collected: list[tuple[str, str]] = []
63
+ seen_names: set[str] = set()
64
+
65
+ def _walk(path: Path, depth: int = 0):
66
+ if depth > 5 or len(collected) >= _MAX_FILES:
67
+ return
68
+ try:
69
+ entries = sorted(path.iterdir(), key=lambda p: (p.is_dir(), p.name))
70
+ except PermissionError:
71
+ return
72
+ for entry in entries:
73
+ if entry.name.startswith(".") and entry.name not in {".env.example"}:
74
+ continue
75
+ if entry.name in _SKIP_DIRS:
76
+ continue
77
+ if entry.is_dir():
78
+ _walk(entry, depth + 1)
79
+ elif entry.is_file() and entry.suffix not in _SKIP_EXTS:
80
+ rel = str(entry.relative_to(root))
81
+ if rel in seen_names or len(collected) >= _MAX_FILES:
82
+ continue
83
+ seen_names.add(rel)
84
+ try:
85
+ text = entry.read_text(encoding="utf-8", errors="replace")
86
+ snippet = text[:_MAX_FILE_CHARS]
87
+ if len(text) > _MAX_FILE_CHARS:
88
+ snippet += f"\n... ({len(text) - _MAX_FILE_CHARS} chars truncated)"
89
+ collected.append((rel, snippet))
90
+ except Exception:
91
+ pass
92
+
93
+ for name in _PRIORITY_FILES:
94
+ candidate = root / name
95
+ if candidate.exists() and candidate.is_file():
96
+ try:
97
+ text = candidate.read_text(encoding="utf-8", errors="replace")
98
+ snippet = text[:_MAX_FILE_CHARS]
99
+ collected.append((str(candidate.relative_to(root)), snippet))
100
+ seen_names.add(str(candidate.relative_to(root)))
101
+ except Exception:
102
+ pass
103
+
104
+ _walk(root)
105
+ return collected
106
+
107
+
108
+ def _format_context(files: list[tuple[str, str]]) -> str:
109
+ parts = []
110
+ for rel, content in files:
111
+ parts.append(f"=== {rel} ===\n{content}")
112
+ return "\n\n".join(parts)
113
+
114
+
115
+ # ---------------------------------------------------------------------------
116
+ # Tool
117
+ # ---------------------------------------------------------------------------
118
+
119
class ReadmeGenTool(Tool):
    """Generate a polished README.md by scanning the current project.

    Use this tool when the user asks to:
    - Write or improve the README for their project
    - Auto-generate documentation from source files
    - Create a professional project description
    """

    name = "ReadmeGen"
    description = (
        "Scan the current project directory and generate a professional README.md. "
        "Works with any language or framework. Infers stack, features, and usage "
        "automatically from source files."
    )
    parameters = {
        "type": "object",
        "properties": {
            "project_dir": {
                "type": "string",
                "description": "Root directory of the project to document. Defaults to cwd.",
                "default": ".",
            },
            "output_path": {
                "type": "string",
                "description": "Where to write the README. Defaults to <project_dir>/README.md.",
                "default": "",
            },
            "overwrite": {
                "type": "boolean",
                "description": "Overwrite an existing README.md. Defaults to false.",
                "default": False,
            },
        },
        "required": [],
    }
    permission_risk = "medium"

    def _call_model(self, context: str) -> str:
        """Indirection point for the model call (kept for overriding in tests)."""
        return self._call_model_direct(context)

    def _call_model_direct(self, context: str) -> str:
        """Low-level chat-completions call; returns the raw message content."""
        from ..config import get_api_key, get_base_url  # noqa: PLC0415
        payload = {
            "model": "deepseek-chat",
            "max_tokens": 4096,
            "temperature": 0.2,
            "messages": [
                {"role": "system", "content": _SYSTEM_PROMPT},
                {"role": "user", "content": f"Project files:\n\n{context}\n\nGenerate the README.md."},
            ],
        }
        req = urllib.request.Request(
            f"{get_base_url()}/chat/completions",
            data=json.dumps(payload).encode(),
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {get_api_key()}",
            },
            method="POST",
        )
        with urllib.request.urlopen(req, timeout=90) as resp:
            data = json.loads(resp.read())
        return data["choices"][0]["message"]["content"]

    def run(
        self,
        project_dir: str = ".",
        output_path: str = "",
        overwrite: bool = False,
    ) -> str:  # type: ignore[override]
        """Scan *project_dir*, generate a README via the model, and write it.

        Returns a human-readable summary string. This tool never raises to
        the caller; all failures come back as 'Error: ...' strings.
        """
        root = Path(project_dir).expanduser().resolve()
        if not root.is_dir():
            return f"Error: '{root}' is not a directory."

        out = Path(output_path).expanduser().resolve() if output_path else root / "README.md"

        if out.exists() and not overwrite:
            return (
                f"README already exists at {out}. "
                "Pass overwrite=true to replace it."
            )

        files = _collect_files(root)
        if not files:
            return "Error: no readable source files found."

        context = _format_context(files)

        try:
            readme = self._call_model(context)
        except Exception as e:
            return f"Error: model call failed — {e}"

        readme = readme.strip()
        # Defensively strip a wrapping code fence. The opener may be bare
        # ``` or carry any language tag (```markdown, ```md, ...), so drop
        # the whole first line rather than only the literal "```markdown".
        if readme.startswith("```"):
            readme = readme.split("\n", 1)[1] if "\n" in readme else ""
            readme = readme.lstrip("\n")
        if readme.endswith("```"):
            readme = readme[:-3].rstrip("\n")

        try:
            out.write_text(readme, encoding="utf-8")
        except OSError as e:
            return f"Error: could not write file — {e}"

        return (
            f"README.md generated from {len(files)} file(s).\n"
            f"Written to: {out}"
        )
@@ -0,0 +1,157 @@
1
+ import time
2
+ from typing import Any, Dict, Optional
3
+
4
+ # Tools in this set are read-only and safe to cache.
5
+ # Write/Bash/Git tools are explicitly excluded — their results must never be stale.
6
+ _CACHEABLE_TOOLS: frozenset[str] = frozenset({
7
+ "Read", "Glob", "Grep", "WebFetch", "WebSearch", "DateTime",
8
+ "YouTubePlaylist", # pure URL construction — no I/O, deterministic output
9
+ })
10
+
11
+ # Default TTL for cached tool results (seconds).
12
+ _DEFAULT_CACHE_TTL: float = 300.0 # 5 minutes
13
+
14
+
15
+ class _CacheEntry:
16
+ __slots__ = ("value", "expires_at")
17
+
18
+ def __init__(self, value: str, ttl: float) -> None:
19
+ self.value = value
20
+ self.expires_at = time.monotonic() + ttl
21
+
22
+
23
class Tool:
    """Base class for all tools.

    Subclasses override the ``name``/``description``/``parameters`` class
    attributes and implement :meth:`run`; :meth:`to_openai_tool` converts the
    declaration into the OpenAI function-calling schema.
    """

    name: str = ""
    description: str = ""
    parameters: Dict[str, Any] = {}
    permission_risk: str = "safe"  # one of: safe | medium | high

    def run(self, **kwargs) -> str:
        """Execute the tool. Subclasses must override this."""
        raise NotImplementedError

    def to_openai_tool(self) -> Dict[str, Any]:
        """Return this tool described in the OpenAI function-calling schema."""
        function_spec = {
            "name": self.name,
            "description": self.description,
            "parameters": self.parameters,
        }
        return {"type": "function", "function": function_spec}
43
+
44
+
45
class ToolRegistry:
    """Registry of all available tools with optional result caching.

    Caching is applied only to read-only tools (``_CACHEABLE_TOOLS``).
    Cache keys are derived from the tool name and its sorted arguments, so
    calling ``Read`` on the same file twice within the TTL window costs only
    one real I/O operation.

    Set ``cache_ttl=0`` to disable caching entirely.
    """

    def __init__(self, cache_ttl: float = _DEFAULT_CACHE_TTL):
        self._tools: Dict[str, Tool] = {}
        self._cache: Dict[str, _CacheEntry] = {}
        self._cache_ttl = cache_ttl
        # Monotonic timestamp of the last eviction sweep. Initialised here
        # rather than created lazily via getattr() in _set_cached, so the
        # attribute is visible to readers and type checkers.
        self._last_evict: float = 0.0

    def register(self, tool: Tool) -> None:
        """Register *tool* under its ``name`` (replacing any existing entry)."""
        self._tools[tool.name] = tool

    def get(self, name: str) -> Optional[Tool]:
        """Return the tool registered under *name*, or None if unknown."""
        return self._tools.get(name)

    def list_tools(self) -> list[Dict[str, Any]]:
        """Return every registered tool in OpenAI function-calling schema form."""
        return [t.to_openai_tool() for t in self._tools.values()]

    # ------------------------------------------------------------------ #
    # Cache helpers
    # ------------------------------------------------------------------ #

    @staticmethod
    def _cache_key(name: str, arguments: Dict[str, Any]) -> str:
        """Stable cache key: tool name + sorted argument pairs."""
        arg_repr = ",".join(f"{k}={v!r}" for k, v in sorted(arguments.items()))
        return f"{name}|{arg_repr}"

    def _get_cached(self, key: str) -> Optional[str]:
        """Return the cached value for *key*, dropping it first if expired."""
        entry = self._cache.get(key)
        if entry is None:
            return None
        if time.monotonic() > entry.expires_at:
            del self._cache[key]
            return None
        return entry.value

    def _set_cached(self, key: str, value: str) -> None:
        """Store *value* under *key*, opportunistically evicting stale entries."""
        if self._cache_ttl <= 0:
            return
        # Evict expired entries when the cache grows large, OR unconditionally
        # every 60 seconds so short-lived sessions don't accumulate stale
        # entries indefinitely. The non-empty guard stops the size-based
        # trigger from firing pointlessly on the very first insert (0 % 100 == 0).
        now = time.monotonic()
        should_evict = (
            (self._cache and len(self._cache) % 100 == 0)  # every 100th insert
            or self._last_evict + 60 < now  # time-based fallback
        )
        if should_evict:
            expired = [k for k, e in self._cache.items() if now > e.expires_at]
            for k in expired:
                del self._cache[k]
            self._last_evict = now
        self._cache[key] = _CacheEntry(value, self._cache_ttl)

    def invalidate(self, tool_name: Optional[str] = None) -> int:
        """Invalidate cache entries.

        Args:
            tool_name: If given, only entries for this tool are removed.
                If None, the entire cache is cleared.

        Returns:
            Number of entries removed.
        """
        if tool_name is None:
            count = len(self._cache)
            self._cache.clear()
            return count
        prefix = f"{tool_name}|"
        keys = [k for k in self._cache if k.startswith(prefix)]
        for k in keys:
            del self._cache[k]
        return len(keys)

    # ------------------------------------------------------------------ #
    # Execution
    # ------------------------------------------------------------------ #

    def execute(self, name: str, arguments: Dict[str, Any], check_perm=None) -> str:
        """Run tool *name* with *arguments*, consulting the cache when eligible.

        Args:
            name: Registered tool name.
            arguments: Keyword arguments forwarded to the tool's ``run``.
            check_perm: Optional callable ``(name) -> bool``; a falsy return
                denies execution.

        Returns:
            The tool's result string, or an 'Error: ...' / permission message.
            Exceptions from the tool are caught and reported, never raised.
        """
        tool = self.get(name)
        if not tool:
            return f"Error: unknown tool '{name}'"

        if check_perm:
            allowed = check_perm(name)
            if not allowed:
                return f"Permission denied for tool '{name}'"

        # Check cache for eligible read-only tools.
        use_cache = name in _CACHEABLE_TOOLS and self._cache_ttl > 0
        if use_cache:
            key = self._cache_key(name, arguments)
            cached = self._get_cached(key)
            if cached is not None:
                return cached

        try:
            result = tool.run(**arguments)
        except Exception as e:
            # Failures are reported as strings and intentionally NOT cached
            # (we return before reaching _set_cached below).
            return f"Error executing {name}: {e}"

        if use_cache:
            self._set_cached(key, result)

        return result
@@ -0,0 +1,109 @@
1
+ """Shell auto-detection for Windows — finds Git Bash, WSL, or falls back to cmd.exe."""
2
+
3
+ import os
4
+ import shutil
5
+ from dataclasses import dataclass
6
+ from typing import Optional
7
+
8
+
9
@dataclass
class ShellInfo:
    """A resolved shell executable plus everything needed to invoke it."""

    # Short identifier: "git_bash", "wsl", "cmd", "powershell".
    name: str
    # Path (or bare command name) of the shell executable.
    path: str
    # Extra argv entries to pass (e.g. ["--login"] for bash).
    args: list[str]
    # True when a POSIX userland (ls/grep/find/curl etc.) is available.
    unix_like: bool
    # Human-readable display name.
    description: str
16
+
17
+
18
def detect_shell() -> ShellInfo:
    """Probe for the best available shell on Windows.

    Preference order:
    1. Git Bash (most common for developers on Windows)
    2. WSL (Windows Subsystem for Linux)
    3. cmd.exe (stock Windows — always available)
    """
    bash_path = _find_git_bash()
    if bash_path is not None:
        return ShellInfo(
            name="git_bash",
            path=bash_path,
            # --norc/--noprofile keep startup fast and output predictable.
            args=["--norc", "--noprofile"],
            unix_like=True,
            description="Git Bash",
        )

    wsl_path = _find_wsl()
    if wsl_path is not None:
        return ShellInfo(
            name="wsl",
            # "--" ends wsl.exe's own options; "bash" is the command to run.
            path=wsl_path,
            args=["--", "bash"],
            unix_like=True,
            description="WSL (Ubuntu)",
        )

    # Stock Windows fallback — COMSPEC conventionally points at cmd.exe.
    return ShellInfo(
        name="cmd",
        path=os.environ.get("COMSPEC", "cmd.exe"),
        args=[],
        unix_like=False,
        description="cmd.exe",
    )
55
+
56
+
57
+ def _find_git_bash() -> Optional[str]:
58
+ """Locate Git Bash — use PATH lookup first, then common install paths."""
59
+ bash_in_path = shutil.which("bash")
60
+ if bash_in_path:
61
+ return bash_in_path
62
+ candidates = [
63
+ r"C:\Program Files\Git\bin\bash.exe",
64
+ r"C:\Program Files (x86)\Git\bin\bash.exe",
65
+ os.path.expanduser(r"~\AppData\Local\Programs\Git\bin\bash.exe"),
66
+ ]
67
+ for path in candidates:
68
+ if os.path.isfile(path):
69
+ return path
70
+ return None
71
+
72
+
73
+ def _find_wsl() -> Optional[str]:
74
+ """Check if WSL is available."""
75
+ wsl_path = shutil.which("wsl.exe") or shutil.which("wsl")
76
+ if wsl_path:
77
+ return wsl_path
78
+ return None
79
+
80
+
81
def resolve_shell(shell_setting: str = "auto") -> ShellInfo:
    """Resolve the shell to use based on a user setting.

    Args:
        shell_setting: One of "auto", "git_bash", "wsl", "cmd". Any other
            value (or an override whose shell cannot be found) falls back
            to auto-detection.

    Returns:
        The resolved ShellInfo.
    """
    # One spec per override: (finder, args, unix_like, description).
    # A table avoids the previous if/elif chain and the pointless
    # lambda wrappers around plain functions.
    overrides = {
        "git_bash": (_find_git_bash, ["--norc", "--noprofile"], True, "Git Bash"),
        "wsl": (_find_wsl, ["--", "bash"], True, "WSL (Ubuntu)"),
        "cmd": (lambda: os.environ.get("COMSPEC", "cmd.exe"), [], False, "cmd.exe"),
    }

    if shell_setting != "auto":
        spec = overrides.get(shell_setting)
        if spec:
            finder, args, unix_like, description = spec
            path = finder()
            if path:
                return ShellInfo(shell_setting, path, args, unix_like, description)
        # Fall through to auto if the override shell wasn't found.

    return detect_shell()