llmcode-cli 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (212)
  1. llm_code/__init__.py +2 -0
  2. llm_code/analysis/__init__.py +6 -0
  3. llm_code/analysis/cache.py +33 -0
  4. llm_code/analysis/engine.py +256 -0
  5. llm_code/analysis/go_rules.py +114 -0
  6. llm_code/analysis/js_rules.py +84 -0
  7. llm_code/analysis/python_rules.py +311 -0
  8. llm_code/analysis/rules.py +140 -0
  9. llm_code/analysis/rust_rules.py +108 -0
  10. llm_code/analysis/universal_rules.py +111 -0
  11. llm_code/api/__init__.py +0 -0
  12. llm_code/api/client.py +90 -0
  13. llm_code/api/errors.py +73 -0
  14. llm_code/api/openai_compat.py +390 -0
  15. llm_code/api/provider.py +35 -0
  16. llm_code/api/sse.py +52 -0
  17. llm_code/api/types.py +140 -0
  18. llm_code/cli/__init__.py +0 -0
  19. llm_code/cli/commands.py +70 -0
  20. llm_code/cli/image.py +122 -0
  21. llm_code/cli/render.py +214 -0
  22. llm_code/cli/status_line.py +79 -0
  23. llm_code/cli/streaming.py +92 -0
  24. llm_code/cli/tui_main.py +220 -0
  25. llm_code/computer_use/__init__.py +11 -0
  26. llm_code/computer_use/app_detect.py +49 -0
  27. llm_code/computer_use/app_tier.py +57 -0
  28. llm_code/computer_use/coordinator.py +99 -0
  29. llm_code/computer_use/input_control.py +71 -0
  30. llm_code/computer_use/screenshot.py +93 -0
  31. llm_code/cron/__init__.py +13 -0
  32. llm_code/cron/parser.py +145 -0
  33. llm_code/cron/scheduler.py +135 -0
  34. llm_code/cron/storage.py +126 -0
  35. llm_code/enterprise/__init__.py +1 -0
  36. llm_code/enterprise/audit.py +59 -0
  37. llm_code/enterprise/auth.py +26 -0
  38. llm_code/enterprise/oidc.py +95 -0
  39. llm_code/enterprise/rbac.py +65 -0
  40. llm_code/harness/__init__.py +5 -0
  41. llm_code/harness/config.py +33 -0
  42. llm_code/harness/engine.py +129 -0
  43. llm_code/harness/guides.py +41 -0
  44. llm_code/harness/sensors.py +68 -0
  45. llm_code/harness/templates.py +84 -0
  46. llm_code/hida/__init__.py +1 -0
  47. llm_code/hida/classifier.py +187 -0
  48. llm_code/hida/engine.py +49 -0
  49. llm_code/hida/profiles.py +95 -0
  50. llm_code/hida/types.py +28 -0
  51. llm_code/ide/__init__.py +1 -0
  52. llm_code/ide/bridge.py +80 -0
  53. llm_code/ide/detector.py +76 -0
  54. llm_code/ide/server.py +169 -0
  55. llm_code/logging.py +29 -0
  56. llm_code/lsp/__init__.py +0 -0
  57. llm_code/lsp/client.py +298 -0
  58. llm_code/lsp/detector.py +42 -0
  59. llm_code/lsp/manager.py +56 -0
  60. llm_code/lsp/tools.py +288 -0
  61. llm_code/marketplace/__init__.py +0 -0
  62. llm_code/marketplace/builtin_registry.py +102 -0
  63. llm_code/marketplace/installer.py +162 -0
  64. llm_code/marketplace/plugin.py +78 -0
  65. llm_code/marketplace/registry.py +360 -0
  66. llm_code/mcp/__init__.py +0 -0
  67. llm_code/mcp/bridge.py +87 -0
  68. llm_code/mcp/client.py +117 -0
  69. llm_code/mcp/health.py +120 -0
  70. llm_code/mcp/manager.py +214 -0
  71. llm_code/mcp/oauth.py +219 -0
  72. llm_code/mcp/transport.py +254 -0
  73. llm_code/mcp/types.py +53 -0
  74. llm_code/remote/__init__.py +0 -0
  75. llm_code/remote/client.py +136 -0
  76. llm_code/remote/protocol.py +22 -0
  77. llm_code/remote/server.py +275 -0
  78. llm_code/remote/ssh_proxy.py +56 -0
  79. llm_code/runtime/__init__.py +0 -0
  80. llm_code/runtime/auto_commit.py +56 -0
  81. llm_code/runtime/auto_diagnose.py +62 -0
  82. llm_code/runtime/checkpoint.py +70 -0
  83. llm_code/runtime/checkpoint_recovery.py +142 -0
  84. llm_code/runtime/compaction.py +35 -0
  85. llm_code/runtime/compressor.py +415 -0
  86. llm_code/runtime/config.py +533 -0
  87. llm_code/runtime/context.py +49 -0
  88. llm_code/runtime/conversation.py +921 -0
  89. llm_code/runtime/cost_tracker.py +126 -0
  90. llm_code/runtime/dream.py +127 -0
  91. llm_code/runtime/file_protection.py +150 -0
  92. llm_code/runtime/hardware.py +85 -0
  93. llm_code/runtime/hooks.py +223 -0
  94. llm_code/runtime/indexer.py +230 -0
  95. llm_code/runtime/knowledge_compiler.py +232 -0
  96. llm_code/runtime/memory.py +132 -0
  97. llm_code/runtime/memory_layers.py +467 -0
  98. llm_code/runtime/memory_lint.py +252 -0
  99. llm_code/runtime/model_aliases.py +37 -0
  100. llm_code/runtime/ollama.py +93 -0
  101. llm_code/runtime/overlay.py +124 -0
  102. llm_code/runtime/permissions.py +200 -0
  103. llm_code/runtime/plan.py +45 -0
  104. llm_code/runtime/prompt.py +238 -0
  105. llm_code/runtime/repo_map.py +174 -0
  106. llm_code/runtime/sandbox.py +116 -0
  107. llm_code/runtime/session.py +268 -0
  108. llm_code/runtime/skill_resolver.py +61 -0
  109. llm_code/runtime/skills.py +133 -0
  110. llm_code/runtime/speculative.py +75 -0
  111. llm_code/runtime/streaming_executor.py +216 -0
  112. llm_code/runtime/telemetry.py +196 -0
  113. llm_code/runtime/token_budget.py +26 -0
  114. llm_code/runtime/vcr.py +142 -0
  115. llm_code/runtime/vision.py +102 -0
  116. llm_code/swarm/__init__.py +1 -0
  117. llm_code/swarm/backend_subprocess.py +108 -0
  118. llm_code/swarm/backend_tmux.py +103 -0
  119. llm_code/swarm/backend_worktree.py +306 -0
  120. llm_code/swarm/checkpoint.py +74 -0
  121. llm_code/swarm/coordinator.py +236 -0
  122. llm_code/swarm/mailbox.py +88 -0
  123. llm_code/swarm/manager.py +202 -0
  124. llm_code/swarm/memory_sync.py +80 -0
  125. llm_code/swarm/recovery.py +21 -0
  126. llm_code/swarm/team.py +67 -0
  127. llm_code/swarm/types.py +31 -0
  128. llm_code/task/__init__.py +16 -0
  129. llm_code/task/diagnostics.py +93 -0
  130. llm_code/task/manager.py +162 -0
  131. llm_code/task/types.py +112 -0
  132. llm_code/task/verifier.py +104 -0
  133. llm_code/tools/__init__.py +0 -0
  134. llm_code/tools/agent.py +145 -0
  135. llm_code/tools/agent_roles.py +82 -0
  136. llm_code/tools/base.py +94 -0
  137. llm_code/tools/bash.py +565 -0
  138. llm_code/tools/computer_use_tools.py +278 -0
  139. llm_code/tools/coordinator_tool.py +75 -0
  140. llm_code/tools/cron_create.py +90 -0
  141. llm_code/tools/cron_delete.py +49 -0
  142. llm_code/tools/cron_list.py +51 -0
  143. llm_code/tools/deferred.py +92 -0
  144. llm_code/tools/dump.py +116 -0
  145. llm_code/tools/edit_file.py +282 -0
  146. llm_code/tools/git_tools.py +531 -0
  147. llm_code/tools/glob_search.py +112 -0
  148. llm_code/tools/grep_search.py +144 -0
  149. llm_code/tools/ide_diagnostics.py +59 -0
  150. llm_code/tools/ide_open.py +58 -0
  151. llm_code/tools/ide_selection.py +52 -0
  152. llm_code/tools/memory_tools.py +138 -0
  153. llm_code/tools/multi_edit.py +143 -0
  154. llm_code/tools/notebook_edit.py +107 -0
  155. llm_code/tools/notebook_read.py +81 -0
  156. llm_code/tools/parsing.py +63 -0
  157. llm_code/tools/read_file.py +154 -0
  158. llm_code/tools/registry.py +58 -0
  159. llm_code/tools/search_backends/__init__.py +56 -0
  160. llm_code/tools/search_backends/brave.py +56 -0
  161. llm_code/tools/search_backends/duckduckgo.py +129 -0
  162. llm_code/tools/search_backends/searxng.py +71 -0
  163. llm_code/tools/search_backends/tavily.py +73 -0
  164. llm_code/tools/swarm_create.py +109 -0
  165. llm_code/tools/swarm_delete.py +95 -0
  166. llm_code/tools/swarm_list.py +44 -0
  167. llm_code/tools/swarm_message.py +109 -0
  168. llm_code/tools/task_close.py +79 -0
  169. llm_code/tools/task_plan.py +79 -0
  170. llm_code/tools/task_verify.py +90 -0
  171. llm_code/tools/tool_search.py +65 -0
  172. llm_code/tools/web_common.py +258 -0
  173. llm_code/tools/web_fetch.py +223 -0
  174. llm_code/tools/web_search.py +280 -0
  175. llm_code/tools/write_file.py +118 -0
  176. llm_code/tui/__init__.py +1 -0
  177. llm_code/tui/app.py +2432 -0
  178. llm_code/tui/chat_view.py +82 -0
  179. llm_code/tui/chat_widgets.py +309 -0
  180. llm_code/tui/header_bar.py +46 -0
  181. llm_code/tui/input_bar.py +349 -0
  182. llm_code/tui/keybindings.py +142 -0
  183. llm_code/tui/marketplace.py +210 -0
  184. llm_code/tui/status_bar.py +72 -0
  185. llm_code/tui/theme.py +96 -0
  186. llm_code/utils/__init__.py +0 -0
  187. llm_code/utils/diff.py +111 -0
  188. llm_code/utils/errors.py +70 -0
  189. llm_code/utils/hyperlink.py +73 -0
  190. llm_code/utils/notebook.py +179 -0
  191. llm_code/utils/search.py +69 -0
  192. llm_code/utils/text_normalize.py +28 -0
  193. llm_code/utils/version_check.py +62 -0
  194. llm_code/vim/__init__.py +4 -0
  195. llm_code/vim/engine.py +51 -0
  196. llm_code/vim/motions.py +172 -0
  197. llm_code/vim/operators.py +183 -0
  198. llm_code/vim/text_objects.py +139 -0
  199. llm_code/vim/transitions.py +279 -0
  200. llm_code/vim/types.py +68 -0
  201. llm_code/voice/__init__.py +1 -0
  202. llm_code/voice/languages.py +43 -0
  203. llm_code/voice/recorder.py +136 -0
  204. llm_code/voice/stt.py +36 -0
  205. llm_code/voice/stt_anthropic.py +66 -0
  206. llm_code/voice/stt_google.py +32 -0
  207. llm_code/voice/stt_whisper.py +52 -0
  208. llmcode_cli-1.0.0.dist-info/METADATA +524 -0
  209. llmcode_cli-1.0.0.dist-info/RECORD +212 -0
  210. llmcode_cli-1.0.0.dist-info/WHEEL +4 -0
  211. llmcode_cli-1.0.0.dist-info/entry_points.txt +2 -0
  212. llmcode_cli-1.0.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,236 @@
1
+ """Coordinator — auto-decompose tasks and dispatch to swarm workers."""
2
+ from __future__ import annotations
3
+
4
+ import asyncio
5
+ import json
6
+ import logging
7
+ import re
8
+ from typing import Any
9
+
10
+ from llm_code.api.provider import LLMProvider
11
+ from llm_code.api.types import Message, MessageRequest, TextBlock
12
+ from llm_code.swarm.manager import SwarmManager
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+ _DECOMPOSE_PROMPT = """\
17
+ Break the following task into independent subtasks that can be executed in parallel by specialized agents.
18
+
19
+ Output ONLY a JSON array (no explanation, no markdown fences). Each element must have:
20
+ - "role": a short role label (e.g. "coder", "tester", "reviewer", "researcher")
21
+ - "task": a clear, self-contained task description
22
+
23
+ Example output:
24
+ [
25
+ {{"role": "coder", "task": "Implement the binary search function in utils.py"}},
26
+ {{"role": "tester", "task": "Write unit tests for the binary search function"}}
27
+ ]
28
+
29
+ Task to decompose:
30
+ {task}
31
+ """
32
+
33
+ _AGGREGATE_PROMPT = """\
34
+ You are a coordinator agent summarizing the results of parallel worker agents.
35
+
36
+ Original task: {original_task}
37
+
38
+ Worker results:
39
+ {results}
40
+
41
+ Provide a concise summary of what was accomplished, any issues encountered, and the combined outcome.
42
+ """
43
+
44
+
45
class Coordinator:
    """Orchestrate task decomposition and parallel worker dispatch.

    Sends the original task to the LLM for decomposition into subtasks,
    creates one swarm member per subtask, monitors completion via the
    manager's mailbox, then aggregates all worker results with a final
    LLM summary call.
    """

    POLL_INTERVAL: float = 5.0  # seconds between mailbox polls
    TIMEOUT: float = 300.0  # total wait budget for all workers, in seconds
    COORDINATOR_ID: str = "coordinator"  # mailbox address workers report to

    # Completion keywords are matched on word boundaries so that words that
    # merely contain a keyword ("ABANDONED" contains "DONE", "INCOMPLETE"
    # contains "COMPLETE") are not mistaken for success reports.
    _COMPLETION_RE = re.compile(r"\b(?:DONE|COMPLETED?|FINISHED)\b")

    def __init__(
        self,
        manager: SwarmManager,
        provider: LLMProvider,
        config: Any,
    ) -> None:
        self._manager = manager
        self._provider = provider
        self._config = config

    async def orchestrate(self, task: str) -> str:
        """Decompose task, dispatch workers, wait for completion, return summary.

        Args:
            task: High-level task description to decompose and delegate.

        Returns:
            Aggregated summary string from all worker results.
        """
        subtasks = await self._decompose(task)
        if not subtasks:
            return f"No subtasks generated for: {task}"

        # Honour the configured swarm size limit (default 5 when unset).
        max_members = getattr(
            getattr(self._config, "swarm", None), "max_members", 5
        )
        subtasks = subtasks[:max_members]

        members = []
        for subtask in subtasks:
            role = subtask.get("role", "worker")
            subtask_desc = subtask.get("task", "")
            if not subtask_desc:
                continue
            try:
                member = await self._manager.create_member(role=role, task=subtask_desc)
                members.append(member)
                logger.info("Spawned swarm member %s (%s)", member.id, role)
            except ValueError as exc:
                # Manager refused (member limit reached) — stop spawning more.
                logger.warning("Could not create member for role=%s: %s", role, exc)
                break

        if not members:
            return "Failed to create any swarm members."

        results = await self._wait_for_completion(
            member_ids=[m.id for m in members],
            timeout=self.TIMEOUT,
            poll_interval=self.POLL_INTERVAL,
        )

        return await self._aggregate(task, members, results)

    async def _request_text(self, prompt: str, max_tokens: int) -> str:
        """Send a single user message to the provider and return the joined text blocks."""
        model = getattr(self._config, "model", None) or "default"
        request = MessageRequest(
            model=model,
            messages=(
                Message(
                    role="user",
                    content=(TextBlock(text=prompt),),
                ),
            ),
            max_tokens=max_tokens,
            stream=False,
        )
        response = await self._provider.send_message(request)
        return "".join(
            block.text for block in response.content if isinstance(block, TextBlock)
        )

    async def _decompose(self, task: str) -> list[dict]:
        """Ask the LLM to decompose task into subtasks. Returns list of dicts."""
        try:
            text = await self._request_text(_DECOMPOSE_PROMPT.format(task=task), 1024)
            return self._parse_json_list(text)
        except Exception as exc:
            # A failed decomposition is non-fatal; orchestrate() reports it.
            logger.error("Decomposition failed: %s", exc)
            return []

    def _parse_json_list(self, text: str) -> list[dict]:
        """Extract a JSON array from LLM output (strips markdown fences)."""
        # Strip markdown code fences if present.
        cleaned = re.sub(r"```(?:json)?\s*", "", text).strip().rstrip("`").strip()
        # Locate the outermost array by first '[' and last ']'.
        start = cleaned.find("[")
        end = cleaned.rfind("]")
        if start == -1 or end == -1:
            logger.warning("Could not find JSON array in decomposition output: %r", text[:200])
            return []
        try:
            data = json.loads(cleaned[start : end + 1])
            if isinstance(data, list):
                # Keep only dict elements; anything else is malformed output.
                return [item for item in data if isinstance(item, dict)]
            return []
        except json.JSONDecodeError as exc:
            logger.warning("JSON parse error in decomposition: %s", exc)
            return []

    async def _wait_for_completion(
        self,
        member_ids: list[str],
        timeout: float,
        poll_interval: float,
    ) -> dict[str, list[str]]:
        """Poll the mailbox until all members report completion or timeout.

        Each member is expected to send a message to COORDINATOR_ID containing
        "DONE" or "COMPLETE" when finished.

        Returns:
            Mapping of member_id -> list of message texts received.
        """
        results: dict[str, list[str]] = {mid: [] for mid in member_ids}
        completed: set[str] = set()
        elapsed = 0.0

        while len(completed) < len(member_ids) and elapsed < timeout:
            for mid in member_ids:
                if mid in completed:
                    continue
                msgs = self._manager.mailbox.receive_and_clear(
                    from_id=mid, to_id=self.COORDINATOR_ID
                )
                for msg in msgs:
                    results[mid].append(msg.text)
                    # Word-boundary match avoids false positives such as
                    # "ABANDONED" or "INCOMPLETE" (the old substring check
                    # matched those).
                    if self._COMPLETION_RE.search(msg.text.upper()):
                        completed.add(mid)
                        logger.info("Member %s reported completion", mid)

            if len(completed) < len(member_ids):
                await asyncio.sleep(poll_interval)
                elapsed += poll_interval

        if len(completed) < len(member_ids):
            # Only warn when someone actually failed to report; the previous
            # `elapsed >= timeout` check could fire even after every member
            # completed on the final poll.
            logger.warning(
                "Coordinator timed out waiting for members: %s",
                [mid for mid in member_ids if mid not in completed],
            )

        return results

    async def _aggregate(self, original_task: str, members: list, results: dict[str, list[str]]) -> str:
        """Ask the LLM to aggregate all worker results into a summary."""
        result_lines = []
        for member in members:
            texts = results.get(member.id, [])
            result_lines.append(
                f"[{member.role}] {member.task}\n"
                + (("\n".join(texts)) if texts else "(no output received)")
            )

        results_text = "\n\n".join(result_lines)
        prompt = _AGGREGATE_PROMPT.format(
            original_task=original_task,
            results=results_text,
        )
        try:
            text = await self._request_text(prompt, 2048)
            # Fall back to the raw worker output when the summary is empty.
            return text.strip() or results_text
        except Exception as exc:
            logger.error("Aggregation LLM call failed: %s", exc)
            return results_text
@@ -0,0 +1,88 @@
1
+ """File-based JSONL mailbox for inter-agent communication."""
2
+ from __future__ import annotations
3
+
4
+ import fcntl
5
+ import json
6
+ from datetime import datetime, timezone
7
+ from pathlib import Path
8
+
9
+ from llm_code.swarm.types import SwarmMessage
10
+
11
+
12
class Mailbox:
    """JSONL-based message passing between swarm members.

    Messages stored at: <base_dir>/<sender>_to_<receiver>.jsonl
    Uses file locking to prevent concurrent write corruption.
    """

    def __init__(self, base_dir: Path) -> None:
        self._dir = Path(base_dir)
        self._dir.mkdir(parents=True, exist_ok=True)

    def send(self, from_id: str, to_id: str, text: str) -> SwarmMessage:
        """Append a message to the sender->receiver JSONL file (with file lock)."""
        ts = datetime.now(timezone.utc).isoformat()
        msg = SwarmMessage(from_id=from_id, to_id=to_id, text=text, timestamp=ts)
        path = self._msg_path(from_id, to_id)
        # Serialize outside the lock so the critical section is one write.
        record = json.dumps({
            "from_id": msg.from_id,
            "to_id": msg.to_id,
            "text": msg.text,
            "timestamp": msg.timestamp,
        })
        with open(path, "a", encoding="utf-8") as f:
            # Exclusive lock: concurrent senders must not interleave writes.
            fcntl.flock(f, fcntl.LOCK_EX)
            try:
                f.write(record + "\n")
            finally:
                fcntl.flock(f, fcntl.LOCK_UN)
        return msg

    @staticmethod
    def _parse_lines(path: Path) -> list[SwarmMessage]:
        """Parse a JSONL file into messages, skipping blank or corrupt lines.

        A writer killed mid-append can leave a truncated last line; previously
        that made every subsequent read raise JSONDecodeError, permanently
        blocking the channel.
        """
        messages: list[SwarmMessage] = []
        for line in path.read_text(encoding="utf-8").splitlines():
            line = line.strip()
            if not line:
                continue
            try:
                data = json.loads(line)
                messages.append(SwarmMessage(
                    from_id=data["from_id"],
                    to_id=data["to_id"],
                    text=data["text"],
                    timestamp=data["timestamp"],
                ))
            except (json.JSONDecodeError, KeyError):
                # Tolerate a partially-written or malformed record.
                continue
        return messages

    def receive(self, from_id: str, to_id: str) -> list[SwarmMessage]:
        """Read all messages from sender->receiver."""
        path = self._msg_path(from_id, to_id)
        if not path.exists():
            return []
        return self._parse_lines(path)

    def receive_and_clear(self, from_id: str, to_id: str) -> list[SwarmMessage]:
        """Read all messages then delete the file."""
        msgs = self.receive(from_id, to_id)
        path = self._msg_path(from_id, to_id)
        if path.exists():
            path.unlink()
        return msgs

    def broadcast(self, from_id: str, to_ids: list[str], text: str) -> list[SwarmMessage]:
        """Send the same message to multiple receivers."""
        return [self.send(from_id, to_id, text) for to_id in to_ids]

    def pending_for(self, to_id: str) -> list[SwarmMessage]:
        """Return all unread messages addressed to a given member."""
        messages: list[SwarmMessage] = []
        for path in self._dir.glob(f"*_to_{to_id}.jsonl"):
            messages.extend(self._parse_lines(path))
        return messages

    def _msg_path(self, from_id: str, to_id: str) -> Path:
        """Path of the JSONL channel file for a sender/receiver pair."""
        return self._dir / f"{from_id}_to_{to_id}.jsonl"
@@ -0,0 +1,202 @@
1
+ """SwarmManager — orchestrate creation, lifecycle, and teardown of swarm members."""
2
+ from __future__ import annotations
3
+
4
+ import re
5
+ import subprocess as sp
6
+ import uuid
7
+ from pathlib import Path
8
+
9
+ from llm_code.runtime.config import RuntimeConfig
10
+ from llm_code.swarm.backend_subprocess import SubprocessBackend
11
+ from llm_code.swarm.backend_tmux import TmuxBackend, is_tmux_available
12
+ from llm_code.swarm.backend_worktree import WorktreeBackend
13
+ from llm_code.swarm.mailbox import Mailbox
14
+ from llm_code.swarm.memory_sync import SharedMemory
15
+ from llm_code.swarm.types import SwarmMember, SwarmStatus
16
+
17
+
18
class SwarmManager:
    """Manage the lifecycle of swarm worker agents.

    Auto-detects tmux (if available and inside a session), otherwise falls
    back to subprocess. Each member is a llm-code --lite process with a
    role prompt injected at startup.
    """

    def __init__(
        self,
        swarm_dir: Path,
        max_members: int = 5,
        backend_preference: str = "auto",
        config: RuntimeConfig | None = None,
    ) -> None:
        self._swarm_dir = Path(swarm_dir)
        self._swarm_dir.mkdir(parents=True, exist_ok=True)
        self._max_members = max_members
        self._backend_preference = backend_preference
        self._members: dict[str, SwarmMember] = {}
        self._config = config or RuntimeConfig()

        # Backends (lazily used)
        self._subprocess_backend = SubprocessBackend(swarm_dir=self._swarm_dir)
        self._tmux_backend = TmuxBackend()
        # WorktreeBackend is initialised on demand (requires git + project dir)
        self._worktree_backend: WorktreeBackend | None = None

        # Shared resources
        self.mailbox = Mailbox(self._swarm_dir / "mailbox")
        self.shared_memory = SharedMemory(self._swarm_dir / "memory.json")

    async def create_member(
        self,
        role: str,
        task: str,
        backend: str = "auto",
        model: str | None = None,
    ) -> SwarmMember:
        """Spawn a new swarm worker.

        Args:
            role: Role description (e.g. 'security reviewer').
            task: The task this member should perform.
            backend: 'tmux', 'subprocess', or 'auto' (default).
            model: Override the model for this specific member. When None,
                the effective model is resolved via the 4-level fallback chain.

        Returns:
            The created SwarmMember.

        Raises:
            ValueError: If max_members limit is reached.
        """
        if len(self._members) >= self._max_members:
            raise ValueError(
                f"Cannot create member: max {self._max_members} members reached"
            )

        member_id = uuid.uuid4().hex[:8]
        effective_backend = self._resolve_backend(backend)
        effective_model = self._resolve_model(role, model)

        pid: int | str | None = None
        if effective_backend == "tmux":
            pid = self._tmux_backend.spawn(
                member_id=member_id, role=role, task=task, model=effective_model,
            )
        elif effective_backend == "worktree":
            if self._worktree_backend is None:
                # Lazy init: the worktree backend needs the project directory
                # and the worktree config section.
                self._worktree_backend = WorktreeBackend(
                    project_dir=self._swarm_dir.parent,
                    config=self._config.swarm.worktree,
                )
            pid = await self._worktree_backend.spawn(
                member_id=member_id, role=role, task=task, model=effective_model,
            )
        else:
            pid = await self._subprocess_backend.spawn(
                member_id=member_id, role=role, task=task, model=effective_model,
            )

        member = SwarmMember(
            id=member_id,
            role=role,
            task=task,
            backend=effective_backend,
            # tmux spawn may return a non-int handle; only real PIDs are kept.
            pid=pid if isinstance(pid, int) else None,
            status=SwarmStatus.RUNNING,
            model=effective_model,
        )
        self._members[member_id] = member
        return member

    def _resolve_model(self, role: str, explicit: str | None) -> str:
        """Determine the effective model using a 4-level fallback chain.

        Priority (highest to lowest):
          1. explicit argument
          2. config.swarm.role_models[role]
          3. config.model_routing.sub_agent
          4. config.model

        The resolved value is then looked up in config.model_aliases.
        """
        if explicit:
            model = explicit
        elif role in self._config.swarm.role_models:
            model = self._config.swarm.role_models[role]
        elif self._config.model_routing.sub_agent:
            model = self._config.model_routing.sub_agent
        else:
            model = self._config.model
        return self._config.model_aliases.get(model, model)

    def list_members(self) -> list[SwarmMember]:
        """Return all current swarm members."""
        return list(self._members.values())

    async def stop_member(self, member_id: str) -> None:
        """Stop and remove a swarm member.

        Raises:
            KeyError: If member_id is not found.
        """
        member = self._members.get(member_id)
        if member is None:
            raise KeyError(f"No swarm member with id '{member_id}'")

        # NOTE(review): worktree-backed members are also routed through the
        # subprocess backend here — confirm WorktreeBackend needs no dedicated
        # teardown (e.g. removing the git worktree).
        if member.backend == "tmux":
            self._tmux_backend.stop(member_id)
        else:
            await self._subprocess_backend.stop(member_id)

        del self._members[member_id]

    async def stop_all(self) -> None:
        """Stop all swarm members."""
        await self._subprocess_backend.stop_all()
        self._tmux_backend.stop_all()
        self._members.clear()

    def _resolve_backend(self, requested: str) -> str:
        """Determine which backend to use.

        Priority for explicit requests: worktree > tmux > subprocess.
        In auto mode: worktree (if git available) > tmux (if available) > subprocess.
        """
        if requested == "worktree":
            return "worktree"
        if requested == "tmux":
            return "tmux"
        if requested == "subprocess":
            return "subprocess"
        # auto path — honour backend_preference first
        pref = self._backend_preference
        if pref == "worktree":
            return "worktree"
        if pref == "tmux":
            return "tmux"
        # pref == "auto": try worktree > tmux > subprocess
        if self._is_git_repo() and self._git_supports_worktree():
            return "worktree"
        return "tmux" if is_tmux_available() else "subprocess"

    def _is_git_repo(self) -> bool:
        """Return True if the project directory is inside a git repository."""
        try:
            result = sp.run(
                ["git", "rev-parse", "--is-inside-work-tree"],
                cwd=str(self._swarm_dir.parent),
                capture_output=True,
                text=True,
            )
        except OSError:
            # git binary missing or not executable — previously this raised
            # FileNotFoundError and broke auto backend selection entirely.
            return False
        return result.returncode == 0

    def _git_supports_worktree(self) -> bool:
        """Return True if the installed git version supports worktrees (>= 2.15)."""
        try:
            result = sp.run(["git", "--version"], capture_output=True, text=True)
        except OSError:
            # No git binary available — cannot use the worktree backend.
            return False
        if result.returncode != 0:
            return False
        match = re.search(r"(\d+)\.(\d+)", result.stdout)
        if not match:
            return False
        major, minor = int(match.group(1)), int(match.group(2))
        return (major, minor) >= (2, 15)
@@ -0,0 +1,80 @@
1
+ """Shared memory store with file locking for swarm members."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import sys
6
+ from pathlib import Path
7
+
8
+
9
def _lock_file(f) -> None:  # type: ignore[no-untyped-def]
    """Acquire an exclusive lock on a file handle (platform-aware)."""
    if sys.platform == "win32":
        import msvcrt
        # msvcrt locks a byte range at the current file position; callers
        # position the handle consistently so processes contend on one region.
        msvcrt.locking(f.fileno(), msvcrt.LK_LOCK, 1)
    else:
        import fcntl
        fcntl.flock(f, fcntl.LOCK_EX)


def _unlock_file(f) -> None:  # type: ignore[no-untyped-def]
    """Release a file lock (platform-aware)."""
    if sys.platform == "win32":
        import msvcrt
        msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK, 1)
    else:
        import fcntl
        fcntl.flock(f, fcntl.LOCK_UN)


class SharedMemory:
    """JSON-backed shared key-value store with file locking.

    Multiple swarm members can safely read/write to the same file. Mutating
    operations hold a single exclusive lock across the whole
    read-modify-write cycle so concurrent writers cannot lose updates.
    """

    def __init__(self, path: Path) -> None:
        self._path = Path(path)
        self._path.parent.mkdir(parents=True, exist_ok=True)

    def write(self, key: str, value: str) -> None:
        """Set a key-value pair (atomically, under one lock)."""
        def _set(data: dict[str, str]) -> None:
            data[key] = value
        self._locked_update(_set)

    def read(self, key: str) -> str | None:
        """Get a value by key, or None if missing."""
        return self._locked_read().get(key)

    def read_all(self) -> dict[str, str]:
        """Return the entire shared memory dict."""
        return self._locked_read()

    def delete(self, key: str) -> None:
        """Remove a key (no-op if missing, atomically under one lock)."""
        self._locked_update(lambda data: data.pop(key, None))

    def _locked_read(self) -> dict[str, str]:
        """Read the store under an exclusive lock; {} on missing/corrupt file."""
        if not self._path.exists():
            return {}
        try:
            with open(self._path, "r", encoding="utf-8") as f:
                _lock_file(f)
                try:
                    content = f.read()
                    return json.loads(content) if content.strip() else {}
                finally:
                    _unlock_file(f)
        except (json.JSONDecodeError, OSError):
            return {}

    def _locked_update(self, mutate) -> None:  # type: ignore[no-untyped-def]
        """Apply `mutate` to the store while holding ONE exclusive lock.

        Previously write()/delete() performed a locked read followed by a
        separate locked write; two concurrent writers could interleave
        between the two steps and silently lose one of the updates.
        """
        # "a+" creates the file when missing without truncating existing data.
        with open(self._path, "a+", encoding="utf-8") as f:
            f.seek(0)
            _lock_file(f)
            try:
                content = f.read()
                try:
                    data = json.loads(content) if content.strip() else {}
                except json.JSONDecodeError:
                    # Corrupt store: start fresh, matching the read path.
                    data = {}
                mutate(data)
                f.seek(0)
                f.truncate()
                json.dump(data, f, indent=2)
            finally:
                _unlock_file(f)

    def _locked_write(self, data: dict[str, str]) -> None:
        """Replace the whole store under an exclusive lock (kept for callers
        that already hold a consistent snapshot)."""
        with open(self._path, "w", encoding="utf-8") as f:
            _lock_file(f)
            try:
                json.dump(data, f, indent=2)
            finally:
                _unlock_file(f)
@@ -0,0 +1,21 @@
1
+ """Recovery policy for agent teams — retry logic and failure handling."""
2
+ from __future__ import annotations
3
+
4
+ from dataclasses import dataclass
5
+
6
+
7
@dataclass(frozen=True)
class RecoveryPolicy:
    """Immutable retry/failure policy for an agent team."""

    # Maximum retry budget; compared against the attempt counter in
    # RecoveryAction.should_retry (attempt <= max_retries).
    max_retries: int = 2
    # Delay between attempts, in seconds.
    retry_delay_sec: int = 5
    # Action to take when every member has failed (e.g. "abort").
    on_all_failed: str = "abort"
12
+
13
+
14
class RecoveryAction:
    """Stateless helpers that interpret a RecoveryPolicy."""

    @staticmethod
    def should_retry(policy: RecoveryPolicy, attempt: int) -> bool:
        """Return True while the attempt counter has not exceeded max_retries."""
        return not (attempt > policy.max_retries)

    @staticmethod
    def resolve_all_failed(policy: RecoveryPolicy) -> str:
        """Return the configured action for the all-members-failed case."""
        action = policy.on_all_failed
        return action
llm_code/swarm/team.py ADDED
@@ -0,0 +1,67 @@
1
+ """Team template — save/load reusable agent team configurations."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+
8
+
9
@dataclass(frozen=True)
class TeamMemberTemplate:
    """A template for a single team member."""

    # Role label for the member, e.g. "coder" or "reviewer".
    role: str
    # Model override; "" means fall back to the default model resolution.
    model: str = ""
    # Backend override; "" presumably means auto-selection — confirm against
    # SwarmManager._resolve_backend.
    backend: str = ""
    # Extra system prompt injected for this member; "" for none.
    system_prompt: str = ""
16
+
17
+
18
@dataclass(frozen=True)
class TeamTemplate:
    """A reusable team configuration."""

    # Template name; also used as the JSON file stem on disk (see save_team).
    name: str
    # Human-readable description of the team.
    description: str
    # Member templates that make up the team.
    members: tuple[TeamMemberTemplate, ...]
    # Model for the coordinator; "" means use the default.
    coordinator_model: str = ""
    # Overall timeout budget, in seconds.
    max_timeout: int = 600
26
+
27
+
28
def save_team(team: TeamTemplate, teams_dir: Path) -> Path:
    """Serialize a team template to <teams_dir>/<name>.json and return the path.

    Creates teams_dir (and parents) when missing; overwrites an existing
    file of the same name.
    """
    teams_dir.mkdir(parents=True, exist_ok=True)
    member_records = [
        {
            "role": member.role,
            "model": member.model,
            "backend": member.backend,
            "system_prompt": member.system_prompt,
        }
        for member in team.members
    ]
    payload = {
        "name": team.name,
        "description": team.description,
        "members": member_records,
        "coordinator_model": team.coordinator_model,
        "max_timeout": team.max_timeout,
    }
    destination = teams_dir / f"{team.name}.json"
    destination.write_text(json.dumps(payload, indent=2), encoding="utf-8")
    return destination
43
+
44
+
45
def load_team(name: str, teams_dir: Path) -> TeamTemplate:
    """Load the team template named <name> from teams_dir.

    Raises:
        FileNotFoundError: If <name>.json does not exist in teams_dir.
    """
    source = teams_dir / f"{name}.json"
    if not source.exists():
        raise FileNotFoundError(f"Team template not found: {source}")
    raw = json.loads(source.read_text(encoding="utf-8"))
    member_templates = []
    for record in raw.get("members", []):
        member_templates.append(
            TeamMemberTemplate(
                role=record["role"],
                model=record.get("model", ""),
                backend=record.get("backend", ""),
                system_prompt=record.get("system_prompt", ""),
            )
        )
    return TeamTemplate(
        name=raw["name"],
        description=raw.get("description", ""),
        members=tuple(member_templates),
        coordinator_model=raw.get("coordinator_model", ""),
        max_timeout=raw.get("max_timeout", 600),
    )
62
+
63
+
64
def list_teams(teams_dir: Path) -> list[str]:
    """Return the names (file stems) of all saved teams, sorted alphabetically."""
    if not teams_dir.is_dir():
        return []
    names = [candidate.stem for candidate in teams_dir.glob("*.json")]
    names.sort()
    return names