strix-agent 0.4.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
- strix/__init__.py +0 -0
- strix/agents/StrixAgent/__init__.py +4 -0
- strix/agents/StrixAgent/strix_agent.py +89 -0
- strix/agents/StrixAgent/system_prompt.jinja +404 -0
- strix/agents/__init__.py +10 -0
- strix/agents/base_agent.py +518 -0
- strix/agents/state.py +163 -0
- strix/interface/__init__.py +4 -0
- strix/interface/assets/tui_styles.tcss +694 -0
- strix/interface/cli.py +230 -0
- strix/interface/main.py +500 -0
- strix/interface/tool_components/__init__.py +39 -0
- strix/interface/tool_components/agents_graph_renderer.py +123 -0
- strix/interface/tool_components/base_renderer.py +62 -0
- strix/interface/tool_components/browser_renderer.py +120 -0
- strix/interface/tool_components/file_edit_renderer.py +99 -0
- strix/interface/tool_components/finish_renderer.py +31 -0
- strix/interface/tool_components/notes_renderer.py +108 -0
- strix/interface/tool_components/proxy_renderer.py +255 -0
- strix/interface/tool_components/python_renderer.py +34 -0
- strix/interface/tool_components/registry.py +72 -0
- strix/interface/tool_components/reporting_renderer.py +53 -0
- strix/interface/tool_components/scan_info_renderer.py +64 -0
- strix/interface/tool_components/terminal_renderer.py +131 -0
- strix/interface/tool_components/thinking_renderer.py +29 -0
- strix/interface/tool_components/user_message_renderer.py +43 -0
- strix/interface/tool_components/web_search_renderer.py +28 -0
- strix/interface/tui.py +1274 -0
- strix/interface/utils.py +559 -0
- strix/llm/__init__.py +15 -0
- strix/llm/config.py +20 -0
- strix/llm/llm.py +465 -0
- strix/llm/memory_compressor.py +212 -0
- strix/llm/request_queue.py +87 -0
- strix/llm/utils.py +87 -0
- strix/prompts/README.md +64 -0
- strix/prompts/__init__.py +109 -0
- strix/prompts/cloud/.gitkeep +0 -0
- strix/prompts/coordination/root_agent.jinja +41 -0
- strix/prompts/custom/.gitkeep +0 -0
- strix/prompts/frameworks/fastapi.jinja +142 -0
- strix/prompts/frameworks/nextjs.jinja +126 -0
- strix/prompts/protocols/graphql.jinja +215 -0
- strix/prompts/reconnaissance/.gitkeep +0 -0
- strix/prompts/technologies/firebase_firestore.jinja +177 -0
- strix/prompts/technologies/supabase.jinja +189 -0
- strix/prompts/vulnerabilities/authentication_jwt.jinja +147 -0
- strix/prompts/vulnerabilities/broken_function_level_authorization.jinja +146 -0
- strix/prompts/vulnerabilities/business_logic.jinja +171 -0
- strix/prompts/vulnerabilities/csrf.jinja +174 -0
- strix/prompts/vulnerabilities/idor.jinja +195 -0
- strix/prompts/vulnerabilities/information_disclosure.jinja +222 -0
- strix/prompts/vulnerabilities/insecure_file_uploads.jinja +188 -0
- strix/prompts/vulnerabilities/mass_assignment.jinja +141 -0
- strix/prompts/vulnerabilities/open_redirect.jinja +177 -0
- strix/prompts/vulnerabilities/path_traversal_lfi_rfi.jinja +142 -0
- strix/prompts/vulnerabilities/race_conditions.jinja +164 -0
- strix/prompts/vulnerabilities/rce.jinja +154 -0
- strix/prompts/vulnerabilities/sql_injection.jinja +151 -0
- strix/prompts/vulnerabilities/ssrf.jinja +135 -0
- strix/prompts/vulnerabilities/subdomain_takeover.jinja +155 -0
- strix/prompts/vulnerabilities/xss.jinja +169 -0
- strix/prompts/vulnerabilities/xxe.jinja +184 -0
- strix/runtime/__init__.py +19 -0
- strix/runtime/docker_runtime.py +399 -0
- strix/runtime/runtime.py +29 -0
- strix/runtime/tool_server.py +205 -0
- strix/telemetry/__init__.py +4 -0
- strix/telemetry/tracer.py +337 -0
- strix/tools/__init__.py +64 -0
- strix/tools/agents_graph/__init__.py +16 -0
- strix/tools/agents_graph/agents_graph_actions.py +621 -0
- strix/tools/agents_graph/agents_graph_actions_schema.xml +226 -0
- strix/tools/argument_parser.py +121 -0
- strix/tools/browser/__init__.py +4 -0
- strix/tools/browser/browser_actions.py +236 -0
- strix/tools/browser/browser_actions_schema.xml +183 -0
- strix/tools/browser/browser_instance.py +533 -0
- strix/tools/browser/tab_manager.py +342 -0
- strix/tools/executor.py +305 -0
- strix/tools/file_edit/__init__.py +4 -0
- strix/tools/file_edit/file_edit_actions.py +141 -0
- strix/tools/file_edit/file_edit_actions_schema.xml +128 -0
- strix/tools/finish/__init__.py +4 -0
- strix/tools/finish/finish_actions.py +174 -0
- strix/tools/finish/finish_actions_schema.xml +45 -0
- strix/tools/notes/__init__.py +14 -0
- strix/tools/notes/notes_actions.py +191 -0
- strix/tools/notes/notes_actions_schema.xml +150 -0
- strix/tools/proxy/__init__.py +20 -0
- strix/tools/proxy/proxy_actions.py +101 -0
- strix/tools/proxy/proxy_actions_schema.xml +267 -0
- strix/tools/proxy/proxy_manager.py +785 -0
- strix/tools/python/__init__.py +4 -0
- strix/tools/python/python_actions.py +47 -0
- strix/tools/python/python_actions_schema.xml +131 -0
- strix/tools/python/python_instance.py +172 -0
- strix/tools/python/python_manager.py +131 -0
- strix/tools/registry.py +196 -0
- strix/tools/reporting/__init__.py +6 -0
- strix/tools/reporting/reporting_actions.py +63 -0
- strix/tools/reporting/reporting_actions_schema.xml +30 -0
- strix/tools/terminal/__init__.py +4 -0
- strix/tools/terminal/terminal_actions.py +35 -0
- strix/tools/terminal/terminal_actions_schema.xml +146 -0
- strix/tools/terminal/terminal_manager.py +151 -0
- strix/tools/terminal/terminal_session.py +447 -0
- strix/tools/thinking/__init__.py +4 -0
- strix/tools/thinking/thinking_actions.py +18 -0
- strix/tools/thinking/thinking_actions_schema.xml +52 -0
- strix/tools/web_search/__init__.py +4 -0
- strix/tools/web_search/web_search_actions.py +80 -0
- strix/tools/web_search/web_search_actions_schema.xml +83 -0
- strix_agent-0.4.0.dist-info/LICENSE +201 -0
- strix_agent-0.4.0.dist-info/METADATA +282 -0
- strix_agent-0.4.0.dist-info/RECORD +118 -0
- strix_agent-0.4.0.dist-info/WHEEL +4 -0
- strix_agent-0.4.0.dist-info/entry_points.txt +3 -0
strix/runtime/docker_runtime.py
ADDED
@@ -0,0 +1,399 @@
import contextlib
import logging
import os
import secrets
import socket
import time
from pathlib import Path
from typing import cast

import docker
from docker.errors import DockerException, ImageNotFound, NotFound
from docker.models.containers import Container

from .runtime import AbstractRuntime, SandboxInfo


STRIX_IMAGE = os.getenv("STRIX_IMAGE", "ghcr.io/usestrix/strix-sandbox:0.1.10")
logger = logging.getLogger(__name__)


class DockerRuntime(AbstractRuntime):
    def __init__(self) -> None:
        try:
            self.client = docker.from_env()
        except DockerException as e:
            logger.exception("Failed to connect to Docker daemon")
            raise RuntimeError("Docker is not available or not configured correctly.") from e

        self._scan_container: Container | None = None
        self._tool_server_port: int | None = None
        self._tool_server_token: str | None = None

    def _generate_sandbox_token(self) -> str:
        return secrets.token_urlsafe(32)

    def _find_available_port(self) -> int:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.bind(("", 0))
            return cast("int", s.getsockname()[1])

    def _get_scan_id(self, agent_id: str) -> str:
        try:
            from strix.telemetry.tracer import get_global_tracer

            tracer = get_global_tracer()
            if tracer and tracer.scan_config:
                return str(tracer.scan_config.get("scan_id", "default-scan"))
        except ImportError:
            logger.debug("Failed to import tracer, using fallback scan ID")
        except AttributeError:
            logger.debug("Tracer missing scan_config, using fallback scan ID")

        return f"scan-{agent_id.split('-')[0]}"

    def _verify_image_available(self, image_name: str, max_retries: int = 3) -> None:
        def _validate_image(image: docker.models.images.Image) -> None:
            if not image.id or not image.attrs:
                raise ImageNotFound(f"Image {image_name} metadata incomplete")

        for attempt in range(max_retries):
            try:
                image = self.client.images.get(image_name)
                _validate_image(image)
            except ImageNotFound:
                if attempt == max_retries - 1:
                    logger.exception(f"Image {image_name} not found after {max_retries} attempts")
                    raise
                logger.warning(f"Image {image_name} not ready, attempt {attempt + 1}/{max_retries}")
                time.sleep(2**attempt)
            except DockerException:
                if attempt == max_retries - 1:
                    logger.exception(f"Failed to verify image {image_name}")
                    raise
                logger.warning(f"Docker error verifying image, attempt {attempt + 1}/{max_retries}")
                time.sleep(2**attempt)
            else:
                logger.debug(f"Image {image_name} verified as available")
                return

    def _create_container_with_retry(self, scan_id: str, max_retries: int = 3) -> Container:
        last_exception = None
        container_name = f"strix-scan-{scan_id}"

        for attempt in range(max_retries):
            try:
                self._verify_image_available(STRIX_IMAGE)

                try:
                    existing_container = self.client.containers.get(container_name)
                    logger.warning(f"Container {container_name} already exists, removing it")
                    with contextlib.suppress(Exception):
                        existing_container.stop(timeout=5)
                        existing_container.remove(force=True)
                        time.sleep(1)
                except NotFound:
                    pass
                except DockerException as e:
                    logger.warning(f"Error checking/removing existing container: {e}")

                caido_port = self._find_available_port()
                tool_server_port = self._find_available_port()
                tool_server_token = self._generate_sandbox_token()

                self._tool_server_port = tool_server_port
                self._tool_server_token = tool_server_token

                container = self.client.containers.run(
                    STRIX_IMAGE,
                    command="sleep infinity",
                    detach=True,
                    name=container_name,
                    hostname=f"strix-scan-{scan_id}",
                    ports={
                        f"{caido_port}/tcp": caido_port,
                        f"{tool_server_port}/tcp": tool_server_port,
                    },
                    cap_add=["NET_ADMIN", "NET_RAW"],
                    labels={"strix-scan-id": scan_id},
                    environment={
                        "PYTHONUNBUFFERED": "1",
                        "CAIDO_PORT": str(caido_port),
                        "TOOL_SERVER_PORT": str(tool_server_port),
                        "TOOL_SERVER_TOKEN": tool_server_token,
                    },
                    tty=True,
                )

                self._scan_container = container
                logger.info("Created container %s for scan %s", container.id, scan_id)

                self._initialize_container(
                    container, caido_port, tool_server_port, tool_server_token
                )
            except DockerException as e:
                last_exception = e
                if attempt == max_retries - 1:
                    logger.exception(f"Failed to create container after {max_retries} attempts")
                    break

                logger.warning(f"Container creation attempt {attempt + 1}/{max_retries} failed")

                self._tool_server_port = None
                self._tool_server_token = None

                sleep_time = (2**attempt) + (0.1 * attempt)
                time.sleep(sleep_time)
            else:
                return container

        raise RuntimeError(
            f"Failed to create Docker container after {max_retries} attempts: {last_exception}"
        ) from last_exception

    def _get_or_create_scan_container(self, scan_id: str) -> Container:  # noqa: PLR0912
        container_name = f"strix-scan-{scan_id}"

        if self._scan_container:
            try:
                self._scan_container.reload()
                if self._scan_container.status == "running":
                    return self._scan_container
            except NotFound:
                self._scan_container = None
                self._tool_server_port = None
                self._tool_server_token = None

        try:
            container = self.client.containers.get(container_name)
            container.reload()

            if (
                "strix-scan-id" not in container.labels
                or container.labels["strix-scan-id"] != scan_id
            ):
                logger.warning(
                    f"Container {container_name} exists but missing/wrong label, updating"
                )

            if container.status != "running":
                logger.info(f"Starting existing container {container_name}")
                container.start()
                time.sleep(2)

            self._scan_container = container

            for env_var in container.attrs["Config"]["Env"]:
                if env_var.startswith("TOOL_SERVER_PORT="):
                    self._tool_server_port = int(env_var.split("=")[1])
                elif env_var.startswith("TOOL_SERVER_TOKEN="):
                    self._tool_server_token = env_var.split("=")[1]

            logger.info(f"Reusing existing container {container_name}")

        except NotFound:
            pass
        except DockerException as e:
            logger.warning(f"Failed to get container by name {container_name}: {e}")
        else:
            return container

        try:
            containers = self.client.containers.list(
                all=True, filters={"label": f"strix-scan-id={scan_id}"}
            )
            if containers:
                container = cast("Container", containers[0])
                if container.status != "running":
                    container.start()
                    time.sleep(2)
                self._scan_container = container

                for env_var in container.attrs["Config"]["Env"]:
                    if env_var.startswith("TOOL_SERVER_PORT="):
                        self._tool_server_port = int(env_var.split("=")[1])
                    elif env_var.startswith("TOOL_SERVER_TOKEN="):
                        self._tool_server_token = env_var.split("=")[1]

                logger.info(f"Found existing container by label for scan {scan_id}")
                return container
        except DockerException as e:
            logger.warning("Failed to find existing container by label for scan %s: %s", scan_id, e)

        logger.info("Creating new Docker container for scan %s", scan_id)
        return self._create_container_with_retry(scan_id)

    def _initialize_container(
        self, container: Container, caido_port: int, tool_server_port: int, tool_server_token: str
    ) -> None:
        logger.info("Initializing Caido proxy on port %s", caido_port)
        result = container.exec_run(
            f"bash -c 'export CAIDO_PORT={caido_port} && /usr/local/bin/docker-entrypoint.sh true'",
            detach=False,
        )

        time.sleep(5)

        result = container.exec_run(
            "bash -c 'source /etc/profile.d/proxy.sh && echo $CAIDO_API_TOKEN'", user="pentester"
        )
        caido_token = result.output.decode().strip() if result.exit_code == 0 else ""

        container.exec_run(
            f"bash -c 'source /etc/profile.d/proxy.sh && cd /app && "
            f"STRIX_SANDBOX_MODE=true CAIDO_API_TOKEN={caido_token} CAIDO_PORT={caido_port} "
            f"poetry run python strix/runtime/tool_server.py --token {tool_server_token} "
            f"--host 0.0.0.0 --port {tool_server_port} &'",
            detach=True,
            user="pentester",
        )

        time.sleep(5)

    def _copy_local_directory_to_container(
        self, container: Container, local_path: str, target_name: str | None = None
    ) -> None:
        import tarfile
        from io import BytesIO

        try:
            local_path_obj = Path(local_path).resolve()
            if not local_path_obj.exists() or not local_path_obj.is_dir():
                logger.warning(f"Local path does not exist or is not a directory: {local_path_obj}")
                return

            if target_name:
                logger.info(
                    f"Copying local directory {local_path_obj} to container at "
                    f"/workspace/{target_name}"
                )
            else:
                logger.info(f"Copying local directory {local_path_obj} to container")

            tar_buffer = BytesIO()
            with tarfile.open(fileobj=tar_buffer, mode="w") as tar:
                for item in local_path_obj.rglob("*"):
                    if item.is_file():
                        rel_path = item.relative_to(local_path_obj)
                        arcname = Path(target_name) / rel_path if target_name else rel_path
                        tar.add(item, arcname=arcname)

            tar_buffer.seek(0)
            container.put_archive("/workspace", tar_buffer.getvalue())

            container.exec_run(
                "chown -R pentester:pentester /workspace && chmod -R 755 /workspace",
                user="root",
            )

            logger.info("Successfully copied local directory to /workspace")

        except (OSError, DockerException):
            logger.exception("Failed to copy local directory to container")

    async def create_sandbox(
        self,
        agent_id: str,
        existing_token: str | None = None,
        local_sources: list[dict[str, str]] | None = None,
    ) -> SandboxInfo:
        scan_id = self._get_scan_id(agent_id)
        container = self._get_or_create_scan_container(scan_id)

        source_copied_key = f"_source_copied_{scan_id}"
        if local_sources and not hasattr(self, source_copied_key):
            for index, source in enumerate(local_sources, start=1):
                source_path = source.get("source_path")
                if not source_path:
                    continue

                target_name = source.get("workspace_subdir")
                if not target_name:
                    target_name = Path(source_path).name or f"target_{index}"

                self._copy_local_directory_to_container(container, source_path, target_name)
            setattr(self, source_copied_key, True)

        container_id = container.id
        if container_id is None:
            raise RuntimeError("Docker container ID is unexpectedly None")

        token = existing_token if existing_token is not None else self._tool_server_token

        if self._tool_server_port is None or token is None:
            raise RuntimeError("Tool server not initialized or no token available")

        api_url = await self.get_sandbox_url(container_id, self._tool_server_port)

        await self._register_agent_with_tool_server(api_url, agent_id, token)

        return {
            "workspace_id": container_id,
            "api_url": api_url,
            "auth_token": token,
            "tool_server_port": self._tool_server_port,
            "agent_id": agent_id,
        }

    async def _register_agent_with_tool_server(
        self, api_url: str, agent_id: str, token: str
    ) -> None:
        import httpx

        try:
            async with httpx.AsyncClient(trust_env=False) as client:
                response = await client.post(
                    f"{api_url}/register_agent",
                    params={"agent_id": agent_id},
                    headers={"Authorization": f"Bearer {token}"},
                    timeout=30,
                )
                response.raise_for_status()
                logger.info(f"Registered agent {agent_id} with tool server")
        except (httpx.RequestError, httpx.HTTPStatusError) as e:
            logger.warning(f"Failed to register agent {agent_id}: {e}")

    async def get_sandbox_url(self, container_id: str, port: int) -> str:
        try:
            container = self.client.containers.get(container_id)
            container.reload()

            host = self._resolve_docker_host()

        except NotFound:
            raise ValueError(f"Container {container_id} not found.") from None
        except DockerException as e:
            raise RuntimeError(f"Failed to get container URL for {container_id}: {e}") from e
        else:
            return f"http://{host}:{port}"

    def _resolve_docker_host(self) -> str:
        docker_host = os.getenv("DOCKER_HOST", "")
        if not docker_host:
            return "127.0.0.1"

        from urllib.parse import urlparse

        parsed = urlparse(docker_host)

        if parsed.scheme in ("tcp", "http", "https") and parsed.hostname:
            return parsed.hostname

        return "127.0.0.1"

    async def destroy_sandbox(self, container_id: str) -> None:
        logger.info("Destroying scan container %s", container_id)
        try:
            container = self.client.containers.get(container_id)
            container.stop()
            container.remove()
            logger.info("Successfully destroyed container %s", container_id)

            self._scan_container = None
            self._tool_server_port = None
            self._tool_server_token = None

        except NotFound:
            logger.warning("Container %s not found for destruction.", container_id)
        except DockerException as e:
            logger.warning("Failed to destroy container %s: %s", container_id, e)
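For orientation, a minimal sketch of how this runtime could be driven end to end, based on the signatures above; the agent ID and local source mapping are illustrative placeholders, not values shipped with the package:

import asyncio

from strix.runtime.docker_runtime import DockerRuntime


async def main() -> None:
    # Raises RuntimeError in __init__ if the Docker daemon is unreachable.
    runtime = DockerRuntime()

    # Hypothetical agent ID and local source mapping, for illustration only.
    sandbox = await runtime.create_sandbox(
        agent_id="agent-1234",
        local_sources=[{"source_path": "./target-app", "workspace_subdir": "target-app"}],
    )

    try:
        # SandboxInfo keys come from strix/runtime/runtime.py.
        print(sandbox["api_url"], sandbox["tool_server_port"])
    finally:
        await runtime.destroy_sandbox(sandbox["workspace_id"])


if __name__ == "__main__":
    asyncio.run(main())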
strix/runtime/runtime.py
ADDED
@@ -0,0 +1,29 @@
from abc import ABC, abstractmethod
from typing import TypedDict


class SandboxInfo(TypedDict):
    workspace_id: str
    api_url: str
    auth_token: str | None
    tool_server_port: int
    agent_id: str


class AbstractRuntime(ABC):
    @abstractmethod
    async def create_sandbox(
        self,
        agent_id: str,
        existing_token: str | None = None,
        local_sources: list[dict[str, str]] | None = None,
    ) -> SandboxInfo:
        raise NotImplementedError

    @abstractmethod
    async def get_sandbox_url(self, container_id: str, port: int) -> str:
        raise NotImplementedError

    @abstractmethod
    async def destroy_sandbox(self, container_id: str) -> None:
        raise NotImplementedError
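DockerRuntime above is the package's concrete implementation of this interface. As an illustration only (not part of the package), a stub runtime shows the minimum a concrete subclass must provide:

from strix.runtime.runtime import AbstractRuntime, SandboxInfo


class LocalStubRuntime(AbstractRuntime):
    """Illustrative stub: returns canned SandboxInfo instead of starting a container."""

    async def create_sandbox(
        self,
        agent_id: str,
        existing_token: str | None = None,
        local_sources: list[dict[str, str]] | None = None,
    ) -> SandboxInfo:
        return {
            "workspace_id": "local",
            "api_url": "http://127.0.0.1:8000",
            "auth_token": existing_token,
            "tool_server_port": 8000,
            "agent_id": agent_id,
        }

    async def get_sandbox_url(self, container_id: str, port: int) -> str:
        return f"http://127.0.0.1:{port}"

    async def destroy_sandbox(self, container_id: str) -> None:
        return None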
strix/runtime/tool_server.py
ADDED
@@ -0,0 +1,205 @@
from __future__ import annotations

import argparse
import asyncio
import logging
import os
import signal
import sys
from multiprocessing import Process, Queue
from typing import Any

import uvicorn
from fastapi import Depends, FastAPI, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from pydantic import BaseModel, ValidationError


SANDBOX_MODE = os.getenv("STRIX_SANDBOX_MODE", "false").lower() == "true"
if not SANDBOX_MODE:
    raise RuntimeError("Tool server should only run in sandbox mode (STRIX_SANDBOX_MODE=true)")

parser = argparse.ArgumentParser(description="Start Strix tool server")
parser.add_argument("--token", required=True, help="Authentication token")
parser.add_argument("--host", default="0.0.0.0", help="Host to bind to")  # nosec
parser.add_argument("--port", type=int, required=True, help="Port to bind to")

args = parser.parse_args()
EXPECTED_TOKEN = args.token

app = FastAPI()
security = HTTPBearer()

security_dependency = Depends(security)

agent_processes: dict[str, dict[str, Any]] = {}
agent_queues: dict[str, dict[str, Queue[Any]]] = {}


def verify_token(credentials: HTTPAuthorizationCredentials) -> str:
    if not credentials or credentials.scheme != "Bearer":
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication scheme. Bearer token required.",
            headers={"WWW-Authenticate": "Bearer"},
        )

    if credentials.credentials != EXPECTED_TOKEN:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid authentication token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    return credentials.credentials


class ToolExecutionRequest(BaseModel):
    agent_id: str
    tool_name: str
    kwargs: dict[str, Any]


class ToolExecutionResponse(BaseModel):
    result: Any | None = None
    error: str | None = None


def agent_worker(_agent_id: str, request_queue: Queue[Any], response_queue: Queue[Any]) -> None:
    null_handler = logging.NullHandler()

    root_logger = logging.getLogger()
    root_logger.handlers = [null_handler]
    root_logger.setLevel(logging.CRITICAL)

    from strix.tools.argument_parser import ArgumentConversionError, convert_arguments
    from strix.tools.registry import get_tool_by_name

    while True:
        try:
            request = request_queue.get()

            if request is None:
                break

            tool_name = request["tool_name"]
            kwargs = request["kwargs"]

            try:
                tool_func = get_tool_by_name(tool_name)
                if not tool_func:
                    response_queue.put({"error": f"Tool '{tool_name}' not found"})
                    continue

                converted_kwargs = convert_arguments(tool_func, kwargs)
                result = tool_func(**converted_kwargs)

                response_queue.put({"result": result})

            except (ArgumentConversionError, ValidationError) as e:
                response_queue.put({"error": f"Invalid arguments: {e}"})
            except (RuntimeError, ValueError, ImportError) as e:
                response_queue.put({"error": f"Tool execution error: {e}"})

        except (RuntimeError, ValueError, ImportError) as e:
            response_queue.put({"error": f"Worker error: {e}"})


def ensure_agent_process(agent_id: str) -> tuple[Queue[Any], Queue[Any]]:
    if agent_id not in agent_processes:
        request_queue: Queue[Any] = Queue()
        response_queue: Queue[Any] = Queue()

        process = Process(
            target=agent_worker, args=(agent_id, request_queue, response_queue), daemon=True
        )
        process.start()

        agent_processes[agent_id] = {"process": process, "pid": process.pid}
        agent_queues[agent_id] = {"request": request_queue, "response": response_queue}

    return agent_queues[agent_id]["request"], agent_queues[agent_id]["response"]


@app.post("/execute", response_model=ToolExecutionResponse)
async def execute_tool(
    request: ToolExecutionRequest, credentials: HTTPAuthorizationCredentials = security_dependency
) -> ToolExecutionResponse:
    verify_token(credentials)

    request_queue, response_queue = ensure_agent_process(request.agent_id)

    request_queue.put({"tool_name": request.tool_name, "kwargs": request.kwargs})

    try:
        loop = asyncio.get_event_loop()
        response = await loop.run_in_executor(None, response_queue.get)

        if "error" in response:
            return ToolExecutionResponse(error=response["error"])
        return ToolExecutionResponse(result=response.get("result"))

    except (RuntimeError, ValueError, OSError) as e:
        return ToolExecutionResponse(error=f"Worker error: {e}")


@app.post("/register_agent")
async def register_agent(
    agent_id: str, credentials: HTTPAuthorizationCredentials = security_dependency
) -> dict[str, str]:
    verify_token(credentials)

    ensure_agent_process(agent_id)
    return {"status": "registered", "agent_id": agent_id}


@app.get("/health")
async def health_check() -> dict[str, Any]:
    return {
        "status": "healthy",
        "sandbox_mode": str(SANDBOX_MODE),
        "environment": "sandbox" if SANDBOX_MODE else "main",
        "auth_configured": "true" if EXPECTED_TOKEN else "false",
        "active_agents": len(agent_processes),
        "agents": list(agent_processes.keys()),
    }


def cleanup_all_agents() -> None:
    for agent_id in list(agent_processes.keys()):
        try:
            agent_queues[agent_id]["request"].put(None)
            process = agent_processes[agent_id]["process"]

            process.join(timeout=1)

            if process.is_alive():
                process.terminate()
                process.join(timeout=1)

            if process.is_alive():
                process.kill()

        except (BrokenPipeError, EOFError, OSError):
            pass
        except (RuntimeError, ValueError) as e:
            logging.getLogger(__name__).debug(f"Error during agent cleanup: {e}")


def signal_handler(_signum: int, _frame: Any) -> None:
    signal.signal(signal.SIGPIPE, signal.SIG_IGN) if hasattr(signal, "SIGPIPE") else None
    cleanup_all_agents()
    sys.exit(0)


if hasattr(signal, "SIGPIPE"):
    signal.signal(signal.SIGPIPE, signal.SIG_IGN)

signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)

if __name__ == "__main__":
    try:
        uvicorn.run(app, host=args.host, port=args.port, log_level="info")
    finally:
        cleanup_all_agents()
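For reference, a hedged sketch of how a client reachable from the sandbox network could exercise the endpoints defined above; the base URL, token, agent ID, and tool name are placeholders, with the real values coming from the DockerRuntime sandbox setup:

import asyncio

import httpx

# Placeholder values for illustration only.
BASE_URL = "http://127.0.0.1:9000"
TOKEN = "example-token"
HEADERS = {"Authorization": f"Bearer {TOKEN}"}


async def main() -> None:
    async with httpx.AsyncClient(base_url=BASE_URL, headers=HEADERS, timeout=30) as client:
        # /health is defined without the bearer dependency in the server above.
        health = await client.get("/health")
        print(health.json())

        # Register a worker process for this agent, then execute a (hypothetical) tool in it.
        await client.post("/register_agent", params={"agent_id": "agent-1234"})
        response = await client.post(
            "/execute",
            json={"agent_id": "agent-1234", "tool_name": "example_tool", "kwargs": {}},
        )
        print(response.json())  # {"result": ...} or {"error": ...}


if __name__ == "__main__":
    asyncio.run(main())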