strix_agent-0.1.1-py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. strix/__init__.py +0 -0
  2. strix/agents/StrixAgent/__init__.py +4 -0
  3. strix/agents/StrixAgent/strix_agent.py +60 -0
  4. strix/agents/StrixAgent/system_prompt.jinja +504 -0
  5. strix/agents/__init__.py +10 -0
  6. strix/agents/base_agent.py +394 -0
  7. strix/agents/state.py +139 -0
  8. strix/cli/__init__.py +4 -0
  9. strix/cli/app.py +1124 -0
  10. strix/cli/assets/cli.tcss +680 -0
  11. strix/cli/main.py +542 -0
  12. strix/cli/tool_components/__init__.py +39 -0
  13. strix/cli/tool_components/agents_graph_renderer.py +129 -0
  14. strix/cli/tool_components/base_renderer.py +61 -0
  15. strix/cli/tool_components/browser_renderer.py +107 -0
  16. strix/cli/tool_components/file_edit_renderer.py +95 -0
  17. strix/cli/tool_components/finish_renderer.py +32 -0
  18. strix/cli/tool_components/notes_renderer.py +108 -0
  19. strix/cli/tool_components/proxy_renderer.py +255 -0
  20. strix/cli/tool_components/python_renderer.py +34 -0
  21. strix/cli/tool_components/registry.py +72 -0
  22. strix/cli/tool_components/reporting_renderer.py +53 -0
  23. strix/cli/tool_components/scan_info_renderer.py +58 -0
  24. strix/cli/tool_components/terminal_renderer.py +99 -0
  25. strix/cli/tool_components/thinking_renderer.py +29 -0
  26. strix/cli/tool_components/user_message_renderer.py +43 -0
  27. strix/cli/tool_components/web_search_renderer.py +28 -0
  28. strix/cli/tracer.py +308 -0
  29. strix/llm/__init__.py +14 -0
  30. strix/llm/config.py +19 -0
  31. strix/llm/llm.py +310 -0
  32. strix/llm/memory_compressor.py +206 -0
  33. strix/llm/request_queue.py +63 -0
  34. strix/llm/utils.py +84 -0
  35. strix/prompts/__init__.py +113 -0
  36. strix/prompts/coordination/root_agent.jinja +41 -0
  37. strix/prompts/vulnerabilities/authentication_jwt.jinja +129 -0
  38. strix/prompts/vulnerabilities/business_logic.jinja +143 -0
  39. strix/prompts/vulnerabilities/csrf.jinja +168 -0
  40. strix/prompts/vulnerabilities/idor.jinja +164 -0
  41. strix/prompts/vulnerabilities/race_conditions.jinja +194 -0
  42. strix/prompts/vulnerabilities/rce.jinja +222 -0
  43. strix/prompts/vulnerabilities/sql_injection.jinja +216 -0
  44. strix/prompts/vulnerabilities/ssrf.jinja +168 -0
  45. strix/prompts/vulnerabilities/xss.jinja +221 -0
  46. strix/prompts/vulnerabilities/xxe.jinja +276 -0
  47. strix/runtime/__init__.py +19 -0
  48. strix/runtime/docker_runtime.py +298 -0
  49. strix/runtime/runtime.py +25 -0
  50. strix/runtime/tool_server.py +97 -0
  51. strix/tools/__init__.py +64 -0
  52. strix/tools/agents_graph/__init__.py +16 -0
  53. strix/tools/agents_graph/agents_graph_actions.py +610 -0
  54. strix/tools/agents_graph/agents_graph_actions_schema.xml +223 -0
  55. strix/tools/argument_parser.py +120 -0
  56. strix/tools/browser/__init__.py +4 -0
  57. strix/tools/browser/browser_actions.py +236 -0
  58. strix/tools/browser/browser_actions_schema.xml +183 -0
  59. strix/tools/browser/browser_instance.py +533 -0
  60. strix/tools/browser/tab_manager.py +342 -0
  61. strix/tools/executor.py +302 -0
  62. strix/tools/file_edit/__init__.py +4 -0
  63. strix/tools/file_edit/file_edit_actions.py +141 -0
  64. strix/tools/file_edit/file_edit_actions_schema.xml +128 -0
  65. strix/tools/finish/__init__.py +4 -0
  66. strix/tools/finish/finish_actions.py +167 -0
  67. strix/tools/finish/finish_actions_schema.xml +45 -0
  68. strix/tools/notes/__init__.py +14 -0
  69. strix/tools/notes/notes_actions.py +191 -0
  70. strix/tools/notes/notes_actions_schema.xml +150 -0
  71. strix/tools/proxy/__init__.py +20 -0
  72. strix/tools/proxy/proxy_actions.py +101 -0
  73. strix/tools/proxy/proxy_actions_schema.xml +267 -0
  74. strix/tools/proxy/proxy_manager.py +785 -0
  75. strix/tools/python/__init__.py +4 -0
  76. strix/tools/python/python_actions.py +47 -0
  77. strix/tools/python/python_actions_schema.xml +131 -0
  78. strix/tools/python/python_instance.py +172 -0
  79. strix/tools/python/python_manager.py +131 -0
  80. strix/tools/registry.py +196 -0
  81. strix/tools/reporting/__init__.py +6 -0
  82. strix/tools/reporting/reporting_actions.py +63 -0
  83. strix/tools/reporting/reporting_actions_schema.xml +30 -0
  84. strix/tools/terminal/__init__.py +4 -0
  85. strix/tools/terminal/terminal_actions.py +53 -0
  86. strix/tools/terminal/terminal_actions_schema.xml +114 -0
  87. strix/tools/terminal/terminal_instance.py +231 -0
  88. strix/tools/terminal/terminal_manager.py +191 -0
  89. strix/tools/thinking/__init__.py +4 -0
  90. strix/tools/thinking/thinking_actions.py +18 -0
  91. strix/tools/thinking/thinking_actions_schema.xml +52 -0
  92. strix/tools/web_search/__init__.py +4 -0
  93. strix/tools/web_search/web_search_actions.py +80 -0
  94. strix/tools/web_search/web_search_actions_schema.xml +83 -0
  95. strix_agent-0.1.1.dist-info/LICENSE +201 -0
  96. strix_agent-0.1.1.dist-info/METADATA +200 -0
  97. strix_agent-0.1.1.dist-info/RECORD +99 -0
  98. strix_agent-0.1.1.dist-info/WHEEL +4 -0
  99. strix_agent-0.1.1.dist-info/entry_points.txt +3 -0
strix/tools/proxy/proxy_manager.py
@@ -0,0 +1,785 @@
+import base64
+import os
+import re
+import time
+from typing import TYPE_CHECKING, Any
+from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
+
+import requests
+from gql import Client, gql
+from gql.transport.exceptions import TransportQueryError
+from gql.transport.requests import RequestsHTTPTransport
+from requests.exceptions import ProxyError, RequestException, Timeout
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+
+class ProxyManager:
+    def __init__(self, auth_token: str | None = None):
+        host = "127.0.0.1"
+        port = os.getenv("CAIDO_PORT", "56789")
+        self.base_url = f"http://{host}:{port}/graphql"
+        self.proxies = {"http": f"http://{host}:{port}", "https": f"http://{host}:{port}"}
+        self.auth_token = auth_token or os.getenv("CAIDO_API_TOKEN")
+        self.transport = RequestsHTTPTransport(
+            url=self.base_url, headers={"Authorization": f"Bearer {self.auth_token}"}
+        )
+        self.client = Client(transport=self.transport, fetch_schema_from_transport=False)
+
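The constructor wires everything to a local Caido instance: HTTP(S) traffic is routed through the intercepting proxy while management queries hit the GraphQL endpoint on the same host and port. A minimal construction sketch, assuming a local Caido listener; the port and token values below are placeholders:

    import os

    os.environ.setdefault("CAIDO_PORT", "56789")         # default used by ProxyManager
    os.environ.setdefault("CAIDO_API_TOKEN", "<token>")  # placeholder, not a real token

    manager = ProxyManager()  # or ProxyManager(auth_token="<token>")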
+    def list_requests(
+        self,
+        httpql_filter: str | None = None,
+        start_page: int = 1,
+        end_page: int = 1,
+        page_size: int = 50,
+        sort_by: str = "timestamp",
+        sort_order: str = "desc",
+        scope_id: str | None = None,
+    ) -> dict[str, Any]:
+        offset = (start_page - 1) * page_size
+        limit = (end_page - start_page + 1) * page_size
+
+        sort_mapping = {
+            "timestamp": "CREATED_AT",
+            "host": "HOST",
+            "method": "METHOD",
+            "path": "PATH",
+            "status_code": "RESP_STATUS_CODE",
+            "response_time": "RESP_ROUNDTRIP_TIME",
+            "response_size": "RESP_LENGTH",
+            "source": "SOURCE",
+        }
+
+        query = gql("""
+            query GetRequests(
+                $limit: Int, $offset: Int, $filter: HTTPQL,
+                $order: RequestResponseOrderInput, $scopeId: ID
+            ) {
+                requestsByOffset(
+                    limit: $limit, offset: $offset, filter: $filter,
+                    order: $order, scopeId: $scopeId
+                ) {
+                    edges {
+                        node {
+                            id method host path query createdAt length isTls port
+                            source alteration fileExtension
+                            response { id statusCode length roundtripTime createdAt }
+                        }
+                    }
+                    count { value }
+                }
+            }
+        """)
+
+        variables = {
+            "limit": limit,
+            "offset": offset,
+            "filter": httpql_filter,
+            "order": {
+                "by": sort_mapping.get(sort_by, "CREATED_AT"),
+                "ordering": sort_order.upper(),
+            },
+            "scopeId": scope_id,
+        }
+
+        try:
+            result = self.client.execute(query, variable_values=variables)
+            data = result.get("requestsByOffset", {})
+            nodes = [edge["node"] for edge in data.get("edges", [])]
+
+            count_data = data.get("count") or {}
+            return {
+                "requests": nodes,
+                "total_count": count_data.get("value", 0),
+                "start_page": start_page,
+                "end_page": end_page,
+                "page_size": page_size,
+                "offset": offset,
+                "returned_count": len(nodes),
+                "sort_by": sort_by,
+                "sort_order": sort_order,
+            }
+        except (TransportQueryError, ValueError, KeyError) as e:
+            return {"requests": [], "total_count": 0, "error": f"Error fetching requests: {e}"}
+
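list_requests fetches the whole start_page..end_page window in one query: offset is (start_page - 1) * page_size and limit covers the span, with friendly sort keys mapped onto Caido's column enums. A usage sketch; the HTTPQL filter string is illustrative only, consult Caido's HTTPQL documentation for real syntax:

    result = manager.list_requests(
        httpql_filter='req.host.cont:"example.com"',  # illustrative HTTPQL, not from this package
        start_page=1,
        end_page=2,               # offset 0, limit 100 with the default page_size
        sort_by="response_time",  # mapped to RESP_ROUNDTRIP_TIME
    )
    for req in result["requests"]:
        print(req["id"], req["method"], req["host"], req["path"])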
+    def view_request(
+        self,
+        request_id: str,
+        part: str = "request",
+        search_pattern: str | None = None,
+        page: int = 1,
+        page_size: int = 50,
+    ) -> dict[str, Any]:
+        queries = {
+            "request": """query GetRequest($id: ID!) {
+                request(id: $id) {
+                    id method host path query createdAt length isTls port
+                    source alteration edited raw
+                }
+            }""",
+            "response": """query GetRequest($id: ID!) {
+                request(id: $id) {
+                    id response {
+                        id statusCode length roundtripTime createdAt raw
+                    }
+                }
+            }""",
+        }
+
+        if part not in queries:
+            return {"error": f"Invalid part '{part}'. Use 'request' or 'response'"}
+
+        try:
+            result = self.client.execute(gql(queries[part]), variable_values={"id": request_id})
+            request_data = result.get("request", {})
+
+            if not request_data:
+                return {"error": f"Request {request_id} not found"}
+
+            if part == "request":
+                raw_content = request_data.get("raw")
+            else:
+                response_data = request_data.get("response") or {}
+                raw_content = response_data.get("raw")
+
+            if not raw_content:
+                return {"error": "No content available"}
+
+            content = base64.b64decode(raw_content).decode("utf-8", errors="replace")
+
+            if part == "response":
+                request_data["response"]["raw"] = content
+            else:
+                request_data["raw"] = content
+
+            return (
+                self._search_content(request_data, content, search_pattern)
+                if search_pattern
+                else self._paginate_content(request_data, content, page, page_size)
+            )
+
+        except (TransportQueryError, ValueError, KeyError, UnicodeDecodeError) as e:
+            return {"error": f"Failed to view request: {e}"}
+
+    def _search_content(
+        self, request_data: dict[str, Any], content: str, pattern: str
+    ) -> dict[str, Any]:
+        try:
+            regex = re.compile(pattern, re.IGNORECASE | re.MULTILINE | re.DOTALL)
+            matches = []
+
+            for match in regex.finditer(content):
+                start, end = match.start(), match.end()
+                context_size = 120
+
+                before = re.sub(r"\s+", " ", content[max(0, start - context_size) : start].strip())[
+                    -100:
+                ]
+                after = re.sub(r"\s+", " ", content[end : end + context_size].strip())[:100]
+
+                matches.append(
+                    {"match": match.group(), "before": before, "after": after, "position": start}
+                )
+
+                if len(matches) >= 20:
+                    break
+
+            return {
+                "id": request_data.get("id"),
+                "matches": matches,
+                "total_matches": len(matches),
+                "search_pattern": pattern,
+                "truncated": len(matches) >= 20,
+            }
+        except re.error as e:
+            return {"error": f"Invalid regex: {e}"}
+
+    def _paginate_content(
+        self, request_data: dict[str, Any], content: str, page: int, page_size: int
+    ) -> dict[str, Any]:
+        display_lines = []
+        for line in content.split("\n"):
+            if len(line) <= 80:
+                display_lines.append(line)
+            else:
+                display_lines.extend(
+                    [
+                        line[i : i + 80] + (" \\" if i + 80 < len(line) else "")
+                        for i in range(0, len(line), 80)
+                    ]
+                )
+
+        total_lines = len(display_lines)
+        total_pages = (total_lines + page_size - 1) // page_size
+        page = max(1, min(page, total_pages))
+
+        start_line = (page - 1) * page_size
+        end_line = min(total_lines, start_line + page_size)
+
+        return {
+            "id": request_data.get("id"),
+            "content": "\n".join(display_lines[start_line:end_line]),
+            "page": page,
+            "total_pages": total_pages,
+            "showing_lines": f"{start_line + 1}-{end_line} of {total_lines}",
+            "has_more": page < total_pages,
+        }
+
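view_request base64-decodes the stored raw bytes, then either regex-searches them (capped at 20 matches with roughly 100 characters of context on each side) or wraps them to 80 columns and returns one page of lines. A sketch using a hypothetical request id:

    page = manager.view_request("123", part="response", page=1, page_size=50)
    print(page.get("showing_lines"))
    print(page.get("content", ""))

    hits = manager.view_request("123", part="response", search_pattern=r"set-cookie")
    for m in hits.get("matches", []):
        print(m["position"], m["match"])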
+    def send_simple_request(
+        self,
+        method: str,
+        url: str,
+        headers: dict[str, str] | None = None,
+        body: str = "",
+        timeout: int = 30,
+    ) -> dict[str, Any]:
+        if headers is None:
+            headers = {}
+        try:
+            start_time = time.time()
+            response = requests.request(
+                method=method,
+                url=url,
+                headers=headers,
+                data=body or None,
+                proxies=self.proxies,
+                timeout=timeout,
+                verify=False,
+            )
+            response_time = int((time.time() - start_time) * 1000)
+
+            body_content = response.text
+            if len(body_content) > 10000:
+                body_content = body_content[:10000] + "\n... [truncated]"
+
+            return {
+                "status_code": response.status_code,
+                "headers": dict(response.headers),
+                "body": body_content,
+                "response_time_ms": response_time,
+                "url": response.url,
+                "message": (
+                    "Request sent through proxy - check list_requests() for captured traffic"
+                ),
+            }
+        except (RequestException, ProxyError, Timeout) as e:
+            return {"error": f"Request failed: {type(e).__name__}", "details": str(e), "url": url}
+
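send_simple_request routes traffic through the Caido proxy with verify=False so the proxy can intercept TLS, and truncates response bodies beyond 10,000 characters. A sketch against a placeholder target:

    resp = manager.send_simple_request(
        method="GET",
        url="http://example.com/",        # placeholder target
        headers={"User-Agent": "strix"},  # optional extras
    )
    print(resp.get("status_code"), resp.get("response_time_ms"))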
+    def repeat_request(
+        self, request_id: str, modifications: dict[str, Any] | None = None
+    ) -> dict[str, Any]:
+        if modifications is None:
+            modifications = {}
+
+        original = self.view_request(request_id, "request")
+        if "error" in original:
+            return {"error": f"Could not retrieve original request: {original['error']}"}
+
+        raw_content = original.get("content", "")
+        if not raw_content:
+            return {"error": "No raw request content found"}
+
+        request_components = self._parse_http_request(raw_content)
+        if "error" in request_components:
+            return request_components
+
+        full_url = self._build_full_url(request_components, modifications)
+        if "error" in full_url:
+            return full_url
+
+        modified_request = self._apply_modifications(
+            request_components, modifications, full_url["url"]
+        )
+
+        return self._send_modified_request(modified_request, request_id, modifications)
+
+    def _parse_http_request(self, raw_content: str) -> dict[str, Any]:
+        lines = raw_content.split("\n")
+        request_line = lines[0].strip().split(" ")
+        if len(request_line) < 2:
+            return {"error": "Invalid request line format"}
+
+        method, url_path = request_line[0], request_line[1]
+
+        headers = {}
+        body_start = 0
+        for i, line in enumerate(lines[1:], 1):
+            if line.strip() == "":
+                body_start = i + 1
+                break
+            if ":" in line:
+                key, value = line.split(":", 1)
+                headers[key.strip()] = value.strip()
+
+        body = "\n".join(lines[body_start:]).strip() if body_start < len(lines) else ""
+
+        return {"method": method, "url_path": url_path, "headers": headers, "body": body}
+
+    def _build_full_url(
+        self, components: dict[str, Any], modifications: dict[str, Any]
+    ) -> dict[str, Any]:
+        headers = components["headers"]
+        host = headers.get("Host", "")
+        if not host:
+            return {"error": "No Host header found"}
+
+        protocol = (
+            "https" if ":443" in host or "https" in headers.get("Referer", "").lower() else "http"
+        )
+        full_url = f"{protocol}://{host}{components['url_path']}"
+
+        if "url" in modifications:
+            full_url = modifications["url"]
+
+        return {"url": full_url}
+
+    def _apply_modifications(
+        self, components: dict[str, Any], modifications: dict[str, Any], full_url: str
+    ) -> dict[str, Any]:
+        headers = components["headers"].copy()
+        body = components["body"]
+        final_url = full_url
+
+        if "params" in modifications:
+            parsed = urlparse(final_url)
+            params = {k: v[0] if v else "" for k, v in parse_qs(parsed.query).items()}
+            params.update(modifications["params"])
+            final_url = urlunparse(parsed._replace(query=urlencode(params)))
+
+        if "headers" in modifications:
+            headers.update(modifications["headers"])
+
+        if "body" in modifications:
+            body = modifications["body"]
+
+        if "cookies" in modifications:
+            cookies = {}
+            if headers.get("Cookie"):
+                for cookie in headers["Cookie"].split(";"):
+                    if "=" in cookie:
+                        k, v = cookie.split("=", 1)
+                        cookies[k.strip()] = v.strip()
+            cookies.update(modifications["cookies"])
+            headers["Cookie"] = "; ".join([f"{k}={v}" for k, v in cookies.items()])
+
+        return {
+            "method": components["method"],
+            "url": final_url,
+            "headers": headers,
+            "body": body,
+        }
+
+    def _send_modified_request(
+        self, request_data: dict[str, Any], request_id: str, modifications: dict[str, Any]
+    ) -> dict[str, Any]:
+        try:
+            start_time = time.time()
+            response = requests.request(
+                method=request_data["method"],
+                url=request_data["url"],
+                headers=request_data["headers"],
+                data=request_data["body"] or None,
+                proxies=self.proxies,
+                timeout=30,
+                verify=False,
+            )
+            response_time = int((time.time() - start_time) * 1000)
+
+            response_body = response.text
+            truncated = len(response_body) > 10000
+            if truncated:
+                response_body = response_body[:10000] + "\n... [truncated]"
+
+            return {
+                "status_code": response.status_code,
+                "status_text": response.reason,
+                "headers": {
+                    k: v
+                    for k, v in response.headers.items()
+                    if k.lower()
+                    in ["content-type", "content-length", "server", "set-cookie", "location"]
+                },
+                "body": response_body,
+                "body_truncated": truncated,
+                "body_size": len(response.content),
+                "response_time_ms": response_time,
+                "url": response.url,
+                "original_request_id": request_id,
+                "modifications_applied": modifications,
+                "request": {
+                    "method": request_data["method"],
+                    "url": request_data["url"],
+                    "headers": request_data["headers"],
+                    "has_body": bool(request_data["body"]),
+                },
+            }
+
+        except ProxyError as e:
+            return {
+                "error": "Proxy connection failed - is Caido running?",
+                "details": str(e),
+                "original_request_id": request_id,
+            }
+        except (RequestException, Timeout) as e:
+            return {
+                "error": f"Failed to repeat request: {type(e).__name__}",
+                "details": str(e),
+                "original_request_id": request_id,
+            }
+
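repeat_request replays a captured request by re-parsing its raw text. _apply_modifications recognizes five keys in the modifications dict: "url" and "body" replace the originals outright, while "params", "headers", and "cookies" are merged into them. A sketch with placeholder values:

    replayed = manager.repeat_request(
        "123",  # hypothetical Caido request id
        modifications={
            "params": {"id": "2"},                      # merged into the query string
            "headers": {"X-Requested-With": "strix"},   # merged into the headers
            "cookies": {"session": "<other-session>"},  # merged into the Cookie header
        },
    )
    print(replayed.get("status_code"), replayed.get("modifications_applied"))

Note the heuristic in _build_full_url: it guesses https only when the Host header carries :443 or the Referer looks https, so an explicit "url" modification is the reliable way to force a scheme.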
+    def _handle_scope_list(self) -> dict[str, Any]:
+        result = self.client.execute(gql("query { scopes { id name allowlist denylist indexed } }"))
+        scopes = result.get("scopes", [])
+        return {"scopes": scopes, "count": len(scopes)}
+
+    def _handle_scope_get(self, scope_id: str | None) -> dict[str, Any]:
+        if not scope_id:
+            return self._handle_scope_list()
+
+        result = self.client.execute(
+            gql(
+                "query GetScope($id: ID!) { scope(id: $id) { id name allowlist denylist indexed } }"
+            ),
+            variable_values={"id": scope_id},
+        )
+        scope = result.get("scope")
+        if not scope:
+            return {"error": f"Scope {scope_id} not found"}
+        return {"scope": scope}
+
+    def _handle_scope_create(
+        self, scope_name: str, allowlist: list[str] | None, denylist: list[str] | None
+    ) -> dict[str, Any]:
+        if not scope_name:
+            return {"error": "scope_name required for create"}
+
+        mutation = gql("""
+            mutation CreateScope($input: CreateScopeInput!) {
+                createScope(input: $input) {
+                    scope { id name allowlist denylist indexed }
+                    error {
+                        ... on InvalidGlobTermsUserError { code terms }
+                        ... on OtherUserError { code }
+                    }
+                }
+            }
+        """)
+
+        result = self.client.execute(
+            mutation,
+            variable_values={
+                "input": {
+                    "name": scope_name,
+                    "allowlist": allowlist or [],
+                    "denylist": denylist or [],
+                }
+            },
+        )
+
+        payload = result.get("createScope", {})
+        if payload.get("error"):
+            error = payload["error"]
+            return {"error": f"Invalid glob patterns: {error.get('terms', error.get('code'))}"}
+
+        return {"scope": payload.get("scope"), "message": "Scope created successfully"}
+
+    def _handle_scope_update(
+        self,
+        scope_id: str,
+        scope_name: str,
+        allowlist: list[str] | None,
+        denylist: list[str] | None,
+    ) -> dict[str, Any]:
+        if not scope_id or not scope_name:
+            return {"error": "scope_id and scope_name required"}
+
+        mutation = gql("""
+            mutation UpdateScope($id: ID!, $input: UpdateScopeInput!) {
+                updateScope(id: $id, input: $input) {
+                    scope { id name allowlist denylist indexed }
+                    error {
+                        ... on InvalidGlobTermsUserError { code terms }
+                        ... on OtherUserError { code }
+                    }
+                }
+            }
+        """)
+
+        result = self.client.execute(
+            mutation,
+            variable_values={
+                "id": scope_id,
+                "input": {
+                    "name": scope_name,
+                    "allowlist": allowlist or [],
+                    "denylist": denylist or [],
+                },
+            },
+        )
+
+        payload = result.get("updateScope", {})
+        if payload.get("error"):
+            error = payload["error"]
+            return {"error": f"Invalid glob patterns: {error.get('terms', error.get('code'))}"}
+
+        return {"scope": payload.get("scope"), "message": "Scope updated successfully"}
+
+    def _handle_scope_delete(self, scope_id: str) -> dict[str, Any]:
+        if not scope_id:
+            return {"error": "scope_id required for delete"}
+
+        result = self.client.execute(
+            gql("mutation DeleteScope($id: ID!) { deleteScope(id: $id) { deletedId } }"),
+            variable_values={"id": scope_id},
+        )
+
+        payload = result.get("deleteScope", {})
+        if not payload.get("deletedId"):
+            return {"error": f"Failed to delete scope {scope_id}"}
+        return {"message": f"Scope {scope_id} deleted", "deletedId": payload["deletedId"]}
+
+    def scope_rules(
+        self,
+        action: str,
+        allowlist: list[str] | None = None,
+        denylist: list[str] | None = None,
+        scope_id: str | None = None,
+        scope_name: str | None = None,
+    ) -> dict[str, Any]:
+        handlers: dict[str, Callable[[], dict[str, Any]]] = {
+            "list": self._handle_scope_list,
+            "get": lambda: self._handle_scope_get(scope_id),
+            "create": lambda: (
+                {"error": "scope_name required for create"}
+                if not scope_name
+                else self._handle_scope_create(scope_name, allowlist, denylist)
+            ),
+            "update": lambda: (
+                {"error": "scope_id and scope_name required"}
+                if not scope_id or not scope_name
+                else self._handle_scope_update(scope_id, scope_name, allowlist, denylist)
+            ),
+            "delete": lambda: (
+                {"error": "scope_id required for delete"}
+                if not scope_id
+                else self._handle_scope_delete(scope_id)
+            ),
+        }
+
+        handler = handlers.get(action)
+        if not handler:
+            return {
+                "error": f"Unsupported action: {action}. Use 'get', 'list', 'create', "
+                "'update', or 'delete'"
+            }
+
+        try:
+            result = handler()
+        except (TransportQueryError, ValueError, KeyError) as e:
+            return {"error": f"Scope operation failed: {e}"}
+        else:
+            return result
+
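scope_rules is a small dispatcher: each action string maps to a handler, with the required-argument checks appearing both in the lambdas and again inside the handlers. Example calls, with placeholder names, ids, and globs:

    manager.scope_rules("create", scope_name="target", allowlist=["*.example.com"])
    manager.scope_rules("list")
    manager.scope_rules("update", scope_id="1", scope_name="target", denylist=["cdn.example.com"])
    manager.scope_rules("delete", scope_id="1")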
+    def list_sitemap(
+        self,
+        scope_id: str | None = None,
+        parent_id: str | None = None,
+        depth: str = "DIRECT",
+        page: int = 1,
+        page_size: int = 30,
+    ) -> dict[str, Any]:
+        try:
+            skip_count = (page - 1) * page_size
+
+            if parent_id:
+                query = gql("""
+                    query GetSitemapDescendants($parentId: ID!, $depth: SitemapDescendantsDepth!) {
+                        sitemapDescendantEntries(parentId: $parentId, depth: $depth) {
+                            edges {
+                                node {
+                                    id kind label hasDescendants
+                                    request { method path response { statusCode } }
+                                }
+                            }
+                            count { value }
+                        }
+                    }
+                """)
+                result = self.client.execute(
+                    query, variable_values={"parentId": parent_id, "depth": depth}
+                )
+                data = result.get("sitemapDescendantEntries", {})
+            else:
+                query = gql("""
+                    query GetSitemapRoots($scopeId: ID) {
+                        sitemapRootEntries(scopeId: $scopeId) {
+                            edges { node {
+                                id kind label hasDescendants
+                                metadata { ... on SitemapEntryMetadataDomain { isTls port } }
+                                request { method path response { statusCode } }
+                            } }
+                            count { value }
+                        }
+                    }
+                """)
+                result = self.client.execute(query, variable_values={"scopeId": scope_id})
+                data = result.get("sitemapRootEntries", {})
+
+            all_nodes = [edge["node"] for edge in data.get("edges", [])]
+            count_data = data.get("count") or {}
+            total_count = count_data.get("value", 0)
+
+            paginated_nodes = all_nodes[skip_count : skip_count + page_size]
+            cleaned_nodes = []
+
+            for node in paginated_nodes:
+                cleaned = {
+                    "id": node["id"],
+                    "kind": node["kind"],
+                    "label": node["label"],
+                    "hasDescendants": node["hasDescendants"],
+                }
+
+                if node.get("metadata") and (
+                    node["metadata"].get("isTls") is not None or node["metadata"].get("port")
+                ):
+                    cleaned["metadata"] = node["metadata"]
+
+                if node.get("request"):
+                    req = node["request"]
+                    cleaned_req = {}
+                    if req.get("method"):
+                        cleaned_req["method"] = req["method"]
+                    if req.get("path"):
+                        cleaned_req["path"] = req["path"]
+                    response_data = req.get("response") or {}
+                    if response_data.get("statusCode"):
+                        cleaned_req["status"] = response_data["statusCode"]
+                    if cleaned_req:
+                        cleaned["request"] = cleaned_req
+
+                cleaned_nodes.append(cleaned)
+
+            total_pages = (total_count + page_size - 1) // page_size
+
+            return {
+                "entries": cleaned_nodes,
+                "page": page,
+                "page_size": page_size,
+                "total_pages": total_pages,
+                "total_count": total_count,
+                "has_more": page < total_pages,
+                "showing": (
+                    f"{skip_count + 1}-{min(skip_count + page_size, total_count)} of {total_count}"
+                ),
+            }
+
+        except (TransportQueryError, ValueError, KeyError) as e:
+            return {"error": f"Failed to fetch sitemap: {e}"}
+
+    def _process_sitemap_metadata(self, node: dict[str, Any]) -> dict[str, Any]:
+        cleaned = {
+            "id": node["id"],
+            "kind": node["kind"],
+            "label": node["label"],
+            "hasDescendants": node["hasDescendants"],
+        }
+
+        if node.get("metadata") and (
+            node["metadata"].get("isTls") is not None or node["metadata"].get("port")
+        ):
+            cleaned["metadata"] = node["metadata"]
+
+        return cleaned
+
+    def _process_sitemap_request(self, req: dict[str, Any]) -> dict[str, Any] | None:
+        cleaned_req = {}
+        if req.get("method"):
+            cleaned_req["method"] = req["method"]
+        if req.get("path"):
+            cleaned_req["path"] = req["path"]
+        response_data = req.get("response") or {}
+        if response_data.get("statusCode"):
+            cleaned_req["status"] = response_data["statusCode"]
+        return cleaned_req if cleaned_req else None
+
+    def _process_sitemap_response(self, resp: dict[str, Any]) -> dict[str, Any]:
+        cleaned_resp = {}
+        if resp.get("statusCode"):
+            cleaned_resp["status"] = resp["statusCode"]
+        if resp.get("length"):
+            cleaned_resp["size"] = resp["length"]
+        if resp.get("roundtripTime"):
+            cleaned_resp["time_ms"] = resp["roundtripTime"]
+        return cleaned_resp
+
+    def view_sitemap_entry(self, entry_id: str) -> dict[str, Any]:
+        try:
+            query = gql("""
+                query GetSitemapEntry($id: ID!) {
+                    sitemapEntry(id: $id) {
+                        id kind label hasDescendants
+                        metadata { ... on SitemapEntryMetadataDomain { isTls port } }
+                        request { method path response { statusCode length roundtripTime } }
+                        requests(first: 30, order: {by: CREATED_AT, ordering: DESC}) {
+                            edges { node { method path response { statusCode length } } }
+                            count { value }
+                        }
+                    }
+                }
+            """)
+
+            result = self.client.execute(query, variable_values={"id": entry_id})
+            entry = result.get("sitemapEntry")
+
+            if not entry:
+                return {"error": f"Sitemap entry {entry_id} not found"}
+
+            cleaned = self._process_sitemap_metadata(entry)
+
+            if entry.get("request"):
+                req = entry["request"]
+                cleaned_req = {}
+                if req.get("method"):
+                    cleaned_req["method"] = req["method"]
+                if req.get("path"):
+                    cleaned_req["path"] = req["path"]
+                if req.get("response"):
+                    cleaned_req["response"] = self._process_sitemap_response(req["response"])
+                if cleaned_req:
+                    cleaned["request"] = cleaned_req
+
+            requests_data = entry.get("requests", {})
+            request_nodes = [edge["node"] for edge in requests_data.get("edges", [])]
+
+            cleaned_requests = [
+                req
+                for req in (self._process_sitemap_request(node) for node in request_nodes)
+                if req is not None
+            ]
+
+            count_data = requests_data.get("count") or {}
+            cleaned["related_requests"] = {
+                "requests": cleaned_requests,
+                "total_count": count_data.get("value", 0),
+                "showing": f"Latest {len(cleaned_requests)} requests",
+            }
+
+            return {"entry": cleaned} if cleaned else {"error": "Failed to process sitemap entry"}  # noqa: TRY300
+
+        except (TransportQueryError, ValueError, KeyError) as e:
+            return {"error": f"Failed to fetch sitemap entry: {e}"}
+
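Together these methods support a top-down walk of the sitemap: list the root entries, then descend into anything with hasDescendants set, fetching per-entry detail as needed. Note that list_sitemap paginates client-side after fetching all edges. A sketch:

    roots = manager.list_sitemap()
    for entry in roots.get("entries", []):
        print(entry["label"], entry["kind"])
        if entry["hasDescendants"]:
            children = manager.list_sitemap(parent_id=entry["id"], depth="DIRECT")
            detail = manager.view_sitemap_entry(entry["id"])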
+    def close(self) -> None:
+        pass
+
+
+_PROXY_MANAGER: ProxyManager | None = None
+
+
+def get_proxy_manager() -> ProxyManager:
+    global _PROXY_MANAGER
+    if _PROXY_MANAGER is None:
+        _PROXY_MANAGER = ProxyManager()
+    return _PROXY_MANAGER
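get_proxy_manager caches a single ProxyManager per process, so the proxy tool actions share one GraphQL client:

    manager = get_proxy_manager()
    assert manager is get_proxy_manager()  # repeat calls return the cached instance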