aiptx-2.0.7-py3-none-any.whl

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registry.
Files changed (187)
  1. aipt_v2/__init__.py +110 -0
  2. aipt_v2/__main__.py +24 -0
  3. aipt_v2/agents/AIPTxAgent/__init__.py +10 -0
  4. aipt_v2/agents/AIPTxAgent/aiptx_agent.py +211 -0
  5. aipt_v2/agents/__init__.py +46 -0
  6. aipt_v2/agents/base.py +520 -0
  7. aipt_v2/agents/exploit_agent.py +688 -0
  8. aipt_v2/agents/ptt.py +406 -0
  9. aipt_v2/agents/state.py +168 -0
  10. aipt_v2/app.py +957 -0
  11. aipt_v2/browser/__init__.py +31 -0
  12. aipt_v2/browser/automation.py +458 -0
  13. aipt_v2/browser/crawler.py +453 -0
  14. aipt_v2/cli.py +2933 -0
  15. aipt_v2/compliance/__init__.py +71 -0
  16. aipt_v2/compliance/compliance_report.py +449 -0
  17. aipt_v2/compliance/framework_mapper.py +424 -0
  18. aipt_v2/compliance/nist_mapping.py +345 -0
  19. aipt_v2/compliance/owasp_mapping.py +330 -0
  20. aipt_v2/compliance/pci_mapping.py +297 -0
  21. aipt_v2/config.py +341 -0
  22. aipt_v2/core/__init__.py +43 -0
  23. aipt_v2/core/agent.py +630 -0
  24. aipt_v2/core/llm.py +395 -0
  25. aipt_v2/core/memory.py +305 -0
  26. aipt_v2/core/ptt.py +329 -0
  27. aipt_v2/database/__init__.py +14 -0
  28. aipt_v2/database/models.py +232 -0
  29. aipt_v2/database/repository.py +384 -0
  30. aipt_v2/docker/__init__.py +23 -0
  31. aipt_v2/docker/builder.py +260 -0
  32. aipt_v2/docker/manager.py +222 -0
  33. aipt_v2/docker/sandbox.py +371 -0
  34. aipt_v2/evasion/__init__.py +58 -0
  35. aipt_v2/evasion/request_obfuscator.py +272 -0
  36. aipt_v2/evasion/tls_fingerprint.py +285 -0
  37. aipt_v2/evasion/ua_rotator.py +301 -0
  38. aipt_v2/evasion/waf_bypass.py +439 -0
  39. aipt_v2/execution/__init__.py +23 -0
  40. aipt_v2/execution/executor.py +302 -0
  41. aipt_v2/execution/parser.py +544 -0
  42. aipt_v2/execution/terminal.py +337 -0
  43. aipt_v2/health.py +437 -0
  44. aipt_v2/intelligence/__init__.py +194 -0
  45. aipt_v2/intelligence/adaptation.py +474 -0
  46. aipt_v2/intelligence/auth.py +520 -0
  47. aipt_v2/intelligence/chaining.py +775 -0
  48. aipt_v2/intelligence/correlation.py +536 -0
  49. aipt_v2/intelligence/cve_aipt.py +334 -0
  50. aipt_v2/intelligence/cve_info.py +1111 -0
  51. aipt_v2/intelligence/knowledge_graph.py +590 -0
  52. aipt_v2/intelligence/learning.py +626 -0
  53. aipt_v2/intelligence/llm_analyzer.py +502 -0
  54. aipt_v2/intelligence/llm_tool_selector.py +518 -0
  55. aipt_v2/intelligence/payload_generator.py +562 -0
  56. aipt_v2/intelligence/rag.py +239 -0
  57. aipt_v2/intelligence/scope.py +442 -0
  58. aipt_v2/intelligence/searchers/__init__.py +5 -0
  59. aipt_v2/intelligence/searchers/exploitdb_searcher.py +523 -0
  60. aipt_v2/intelligence/searchers/github_searcher.py +467 -0
  61. aipt_v2/intelligence/searchers/google_searcher.py +281 -0
  62. aipt_v2/intelligence/tools.json +443 -0
  63. aipt_v2/intelligence/triage.py +670 -0
  64. aipt_v2/interactive_shell.py +559 -0
  65. aipt_v2/interface/__init__.py +5 -0
  66. aipt_v2/interface/cli.py +230 -0
  67. aipt_v2/interface/main.py +501 -0
  68. aipt_v2/interface/tui.py +1276 -0
  69. aipt_v2/interface/utils.py +583 -0
  70. aipt_v2/llm/__init__.py +39 -0
  71. aipt_v2/llm/config.py +26 -0
  72. aipt_v2/llm/llm.py +514 -0
  73. aipt_v2/llm/memory.py +214 -0
  74. aipt_v2/llm/request_queue.py +89 -0
  75. aipt_v2/llm/utils.py +89 -0
  76. aipt_v2/local_tool_installer.py +1467 -0
  77. aipt_v2/models/__init__.py +15 -0
  78. aipt_v2/models/findings.py +295 -0
  79. aipt_v2/models/phase_result.py +224 -0
  80. aipt_v2/models/scan_config.py +207 -0
  81. aipt_v2/monitoring/grafana/dashboards/aipt-dashboard.json +355 -0
  82. aipt_v2/monitoring/grafana/dashboards/default.yml +17 -0
  83. aipt_v2/monitoring/grafana/datasources/prometheus.yml +17 -0
  84. aipt_v2/monitoring/prometheus.yml +60 -0
  85. aipt_v2/orchestration/__init__.py +52 -0
  86. aipt_v2/orchestration/pipeline.py +398 -0
  87. aipt_v2/orchestration/progress.py +300 -0
  88. aipt_v2/orchestration/scheduler.py +296 -0
  89. aipt_v2/orchestrator.py +2427 -0
  90. aipt_v2/payloads/__init__.py +27 -0
  91. aipt_v2/payloads/cmdi.py +150 -0
  92. aipt_v2/payloads/sqli.py +263 -0
  93. aipt_v2/payloads/ssrf.py +204 -0
  94. aipt_v2/payloads/templates.py +222 -0
  95. aipt_v2/payloads/traversal.py +166 -0
  96. aipt_v2/payloads/xss.py +204 -0
  97. aipt_v2/prompts/__init__.py +60 -0
  98. aipt_v2/proxy/__init__.py +29 -0
  99. aipt_v2/proxy/history.py +352 -0
  100. aipt_v2/proxy/interceptor.py +452 -0
  101. aipt_v2/recon/__init__.py +44 -0
  102. aipt_v2/recon/dns.py +241 -0
  103. aipt_v2/recon/osint.py +367 -0
  104. aipt_v2/recon/subdomain.py +372 -0
  105. aipt_v2/recon/tech_detect.py +311 -0
  106. aipt_v2/reports/__init__.py +17 -0
  107. aipt_v2/reports/generator.py +313 -0
  108. aipt_v2/reports/html_report.py +378 -0
  109. aipt_v2/runtime/__init__.py +53 -0
  110. aipt_v2/runtime/base.py +30 -0
  111. aipt_v2/runtime/docker.py +401 -0
  112. aipt_v2/runtime/local.py +346 -0
  113. aipt_v2/runtime/tool_server.py +205 -0
  114. aipt_v2/runtime/vps.py +830 -0
  115. aipt_v2/scanners/__init__.py +28 -0
  116. aipt_v2/scanners/base.py +273 -0
  117. aipt_v2/scanners/nikto.py +244 -0
  118. aipt_v2/scanners/nmap.py +402 -0
  119. aipt_v2/scanners/nuclei.py +273 -0
  120. aipt_v2/scanners/web.py +454 -0
  121. aipt_v2/scripts/security_audit.py +366 -0
  122. aipt_v2/setup_wizard.py +941 -0
  123. aipt_v2/skills/__init__.py +80 -0
  124. aipt_v2/skills/agents/__init__.py +14 -0
  125. aipt_v2/skills/agents/api_tester.py +706 -0
  126. aipt_v2/skills/agents/base.py +477 -0
  127. aipt_v2/skills/agents/code_review.py +459 -0
  128. aipt_v2/skills/agents/security_agent.py +336 -0
  129. aipt_v2/skills/agents/web_pentest.py +818 -0
  130. aipt_v2/skills/prompts/__init__.py +647 -0
  131. aipt_v2/system_detector.py +539 -0
  132. aipt_v2/telemetry/__init__.py +7 -0
  133. aipt_v2/telemetry/tracer.py +347 -0
  134. aipt_v2/terminal/__init__.py +28 -0
  135. aipt_v2/terminal/executor.py +400 -0
  136. aipt_v2/terminal/sandbox.py +350 -0
  137. aipt_v2/tools/__init__.py +44 -0
  138. aipt_v2/tools/active_directory/__init__.py +78 -0
  139. aipt_v2/tools/active_directory/ad_config.py +238 -0
  140. aipt_v2/tools/active_directory/bloodhound_wrapper.py +447 -0
  141. aipt_v2/tools/active_directory/kerberos_attacks.py +430 -0
  142. aipt_v2/tools/active_directory/ldap_enum.py +533 -0
  143. aipt_v2/tools/active_directory/smb_attacks.py +505 -0
  144. aipt_v2/tools/agents_graph/__init__.py +19 -0
  145. aipt_v2/tools/agents_graph/agents_graph_actions.py +69 -0
  146. aipt_v2/tools/api_security/__init__.py +76 -0
  147. aipt_v2/tools/api_security/api_discovery.py +608 -0
  148. aipt_v2/tools/api_security/graphql_scanner.py +622 -0
  149. aipt_v2/tools/api_security/jwt_analyzer.py +577 -0
  150. aipt_v2/tools/api_security/openapi_fuzzer.py +761 -0
  151. aipt_v2/tools/browser/__init__.py +5 -0
  152. aipt_v2/tools/browser/browser_actions.py +238 -0
  153. aipt_v2/tools/browser/browser_instance.py +535 -0
  154. aipt_v2/tools/browser/tab_manager.py +344 -0
  155. aipt_v2/tools/cloud/__init__.py +70 -0
  156. aipt_v2/tools/cloud/cloud_config.py +273 -0
  157. aipt_v2/tools/cloud/cloud_scanner.py +639 -0
  158. aipt_v2/tools/cloud/prowler_tool.py +571 -0
  159. aipt_v2/tools/cloud/scoutsuite_tool.py +359 -0
  160. aipt_v2/tools/executor.py +307 -0
  161. aipt_v2/tools/parser.py +408 -0
  162. aipt_v2/tools/proxy/__init__.py +5 -0
  163. aipt_v2/tools/proxy/proxy_actions.py +103 -0
  164. aipt_v2/tools/proxy/proxy_manager.py +789 -0
  165. aipt_v2/tools/registry.py +196 -0
  166. aipt_v2/tools/scanners/__init__.py +343 -0
  167. aipt_v2/tools/scanners/acunetix_tool.py +712 -0
  168. aipt_v2/tools/scanners/burp_tool.py +631 -0
  169. aipt_v2/tools/scanners/config.py +156 -0
  170. aipt_v2/tools/scanners/nessus_tool.py +588 -0
  171. aipt_v2/tools/scanners/zap_tool.py +612 -0
  172. aipt_v2/tools/terminal/__init__.py +5 -0
  173. aipt_v2/tools/terminal/terminal_actions.py +37 -0
  174. aipt_v2/tools/terminal/terminal_manager.py +153 -0
  175. aipt_v2/tools/terminal/terminal_session.py +449 -0
  176. aipt_v2/tools/tool_processing.py +108 -0
  177. aipt_v2/utils/__init__.py +17 -0
  178. aipt_v2/utils/logging.py +202 -0
  179. aipt_v2/utils/model_manager.py +187 -0
  180. aipt_v2/utils/searchers/__init__.py +269 -0
  181. aipt_v2/verify_install.py +793 -0
  182. aiptx-2.0.7.dist-info/METADATA +345 -0
  183. aiptx-2.0.7.dist-info/RECORD +187 -0
  184. aiptx-2.0.7.dist-info/WHEEL +5 -0
  185. aiptx-2.0.7.dist-info/entry_points.txt +7 -0
  186. aiptx-2.0.7.dist-info/licenses/LICENSE +21 -0
  187. aiptx-2.0.7.dist-info/top_level.txt +1 -0
aipt_v2/tools/proxy/proxy_manager.py
@@ -0,0 +1,789 @@
+ from __future__ import annotations
+
+ import base64
+ import os
+ import re
+ import time
+ from typing import TYPE_CHECKING, Any
+ from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
+
+ import requests
+ from gql import Client, gql
+ from gql.transport.exceptions import TransportQueryError
+ from gql.transport.requests import RequestsHTTPTransport
+ from requests.exceptions import ProxyError, RequestException, Timeout
+
+
+ if TYPE_CHECKING:
+     from collections.abc import Callable
+
+
+ class ProxyManager:
+     def __init__(self, auth_token: str | None = None):
+         host = "127.0.0.1"
+         port = os.getenv("CAIDO_PORT", "56789")
+         self.base_url = f"http://{host}:{port}/graphql"
+         self.proxies = {"http": f"http://{host}:{port}", "https": f"http://{host}:{port}"}
+         self.auth_token = auth_token or os.getenv("CAIDO_API_TOKEN")
+         self.transport = RequestsHTTPTransport(
+             url=self.base_url, headers={"Authorization": f"Bearer {self.auth_token}"}
+         )
+         self.client = Client(transport=self.transport, fetch_schema_from_transport=False)
+
+     def list_requests(
+         self,
+         httpql_filter: str | None = None,
+         start_page: int = 1,
+         end_page: int = 1,
+         page_size: int = 50,
+         sort_by: str = "timestamp",
+         sort_order: str = "desc",
+         scope_id: str | None = None,
+     ) -> dict[str, Any]:
+         offset = (start_page - 1) * page_size
+         limit = (end_page - start_page + 1) * page_size
+
+         sort_mapping = {
+             "timestamp": "CREATED_AT",
+             "host": "HOST",
+             "method": "METHOD",
+             "path": "PATH",
+             "status_code": "RESP_STATUS_CODE",
+             "response_time": "RESP_ROUNDTRIP_TIME",
+             "response_size": "RESP_LENGTH",
+             "source": "SOURCE",
+         }
+
+         query = gql("""
+             query GetRequests(
+                 $limit: Int, $offset: Int, $filter: HTTPQL,
+                 $order: RequestResponseOrderInput, $scopeId: ID
+             ) {
+                 requestsByOffset(
+                     limit: $limit, offset: $offset, filter: $filter,
+                     order: $order, scopeId: $scopeId
+                 ) {
+                     edges {
+                         node {
+                             id method host path query createdAt length isTls port
+                             source alteration fileExtension
+                             response { id statusCode length roundtripTime createdAt }
+                         }
+                     }
+                     count { value }
+                 }
+             }
+         """)
+
+         variables = {
+             "limit": limit,
+             "offset": offset,
+             "filter": httpql_filter,
+             "order": {
+                 "by": sort_mapping.get(sort_by, "CREATED_AT"),
+                 "ordering": sort_order.upper(),
+             },
+             "scopeId": scope_id,
+         }
+
+         try:
+             result = self.client.execute(query, variable_values=variables)
+             data = result.get("requestsByOffset", {})
+             nodes = [edge["node"] for edge in data.get("edges", [])]
+
+             count_data = data.get("count") or {}
+             return {
+                 "requests": nodes,
+                 "total_count": count_data.get("value", 0),
+                 "start_page": start_page,
+                 "end_page": end_page,
+                 "page_size": page_size,
+                 "offset": offset,
+                 "returned_count": len(nodes),
+                 "sort_by": sort_by,
+                 "sort_order": sort_order,
+             }
+         except (TransportQueryError, ValueError, KeyError) as e:
+             return {"requests": [], "total_count": 0, "error": f"Error fetching requests: {e}"}
+
+     def view_request(
+         self,
+         request_id: str,
+         part: str = "request",
+         search_pattern: str | None = None,
+         page: int = 1,
+         page_size: int = 50,
+     ) -> dict[str, Any]:
+         queries = {
+             "request": """query GetRequest($id: ID!) {
+                 request(id: $id) {
+                     id method host path query createdAt length isTls port
+                     source alteration edited raw
+                 }
+             }""",
+             "response": """query GetRequest($id: ID!) {
+                 request(id: $id) {
+                     id response {
+                         id statusCode length roundtripTime createdAt raw
+                     }
+                 }
+             }""",
+         }
+
+         if part not in queries:
+             return {"error": f"Invalid part '{part}'. Use 'request' or 'response'"}
+
+         try:
+             result = self.client.execute(gql(queries[part]), variable_values={"id": request_id})
+             request_data = result.get("request", {})
+
+             if not request_data:
+                 return {"error": f"Request {request_id} not found"}
+
+             if part == "request":
+                 raw_content = request_data.get("raw")
+             else:
+                 response_data = request_data.get("response") or {}
+                 raw_content = response_data.get("raw")
+
+             if not raw_content:
+                 return {"error": "No content available"}
+
+             content = base64.b64decode(raw_content).decode("utf-8", errors="replace")
+
+             if part == "response":
+                 request_data["response"]["raw"] = content
+             else:
+                 request_data["raw"] = content
+
+             return (
+                 self._search_content(request_data, content, search_pattern)
+                 if search_pattern
+                 else self._paginate_content(request_data, content, page, page_size)
+             )
+
+         except (TransportQueryError, ValueError, KeyError, UnicodeDecodeError) as e:
+             return {"error": f"Failed to view request: {e}"}
+
+     def _search_content(
+         self, request_data: dict[str, Any], content: str, pattern: str
+     ) -> dict[str, Any]:
+         try:
+             regex = re.compile(pattern, re.IGNORECASE | re.MULTILINE | re.DOTALL)
+             matches = []
+
+             for match in regex.finditer(content):
+                 start, end = match.start(), match.end()
+                 context_size = 120
+
+                 before = re.sub(r"\s+", " ", content[max(0, start - context_size) : start].strip())[
+                     -100:
+                 ]
+                 after = re.sub(r"\s+", " ", content[end : end + context_size].strip())[:100]
+
+                 matches.append(
+                     {"match": match.group(), "before": before, "after": after, "position": start}
+                 )
+
+                 if len(matches) >= 20:
+                     break
+
+             return {
+                 "id": request_data.get("id"),
+                 "matches": matches,
+                 "total_matches": len(matches),
+                 "search_pattern": pattern,
+                 "truncated": len(matches) >= 20,
+             }
+         except re.error as e:
+             return {"error": f"Invalid regex: {e}"}
+
+     def _paginate_content(
+         self, request_data: dict[str, Any], content: str, page: int, page_size: int
+     ) -> dict[str, Any]:
+         display_lines = []
+         for line in content.split("\n"):
+             if len(line) <= 80:
+                 display_lines.append(line)
+             else:
+                 display_lines.extend(
+                     [
+                         line[i : i + 80] + (" \\" if i + 80 < len(line) else "")
+                         for i in range(0, len(line), 80)
+                     ]
+                 )
+
+         total_lines = len(display_lines)
+         total_pages = (total_lines + page_size - 1) // page_size
+         page = max(1, min(page, total_pages))
+
+         start_line = (page - 1) * page_size
+         end_line = min(total_lines, start_line + page_size)
+
+         return {
+             "id": request_data.get("id"),
+             "content": "\n".join(display_lines[start_line:end_line]),
+             "page": page,
+             "total_pages": total_pages,
+             "showing_lines": f"{start_line + 1}-{end_line} of {total_lines}",
+             "has_more": page < total_pages,
+         }
+
+     def send_simple_request(
+         self,
+         method: str,
+         url: str,
+         headers: dict[str, str] | None = None,
+         body: str = "",
+         timeout: int = 30,
+     ) -> dict[str, Any]:
+         if headers is None:
+             headers = {}
+         try:
+             start_time = time.time()
+             response = requests.request(
+                 method=method,
+                 url=url,
+                 headers=headers,
+                 data=body or None,
+                 proxies=self.proxies,
+                 timeout=timeout,
+                 verify=False,
+             )
+             response_time = int((time.time() - start_time) * 1000)
+
+             body_content = response.text
+             if len(body_content) > 10000:
+                 body_content = body_content[:10000] + "\n... [truncated]"
+
+             return {
+                 "status_code": response.status_code,
+                 "headers": dict(response.headers),
+                 "body": body_content,
+                 "response_time_ms": response_time,
+                 "url": response.url,
+                 "message": (
+                     "Request sent through proxy - check list_requests() for captured traffic"
+                 ),
+             }
+         except (RequestException, ProxyError, Timeout) as e:
+             return {"error": f"Request failed: {type(e).__name__}", "details": str(e), "url": url}
+
+     def repeat_request(
+         self, request_id: str, modifications: dict[str, Any] | None = None
+     ) -> dict[str, Any]:
+         if modifications is None:
+             modifications = {}
+
+         original = self.view_request(request_id, "request")
+         if "error" in original:
+             return {"error": f"Could not retrieve original request: {original['error']}"}
+
+         raw_content = original.get("content", "")
+         if not raw_content:
+             return {"error": "No raw request content found"}
+
+         request_components = self._parse_http_request(raw_content)
+         if "error" in request_components:
+             return request_components
+
+         full_url = self._build_full_url(request_components, modifications)
+         if "error" in full_url:
+             return full_url
+
+         modified_request = self._apply_modifications(
+             request_components, modifications, full_url["url"]
+         )
+
+         return self._send_modified_request(modified_request, request_id, modifications)
+
+     def _parse_http_request(self, raw_content: str) -> dict[str, Any]:
+         lines = raw_content.split("\n")
+         request_line = lines[0].strip().split(" ")
+         if len(request_line) < 2:
+             return {"error": "Invalid request line format"}
+
+         method, url_path = request_line[0], request_line[1]
+
+         headers = {}
+         body_start = 0
+         for i, line in enumerate(lines[1:], 1):
+             if line.strip() == "":
+                 body_start = i + 1
+                 break
+             if ":" in line:
+                 key, value = line.split(":", 1)
+                 headers[key.strip()] = value.strip()
+
+         body = "\n".join(lines[body_start:]).strip() if body_start < len(lines) else ""
+
+         return {"method": method, "url_path": url_path, "headers": headers, "body": body}
+
+     def _build_full_url(
+         self, components: dict[str, Any], modifications: dict[str, Any]
+     ) -> dict[str, Any]:
+         headers = components["headers"]
+         host = headers.get("Host", "")
+         if not host:
+             return {"error": "No Host header found"}
+
+         protocol = (
+             "https" if ":443" in host or "https" in headers.get("Referer", "").lower() else "http"
+         )
+         full_url = f"{protocol}://{host}{components['url_path']}"
+
+         if "url" in modifications:
+             full_url = modifications["url"]
+
+         return {"url": full_url}
+
+     def _apply_modifications(
+         self, components: dict[str, Any], modifications: dict[str, Any], full_url: str
+     ) -> dict[str, Any]:
+         headers = components["headers"].copy()
+         body = components["body"]
+         final_url = full_url
+
+         if "params" in modifications:
+             parsed = urlparse(final_url)
+             params = {k: v[0] if v else "" for k, v in parse_qs(parsed.query).items()}
+             params.update(modifications["params"])
+             final_url = urlunparse(parsed._replace(query=urlencode(params)))
+
+         if "headers" in modifications:
+             headers.update(modifications["headers"])
+
+         if "body" in modifications:
+             body = modifications["body"]
+
+         if "cookies" in modifications:
+             cookies = {}
+             if headers.get("Cookie"):
+                 for cookie in headers["Cookie"].split(";"):
+                     if "=" in cookie:
+                         k, v = cookie.split("=", 1)
+                         cookies[k.strip()] = v.strip()
+             cookies.update(modifications["cookies"])
+             headers["Cookie"] = "; ".join([f"{k}={v}" for k, v in cookies.items()])
+
+         return {
+             "method": components["method"],
+             "url": final_url,
+             "headers": headers,
+             "body": body,
+         }
+
+     def _send_modified_request(
+         self, request_data: dict[str, Any], request_id: str, modifications: dict[str, Any]
+     ) -> dict[str, Any]:
+         try:
+             start_time = time.time()
+             response = requests.request(
+                 method=request_data["method"],
+                 url=request_data["url"],
+                 headers=request_data["headers"],
+                 data=request_data["body"] or None,
+                 proxies=self.proxies,
+                 timeout=30,
+                 verify=False,
+             )
+             response_time = int((time.time() - start_time) * 1000)
+
+             response_body = response.text
+             truncated = len(response_body) > 10000
+             if truncated:
+                 response_body = response_body[:10000] + "\n... [truncated]"
+
+             return {
+                 "status_code": response.status_code,
+                 "status_text": response.reason,
+                 "headers": {
+                     k: v
+                     for k, v in response.headers.items()
+                     if k.lower()
+                     in ["content-type", "content-length", "server", "set-cookie", "location"]
+                 },
+                 "body": response_body,
+                 "body_truncated": truncated,
+                 "body_size": len(response.content),
+                 "response_time_ms": response_time,
+                 "url": response.url,
+                 "original_request_id": request_id,
+                 "modifications_applied": modifications,
+                 "request": {
+                     "method": request_data["method"],
+                     "url": request_data["url"],
+                     "headers": request_data["headers"],
+                     "has_body": bool(request_data["body"]),
+                 },
+             }
+
+         except ProxyError as e:
+             return {
+                 "error": "Proxy connection failed - is Caido running?",
+                 "details": str(e),
+                 "original_request_id": request_id,
+             }
+         except (RequestException, Timeout) as e:
+             return {
+                 "error": f"Failed to repeat request: {type(e).__name__}",
+                 "details": str(e),
+                 "original_request_id": request_id,
+             }
+
+     def _handle_scope_list(self) -> dict[str, Any]:
+         result = self.client.execute(gql("query { scopes { id name allowlist denylist indexed } }"))
+         scopes = result.get("scopes", [])
+         return {"scopes": scopes, "count": len(scopes)}
+
+     def _handle_scope_get(self, scope_id: str | None) -> dict[str, Any]:
+         if not scope_id:
+             return self._handle_scope_list()
+
+         result = self.client.execute(
+             gql(
+                 "query GetScope($id: ID!) { scope(id: $id) { id name allowlist denylist indexed } }"
+             ),
+             variable_values={"id": scope_id},
+         )
+         scope = result.get("scope")
+         if not scope:
+             return {"error": f"Scope {scope_id} not found"}
+         return {"scope": scope}
+
+     def _handle_scope_create(
+         self, scope_name: str, allowlist: list[str] | None, denylist: list[str] | None
+     ) -> dict[str, Any]:
+         if not scope_name:
+             return {"error": "scope_name required for create"}
+
+         mutation = gql("""
+             mutation CreateScope($input: CreateScopeInput!) {
+                 createScope(input: $input) {
+                     scope { id name allowlist denylist indexed }
+                     error {
+                         ... on InvalidGlobTermsUserError { code terms }
+                         ... on OtherUserError { code }
+                     }
+                 }
+             }
+         """)
+
+         result = self.client.execute(
+             mutation,
+             variable_values={
+                 "input": {
+                     "name": scope_name,
+                     "allowlist": allowlist or [],
+                     "denylist": denylist or [],
+                 }
+             },
+         )
+
+         payload = result.get("createScope", {})
+         if payload.get("error"):
+             error = payload["error"]
+             return {"error": f"Invalid glob patterns: {error.get('terms', error.get('code'))}"}
+
+         return {"scope": payload.get("scope"), "message": "Scope created successfully"}
+
+     def _handle_scope_update(
+         self,
+         scope_id: str,
+         scope_name: str,
+         allowlist: list[str] | None,
+         denylist: list[str] | None,
+     ) -> dict[str, Any]:
+         if not scope_id or not scope_name:
+             return {"error": "scope_id and scope_name required"}
+
+         mutation = gql("""
+             mutation UpdateScope($id: ID!, $input: UpdateScopeInput!) {
+                 updateScope(id: $id, input: $input) {
+                     scope { id name allowlist denylist indexed }
+                     error {
+                         ... on InvalidGlobTermsUserError { code terms }
+                         ... on OtherUserError { code }
+                     }
+                 }
+             }
+         """)
+
+         result = self.client.execute(
+             mutation,
+             variable_values={
+                 "id": scope_id,
+                 "input": {
+                     "name": scope_name,
+                     "allowlist": allowlist or [],
+                     "denylist": denylist or [],
+                 },
+             },
+         )
+
+         payload = result.get("updateScope", {})
+         if payload.get("error"):
+             error = payload["error"]
+             return {"error": f"Invalid glob patterns: {error.get('terms', error.get('code'))}"}
+
+         return {"scope": payload.get("scope"), "message": "Scope updated successfully"}
+
+     def _handle_scope_delete(self, scope_id: str) -> dict[str, Any]:
+         if not scope_id:
+             return {"error": "scope_id required for delete"}
+
+         result = self.client.execute(
+             gql("mutation DeleteScope($id: ID!) { deleteScope(id: $id) { deletedId } }"),
+             variable_values={"id": scope_id},
+         )
+
+         payload = result.get("deleteScope", {})
+         if not payload.get("deletedId"):
+             return {"error": f"Failed to delete scope {scope_id}"}
+         return {"message": f"Scope {scope_id} deleted", "deletedId": payload["deletedId"]}
+
+     def scope_rules(
+         self,
+         action: str,
+         allowlist: list[str] | None = None,
+         denylist: list[str] | None = None,
+         scope_id: str | None = None,
+         scope_name: str | None = None,
+     ) -> dict[str, Any]:
+         handlers: dict[str, Callable[[], dict[str, Any]]] = {
+             "list": self._handle_scope_list,
+             "get": lambda: self._handle_scope_get(scope_id),
+             "create": lambda: (
+                 {"error": "scope_name required for create"}
+                 if not scope_name
+                 else self._handle_scope_create(scope_name, allowlist, denylist)
+             ),
+             "update": lambda: (
+                 {"error": "scope_id and scope_name required"}
+                 if not scope_id or not scope_name
+                 else self._handle_scope_update(scope_id, scope_name, allowlist, denylist)
+             ),
+             "delete": lambda: (
+                 {"error": "scope_id required for delete"}
+                 if not scope_id
+                 else self._handle_scope_delete(scope_id)
+             ),
+         }
+
+         handler = handlers.get(action)
+         if not handler:
+             return {
+                 "error": f"Unsupported action: {action}. Use 'get', 'list', 'create', "
+                 f"'update', or 'delete'"
+             }
+
+         try:
+             result = handler()
+         except (TransportQueryError, ValueError, KeyError) as e:
+             return {"error": f"Scope operation failed: {e}"}
+         else:
+             return result
+
+     def list_sitemap(
+         self,
+         scope_id: str | None = None,
+         parent_id: str | None = None,
+         depth: str = "DIRECT",
+         page: int = 1,
+         page_size: int = 30,
+     ) -> dict[str, Any]:
+         try:
+             skip_count = (page - 1) * page_size
+
+             if parent_id:
+                 query = gql("""
+                     query GetSitemapDescendants($parentId: ID!, $depth: SitemapDescendantsDepth!) {
+                         sitemapDescendantEntries(parentId: $parentId, depth: $depth) {
+                             edges {
+                                 node {
+                                     id kind label hasDescendants
+                                     request { method path response { statusCode } }
+                                 }
+                             }
+                             count { value }
+                         }
+                     }
+                 """)
+                 result = self.client.execute(
+                     query, variable_values={"parentId": parent_id, "depth": depth}
+                 )
+                 data = result.get("sitemapDescendantEntries", {})
+             else:
+                 query = gql("""
+                     query GetSitemapRoots($scopeId: ID) {
+                         sitemapRootEntries(scopeId: $scopeId) {
+                             edges { node {
+                                 id kind label hasDescendants
+                                 metadata { ... on SitemapEntryMetadataDomain { isTls port } }
+                                 request { method path response { statusCode } }
+                             } }
+                             count { value }
+                         }
+                     }
+                 """)
+                 result = self.client.execute(query, variable_values={"scopeId": scope_id})
+                 data = result.get("sitemapRootEntries", {})
+
+             all_nodes = [edge["node"] for edge in data.get("edges", [])]
+             count_data = data.get("count") or {}
+             total_count = count_data.get("value", 0)
+
+             paginated_nodes = all_nodes[skip_count : skip_count + page_size]
+             cleaned_nodes = []
+
+             for node in paginated_nodes:
+                 cleaned = {
+                     "id": node["id"],
+                     "kind": node["kind"],
+                     "label": node["label"],
+                     "hasDescendants": node["hasDescendants"],
+                 }
+
+                 if node.get("metadata") and (
+                     node["metadata"].get("isTls") is not None or node["metadata"].get("port")
+                 ):
+                     cleaned["metadata"] = node["metadata"]
+
+                 if node.get("request"):
+                     req = node["request"]
+                     cleaned_req = {}
+                     if req.get("method"):
+                         cleaned_req["method"] = req["method"]
+                     if req.get("path"):
+                         cleaned_req["path"] = req["path"]
+                     response_data = req.get("response") or {}
+                     if response_data.get("statusCode"):
+                         cleaned_req["status"] = response_data["statusCode"]
+                     if cleaned_req:
+                         cleaned["request"] = cleaned_req
+
+                 cleaned_nodes.append(cleaned)
+
+             total_pages = (total_count + page_size - 1) // page_size
+
+             return {
+                 "entries": cleaned_nodes,
+                 "page": page,
+                 "page_size": page_size,
+                 "total_pages": total_pages,
+                 "total_count": total_count,
+                 "has_more": page < total_pages,
+                 "showing": (
+                     f"{skip_count + 1}-{min(skip_count + page_size, total_count)} of {total_count}"
+                 ),
+             }
+
+         except (TransportQueryError, ValueError, KeyError) as e:
+             return {"error": f"Failed to fetch sitemap: {e}"}
+
+     def _process_sitemap_metadata(self, node: dict[str, Any]) -> dict[str, Any]:
+         cleaned = {
+             "id": node["id"],
+             "kind": node["kind"],
+             "label": node["label"],
+             "hasDescendants": node["hasDescendants"],
+         }
+
+         if node.get("metadata") and (
+             node["metadata"].get("isTls") is not None or node["metadata"].get("port")
+         ):
+             cleaned["metadata"] = node["metadata"]
+
+         return cleaned
+
+     def _process_sitemap_request(self, req: dict[str, Any]) -> dict[str, Any] | None:
+         cleaned_req = {}
+         if req.get("method"):
+             cleaned_req["method"] = req["method"]
+         if req.get("path"):
+             cleaned_req["path"] = req["path"]
+         response_data = req.get("response") or {}
+         if response_data.get("statusCode"):
+             cleaned_req["status"] = response_data["statusCode"]
+         return cleaned_req if cleaned_req else None
+
+     def _process_sitemap_response(self, resp: dict[str, Any]) -> dict[str, Any]:
+         cleaned_resp = {}
+         if resp.get("statusCode"):
+             cleaned_resp["status"] = resp["statusCode"]
+         if resp.get("length"):
+             cleaned_resp["size"] = resp["length"]
+         if resp.get("roundtripTime"):
+             cleaned_resp["time_ms"] = resp["roundtripTime"]
+         return cleaned_resp
+
+     def view_sitemap_entry(self, entry_id: str) -> dict[str, Any]:
+         try:
+             query = gql("""
+                 query GetSitemapEntry($id: ID!) {
+                     sitemapEntry(id: $id) {
+                         id kind label hasDescendants
+                         metadata { ... on SitemapEntryMetadataDomain { isTls port } }
+                         request { method path response { statusCode length roundtripTime } }
+                         requests(first: 30, order: {by: CREATED_AT, ordering: DESC}) {
+                             edges { node { method path response { statusCode length } } }
+                             count { value }
+                         }
+                     }
+                 }
+             """)
+
+             result = self.client.execute(query, variable_values={"id": entry_id})
+             entry = result.get("sitemapEntry")
+
+             if not entry:
+                 return {"error": f"Sitemap entry {entry_id} not found"}
+
+             cleaned = self._process_sitemap_metadata(entry)
+
+             if entry.get("request"):
+                 req = entry["request"]
+                 cleaned_req = {}
+                 if req.get("method"):
+                     cleaned_req["method"] = req["method"]
+                 if req.get("path"):
+                     cleaned_req["path"] = req["path"]
+                 if req.get("response"):
+                     cleaned_req["response"] = self._process_sitemap_response(req["response"])
+                 if cleaned_req:
+                     cleaned["request"] = cleaned_req
+
+             requests_data = entry.get("requests", {})
+             request_nodes = [edge["node"] for edge in requests_data.get("edges", [])]
+
+             cleaned_requests = [
+                 req
+                 for req in (self._process_sitemap_request(node) for node in request_nodes)
+                 if req is not None
+             ]
+
+             count_data = requests_data.get("count") or {}
+             cleaned["related_requests"] = {
+                 "requests": cleaned_requests,
+                 "total_count": count_data.get("value", 0),
+                 "showing": f"Latest {len(cleaned_requests)} requests",
+             }
+
+             return {"entry": cleaned} if cleaned else {"error": "Failed to process sitemap entry"}  # noqa: TRY300
+
+         except (TransportQueryError, ValueError, KeyError) as e:
+             return {"error": f"Failed to fetch sitemap entry: {e}"}
+
+     def close(self) -> None:
+         """Close proxy manager and cleanup resources."""
+         # No resources to cleanup in current implementation
+         return
+
+
+ _PROXY_MANAGER: ProxyManager | None = None
+
+
+ def get_proxy_manager() -> ProxyManager:
+     """Return the shared ProxyManager, creating and caching it on first use."""
+     global _PROXY_MANAGER
+     if _PROXY_MANAGER is None:
+         _PROXY_MANAGER = ProxyManager()
+     return _PROXY_MANAGER
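
A minimal usage sketch of the ProxyManager API introduced by this file (editor illustration, not shipped in the wheel). It assumes a local Caido instance on 127.0.0.1:56789 and a CAIDO_API_TOKEN in the environment, matching the defaults in __init__ above, and imports via the module path listed as file 164:

    # Illustration only: assumes Caido is running locally with CAIDO_API_TOKEN set.
    from aipt_v2.tools.proxy.proxy_manager import get_proxy_manager

    pm = get_proxy_manager()

    # Drive a request through the intercepting proxy, then read back the capture.
    pm.send_simple_request("GET", "http://example.com/")
    recent = pm.list_requests(page_size=10)
    for req in recent.get("requests", []):
        print(req["id"], req["method"], req["host"], req["path"])

    # Replay the newest captured request with one header swapped in.
    if recent.get("requests"):
        replayed = pm.repeat_request(
            recent["requests"][0]["id"],
            modifications={"headers": {"X-Test": "1"}},
        )
        print(replayed.get("status_code"), replayed.get("response_time_ms"))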