cmdop 0.1.23__py3-none-any.whl → 0.1.25__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cmdop/__init__.py +1 -1
- cmdop/helpers/__init__.py +8 -0
- cmdop/helpers/network_analyzer.py +368 -0
- cmdop/services/browser/capabilities/__init__.py +2 -0
- cmdop/services/browser/capabilities/visual.py +100 -0
- cmdop/services/browser/session.py +10 -0
- cmdop-0.1.25.dist-info/METADATA +322 -0
- {cmdop-0.1.23.dist-info → cmdop-0.1.25.dist-info}/RECORD +10 -8
- cmdop-0.1.23.dist-info/METADATA +0 -330
- {cmdop-0.1.23.dist-info → cmdop-0.1.25.dist-info}/WHEEL +0 -0
- {cmdop-0.1.23.dist-info → cmdop-0.1.25.dist-info}/licenses/LICENSE +0 -0
cmdop/__init__.py
CHANGED
cmdop/helpers/__init__.py
CHANGED
|
@@ -2,8 +2,16 @@
|
|
|
2
2
|
|
|
3
3
|
from cmdop.helpers.formatting import json_to_toon
|
|
4
4
|
from cmdop.helpers.cleaner import JsonCleaner
|
|
5
|
+
from cmdop.helpers.network_analyzer import (
|
|
6
|
+
NetworkAnalyzer,
|
|
7
|
+
NetworkSnapshot,
|
|
8
|
+
RequestSnapshot,
|
|
9
|
+
)
|
|
5
10
|
|
|
6
11
|
__all__ = [
|
|
7
12
|
"json_to_toon",
|
|
8
13
|
"JsonCleaner",
|
|
14
|
+
"NetworkAnalyzer",
|
|
15
|
+
"NetworkSnapshot",
|
|
16
|
+
"RequestSnapshot",
|
|
9
17
|
]
|
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
"""Network analyzer for discovering API endpoints and creating request snapshots."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import time
|
|
6
|
+
from typing import TYPE_CHECKING, Any
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, Field
|
|
9
|
+
from urllib.parse import urlparse, parse_qs
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from cmdop.services.browser.session import BrowserSession
|
|
13
|
+
from cmdop.services.browser.models import NetworkExchange
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class RequestSnapshot(BaseModel):
    """Complete snapshot of an API request for reproduction.

    Captures everything needed to replay the call outside the browser
    (URL, method, headers, body, cookies) plus a lightweight analysis of
    the JSON response (data key, item count, field names).
    """

    # Request info
    url: str
    method: str = "GET"
    headers: dict[str, str] = Field(default_factory=dict)
    body: str = ""

    # Response info
    status: int | None = None
    content_type: str = ""
    size: int = 0

    # Parsed URL parts
    base_url: str = ""
    path: str = ""
    query_params: dict[str, list[str]] = Field(default_factory=dict)

    # Data analysis
    data_key: str | None = None
    item_count: int | None = None
    item_fields: list[str] = Field(default_factory=list)
    sample_response: Any = None

    # Session data
    cookies: dict[str, str] = Field(default_factory=dict)

    def to_curl(self) -> str:
        """Generate a curl command that reproduces this request.

        All interpolated values are shell-quoted with shlex.quote, so
        headers, cookies, bodies and URLs containing quotes or shell
        metacharacters no longer break the generated command.
        """
        import shlex

        parts = [f"curl -X {self.method}"]

        # Add headers (host/content-length are recomputed by curl itself)
        for key, value in self.headers.items():
            if key.lower() not in ("host", "content-length"):
                parts.append(f"-H {shlex.quote(f'{key}: {value}')}")

        # Add cookies if not already present as a header
        if self.cookies and "cookie" not in [k.lower() for k in self.headers]:
            cookie_str = "; ".join(f"{k}={v}" for k, v in self.cookies.items())
            parts.append(f"-H {shlex.quote(f'Cookie: {cookie_str}')}")

        # Add body
        if self.body:
            parts.append(f"-d {shlex.quote(self.body)}")

        # Add URL
        parts.append(shlex.quote(self.url))

        return " \\\n  ".join(parts)

    def to_httpx(self) -> str:
        """Generate httpx Python code to reproduce this request.

        Keys and values are emitted with repr() so the generated source
        stays syntactically valid even when they contain quote characters.
        """
        lines = ["import httpx", ""]

        # Headers
        if self.headers:
            lines.append("headers = {")
            for key, value in self.headers.items():
                if key.lower() not in ("host", "content-length"):
                    lines.append(f"    {key!r}: {value!r},")
            lines.append("}")
        else:
            lines.append("headers = {}")

        # Cookies
        if self.cookies:
            lines.append("")
            lines.append("cookies = {")
            for key, value in self.cookies.items():
                lines.append(f"    {key!r}: {value!r},")
            lines.append("}")
        else:
            lines.append("cookies = {}")

        # Request
        lines.append("")
        if self.method == "GET":
            lines.append(f'response = httpx.get({self.url!r}, headers=headers, cookies=cookies)')
        elif self.method == "POST":
            if self.body:
                lines.append(f'data = {repr(self.body)}')
                lines.append(f'response = httpx.post({self.url!r}, headers=headers, cookies=cookies, content=data)')
            else:
                lines.append(f'response = httpx.post({self.url!r}, headers=headers, cookies=cookies)')
        else:
            lines.append(f'response = httpx.request({self.method!r}, {self.url!r}, headers=headers, cookies=cookies)')

        lines.append("print(response.json())")

        return "\n".join(lines)
+
|
|
109
|
+
class NetworkSnapshot(BaseModel):
    """Aggregated network activity captured for a single page visit."""

    url: str
    timestamp: str = ""

    # Session state at capture time
    cookies: dict[str, str] = Field(default_factory=dict)
    local_storage: dict[str, str] = Field(default_factory=dict)

    # Captured traffic, bucketed by content type
    api_requests: list[RequestSnapshot] = Field(default_factory=list)
    json_requests: list[RequestSnapshot] = Field(default_factory=list)
    other_requests: list[dict] = Field(default_factory=list)

    # Aggregate counters
    total_requests: int = 0
    total_bytes: int = 0

    def to_json(self, indent: int = 2) -> str:
        """Serialize this snapshot to a JSON string."""
        return self.model_dump_json(indent=indent)

    def best_api(self) -> RequestSnapshot | None:
        """Return the most promising data API, or None if nothing was captured.

        Candidates are ranked by response size first, then by the number
        of items detected in the JSON payload.
        """
        candidates = self.api_requests
        if not candidates:
            return None

        def rank(req: RequestSnapshot) -> tuple[int, int]:
            return (req.size, req.item_count or 0)

        return max(candidates, key=rank)
138
|
+
|
|
139
|
+
class NetworkAnalyzer:
    """Analyze network requests to discover API endpoints.

    Creates complete request snapshots including cookies, headers, and
    all data needed to reproduce API calls.

    Usage:
        from cmdop import CMDOPClient
        from cmdop.helpers import NetworkAnalyzer

        client = CMDOPClient.local()
        with client.browser.create_session(headless=False) as b:
            analyzer = NetworkAnalyzer(b)

            # Interactive mode - user clicks pagination
            snapshot = analyzer.capture("https://example.com/cars", wait_seconds=30)

            # Get best API endpoint
            if snapshot.api_requests:
                best = snapshot.best_api()
                print(f"API: {best.url}")
                print(f"Curl: {best.to_curl()}")
    """

    # Common keys that contain data arrays in JSON API responses
    DATA_KEYS = [
        "data", "items", "results", "list", "records",
        "cars", "vehicles", "products", "listings", "entries",
        "rows", "content", "objects", "elements", "collection",
    ]

    def __init__(self, session: "BrowserSession"):
        """Initialize with an open browser session."""
        self._session = session

    def capture(
        self,
        url: str,
        wait_seconds: int = 30,
        url_pattern: str = "",
        clear_initial: bool = True,
        same_origin: bool = True,
        min_size: int = 100,
        max_size: int = 5_000_000,
        countdown_message: str = "Click pagination!",
    ) -> NetworkSnapshot:
        """Capture network requests while user interacts with page.

        Args:
            url: Page URL to open
            wait_seconds: Time to wait for user interactions
            url_pattern: Optional regex filter for API URLs
            clear_initial: Clear page load requests before capture
            same_origin: Only capture requests to same domain (default True)
            min_size: Min response size in bytes (filter tracking pixels)
            max_size: Max response size in bytes (filter images/assets)
            countdown_message: Message to show in countdown toast

        Returns:
            NetworkSnapshot with all captured requests and session data
        """
        # Imported locally to avoid import cycles with the browser package.
        from cmdop.services.browser.models import WaitUntil
        from datetime import datetime

        b = self._session
        snapshot = NetworkSnapshot(
            url=url,
            timestamp=datetime.now().isoformat(),
        )

        # Extract base domain for same-origin filtering
        base_domain = self._extract_base_domain(url)

        # Enable network capture
        b.network.enable(max_exchanges=500, max_response_size=5_000_000)

        try:
            print(f"Opening {url}...")
            b.navigate(url, timeout_ms=90000, wait_until=WaitUntil.LOAD)

            # Wait for the page to be interactive; best-effort, since some
            # pages never satisfy the selector within the timeout.
            try:
                b.wait_for("body", timeout_ms=10000)
            except Exception:
                pass
            time.sleep(2)

            if clear_initial:
                b.network.clear()

            # Show countdown while the user interacts with the page
            b.visual.countdown(wait_seconds, countdown_message)

            # Get cookies (best-effort: a snapshot without cookies is still useful)
            b.visual.toast("Getting cookies...")
            try:
                cookies = b.get_cookies()
                snapshot.cookies = {c.name: c.value for c in cookies}
            except Exception:
                pass

            # Get capture statistics
            b.visual.toast("Getting network stats...")
            stats = b.network.stats()
            snapshot.total_requests = stats.total_captured
            snapshot.total_bytes = stats.total_bytes

            b.visual.toast(f"Captured {stats.total_captured} requests")

            # Keep only XHR/Fetch exchanges
            b.visual.toast("Filtering XHR/Fetch...")
            api_calls = b.network.filter(
                url_pattern=url_pattern,
                resource_types=["xhr", "fetch"],
            )

            # Optionally restrict to the page's own domain
            if same_origin:
                api_calls = [
                    call for call in api_calls
                    if base_domain in urlparse(call.request.url).netloc
                ]

            # Filter by response size (ignore tracking pixels and heavy assets)
            api_calls = [
                call for call in api_calls
                if call.response and min_size <= call.response.size <= max_size
            ]

            b.visual.toast(f"Found {len(api_calls)} API calls")

            # Analyze calls - all JSON responses are API requests.
            # Case-insensitive check to match _create_snapshot's JSON detection.
            for call in api_calls:
                req = self._create_snapshot(call, snapshot.cookies)
                if req:
                    if req.content_type and "json" in req.content_type.lower():
                        snapshot.api_requests.append(req)
                    else:
                        snapshot.other_requests.append({
                            "url": call.request.url,
                            "method": call.request.method,
                            "status": call.response.status if call.response else None,
                        })

        finally:
            b.network.disable()

        return snapshot

    def _extract_base_domain(self, url: str) -> str:
        """Extract base domain from URL, handling country-code TLDs."""
        parsed = urlparse(url)
        host = parsed.netloc.replace("www.", "")
        parts = host.split(".")

        # Country-code second-level domains (e.g. "co" in example.co.kr)
        cc_slds = {"co", "com", "net", "org", "ac", "go", "ne", "or"}

        if len(parts) >= 3 and parts[-2] in cc_slds:
            return ".".join(parts[-3:])  # bobaedream.co.kr
        elif len(parts) >= 2:
            return ".".join(parts[-2:])  # kcar.com
        return host

    def _create_snapshot(
        self,
        exchange: "NetworkExchange",
        session_cookies: dict[str, str],
    ) -> RequestSnapshot | None:
        """Create a request snapshot from a network exchange.

        Returns None when the exchange has no response. JSON bodies are
        additionally analyzed for a data array (key, item count, fields).
        """
        if not exchange.response:
            return None

        parsed = urlparse(exchange.request.url)

        snapshot = RequestSnapshot(
            url=exchange.request.url,
            method=exchange.request.method,
            headers=dict(exchange.request.headers),
            body=exchange.request.body.decode("utf-8", errors="ignore") if exchange.request.body else "",
            status=exchange.response.status,
            content_type=exchange.response.content_type or "",
            size=exchange.response.size,
            base_url=f"{parsed.scheme}://{parsed.netloc}",
            path=parsed.path,
            query_params=parse_qs(parsed.query),
            cookies=session_cookies,
        )

        # Parse JSON response (best-effort: malformed bodies are skipped)
        if "json" in snapshot.content_type.lower():
            try:
                data = exchange.json_body()
                snapshot.sample_response = data

                if isinstance(data, list):
                    snapshot.item_count = len(data)
                    if data and isinstance(data[0], dict):
                        snapshot.item_fields = list(data[0].keys())
                elif isinstance(data, dict):
                    # Probe well-known wrapper keys for the data array
                    for key in self.DATA_KEYS:
                        if key in data and isinstance(data[key], list):
                            snapshot.data_key = key
                            snapshot.item_count = len(data[key])
                            if data[key] and isinstance(data[key][0], dict):
                                snapshot.item_fields = list(data[key][0].keys())
                            break
            except Exception:
                pass

        return snapshot
|
368
|
+
|
|
@@ -6,6 +6,7 @@ from .timing import TimingCapability
|
|
|
6
6
|
from .dom import DOMCapability
|
|
7
7
|
from .fetch import FetchCapability
|
|
8
8
|
from .network import NetworkCapability
|
|
9
|
+
from .visual import VisualCapability
|
|
9
10
|
|
|
10
11
|
__all__ = [
|
|
11
12
|
"ScrollCapability",
|
|
@@ -14,4 +15,5 @@ __all__ = [
|
|
|
14
15
|
"DOMCapability",
|
|
15
16
|
"FetchCapability",
|
|
16
17
|
"NetworkCapability",
|
|
18
|
+
"VisualCapability",
|
|
17
19
|
]
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
"""Visual capability - CMDOP plugin overlay effects."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import time
|
|
6
|
+
|
|
7
|
+
from ._base import BaseCapability
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class VisualCapability(BaseCapability):
    """Overlay effects rendered by the CMDOP browser plugin.

    Exposes toast notifications, click/cursor effects, element
    highlighting, automation-state badges and a blocking countdown.

    Usage:
        b.visual.toast("Hello!")
        b.visual.countdown(10)  # Blocking countdown with toasts
        b.visual.highlight(".button")
    """

    def _dispatch(self, action: str, data: dict) -> None:
        """Send a visual action to the CMDOP Visual extension.

        The "mw:" prefix asks the Go backend to run the script in the
        page's main world, where __CMDOP_VISUAL__ lives. The extension
        API handles the postMessage hop to the content script itself.
        """
        import json

        payload = json.dumps(data)
        # "mw:" prefix tells Go backend to execute in main world context
        script = f"""mw:(() => {{
            const data = {payload};
            const api = window.__CMDOP_VISUAL__;
            if (!api) return;

            switch ('{action}') {{
                case 'toast': api.showToast?.(data.message); break;
                case 'clearToasts': api.clearToasts?.(); break;
                case 'click': api.showClick?.(data.x, data.y, data.type || 'left'); break;
                case 'move': api.showMouseMove?.(data.fromX, data.fromY, data.toX, data.toY); break;
                case 'highlight': api.showHighlight?.(data.selector); break;
                case 'hideHighlight': api.hideHighlight?.(); break;
                case 'clearTrail': api.clearTrail?.(); break;
                case 'state': api.setAutomationState?.(data.state); break;
            }}
        }})()
        """
        self._js(script)

    def toast(self, message: str) -> None:
        """Display a toast notification in the browser."""
        self._dispatch("toast", {"message": message})

    def clear_toasts(self) -> None:
        """Remove every visible toast notification."""
        self._dispatch("clearToasts", {})

    def click(self, x: int, y: int, click_type: str = "left") -> None:
        """Render a click effect at the given page coordinates."""
        self._dispatch("click", {"x": x, "y": y, "type": click_type})

    def move(self, from_x: int, from_y: int, to_x: int, to_y: int) -> None:
        """Render a mouse-movement trail between two points."""
        self._dispatch("move", {"fromX": from_x, "fromY": from_y, "toX": to_x, "toY": to_y})

    def highlight(self, selector: str) -> None:
        """Outline the element matched by *selector*."""
        self._dispatch("highlight", {"selector": selector})

    def hide_highlight(self) -> None:
        """Remove the element highlight, if any."""
        self._dispatch("hideHighlight", {})

    def clear_trail(self) -> None:
        """Erase the cursor trail overlay."""
        self._dispatch("clearTrail", {})

    def set_state(self, state: str) -> None:
        """Set the automation-state badge: 'idle', 'active', or 'busy'."""
        self._dispatch("state", {"state": state})

    def countdown(self, seconds: int, message: str = "Click pagination!") -> None:
        """Blocking countdown timer with per-second toast updates.

        Ticks once per second for the full duration (no early exit),
        mirroring progress to stdout for the operator.

        Args:
            seconds: Seconds to wait
            message: Message to show with countdown
        """
        remaining = seconds
        while remaining > 0:
            # Toast updates are best-effort: the plugin may be absent.
            try:
                self.clear_toasts()
                self.toast(f"⏱️ {remaining}s - {message}")
            except Exception:
                pass

            print(f"{remaining}", end=" ", flush=True)
            time.sleep(1)
            remaining -= 1

        print("done", flush=True)
|
|
@@ -12,6 +12,7 @@ from .capabilities import (
|
|
|
12
12
|
DOMCapability,
|
|
13
13
|
FetchCapability,
|
|
14
14
|
NetworkCapability,
|
|
15
|
+
VisualCapability,
|
|
15
16
|
)
|
|
16
17
|
|
|
17
18
|
if TYPE_CHECKING:
|
|
@@ -53,6 +54,7 @@ class BrowserSession:
|
|
|
53
54
|
"_dom",
|
|
54
55
|
"_fetch",
|
|
55
56
|
"_network",
|
|
57
|
+
"_visual",
|
|
56
58
|
)
|
|
57
59
|
|
|
58
60
|
def __init__(self, service: "BrowserService", session_id: str) -> None:
|
|
@@ -64,6 +66,7 @@ class BrowserSession:
|
|
|
64
66
|
self._dom: DOMCapability | None = None
|
|
65
67
|
self._fetch: FetchCapability | None = None
|
|
66
68
|
self._network: NetworkCapability | None = None
|
|
69
|
+
self._visual: VisualCapability | None = None
|
|
67
70
|
|
|
68
71
|
@property
|
|
69
72
|
def session_id(self) -> str:
|
|
@@ -113,6 +116,13 @@ class BrowserSession:
|
|
|
113
116
|
self._network = NetworkCapability(self)
|
|
114
117
|
return self._network
|
|
115
118
|
|
|
119
|
+
@property
|
|
120
|
+
def visual(self) -> VisualCapability:
|
|
121
|
+
"""Visual: toast(), click(), move(), highlight(), hide_highlight(), clear_trail(), set_state()"""
|
|
122
|
+
if self._visual is None:
|
|
123
|
+
self._visual = VisualCapability(self)
|
|
124
|
+
return self._visual
|
|
125
|
+
|
|
116
126
|
# === Core Methods ===
|
|
117
127
|
|
|
118
128
|
def navigate(
|
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: cmdop
|
|
3
|
+
Version: 0.1.25
|
|
4
|
+
Summary: Python SDK for CMDOP agent interaction
|
|
5
|
+
Project-URL: Homepage, https://cmdop.com
|
|
6
|
+
Project-URL: Documentation, https://cmdop.com
|
|
7
|
+
Project-URL: Repository, https://github.com/markolofsen/cmdop-client
|
|
8
|
+
Author: CMDOP Team
|
|
9
|
+
License: MIT
|
|
10
|
+
License-File: LICENSE
|
|
11
|
+
Keywords: agent,automation,cmdop,terminal
|
|
12
|
+
Classifier: Development Status :: 3 - Alpha
|
|
13
|
+
Classifier: Intended Audience :: Developers
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Programming Language :: Python :: 3
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
20
|
+
Classifier: Typing :: Typed
|
|
21
|
+
Requires-Python: >=3.10
|
|
22
|
+
Requires-Dist: beautifulsoup4>=4.12.0
|
|
23
|
+
Requires-Dist: grpcio>=1.60.0
|
|
24
|
+
Requires-Dist: httpx>=0.27.0
|
|
25
|
+
Requires-Dist: lxml>=5.0.0
|
|
26
|
+
Requires-Dist: protobuf>=4.25.0
|
|
27
|
+
Requires-Dist: pydantic-settings>=2.0.0
|
|
28
|
+
Requires-Dist: pydantic>=2.5.0
|
|
29
|
+
Requires-Dist: rich>=13.0.0
|
|
30
|
+
Requires-Dist: toon-python>=0.1.2
|
|
31
|
+
Provides-Extra: dev
|
|
32
|
+
Requires-Dist: beautifulsoup4>=4.12.0; extra == 'dev'
|
|
33
|
+
Requires-Dist: grpcio-tools>=1.60.0; extra == 'dev'
|
|
34
|
+
Requires-Dist: mypy>=1.8.0; extra == 'dev'
|
|
35
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
|
|
36
|
+
Requires-Dist: pytest-cov>=4.1.0; extra == 'dev'
|
|
37
|
+
Requires-Dist: pytest-grpc-aio>=0.3.0; extra == 'dev'
|
|
38
|
+
Requires-Dist: pytest>=8.0.0; extra == 'dev'
|
|
39
|
+
Requires-Dist: ruff>=0.1.0; extra == 'dev'
|
|
40
|
+
Description-Content-Type: text/markdown
|
|
41
|
+
|
|
42
|
+
# cmdop
|
|
43
|
+
|
|
44
|
+
Python SDK for CMDOP browser automation and server control.
|
|
45
|
+
|
|
46
|
+
## Architecture
|
|
47
|
+
|
|
48
|
+
```
|
|
49
|
+
Your Code ──── Cloud Relay ──── Agent (on server)
|
|
50
|
+
│
|
|
51
|
+
Outbound only, works through any NAT/firewall
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
## Install
|
|
55
|
+
|
|
56
|
+
```bash
|
|
57
|
+
pip install cmdop
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
## Connection
|
|
61
|
+
|
|
62
|
+
```python
|
|
63
|
+
from cmdop import CMDOPClient, AsyncCMDOPClient
|
|
64
|
+
|
|
65
|
+
# Local (direct IPC to running agent)
|
|
66
|
+
client = CMDOPClient.local()
|
|
67
|
+
|
|
68
|
+
# Remote (via cloud relay)
|
|
69
|
+
client = CMDOPClient.remote(api_key="cmd_xxx")
|
|
70
|
+
|
|
71
|
+
# Async
|
|
72
|
+
async with AsyncCMDOPClient.local() as client:
|
|
73
|
+
await client.files.read("/etc/hostname")
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
## Browser
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
from cmdop.services.browser.models import WaitUntil
|
|
80
|
+
|
|
81
|
+
with client.browser.create_session(headless=False) as s:
|
|
82
|
+
s.navigate("https://shop.com", wait_until=WaitUntil.NETWORKIDLE)
|
|
83
|
+
|
|
84
|
+
# Core methods
|
|
85
|
+
s.click("button.buy", move_cursor=True)
|
|
86
|
+
s.type("input[name=q]", "search term")
|
|
87
|
+
s.wait_for(".results")
|
|
88
|
+
s.execute_script("return document.title")
|
|
89
|
+
s.screenshot()
|
|
90
|
+
s.get_state() # URL + title
|
|
91
|
+
s.get_page_info() # Full page info
|
|
92
|
+
s.get_cookies()
|
|
93
|
+
s.set_cookies([...])
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
**WaitUntil options:**
|
|
97
|
+
| Value | Description |
|
|
98
|
+
|-------|-------------|
|
|
99
|
+
| `LOAD` | Wait for load event (default) |
|
|
100
|
+
| `DOMCONTENTLOADED` | Wait for DOMContentLoaded |
|
|
101
|
+
| `NETWORKIDLE` | Wait until network is idle (best for SPA) |
|
|
102
|
+
| `COMMIT` | Return immediately (fastest) |
|
|
103
|
+
|
|
104
|
+
### Capabilities
|
|
105
|
+
|
|
106
|
+
**`s.scroll`** - Scrolling
|
|
107
|
+
```python
|
|
108
|
+
s.scroll.js("down", 500) # JS scroll (works on complex sites)
|
|
109
|
+
s.scroll.native("down", 500) # Browser API scroll
|
|
110
|
+
s.scroll.to_bottom() # Scroll to page bottom
|
|
111
|
+
s.scroll.to_element(".item") # Scroll element into view
|
|
112
|
+
s.scroll.info() # Get scroll position/dimensions
|
|
113
|
+
|
|
114
|
+
# Smart infinite scroll with extraction
|
|
115
|
+
items = s.scroll.infinite(
|
|
116
|
+
extract_fn=lambda: extract_new_items(),
|
|
117
|
+
limit=100,
|
|
118
|
+
max_scrolls=50,
|
|
119
|
+
scroll_amount=800,
|
|
120
|
+
)
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
**`s.input`** - Input
|
|
124
|
+
```python
|
|
125
|
+
s.input.click_js(".btn") # JS click (reliable)
|
|
126
|
+
s.input.click_all("See more") # Click all matching elements
|
|
127
|
+
s.input.key("Escape") # Press key
|
|
128
|
+
s.input.key("Enter", ".input") # Press key on element
|
|
129
|
+
s.input.hover(".tooltip") # Native hover
|
|
130
|
+
s.input.hover_js(".tooltip") # JS hover
|
|
131
|
+
s.input.mouse_move(500, 300) # Move cursor to coordinates
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
**`s.timing`** - Delays
|
|
135
|
+
```python
|
|
136
|
+
s.timing.wait(500) # Wait ms
|
|
137
|
+
s.timing.seconds(2) # Wait seconds
|
|
138
|
+
s.timing.random(0.5, 1.5) # Random delay
|
|
139
|
+
s.timing.timeout(fn, 10, cleanup) # Run with timeout
|
|
140
|
+
```
|
|
141
|
+
|
|
142
|
+
**`s.dom`** - DOM operations
|
|
143
|
+
```python
|
|
144
|
+
s.dom.html(".container") # Get HTML
|
|
145
|
+
s.dom.text(".title") # Get text
|
|
146
|
+
s.dom.soup(".items") # → SoupWrapper (chainable BS4)
|
|
147
|
+
s.dom.parse(html_string) # → BeautifulSoup
|
|
148
|
+
s.dom.extract(".items", "href") # Get attr list
|
|
149
|
+
s.dom.select("#country", "US") # Dropdown select
|
|
150
|
+
s.dom.close_modal() # Close dialogs/popups
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
**`s.fetch`** - HTTP from browser (bypass CORS, inherit cookies)
|
|
154
|
+
```python
|
|
155
|
+
s.fetch.json("/api/items") # Fetch JSON
|
|
156
|
+
s.fetch.all(["/api/a", "/api/b"]) # Parallel fetch
|
|
157
|
+
s.fetch.execute("return fetch(...)") # Custom JS
|
|
158
|
+
```
|
|
159
|
+
|
|
160
|
+
**`s.network`** - Traffic capture
|
|
161
|
+
```python
|
|
162
|
+
s.network.enable(max_exchanges=1000)
|
|
163
|
+
s.navigate(url)
|
|
164
|
+
|
|
165
|
+
# Get exchanges
|
|
166
|
+
exchanges = s.network.get_all()
|
|
167
|
+
api = s.network.last("/api/data")
|
|
168
|
+
data = api.json_body()
|
|
169
|
+
|
|
170
|
+
# Filter
|
|
171
|
+
posts = s.network.filter(
|
|
172
|
+
url_pattern="/api/posts",
|
|
173
|
+
methods=["GET", "POST"],
|
|
174
|
+
status_codes=[200],
|
|
175
|
+
resource_types=["xhr", "fetch"],
|
|
176
|
+
)
|
|
177
|
+
|
|
178
|
+
# Convenience
|
|
179
|
+
s.network.api_calls("/api/") # XHR/Fetch matching pattern
|
|
180
|
+
s.network.last_json("/api/data") # JSON body directly
|
|
181
|
+
s.network.wait_for("/api/", 5000) # Wait for request
|
|
182
|
+
s.network.export_har() # Export to HAR
|
|
183
|
+
s.network.stats() # Capture statistics
|
|
184
|
+
s.network.clear() # Clear captured
|
|
185
|
+
s.network.disable()
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
**`s.visual`** - Browser overlay (requires CMDOP extension)
|
|
189
|
+
```python
|
|
190
|
+
s.visual.toast("Loading...") # Show toast
|
|
191
|
+
s.visual.clear_toasts() # Clear all toasts
|
|
192
|
+
s.visual.countdown(30, "Click!") # Countdown timer
|
|
193
|
+
s.visual.highlight(".element") # Highlight element
|
|
194
|
+
s.visual.hide_highlight() # Hide highlight
|
|
195
|
+
s.visual.click(100, 200) # Show click effect
|
|
196
|
+
s.visual.move(0, 0, 100, 200) # Show cursor trail
|
|
197
|
+
s.visual.set_state("busy") # idle/active/busy
|
|
198
|
+
```
|
|
199
|
+
|
|
200
|
+
## NetworkAnalyzer
|
|
201
|
+
|
|
202
|
+
Discover API endpoints by capturing traffic while user interacts.
|
|
203
|
+
|
|
204
|
+
```python
|
|
205
|
+
from cmdop import CMDOPClient
|
|
206
|
+
from cmdop.helpers import NetworkAnalyzer
|
|
207
|
+
|
|
208
|
+
client = CMDOPClient.local()
|
|
209
|
+
with client.browser.create_session(headless=False) as b:
|
|
210
|
+
analyzer = NetworkAnalyzer(b)
|
|
211
|
+
|
|
212
|
+
snapshot = analyzer.capture(
|
|
213
|
+
"https://example.com/cars",
|
|
214
|
+
wait_seconds=30,
|
|
215
|
+
countdown_message="Click pagination!",
|
|
216
|
+
min_size=100, # Ignore tracking pixels
|
|
217
|
+
max_size=500_000, # Ignore heavy assets
|
|
218
|
+
same_origin=True, # Only same domain
|
|
219
|
+
)
|
|
220
|
+
|
|
221
|
+
# Get best data API
|
|
222
|
+
if snapshot.api_requests:
|
|
223
|
+
best = snapshot.best_api()
|
|
224
|
+
print(best.url)
|
|
225
|
+
print(best.item_count)
|
|
226
|
+
print(best.data_key) # "data", "items", etc.
|
|
227
|
+
print(best.item_fields) # Field names
|
|
228
|
+
print(best.to_curl()) # curl command
|
|
229
|
+
print(best.to_httpx()) # Python httpx code
|
|
230
|
+
|
|
231
|
+
# All captured
|
|
232
|
+
for req in snapshot.api_requests:
|
|
233
|
+
print(f"{req.method} {req.url} → {req.item_count} items")
|
|
234
|
+
```
|
|
235
|
+
|
|
236
|
+
**NetworkSnapshot:**
|
|
237
|
+
- `api_requests` - Captured JSON API requests
|
|
238
|
+
- `json_requests` - Other JSON responses
|
|
239
|
+
- `cookies` - Session cookies
|
|
240
|
+
- `total_requests`, `total_bytes`
|
|
241
|
+
|
|
242
|
+
**RequestSnapshot:**
|
|
243
|
+
- `url`, `method`, `headers`, `body`, `cookies`
|
|
244
|
+
- `status`, `content_type`, `size`
|
|
245
|
+
- `data_key`, `item_count`, `item_fields`, `sample_response`
|
|
246
|
+
- `to_curl()`, `to_httpx()`
|
|
247
|
+
|
|
248
|
+
## Agent
|
|
249
|
+
|
|
250
|
+
Run AI tasks with typed output:
|
|
251
|
+
|
|
252
|
+
```python
|
|
253
|
+
from pydantic import BaseModel
|
|
254
|
+
|
|
255
|
+
class Health(BaseModel):
|
|
256
|
+
status: str
|
|
257
|
+
cpu: float
|
|
258
|
+
issues: list[str]
|
|
259
|
+
|
|
260
|
+
result = client.agent.run("Check server health", output_schema=Health)
|
|
261
|
+
health: Health = result.output # Typed!
|
|
262
|
+
```
|
|
263
|
+
|
|
264
|
+
## Terminal
|
|
265
|
+
|
|
266
|
+
```python
|
|
267
|
+
session = client.terminal.create()
|
|
268
|
+
client.terminal.send_input(session.session_id, "ls -la\n")
|
|
269
|
+
output = client.terminal.get_history(session.session_id)
|
|
270
|
+
client.terminal.resize(session.session_id, 120, 40)
|
|
271
|
+
client.terminal.send_signal(session.session_id, "SIGINT")
|
|
272
|
+
client.terminal.close(session.session_id)
|
|
273
|
+
```
|
|
274
|
+
|
|
275
|
+
## Files
|
|
276
|
+
|
|
277
|
+
```python
|
|
278
|
+
client.files.list("/var/log")
|
|
279
|
+
client.files.read("/etc/nginx/nginx.conf")
|
|
280
|
+
client.files.write("/tmp/config.json", b'{"key": "value"}')
|
|
281
|
+
client.files.delete("/tmp/old.txt")
|
|
282
|
+
client.files.copy("/src", "/dst")
|
|
283
|
+
client.files.move("/old", "/new")
|
|
284
|
+
client.files.mkdir("/new/dir")
|
|
285
|
+
client.files.info("/path")
|
|
286
|
+
```
|
|
287
|
+
|
|
288
|
+
## SDKBaseModel
|
|
289
|
+
|
|
290
|
+
Auto-cleaning Pydantic model:
|
|
291
|
+
|
|
292
|
+
```python
|
|
293
|
+
from cmdop import SDKBaseModel
|
|
294
|
+
|
|
295
|
+
class Product(SDKBaseModel):
|
|
296
|
+
__base_url__ = "https://shop.com"
|
|
297
|
+
name: str = "" # " iPhone 15 \n" → "iPhone 15"
|
|
298
|
+
price: int = 0 # "$1,299.00" → 1299
|
|
299
|
+
rating: float = 0 # "4.5 stars" → 4.5
|
|
300
|
+
url: str = "" # "/p/123" → "https://shop.com/p/123"
|
|
301
|
+
|
|
302
|
+
products = Product.from_list(raw["items"]) # Auto dedupe + filter
|
|
303
|
+
```
|
|
304
|
+
|
|
305
|
+
## Utilities
|
|
306
|
+
|
|
307
|
+
```python
|
|
308
|
+
from cmdop import get_logger, json_to_toon
|
|
309
|
+
|
|
310
|
+
# Logging (rich console + file)
|
|
311
|
+
log = get_logger(__name__)
|
|
312
|
+
log.info("Starting")
|
|
313
|
+
|
|
314
|
+
# TOON format (30-50% token savings)
|
|
315
|
+
toon = json_to_toon({"name": "Alice", "age": 25})
|
|
316
|
+
# → "name: Alice\nage: 25"
|
|
317
|
+
```
|
|
318
|
+
|
|
319
|
+
## Requirements
|
|
320
|
+
|
|
321
|
+
- Python 3.10+
|
|
322
|
+
- CMDOP agent running locally or API key for remote
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
cmdop/__init__.py,sha256=
|
|
1
|
+
cmdop/__init__.py,sha256=JAH5EagCk6WJ2i-OiE7QofeEi9LjaaHbvECewfdvOsg,5200
|
|
2
2
|
cmdop/client.py,sha256=nTotStZPBfYN3TrHH-OlEJMSVAXskYMQRkocsFmyaBY,14601
|
|
3
3
|
cmdop/config.py,sha256=vpw1aGCyS4NKlZyzVur81Lt06QmN3FnscZji0bypUi0,4398
|
|
4
4
|
cmdop/discovery.py,sha256=HNxSOa5tSuG7ppfFs21XdviW5ucjpRswVPguhX5j8Dg,7479
|
|
@@ -150,9 +150,10 @@ cmdop/api/generated/workspaces/workspaces__api__workspaces/__init__.py,sha256=Wo
|
|
|
150
150
|
cmdop/api/generated/workspaces/workspaces__api__workspaces/client.py,sha256=A1c5s8FJQ7vJRjReO7QQ8D6tPN-6PudxQXLYP6EGE2I,9861
|
|
151
151
|
cmdop/api/generated/workspaces/workspaces__api__workspaces/models.py,sha256=3UFJFLWWQs_jzSmFstJ5T1lsaVp9TYSb1u6YFFq5RSk,9501
|
|
152
152
|
cmdop/api/generated/workspaces/workspaces__api__workspaces/sync_client.py,sha256=KfWxpSjZyfZLjonHRFwk0a1eUmOZHuky4ZySo11wTWg,9639
|
|
153
|
-
cmdop/helpers/__init__.py,sha256=
|
|
153
|
+
cmdop/helpers/__init__.py,sha256=fxzHT-O9D4HbvSU4qcQ9Ce3Q0Y5dZV-nQHVuoMpahCw,355
|
|
154
154
|
cmdop/helpers/cleaner.py,sha256=3XWB3KpRSMgJ1Go0G7rE9bgDGy1bshk9PtWWB6pL7AU,1792
|
|
155
155
|
cmdop/helpers/formatting.py,sha256=lLYUEyzZLUSXq2xHB2w3i72QoP_baHjtFpP-7mb-V-s,341
|
|
156
|
+
cmdop/helpers/network_analyzer.py,sha256=ZiTRv39S_kTAppTVbq97DYLExS2WsDLCxf_N4rtnkf0,13025
|
|
156
157
|
cmdop/models/__init__.py,sha256=W6P1oo6JkUAeVEV59HzFT646hXM0pk_obXHfHbX4tAc,1594
|
|
157
158
|
cmdop/models/agent.py,sha256=Z1QDfr1-DTFVl5oPvbH2ZUBLXPHUlitiCRfG7y_OzYg,5495
|
|
158
159
|
cmdop/models/base.py,sha256=1SR1ka5p-rHHkk4k9pPwbraxX_CsTG830CosGNPn1JA,7425
|
|
@@ -169,8 +170,8 @@ cmdop/services/terminal.py,sha256=9SSWBexe2rWgMd-hGBEs9mcax3l7x_U84VHZpMC4xK8,17
|
|
|
169
170
|
cmdop/services/browser/__init__.py,sha256=31Ofu9RCYTAedPKLvnor8J7oGDgTjbqJ58OkxxHYwdk,1270
|
|
170
171
|
cmdop/services/browser/models.py,sha256=9MpNFgSgZDIznmTmsCUByEN31t_iQ6kAza1BsPSsuJs,5320
|
|
171
172
|
cmdop/services/browser/parsing.py,sha256=0hQAy-0ZwJqtmhEqHO3EEdVB3iYmyhXRdouN_dCbig8,3820
|
|
172
|
-
cmdop/services/browser/session.py,sha256=
|
|
173
|
-
cmdop/services/browser/capabilities/__init__.py,sha256=
|
|
173
|
+
cmdop/services/browser/session.py,sha256=4_g-vPiiFBTiz5pbaOSxEKsjgkjEPU949jby2B15eWQ,6771
|
|
174
|
+
cmdop/services/browser/capabilities/__init__.py,sha256=GfLhrQ_z-g22OwlZQt8KpQltA61SLSgV4cSsY284DUI,459
|
|
174
175
|
cmdop/services/browser/capabilities/_base.py,sha256=mW0jKa2CyvK-8cjenv5JYvuCKiO3rpt5F7WtWFXBitA,749
|
|
175
176
|
cmdop/services/browser/capabilities/_helpers.py,sha256=jXqYbeDocAHec2GwF2_BNnJ78vTyUnHteQoS-RSG00k,488
|
|
176
177
|
cmdop/services/browser/capabilities/dom.py,sha256=DuXfildga23wGBNJWtNzx-t2Cq553HC48o9KAWAlyC0,2612
|
|
@@ -179,6 +180,7 @@ cmdop/services/browser/capabilities/input.py,sha256=uYmWGqturMDent44Us80oT_nk4kF
|
|
|
179
180
|
cmdop/services/browser/capabilities/network.py,sha256=tZV4Oh_J5zUjEe9GBLQBDXEVh9EVTecPbyjE7lIUrd0,7775
|
|
180
181
|
cmdop/services/browser/capabilities/scroll.py,sha256=sh0VuOPOv81BZg80-n8TABOj5RpshJT12qJwm4F_OY0,4808
|
|
181
182
|
cmdop/services/browser/capabilities/timing.py,sha256=NH34G_4Kfukh6JCdhLRGoouA-uNTbx9ly7ybP9Kh558,1868
|
|
183
|
+
cmdop/services/browser/capabilities/visual.py,sha256=AecHna1jqIzgQdsYHiPlZjp-Rr8ivW4EE7IWf1Hyeko,3671
|
|
182
184
|
cmdop/services/browser/js/__init__.py,sha256=gTiZguikKfztDtggZTux2FqhT8YTjyHCzQR4TEnT7z4,1177
|
|
183
185
|
cmdop/services/browser/js/core.py,sha256=QXCCX_al5tMgz7aCwMqhIs1aRe_IdG8teOJniaumA5Q,995
|
|
184
186
|
cmdop/services/browser/js/fetch.py,sha256=WPy_H4LLkneSx06wpfnx4Sx_0Okf2ENXi6bveCd9ZCg,2188
|
|
@@ -198,7 +200,7 @@ cmdop/transport/base.py,sha256=2pkV8i9epgp_21dyReCfX47abRUrnALm0W5BXb-Fuz0,5571
|
|
|
198
200
|
cmdop/transport/discovery.py,sha256=rcGAuVrR1l6jwcP0dqZxVhX1NsFK7sRHygFMCLmmUbA,10673
|
|
199
201
|
cmdop/transport/local.py,sha256=ob6tWVxSdKwblHSMK8CkgjyuSdQoAeWgy5OAUd5ZNuE,7411
|
|
200
202
|
cmdop/transport/remote.py,sha256=FNVqus9wOv7LlxKarXjLmSyvJiHwhvPbNDOPv1IQkmE,4329
|
|
201
|
-
cmdop-0.1.
|
|
202
|
-
cmdop-0.1.
|
|
203
|
-
cmdop-0.1.
|
|
204
|
-
cmdop-0.1.
|
|
203
|
+
cmdop-0.1.25.dist-info/METADATA,sha256=zU53FzmNQTtobkEzWTWOnYCYiB80P6sER5Ovmq0WcoU,9284
|
|
204
|
+
cmdop-0.1.25.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
205
|
+
cmdop-0.1.25.dist-info/licenses/LICENSE,sha256=6hyzbI1QVXW6B-XT7PaQ6UG9lns11Y_nnap8uUKGUqo,1062
|
|
206
|
+
cmdop-0.1.25.dist-info/RECORD,,
|
cmdop-0.1.23.dist-info/METADATA
DELETED
|
@@ -1,330 +0,0 @@
|
|
|
1
|
-
Metadata-Version: 2.4
|
|
2
|
-
Name: cmdop
|
|
3
|
-
Version: 0.1.23
|
|
4
|
-
Summary: Python SDK for CMDOP agent interaction
|
|
5
|
-
Project-URL: Homepage, https://cmdop.com
|
|
6
|
-
Project-URL: Documentation, https://cmdop.com
|
|
7
|
-
Project-URL: Repository, https://github.com/markolofsen/cmdop-client
|
|
8
|
-
Author: CMDOP Team
|
|
9
|
-
License: MIT
|
|
10
|
-
License-File: LICENSE
|
|
11
|
-
Keywords: agent,automation,cmdop,terminal
|
|
12
|
-
Classifier: Development Status :: 3 - Alpha
|
|
13
|
-
Classifier: Intended Audience :: Developers
|
|
14
|
-
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
-
Classifier: Programming Language :: Python :: 3
|
|
16
|
-
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
-
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
-
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
-
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
20
|
-
Classifier: Typing :: Typed
|
|
21
|
-
Requires-Python: >=3.10
|
|
22
|
-
Requires-Dist: beautifulsoup4>=4.12.0
|
|
23
|
-
Requires-Dist: grpcio>=1.60.0
|
|
24
|
-
Requires-Dist: httpx>=0.27.0
|
|
25
|
-
Requires-Dist: lxml>=5.0.0
|
|
26
|
-
Requires-Dist: protobuf>=4.25.0
|
|
27
|
-
Requires-Dist: pydantic-settings>=2.0.0
|
|
28
|
-
Requires-Dist: pydantic>=2.5.0
|
|
29
|
-
Requires-Dist: rich>=13.0.0
|
|
30
|
-
Requires-Dist: toon-python>=0.1.2
|
|
31
|
-
Provides-Extra: dev
|
|
32
|
-
Requires-Dist: beautifulsoup4>=4.12.0; extra == 'dev'
|
|
33
|
-
Requires-Dist: grpcio-tools>=1.60.0; extra == 'dev'
|
|
34
|
-
Requires-Dist: mypy>=1.8.0; extra == 'dev'
|
|
35
|
-
Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
|
|
36
|
-
Requires-Dist: pytest-cov>=4.1.0; extra == 'dev'
|
|
37
|
-
Requires-Dist: pytest-grpc-aio>=0.3.0; extra == 'dev'
|
|
38
|
-
Requires-Dist: pytest>=8.0.0; extra == 'dev'
|
|
39
|
-
Requires-Dist: ruff>=0.1.0; extra == 'dev'
|
|
40
|
-
Description-Content-Type: text/markdown
|
|
41
|
-
|
|
42
|
-
# cmdop
|
|
43
|
-
|
|
44
|
-
**Any machine. One API.**
|
|
45
|
-
|
|
46
|
-
```python
|
|
47
|
-
from cmdop import CMDOPClient
|
|
48
|
-
|
|
49
|
-
with CMDOPClient.remote(api_key="cmd_xxx") as server:
|
|
50
|
-
server.terminal.execute("docker restart app")
|
|
51
|
-
server.files.write("/etc/nginx/nginx.conf", new_config)
|
|
52
|
-
logs = server.files.read("/var/log/app.log")
|
|
53
|
-
```
|
|
54
|
-
|
|
55
|
-
No SSH. No VPN. No open ports.
|
|
56
|
-
|
|
57
|
-
---
|
|
58
|
-
|
|
59
|
-
## How
|
|
60
|
-
|
|
61
|
-
```
|
|
62
|
-
Your Code ──── Cloud Relay ──── Agent (on server)
|
|
63
|
-
│
|
|
64
|
-
Outbound only, works through any NAT/firewall
|
|
65
|
-
```
|
|
66
|
-
|
|
67
|
-
Agent connects out. Your code connects to relay. Done.
|
|
68
|
-
|
|
69
|
-
---
|
|
70
|
-
|
|
71
|
-
## Install
|
|
72
|
-
|
|
73
|
-
```bash
|
|
74
|
-
pip install cmdop
|
|
75
|
-
```
|
|
76
|
-
|
|
77
|
-
```python
|
|
78
|
-
from cmdop import CMDOPClient, AsyncCMDOPClient
|
|
79
|
-
|
|
80
|
-
# Remote (via cloud relay)
|
|
81
|
-
with CMDOPClient.remote(api_key="cmd_xxx") as client:
|
|
82
|
-
client.files.list("/home")
|
|
83
|
-
|
|
84
|
-
# Local (direct IPC)
|
|
85
|
-
with CMDOPClient.local() as client:
|
|
86
|
-
client.terminal.execute("ls -la")
|
|
87
|
-
|
|
88
|
-
# Async
|
|
89
|
-
async with AsyncCMDOPClient.remote(api_key="cmd_xxx") as client:
|
|
90
|
-
await client.files.read("/etc/hostname")
|
|
91
|
-
```
|
|
92
|
-
|
|
93
|
-
---
|
|
94
|
-
|
|
95
|
-
## Terminal
|
|
96
|
-
|
|
97
|
-
```python
|
|
98
|
-
session = server.terminal.create()
|
|
99
|
-
server.terminal.send_input(session.session_id, "kubectl get pods\n")
|
|
100
|
-
output = server.terminal.get_history(session.session_id)
|
|
101
|
-
```
|
|
102
|
-
|
|
103
|
-
| Method | Description |
|
|
104
|
-
|--------|-------------|
|
|
105
|
-
| `create(shell)` | Start session |
|
|
106
|
-
| `send_input(id, data)` | Send commands |
|
|
107
|
-
| `get_history(id)` | Get output |
|
|
108
|
-
| `resize(id, cols, rows)` | Resize |
|
|
109
|
-
| `send_signal(id, signal)` | SIGINT/SIGTERM |
|
|
110
|
-
| `close(id)` | End session |
|
|
111
|
-
|
|
112
|
-
## Files
|
|
113
|
-
|
|
114
|
-
```python
|
|
115
|
-
server.files.list("/var/log")
|
|
116
|
-
server.files.read("/etc/nginx/nginx.conf")
|
|
117
|
-
server.files.write("/tmp/config.json", b'{"key": "value"}')
|
|
118
|
-
```
|
|
119
|
-
|
|
120
|
-
| Method | Description |
|
|
121
|
-
|--------|-------------|
|
|
122
|
-
| `list(path)` | List dir |
|
|
123
|
-
| `read(path)` | Read file |
|
|
124
|
-
| `write(path, content)` | Write file |
|
|
125
|
-
| `delete(path)` | Delete |
|
|
126
|
-
| `copy/move(src, dst)` | Copy/Move |
|
|
127
|
-
| `mkdir(path)` | Create dir |
|
|
128
|
-
| `info(path)` | Metadata |
|
|
129
|
-
|
|
130
|
-
## Agent
|
|
131
|
-
|
|
132
|
-
```python
|
|
133
|
-
from pydantic import BaseModel
|
|
134
|
-
|
|
135
|
-
class Health(BaseModel):
|
|
136
|
-
status: str
|
|
137
|
-
cpu: float
|
|
138
|
-
issues: list[str]
|
|
139
|
-
|
|
140
|
-
result = server.agent.run("Check server health", output_schema=Health)
|
|
141
|
-
health: Health = result.output # Typed!
|
|
142
|
-
```
|
|
143
|
-
|
|
144
|
-
---
|
|
145
|
-
|
|
146
|
-
## Browser
|
|
147
|
-
|
|
148
|
-
Capability-based API for browser automation.
|
|
149
|
-
|
|
150
|
-
```python
|
|
151
|
-
from cmdop.services.browser.models import WaitUntil
|
|
152
|
-
|
|
153
|
-
with client.browser.create_session() as s:
|
|
154
|
-
s.navigate("https://shop.com/products", wait_until=WaitUntil.NETWORKIDLE)
|
|
155
|
-
s.dom.close_modal() # Close popups
|
|
156
|
-
|
|
157
|
-
# BeautifulSoup parsing
|
|
158
|
-
soup = s.dom.soup() # SoupWrapper with chainable API
|
|
159
|
-
for item in soup.select(".product"):
|
|
160
|
-
title = item.select_one("h2").text()
|
|
161
|
-
price = item.attr("data-price")
|
|
162
|
-
|
|
163
|
-
# Scrolling with random delays
|
|
164
|
-
for _ in range(10):
|
|
165
|
-
soup = s.dom.soup(".listings")
|
|
166
|
-
s.scroll.js("down", 700)
|
|
167
|
-
s.timing.random(0.8, 1.5)
|
|
168
|
-
|
|
169
|
-
# Click with cursor movement
|
|
170
|
-
s.click("button.buy", move_cursor=True)
|
|
171
|
-
|
|
172
|
-
# Click all "See more" buttons
|
|
173
|
-
s.input.click_all("See more")
|
|
174
|
-
|
|
175
|
-
# Mouse operations
|
|
176
|
-
s.input.mouse_move(500, 300)
|
|
177
|
-
s.input.hover(".tooltip-trigger")
|
|
178
|
-
|
|
179
|
-
# JS fetch (bypass CORS, inherit cookies)
|
|
180
|
-
data = s.fetch.json("/api/items")
|
|
181
|
-
```
|
|
182
|
-
|
|
183
|
-
### Core Methods (on session)
|
|
184
|
-
|
|
185
|
-
| Method | Description |
|
|
186
|
-
|--------|-------------|
|
|
187
|
-
| `navigate(url, wait_until)` | Go to URL (wait_until: LOAD, DOMCONTENTLOADED, NETWORKIDLE, COMMIT) |
|
|
188
|
-
| `click(selector, move_cursor)` | Click element |
|
|
189
|
-
| `type(selector, text)` | Type text |
|
|
190
|
-
| `wait_for(selector)` | Wait for element |
|
|
191
|
-
| `execute_script(js)` | Run JavaScript |
|
|
192
|
-
| `screenshot()` | PNG bytes |
|
|
193
|
-
| `get_state()` | URL + title |
|
|
194
|
-
| `get_page_info()` | Full page info |
|
|
195
|
-
| `get/set_cookies()` | Cookie management |
|
|
196
|
-
|
|
197
|
-
### Capabilities
|
|
198
|
-
|
|
199
|
-
**`session.scroll`** - Scrolling
|
|
200
|
-
| Method | Description |
|
|
201
|
-
|--------|-------------|
|
|
202
|
-
| `js(dir, amount)` | JS scroll (works on complex sites) |
|
|
203
|
-
| `native(dir, amount)` | Browser API scroll |
|
|
204
|
-
| `to_bottom()` | Scroll to page bottom |
|
|
205
|
-
| `to_element(selector)` | Scroll element into view |
|
|
206
|
-
| `info()` | Get scroll position |
|
|
207
|
-
| `infinite(extract_fn)` | Smart infinite scroll with extraction |
|
|
208
|
-
|
|
209
|
-
**`session.input`** - Input operations
|
|
210
|
-
| Method | Description |
|
|
211
|
-
|--------|-------------|
|
|
212
|
-
| `click_js(selector)` | JS click (reliable) |
|
|
213
|
-
| `click_all(text, role)` | Click all matching elements |
|
|
214
|
-
| `key(key, selector)` | Press keyboard key |
|
|
215
|
-
| `hover(selector)` | Hover over element (native) |
|
|
216
|
-
| `hover_js(selector)` | Hover via JS |
|
|
217
|
-
| `mouse_move(x, y)` | Move cursor to coordinates |
|
|
218
|
-
|
|
219
|
-
**`session.timing`** - Delays
|
|
220
|
-
| Method | Description |
|
|
221
|
-
|--------|-------------|
|
|
222
|
-
| `wait(ms)` | Wait milliseconds |
|
|
223
|
-
| `seconds(n)` | Wait seconds |
|
|
224
|
-
| `random(min, max)` | Random delay |
|
|
225
|
-
| `timeout(fn, sec, cleanup)` | Run with timeout |
|
|
226
|
-
|
|
227
|
-
**`session.dom`** - DOM operations
|
|
228
|
-
| Method | Description |
|
|
229
|
-
|--------|-------------|
|
|
230
|
-
| `html(selector)` | Get HTML |
|
|
231
|
-
| `text(selector)` | Get text content |
|
|
232
|
-
| `soup(selector)` | → SoupWrapper |
|
|
233
|
-
| `parse(html)` | → BeautifulSoup |
|
|
234
|
-
| `extract(selector, attr)` | Get text/attr list |
|
|
235
|
-
| `select(selector, value)` | Dropdown select |
|
|
236
|
-
| `close_modal()` | Close dialogs |
|
|
237
|
-
|
|
238
|
-
**`session.fetch`** - HTTP from browser context
|
|
239
|
-
| Method | Description |
|
|
240
|
-
|--------|-------------|
|
|
241
|
-
| `json(url)` | Fetch JSON |
|
|
242
|
-
| `all(requests)` | Parallel fetch |
|
|
243
|
-
| `execute(js_code)` | Custom JS fetch code |
|
|
244
|
-
|
|
245
|
-
**`session.network`** - Network capture (v2.19.0)
|
|
246
|
-
| Method | Description |
|
|
247
|
-
|--------|-------------|
|
|
248
|
-
| `enable(max_exchanges)` | Start capturing HTTP traffic |
|
|
249
|
-
| `disable()` | Stop capturing |
|
|
250
|
-
| `get_all()` | Get all captured exchanges |
|
|
251
|
-
| `filter(url_pattern, methods, status_codes)` | Filter exchanges |
|
|
252
|
-
| `last(url_pattern)` | Get most recent matching exchange |
|
|
253
|
-
| `api_calls(url_pattern)` | Get XHR/Fetch calls matching pattern |
|
|
254
|
-
| `last_json(url_pattern)` | Get JSON body from last matching response |
|
|
255
|
-
| `wait_for(url_pattern, timeout_ms)` | Wait for matching request |
|
|
256
|
-
| `stats()` | Capture statistics |
|
|
257
|
-
| `export_har()` | Export to HAR format |
|
|
258
|
-
| `clear()` | Clear captured data |
|
|
259
|
-
|
|
260
|
-
```python
|
|
261
|
-
# Example: Intercept API responses
|
|
262
|
-
from cmdop.services.browser.models import WaitUntil
|
|
263
|
-
|
|
264
|
-
with client.browser.create_session() as s:
|
|
265
|
-
s.network.enable()
|
|
266
|
-
s.navigate("https://app.example.com", wait_until=WaitUntil.NETWORKIDLE)
|
|
267
|
-
|
|
268
|
-
# Get last API response
|
|
269
|
-
api = s.network.last("/api/data")
|
|
270
|
-
data = api.json_body()
|
|
271
|
-
|
|
272
|
-
# Filter by criteria
|
|
273
|
-
posts = s.network.filter(
|
|
274
|
-
url_pattern="/api/posts",
|
|
275
|
-
methods=["GET"],
|
|
276
|
-
status_codes=[200],
|
|
277
|
-
)
|
|
278
|
-
|
|
279
|
-
s.network.disable()
|
|
280
|
-
```
|
|
281
|
-
|
|
282
|
-
## SDKBaseModel
|
|
283
|
-
|
|
284
|
-
Auto-cleaning Pydantic model for scraped data:
|
|
285
|
-
|
|
286
|
-
```python
|
|
287
|
-
from cmdop import SDKBaseModel
|
|
288
|
-
|
|
289
|
-
class Product(SDKBaseModel):
|
|
290
|
-
__base_url__ = "https://shop.com"
|
|
291
|
-
name: str = "" # " iPhone 15 \n" → "iPhone 15"
|
|
292
|
-
price: int = 0 # "$1,299.00" → 1299
|
|
293
|
-
rating: float = 0 # "4.5 stars" → 4.5
|
|
294
|
-
url: str = "" # "/p/123" → "https://shop.com/p/123"
|
|
295
|
-
|
|
296
|
-
products = Product.from_list(raw["items"]) # Auto dedupe + filter
|
|
297
|
-
```
|
|
298
|
-
|
|
299
|
-
---
|
|
300
|
-
|
|
301
|
-
## Utilities
|
|
302
|
-
|
|
303
|
-
**Logging:**
|
|
304
|
-
```python
|
|
305
|
-
from cmdop import get_logger
|
|
306
|
-
log = get_logger(__name__)
|
|
307
|
-
log.info("Starting") # Rich console + auto file logging
|
|
308
|
-
```
|
|
309
|
-
|
|
310
|
-
**TOON Format (30-50% token savings):**
|
|
311
|
-
```python
|
|
312
|
-
from cmdop import json_to_toon, JsonCleaner
|
|
313
|
-
toon = json_to_toon({"name": "Alice", "age": 25})
|
|
314
|
-
# → "name: Alice\nage: 25"
|
|
315
|
-
```
|
|
316
|
-
|
|
317
|
-
---
|
|
318
|
-
|
|
319
|
-
## Requirements
|
|
320
|
-
|
|
321
|
-
- Python 3.10+
|
|
322
|
-
- CMDOP agent on target
|
|
323
|
-
|
|
324
|
-
## Links
|
|
325
|
-
|
|
326
|
-
[cmdop.com](https://cmdop.com)
|
|
327
|
-
|
|
328
|
-
## License
|
|
329
|
-
|
|
330
|
-
MIT
|
|
File without changes
|
|
File without changes
|