Jarvis-Brain 0.1.10.0__tar.gz → 0.1.11.1__tar.gz
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/PKG-INFO +2 -1
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/mcp_tools/dp_tools.py +23 -1
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/mcp_tools/main.py +2 -1
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/pyproject.toml +3 -2
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/tools/browser_proxy.py +45 -0
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/tools/tools.py +69 -0
- jarvis_brain-0.1.10.0/mcp_tools/chrome_devtools_tools.py +0 -11
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/.gitignore +0 -0
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/README.md +0 -0
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/mcp_tools/__init__.py +0 -0
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/tools/__init__.py +0 -0
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/tools/browser_manager.py +0 -0
- {jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/uv.lock +0 -0

{jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: Jarvis_Brain
-Version: 0.1.10.0
+Version: 0.1.11.1
 Summary: Jarvis brain mcp
 Requires-Python: >=3.10
 Requires-Dist: beautifulsoup4
@@ -8,3 +8,4 @@ Requires-Dist: curl-cffi
 Requires-Dist: drissionpage
 Requires-Dist: fastmcp
 Requires-Dist: minify-html
+Requires-Dist: pillow
```

{jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/mcp_tools/dp_tools.py

```diff
@@ -4,13 +4,14 @@
 import hashlib
 import json
 import os
+import time
 from typing import Any
 
 from fastmcp import FastMCP
 
 from tools.browser_manager import BrowserManager
 from tools.tools import compress_html, requests_html, dp_headless_html, assert_waf_cookie, dp_mcp_message_pack, \
-    compress_html_js
+    compress_html_js, compress_image_bytes
 from tools.browser_proxy import DPProxyClient, DPProxyClientManager
 
 html_source_code_local_save_path = os.path.join(os.getcwd(), "html-source-code")
@@ -304,3 +305,24 @@ def register_scroll_action(mcp: FastMCP, browser_manager):
             browser_port=browser_port,
             tab_id=tab_id,
         )
+
+
+def register_get_screenshot(mcp: FastMCP, browser_manager):
+    @mcp.tool(name="get_tab_screenshot", description="尝试对传入tab页进行截图,并将截图压缩为1M大小png图片,会返回截图保存路径")
+    async def get_tab_screenshot(browser_port: int, tab_id: str) -> dict[str, Any]:
+        _browser = browser_manager.get_browser(browser_port)
+        target_tab = _browser.get_tab(tab_id)
+        if not os.path.exists(html_source_code_local_save_path):
+            os.makedirs(html_source_code_local_save_path)
+        timestamp = int(time.time() * 1000)
+        origin_png = target_tab.get_screenshot(as_bytes="png")
+        compress_png = compress_image_bytes(origin_png)
+        image_path = os.path.join(html_source_code_local_save_path, f"{browser_port}_{tab_id}_{timestamp}.png")
+        with open(image_path, "wb") as f:
+            f.write(compress_png)
+        return dp_mcp_message_pack(
+            message=f"已完成对browser_port={browser_port},tab_id={tab_id}的截屏",
+            browser_port=browser_port,
+            tab_id=tab_id,
+            screenshot_path=image_path
+        )
```
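
The new `register_get_screenshot` hook exposes an MCP tool that screenshots a tab, compresses the bytes with `compress_image_bytes`, writes the result under the `html-source-code` directory, and returns the saved path. A rough client-side sketch (not part of this package, assuming the FastMCP 2.x `Client` API plus a hypothetical entry-point path, port, and tab id):

```python
import asyncio

from fastmcp import Client  # FastMCP 2.x client API (assumption)


async def main():
    # Client() can infer a stdio transport from a Python script path (assumption);
    # mcp_tools/main.py is the entry point that calls mcp.run(transport="stdio").
    async with Client("mcp_tools/main.py") as client:
        result = await client.call_tool(
            "get_tab_screenshot",
            {"browser_port": 9222, "tab_id": "TAB_ID"},  # hypothetical values
        )
        print(result)  # dp_mcp_message_pack payload, including screenshot_path


asyncio.run(main())
```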

{jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/mcp_tools/main.py

```diff
@@ -20,6 +20,7 @@ if "TeamNode-Dp" in enabled_modules:
     register_get_html(mcp, browser_manager)
     register_check_selector(mcp, browser_manager)
     register_pop_first_packet(mcp, browser_manager, client_manager)
+    register_get_screenshot(mcp, browser_manager)
     # 页面交互
     register_click_action(mcp, browser_manager)
     register_scroll_action(mcp, browser_manager)
@@ -29,7 +30,7 @@ if "JarvisNode" in enabled_modules:
 
 
 def main():
-    mcp.run(transport="stdio",show_banner=False)
+    mcp.run(transport="stdio", show_banner=False)
 
 
 if __name__ == '__main__':
```

{jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/pyproject.toml

```diff
@@ -1,13 +1,14 @@
 [project]
 name = "Jarvis_Brain" # 别人下载时用的名字,必须在 PyPI 上唯一
-version = "0.1.10.0"
+version = "0.1.11.1"
 description = "Jarvis brain mcp"
 dependencies = [
     "fastmcp",
     "DrissionPage",
     "minify-html",
     "beautifulsoup4",
-    "curl_cffi"
+    "curl_cffi",
+    "pillow"
 ]
 requires-python = ">=3.10"
 
```

{jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/tools/browser_proxy.py

```diff
@@ -126,6 +126,51 @@ class DrissionPageListenerProxy:
         return attr
 
 
+def check_data_packet(packet: DataPacket, client: DPProxyClient):
+    """
+    封装监听到的数据包,并将其存放在client的packet_queue中
+    :param packet:
+    :param client:
+    :return:
+    """
+    url = packet.url
+    method = packet.request.method
+    data = None
+    if packet.request.hasPostData:
+        data = packet.request.postData
+    domain = urlparse(url).netloc
+    body = packet.response.body
+    body_str = json.dumps(body, ensure_ascii=False, separators=(',', ':'))
+    body_str_list = [body_str[i:i + one_turn_max_token] for i in range(0, len(body_str), one_turn_max_token)]
+    body_completed = True
+    packet_filter = client.packet_filter
+    domain_filter = packet_filter.get("domain_filter", None)
+    method_filter = packet_filter.get("method_filter", ["GET", "POST"])
+    for index, body_str in enumerate(body_str_list):
+        # 如果给了domain_filter并且domain没有在domain_filter中时跳过该数据包
+        if domain_filter and domain not in domain_filter:
+            continue
+        # 如果method没有在method_filter中,则跳过该数据包
+        if method not in method_filter:
+            continue
+        if (index + 1) != len(body_str_list):
+            body_completed = False
+        if packet.response:
+            response_headers = packet.response.headers
+        else:
+            response_headers = {}
+        temp_dict = {
+            "url": url,
+            "body_completed": body_completed,
+            "method": method,
+            "request_data": data,
+            "request_headers": dict(packet.request.headers),
+            "response_headers": dict(response_headers),
+            "response_body_segment": body_str.replace("\\", ""),
+        }
+        client.packet_queue.append(temp_dict)
+
+
 def check_data_packet(packet: DataPacket, client: DPProxyClient):
     """
     封装监听到的数据包,并将其存放在client的packet_queue中
```
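
The added `check_data_packet` serializes each captured response body to JSON, slices it into `one_turn_max_token`-sized segments, and appends one dict per segment to `client.packet_queue`, with `body_completed` marking whether the segment chain is finished. A hypothetical consumer sketch (field names come from the diff; the queue structure and reassembly policy are assumptions) that stitches the segments back together per URL:

```python
from collections import defaultdict


def reassemble_bodies(packet_queue):
    """Join queued response_body_segment chunks per URL, in arrival order."""
    parts = defaultdict(list)
    for entry in packet_queue:  # dicts appended by check_data_packet
        parts[entry["url"]].append(entry["response_body_segment"])
    return {url: "".join(chunks) for url, chunks in parts.items()}


# Filter shape implied by the .get() defaults in check_data_packet (both keys optional):
packet_filter = {
    "domain_filter": ["example.com"],   # keep only packets from these hosts
    "method_filter": ["GET", "POST"],   # keep only these HTTP methods
}
```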

{jarvis_brain-0.1.10.0 → jarvis_brain-0.1.11.1}/tools/tools.py

```diff
@@ -6,6 +6,9 @@ from DrissionPage import ChromiumPage, ChromiumOptions
 from bs4 import BeautifulSoup
 from curl_cffi import requests
 from lxml import html, etree
+import base64
+from PIL import Image
+import io
 
 compress_html_js = """
 function getSimplifiedDOM(node) {
@@ -188,6 +191,72 @@ def dp_mcp_message_pack(message: str, **kwargs):
         }]
     }
 
+
+def btyes2Base64Img(target_byte):
+    """
+    把byte转为base64,用于传输图片
+    :param target_byte:
+    :return:
+    """
+    return "data:image/png;base64," + base64.b64encode(target_byte).decode()
+
+
+def compress_image_bytes(input_bytes, target_size_mb=1):
+    """
+    压缩图片字节数据到目标大小
+
+    参数:
+        input_bytes: 输入图片的字节数据
+        target_size_mb: 目标大小(MB),默认1MB
+
+    返回:
+        压缩后的图片字节数据
+    """
+    target_size = target_size_mb * 1024 * 1024  # 转换为字节
+
+    # 从字节数据打开图片
+    img = Image.open(io.BytesIO(input_bytes))
+
+    # 如果是PNG或其他格式,转换为RGB
+    if img.mode in ('RGBA', 'LA', 'P'):
+        img = img.convert('RGB')
+
+    # 初始质量设置
+    quality = 95
+
+    # 先尝试压缩
+    output_buffer = io.BytesIO()
+    img.save(output_buffer, 'JPEG', quality=quality, optimize=True)
+    output_bytes = output_buffer.getvalue()
+
+    # 如果文件仍然太大,逐步降低质量
+    while len(output_bytes) > target_size and quality > 10:
+        quality -= 5
+        output_buffer = io.BytesIO()
+        img.save(output_buffer, 'JPEG', quality=quality, optimize=True)
+        output_bytes = output_buffer.getvalue()
+
+    # 如果降低质量还不够,尝试缩小尺寸
+    if len(output_bytes) > target_size:
+        width, height = img.size
+
+        while len(output_bytes) > target_size and quality > 10:
+            # 缩小10%
+            width = int(width * 0.9)
+            height = int(height * 0.9)
+            img_resized = img.resize((width, height), Image.Resampling.LANCZOS)
+            output_buffer = io.BytesIO()
+            img_resized.save(output_buffer, 'JPEG', quality=quality, optimize=True)
+            output_bytes = output_buffer.getvalue()
+
+    final_size = len(output_bytes) / (1024 * 1024)
+    # print(f"压缩完成!")
+    # print(f"原始大小: {len(input_bytes) / (1024 * 1024):.2f}MB")
+    # print(f"压缩后大小: {final_size:.2f}MB")
+    # print(f"最终质量: {quality}")
+
+    return output_bytes
+
 # todo: 大致盘一下各种判定的逻辑【以下的所有压缩比之间的差距均取“绝对值”】
 # 1. 如果requests、无头、有头获取到的压缩比之间从差距都在15%以内,则认定该页面是静态页面,此时优先使用requests请求
 # 2. 如果requests的status_code为特定的412,或者521,则判定是瑞数和jsl。[此时还有一个特点:requests的压缩比会与其他两种方式获取到的压缩比差距非常大(一两千的那种)]
```
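
`compress_image_bytes` re-encodes the input as JPEG, lowering quality and then resolution until the output fits the target size, and `btyes2Base64Img` wraps raw bytes in a data URL. A minimal usage sketch, assuming a local `page.png` exists and that the installed `tools` package is importable:

```python
from tools.tools import btyes2Base64Img, compress_image_bytes

# Read an existing screenshot (hypothetical file name).
with open("page.png", "rb") as f:
    raw = f.read()

# Shrink to roughly 1 MB of JPEG-encoded bytes.
small = compress_image_bytes(raw, target_size_mb=1)
print(f"{len(raw)} -> {len(small)} bytes")

# Wrap as a data URL for transport (the helper always uses an image/png prefix).
print(btyes2Base64Img(small)[:60])
```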