lybic-guiagents 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lybic-guiagents might be problematic. Click here for more details.
- desktop_env/__init__.py +1 -0
- desktop_env/actions.py +203 -0
- desktop_env/controllers/__init__.py +0 -0
- desktop_env/controllers/python.py +471 -0
- desktop_env/controllers/setup.py +882 -0
- desktop_env/desktop_env.py +509 -0
- desktop_env/evaluators/__init__.py +5 -0
- desktop_env/evaluators/getters/__init__.py +41 -0
- desktop_env/evaluators/getters/calc.py +15 -0
- desktop_env/evaluators/getters/chrome.py +1774 -0
- desktop_env/evaluators/getters/file.py +154 -0
- desktop_env/evaluators/getters/general.py +42 -0
- desktop_env/evaluators/getters/gimp.py +38 -0
- desktop_env/evaluators/getters/impress.py +126 -0
- desktop_env/evaluators/getters/info.py +24 -0
- desktop_env/evaluators/getters/misc.py +406 -0
- desktop_env/evaluators/getters/replay.py +20 -0
- desktop_env/evaluators/getters/vlc.py +86 -0
- desktop_env/evaluators/getters/vscode.py +35 -0
- desktop_env/evaluators/metrics/__init__.py +160 -0
- desktop_env/evaluators/metrics/basic_os.py +68 -0
- desktop_env/evaluators/metrics/chrome.py +493 -0
- desktop_env/evaluators/metrics/docs.py +1011 -0
- desktop_env/evaluators/metrics/general.py +665 -0
- desktop_env/evaluators/metrics/gimp.py +637 -0
- desktop_env/evaluators/metrics/libreoffice.py +28 -0
- desktop_env/evaluators/metrics/others.py +92 -0
- desktop_env/evaluators/metrics/pdf.py +31 -0
- desktop_env/evaluators/metrics/slides.py +957 -0
- desktop_env/evaluators/metrics/table.py +585 -0
- desktop_env/evaluators/metrics/thunderbird.py +176 -0
- desktop_env/evaluators/metrics/utils.py +719 -0
- desktop_env/evaluators/metrics/vlc.py +524 -0
- desktop_env/evaluators/metrics/vscode.py +283 -0
- desktop_env/providers/__init__.py +35 -0
- desktop_env/providers/aws/__init__.py +0 -0
- desktop_env/providers/aws/manager.py +278 -0
- desktop_env/providers/aws/provider.py +186 -0
- desktop_env/providers/aws/provider_with_proxy.py +315 -0
- desktop_env/providers/aws/proxy_pool.py +193 -0
- desktop_env/providers/azure/__init__.py +0 -0
- desktop_env/providers/azure/manager.py +87 -0
- desktop_env/providers/azure/provider.py +207 -0
- desktop_env/providers/base.py +97 -0
- desktop_env/providers/gcp/__init__.py +0 -0
- desktop_env/providers/gcp/manager.py +0 -0
- desktop_env/providers/gcp/provider.py +0 -0
- desktop_env/providers/virtualbox/__init__.py +0 -0
- desktop_env/providers/virtualbox/manager.py +463 -0
- desktop_env/providers/virtualbox/provider.py +124 -0
- desktop_env/providers/vmware/__init__.py +0 -0
- desktop_env/providers/vmware/manager.py +455 -0
- desktop_env/providers/vmware/provider.py +105 -0
- gui_agents/__init__.py +0 -0
- gui_agents/agents/Action.py +209 -0
- gui_agents/agents/__init__.py +0 -0
- gui_agents/agents/agent_s.py +832 -0
- gui_agents/agents/global_state.py +610 -0
- gui_agents/agents/grounding.py +651 -0
- gui_agents/agents/hardware_interface.py +129 -0
- gui_agents/agents/manager.py +568 -0
- gui_agents/agents/translator.py +132 -0
- gui_agents/agents/worker.py +355 -0
- gui_agents/cli_app.py +560 -0
- gui_agents/core/__init__.py +0 -0
- gui_agents/core/engine.py +1496 -0
- gui_agents/core/knowledge.py +449 -0
- gui_agents/core/mllm.py +555 -0
- gui_agents/tools/__init__.py +0 -0
- gui_agents/tools/tools.py +727 -0
- gui_agents/unit_test/__init__.py +0 -0
- gui_agents/unit_test/run_tests.py +65 -0
- gui_agents/unit_test/test_manager.py +330 -0
- gui_agents/unit_test/test_worker.py +269 -0
- gui_agents/utils/__init__.py +0 -0
- gui_agents/utils/analyze_display.py +301 -0
- gui_agents/utils/common_utils.py +263 -0
- gui_agents/utils/display_viewer.py +281 -0
- gui_agents/utils/embedding_manager.py +53 -0
- gui_agents/utils/image_axis_utils.py +27 -0
- lybic_guiagents-0.1.0.dist-info/METADATA +416 -0
- lybic_guiagents-0.1.0.dist-info/RECORD +85 -0
- lybic_guiagents-0.1.0.dist-info/WHEEL +5 -0
- lybic_guiagents-0.1.0.dist-info/licenses/LICENSE +201 -0
- lybic_guiagents-0.1.0.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,1774 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import platform
|
|
5
|
+
import sqlite3
|
|
6
|
+
import time
|
|
7
|
+
from urllib.parse import unquote
|
|
8
|
+
from typing import Dict, Any, List
|
|
9
|
+
from urllib.parse import urlparse, parse_qs
|
|
10
|
+
|
|
11
|
+
import lxml.etree
|
|
12
|
+
import requests
|
|
13
|
+
from lxml.cssselect import CSSSelector
|
|
14
|
+
from lxml.etree import _Element
|
|
15
|
+
from playwright.sync_api import sync_playwright, expect
|
|
16
|
+
from pydrive.auth import GoogleAuth
|
|
17
|
+
from pydrive.drive import GoogleDrive, GoogleDriveFileList, GoogleDriveFile
|
|
18
|
+
|
|
19
|
+
# XML namespace map for parsing AT-SPI accessibility trees: maps the short
# prefixes used in XPath/CSS queries to the deskat URIs emitted by the
# at-spi exporter running inside the VM.
_accessibility_ns_map = {
    "st": "uri:deskat:state.at-spi.gnome.org",
    "attr": "uri:deskat:attributes.at-spi.gnome.org",
    "cp": "uri:deskat:component.at-spi.gnome.org",
    "doc": "uri:deskat:document.at-spi.gnome.org",
    "docattr": "uri:deskat:attributes.document.at-spi.gnome.org",
    "txt": "uri:deskat:text.at-spi.gnome.org",
    "val": "uri:deskat:value.at-spi.gnome.org",
    "act": "uri:deskat:action.at-spi.gnome.org"
}

# Module-level logger shared by every Chrome getter in this file.
logger = logging.getLogger("desktopenv.getters.chrome")
|
|
31
|
+
|
|
32
|
+
"""
|
|
33
|
+
WARNING:
|
|
34
|
+
1. Functions from this script assume that no account is registered on Chrome, otherwise the default file path needs to be changed.
|
|
35
|
+
2. The functions are not tested on Windows and Mac, but they should work.
|
|
36
|
+
"""
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_info_from_website(env, config: Dict[Any, Any]) -> Any:
    """ Get information from a website. Especially useful when the information may be updated through time.
    Args:
        env (Any): The environment object; must expose ``vm_ip``, ``chromium_port``,
            ``server_port`` and ``current_use_proxy``.
        config (Dict[Any, Any]): The configuration dictionary.
            - url (str): The URL of the website to visit
            - infos (List[Dict[str, str]]): The list of information to be extracted from the website. Each dictionary contains:
                - action (str): chosen from 'inner_text', 'attribute', 'click_and_inner_text', 'click_and_attribute', concretely,
                    - inner_text: extract the inner text of the element specified by the selector
                    - attribute: extract the attribute of the element specified by the selector
                    - click_and_inner_text: click elements following the selector and then extract the inner text of the last element
                    - click_and_attribute: click elements following the selector and then extract the attribute of the last element
                - selector (Union[str, List[str]]): The CSS selector(s) of the element(s) to be extracted.
                - attribute (str): optional for 'attribute' and 'click_and_attribute', the attribute to be extracted.
            - backups (Any): The backup information to be returned if the extraction fails.

    Returns:
        list: one extracted value per entry of ``config['infos']`` on success,
        otherwise ``config.get('backups')``.
    """
    # Log function entry and the amount of work requested.
    logger.info(f"[INFO_FROM_WEBSITE] Starting to get information from website: {config.get('url', 'N/A')}")
    logger.info(f"[INFO_FROM_WEBSITE] Total info operations to perform: {len(config.get('infos', []))}")
    logger.debug(f"[INFO_FROM_WEBSITE] Full config: {config}")

    try:
        host = env.vm_ip
        port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
        server_port = env.server_port
        remote_debugging_url = f"http://{host}:{port}"
        backend_url = f"http://{host}:{server_port}"
        use_proxy = env.current_use_proxy

        logger.info(f"[INFO_FROM_WEBSITE] Connecting to Chrome at {remote_debugging_url}")

        with sync_playwright() as p:
            # connect to remote Chrome instance
            try:
                browser = p.chromium.connect_over_cdp(remote_debugging_url)
                logger.info(f"[INFO_FROM_WEBSITE] Successfully connected to existing Chrome instance")
            except Exception as e:
                logger.warning(f"[INFO_FROM_WEBSITE] Failed to connect to existing Chrome instance: {e}")
                logger.info(f"[INFO_FROM_WEBSITE] Starting new Chrome instance...")

                # If the connection fails (e.g., the agent closed the browser
                # instance), ask the in-VM backend server to launch a new one.
                # ARM machines ship chromium; everything else uses google-chrome.
                app = 'chromium' if 'arm' in platform.machine() else 'google-chrome'
                command = [
                    app,
                    "--remote-debugging-port=1337"
                ]
                if use_proxy:
                    command.append(f"--proxy-server=127.0.0.1:18888")
                    logger.info(f"[INFO_FROM_WEBSITE] Using proxy server: 127.0.0.1:18888")

                logger.info(f"[INFO_FROM_WEBSITE] Starting browser with command: {' '.join(command)}")
                payload = json.dumps({"command": command, "shell": False})
                headers = {"Content-Type": "application/json"}
                requests.post(backend_url + "/setup" + "/launch", headers=headers, data=payload)
                # Give the freshly launched browser time to open its CDP port.
                time.sleep(5)
                browser = p.chromium.connect_over_cdp(remote_debugging_url)
                logger.info(f"[INFO_FROM_WEBSITE] Successfully connected to new Chrome instance")

            page = browser.contexts[0].new_page()
            logger.info(f"[INFO_FROM_WEBSITE] Created new page, navigating to: {config['url']}")

            page.goto(config["url"])
            page.wait_for_load_state('load')

            # Record basic page state once the initial load finishes.
            logger.info(f"[INFO_FROM_WEBSITE] Page loaded successfully")
            logger.info(f"[INFO_FROM_WEBSITE] Page title: '{page.title()}'")
            logger.info(f"[INFO_FROM_WEBSITE] Current URL: '{page.url}'")

            infos = []
            for idx, info_dict in enumerate(config.get('infos', [])):
                logger.info(f"[INFO_FROM_WEBSITE] Processing info operation {idx + 1}/{len(config.get('infos', []))}")
                logger.debug(f"[INFO_FROM_WEBSITE] Info config: {info_dict}")

                # A previous click_and_* operation may have navigated away;
                # return to the configured URL before each extraction.
                if page.url != config["url"]:
                    logger.info(f"[INFO_FROM_WEBSITE] Page URL changed, navigating back to: {config['url']}")
                    page.goto(config["url"])
                    page.wait_for_load_state('load')
                    logger.info(f"[INFO_FROM_WEBSITE] Back to original page")

                action = info_dict.get('action', 'inner_text')
                selector = info_dict.get('selector')
                logger.info(f"[INFO_FROM_WEBSITE] Action: {action}, Selector: {selector}")

                if action == "inner_text":
                    logger.debug(f"[INFO_FROM_WEBSITE] Waiting for element with selector: {selector}")
                    ele = page.wait_for_selector(info_dict['selector'], state='attached', timeout=10000)
                    extracted_text = ele.inner_text()
                    logger.info(f"[INFO_FROM_WEBSITE] Successfully extracted inner_text: '{extracted_text}'")
                    infos.append(extracted_text)

                elif action == "attribute":
                    attribute = info_dict.get('attribute')
                    logger.debug(f"[INFO_FROM_WEBSITE] Waiting for element with selector: {selector}")
                    logger.debug(f"[INFO_FROM_WEBSITE] Extracting attribute: {attribute}")
                    ele = page.wait_for_selector(info_dict['selector'], state='attached', timeout=10000)
                    extracted_attr = ele.get_attribute(info_dict['attribute'])
                    logger.info(f"[INFO_FROM_WEBSITE] Successfully extracted attribute '{attribute}': '{extracted_attr}'")
                    infos.append(extracted_attr)

                elif action == 'click_and_inner_text':
                    logger.debug(f"[INFO_FROM_WEBSITE] Performing click_and_inner_text with {len(info_dict['selector'])} selectors")
                    # bugfix: the loop variable used to be `idx`, shadowing the
                    # outer infos-loop index and corrupting the completion log.
                    for sel_idx, sel in enumerate(info_dict['selector']):
                        logger.debug(f"[INFO_FROM_WEBSITE] Processing selector {sel_idx + 1}/{len(info_dict['selector'])}: {sel}")
                        if sel_idx != len(info_dict['selector']) - 1:
                            # Intermediate selectors are clicked through.
                            logger.debug(f"[INFO_FROM_WEBSITE] Clicking element with selector: {sel}")
                            link = page.wait_for_selector(sel, state='attached', timeout=10000)
                            link.click()
                            page.wait_for_load_state('load')
                            logger.info(f"[INFO_FROM_WEBSITE] Successfully clicked element, page loaded")
                            logger.debug(f"[INFO_FROM_WEBSITE] New page URL: {page.url}")
                        else:
                            # The last selector is the extraction target.
                            logger.debug(f"[INFO_FROM_WEBSITE] Extracting inner_text from final element: {sel}")
                            ele = page.wait_for_selector(sel, state='attached', timeout=10000)
                            extracted_text = ele.inner_text()
                            logger.info(f"[INFO_FROM_WEBSITE] Successfully extracted inner_text after clicks: '{extracted_text}'")
                            infos.append(extracted_text)

                elif action == 'click_and_attribute':
                    attribute = info_dict.get('attribute')
                    logger.debug(f"[INFO_FROM_WEBSITE] Performing click_and_attribute with {len(info_dict['selector'])} selectors")
                    logger.debug(f"[INFO_FROM_WEBSITE] Target attribute: {attribute}")
                    for sel_idx, sel in enumerate(info_dict['selector']):
                        logger.debug(f"[INFO_FROM_WEBSITE] Processing selector {sel_idx + 1}/{len(info_dict['selector'])}: {sel}")
                        if sel_idx != len(info_dict['selector']) - 1:
                            logger.debug(f"[INFO_FROM_WEBSITE] Clicking element with selector: {sel}")
                            link = page.wait_for_selector(sel, state='attached', timeout=10000)
                            link.click()
                            page.wait_for_load_state('load')
                            logger.info(f"[INFO_FROM_WEBSITE] Successfully clicked element, page loaded")
                            logger.debug(f"[INFO_FROM_WEBSITE] New page URL: {page.url}")
                        else:
                            logger.debug(f"[INFO_FROM_WEBSITE] Extracting attribute from final element: {sel}")
                            # bugfix: this wait was the only one without an
                            # explicit timeout; use 10s like every sibling wait.
                            ele = page.wait_for_selector(sel, state='attached', timeout=10000)
                            extracted_attr = ele.get_attribute(info_dict['attribute'])
                            logger.info(f"[INFO_FROM_WEBSITE] Successfully extracted attribute '{attribute}' after clicks: '{extracted_attr}'")
                            infos.append(extracted_attr)
                else:
                    logger.error(f"[INFO_FROM_WEBSITE] Unsupported action: {action}")
                    raise NotImplementedError(f'The action {action} is not supported yet.')

                logger.info(f"[INFO_FROM_WEBSITE] Completed info operation {idx + 1}")

            # Summarize everything that was extracted.
            logger.info(f"[INFO_FROM_WEBSITE] All operations completed successfully")
            logger.info(f"[INFO_FROM_WEBSITE] Total extracted information count: {len(infos)}")
            logger.info(f"[INFO_FROM_WEBSITE] Final extracted information: {infos}")

            return infos
    except Exception as e:
        # Any failure (connection, navigation, extraction) falls back to the
        # configured backup value rather than propagating.
        logger.error(f'[INFO_FROM_WEBSITE] ERROR: Failed to obtain information from website: {config.get("url", "N/A")}')
        logger.error(f'[INFO_FROM_WEBSITE] Exception details: {str(e)}')
        logger.error(f'[INFO_FROM_WEBSITE] Exception type: {type(e).__name__}')
        logger.info(f'[INFO_FROM_WEBSITE] Using backup results instead')
        backup_data = config.get('backups', None)
        logger.info(f'[INFO_FROM_WEBSITE] Backup data: {backup_data}')
        return backup_data
|
197
|
+
|
|
198
|
+
|
|
199
|
+
# The following ones just need to load info from the files of software, no need to connect to the software
|
|
200
|
+
def get_default_search_engine(env, config: Dict[str, str]):
    """Return the name of Chrome's default search engine inside the VM.

    The Preferences JSON file is located per-OS, fetched through the
    controller, and the provider's short name is read from it. Falls back
    to "Google" when the file is missing, unreadable, or lacks the entry.
    """
    os_type = env.vm_platform
    # Resolve the per-OS location of Chrome's Preferences file by running a
    # small snippet inside the VM (env vars must be expanded remotely).
    if os_type == 'Windows':
        cmd = """import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                               'Google\\Chrome\\User Data\\Default\\Preferences'))"""
    elif os_type == 'Darwin':
        cmd = "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))"
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            cmd = "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))"
        else:
            cmd = "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))"
    else:
        raise Exception('Unsupported operating system')
    preference_file_path = env.controller.execute_python_command(cmd)['output'].strip()

    try:
        raw = env.controller.get_file(preference_file_path)
        prefs = json.loads(raw)
        # The path within the JSON data to the default search engine might vary
        provider = prefs.get('default_search_provider_data', {}).get('template_url_data', {})
        return provider.get('short_name', 'Google')
    except Exception as e:
        logger.error(f"Error: {e}")
        return "Google"
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def get_cookie_data(env, config: Dict[str, str]):
    """
    Get the cookies from the Chrome browser.
    Assume the cookies are stored in the default location, not encrypted and not large in size.

    Args:
        env: The environment object; uses ``vm_platform``, ``controller`` and ``cache_dir``.
        config: - dest (str): filename under ``env.cache_dir`` where the copied
            cookie database is written before being queried.

    Returns:
        list | None: all rows of the ``cookies`` table, or None on any failure.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        # NOTE(review): the single backslashes this sends to the remote
        # interpreter produce escapes like '\U' — confirm the Windows branch
        # actually works before relying on it.
        chrome_cookie_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                                'Google\\Chrome\\User Data\\Default\\Cookies'))""")['output'].strip()
    elif os_type == 'Darwin':
        chrome_cookie_file_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Cookies'))")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            chrome_cookie_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Cookies'))")[
                'output'].strip()
        else:
            chrome_cookie_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Cookies'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        # Copy the SQLite database out of the VM into the local cache dir,
        # then query the copy (the live DB may be locked by Chrome).
        content = env.controller.get_file(chrome_cookie_file_path)
        _path = os.path.join(env.cache_dir, config["dest"])

        with open(_path, "wb") as f:
            f.write(content)

        # bugfix: close the SQLite connection instead of leaking it.
        conn = sqlite3.connect(_path)
        try:
            cursor = conn.cursor()
            # Dump every stored cookie row.
            cursor.execute("SELECT * FROM cookies")
            cookies = cursor.fetchall()
        finally:
            conn.close()
        return cookies
    except Exception as e:
        logger.error(f"Error: {e}")
        return None
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def get_history(env, config: Dict[str, str]):
    """Copy Chrome's History database out of the VM and return its URL rows.

    Args:
        env: The environment object; uses ``vm_platform``, ``controller`` and ``cache_dir``.
        config: - dest (str): filename under ``env.cache_dir`` for the copied
            History database.

    Returns:
        list | None: (url, title, last_visit_time) tuples, or None on failure.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        chrome_history_path = env.controller.execute_python_command(
            """import os; print(os.path.join(os.getenv('USERPROFILE'), "AppData", "Local", "Google", "Chrome", "User Data", "Default", "History"))""")[
            'output'].strip()
    elif os_type == 'Darwin':
        chrome_history_path = env.controller.execute_python_command(
            """import os; print(os.path.join(os.getenv('HOME'), "Library", "Application Support", "Google", "Chrome", "Default", "History"))""")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            chrome_history_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/History'))")[
                'output'].strip()
        else:
            chrome_history_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config', 'google-chrome', 'Default', 'History'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        # Copy the SQLite database locally before querying it (the live file
        # may be locked by a running Chrome).
        content = env.controller.get_file(chrome_history_path)
        _path = os.path.join(env.cache_dir, config["dest"])

        with open(_path, "wb") as f:
            f.write(content)

        # bugfix: close the SQLite connection instead of leaking it.
        conn = sqlite3.connect(_path)
        try:
            cursor = conn.cursor()
            # Fetch the full browsing history from the urls table.
            cursor.execute("SELECT url, title, last_visit_time FROM urls")
            history_items = cursor.fetchall()
        finally:
            conn.close()
        return history_items
    except Exception as e:
        logger.error(f"Error: {e}")
        return None
|
317
|
+
|
|
318
|
+
|
|
319
|
+
def get_enabled_experiments(env, config: Dict[str, str]):
    """Return the list of chrome://flags experiments enabled in the VM's browser.

    Reads ``browser.enabled_labs_experiments`` from Chrome's Local State file;
    returns an empty list when the file is missing or unreadable.
    """
    os_type = env.vm_platform
    # Locate the Local State file by expanding env vars inside the VM.
    if os_type == 'Windows':
        cmd = """import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                               'Google\\Chrome\\User Data\\Local State'))"""
    elif os_type == 'Darwin':
        cmd = "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Local State'))"
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            cmd = "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Local State'))"
        else:
            cmd = "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Local State'))"
    else:
        raise Exception('Unsupported operating system')
    preference_file_path = env.controller.execute_python_command(cmd)['output'].strip()

    try:
        state = json.loads(env.controller.get_file(preference_file_path))
        return state.get('browser', {}).get('enabled_labs_experiments', [])
    except Exception as e:
        logger.error(f"Error: {e}")
        return []
|
351
|
+
|
|
352
|
+
|
|
353
|
+
def get_profile_name(env, config: Dict[str, str]):
    """
    Get the username from the Chrome browser.
    Assume the cookies are stored in the default location, not encrypted and not large in size.

    Reads ``profile.name`` from Chrome's Preferences JSON; returns None when
    the file cannot be read or the entry is absent.
    """
    os_type = env.vm_platform
    # Locate the Preferences file by expanding env vars inside the VM.
    if os_type == 'Windows':
        cmd = """import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                               'Google\\Chrome\\User Data\\Default\\Preferences'))"""
    elif os_type == 'Darwin':
        cmd = "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))"
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            cmd = "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))"
        else:
            cmd = "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))"
    else:
        raise Exception('Unsupported operating system')
    preference_file_path = env.controller.execute_python_command(cmd)['output'].strip()

    try:
        prefs = json.loads(env.controller.get_file(preference_file_path))
        return prefs.get('profile', {}).get('name', None)
    except Exception as e:
        logger.error(f"Error: {e}")
        return None
|
388
|
+
|
|
389
|
+
|
|
390
|
+
def get_chrome_language(env, config: Dict[str, str]):
    """Return Chrome's UI locale (e.g. "en-US") read from the Local State file.

    Defaults to "en-US" when the file is unreadable or the entry is missing.
    """
    os_type = env.vm_platform
    # Locate the Local State file by expanding env vars inside the VM.
    if os_type == 'Windows':
        cmd = """import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                               'Google\\Chrome\\User Data\\Local State'))"""
    elif os_type == 'Darwin':
        cmd = "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Local State'))"
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            cmd = "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Local State'))"
        else:
            cmd = "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Local State'))"
    else:
        raise Exception('Unsupported operating system')
    preference_file_path = env.controller.execute_python_command(cmd)['output'].strip()

    try:
        state = json.loads(env.controller.get_file(preference_file_path))
        app_locale = state.get('intl', {}).get('app_locale', "en-US")
        return app_locale
    except Exception as e:
        logger.error(f"Error: {e}")
        return "en-US"
|
422
|
+
|
|
423
|
+
|
|
424
|
+
def get_chrome_font_size(env, config: Dict[str, str]):
    """Return Chrome's webkit font-size preferences as a dict.

    Reads ``webkit.webprefs`` from the Preferences file; when the entry (or
    the whole file) is unavailable, returns Chrome's stock defaults.
    """
    os_type = env.vm_platform
    # Locate the Preferences file by expanding env vars inside the VM.
    if os_type == 'Windows':
        cmd = """import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                               'Google\\Chrome\\User Data\\Default\\Preferences'))"""
    elif os_type == 'Darwin':
        cmd = "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))"
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            cmd = "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))"
        else:
            cmd = "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))"
    else:
        raise Exception('Unsupported operating system')
    preference_file_path = env.controller.execute_python_command(cmd)['output'].strip()

    try:
        prefs = json.loads(env.controller.get_file(preference_file_path))
        # Stock Chrome defaults are used when the webprefs entry is absent.
        font_prefs = prefs.get('webkit', {}).get('webprefs', {
            "default_fixed_font_size": 13,
            "default_font_size": 16,
            "minimum_font_size": 13
        })
        return font_prefs
    except Exception as e:
        logger.error(f"Error: {e}")
        return {
            "default_fixed_font_size": 13,
            "default_font_size": 16
        }
|
463
|
+
|
|
464
|
+
|
|
465
|
+
def get_bookmarks(env, config: Dict[str, str]):
    """Return the 'roots' section of Chrome's Bookmarks JSON file.

    Returns an empty list when the file is absent or empty. Note that unlike
    the sibling getters, read/parse failures here propagate to the caller.
    """
    os_type = env.vm_platform
    # Locate the Bookmarks file by expanding env vars inside the VM.
    if os_type == 'Windows':
        cmd = """import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                               'Google\\Chrome\\User Data\\Default\\Bookmarks'))"""
    elif os_type == 'Darwin':
        cmd = "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Bookmarks'))"
    elif os_type == 'Linux':
        # ARM images ship snap chromium; x86 uses the regular google-chrome dir.
        if "arm" in platform.machine():
            cmd = "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Bookmarks'))"
        else:
            cmd = "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Bookmarks'))"
    else:
        raise Exception('Unsupported operating system')
    preference_file_path = env.controller.execute_python_command(cmd)['output'].strip()

    content = env.controller.get_file(preference_file_path)
    if not content:
        return []
    return json.loads(content).get('roots', {})
|
492
|
+
|
|
493
|
+
|
|
494
|
+
# todo: move this to the main.py
|
|
495
|
+
# todo: move this to the main.py
def get_extensions_installed_from_shop(env, config: Dict[str, str]):
    """Find the Chrome extensions directory based on the operating system and
    return the parsed manifest.json of every installed extension version.

    Args:
        env: The environment object; uses ``vm_platform`` and ``controller``.
        config: unused.

    Returns:
        list[dict]: one parsed manifest per extension version directory.

    NOTE(review): the directory path is computed inside the VM but the
    listing below runs on the *host* filesystem — confirm the two are the
    same machine (or a shared mount) before relying on this getter.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        # bugfix: the remote snippet was a bare expression (no import/print),
        # so 'output' was always empty; it also ended the remote string with
        # an escaped quote. Build the path with os.path.join instead.
        chrome_extension_dir = env.controller.execute_python_command(
            """import os; print(os.path.join(os.path.expanduser('~'), 'AppData', 'Local', 'Google', 'Chrome', 'User Data', 'Default', 'Extensions'))""")[
            'output'].strip()
    elif os_type == 'Darwin':  # macOS
        # bugfix: added the missing `import os; print(...)` wrapper.
        chrome_extension_dir = env.controller.execute_python_command(
            """import os; print(os.path.expanduser('~') + '/Library/Application Support/Google/Chrome/Default/Extensions/')""")[
            'output'].strip()
    elif os_type == 'Linux':
        if "arm" in platform.machine():
            # bugfix: this branch assigned `preference_file_path`, leaving
            # `chrome_extension_dir` undefined (NameError on ARM Linux).
            chrome_extension_dir = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Extensions/'))")[
                'output'].strip()
        else:
            # bugfix: added the missing `import os; print(...)` wrapper.
            chrome_extension_dir = env.controller.execute_python_command(
                """import os; print(os.path.expanduser('~') + '/.config/google-chrome/Default/Extensions/')""")['output'].strip()
    else:
        raise Exception('Unsupported operating system')

    manifests = []
    # Layout: Extensions/<extension_id>/<version>/manifest.json
    for extension_id in os.listdir(chrome_extension_dir):
        extension_path = os.path.join(chrome_extension_dir, extension_id)
        if os.path.isdir(extension_path):
            # Iterate through version-named subdirectories
            for version_dir in os.listdir(extension_path):
                version_path = os.path.join(extension_path, version_dir)
                manifest_path = os.path.join(version_path, 'manifest.json')
                if os.path.isfile(manifest_path):
                    with open(manifest_path, 'r') as file:
                        try:
                            manifest = json.load(file)
                            manifests.append(manifest)
                        except json.JSONDecodeError:
                            # Skip corrupt manifests but keep scanning.
                            logger.error(f"Error reading {manifest_path}")
    return manifests
|
533
|
+
|
|
534
|
+
|
|
535
|
+
# The following ones require Playwright to be installed on the target machine, and the chrome needs to be pre-config on
|
|
536
|
+
# port info to allow remote debugging, see README.md for details
|
|
537
|
+
|
|
538
|
+
def get_page_info(env, config: Dict[str, str]):
    """Open ``config["url"]`` in the VM's Chrome via CDP and return page info.

    Connects to the remote Chrome instance over the Chrome DevTools Protocol;
    if no instance is listening, asks the VM-side setup server to launch one
    (``chromium`` on ARM hosts, ``google-chrome`` otherwise) and retries once.

    Args:
        env: desktop environment handle providing vm_ip / chromium_port /
            server_port.
        config: must contain 'url' — the page to open.

    Returns:
        dict with keys 'title', 'url' and 'content' (full page HTML).
    """
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port
    url = config["url"]

    remote_debugging_url = f"http://{host}:{port}"
    with sync_playwright() as p:
        # connect to remote Chrome instance
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance
            platform.machine()
            if "arm" in platform.machine():
                # start a new browser instance if the connection fails
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            # Ask the in-VM setup server to launch the browser, then retry CDP.
            headers = {"Content-Type": "application/json"}
            requests.post("http://" + host + ":" + server_port + "/setup" + "/launch", headers=headers, data=payload)
            time.sleep(5)  # give the freshly launched browser time to open the CDP port
            browser = p.chromium.connect_over_cdp(remote_debugging_url)

        # NOTE(review): assumes at least one browser context already exists —
        # raises IndexError otherwise; confirm against the launch flow.
        page = browser.contexts[0].new_page()
        page.goto(url)

        try:
            # Wait for the page to finish loading, this prevents the "execution context was destroyed" issue
            page.wait_for_load_state('load')  # Wait for the 'load' event to complete
            title = page.title()
            url = page.url
            page_info = {'title': title, 'url': url, 'content': page.content()}
        except TimeoutError:
            # If page loading times out, catch the exception and store the current information in the list
            page_info = {'title': 'Load timeout', 'url': page.url, 'content': page.content()}
        except Exception as e:
            # Catch other potential exceptions that might occur while reading the page title
            print(f'Error: {e}')
            page_info = {'title': 'Error encountered', 'url': page.url, 'content': page.content()}

        browser.close()
        return page_info
|
|
588
|
+
|
|
589
|
+
|
|
590
|
+
def get_open_tabs_info(env, config: Dict[str, str]):
    """Return ``[{'title': ..., 'url': ...}, ...]`` for every open tab in the VM's Chrome.

    Connects over CDP; if that fails, asks the VM-side setup server to launch
    the browser (``chromium`` on ARM hosts, ``google-chrome`` otherwise) and
    retries once. Returns [] when no connection can be established at all.
    """
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port

    remote_debugging_url = f"http://{host}:{port}"
    with sync_playwright() as p:
        # connect to remote Chrome instance
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance
            platform.machine()
            if "arm" in platform.machine():
                # start a new browser instance if the connection fails
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            headers = {"Content-Type": "application/json"}
            requests.post(f"http://{host}:{server_port}/setup/launch", headers=headers, data=payload)
            time.sleep(5)  # give the freshly launched browser time to open the CDP port
            try:
                browser = p.chromium.connect_over_cdp(remote_debugging_url)
            except Exception as e:
                # Even the retry failed — report no tabs rather than raising.
                return []

        tabs_info = []
        for context in browser.contexts:
            for page in context.pages:
                try:
                    # Wait for the page to finish loading, this prevents the "execution context was destroyed" issue
                    page.wait_for_load_state('networkidle')  # Wait for the 'load' event to complete
                    title = page.title()
                    url = page.url
                    tabs_info.append({'title': title, 'url': url})
                except TimeoutError:
                    # If page loading times out, catch the exception and store the current information in the list
                    tabs_info.append({'title': 'Load timeout', 'url': page.url})
                except Exception as e:
                    # Catch other potential exceptions that might occur while reading the page title
                    print(f'Error: {e}')
                    tabs_info.append({'title': 'Error encountered', 'url': page.url})

        browser.close()
        return tabs_info
|
|
642
|
+
|
|
643
|
+
|
|
644
|
+
def get_active_url_from_accessTree(env, config):
    """
    Playwright cannot get the url of the active tab directly,
    so we use the accessibility tree to get the active tab info.
    This function reads the active tab url from the accessibility tree.
    config:
        Dict[str, str]{
            # we no longer need to specify the xpath or selectors, since we will use default values
            'goto_prefix':
                the prefix added to the beginning of the url to be opened, default is "https://"
                (the url we get from the accessibility tree has no scheme prefix)
            ...(other keys, not used in this function)
        }
    Return
        url: str, or None on any failure.
    """
    # Ensure the controller and its method are accessible and return a valid result
    if hasattr(env, 'controller') and callable(getattr(env.controller, 'get_accessibility_tree', None)):
        accessibility_tree = env.controller.get_accessibility_tree()
        if accessibility_tree is None:
            print("Failed to get the accessibility tree.")
            return None
    else:
        print("Controller or method 'get_accessibility_tree' not found.")
        return None

    logger.debug("AT@eval: %s", accessibility_tree)

    at = None
    try:
        at = lxml.etree.fromstring(accessibility_tree)
    except ValueError as e:
        logger.error(f"Error parsing accessibility tree: {e}")
        return None

    # Determine the correct selector based on system architecture:
    # ARM hosts run Chromium, everything else runs Google Chrome.
    selector = None
    arch = platform.machine()
    print(f"Your architecture is: {arch}")

    if "arm" in arch:
        selector_string = "application[name=Chromium] entry[name=Address\\ and\\ search\\ bar]"
    else:
        selector_string = "application[name=Google\\ Chrome] entry[name=Address\\ and\\ search\\ bar]"

    try:
        selector = CSSSelector(selector_string, namespaces=_accessibility_ns_map)
    except Exception as e:
        logger.error(f"Failed to parse the selector for active tab URL: {e}")
        return None

    elements = selector(at) if selector else []
    if not elements:
        print("No elements found.")
        return None
    elif not elements[-1].text:
        print("No text found in the latest element.")
        return None

    # Use a default prefix if 'goto_prefix' is not specified in the config
    goto_prefix = config.get("goto_prefix", "https://")

    # BUGFIX: read the same element the guard above validated. The original
    # checked elements[-1].text but returned elements[0].text, which could
    # read a different (possibly empty) entry when multiple matches exist.
    active_tab_url = f"{goto_prefix}{elements[-1].text}"
    print(f"Active tab url now: {active_tab_url}")
    return active_tab_url
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
def get_active_tab_info(env, config: Dict[str, str]):
    """
    This function is used to get all info about the active tab.
    Warning! This function will reload the target-url page.
    If the target url has cache or cookie, this function may reload to another page.
    If you have tested that the url will not pop up to another page (check in incognito mode yourself first),
    you can use this function.
    config: Dict[str, str]{
        # Keys used in get_active_url_from_accessTree: "xpath", "selectors"
    }
    Returns:
        dict {'title', 'url', 'content'} for the (reloaded) active tab, or None on failure.
    """
    active_tab_url = get_active_url_from_accessTree(env, config)
    if active_tab_url is None:
        logger.error("Failed to get the url of active tab")
        return None
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file

    remote_debugging_url = f"http://{host}:{port}"
    with sync_playwright() as p:
        # connect to remote Chrome instance, since it is supposed to be the active one, we won't start a new one if failed
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            return None

        active_tab_info = {}
        # go to the target URL page (this re-navigates — see docstring warning)
        page = browser.new_page()
        try:
            page.goto(active_tab_url)
        except:
            logger.error("Failed to go to the target URL page")
            return None
        page.wait_for_load_state('load')  # Wait for the 'load' event to complete
        active_tab_info = {
            'title': page.title(),
            'url': page.url,
            'content': page.content()  # get the HTML content of the page
        }

        browser.close()
        # print("active_tab_title: {}".format(active_tab_info.get('title', 'None')))
        # print("active_tab_url: {}".format(active_tab_info.get('url', 'None')))
        # print("active_tab_content: {}".format(active_tab_info.get('content', 'None')))
        return active_tab_info
|
|
761
|
+
|
|
762
|
+
|
|
763
|
+
def get_pdf_from_url(env, config: Dict[str, str]) -> str:
    """
    Download a PDF from a URL.

    Renders the page at config["path"] to a PDF file via Chrome's CDP
    print-to-PDF, writing it under the local cache directory.

    config keys:
        path (str): the URL to render.
        dest (str): file name (relative to env.cache_dir) for the output PDF.

    Returns:
        The local filesystem path of the generated PDF.
    """
    _url = config["path"]
    _path = os.path.join(env.cache_dir, config["dest"])

    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port

    remote_debugging_url = f"http://{host}:{port}"

    with sync_playwright() as p:
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance
            platform.machine()
            if "arm" in platform.machine():
                # start a new browser instance if the connection fails
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            headers = {"Content-Type": "application/json"}
            requests.post("http://" + host + ":" + server_port + "/setup" + "/launch", headers=headers, data=payload)
            time.sleep(5)  # give the freshly launched browser time to open the CDP port
            browser = p.chromium.connect_over_cdp(remote_debugging_url)

        page = browser.new_page()
        page.goto(_url)
        page.pdf(path=_path)  # page.pdf writes on the Playwright (host) side
        browser.close()

    return _path
|
|
805
|
+
|
|
806
|
+
|
|
807
|
+
# fixme: needs to be changed (maybe through post-processing) since it's not working
|
|
808
|
+
def get_chrome_saved_address(env, config: Dict[str, str]):
    """Return the raw HTML of Chrome's chrome://settings/addresses page.

    NOTE(review): flagged by the original author as non-working (see the fixme
    above this function) — chrome://settings content is likely not retrievable
    this way; confirm before relying on the returned HTML.
    """
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port

    remote_debugging_url = f"http://{host}:{port}"
    with sync_playwright() as p:
        # connect to remote Chrome instance
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance
            platform.machine()
            if "arm" in platform.machine():
                # start a new browser instance if the connection fails
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            headers = {"Content-Type": "application/json"}
            requests.post("http://" + host + ":" + server_port + "/setup" + "/launch", headers=headers, data=payload)
            time.sleep(5)  # give the freshly launched browser time to open the CDP port
            browser = p.chromium.connect_over_cdp(remote_debugging_url)

        page = browser.new_page()

        # Navigate to Chrome's settings page for autofill
        page.goto("chrome://settings/addresses")

        # Get the HTML content of the page
        content = page.content()

        browser.close()

        return content
|
|
849
|
+
|
|
850
|
+
|
|
851
|
+
def get_shortcuts_on_desktop(env, config: Dict[str, str]):
    """Collect shortcut files from the VM's desktop.

    Returns a dict mapping each shortcut file name to its decoded content,
    or [] when the VM platform is not one of Windows/Darwin/Linux.
    """
    # Each supported platform stores desktop shortcuts with its own extension.
    ext_by_platform = {
        'Windows': '.lnk',      # Windows shortcuts are typically .url or .lnk files
        'Darwin': '.webloc',    # macOS's shortcuts are .webloc files
        'Linux': '.desktop',    # Linux (Ubuntu, etc.) shortcuts are typically .desktop files
    }
    vm_os = env.vm_platform
    if vm_os not in ext_by_platform:
        logger.error(f"Unsupported operating system: {vm_os}")
        return []
    wanted_ext = ext_by_platform[vm_os]

    # List the desktop folder and keep only entries with the shortcut extension.
    desktop_dir = env.controller.get_vm_desktop_path()
    tree = env.controller.get_vm_directory_tree(desktop_dir)
    shortcut_names = (entry['name'] for entry in tree['children']
                      if entry['name'].endswith(wanted_ext))

    # Resolve each shortcut's absolute path inside the VM, then fetch its bytes.
    contents = {}
    for name in shortcut_names:
        resolved_path = env.controller.execute_python_command(
            f"import os; print(os.path.join(os.path.expanduser('~'), 'Desktop', '{name}'))")[
            'output'].strip()
        contents[name] = env.controller.get_file(resolved_path).decode('utf-8')

    return contents
|
|
884
|
+
|
|
885
|
+
|
|
886
|
+
def get_number_of_search_results(env, config: Dict[str, str]):
    """Open a search URL in the VM's Chrome and count elements matching a selector.

    NOTE(review): the url and selector are hard-coded placeholders (see todo);
    '.search-result' does not match real Google result markup — confirm before use.
    """
    # todo: move into the config file
    url, result_selector = "https://google.com/search?q=query", '.search-result'
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port

    remote_debugging_url = f"http://{host}:{port}"
    with sync_playwright() as p:
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance
            platform.machine()
            if "arm" in platform.machine():
                # start a new browser instance if the connection fails
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            headers = {"Content-Type": "application/json"}
            requests.post("http://" + host + ":" + server_port + "/setup" + "/launch", headers=headers, data=payload)
            time.sleep(5)  # give the freshly launched browser time to open the CDP port
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        page = browser.new_page()
        page.goto(url)
        search_results = page.query_selector_all(result_selector)
        actual_count = len(search_results)
        browser.close()

        return actual_count
|
|
923
|
+
|
|
924
|
+
|
|
925
|
+
def get_googledrive_file(env, config: Dict[str, Any]) -> str:
    """ Get the desired file from Google Drive based on config, return the downloaded local filepath.
    @args: keys in config dict
        settings_file(str): target filepath to the settings file for Google Drive authentication, default is 'evaluation_examples/settings/googledrive/settings.yml'
        query/path[_list](Union[str, List[str]]): the query or path [list] to the file(s) on Google Drive. To retrieve the file, we provide multiple key options to specify the filepath on drive in config dict:
            1) query: a list of queries to search the file, each query is a string that follows the format of Google Drive search query. The documentation is available here: (support more complex search but too complicated to use)
                https://developers.google.com/drive/api/guides/search-files?hl=en
            2) path: a str list pointing to a file path on googledrive, e.g., 'folder/subfolder/filename.txt' ->
                config contain one key-value pair "path": ['folder', 'subfolder', 'filename.txt']
            3) query_list: query extends to list to download multiple files
            4) path_list: path extends to list to download multiple files, e.g.,
                "path_list": [['folder', 'subfolder', 'filename1.txt'], ['folder', 'subfolder', 'filename2.txt']]
    @return:
        dest(Union[List[str], str]): target file name or list. If *_list is used in input config, dest should also be a list of the same length. Return the downloaded local filepath.
    """
    settings_file = config.get('settings_file', 'evaluation_examples/settings/googledrive/settings.yml')
    auth = GoogleAuth(settings_file=settings_file)
    drive = GoogleDrive(auth)

    def get_single_file(_query, _path):
        # Walk the query chain down from the drive root: each query narrows one
        # folder level; the last query matches the target file itself.
        parent_id = 'root'
        try:
            for q in _query:
                search = f'( {q} ) and "{parent_id}" in parents'
                filelist: GoogleDriveFileList = drive.ListFile({'q': search}).GetList()
                if len(filelist) == 0:  # target file not found
                    return None
                file: GoogleDriveFile = filelist[0]  # HACK: if multiple candidates, just use the first one
                parent_id = file['id']

            file.GetContentFile(_path, mimetype=file['mimeType'])
        except Exception as e:
            # BUGFIX: the original passed `e` as a lazy logging argument with no
            # %s placeholder in the message, which triggers a logging formatting
            # error and hides the actual exception. Interpolate it explicitly.
            logger.info('[ERROR]: Failed to download the file from Google Drive: %s', e)
            return None
        return _path

    if 'query' in config:
        return get_single_file(config['query'], os.path.join(env.cache_dir, config['dest']))
    elif 'path' in config:
        # Every path segment except the last must be a folder; the last is the file.
        query = [f"title = '{fp}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false" if idx < len(
            config['path']) - 1
                 else f"title = '{fp}' and trashed = false" for idx, fp in enumerate(config['path'])]
        return get_single_file(query, os.path.join(env.cache_dir, config['dest']))
    elif 'query_list' in config:
        _path_list = []
        assert len(config['query_list']) == len(config['dest'])
        for idx, query in enumerate(config['query_list']):
            dest = config['dest'][idx]
            _path_list.append(get_single_file(query, os.path.join(env.cache_dir, dest)))
        return _path_list
    else:  # path_list in config
        _path_list = []
        assert len(config['path_list']) == len(config['dest'])
        for idx, path in enumerate(config['path_list']):
            query = [
                f"title = '{fp}' and mimeType = 'application/vnd.google-apps.folder' and trashed = false" if jdx < len(
                    path) - 1
                else f"title = '{fp}' and trashed = false" for jdx, fp in enumerate(path)]
            dest = config['dest'][idx]
            _path_list.append(get_single_file(query, os.path.join(env.cache_dir, dest)))
        return _path_list
|
|
986
|
+
|
|
987
|
+
|
|
988
|
+
def get_enable_do_not_track(env, config: Dict[str, str]):
    """Report whether Chrome's "Do Not Track" setting is enabled in the VM.

    Locates the Chrome/Chromium Preferences JSON for the VM's platform, reads
    it, and returns the string "true" or "false" ("false" on any read error).

    Raises:
        Exception: if env.vm_platform is not Windows/Darwin/Linux.
    """
    vm_os = env.vm_platform

    # Resolve the Preferences file location for this platform; ARM Linux hosts
    # run the snap-packaged Chromium instead of google-chrome.
    if vm_os == 'Windows':
        prefs_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                            'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
    elif vm_os == 'Darwin':
        prefs_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
            'output'].strip()
    elif vm_os == 'Linux':
        if "arm" in platform.machine():
            prefs_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))")[
                'output'].strip()
        else:
            prefs_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        prefs = json.loads(env.controller.get_file(prefs_path))
        # A missing key yields the falsy default {} and is reported as "false".
        dnt_enabled = prefs.get('enable_do_not_track', {})
        return "true" if dnt_enabled else "false"
    except Exception as e:
        logger.error(f"Error: {e}")
        return "false"
|
|
1019
|
+
|
|
1020
|
+
|
|
1021
|
+
def get_enable_enhanced_safety_browsing(env, config: Dict[str, str]):
    """Report whether Chrome's Enhanced Safe Browsing is enabled in the VM.

    Reads the platform-specific Chrome/Chromium Preferences JSON and inspects
    safebrowsing.enhanced. Returns "true" or "false" ("false" on read error).

    Raises:
        Exception: if env.vm_platform is not Windows/Darwin/Linux.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                            'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
    elif os_type == 'Darwin':
        preference_file_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM hosts run the snap-packaged Chromium instead of google-chrome.
        if "arm" in platform.machine():
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))")[
                'output'].strip()
        else:
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        content = env.controller.get_file(preference_file_path)
        data = json.loads(content)

        # Missing keys yield the falsy default {} -> reported as "false".
        enhanced_enabled = data.get('safebrowsing', {}).get('enhanced', {})
        return "true" if enhanced_enabled else "false"
    except Exception as e:
        logger.error(f"Error: {e}")
        # BUGFIX: used to return "Google" (copy-paste remnant from a search-engine
        # getter); this function's callers compare against "true"/"false".
        return "false"
|
|
1052
|
+
|
|
1053
|
+
|
|
1054
|
+
def get_new_startup_page(env, config: Dict[str, str]):
    """Report whether Chrome is configured to open a fresh new-tab page on startup.

    Reads the platform-specific Preferences JSON. Returns "true" when the file
    has no 'session' section (fresh-start mode) or when
    session.restore_on_startup == 5 (open the New Tab page); otherwise "false".

    Raises:
        Exception: if env.vm_platform is not Windows/Darwin/Linux.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                            'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
    elif os_type == 'Darwin':
        preference_file_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM hosts run the snap-packaged Chromium instead of google-chrome.
        if "arm" in platform.machine():
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))")[
                'output'].strip()
        else:
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        content = env.controller.get_file(preference_file_path)
        data = json.loads(content)

        # If data has no 'session' key, Chrome is in fresh-start mode, which is a true state;
        # otherwise check the code number in 'restore_on_startup' (5 = open New Tab page).
        if "session" not in data.keys():
            return "true"
        else:
            restore_code = data.get('session', {}).get('restore_on_startup', {})  # int, needs to be 5
            return "true" if restore_code == 5 else "false"
    except Exception as e:
        logger.error(f"Error: {e}")
        # BUGFIX: used to return "Google" (copy-paste remnant); callers compare
        # against "true"/"false".
        return "false"
|
|
1090
|
+
|
|
1091
|
+
|
|
1092
|
+
def get_find_unpacked_extension_path(env, config: Dict[str, str]):
    """Return the install paths of all Chrome extensions recorded in Preferences.

    Preferences stores the path of every installed extension; we return them
    all and let the metric find the one matching the targeted extension path.
    Returns a list of path strings ([] on read error).

    Raises:
        Exception: if env.vm_platform is not Windows/Darwin/Linux.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                            'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
    elif os_type == 'Darwin':
        preference_file_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM hosts run the snap-packaged Chromium instead of google-chrome.
        if "arm" in platform.machine():
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))")[
                'output'].strip()
        else:
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        content = env.controller.get_file(preference_file_path)
        data = json.loads(content)
        all_extensions = data.get('extensions', {}).get('settings', {})
        # Loop variable renamed from `id` to avoid shadowing the builtin.
        return [all_extensions[ext_id]["path"] for ext_id in all_extensions.keys()]
    except Exception as e:
        logger.error(f"Error: {e}")
        # BUGFIX: used to return the string "Google" where the success path
        # returns a list; keep the return type consistent for callers.
        return []
|
|
1127
|
+
|
|
1128
|
+
|
|
1129
|
+
def get_find_installed_extension_name(env, config: Dict[str, str]):
    """Return the manifest names of all Chrome extensions recorded in Preferences.

    Preferences stores every installed extension's manifest; we return all the
    names and let the metric find the one matching the targeted extension.
    Returns a list of name strings ([] on read error).

    Raises:
        Exception: if env.vm_platform is not Windows/Darwin/Linux.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                            'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
    elif os_type == 'Darwin':
        preference_file_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM hosts run the snap-packaged Chromium instead of google-chrome.
        if "arm" in platform.machine():
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))")[
                'output'].strip()
        else:
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        content = env.controller.get_file(preference_file_path)
        data = json.loads(content)
        all_extensions = data.get('extensions', {}).get('settings', {})
        # Loop variable renamed from `id` to avoid shadowing the builtin.
        return [all_extensions[ext_id]["manifest"]["name"] for ext_id in all_extensions.keys()]
    except Exception as e:
        logger.error(f"Error: {e}")
        # BUGFIX: used to return the string "Google" where the success path
        # returns a list; keep the return type consistent for callers.
        return []
|
|
1164
|
+
|
|
1165
|
+
|
|
1166
|
+
def get_data_delete_automacally(env, config: Dict[str, str]):
    """
    This function is used to check the "auto-delete" mode of Chromium.

    Returns "true" when profile.default_content_setting_values is present in
    the Preferences file, "false" otherwise (including on read error).

    Raises:
        Exception: if env.vm_platform is not Windows/Darwin/Linux.
    """
    os_type = env.vm_platform
    if os_type == 'Windows':
        preference_file_path = env.controller.execute_python_command("""import os; print(os.path.join(os.getenv('LOCALAPPDATA'),
                                            'Google\\Chrome\\User Data\\Default\\Preferences'))""")['output'].strip()
    elif os_type == 'Darwin':
        preference_file_path = env.controller.execute_python_command(
            "import os; print(os.path.join(os.getenv('HOME'), 'Library/Application Support/Google/Chrome/Default/Preferences'))")[
            'output'].strip()
    elif os_type == 'Linux':
        # ARM hosts run the snap-packaged Chromium instead of google-chrome.
        if "arm" in platform.machine():
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), 'snap/chromium/common/chromium/Default/Preferences'))")[
                'output'].strip()
        else:
            preference_file_path = env.controller.execute_python_command(
                "import os; print(os.path.join(os.getenv('HOME'), '.config/google-chrome/Default/Preferences'))")[
                'output'].strip()
    else:
        raise Exception('Unsupported operating system')

    try:
        content = env.controller.get_file(preference_file_path)
        data = json.loads(content)
        data_delete_state = data["profile"].get("default_content_setting_values", None)
        return "true" if data_delete_state is not None else "false"
    except Exception as e:
        logger.error(f"Error: {e}")
        # BUGFIX: used to return "Google" (copy-paste remnant); callers compare
        # against "true"/"false".
        return "false"
|
|
1198
|
+
|
|
1199
|
+
|
|
1200
|
+
def get_active_tab_html_parse(env, config: Dict[str, Any]):
    """
    This function is used to get the specific element's text content from the active tab's html.
    config:
        Dict[str, str]{
            # Keys used in get_active_url_from_accessTree: "xpath", "selectors"
            'category':
                choose from ["class", "label", "xpath", "input"], used to indicate how to find the element
            'labelObject':
                only exists when category is "label",
                a dict like { "labelSelector": "the key you want to store the text content of this label's element"}
            'class_singleObject':
                only exists when category is "class", a dict with keys as the class name,
                like { "class name" : "the key you want to store the text content of this element" }
            'class_multiObject':
                only exists when category is "class", used for elements with same class name.
                Two layer of dict, like
                ( {
                    "class name": {
                        "rank in this class" : "the key you want to store the text content of this element"
                        ...
                    }
                } )
            'xpathObject':
                only exists when category is "xpath", a dict with keys as the xpath,
                like { "full xpath" : "the key you want to store the text content of this element" }
            'inputObject':
                only exists when category is "input",
                a dict with keys as the input element's xpath, like { "full xpath" : "the key you want to store the text content of this element" }
    }
    """
    # Resolve which tab to scrape via the accessibility tree; bail out early
    # if no usable URL string comes back.
    active_tab_url = get_active_url_from_accessTree(env, config)
    logger.info(f"[DEBUG] get_active_url_from_accessTree returned: {active_tab_url} (type: {type(active_tab_url)})")
    if not isinstance(active_tab_url, str):
        logger.error(f"[DEBUG] active_tab_url is not a string, got {type(active_tab_url)}: {active_tab_url}")
        return None
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port

    remote_debugging_url = f"http://{host}:{port}"

    # DEBUG: Add logging for configuration
    logger.info(f"[DEBUG] get_active_tab_html_parse called with config: {config}")

    with sync_playwright() as p:
        # connect to remote Chrome instance
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance
            # NOTE(review): this bare platform.machine() call discards its
            # result — looks like a leftover; kept byte-identical here.
            platform.machine()
            if "arm" in platform.machine():
                # start a new browser instance if the connection fails
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            headers = {"Content-Type": "application/json"}
            requests.post("http://" + host + ":" + str(server_port) + "/setup" + "/launch", headers=headers, data=payload)
            time.sleep(5)
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        # Find the page whose URL matches the accessibility-tree URL.
        target_page = None
        for context in browser.contexts:
            for page in context.pages:
                page.wait_for_load_state("networkidle")
                # the accTree and playwright can get encoding(percent-encoding) characters, we need to convert them to normal characters
                # Normalize URLs by removing trailing slashes and decoding percent-encoding
                def normalize_url(url):
                    return unquote(url).rstrip('/')

                if normalize_url(page.url) == normalize_url(active_tab_url):
                    target_page = page
                    print("\33[32mtartget page url: ", target_page.url, "\33[0m")
                    print("\33[32mtartget page title: ", target_page.title(), "\33[0m")
                    break
        if target_page is None:
            logger.error("[DEBUG] Could not find target tab matching URL. Available tabs:")
            for context in browser.contexts:
                for page in context.pages:
                    logger.error(f"[DEBUG] - Tab URL: {page.url}")
            logger.error(f"[DEBUG] Expected URL: {active_tab_url}")
            return {}

        return_json = {}

        # --- helpers (closures over target_page) -------------------------

        def safely_get_text_content(selector):
            # Text of every element matching the CSS selector, stripped.
            elements = target_page.query_selector_all(selector)
            return [element.text_content().strip() for element in elements if element]

        def safely_get_direct_text_nodes_playwright(selector):
            """
            Extract all direct text node contents under the specified selector element (excluding text inside child div, span, etc.).
            Returns a list of lists, each sublist contains the direct text nodes of one element.
            Suitable for structures like: <div>SEA<div class="aura-separator"></div>NYC</div>
            """
            elements = target_page.query_selector_all(selector)
            results = []
            for element in elements:
                texts = element.evaluate('''
                    (node) => Array.from(node.childNodes)
                        .filter(n => n.nodeType === Node.TEXT_NODE)
                        .map(n => n.textContent.trim())
                        .filter(Boolean)
                ''')
                results.append(texts)
            # Safety check: return empty list if no elements found
            return results[0] if results else []

        def safely_get_direct_li_playwright(selector):
            # Inner text of the <span> inside each li.catAllProducts child.
            elements = target_page.query_selector_all(selector + " li.catAllProducts")
            return [element.query_selector('span').inner_text().strip() for element in elements if element.query_selector('span')]

        def safely_get_only_child_text_content(selector):
            # Text of the <h3> child of each matching element.
            elements = target_page.query_selector_all(selector)
            return [element.query_selector('h3').text_content().strip() for element in elements if element.query_selector('h3')]

        # --- category dispatch -------------------------------------------

        if config["category"] == "class":
            # Indexed extraction: {class: {rank: result_key}}.
            class_multiObject = config.get("class_multiObject", {})
            for class_name, object_dict in class_multiObject.items():
                elements_texts = safely_get_text_content("." + class_name)
                for order_key, key in object_dict.items():
                    index = int(order_key)
                    if len(elements_texts) > index:
                        return_json[key] = elements_texts[index]
                    else:
                        logger.warning(f"[DEBUG] Element at index {index} not found for class '{class_name}'. Found {len(elements_texts)} elements.")
                        return_json[key] = ""  # Return empty string instead of None

            # Same shape, but reads direct text nodes only.
            class_multiObject_child = config.get("class_multiObject_child", {})
            for class_name, object_dict in class_multiObject_child.items():
                elements_texts = safely_get_direct_text_nodes_playwright("." + class_name)
                for order_key, key in object_dict.items():
                    index = int(order_key)
                    if len(elements_texts) > index:
                        return_json[key] = elements_texts[index]
                    else:
                        logger.warning(f"[DEBUG] Child element at index {index} not found for class '{class_name}'. Found {len(elements_texts)} elements.")
                        return_json[key] = ""  # Return empty string instead of None

            # Same shape, but reads the <h3> child's text.
            class_multiObject_only_child = config.get("class_multiObject_only_child", {})
            for class_name, object_dict in class_multiObject_only_child.items():
                elements_texts = safely_get_only_child_text_content("." + class_name)
                for order_key, key in object_dict.items():
                    index = int(order_key)
                    if len(elements_texts) > index:
                        return_json[key] = elements_texts[index]
                    else:
                        logger.warning(f"[DEBUG] Only child element at index {index} not found for class '{class_name}'. Found {len(elements_texts)} elements.")
                        return_json[key] = ""  # Return empty string instead of None

            # Existence checks: each listed text becomes a boolean; the
            # sentinel "is_other_exist" flags unexpected extra elements.
            class_multiObject_search_exist = config.get("class_multiObject_search_exist", {})
            for class_name, object_list in class_multiObject_search_exist.items():
                elements_texts = safely_get_text_content("." + class_name)
                logger.info(f"[DEBUG] Found elements with class '{class_name}': {elements_texts}")
                logger.info(f"[DEBUG] Expected elements: {[obj for obj in object_list if obj != 'is_other_exist']}")

                for each_object in object_list:
                    if each_object == "is_other_exist":
                        continue
                    if each_object in elements_texts:
                        return_json[each_object] = True
                    else:
                        return_json[each_object] = False
                if "is_other_exist" in object_list:
                    extra_elements = []
                    for each_element in elements_texts:
                        if each_element not in object_list:
                            extra_elements.append(each_element)
                            return_json["is_other_exist"] = True
                    if extra_elements:
                        logger.warning(f"[DEBUG] Found unexpected elements not in expected list: {extra_elements}")
                    else:
                        logger.info(f"[DEBUG] No unexpected elements found")
                if "is_other_exist" not in return_json.keys():
                    return_json["is_other_exist"] = False

            # Single extraction: first match per class name.
            class_singleObject = config.get("class_singleObject", {})
            for class_name, key in class_singleObject.items():
                element_text = safely_get_text_content("." + class_name)
                logger.info(f"[DEBUG] Class '{class_name}' found {len(element_text)} elements")
                if element_text:
                    return_json[key] = element_text[0]
                    logger.info(f"[DEBUG] Class extraction for key '{key}': '{element_text[0]}'")
                else:
                    logger.warning(f"[DEBUG] No elements found for class: {class_name}")
                    return_json[key] = ""  # Return empty string instead of None

        elif config['category'] == "label":
            # Assuming get_by_label is a custom function or part of the framework being used
            labelObject = config.get("labelObject", {})
            for labelSelector, key in labelObject.items():
                text = target_page.locator(f"text={labelSelector}").first.text_content().strip()
                if text:
                    return_json[key] = text

        elif config["category"] == "xpath":
            xpathObject = config.get("xpathObject", {})
            logger.info(f"[DEBUG] Processing xpath category with xpathObject: {xpathObject}")

            for xpath, key in xpathObject.items():
                logger.info(f"[DEBUG] Processing xpath: {xpath} -> key: {key}")
                elements = target_page.locator(f"xpath={xpath}")
                element_count = elements.count()
                logger.info(f"[DEBUG] Found {element_count} elements for xpath: {xpath}")

                if element_count > 0:
                    try:
                        text_content = elements.first.text_content()
                        if text_content is not None:
                            text_content = text_content.strip()
                        logger.info(f"[DEBUG] Raw text content for key '{key}': '{text_content}' (type: {type(text_content)})")

                        # Handle the empty-text-content case
                        if text_content is None or text_content == "":
                            logger.warning(f"[DEBUG] Element found but text content is empty for key '{key}' xpath: {xpath}")
                            # Try to gather more diagnostic information
                            element_html = elements.first.inner_html()
                            element_text = elements.first.inner_text()
                            logger.info(f"[DEBUG] Element innerHTML: '{element_html[:100]}...' innerText: '{element_text}'")

                        return_json[key] = text_content if text_content else ""
                        logger.info(f"[DEBUG] Final value for key '{key}': '{return_json[key]}'")
                    except Exception as e:
                        logger.error(f"[DEBUG] Error extracting text from element for key '{key}': {e}")
                        return_json[key] = ""
                else:
                    logger.warning(f"[DEBUG] No elements found for xpath: {xpath}")
                    # Try some fallback xpath lookup strategies
                    try:
                        # Retry without the explicit xpath= prefix
                        fallback_elements = target_page.locator(xpath)
                        fallback_count = fallback_elements.count()
                        logger.info(f"[DEBUG] Fallback search (without xpath prefix) found {fallback_count} elements")
                        if fallback_count > 0:
                            text_content = fallback_elements.first.text_content()
                            if text_content:
                                text_content = text_content.strip()
                            return_json[key] = text_content if text_content else ""
                            logger.info(f"[DEBUG] Fallback extraction successful for key '{key}': '{return_json[key]}'")
                        else:
                            return_json[key] = ""
                    except Exception as e:
                        logger.info(f"[DEBUG] Fallback xpath search also failed: {e}")
                        return_json[key] = ""

        elif config["category"] == "input":
            # Read the current value of <input> elements located by xpath.
            inputObjects = config.get("inputObject", {})
            logger.info(f"[DEBUG] Processing input category with inputObjects: {inputObjects}")
            for xpath, key in inputObjects.items():
                logger.info(f"[DEBUG] Processing input xpath: {xpath} -> key: {key}")
                inputs = target_page.locator(f"xpath={xpath}")
                input_count = inputs.count()
                logger.info(f"[DEBUG] Found {input_count} input elements for xpath: {xpath}")
                if input_count > 0:
                    try:
                        input_value = inputs.first.input_value()
                        if input_value:
                            input_value = input_value.strip()
                        return_json[key] = input_value if input_value else ""
                        logger.info(f"[DEBUG] Input value for key '{key}': '{return_json[key]}'")
                    except Exception as e:
                        logger.error(f"[DEBUG] Error getting input value for key '{key}': {e}")
                        return_json[key] = ""
                else:
                    logger.warning(f"[DEBUG] No input elements found for xpath: {xpath}")
                    return_json[key] = ""

        elif config["category"] == "class&url":
            # Case-insensitive membership of expected texts among elements.
            class_multiObject = config.get("class_multiObject", {})
            for class_name, object_list in class_multiObject.items():
                elements_texts = safely_get_text_content("." + class_name)
                for each_key in object_list:
                    if any(each_key.lower() == text.lower() for text in elements_texts):
                        return_json[each_key.lower()] = True

                for each_key in elements_texts:
                    # each_key.lower() not in object_list.lower():
                    if all(each_key.lower() not in item.lower() for item in object_list):
                        return_json["is_other_exist"] = True
                        break
                if "is_other_exist" not in return_json.keys():
                    return_json["is_other_exist"] = False

            # Same check, but sourced from li.catAllProducts <span> texts.
            class_multiObject_li = config.get("class_multiObject_li", {})
            for class_name, object_list in class_multiObject_li.items():
                elements_texts = safely_get_direct_li_playwright("." + class_name)
                for each_key in object_list:
                    if any(each_key.lower() == text.lower() for text in elements_texts):
                        return_json[each_key.lower()] = True

                for each_key in elements_texts:
                    # each_key.lower() not in object_list.lower():
                    if all(each_key.lower() not in item.lower() for item in object_list):
                        return_json["is_other_exist"] = True
                        break
                if "is_other_exist" not in return_json.keys():
                    return_json["is_other_exist"] = False

            # Substring checks against the page URL; first writer wins.
            url_include_expected = config.get("url_include_expected", [])
            for key in url_include_expected:
                if key.lower() in target_page.url.lower():
                    if key.lower() not in return_json.keys():
                        return_json[key.lower()] = True
                else:
                    if key.lower() not in return_json.keys():
                        return_json[key.lower()] = False

            # As above, but the result key differs from the searched substring.
            url_include_expected_multichoice = config.get("url_include_expected_multichoice", {})
            for key, value in url_include_expected_multichoice.items():
                if key.lower() in target_page.url.lower():
                    if value.lower() not in return_json.keys():
                        return_json[value.lower()] = True
                else:
                    if value.lower() not in return_json.keys():
                        return_json[value.lower()] = False

        browser.close()

        # DEBUG: Add logging for final result and check for None values
        logger.info(f"[DEBUG] get_active_tab_html_parse final result: {return_json}")

        # Check whether any values are None
        none_keys = [key for key, value in return_json.items() if value is None]
        if none_keys:
            logger.warning(f"[DEBUG] Found None values for keys: {none_keys}")

        # Check whether all expected keys are present
        if config["category"] == "xpath":
            expected_keys = set(config.get("xpathObject", {}).values())
            actual_keys = set(return_json.keys())
            missing_keys = expected_keys - actual_keys
            if missing_keys:
                logger.warning(f"[DEBUG] Missing expected keys: {missing_keys}")

        return return_json
|
|
1543
|
+
|
|
1544
|
+
|
|
1545
|
+
def get_gotoRecreationPage_and_get_html_content(env, config: Dict[str, Any]):
    """
    Drive www.recreation.gov in the remote browser and scrape expected text.

    Especially used for www.recreation.gov examples: searches for "Diamond",
    opens the highlighted search result in a popup, clicks through to the
    next-available date, then reads the text of the element described by
    config (``"selector" == "class"``, with optional ``"order"`` index).

    Args:
        env: desktop environment handle (vm_ip, chromium_port, server_port).
        config: {"selector": "class", "class": <name>, "order": <index, optional>}

    Returns:
        {"expected": {<class name>: <text or "__EVALUATION_FAILED__">}}
    """
    host = env.vm_ip
    port = env.chromium_port  # fixme: this port is hard-coded, need to be changed from config file
    server_port = env.server_port

    remote_debugging_url = f"http://{host}:{port}"
    with sync_playwright() as p:
        try:
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        except Exception as e:
            # If the connection fails, start a new browser instance.
            # (ARM images ship snap Chromium; x86 images ship google-chrome.)
            if "arm" in platform.machine():
                payload = json.dumps({"command": [
                    "chromium",
                    "--remote-debugging-port=1337"
                ], "shell": False})
            else:
                payload = json.dumps({"command": [
                    "google-chrome",
                    "--remote-debugging-port=1337"
                ], "shell": False})

            headers = {"Content-Type": "application/json"}
            # BUGFIX: server_port was concatenated without str(); the sibling
            # helpers in this file wrap it, and a non-str port raised TypeError.
            requests.post("http://" + host + ":" + str(server_port) + "/setup" + "/launch", headers=headers, data=payload)
            time.sleep(5)
            browser = p.chromium.connect_over_cdp(remote_debugging_url)
        page = browser.new_page()
        page.goto("https://www.recreation.gov/")
        page.fill("input#hero-search-input", "Diamond")
        page.click("button.nav-search-button")
        print("after first click")
        time.sleep(10)
        # Clicking the highlighted result opens the campground in a popup.
        with page.expect_popup() as popup_info:
            page.click(".search-result-highlight--success")
            time.sleep(30)
        print("after second click")
        newpage = popup_info.value
        newpage.wait_for_load_state()
        print("go to newpage: ")
        print(newpage.title())
        time.sleep(2)

        # Try to click the button with better error handling and longer timeout
        try:
            # Wait for the button to be available with a longer timeout
            newpage.wait_for_selector("button.next-available", timeout=60000)
            newpage.click("button.next-available", timeout=60000)
            print("after third click")
        except Exception as e:
            logger.error(f"Failed to click 'next-available' button: {e}")
            # Try alternative selectors if the main one fails
            try:
                newpage.wait_for_selector("button[class*='next']", timeout=30000)
                newpage.click("button[class*='next']", timeout=30000)
                print("after third click (alternative selector)")
            except Exception as e2:
                logger.error(f"Alternative selector also failed: {e2}")
                # Continue execution even if button click fails
                print("Continuing without clicking next-available button")

        return_json = {}
        return_json["expected"] = {}
        # find the text of elements in html with specific class name
        if config["selector"] == "class":
            if "order" in config.keys():
                className = config["class"]
                try:
                    elements = newpage.query_selector_all("." + className)
                    order_index = int(config["order"])
                    if len(elements) > order_index:
                        return_json["expected"][className] = elements[order_index].text_content().strip()
                    else:
                        logger.warning(f"Element with class '{className}' at index {order_index} not found. Found {len(elements)} elements.")
                        # For expected values, if we can't find the element, the evaluation cannot proceed
                        # Return a structure that indicates failure to get expected value
                        return_json["expected"][className] = "__EVALUATION_FAILED__"
                except Exception as e:
                    logger.error(f"Error accessing element with class '{className}': {e}")
                    return_json["expected"][className] = "__EVALUATION_FAILED__"
            else:
                className = config["class"]
                try:
                    element = newpage.query_selector("." + className)
                    if element:
                        return_json["expected"][className] = element.text_content().strip()
                    else:
                        logger.warning(f"Element with class '{className}' not found.")
                        return_json["expected"][className] = "__EVALUATION_FAILED__"
                except Exception as e:
                    logger.error(f"Error accessing element with class '{className}': {e}")
                    return_json["expected"][className] = "__EVALUATION_FAILED__"
        browser.close()
        return return_json
|
|
1644
|
+
|
|
1645
|
+
|
|
1646
|
+
def get_active_tab_url_parse(env, config: Dict[str, Any]):
    """
    Pull selected query parameters out of the active tab's URL.

    config:
        'parse_keys': must exist,
            a list of query-parameter names to extract from the URL.
        'replace': optional,
            a dict mapping an original key to the new key it should be
            reported under ( { "original key": "new key" } ).
        'split_list': optional,
            when truthy, each extracted value is split on ',' into a list.
    """
    current_url = get_active_url_from_accessTree(env, config)
    if current_url is None:
        return None

    # Decompose the URL and read its query string as a multi-value mapping.
    query_dict = parse_qs(urlparse(current_url).query)

    # Keep the first value for every requested key ('' when absent).
    wanted = {name: query_dict.get(name, [''])[0] for name in config["parse_keys"]}

    # Optionally rename keys while keeping their values unchanged.
    if "replace" in config:
        for old_name, new_name in config["replace"].items():
            wanted[new_name] = wanted.pop(old_name)

    # Optionally expand comma-separated values into lists.
    if config.get("split_list", False):
        wanted = {name: text.split(',') for name, text in wanted.items()}
    return wanted
|
|
1677
|
+
|
|
1678
|
+
|
|
1679
|
+
def get_url_dashPart(env, config: Dict[str, str]):
    """
    Extract one of the slash-separated parts of the active tab's URL.

    config:
        'partIndex': must exist,
            the index of the slash-separated part to extract, starting from 0.
        'needDeleteId': optional,
            a boolean; when true, strip a trailing query string from the part
            (an example: "/part-you-want?id=xxx" -> "part-you-want").
            Defaults to False when absent.
        'returnType': must exist,
            "string" to return the raw part, or "json" to return
            {config["key"]: part} (requires 'key' in config).

    Returns:
        The extracted part (or dict), or None when the active tab URL
        cannot be determined. An unrecognized 'returnType' falls through
        and implicitly returns None.
    """
    active_tab_url = get_active_url_from_accessTree(env, config)
    if active_tab_url is None:
        return None

    # Pick the requested slash-separated component of the URL.
    dash_part = active_tab_url.split("/")[config["partIndex"]]
    # BUGFIX: 'needDeleteId' is documented as optional, but the original code
    # indexed config["needDeleteId"] and raised KeyError when it was absent.
    if config.get("needDeleteId", False):
        dash_part = dash_part.split("?")[0]
    if config["returnType"] == "string":
        return dash_part
    elif config["returnType"] == "json":
        return {config["key"]: dash_part}
|
|
1705
|
+
|
|
1706
|
+
|
|
1707
|
+
def get_macys_product_url_parse(env, config: Dict[str, str]):
    """
    Parse Macy's product url path, extract:
    - mens_clothing: True if 'mens-clothing' in path, else None
    - shirts: True if the path (or a 'Product_department' facet value) indicates shirts, else None
    - short_sleeve: True if the path (or a 'Sleeve_length' facet value) indicates short sleeves, else None
    - Men_regular_size_t, Price_discount_range (as list), Sleeve_length: facet values, None if not found
    All fields are None if not found for robustness.
    """
    from urllib.parse import urlparse, unquote

    # 1. Resolve and parse the active tab's URL.
    active_tab_url = get_active_url_from_accessTree(env, config)
    if active_tab_url is None:
        return None
    parsed = urlparse(active_tab_url)
    path = unquote(parsed.path)
    # CLEANUP: the original initialized `result = {}` twice (once before the
    # URL was even fetched); the dead first assignment has been removed.
    result = {}
    # mens_clothing flag comes straight from the path.
    result['mens_clothing'] = True if 'mens-clothing' in path else None
    # Macy's encodes facet filters as ".../Key1,Key2/Value1,Value2/..." —
    # a comma-separated key segment followed by a matching value segment.
    path_parts = path.strip('/').split('/')
    key_value_json = {}
    shirts_flag = False
    short_sleeve_flag = False  # Initialize short_sleeve_flag to avoid UnboundLocalError
    if "shirts" in path:
        shirts_flag = True
    if "short-sleeve" in path:
        short_sleeve_flag = True
    for i in range(len(path_parts) - 1):
        if ',' in path_parts[i] and ',' in path_parts[i + 1]:
            keys = [k.strip() for k in path_parts[i].split(',')]
            values = [v.strip() for v in path_parts[i + 1].split(',')]
            for k, v in zip(keys, values):
                if k == "Price_discount_range":
                    # Multiple discount choices are '|'-separated.
                    key_value_json[k] = [item.strip() for item in v.split('|')] if v else None
                else:
                    key_value_json[k] = v if v else None
                if k == 'Product_department' and (v == 'shirts' or v == 'Shirts' or v == 'Shirt'):
                    shirts_flag = True
                if k == 'Sleeve_length' and (v == 'short-sleeve' or v == 'Short Sleeve'):
                    short_sleeve_flag = True
            # Only the first key/value segment pair is considered.
            break
    for field in ['Men_regular_size_t', 'Price_discount_range']:
        if field not in key_value_json:
            key_value_json[field] = None
    result['shirts'] = shirts_flag if shirts_flag else None
    result['short_sleeve'] = short_sleeve_flag if short_sleeve_flag else None
    # Copy the requested parse_keys into the result, collapsing the discount
    # list into a binary "50% off or not" verdict.
    for key in config["parse_keys"]:
        if key in key_value_json:
            if key == "Price_discount_range":
                # Check if key_value_json[key] is not None before using 'in' operator
                if key_value_json[key] is not None and '50_PERCENT_ off & more' in key_value_json[key] and not '30_PERCENT_ off & more' in key_value_json[key] and not '20_PERCENT_ off & more' in key_value_json[key]:
                    result[key] = '50_PERCENT_ off & more'
                else:
                    result[key] = 'not_50_PERCENT_ off & more'
            else:
                result[key] = key_value_json[key]
    return result
|
|
1766
|
+
|
|
1767
|
+
|
|
1768
|
+
# Alias for backward compatibility - the old function name was too generic
|
|
1769
|
+
def get_url_path_parse(env, config: Dict[str, str]):
    """
    Backward-compatibility alias for get_macys_product_url_parse.

    The old, overly generic name is kept so existing configurations that
    still reference the "url_path_parse" type keep working.
    """
    result = get_macys_product_url_parse(env, config)
    return result
|