vibesurf-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of vibesurf might be problematic.
- vibe_surf/__init__.py +12 -0
- vibe_surf/_version.py +34 -0
- vibe_surf/agents/__init__.py +0 -0
- vibe_surf/agents/browser_use_agent.py +1106 -0
- vibe_surf/agents/prompts/__init__.py +1 -0
- vibe_surf/agents/prompts/vibe_surf_prompt.py +176 -0
- vibe_surf/agents/report_writer_agent.py +360 -0
- vibe_surf/agents/vibe_surf_agent.py +1632 -0
- vibe_surf/backend/__init__.py +0 -0
- vibe_surf/backend/api/__init__.py +3 -0
- vibe_surf/backend/api/activity.py +243 -0
- vibe_surf/backend/api/config.py +740 -0
- vibe_surf/backend/api/files.py +322 -0
- vibe_surf/backend/api/models.py +257 -0
- vibe_surf/backend/api/task.py +300 -0
- vibe_surf/backend/database/__init__.py +13 -0
- vibe_surf/backend/database/manager.py +129 -0
- vibe_surf/backend/database/models.py +164 -0
- vibe_surf/backend/database/queries.py +922 -0
- vibe_surf/backend/database/schemas.py +100 -0
- vibe_surf/backend/llm_config.py +182 -0
- vibe_surf/backend/main.py +137 -0
- vibe_surf/backend/migrations/__init__.py +16 -0
- vibe_surf/backend/migrations/init_db.py +303 -0
- vibe_surf/backend/migrations/seed_data.py +236 -0
- vibe_surf/backend/shared_state.py +601 -0
- vibe_surf/backend/utils/__init__.py +7 -0
- vibe_surf/backend/utils/encryption.py +164 -0
- vibe_surf/backend/utils/llm_factory.py +225 -0
- vibe_surf/browser/__init__.py +8 -0
- vibe_surf/browser/agen_browser_profile.py +130 -0
- vibe_surf/browser/agent_browser_session.py +416 -0
- vibe_surf/browser/browser_manager.py +296 -0
- vibe_surf/browser/utils.py +790 -0
- vibe_surf/browser/watchdogs/__init__.py +0 -0
- vibe_surf/browser/watchdogs/action_watchdog.py +291 -0
- vibe_surf/browser/watchdogs/dom_watchdog.py +954 -0
- vibe_surf/chrome_extension/background.js +558 -0
- vibe_surf/chrome_extension/config.js +48 -0
- vibe_surf/chrome_extension/content.js +284 -0
- vibe_surf/chrome_extension/dev-reload.js +47 -0
- vibe_surf/chrome_extension/icons/convert-svg.js +33 -0
- vibe_surf/chrome_extension/icons/logo-preview.html +187 -0
- vibe_surf/chrome_extension/icons/logo.png +0 -0
- vibe_surf/chrome_extension/manifest.json +53 -0
- vibe_surf/chrome_extension/popup.html +134 -0
- vibe_surf/chrome_extension/scripts/api-client.js +473 -0
- vibe_surf/chrome_extension/scripts/main.js +491 -0
- vibe_surf/chrome_extension/scripts/markdown-it.min.js +3 -0
- vibe_surf/chrome_extension/scripts/session-manager.js +599 -0
- vibe_surf/chrome_extension/scripts/ui-manager.js +3687 -0
- vibe_surf/chrome_extension/sidepanel.html +347 -0
- vibe_surf/chrome_extension/styles/animations.css +471 -0
- vibe_surf/chrome_extension/styles/components.css +670 -0
- vibe_surf/chrome_extension/styles/main.css +2307 -0
- vibe_surf/chrome_extension/styles/settings.css +1100 -0
- vibe_surf/cli.py +357 -0
- vibe_surf/controller/__init__.py +0 -0
- vibe_surf/controller/file_system.py +53 -0
- vibe_surf/controller/mcp_client.py +68 -0
- vibe_surf/controller/vibesurf_controller.py +616 -0
- vibe_surf/controller/views.py +37 -0
- vibe_surf/llm/__init__.py +21 -0
- vibe_surf/llm/openai_compatible.py +237 -0
- vibesurf-0.1.0.dist-info/METADATA +97 -0
- vibesurf-0.1.0.dist-info/RECORD +70 -0
- vibesurf-0.1.0.dist-info/WHEEL +5 -0
- vibesurf-0.1.0.dist-info/entry_points.txt +2 -0
- vibesurf-0.1.0.dist-info/licenses/LICENSE +201 -0
- vibesurf-0.1.0.dist-info/top_level.txt +1 -0

--- /dev/null
+++ vibe_surf/backend/utils/encryption.py
@@ -0,0 +1,164 @@
+"""
+Encryption utilities for VibeSurf Backend
+
+Uses machine MAC address for key derivation to encrypt sensitive data like API keys.
+"""
+
+import hashlib
+import uuid
+import base64
+from cryptography.fernet import Fernet
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+import logging
+
+logger = logging.getLogger(__name__)
+
+import psutil
+import logging
+
+
+def get_machine_id() -> str:
+    """Get unique machine identifier based on MAC address."""
+    try:
+        nics = psutil.net_if_addrs()
+
+        priority_interfaces = ['en0', 'eth0', 'en1']
+
+        for interface in priority_interfaces:
+            if interface in nics:
+                for addr in nics[interface]:
+                    if addr.family == psutil.AF_LINK:
+                        mac = addr.address
+                        if mac and mac != '00:00:00:00:00:00':
+                            return mac.replace(':', '').upper()
+
+        for interface, addrs in nics.items():
+            for addr in addrs:
+                if addr.family == psutil.AF_LINK:
+                    mac = addr.address
+                    if (mac and mac != '00:00:00:00:00:00' and
+                            not mac.startswith('02:') and
+                            not interface.startswith(('lo', 'docker', 'veth'))):
+                        return mac.replace(':', '').upper()
+
+    except Exception as e:
+        logging.warning(f"Could not get MAC address via psutil: {e}")
+
+    return "VIBESURF_WARMSHAO"
+
+
+def derive_key(machine_id: str, salt: bytes = None) -> bytes:
+    """Derive encryption key from machine ID."""
+    if salt is None:
+        # Use a fixed salt for consistency across sessions
+        salt = b'vibesurf_warmshao_2025'
+
+    # Convert machine_id to bytes
+    password = machine_id.encode('utf-8')
+
+    # Derive key using PBKDF2
+    kdf = PBKDF2HMAC(
+        algorithm=hashes.SHA256(),
+        length=32,
+        salt=salt,
+        iterations=100000,
+    )
+    key = base64.urlsafe_b64encode(kdf.derive(password))
+    return key
+
+def get_encryption_key() -> bytes:
+    """Get the encryption key for this machine."""
+    machine_id = get_machine_id()
+    return derive_key(machine_id)
+
+def encrypt_api_key(api_key: str) -> str:
+    """
+    Encrypt API key using machine-specific key.
+
+    Args:
+        api_key: Plain text API key
+
+    Returns:
+        str: Base64 encoded encrypted API key
+    """
+    if not api_key or api_key.strip() == "":
+        return ""
+
+    try:
+        key = get_encryption_key()
+        fernet = Fernet(key)
+        encrypted_data = fernet.encrypt(api_key.encode('utf-8'))
+        return base64.urlsafe_b64encode(encrypted_data).decode('utf-8')
+    except Exception as e:
+        logger.error(f"Failed to encrypt API key: {e}")
+        raise ValueError("Encryption failed")
+
+def decrypt_api_key(encrypted_api_key: str) -> str:
+    """
+    Decrypt API key using machine-specific key.
+
+    Args:
+        encrypted_api_key: Base64 encoded encrypted API key
+
+    Returns:
+        str: Decrypted API key
+    """
+    if not encrypted_api_key or encrypted_api_key.strip() == "":
+        return ""
+
+    try:
+        key = get_encryption_key()
+        fernet = Fernet(key)
+        encrypted_data = base64.urlsafe_b64decode(encrypted_api_key.encode('utf-8'))
+        decrypted_data = fernet.decrypt(encrypted_data)
+        return decrypted_data.decode('utf-8')
+    except Exception as e:
+        logger.error(f"Failed to decrypt API key: {e}")
+        raise ValueError("Decryption failed")
+
+def is_encrypted(value: str) -> bool:
+    """
+    Check if a value appears to be encrypted.
+
+    Args:
+        value: String to check
+
+    Returns:
+        bool: True if value appears encrypted
+    """
+    if not value:
+        return False
+
+    try:
+        # Try to decode as base64
+        base64.urlsafe_b64decode(value.encode('utf-8'))
+        # If it's base64 and contains the Fernet token prefix, likely encrypted
+        return len(value) > 50 and '=' in value
+    except:
+        return False
+
+# Test functions
+def test_encryption():
+    """Test encryption/decryption functionality."""
+    test_api_key = "sk-test123456789"
+
+    try:
+        # Test encryption
+        encrypted = encrypt_api_key(test_api_key)
+        print(f"Original: {test_api_key}")
+        print(f"Encrypted: {encrypted}")
+
+        # Test decryption
+        decrypted = decrypt_api_key(encrypted)
+        print(f"Decrypted: {decrypted}")
+
+        # Verify
+        assert test_api_key == decrypted, "Encryption/decryption failed"
+        print("✅ Encryption test passed")
+
+    except Exception as e:
+        print(f"❌ Encryption test failed: {e}")
+
+if __name__ == "__main__":
+    test_encryption()
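
Usage sketch, assuming the cryptography and psutil dependencies are available; the key is derived from the host MAC address (with a fixed salt and a hard-coded fallback ID), so a value encrypted on one machine generally cannot be decrypted on another:

    from vibe_surf.backend.utils.encryption import encrypt_api_key, decrypt_api_key, is_encrypted

    token = encrypt_api_key("sk-example-key")        # Fernet ciphertext, base64-wrapped a second time
    assert decrypt_api_key(token) == "sk-example-key"
    print(is_encrypted(token))                        # True: long urlsafe-base64 string with '=' padding
    print(is_encrypted("sk-example-key"))             # False for short plaintext keys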

--- /dev/null
+++ vibe_surf/backend/utils/llm_factory.py
@@ -0,0 +1,225 @@
+"""
+LLM Factory utilities for creating LLM instances from profiles
+"""
+
+from typing import Optional
+import logging
+from ..llm_config import get_supported_providers, is_provider_supported
+
+logger = logging.getLogger(__name__)
+
+def create_llm_from_profile(llm_profile):
+    """Create LLM instance from LLMProfile database record (dict or object)"""
+    try:
+        # Import LLM classes from browser_use and vibe_surf
+        from browser_use.llm import (
+            ChatOpenAI, ChatAnthropic, ChatGoogle, ChatAzureOpenAI,
+            ChatGroq, ChatOllama, ChatOpenRouter, ChatDeepSeek,
+            ChatAWSBedrock, ChatAnthropicBedrock
+        )
+        from vibe_surf.llm import ChatOpenAICompatible
+
+        # Handle both dict and object access patterns
+        def get_attr(obj, key, default=None):
+            if isinstance(obj, dict):
+                return obj.get(key, default)
+            else:
+                return getattr(obj, key, default)
+
+        provider = get_attr(llm_profile, 'provider')
+        model = get_attr(llm_profile, 'model')
+        api_key = get_attr(llm_profile, 'api_key')  # Should already be decrypted by queries
+        base_url = get_attr(llm_profile, 'base_url')
+        temperature = get_attr(llm_profile, 'temperature') or 0.7
+        max_tokens = get_attr(llm_profile, 'max_tokens')
+        top_p = get_attr(llm_profile, 'top_p')
+        frequency_penalty = get_attr(llm_profile, 'frequency_penalty')
+        seed = get_attr(llm_profile, 'seed')
+        provider_config = get_attr(llm_profile, 'provider_config', {})
+
+        # Validate provider
+        if not is_provider_supported(provider):
+            raise ValueError(f"Unsupported provider: {provider}. Supported: {get_supported_providers()}")
+
+        # Common parameters
+        common_params = {}
+        if temperature is not None:
+            common_params["temperature"] = temperature
+        if max_tokens is not None:
+            common_params["max_tokens"] = max_tokens
+        if top_p is not None:
+            common_params["top_p"] = top_p
+        if frequency_penalty is not None:
+            common_params["frequency_penalty"] = frequency_penalty
+        if seed is not None:
+            common_params["seed"] = seed
+
+        # Add provider-specific config if available
+        if provider_config:
+            common_params.update(provider_config)
+
+        # Create LLM instance based on provider
+        if provider == "openai":
+            params = {
+                "model": model,
+                "api_key": api_key,
+                **common_params
+            }
+            if base_url:
+                params["base_url"] = base_url
+            return ChatOpenAI(**params)
+
+        elif provider == "anthropic":
+            return ChatAnthropic(
+                model=model,
+                api_key=api_key,
+                **common_params
+            )
+
+        elif provider == "google":
+            return ChatGoogle(
+                model=model,
+                api_key=api_key,
+                **common_params
+            )
+
+        elif provider == "azure_openai":
+            if not base_url:
+                raise ValueError("Azure OpenAI requires base_url (azure_endpoint)")
+            return ChatAzureOpenAI(
+                model=model,
+                api_version="2025-01-01-preview",
+                api_key=api_key,
+                azure_endpoint=base_url,
+                **common_params
+            )
+
+        elif provider == "groq":
+            return ChatGroq(
+                model=model,
+                api_key=api_key,
+                **common_params
+            )
+
+        elif provider == "ollama":
+            params = {
+                "model": model,
+                **common_params
+            }
+            if base_url:
+                params["host"] = base_url
+            else:
+                params["host"] = "http://localhost:11434"  # Default Ollama URL
+            return ChatOllama(**params)
+
+        elif provider == "openrouter":
+            return ChatOpenRouter(
+                model=model,
+                api_key=api_key,
+                **common_params
+            )
+
+        elif provider == "deepseek":
+            return ChatDeepSeek(
+                model=model,
+                api_key=api_key,
+                **common_params
+            )
+
+        elif provider == "aws_bedrock":
+            params = {
+                "model": model,
+                "aws_access_key_id": api_key,  # AWS uses different auth
+                **common_params
+            }
+            # Add AWS-specific parameters from provider_config
+            if "aws_secret_access_key" in provider_config:
+                params["aws_secret_access_key"] = provider_config["aws_secret_access_key"]
+            if "aws_region" in provider_config:
+                params["aws_region"] = provider_config["aws_region"]
+            if 'aws_region' not in params:
+                params["aws_region"] = "us-east-1"
+            return ChatAWSBedrock(**params)
+
+        elif provider == "anthropic_bedrock":
+            params = {
+                "model": model,
+                "aws_access_key_id": api_key,  # AWS uses different auth
+                **common_params
+            }
+            # Add AWS-specific parameters from provider_config
+            if "aws_secret_access_key" in provider_config:
+                params["aws_secret_access_key"] = provider_config["aws_secret_access_key"]
+            if "region_name" in provider_config:
+                params["region_name"] = provider_config["region_name"]
+            return ChatAnthropicBedrock(**params)
+
+        elif provider == "openai_compatible":
+            if not base_url:
+                raise ValueError("OpenAI Compatible provider requires base_url")
+            return ChatOpenAICompatible(
+                model=model,
+                api_key=api_key,
+                base_url=base_url,
+                **common_params
+            )
+
+        else:
+            raise ValueError(f"Unsupported provider: {provider}")
+
+    except Exception as e:
+        logger.error(f"Failed to create LLM from profile: {e}")
+        raise RuntimeError(f"Failed to create LLM from profile: {str(e)}")
+
+def validate_llm_configuration(provider: str, model: str, api_key: str, base_url: Optional[str] = None):
+    """Validate LLM configuration parameters"""
+    if not provider:
+        raise ValueError("Provider is required")
+
+    if not model:
+        raise ValueError("Model is required")
+
+    if not is_provider_supported(provider):
+        raise ValueError(f"Unsupported provider: {provider}. Supported: {get_supported_providers()}")
+
+    # Provider-specific validation
+    from ..llm_config import get_provider_metadata
+    metadata = get_provider_metadata(provider)
+
+    if metadata.get("requires_api_key", True) and not api_key:
+        raise ValueError(f"API key is required for provider: {provider}")
+
+    if metadata.get("requires_base_url", False) and not base_url:
+        raise ValueError(f"Base URL is required for provider: {provider}")
+
+    return True
+
+def get_llm_creation_parameters(provider: str):
+    """Get the required and optional parameters for creating an LLM instance"""
+    from ..llm_config import get_provider_metadata
+
+    if not is_provider_supported(provider):
+        raise ValueError(f"Unsupported provider: {provider}")
+
+    metadata = get_provider_metadata(provider)
+
+    required_params = ["model"]
+    optional_params = ["temperature", "max_tokens", "top_p", "frequency_penalty", "seed"]
+
+    if metadata.get("requires_api_key", True):
+        required_params.append("api_key")
+
+    if metadata.get("requires_base_url", False):
+        required_params.append("base_url")
+    elif metadata.get("supports_base_url", False):
+        optional_params.append("base_url")
+
+    # Special cases for AWS Bedrock
+    if provider in ["aws_bedrock", "anthropic_bedrock"]:
+        required_params.extend(["aws_secret_access_key", "region_name"])
+
+    return {
+        "required": required_params,
+        "optional": optional_params,
+        "metadata": metadata
+    }
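
Usage sketch, assuming "openai" is among the providers registered in vibe_surf/backend/llm_config.py; the profile values below are placeholders for a decrypted LLMProfile record:

    from vibe_surf.backend.utils.llm_factory import create_llm_from_profile, validate_llm_configuration

    profile = {
        "provider": "openai",
        "model": "gpt-4o-mini",      # placeholder model name
        "api_key": "sk-...",          # already decrypted by the query layer
        "temperature": 0.2,
    }

    validate_llm_configuration(profile["provider"], profile["model"], profile["api_key"])
    llm = create_llm_from_profile(profile)   # returns a browser_use ChatOpenAI instance

Note that `temperature` is read with `get_attr(...) or 0.7`, so a stored value of 0 is silently replaced by the 0.7 default.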

--- /dev/null
+++ vibe_surf/browser/__init__.py
@@ -0,0 +1,8 @@
+from typing import Optional, TYPE_CHECKING
+
+from vibe_surf.browser.browser_manager import BrowserManager
+from vibe_surf.browser.agent_browser_session import AgentBrowserSession
+from vibe_surf.browser.agen_browser_profile import AgentBrowserProfile
+
+
+__all__ = [ "AgentBrowserSession", "AgentBrowserProfile", "BrowserManager"]

--- /dev/null
+++ vibe_surf/browser/agen_browser_profile.py
@@ -0,0 +1,130 @@
+import pdb
+import sys
+import tempfile
+from collections.abc import Iterable
+from enum import Enum
+from functools import cache
+from pathlib import Path
+from re import Pattern
+from typing import Annotated, Any, Literal, Self
+from urllib.parse import urlparse
+
+from pydantic import AfterValidator, AliasChoices, BaseModel, ConfigDict, Field, field_validator, model_validator
+from uuid_extensions import uuid7str
+
+from browser_use.config import CONFIG
+from browser_use.observability import observe_debug
+from browser_use.utils import _log_pretty_path, logger
+
+from browser_use.browser import BrowserProfile
+
+
+class AgentBrowserProfile(BrowserProfile):
+    custom_extensions: list = Field(
+        default=lambda: [],
+        description="Enable Custom Extensions.",
+    )
+
+    def _ensure_default_extensions_downloaded(self) -> list[str]:
+        """
+        Ensure default extensions are downloaded and cached locally.
+        Returns list of paths to extension directories.
+        """
+
+        # Extension definitions - optimized for automation and content extraction
+        extensions = [
+            # {
+            #     'name': 'uBlock Origin',
+            #     'id': 'cjpalhdlnbpafiamejdnhcphjbkeiagm',
+            #     'url': 'https://clients2.google.com/service/update2/crx?response=redirect&prodversion=130&acceptformat=crx3&x=id%3Dcjpalhdlnbpafiamejdnhcphjbkeiagm%26uc',
+            # },
+            {
+                'name': "I still don't care about cookies",
+                'id': 'edibdbjcniadpccecjdfdjjppcpchdlm',
+                'url': 'https://clients2.google.com/service/update2/crx?response=redirect&prodversion=130&acceptformat=crx3&x=id%3Dedibdbjcniadpccecjdfdjjppcpchdlm%26uc',
+            },
+            # {
+            #     'name': 'ClearURLs',
+            #     'id': 'lckanjgmijmafbedllaakclkaicjfmnk',
+            #     'url': 'https://clients2.google.com/service/update2/crx?response=redirect&prodversion=130&acceptformat=crx3&x=id%3Dlckanjgmijmafbedllaakclkaicjfmnk%26uc',
+            # },
+            # {
+            #     'name': 'Captcha Solver: Auto captcha solving service',
+            #     'id': 'pgojnojmmhpofjgdmaebadhbocahppod',
+            #     'url': 'https://clients2.google.com/service/update2/crx?response=redirect&prodversion=130&acceptformat=crx3&x=id%3Dpgojnojmmhpofjgdmaebadhbocahppod%26uc',
+            # },
+            # {
+            #     'name': 'Consent-O-Matic',
+            #     'id': 'mdjildafknihdffpkfmmpnpoiajfjnjd',
+            #     'url': 'https://clients2.google.com/service/update2/crx?response=redirect&prodversion=130&acceptformat=crx3&x=id%3Dmdjildafknihdffpkfmmpnpoiajfjnjd%26uc',
+            # },
+            # {
+            #     'name': 'Privacy | Protect Your Payments',
+            #     'id': 'hmgpakheknboplhmlicfkkgjipfabmhp',
+            #     'url': 'https://clients2.google.com/service/update2/crx?response=redirect&prodversion=130&acceptformat=crx3&x=id%3Dhmgpakheknboplhmlicfkkgjipfabmhp%26uc',
+            # },
+        ]
+
+        # Create extensions cache directory
+        cache_dir = CONFIG.BROWSER_USE_EXTENSIONS_DIR
+        cache_dir.mkdir(parents=True, exist_ok=True)
+        # logger.debug(f'📁 Extensions cache directory: {_log_pretty_path(cache_dir)}')
+
+        extension_paths = []
+        loaded_extension_names = []
+
+        for ext in extensions:
+            ext_dir = cache_dir / ext['id']
+            crx_file = cache_dir / f'{ext["id"]}.crx'
+
+            # Check if extension is already extracted
+            if ext_dir.exists() and (ext_dir / 'manifest.json').exists():
+                # logger.debug(f'✅ Using cached {ext["name"]} extension from {_log_pretty_path(ext_dir)}')
+                extension_paths.append(str(ext_dir))
+                loaded_extension_names.append(ext['name'])
+                continue
+
+            try:
+                # Download extension if not cached
+                if not crx_file.exists():
+                    logger.info(f'📦 Downloading {ext["name"]} extension...')
+                    self._download_extension(ext['url'], crx_file)
+                else:
+                    logger.debug(f'📦 Found cached {ext["name"]} .crx file')
+
+                # Extract extension
+                logger.info(f'📂 Extracting {ext["name"]} extension...')
+                self._extract_extension(crx_file, ext_dir)
+                extension_paths.append(str(ext_dir))
+                loaded_extension_names.append(ext['name'])
+
+            except Exception as e:
+                logger.warning(f'⚠️ Failed to setup {ext["name"]} extension: {e}')
+                continue
+
+        if extension_paths:
+            logger.debug(
+                f'[BrowserProfile] 🧩 Extensions loaded ({len(extension_paths)}): [{", ".join(loaded_extension_names)}]')
+        else:
+            logger.warning('[BrowserProfile] ⚠️ No default extensions could be loaded')
+
+        return extension_paths
+
+    def _get_extension_args(self) -> list[str]:
+        """Get Chrome args for enabling default extensions (ad blocker and cookie handler)."""
+        extension_paths = self._ensure_default_extensions_downloaded()
+
+        args = [
+            '--enable-extensions',
+            '--disable-extensions-file-access-check',
+            '--disable-extensions-http-throttling',
+            '--enable-extension-activity-logging',
+            '--disable-features=DisableLoadExtensionCommandLineSwitch'
+        ]
+
+        if self.custom_extensions:
+            extension_paths.extend(self.custom_extensions)
+        if extension_paths:
+            args.append(f'--load-extension={",".join(extension_paths)}')
+        print(args)
+        return args