vibesurf 0.1.16__py3-none-any.whl → 0.1.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of vibesurf might be problematic. Click here for more details.
- vibe_surf/_version.py +2 -2
- vibe_surf/agents/browser_use_agent.py +6 -12
- vibe_surf/agents/report_writer_agent.py +3 -3
- vibe_surf/agents/vibe_surf_agent.py +17 -14
- vibe_surf/agents/views.py +4 -2
- vibe_surf/backend/api/models.py +3 -0
- vibe_surf/backend/api/task.py +3 -1
- vibe_surf/backend/database/manager.py +187 -4
- vibe_surf/backend/database/migrations/v001_initial_schema.sql +118 -0
- vibe_surf/backend/database/migrations/v002_add_agent_mode.sql +6 -0
- vibe_surf/backend/database/models.py +3 -0
- vibe_surf/backend/database/queries.py +4 -2
- vibe_surf/backend/shared_state.py +5 -3
- vibe_surf/chrome_extension/scripts/api-client.js +4 -2
- vibe_surf/chrome_extension/scripts/ui-manager.js +27 -1
- vibe_surf/chrome_extension/sidepanel.html +5 -0
- vibe_surf/chrome_extension/styles/input.css +11 -3
- vibe_surf/llm/openai_compatible.py +1 -2
- {vibesurf-0.1.16.dist-info → vibesurf-0.1.17.dist-info}/METADATA +1 -1
- {vibesurf-0.1.16.dist-info → vibesurf-0.1.17.dist-info}/RECORD +24 -25
- vibe_surf/backend/migrations/__init__.py +0 -16
- vibe_surf/backend/migrations/init_db.py +0 -303
- vibe_surf/backend/migrations/seed_data.py +0 -236
- {vibesurf-0.1.16.dist-info → vibesurf-0.1.17.dist-info}/WHEEL +0 -0
- {vibesurf-0.1.16.dist-info → vibesurf-0.1.17.dist-info}/entry_points.txt +0 -0
- {vibesurf-0.1.16.dist-info → vibesurf-0.1.17.dist-info}/licenses/LICENSE +0 -0
- {vibesurf-0.1.16.dist-info → vibesurf-0.1.17.dist-info}/top_level.txt +0 -0
|
@@ -1,236 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Database Seed Data Scripts - Simplified Single Task Model
|
|
3
|
-
|
|
4
|
-
Creates sample data for development and testing purposes using the simplified Task table.
|
|
5
|
-
"""
|
|
6
|
-
|
|
7
|
-
import asyncio
|
|
8
|
-
from datetime import datetime, timedelta
|
|
9
|
-
from typing import List
|
|
10
|
-
|
|
11
|
-
from ..database.models import Task, TaskStatus
|
|
12
|
-
from .. import shared_state
|
|
13
|
-
|
|
14
|
-
async def seed_sample_tasks(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Seed the database with sample tasks for development.

    Args:
        database_url: Database connection URL. Only used when no shared
            ``shared_state.db_manager`` is available and a temporary
            manager must be created.

    Returns:
        bool: True if seeding succeeded (or data already existed),
        False on any error or if no session could be obtained.
    """
    # Local import: SQLAlchemy 2.x requires textual SQL to be wrapped in
    # text(); passing a bare string to AsyncSession.execute() raises
    # ArgumentError. (The project's DB layer is SQLAlchemy-based.)
    from sqlalchemy import text

    try:
        # Use shared_state db_manager if available, otherwise create temporary one
        if shared_state.db_manager:
            db_manager = shared_state.db_manager
        else:
            from ..database.manager import DatabaseManager
            db_manager = DatabaseManager(database_url)

        async for db in db_manager.get_session():
            # Idempotence guard: skip seeding when the table is already populated.
            result = await db.execute(text("SELECT COUNT(*) FROM tasks"))
            count = result.scalar()

            if count > 0:
                print(f"⚠️ Database already contains {count} tasks. Skipping seed data.")
                return True

            print("🌱 Seeding sample tasks...")

            # Create sample tasks in this session (flushed by the helper),
            # then commit the transaction here.
            await create_sample_tasks(db)

            await db.commit()

            print("✅ Sample tasks seeded successfully")
            return True

        # get_session() yielded no session at all — report failure explicitly
        # instead of implicitly returning None.
        return False

    except Exception as e:
        # Best-effort dev tooling: report and signal failure rather than raise.
        print(f"❌ Seeding failed: {e}")
        return False
|
|
54
|
-
|
|
55
|
-
async def create_sample_tasks(db) -> List[Task]:
    """Build and persist a fixed set of demo Task rows.

    One task per lifecycle state (completed, running, pending, failed,
    paused) so every status is represented in development/test data.

    Args:
        db: Active async database session. Tasks are added and flushed
            here but NOT committed — the caller owns the transaction.

    Returns:
        List[Task]: The five Task instances, flushed to the session.
    """
    completed_task = Task(
        session_id="session_001",
        task_description="Create a simple web scraper to extract product information from an e-commerce website",
        status=TaskStatus.COMPLETED,
        upload_files_path="./uploads/session_001/requirements.pdf",
        mcp_server_config={
            "exclude_actions": [],
            "max_actions_per_task": 100,
            "display_files_in_done_text": True,
        },
        llm_config={
            "model": "gpt-4o-mini",
            "provider": "openai",
            "temperature": 0.1,
            "max_tokens": 4000,
        },
        task_result="Successfully created web scraper that extracts product names, prices, and descriptions. Generated 500 product records.",
        report_path="./reports/session_001/scraper_report.html",
        started_at=datetime.now() - timedelta(hours=2),
        completed_at=datetime.now() - timedelta(hours=1),
        task_metadata={
            "execution_duration_seconds": 3600.0,
            "total_actions": 45,
            "created_via": "api",
        },
    )

    running_task = Task(
        session_id="session_002",
        task_description="Automate login process for a social media platform and post a scheduled message",
        status=TaskStatus.RUNNING,
        upload_files_path="./uploads/session_002/login_credentials.txt",
        mcp_server_config={
            "exclude_actions": ["dangerous_action"],
            "max_actions_per_task": 50,
            "display_files_in_done_text": True,
        },
        llm_config={
            "model": "gpt-4o",
            "provider": "openai",
            "temperature": 0.2,
            "max_tokens": 2000,
        },
        started_at=datetime.now() - timedelta(minutes=30),
        task_metadata={
            "created_via": "api",
            "estimated_duration": 1800,
        },
    )

    pending_task = Task(
        session_id="session_003",
        task_description="Research and compile information about top AI companies and their latest products",
        status=TaskStatus.PENDING,
        mcp_server_config={
            "exclude_actions": [],
            "max_actions_per_task": 200,
            "display_files_in_done_text": True,
        },
        llm_config={
            "model": "claude-3-sonnet-20240229",
            "provider": "anthropic",
            "temperature": 0.3,
            "max_tokens": 8000,
        },
        task_metadata={
            "created_via": "api",
            "priority": "high",
        },
    )

    failed_task = Task(
        session_id="session_004",
        task_description="Fill out and submit an online form with provided customer data",
        status=TaskStatus.FAILED,
        upload_files_path="./uploads/session_004/customer_data.csv",
        mcp_server_config={
            "exclude_actions": [],
            "max_actions_per_task": 30,
            "display_files_in_done_text": True,
        },
        llm_config={
            "model": "gpt-3.5-turbo",
            "provider": "openai",
            "temperature": 0.0,
            "max_tokens": 1000,
        },
        error_message="Form submission failed due to CAPTCHA protection",
        started_at=datetime.now() - timedelta(hours=6),
        completed_at=datetime.now() - timedelta(hours=5, minutes=45),
        task_metadata={
            "execution_duration_seconds": 900.0,
            "total_actions": 15,
            "created_via": "api",
            "error_recovery_attempts": 3,
        },
    )

    paused_task = Task(
        session_id="session_005",
        task_description="Monitor a website for price changes and send notifications when target price is reached",
        status=TaskStatus.PAUSED,
        mcp_server_config={
            "exclude_actions": [],
            "max_actions_per_task": 1000,
            "display_files_in_done_text": False,
        },
        llm_config={
            "model": "gpt-4o-mini",
            "provider": "openai",
            "temperature": 0.1,
            "max_tokens": 2000,
        },
        started_at=datetime.now() - timedelta(hours=12),
        task_metadata={
            "created_via": "api",
            "monitoring_interval": 3600,
            "target_price": 299.99,
        },
    )

    tasks = [completed_task, running_task, pending_task, failed_task, paused_task]
    # Stage everything and flush so generated IDs are available to the caller.
    db.add_all(tasks)
    await db.flush()
    return tasks
|
|
182
|
-
|
|
183
|
-
async def clear_sample_data(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Clear all sample data from the database.

    Args:
        database_url: Database connection URL. Only used when no shared
            ``shared_state.db_manager`` is available.

    Returns:
        bool: True if clearing succeeded, False on any error or if no
        session could be obtained.
    """
    # Local import: SQLAlchemy 2.x requires textual SQL to be wrapped in
    # text(); a bare string passed to AsyncSession.execute() raises.
    from sqlalchemy import text

    try:
        # Use shared_state db_manager if available, otherwise create temporary one
        if shared_state.db_manager:
            db_manager = shared_state.db_manager
        else:
            from ..database.manager import DatabaseManager
            db_manager = DatabaseManager(database_url)

        async for db in db_manager.get_session():
            print("🧹 Clearing sample tasks...")

            # Delete all tasks
            await db.execute(text("DELETE FROM tasks"))
            await db.commit()

            print("✅ Sample data cleared successfully")
            return True

        # get_session() yielded no session at all — report failure explicitly
        # instead of implicitly returning None.
        return False

    except Exception as e:
        # Best-effort dev tooling: report and signal failure rather than raise.
        print(f"❌ Clearing sample data failed: {e}")
        return False
|
|
214
|
-
|
|
215
|
-
# CLI functions
|
|
216
|
-
# CLI functions
async def main():
    """CLI entry point: dispatch the `seed` or `clear` command.

    Usage: ``python -m backend.migrations.seed_data [seed|clear] [db_url]``.
    An optional second argument overrides the default SQLite database URL.
    """
    import sys

    argv = sys.argv
    if len(argv) < 2:
        print("Usage: python -m backend.migrations.seed_data [seed|clear]")
        return

    command = argv[1]
    database_url = argv[2] if len(argv) > 2 else "sqlite+aiosqlite:///./vibe_surf.db"

    # Table-driven dispatch keeps adding new commands a one-line change.
    handlers = {
        "seed": seed_sample_tasks,
        "clear": clear_sample_data,
    }
    handler = handlers.get(command)
    if handler is None:
        print(f"Unknown command: {command}")
        print("Available commands: seed, clear")
        return

    await handler(database_url)


if __name__ == "__main__":
    asyncio.run(main())
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|