vibesurf-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This release of vibesurf has been flagged as potentially problematic.
- vibe_surf/__init__.py +12 -0
- vibe_surf/_version.py +34 -0
- vibe_surf/agents/__init__.py +0 -0
- vibe_surf/agents/browser_use_agent.py +1106 -0
- vibe_surf/agents/prompts/__init__.py +1 -0
- vibe_surf/agents/prompts/vibe_surf_prompt.py +176 -0
- vibe_surf/agents/report_writer_agent.py +360 -0
- vibe_surf/agents/vibe_surf_agent.py +1632 -0
- vibe_surf/backend/__init__.py +0 -0
- vibe_surf/backend/api/__init__.py +3 -0
- vibe_surf/backend/api/activity.py +243 -0
- vibe_surf/backend/api/config.py +740 -0
- vibe_surf/backend/api/files.py +322 -0
- vibe_surf/backend/api/models.py +257 -0
- vibe_surf/backend/api/task.py +300 -0
- vibe_surf/backend/database/__init__.py +13 -0
- vibe_surf/backend/database/manager.py +129 -0
- vibe_surf/backend/database/models.py +164 -0
- vibe_surf/backend/database/queries.py +922 -0
- vibe_surf/backend/database/schemas.py +100 -0
- vibe_surf/backend/llm_config.py +182 -0
- vibe_surf/backend/main.py +137 -0
- vibe_surf/backend/migrations/__init__.py +16 -0
- vibe_surf/backend/migrations/init_db.py +303 -0
- vibe_surf/backend/migrations/seed_data.py +236 -0
- vibe_surf/backend/shared_state.py +601 -0
- vibe_surf/backend/utils/__init__.py +7 -0
- vibe_surf/backend/utils/encryption.py +164 -0
- vibe_surf/backend/utils/llm_factory.py +225 -0
- vibe_surf/browser/__init__.py +8 -0
- vibe_surf/browser/agen_browser_profile.py +130 -0
- vibe_surf/browser/agent_browser_session.py +416 -0
- vibe_surf/browser/browser_manager.py +296 -0
- vibe_surf/browser/utils.py +790 -0
- vibe_surf/browser/watchdogs/__init__.py +0 -0
- vibe_surf/browser/watchdogs/action_watchdog.py +291 -0
- vibe_surf/browser/watchdogs/dom_watchdog.py +954 -0
- vibe_surf/chrome_extension/background.js +558 -0
- vibe_surf/chrome_extension/config.js +48 -0
- vibe_surf/chrome_extension/content.js +284 -0
- vibe_surf/chrome_extension/dev-reload.js +47 -0
- vibe_surf/chrome_extension/icons/convert-svg.js +33 -0
- vibe_surf/chrome_extension/icons/logo-preview.html +187 -0
- vibe_surf/chrome_extension/icons/logo.png +0 -0
- vibe_surf/chrome_extension/manifest.json +53 -0
- vibe_surf/chrome_extension/popup.html +134 -0
- vibe_surf/chrome_extension/scripts/api-client.js +473 -0
- vibe_surf/chrome_extension/scripts/main.js +491 -0
- vibe_surf/chrome_extension/scripts/markdown-it.min.js +3 -0
- vibe_surf/chrome_extension/scripts/session-manager.js +599 -0
- vibe_surf/chrome_extension/scripts/ui-manager.js +3687 -0
- vibe_surf/chrome_extension/sidepanel.html +347 -0
- vibe_surf/chrome_extension/styles/animations.css +471 -0
- vibe_surf/chrome_extension/styles/components.css +670 -0
- vibe_surf/chrome_extension/styles/main.css +2307 -0
- vibe_surf/chrome_extension/styles/settings.css +1100 -0
- vibe_surf/cli.py +357 -0
- vibe_surf/controller/__init__.py +0 -0
- vibe_surf/controller/file_system.py +53 -0
- vibe_surf/controller/mcp_client.py +68 -0
- vibe_surf/controller/vibesurf_controller.py +616 -0
- vibe_surf/controller/views.py +37 -0
- vibe_surf/llm/__init__.py +21 -0
- vibe_surf/llm/openai_compatible.py +237 -0
- vibesurf-0.1.0.dist-info/METADATA +97 -0
- vibesurf-0.1.0.dist-info/RECORD +70 -0
- vibesurf-0.1.0.dist-info/WHEEL +5 -0
- vibesurf-0.1.0.dist-info/entry_points.txt +2 -0
- vibesurf-0.1.0.dist-info/licenses/LICENSE +201 -0
- vibesurf-0.1.0.dist-info/top_level.txt +1 -0
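The listing above can be reproduced locally with Python's standard zipfile module. A minimal sketch, assuming the wheel has been downloaded as vibesurf-0.1.0-py3-none-any.whl:

import zipfile

# Print each archive member and its uncompressed size,
# mirroring the file listing above.
with zipfile.ZipFile("vibesurf-0.1.0-py3-none-any.whl") as whl:
    for info in whl.infolist():
        print(f"{info.filename} ({info.file_size} bytes)")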
vibe_surf/backend/migrations/init_db.py
@@ -0,0 +1,303 @@
"""
Database Initialization and Migration Scripts - Simplified

Handles database schema creation for Task, LLMProfile, and UploadedFile tables.
"""

import asyncio
from typing import Optional
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy import text
from sqlalchemy.exc import SQLAlchemyError

from ..database.models import Base

async def init_database(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Initialize the database with Task, LLMProfile, and UploadedFile tables and indexes.

    Args:
        database_url: Database connection URL

    Returns:
        bool: True if initialization was successful
    """
    try:
        # Create async engine
        engine = create_async_engine(
            database_url,
            echo=False,
            future=True,
            pool_pre_ping=True
        )

        # Create all tables
        success = await create_tables(engine)

        if success:
            print("✅ Database initialized successfully")
            print(f"📍 Database URL: {database_url}")

            # Print table information
            await print_table_info(engine)
        else:
            print("❌ Database initialization failed")

        await engine.dispose()
        return success

    except Exception as e:
        print(f"❌ Database initialization error: {e}")
        return False

async def create_tables(engine: AsyncEngine) -> bool:
    """
    Create all tables (Task, LLMProfile, UploadedFile) and indexes.

    Args:
        engine: SQLAlchemy async engine

    Returns:
        bool: True if creation was successful
    """
    try:
        async with engine.begin() as conn:
            # Create all tables (Task, LLMProfile, UploadedFile)
            await conn.run_sync(Base.metadata.create_all)

            # Create additional indexes for performance
            await create_performance_indexes(conn)

        print("✅ Database tables created successfully")
        return True

    except SQLAlchemyError as e:
        print(f"❌ Error creating tables: {e}")
        return False

async def drop_tables(engine: AsyncEngine) -> bool:
    """
    Drop all database tables (use with caution).

    Args:
        engine: SQLAlchemy async engine

    Returns:
        bool: True if drop was successful
    """
    try:
        async with engine.begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)

        print("✅ Database tables dropped successfully")
        return True

    except SQLAlchemyError as e:
        print(f"❌ Error dropping tables: {e}")
        return False

async def create_performance_indexes(conn):
    """
    Create additional performance indexes for all tables.

    Args:
        conn: Database connection
    """
    # Performance indexes for all tables
    indexes = [
        # Tasks - for session task tracking
        "CREATE INDEX IF NOT EXISTS idx_tasks_session_status ON tasks(session_id, status)",
        "CREATE INDEX IF NOT EXISTS idx_tasks_session_created ON tasks(session_id, created_at DESC)",
        "CREATE INDEX IF NOT EXISTS idx_tasks_status_created ON tasks(status, created_at DESC)",
        "CREATE INDEX IF NOT EXISTS idx_tasks_completed ON tasks(completed_at DESC)",

        # LLMProfile - for profile lookups
        "CREATE INDEX IF NOT EXISTS idx_llm_profiles_name ON llm_profiles(profile_name)",
        "CREATE INDEX IF NOT EXISTS idx_llm_profiles_provider ON llm_profiles(provider)",
        "CREATE INDEX IF NOT EXISTS idx_llm_profiles_created ON llm_profiles(created_at DESC)",

        # UploadedFile - for file management
        "CREATE INDEX IF NOT EXISTS idx_uploaded_files_session ON uploaded_files(session_id)",
        "CREATE INDEX IF NOT EXISTS idx_uploaded_files_session_active ON uploaded_files(session_id, is_deleted)",
        "CREATE INDEX IF NOT EXISTS idx_uploaded_files_upload_time ON uploaded_files(upload_time DESC)",
        "CREATE INDEX IF NOT EXISTS idx_uploaded_files_deleted ON uploaded_files(is_deleted, deleted_at)",
        "CREATE INDEX IF NOT EXISTS idx_uploaded_files_filename ON uploaded_files(original_filename)",
    ]

    for index_sql in indexes:
        try:
            await conn.execute(text(index_sql))
            print(f"✅ Created index: {index_sql.split('idx_')[1].split(' ')[0]}")
        except Exception as e:
            print(f"⚠️ Index creation warning: {e}")

async def print_table_info(engine: AsyncEngine):
    """
    Print information about created tables.

    Args:
        engine: SQLAlchemy async engine
    """
    try:
        async with engine.connect() as conn:
            # Get table list
            if "sqlite" in str(engine.url):
                result = await conn.execute(text("SELECT name FROM sqlite_master WHERE type='table'"))
            else:
                result = await conn.execute(text("SELECT tablename FROM pg_tables WHERE schemaname='public'"))

            tables = [row[0] for row in result]

            print(f"\n📊 Created {len(tables)} tables:")
            for table in sorted(tables):
                if not table.startswith('sqlite_'):  # Skip SQLite system tables
                    print(f"  • {table}")

            # Get approximate row counts (for SQLite)
            if "sqlite" in str(engine.url):
                print(f"\n📈 Table sizes:")
                for table in sorted(tables):
                    if not table.startswith('sqlite_'):
                        try:
                            count_result = await conn.execute(text(f"SELECT COUNT(*) FROM {table}"))
                            count = count_result.scalar()
                            print(f"  • {table}: {count} rows")
                        except Exception:
                            print(f"  • {table}: - rows")

    except Exception as e:
        print(f"⚠️ Could not retrieve table information: {e}")

async def verify_database_schema(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Verify that the database schema is correctly set up.

    Args:
        database_url: Database connection URL

    Returns:
        bool: True if schema is valid
    """
    try:
        engine = create_async_engine(database_url, echo=False)

        # Expected tables (Task, LLMProfile, UploadedFile)
        expected_tables = ['tasks', 'llm_profiles', 'uploaded_files']

        async with engine.connect() as conn:
            # Check if all expected tables exist
            if "sqlite" in str(engine.url):
                result = await conn.execute(text("SELECT name FROM sqlite_master WHERE type='table'"))
            else:
                result = await conn.execute(text("SELECT tablename FROM pg_tables WHERE schemaname='public'"))

            existing_tables = {row[0] for row in result}
            missing_tables = set(expected_tables) - existing_tables

            if missing_tables:
                print(f"❌ Missing tables: {missing_tables}")
                await engine.dispose()
                return False

            # Test basic operations
            await conn.execute(text("SELECT 1"))

        await engine.dispose()
        print("✅ Database schema verification passed")
        return True

    except Exception as e:
        print(f"❌ Database schema verification failed: {e}")
        return False

async def reset_database(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Reset the database by dropping and recreating all tables.

    Args:
        database_url: Database connection URL

    Returns:
        bool: True if reset was successful
    """
    try:
        engine = create_async_engine(database_url, echo=False)

        print("🔄 Resetting database...")

        # Drop all tables
        if not await drop_tables(engine):
            await engine.dispose()
            return False

        # Recreate all tables
        if not await create_tables(engine):
            await engine.dispose()
            return False

        await engine.dispose()
        print("✅ Database reset completed successfully")
        return True

    except Exception as e:
        print(f"❌ Database reset failed: {e}")
        return False

async def migrate_database(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Apply database migrations and updates.

    Args:
        database_url: Database connection URL

    Returns:
        bool: True if migration was successful
    """
    try:
        engine = create_async_engine(database_url, echo=False)

        print("🔄 Running database migrations...")

        # For now, this is equivalent to create_tables
        # In the future, this could handle schema updates
        success = await create_tables(engine)

        await engine.dispose()

        if success:
            print("✅ Database migrations completed successfully")
        else:
            print("❌ Database migrations failed")

        return success

    except Exception as e:
        print(f"❌ Database migration failed: {e}")
        return False

# CLI functions for direct usage
async def main():
    """Main function for running migrations from command line."""
    import sys

    if len(sys.argv) < 2:
        print("Usage: python -m vibe_surf.backend.migrations.init_db [init|reset|verify|migrate]")
        return

    command = sys.argv[1]
    database_url = sys.argv[2] if len(sys.argv) > 2 else "sqlite+aiosqlite:///./vibe_surf.db"

    if command == "init":
        await init_database(database_url)
    elif command == "reset":
        await reset_database(database_url)
    elif command == "verify":
        await verify_database_schema(database_url)
    elif command == "migrate":
        await migrate_database(database_url)
    else:
        print(f"Unknown command: {command}")
        print("Available commands: init, reset, verify, migrate")

if __name__ == "__main__":
    asyncio.run(main())
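For reference, a minimal sketch of driving these helpers programmatically rather than through the CLI. init_database and verify_database_schema come from the file above; the wrapper script itself is assumed, not part of the package, and it assumes the wheel is installed so the import path resolves:

import asyncio

# Hypothetical driver; the two coroutines are defined in
# vibe_surf/backend/migrations/init_db.py above.
from vibe_surf.backend.migrations.init_db import init_database, verify_database_schema

async def bootstrap(url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> None:
    # Create tables and indexes, then confirm the expected schema exists.
    if not await init_database(url):
        raise RuntimeError("database initialization failed")
    if not await verify_database_schema(url):
        raise RuntimeError("schema verification failed")

if __name__ == "__main__":
    asyncio.run(bootstrap())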
vibe_surf/backend/migrations/seed_data.py
@@ -0,0 +1,236 @@
"""
Database Seed Data Scripts - Simplified Single Task Model

Creates sample data for development and testing purposes using the simplified Task table.
"""

import asyncio
from datetime import datetime, timedelta
from typing import List

from sqlalchemy import text  # required to execute raw SQL strings under SQLAlchemy 2.x

from ..database.models import Task, TaskStatus
from .. import shared_state

async def seed_sample_tasks(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Seed the database with sample tasks for development.

    Args:
        database_url: Database connection URL

    Returns:
        bool: True if seeding was successful
    """
    try:
        # Use shared_state db_manager if available, otherwise create temporary one
        if shared_state.db_manager:
            db_manager = shared_state.db_manager
        else:
            from ..database.manager import DatabaseManager
            db_manager = DatabaseManager(database_url)

        async for db in db_manager.get_session():
            # Check if data already exists (raw SQL must be wrapped in text())
            result = await db.execute(text("SELECT COUNT(*) FROM tasks"))
            count = result.scalar()

            if count > 0:
                print(f"⚠️ Database already contains {count} tasks. Skipping seed data.")
                return True

            print("🌱 Seeding sample tasks...")

            # Create sample tasks
            tasks = await create_sample_tasks(db)

            await db.commit()

            print("✅ Sample tasks seeded successfully")
            return True

    except Exception as e:
        print(f"❌ Seeding failed: {e}")
        return False

async def create_sample_tasks(db) -> List[Task]:
    """Create sample tasks for testing."""

    sample_tasks = [
        Task(
            session_id="session_001",
            task_description="Create a simple web scraper to extract product information from an e-commerce website",
            status=TaskStatus.COMPLETED,
            upload_files_path="./uploads/session_001/requirements.pdf",
            mcp_server_config={
                "exclude_actions": [],
                "max_actions_per_task": 100,
                "display_files_in_done_text": True
            },
            llm_config={
                "model": "gpt-4o-mini",
                "provider": "openai",
                "temperature": 0.1,
                "max_tokens": 4000
            },
            task_result="Successfully created web scraper that extracts product names, prices, and descriptions. Generated 500 product records.",
            report_path="./reports/session_001/scraper_report.html",
            started_at=datetime.now() - timedelta(hours=2),
            completed_at=datetime.now() - timedelta(hours=1),
            task_metadata={
                "execution_duration_seconds": 3600.0,
                "total_actions": 45,
                "created_via": "api"
            }
        ),
        Task(
            session_id="session_002",
            task_description="Automate login process for a social media platform and post a scheduled message",
            status=TaskStatus.RUNNING,
            upload_files_path="./uploads/session_002/login_credentials.txt",
            mcp_server_config={
                "exclude_actions": ["dangerous_action"],
                "max_actions_per_task": 50,
                "display_files_in_done_text": True
            },
            llm_config={
                "model": "gpt-4o",
                "provider": "openai",
                "temperature": 0.2,
                "max_tokens": 2000
            },
            started_at=datetime.now() - timedelta(minutes=30),
            task_metadata={
                "created_via": "api",
                "estimated_duration": 1800
            }
        ),
        Task(
            session_id="session_003",
            task_description="Research and compile information about top AI companies and their latest products",
            status=TaskStatus.PENDING,
            mcp_server_config={
                "exclude_actions": [],
                "max_actions_per_task": 200,
                "display_files_in_done_text": True
            },
            llm_config={
                "model": "claude-3-sonnet-20240229",
                "provider": "anthropic",
                "temperature": 0.3,
                "max_tokens": 8000
            },
            task_metadata={
                "created_via": "api",
                "priority": "high"
            }
        ),
        Task(
            session_id="session_004",
            task_description="Fill out and submit an online form with provided customer data",
            status=TaskStatus.FAILED,
            upload_files_path="./uploads/session_004/customer_data.csv",
            mcp_server_config={
                "exclude_actions": [],
                "max_actions_per_task": 30,
                "display_files_in_done_text": True
            },
            llm_config={
                "model": "gpt-3.5-turbo",
                "provider": "openai",
                "temperature": 0.0,
                "max_tokens": 1000
            },
            error_message="Form submission failed due to CAPTCHA protection",
            started_at=datetime.now() - timedelta(hours=6),
            completed_at=datetime.now() - timedelta(hours=5, minutes=45),
            task_metadata={
                "execution_duration_seconds": 900.0,
                "total_actions": 15,
                "created_via": "api",
                "error_recovery_attempts": 3
            }
        ),
        Task(
            session_id="session_005",
            task_description="Monitor a website for price changes and send notifications when target price is reached",
            status=TaskStatus.PAUSED,
            mcp_server_config={
                "exclude_actions": [],
                "max_actions_per_task": 1000,
                "display_files_in_done_text": False
            },
            llm_config={
                "model": "gpt-4o-mini",
                "provider": "openai",
                "temperature": 0.1,
                "max_tokens": 2000
            },
            started_at=datetime.now() - timedelta(hours=12),
            task_metadata={
                "created_via": "api",
                "monitoring_interval": 3600,
                "target_price": 299.99
            }
        )
    ]

    for task in sample_tasks:
        db.add(task)

    await db.flush()
    return sample_tasks

async def clear_sample_data(database_url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> bool:
    """
    Clear all sample data from the database.

    Args:
        database_url: Database connection URL

    Returns:
        bool: True if clearing was successful
    """
    try:
        # Use shared_state db_manager if available, otherwise create temporary one
        if shared_state.db_manager:
            db_manager = shared_state.db_manager
        else:
            from ..database.manager import DatabaseManager
            db_manager = DatabaseManager(database_url)

        async for db in db_manager.get_session():
            print("🧹 Clearing sample tasks...")

            # Delete all tasks (raw SQL must be wrapped in text())
            await db.execute(text("DELETE FROM tasks"))
            await db.commit()

            print("✅ Sample data cleared successfully")
            return True

    except Exception as e:
        print(f"❌ Clearing sample data failed: {e}")
        return False

# CLI functions
async def main():
    """Main function for running seed operations from command line."""
    import sys

    if len(sys.argv) < 2:
        print("Usage: python -m vibe_surf.backend.migrations.seed_data [seed|clear]")
        return

    command = sys.argv[1]
    database_url = sys.argv[2] if len(sys.argv) > 2 else "sqlite+aiosqlite:///./vibe_surf.db"

    if command == "seed":
        await seed_sample_tasks(database_url)
    elif command == "clear":
        await clear_sample_data(database_url)
    else:
        print(f"Unknown command: {command}")
        print("Available commands: seed, clear")

if __name__ == "__main__":
    asyncio.run(main())
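A similar sketch for the seeding helpers. Since seed_sample_tasks assumes the tasks table already exists, init_database runs first; the function names come from the two files above, while the driver itself is assumed:

import asyncio

from vibe_surf.backend.migrations.init_db import init_database
from vibe_surf.backend.migrations.seed_data import seed_sample_tasks, clear_sample_data

async def demo(url: str = "sqlite+aiosqlite:///./vibe_surf.db") -> None:
    await init_database(url)      # ensure the tasks table exists
    await seed_sample_tasks(url)  # inserts the five sample Task rows; no-op if tasks exist
    await clear_sample_data(url)  # removes every row from the tasks table

if __name__ == "__main__":
    asyncio.run(demo())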