vibesurf 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of vibesurf might be problematic. Click here for more details.
- vibe_surf/__init__.py +12 -0
- vibe_surf/_version.py +34 -0
- vibe_surf/agents/__init__.py +0 -0
- vibe_surf/agents/browser_use_agent.py +1106 -0
- vibe_surf/agents/prompts/__init__.py +1 -0
- vibe_surf/agents/prompts/vibe_surf_prompt.py +176 -0
- vibe_surf/agents/report_writer_agent.py +360 -0
- vibe_surf/agents/vibe_surf_agent.py +1632 -0
- vibe_surf/backend/__init__.py +0 -0
- vibe_surf/backend/api/__init__.py +3 -0
- vibe_surf/backend/api/activity.py +243 -0
- vibe_surf/backend/api/config.py +740 -0
- vibe_surf/backend/api/files.py +322 -0
- vibe_surf/backend/api/models.py +257 -0
- vibe_surf/backend/api/task.py +300 -0
- vibe_surf/backend/database/__init__.py +13 -0
- vibe_surf/backend/database/manager.py +129 -0
- vibe_surf/backend/database/models.py +164 -0
- vibe_surf/backend/database/queries.py +922 -0
- vibe_surf/backend/database/schemas.py +100 -0
- vibe_surf/backend/llm_config.py +182 -0
- vibe_surf/backend/main.py +137 -0
- vibe_surf/backend/migrations/__init__.py +16 -0
- vibe_surf/backend/migrations/init_db.py +303 -0
- vibe_surf/backend/migrations/seed_data.py +236 -0
- vibe_surf/backend/shared_state.py +601 -0
- vibe_surf/backend/utils/__init__.py +7 -0
- vibe_surf/backend/utils/encryption.py +164 -0
- vibe_surf/backend/utils/llm_factory.py +225 -0
- vibe_surf/browser/__init__.py +8 -0
- vibe_surf/browser/agen_browser_profile.py +130 -0
- vibe_surf/browser/agent_browser_session.py +416 -0
- vibe_surf/browser/browser_manager.py +296 -0
- vibe_surf/browser/utils.py +790 -0
- vibe_surf/browser/watchdogs/__init__.py +0 -0
- vibe_surf/browser/watchdogs/action_watchdog.py +291 -0
- vibe_surf/browser/watchdogs/dom_watchdog.py +954 -0
- vibe_surf/chrome_extension/background.js +558 -0
- vibe_surf/chrome_extension/config.js +48 -0
- vibe_surf/chrome_extension/content.js +284 -0
- vibe_surf/chrome_extension/dev-reload.js +47 -0
- vibe_surf/chrome_extension/icons/convert-svg.js +33 -0
- vibe_surf/chrome_extension/icons/logo-preview.html +187 -0
- vibe_surf/chrome_extension/icons/logo.png +0 -0
- vibe_surf/chrome_extension/manifest.json +53 -0
- vibe_surf/chrome_extension/popup.html +134 -0
- vibe_surf/chrome_extension/scripts/api-client.js +473 -0
- vibe_surf/chrome_extension/scripts/main.js +491 -0
- vibe_surf/chrome_extension/scripts/markdown-it.min.js +3 -0
- vibe_surf/chrome_extension/scripts/session-manager.js +599 -0
- vibe_surf/chrome_extension/scripts/ui-manager.js +3687 -0
- vibe_surf/chrome_extension/sidepanel.html +347 -0
- vibe_surf/chrome_extension/styles/animations.css +471 -0
- vibe_surf/chrome_extension/styles/components.css +670 -0
- vibe_surf/chrome_extension/styles/main.css +2307 -0
- vibe_surf/chrome_extension/styles/settings.css +1100 -0
- vibe_surf/cli.py +357 -0
- vibe_surf/controller/__init__.py +0 -0
- vibe_surf/controller/file_system.py +53 -0
- vibe_surf/controller/mcp_client.py +68 -0
- vibe_surf/controller/vibesurf_controller.py +616 -0
- vibe_surf/controller/views.py +37 -0
- vibe_surf/llm/__init__.py +21 -0
- vibe_surf/llm/openai_compatible.py +237 -0
- vibesurf-0.1.0.dist-info/METADATA +97 -0
- vibesurf-0.1.0.dist-info/RECORD +70 -0
- vibesurf-0.1.0.dist-info/WHEEL +5 -0
- vibesurf-0.1.0.dist-info/entry_points.txt +2 -0
- vibesurf-0.1.0.dist-info/licenses/LICENSE +201 -0
- vibesurf-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,922 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Database Query Operations for VibeSurf Backend - With LLM Profile Management
|
|
3
|
+
|
|
4
|
+
Centralized database operations for Task and LLMProfile tables.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
import logging
from typing import Any, Dict, List, Optional, Union

from sqlalchemy import select, update, delete, func, desc, and_, or_
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from .models import Task, TaskStatus, LLMProfile, UploadedFile, McpProfile
from ..utils.encryption import encrypt_api_key, decrypt_api_key
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
class LLMProfileQueries:
    """Query operations for the LLMProfile model.

    All methods are static and operate on a caller-supplied ``AsyncSession``.
    Writes are flushed (and objects refreshed) but never committed here —
    presumably the caller owns the transaction boundary; TODO confirm against
    the session factory / DatabaseManager usage.

    API keys are persisted only in encrypted form (``encrypted_api_key``) and
    are decrypted solely by :meth:`get_profile_with_decrypted_key`.
    """

    @staticmethod
    async def create_profile(
        db: AsyncSession,
        profile_name: str,
        provider: str,
        model: str,
        api_key: Optional[str] = None,
        base_url: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        top_p: Optional[float] = None,
        frequency_penalty: Optional[float] = None,
        seed: Optional[int] = None,
        provider_config: Optional[Dict[str, Any]] = None,
        description: Optional[str] = None,
        is_default: bool = False
    ) -> Dict[str, Any]:
        """Create a new LLM profile with encrypted API key.

        Returns a plain dict snapshot of the new row rather than the ORM
        object, so the data remains usable after the session is gone.

        Raises: re-raises any database error after logging it.
        """
        try:
            # Encrypt API key if provided; plaintext is never stored.
            encrypted_api_key = encrypt_api_key(api_key) if api_key else None

            profile = LLMProfile(
                profile_name=profile_name,
                provider=provider,
                model=model,
                base_url=base_url,
                encrypted_api_key=encrypted_api_key,
                temperature=temperature,
                max_tokens=max_tokens,
                top_p=top_p,
                frequency_penalty=frequency_penalty,
                seed=seed,
                # Column presumably expects a dict, so None is normalized to {}.
                provider_config=provider_config or {},
                description=description,
                is_default=is_default
            )

            db.add(profile)
            # Flush to emit the INSERT, then refresh so server-generated values
            # (id, timestamps) are populated on the instance.
            await db.flush()
            await db.refresh(profile)

            # Extract data immediately to avoid greenlet issues: touching ORM
            # attributes after the async session context ends can trigger lazy
            # loads outside SQLAlchemy's greenlet and raise.
            profile_data = {
                "profile_id": profile.profile_id,
                "profile_name": profile.profile_name,
                "provider": profile.provider,
                "model": profile.model,
                "base_url": profile.base_url,
                "temperature": profile.temperature,
                "max_tokens": profile.max_tokens,
                "top_p": profile.top_p,
                "frequency_penalty": profile.frequency_penalty,
                "seed": profile.seed,
                "provider_config": profile.provider_config,
                "description": profile.description,
                "is_active": profile.is_active,
                "is_default": profile.is_default,
                "created_at": profile.created_at,
                "updated_at": profile.updated_at,
                "last_used_at": profile.last_used_at
            }

            return profile_data
        except Exception as e:
            logger.error(f"Failed to create LLM profile {profile_name}: {e}")
            raise

    @staticmethod
    async def get_profile(db: AsyncSession, profile_name: str) -> Optional[LLMProfile]:
        """Get LLM profile by name, or None if it does not exist."""
        try:
            result = await db.execute(
                select(LLMProfile).where(LLMProfile.profile_name == profile_name)
            )
            profile = result.scalar_one_or_none()
            if profile:
                # Ensure all attributes are loaded by accessing them while the
                # session is still active (avoids later greenlet lazy-loads).
                _ = (profile.profile_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active, profile.is_default)
            return profile
        except Exception as e:
            logger.error(f"Failed to get LLM profile {profile_name}: {e}")
            raise

    @staticmethod
    async def get_profile_with_decrypted_key(db: AsyncSession, profile_name: str) -> Optional[Dict[str, Any]]:
        """Get LLM profile as a dict with the API key decrypted.

        Returns None when the profile does not exist. Use this only where the
        plaintext key is actually needed (e.g. building an LLM client).
        """
        try:
            profile = await LLMProfileQueries.get_profile(db, profile_name)
            if not profile:
                return None

            # Decrypt API key (stored encrypted at rest).
            decrypted_api_key = decrypt_api_key(profile.encrypted_api_key) if profile.encrypted_api_key else None

            return {
                "profile_id": profile.profile_id,
                "profile_name": profile.profile_name,
                "provider": profile.provider,
                "model": profile.model,
                "base_url": profile.base_url,
                "api_key": decrypted_api_key,  # Decrypted for use
                "temperature": profile.temperature,
                "max_tokens": profile.max_tokens,
                "top_p": profile.top_p,
                "frequency_penalty": profile.frequency_penalty,
                "seed": profile.seed,
                "provider_config": profile.provider_config,
                "description": profile.description,
                "is_active": profile.is_active,
                "is_default": profile.is_default,
                "created_at": profile.created_at,
                "updated_at": profile.updated_at,
                "last_used_at": profile.last_used_at
            }
        except Exception as e:
            logger.error(f"Failed to get LLM profile with decrypted key {profile_name}: {e}")
            raise

    @staticmethod
    async def list_profiles(
        db: AsyncSession,
        active_only: bool = True,
        limit: int = 50,
        offset: int = 0
    ) -> List[LLMProfile]:
        """List LLM profiles, most recently used/created first.

        Args:
            active_only: when True, only rows with is_active set are returned.
            limit/offset: standard pagination window.
        """
        try:
            query = select(LLMProfile)

            if active_only:
                query = query.where(LLMProfile.is_active == True)

            # Most recently used first, then newest created as a tiebreaker.
            query = query.order_by(desc(LLMProfile.last_used_at), desc(LLMProfile.created_at))
            query = query.limit(limit).offset(offset)

            result = await db.execute(query)
            profiles = result.scalars().all()

            # Ensure all attributes are loaded for each profile while the
            # session is active (avoids later greenlet lazy-loads).
            for profile in profiles:
                _ = (profile.profile_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active, profile.is_default)

            return profiles
        except Exception as e:
            logger.error(f"Failed to list LLM profiles: {e}")
            raise

    @staticmethod
    async def update_profile(
        db: AsyncSession,
        profile_name: str,
        updates: Dict[str, Any]
    ) -> bool:
        """Update an LLM profile; returns True if a row was modified.

        A plaintext "api_key" entry in ``updates`` is transparently replaced
        by its encrypted form; an empty/None value clears the stored key.
        """
        try:
            # Handle API key encryption if present — the plaintext key must
            # never reach the database.
            if "api_key" in updates:
                api_key = updates.pop("api_key")
                if api_key:
                    updates["encrypted_api_key"] = encrypt_api_key(api_key)
                else:
                    updates["encrypted_api_key"] = None

            result = await db.execute(
                update(LLMProfile)
                .where(LLMProfile.profile_name == profile_name)
                .values(**updates)
            )

            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to update LLM profile {profile_name}: {e}")
            raise

    @staticmethod
    async def delete_profile(db: AsyncSession, profile_name: str) -> bool:
        """Delete an LLM profile; returns True if a row was removed."""
        try:
            result = await db.execute(
                delete(LLMProfile).where(LLMProfile.profile_name == profile_name)
            )
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to delete LLM profile {profile_name}: {e}")
            raise

    @staticmethod
    async def get_default_profile(db: AsyncSession) -> Optional[LLMProfile]:
        """Get the default LLM profile, or None if none is marked default.

        NOTE(review): assumes at most one row has is_default=True (enforced by
        set_default_profile); scalar_one_or_none would raise on duplicates.
        """
        try:
            result = await db.execute(
                select(LLMProfile).where(LLMProfile.is_default == True)
            )
            profile = result.scalar_one_or_none()
            if profile:
                # Ensure all attributes are loaded by accessing them while the
                # session is still active.
                _ = (profile.profile_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active, profile.is_default)
            return profile
        except Exception as e:
            logger.error(f"Failed to get default LLM profile: {e}")
            raise

    @staticmethod
    async def set_default_profile(db: AsyncSession, profile_name: str) -> bool:
        """Set a profile as default (and unset others).

        Returns True if the named profile existed and was flagged. The two
        UPDATEs are only atomic if the caller wraps them in one transaction —
        TODO confirm callers do.
        """
        try:
            # First, unset all defaults so the flag stays unique.
            await db.execute(
                update(LLMProfile).values(is_default=False)
            )

            # Then set the specified profile as default.
            result = await db.execute(
                update(LLMProfile)
                .where(LLMProfile.profile_name == profile_name)
                .values(is_default=True)
            )

            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to set default LLM profile {profile_name}: {e}")
            raise

    @staticmethod
    async def update_last_used(db: AsyncSession, profile_name: str) -> bool:
        """Update the last_used_at timestamp for a profile (server-side now())."""
        try:
            result = await db.execute(
                update(LLMProfile)
                .where(LLMProfile.profile_name == profile_name)
                .values(last_used_at=func.now())
            )
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to update last_used for LLM profile {profile_name}: {e}")
            raise
|
|
261
|
+
|
|
262
|
+
class McpProfileQueries:
    """Query operations for the McpProfile model.

    Mirrors LLMProfileQueries: static methods over a caller-supplied
    ``AsyncSession``; writes are flushed but not committed here (the caller
    presumably owns the transaction — TODO confirm).
    """

    @staticmethod
    async def create_profile(
        db: AsyncSession,
        display_name: str,
        mcp_server_name: str,
        mcp_server_params: Dict[str, Any],
        description: Optional[str] = None
    ) -> Dict[str, Any]:
        """Create a new MCP profile.

        Returns a plain dict snapshot of the new row rather than the ORM
        object, so the data remains usable after the session is gone.
        """
        try:
            profile = McpProfile(
                display_name=display_name,
                mcp_server_name=mcp_server_name,
                mcp_server_params=mcp_server_params,
                description=description
            )

            db.add(profile)
            # Flush to emit the INSERT, then refresh so server-generated values
            # (id, timestamps) are populated on the instance.
            await db.flush()
            await db.refresh(profile)

            # Extract data immediately to avoid greenlet issues: touching ORM
            # attributes after the async session context ends can trigger lazy
            # loads outside SQLAlchemy's greenlet and raise.
            profile_data = {
                "mcp_id": profile.mcp_id,
                "display_name": profile.display_name,
                "mcp_server_name": profile.mcp_server_name,
                "mcp_server_params": profile.mcp_server_params,
                "description": profile.description,
                "is_active": profile.is_active,
                "created_at": profile.created_at,
                "updated_at": profile.updated_at,
                "last_used_at": profile.last_used_at
            }

            return profile_data
        except Exception as e:
            logger.error(f"Failed to create MCP profile {display_name}: {e}")
            raise

    @staticmethod
    async def get_profile(db: AsyncSession, mcp_id: str) -> Optional[McpProfile]:
        """Get MCP profile by ID, or None if it does not exist."""
        try:
            result = await db.execute(
                select(McpProfile).where(McpProfile.mcp_id == mcp_id)
            )
            profile = result.scalar_one_or_none()
            if profile:
                # Ensure all attributes are loaded by accessing them while the
                # session is still active (avoids later greenlet lazy-loads).
                _ = (profile.mcp_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active)
            return profile
        except Exception as e:
            logger.error(f"Failed to get MCP profile {mcp_id}: {e}")
            raise

    @staticmethod
    async def get_profile_by_display_name(db: AsyncSession, display_name: str) -> Optional[McpProfile]:
        """Get MCP profile by display name, or None if it does not exist."""
        try:
            result = await db.execute(
                select(McpProfile).where(McpProfile.display_name == display_name)
            )
            profile = result.scalar_one_or_none()
            if profile:
                # Touch attributes while the session is active (greenlet-safe).
                _ = (profile.mcp_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active)
            return profile
        except Exception as e:
            logger.error(f"Failed to get MCP profile by display name {display_name}: {e}")
            raise

    @staticmethod
    async def list_profiles(
        db: AsyncSession,
        active_only: bool = True,
        limit: int = 50,
        offset: int = 0
    ) -> List[McpProfile]:
        """List MCP profiles, most recently used/created first.

        Args:
            active_only: when True, only rows with is_active set are returned.
            limit/offset: standard pagination window.
        """
        try:
            query = select(McpProfile)

            if active_only:
                query = query.where(McpProfile.is_active == True)

            # Most recently used first, then newest created as a tiebreaker.
            query = query.order_by(desc(McpProfile.last_used_at), desc(McpProfile.created_at))
            query = query.limit(limit).offset(offset)

            result = await db.execute(query)
            profiles = result.scalars().all()

            # Ensure all attributes are loaded for each profile while the
            # session is active (avoids later greenlet lazy-loads).
            for profile in profiles:
                _ = (profile.mcp_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active)

            return profiles
        except Exception as e:
            logger.error(f"Failed to list MCP profiles: {e}")
            raise

    @staticmethod
    async def get_active_profiles(db: AsyncSession) -> List[McpProfile]:
        """Get all active MCP profiles (no pagination, no ordering)."""
        try:
            result = await db.execute(
                select(McpProfile).where(McpProfile.is_active == True)
            )
            profiles = result.scalars().all()

            # Ensure all attributes are loaded for each profile while the
            # session is active.
            for profile in profiles:
                _ = (profile.mcp_id, profile.created_at, profile.updated_at,
                     profile.last_used_at, profile.is_active)

            return profiles
        except Exception as e:
            logger.error(f"Failed to get active MCP profiles: {e}")
            raise

    @staticmethod
    async def update_profile(
        db: AsyncSession,
        mcp_id: str,
        updates: Dict[str, Any]
    ) -> bool:
        """Update an MCP profile; returns True if a row was modified."""
        try:
            logger.info(f"Updating profile {mcp_id}")

            result = await db.execute(
                update(McpProfile)
                .where(McpProfile.mcp_id == mcp_id)
                .values(**updates)
            )

            rows_affected = result.rowcount
            logger.info(f"Update query affected {rows_affected} rows")

            return rows_affected > 0
        except Exception as e:
            logger.error(f"Failed to update MCP profile {mcp_id}: {e}")
            raise

    @staticmethod
    async def delete_profile(db: AsyncSession, mcp_id: str) -> bool:
        """Delete an MCP profile; returns True if a row was removed."""
        try:
            result = await db.execute(
                delete(McpProfile).where(McpProfile.mcp_id == mcp_id)
            )
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to delete MCP profile {mcp_id}: {e}")
            raise

    @staticmethod
    async def update_last_used(db: AsyncSession, mcp_id: str) -> bool:
        """Update the last_used_at timestamp for a profile (server-side now())."""
        try:
            result = await db.execute(
                update(McpProfile)
                .where(McpProfile.mcp_id == mcp_id)
                .values(last_used_at=func.now())
            )
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to update last_used for MCP profile {mcp_id}: {e}")
            raise
|
|
435
|
+
|
|
436
|
+
class TaskQueries:
    """Database queries for task management with LLM Profile support.

    Static methods over a caller-supplied ``AsyncSession``; writes are flushed
    but not committed here (the caller presumably owns the transaction —
    TODO confirm).
    """

    @staticmethod
    async def save_task(
        db: AsyncSession,
        task_id: str,
        session_id: str,
        task_description: str,
        llm_profile_name: str,
        upload_files_path: Optional[str] = None,
        workspace_dir: Optional[str] = None,
        # Accepts either a JSON string or a dict (serialized before insert).
        # The original annotation said `Optional[str]`, but the body has always
        # handled dicts explicitly, so the annotation now matches reality.
        mcp_server_config: Optional[Union[str, Dict[str, Any]]] = None,
        task_result: Optional[str] = None,
        task_status: str = "pending",
        error_message: Optional[str] = None,
        report_path: Optional[str] = None
    ) -> Task:
        """Create or update a task record.

        If a row with ``task_id`` already exists, only the status/result
        fields are updated; otherwise a new row is inserted.

        Raises: re-raises any database error (and ValueError for an unknown
        ``task_status``) after logging.
        """
        try:
            # Check if task exists
            result = await db.execute(select(Task).where(Task.task_id == task_id))
            existing_task = result.scalar_one_or_none()

            if existing_task:
                # Update existing task: mutate only the completion-related fields.
                update_data: Dict[str, Any] = {}
                if task_result is not None:
                    update_data['task_result'] = task_result
                if task_status:
                    update_data['status'] = TaskStatus(task_status)
                if error_message is not None:
                    update_data['error_message'] = error_message
                if report_path is not None:
                    update_data['report_path'] = report_path
                # Stamp started_at only on the first transition to running.
                if task_status == "running" and not existing_task.started_at:
                    update_data['started_at'] = func.now()
                if task_status in ["completed", "failed", "stopped"]:
                    update_data['completed_at'] = func.now()

                await db.execute(
                    update(Task).where(Task.task_id == task_id).values(**update_data)
                )
                # Refresh so the returned object reflects the UPDATE.
                await db.refresh(existing_task)
                return existing_task
            else:
                # mcp_server_config may arrive as a dict even though the column
                # stores a JSON string; serialize before insert. Logged at DEBUG
                # (not INFO) since the config may contain sensitive values.
                logger.debug(
                    f"Creating task with mcp_server_config type: {type(mcp_server_config)}, value: {mcp_server_config}")

                if isinstance(mcp_server_config, dict):
                    mcp_server_config_json = json.dumps(mcp_server_config)
                else:
                    mcp_server_config_json = mcp_server_config

                # Create new task
                task = Task(
                    task_id=task_id,
                    session_id=session_id,
                    task_description=task_description,
                    status=TaskStatus(task_status),
                    llm_profile_name=llm_profile_name,
                    upload_files_path=upload_files_path,
                    workspace_dir=workspace_dir,
                    mcp_server_config=mcp_server_config_json,
                    task_result=task_result,
                    error_message=error_message,
                    report_path=report_path
                )

                db.add(task)
                # Flush to emit the INSERT, refresh to pull server defaults.
                await db.flush()
                await db.refresh(task)
                return task

        except Exception as e:
            logger.error(f"Failed to save task {task_id}: {e}")
            raise

    @staticmethod
    async def get_task(db: AsyncSession, task_id: str) -> Optional[Task]:
        """Get task by ID, or None if it does not exist."""
        try:
            result = await db.execute(select(Task).where(Task.task_id == task_id))
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Failed to get task {task_id}: {e}")
            raise

    @staticmethod
    async def get_tasks_by_session(
        db: AsyncSession,
        session_id: str,
        limit: int = 50,
        offset: int = 0
    ) -> List[Task]:
        """Get all tasks for a session, newest first, with pagination."""
        try:
            result = await db.execute(
                select(Task)
                .where(Task.session_id == session_id)
                .order_by(desc(Task.created_at))
                .limit(limit)
                .offset(offset)
            )
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Failed to get tasks for session {session_id}: {e}")
            raise

    @staticmethod
    async def get_recent_tasks(db: AsyncSession, limit: int = -1) -> List[Task]:
        """Get recent tasks, newest first.

        Args:
            limit: maximum number of rows; -1 means "all rows".
        """
        try:
            query = select(Task).order_by(desc(Task.created_at))

            # Handle -1 as "get all records"
            if limit != -1:
                query = query.limit(limit)

            result = await db.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Failed to get recent tasks: {e}")
            raise

    @staticmethod
    async def get_all_sessions(
        db: AsyncSession,
        limit: int = -1,
        offset: int = 0
    ) -> List[Dict[str, Any]]:
        """Get all unique sessions with task counts and metadata.

        Returns one dict per session_id with task_count, first/last activity
        timestamps (ISO strings) and a status, ordered by last activity desc.

        Args:
            limit: maximum number of sessions; -1 means "all".
            offset: rows to skip (applied regardless of limit).
        """
        try:
            # Aggregate per session_id.
            # NOTE(review): max(Task.status) is the lexicographically greatest
            # enum value, not the status of the most recent task, despite the
            # 'latest_status' label — confirm whether this is intended.
            query = select(
                Task.session_id,
                func.count(Task.task_id).label('task_count'),
                func.min(Task.created_at).label('created_at'),
                func.max(Task.created_at).label('last_activity'),
                func.max(Task.status).label('latest_status')
            ).group_by(Task.session_id).order_by(desc(func.max(Task.created_at)))

            # Handle -1 as "get all records"
            if limit != -1:
                query = query.limit(limit)

            # Always apply offset if provided
            if offset > 0:
                query = query.offset(offset)

            result = await db.execute(query)

            sessions = []
            for row in result.all():
                sessions.append({
                    'session_id': row.session_id,
                    'task_count': row.task_count,
                    'created_at': row.created_at.isoformat() if row.created_at else None,
                    'last_activity': row.last_activity.isoformat() if row.last_activity else None,
                    'status': row.latest_status.value if row.latest_status else 'unknown'
                })

            return sessions
        except Exception as e:
            logger.error(f"Failed to get all sessions: {e}")
            raise

    @staticmethod
    async def update_task_status(
        db: AsyncSession,
        task_id: str,
        status: str,
        error_message: Optional[str] = None,
        task_result: Optional[str] = None,
        report_path: Optional[str] = None
    ) -> bool:
        """Update task status (and optional result fields).

        Returns True if a row was modified. Raises ValueError for an unknown
        ``status`` string.
        """
        try:
            update_data: Dict[str, Any] = {
                'status': TaskStatus(status)
            }

            # NOTE(review): unlike save_task, this overwrites started_at on
            # every transition to "running" (e.g. on resume) — confirm intended.
            if status == "running":
                update_data['started_at'] = func.now()
            elif status in ["completed", "failed", "stopped"]:
                update_data['completed_at'] = func.now()

            if error_message:
                update_data['error_message'] = error_message
            if task_result:
                update_data['task_result'] = task_result
            if report_path:
                update_data['report_path'] = report_path

            result = await db.execute(
                update(Task).where(Task.task_id == task_id).values(**update_data)
            )

            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to update task status {task_id}: {e}")
            raise

    @staticmethod
    async def delete_task(db: AsyncSession, task_id: str) -> bool:
        """Delete a task; returns True if a row was removed."""
        try:
            result = await db.execute(delete(Task).where(Task.task_id == task_id))
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to delete task {task_id}: {e}")
            raise

    @staticmethod
    async def get_running_tasks(db: AsyncSession) -> List[Task]:
        """Get all currently running or paused tasks."""
        try:
            result = await db.execute(
                select(Task).where(Task.status.in_([TaskStatus.RUNNING, TaskStatus.PAUSED]))
            )
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Failed to get running tasks: {e}")
            raise

    @staticmethod
    async def get_active_task(db: AsyncSession) -> Optional[Task]:
        """Get currently running task (for single-task model).

        NOTE(review): scalar_one_or_none raises if more than one task is
        RUNNING — relies on the single-task invariant holding.
        """
        try:
            result = await db.execute(
                select(Task).where(Task.status == TaskStatus.RUNNING)
            )
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Failed to get active task: {e}")
            raise

    @staticmethod
    async def get_tasks_by_llm_profile(
        db: AsyncSession,
        llm_profile_name: str,
        limit: int = 50,
        offset: int = 0
    ) -> List[Task]:
        """Get tasks that used a specific LLM profile, newest first."""
        try:
            result = await db.execute(
                select(Task)
                .where(Task.llm_profile_name == llm_profile_name)
                .order_by(desc(Task.created_at))
                .limit(limit)
                .offset(offset)
            )
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Failed to get tasks for LLM profile {llm_profile_name}: {e}")
            raise

    @staticmethod
    async def update_task_completion(
        db: AsyncSession,
        task_id: str,
        task_result: Optional[str] = None,
        task_status: str = "completed",
        error_message: Optional[str] = None,
        report_path: Optional[str] = None
    ) -> bool:
        """Update task completion status and results.

        Always stamps completed_at; returns True if a row was modified.
        """
        try:
            update_data: Dict[str, Any] = {
                'status': TaskStatus(task_status),
                'completed_at': func.now()
            }

            if task_result is not None:
                update_data['task_result'] = task_result
            if error_message is not None:
                update_data['error_message'] = error_message
            if report_path is not None:
                update_data['report_path'] = report_path

            result = await db.execute(
                update(Task).where(Task.task_id == task_id).values(**update_data)
            )

            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to update task completion {task_id}: {e}")
            raise

    @staticmethod
    async def get_task_counts_by_status(db: AsyncSession) -> Dict[str, int]:
        """Get count of tasks by status, keyed by the status enum's value."""
        try:
            result = await db.execute(
                select(Task.status, func.count(Task.task_id))
                .group_by(Task.status)
            )

            counts = {}
            for status, count in result.all():
                counts[status.value] = count

            return counts
        except Exception as e:
            logger.error(f"Failed to get task counts by status: {e}")
            raise
|
|
745
|
+
|
|
746
|
+
class UploadedFileQueries:
    """Query operations for UploadedFile model.

    All methods are static and take an ``AsyncSession`` as their first
    argument. Read methods exclude soft-deleted rows unless stated
    otherwise; every method logs and re-raises on failure.
    """

    @staticmethod
    async def create_file_record(
        db: AsyncSession,
        file_id: str,
        original_filename: str,
        stored_filename: str,
        file_path: str,
        session_id: Optional[str],
        file_size: int,
        mime_type: str,
        relative_path: str
    ) -> UploadedFile:
        """Create a new uploaded file record.

        Flushes and refreshes so defaults (e.g. upload_time) are populated;
        the caller remains responsible for committing the session.

        Returns:
            The freshly created ``UploadedFile`` row.

        Raises:
            Exception: Re-raised after logging on failure.
        """
        try:
            uploaded_file = UploadedFile(
                file_id=file_id,
                original_filename=original_filename,
                stored_filename=stored_filename,
                file_path=file_path,
                session_id=session_id,
                file_size=file_size,
                mime_type=mime_type,
                relative_path=relative_path
            )

            db.add(uploaded_file)
            await db.flush()
            await db.refresh(uploaded_file)
            return uploaded_file
        except Exception as e:
            logger.error(f"Failed to create file record {file_id}: {e}")
            raise

    @staticmethod
    async def get_file(db: AsyncSession, file_id: str) -> Optional[UploadedFile]:
        """Get uploaded file by ID, ignoring soft-deleted rows.

        Returns:
            The matching ``UploadedFile``, or None if absent/deleted.
        """
        try:
            result = await db.execute(
                select(UploadedFile).where(
                    and_(UploadedFile.file_id == file_id, UploadedFile.is_deleted == False)
                )
            )
            return result.scalar_one_or_none()
        except Exception as e:
            logger.error(f"Failed to get file {file_id}: {e}")
            raise

    @staticmethod
    async def list_files(
        db: AsyncSession,
        session_id: Optional[str] = None,
        limit: int = -1,
        offset: int = 0,
        active_only: bool = True
    ) -> List[UploadedFile]:
        """List uploaded files with optional filtering.

        Args:
            db: Active async database session.
            session_id: When given, restrict to this session.
            limit: Max rows; ``-1`` means no limit (all rows).
            offset: Rows to skip; applied only when > 0.
            active_only: When True, exclude soft-deleted files.

        Returns:
            Files ordered by ``upload_time`` descending.
        """
        try:
            query = select(UploadedFile)

            if active_only:
                query = query.where(UploadedFile.is_deleted == False)

            if session_id is not None:
                query = query.where(UploadedFile.session_id == session_id)

            query = query.order_by(desc(UploadedFile.upload_time))

            # Handle -1 as "get all records"
            if limit != -1:
                query = query.limit(limit)

            # Always apply offset if provided
            if offset > 0:
                query = query.offset(offset)

            result = await db.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Failed to list files: {e}")
            raise

    @staticmethod
    async def count_files(
        db: AsyncSession,
        session_id: Optional[str] = None,
        active_only: bool = True
    ) -> int:
        """Count uploaded files with optional filtering.

        Mirrors the filters of :meth:`list_files` (minus pagination).

        Returns:
            Number of matching rows (0 when none).
        """
        try:
            query = select(func.count(UploadedFile.file_id))

            if active_only:
                query = query.where(UploadedFile.is_deleted == False)

            if session_id is not None:
                query = query.where(UploadedFile.session_id == session_id)

            result = await db.execute(query)
            return result.scalar() or 0
        except Exception as e:
            logger.error(f"Failed to count files: {e}")
            raise

    @staticmethod
    async def delete_file(db: AsyncSession, file_id: str) -> bool:
        """Soft delete uploaded file by marking as deleted.

        Sets ``is_deleted`` and stamps ``deleted_at`` with the database
        clock; the row (and the file on disk) is left in place.

        Returns:
            True if a row was updated, False if ``file_id`` was unknown.
        """
        try:
            result = await db.execute(
                update(UploadedFile)
                .where(UploadedFile.file_id == file_id)
                .values(is_deleted=True, deleted_at=func.now())
            )
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to delete file {file_id}: {e}")
            raise

    @staticmethod
    async def hard_delete_file(db: AsyncSession, file_id: str) -> bool:
        """Permanently delete uploaded file record.

        Removes the database row regardless of its soft-delete state.
        Does NOT remove the underlying file from disk.

        Returns:
            True if a row was deleted, False otherwise.
        """
        try:
            result = await db.execute(
                delete(UploadedFile).where(UploadedFile.file_id == file_id)
            )
            return result.rowcount > 0
        except Exception as e:
            logger.error(f"Failed to hard delete file {file_id}: {e}")
            raise

    @staticmethod
    async def get_files_by_session(
        db: AsyncSession,
        session_id: str,
        limit: int = -1,
        offset: int = 0
    ) -> List[UploadedFile]:
        """Get all uploaded files for a specific session.

        Args:
            db: Active async database session.
            session_id: Session whose files to fetch.
            limit: Max rows; ``-1`` means no limit (all rows).
            offset: Rows to skip; applied only when > 0.

        Returns:
            Non-deleted files ordered by ``upload_time`` descending.
        """
        try:
            query = select(UploadedFile).where(and_(
                UploadedFile.session_id == session_id,
                UploadedFile.is_deleted == False
            )).order_by(desc(UploadedFile.upload_time))

            # Handle -1 as "get all records"
            if limit != -1:
                query = query.limit(limit)

            # Always apply offset if provided
            if offset > 0:
                query = query.offset(offset)

            result = await db.execute(query)
            return result.scalars().all()
        except Exception as e:
            logger.error(f"Failed to get files for session {session_id}: {e}")
            raise

    @staticmethod
    async def cleanup_deleted_files(db: AsyncSession, days_old: int = 30) -> int:
        """Clean up files marked as deleted for more than specified days.

        FIX: the previous implementation computed the cutoff with
        ``func.now() - func.make_interval(days=days_old)``; ``make_interval``
        is a PostgreSQL-only SQL function and raises on SQLite. The cutoff
        is now computed in Python, which works on any backend.

        Returns:
            Number of rows permanently removed.
        """
        try:
            from datetime import datetime, timedelta

            # NOTE(review): assumes deleted_at is stored as naive UTC
            # (func.now() / CURRENT_TIMESTAMP is UTC on SQLite) — confirm
            # against the column definition if the backend changes.
            cutoff_date = datetime.utcnow() - timedelta(days=days_old)

            result = await db.execute(
                delete(UploadedFile)
                .where(and_(
                    UploadedFile.is_deleted == True,
                    UploadedFile.deleted_at < cutoff_date
                ))
            )
            return result.rowcount
        except Exception as e:
            logger.error(f"Failed to cleanup deleted files: {e}")
            raise