kairo_code-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- image-service/main.py +178 -0
- infra/chat/app/main.py +84 -0
- kairo/backend/__init__.py +0 -0
- kairo/backend/api/__init__.py +0 -0
- kairo/backend/api/admin/__init__.py +23 -0
- kairo/backend/api/admin/audit.py +54 -0
- kairo/backend/api/admin/content.py +142 -0
- kairo/backend/api/admin/incidents.py +148 -0
- kairo/backend/api/admin/stats.py +125 -0
- kairo/backend/api/admin/system.py +87 -0
- kairo/backend/api/admin/users.py +279 -0
- kairo/backend/api/agents.py +94 -0
- kairo/backend/api/api_keys.py +85 -0
- kairo/backend/api/auth.py +116 -0
- kairo/backend/api/billing.py +41 -0
- kairo/backend/api/chat.py +72 -0
- kairo/backend/api/conversations.py +125 -0
- kairo/backend/api/device_auth.py +100 -0
- kairo/backend/api/files.py +83 -0
- kairo/backend/api/health.py +36 -0
- kairo/backend/api/images.py +80 -0
- kairo/backend/api/openai_compat.py +225 -0
- kairo/backend/api/projects.py +102 -0
- kairo/backend/api/usage.py +32 -0
- kairo/backend/api/webhooks.py +79 -0
- kairo/backend/app.py +297 -0
- kairo/backend/config.py +179 -0
- kairo/backend/core/__init__.py +0 -0
- kairo/backend/core/admin_auth.py +24 -0
- kairo/backend/core/api_key_auth.py +55 -0
- kairo/backend/core/database.py +28 -0
- kairo/backend/core/dependencies.py +70 -0
- kairo/backend/core/logging.py +23 -0
- kairo/backend/core/rate_limit.py +73 -0
- kairo/backend/core/security.py +29 -0
- kairo/backend/models/__init__.py +19 -0
- kairo/backend/models/agent.py +30 -0
- kairo/backend/models/api_key.py +25 -0
- kairo/backend/models/api_usage.py +29 -0
- kairo/backend/models/audit_log.py +26 -0
- kairo/backend/models/conversation.py +48 -0
- kairo/backend/models/device_code.py +30 -0
- kairo/backend/models/feature_flag.py +21 -0
- kairo/backend/models/image_generation.py +24 -0
- kairo/backend/models/incident.py +28 -0
- kairo/backend/models/project.py +28 -0
- kairo/backend/models/uptime_record.py +24 -0
- kairo/backend/models/usage.py +24 -0
- kairo/backend/models/user.py +49 -0
- kairo/backend/schemas/__init__.py +0 -0
- kairo/backend/schemas/admin/__init__.py +0 -0
- kairo/backend/schemas/admin/audit.py +28 -0
- kairo/backend/schemas/admin/content.py +53 -0
- kairo/backend/schemas/admin/stats.py +77 -0
- kairo/backend/schemas/admin/system.py +44 -0
- kairo/backend/schemas/admin/users.py +48 -0
- kairo/backend/schemas/agent.py +42 -0
- kairo/backend/schemas/api_key.py +30 -0
- kairo/backend/schemas/auth.py +57 -0
- kairo/backend/schemas/chat.py +26 -0
- kairo/backend/schemas/conversation.py +39 -0
- kairo/backend/schemas/device_auth.py +40 -0
- kairo/backend/schemas/image.py +15 -0
- kairo/backend/schemas/openai_compat.py +76 -0
- kairo/backend/schemas/project.py +21 -0
- kairo/backend/schemas/status.py +81 -0
- kairo/backend/schemas/usage.py +15 -0
- kairo/backend/services/__init__.py +0 -0
- kairo/backend/services/admin/__init__.py +0 -0
- kairo/backend/services/admin/audit_service.py +78 -0
- kairo/backend/services/admin/content_service.py +119 -0
- kairo/backend/services/admin/incident_service.py +94 -0
- kairo/backend/services/admin/stats_service.py +281 -0
- kairo/backend/services/admin/system_service.py +126 -0
- kairo/backend/services/admin/user_service.py +157 -0
- kairo/backend/services/agent_service.py +107 -0
- kairo/backend/services/api_key_service.py +66 -0
- kairo/backend/services/api_usage_service.py +126 -0
- kairo/backend/services/auth_service.py +101 -0
- kairo/backend/services/chat_service.py +501 -0
- kairo/backend/services/conversation_service.py +264 -0
- kairo/backend/services/device_auth_service.py +193 -0
- kairo/backend/services/email_service.py +55 -0
- kairo/backend/services/image_service.py +181 -0
- kairo/backend/services/llm_service.py +186 -0
- kairo/backend/services/project_service.py +109 -0
- kairo/backend/services/status_service.py +167 -0
- kairo/backend/services/stripe_service.py +78 -0
- kairo/backend/services/usage_service.py +150 -0
- kairo/backend/services/web_search_service.py +96 -0
- kairo/migrations/env.py +60 -0
- kairo/migrations/versions/001_initial.py +55 -0
- kairo/migrations/versions/002_usage_tracking_and_indexes.py +66 -0
- kairo/migrations/versions/003_username_to_email.py +21 -0
- kairo/migrations/versions/004_add_plans_and_verification.py +67 -0
- kairo/migrations/versions/005_add_projects.py +52 -0
- kairo/migrations/versions/006_add_image_generation.py +63 -0
- kairo/migrations/versions/007_add_admin_portal.py +107 -0
- kairo/migrations/versions/008_add_device_code_auth.py +76 -0
- kairo/migrations/versions/009_add_status_page.py +65 -0
- kairo/tools/extract_claude_data.py +465 -0
- kairo/tools/filter_claude_data.py +303 -0
- kairo/tools/generate_curated_data.py +157 -0
- kairo/tools/mix_training_data.py +295 -0
- kairo_code/__init__.py +3 -0
- kairo_code/agents/__init__.py +25 -0
- kairo_code/agents/architect.py +98 -0
- kairo_code/agents/audit.py +100 -0
- kairo_code/agents/base.py +463 -0
- kairo_code/agents/coder.py +155 -0
- kairo_code/agents/database.py +77 -0
- kairo_code/agents/docs.py +88 -0
- kairo_code/agents/explorer.py +62 -0
- kairo_code/agents/guardian.py +80 -0
- kairo_code/agents/planner.py +66 -0
- kairo_code/agents/reviewer.py +91 -0
- kairo_code/agents/security.py +94 -0
- kairo_code/agents/terraform.py +88 -0
- kairo_code/agents/testing.py +97 -0
- kairo_code/agents/uiux.py +88 -0
- kairo_code/auth.py +232 -0
- kairo_code/config.py +172 -0
- kairo_code/conversation.py +173 -0
- kairo_code/heartbeat.py +63 -0
- kairo_code/llm.py +291 -0
- kairo_code/logging_config.py +156 -0
- kairo_code/main.py +818 -0
- kairo_code/router.py +217 -0
- kairo_code/sandbox.py +248 -0
- kairo_code/settings.py +183 -0
- kairo_code/tools/__init__.py +51 -0
- kairo_code/tools/analysis.py +509 -0
- kairo_code/tools/base.py +417 -0
- kairo_code/tools/code.py +58 -0
- kairo_code/tools/definitions.py +617 -0
- kairo_code/tools/files.py +315 -0
- kairo_code/tools/review.py +390 -0
- kairo_code/tools/search.py +185 -0
- kairo_code/ui.py +418 -0
- kairo_code-0.1.0.dist-info/METADATA +13 -0
- kairo_code-0.1.0.dist-info/RECORD +144 -0
- kairo_code-0.1.0.dist-info/WHEEL +5 -0
- kairo_code-0.1.0.dist-info/entry_points.txt +2 -0
- kairo_code-0.1.0.dist-info/top_level.txt +4 -0
kairo/backend/services/admin/audit_service.py
@@ -0,0 +1,78 @@
+import logging
+from datetime import datetime
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from backend.models.audit_log import AuditLog
+
+logger = logging.getLogger(__name__)
+
+
+class AuditService:
+    def __init__(self, db: AsyncSession):
+        self.db = db
+
+    async def log(
+        self,
+        admin_user_id: str,
+        action: str,
+        target_type: str,
+        target_id: str | None = None,
+        details: dict | None = None,
+        result: str = "success",
+        ip_address: str | None = None,
+        user_agent: str | None = None,
+        session_id: str | None = None,
+    ) -> AuditLog:
+        entry = AuditLog(
+            admin_user_id=admin_user_id,
+            action=action,
+            target_type=target_type,
+            target_id=target_id,
+            details=details,
+            result=result,
+            ip_address=ip_address,
+            user_agent=user_agent,
+            session_id=session_id,
+        )
+        self.db.add(entry)
+        await self.db.commit()
+        await self.db.refresh(entry)
+        logger.info(
+            "Audit: admin=%s action=%s target=%s/%s result=%s",
+            admin_user_id, action, target_type, target_id, result,
+        )
+        return entry
+
+    async def list_logs(
+        self,
+        filters: dict | None = None,
+        cursor: str | None = None,
+        limit: int = 50,
+    ) -> list[AuditLog]:
+        stmt = select(AuditLog).order_by(AuditLog.created_at.desc())
+
+        if filters:
+            if filters.get("admin_id"):
+                stmt = stmt.where(AuditLog.admin_user_id == filters["admin_id"])
+            if filters.get("action"):
+                stmt = stmt.where(AuditLog.action == filters["action"])
+            if filters.get("target_type"):
+                stmt = stmt.where(AuditLog.target_type == filters["target_type"])
+            if filters.get("result"):
+                stmt = stmt.where(AuditLog.result == filters["result"])
+            if filters.get("date_from"):
+                stmt = stmt.where(AuditLog.created_at >= filters["date_from"])
+            if filters.get("date_to"):
+                stmt = stmt.where(AuditLog.created_at <= filters["date_to"])
+
+        if cursor:
+            # Cursor is the id of the last item seen; fetch older entries
+            cursor_log = await self.db.get(AuditLog, cursor)
+            if cursor_log:
+                stmt = stmt.where(AuditLog.created_at < cursor_log.created_at)
+
+        stmt = stmt.limit(limit)
+        result = await self.db.execute(stmt)
+        return list(result.scalars().all())
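For orientation, a minimal calling sketch for the audit service above. It is not part of the package: the `async_session_factory` argument stands in for however the application builds its `AsyncSession`s, and the module path assumes the same `backend` import root the file itself uses.

from backend.services.admin.audit_service import AuditService

async def record_admin_action(async_session_factory, admin_id: str) -> None:
    # async_session_factory is assumed to be an async_sessionmaker configured elsewhere
    async with async_session_factory() as db:
        svc = AuditService(db)
        await svc.log(
            admin_user_id=admin_id,
            action="user.suspend",
            target_type="user",
            target_id="user-123",
            details={"reason": "abuse"},
        )
        # Keyset pagination: the last entry's id is passed back as the cursor
        page = await svc.list_logs(filters={"action": "user.suspend"}, limit=50)
        if page:
            await svc.list_logs(filters={"action": "user.suspend"}, cursor=page[-1].id, limit=50)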
kairo/backend/services/admin/content_service.py
@@ -0,0 +1,119 @@
+import logging
+
+from sqlalchemy import func, select
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import selectinload
+
+from backend.models.conversation import Conversation, Message
+from backend.models.image_generation import ImageGeneration
+
+logger = logging.getLogger(__name__)
+
+
+class AdminContentService:
+    def __init__(self, db: AsyncSession):
+        self.db = db
+
+    async def list_conversations(
+        self,
+        search: str | None = None,
+        user_id: str | None = None,
+        cursor: str | None = None,
+        limit: int = 50,
+    ) -> list[dict]:
+        stmt = select(
+            Conversation,
+            func.count(Message.id).label("message_count"),
+        ).outerjoin(Message).group_by(Conversation.id).order_by(Conversation.updated_at.desc())
+
+        if search:
+            stmt = stmt.where(Conversation.title.ilike(f"%{search}%"))
+        if user_id:
+            stmt = stmt.where(Conversation.user_id == user_id)
+
+        if cursor:
+            cursor_conv = await self.db.get(Conversation, cursor)
+            if cursor_conv:
+                stmt = stmt.where(Conversation.updated_at < cursor_conv.updated_at)
+
+        stmt = stmt.limit(limit)
+        result = await self.db.execute(stmt)
+        rows = result.all()
+        return [
+            {
+                "id": conv.id,
+                "title": conv.title,
+                "model": conv.model,
+                "user_id": conv.user_id,
+                "message_count": msg_count,
+                "created_at": conv.created_at,
+                "updated_at": conv.updated_at,
+            }
+            for conv, msg_count in rows
+        ]
+
+    async def get_conversation_detail(self, conversation_id: str) -> dict | None:
+        stmt = (
+            select(Conversation)
+            .options(selectinload(Conversation.messages))
+            .where(Conversation.id == conversation_id)
+        )
+        result = await self.db.execute(stmt)
+        conv = result.scalar_one_or_none()
+        if not conv:
+            return None
+        return {
+            "id": conv.id,
+            "title": conv.title,
+            "model": conv.model,
+            "user_id": conv.user_id,
+            "created_at": conv.created_at,
+            "updated_at": conv.updated_at,
+            "messages": [
+                {
+                    "id": m.id,
+                    "role": m.role,
+                    "content": m.content,
+                    "image_url": m.image_url,
+                    "created_at": m.created_at,
+                }
+                for m in conv.messages
+            ],
+        }
+
+    async def delete_conversation(self, conversation_id: str) -> bool:
+        conv = await self.db.get(Conversation, conversation_id)
+        if not conv:
+            return False
+        await self.db.delete(conv)
+        await self.db.commit()
+        logger.info("Conversation %s deleted", conversation_id)
+        return True
+
+    async def list_recent_images(
+        self,
+        user_id: str | None = None,
+        cursor: str | None = None,
+        limit: int = 50,
+    ) -> list[ImageGeneration]:
+        stmt = select(ImageGeneration).order_by(ImageGeneration.created_at.desc())
+
+        if user_id:
+            stmt = stmt.where(ImageGeneration.user_id == user_id)
+        if cursor:
+            cursor_img = await self.db.get(ImageGeneration, cursor)
+            if cursor_img:
+                stmt = stmt.where(ImageGeneration.created_at < cursor_img.created_at)
+
+        stmt = stmt.limit(limit)
+        result = await self.db.execute(stmt)
+        return list(result.scalars().all())
+
+    async def delete_image(self, image_id: str) -> bool:
+        img = await self.db.get(ImageGeneration, image_id)
+        if not img:
+            return False
+        await self.db.delete(img)
+        await self.db.commit()
+        logger.info("Image %s deleted", image_id)
+        return True
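A hedged sketch of how the conversation cursor above could be used to page through all conversations; again the session factory is a placeholder, and the module path is an assumption based on the file's own `backend` imports.

from backend.services.admin.content_service import AdminContentService

async def dump_all_conversations(async_session_factory) -> list[dict]:
    out: list[dict] = []
    async with async_session_factory() as db:
        svc = AdminContentService(db)
        cursor = None
        while True:
            page = await svc.list_conversations(cursor=cursor, limit=100)
            if not page:
                break
            out.extend(page)
            cursor = page[-1]["id"]  # the last conversation id becomes the next cursor
    return out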
kairo/backend/services/admin/incident_service.py
@@ -0,0 +1,94 @@
+import logging
+import uuid
+from datetime import datetime, UTC
+
+from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from backend.models.incident import Incident
+
+logger = logging.getLogger(__name__)
+
+
+class IncidentService:
+    """Admin service for CRUD operations on incidents."""
+
+    def __init__(self, db: AsyncSession):
+        self.db = db
+
+    async def create_incident(
+        self,
+        title: str,
+        component: str,
+        description: str | None = None,
+        severity: str = "warning",
+    ) -> Incident:
+        """Create a new incident."""
+        incident = Incident(
+            id=str(uuid.uuid4()),
+            title=title,
+            description=description,
+            severity=severity,
+            component=component,
+            status="investigating",
+            started_at=datetime.now(UTC),
+            created_at=datetime.now(UTC),
+            updated_at=datetime.now(UTC),
+        )
+        self.db.add(incident)
+        await self.db.commit()
+        await self.db.refresh(incident)
+        logger.info("Incident created: id=%s title=%s component=%s severity=%s", incident.id, title, component, severity)
+        return incident
+
+    async def update_incident(self, incident_id: str, **kwargs) -> Incident | None:
+        """Update an existing incident's fields."""
+        incident = await self.get_incident(incident_id)
+        if not incident:
+            return None
+
+        allowed_fields = {"title", "description", "severity", "status", "resolved_at"}
+        for key, value in kwargs.items():
+            if key in allowed_fields and value is not None:
+                setattr(incident, key, value)
+
+        # Auto-set resolved_at when status changes to resolved
+        if kwargs.get("status") == "resolved" and not incident.resolved_at:
+            incident.resolved_at = datetime.now(UTC)
+
+        incident.updated_at = datetime.now(UTC)
+        await self.db.commit()
+        await self.db.refresh(incident)
+        logger.info("Incident updated: id=%s fields=%s", incident_id, list(kwargs.keys()))
+        return incident
+
+    async def resolve_incident(self, incident_id: str) -> Incident | None:
+        """Mark an incident as resolved."""
+        incident = await self.get_incident(incident_id)
+        if not incident:
+            return None
+
+        incident.status = "resolved"
+        incident.resolved_at = datetime.now(UTC)
+        incident.updated_at = datetime.now(UTC)
+        await self.db.commit()
+        await self.db.refresh(incident)
+        logger.info("Incident resolved: id=%s", incident_id)
+        return incident
+
+    async def list_incidents(
+        self, limit: int = 50, include_resolved: bool = False
+    ) -> list[Incident]:
+        """List incidents, optionally including resolved ones."""
+        stmt = select(Incident).order_by(Incident.started_at.desc())
+        if not include_resolved:
+            stmt = stmt.where(Incident.status != "resolved")
+        stmt = stmt.limit(limit)
+        result = await self.db.execute(stmt)
+        return list(result.scalars().all())
+
+    async def get_incident(self, incident_id: str) -> Incident | None:
+        """Get a single incident by ID."""
+        stmt = select(Incident).where(Incident.id == incident_id)
+        result = await self.db.execute(stmt)
+        return result.scalar_one_or_none()
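A minimal lifecycle sketch for the incident service, under the same placeholder session-factory assumption; only the status values that actually appear in the file ("investigating", "resolved") are relied on.

from backend.services.admin.incident_service import IncidentService

async def handle_outage(async_session_factory) -> None:
    async with async_session_factory() as db:
        svc = IncidentService(db)
        # New incidents start as "investigating" with started_at stamped
        incident = await svc.create_incident(
            title="Elevated error rate",
            component="vLLM",
            description="5xx spike on chat completions",
        )
        # Only allowlisted fields (title, description, severity, status, resolved_at) are applied
        await svc.update_incident(incident.id, description="Root cause: GPU node OOM")
        # resolve_incident stamps resolved_at and updated_at
        await svc.resolve_incident(incident.id)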
kairo/backend/services/admin/stats_service.py
@@ -0,0 +1,281 @@
+import logging
+from datetime import date, datetime, timedelta, timezone
+
+from sqlalchemy import func, select
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from backend.models.conversation import Conversation
+from backend.models.image_generation import ImageGeneration
+from backend.models.usage import UsageRecord
+from backend.models.user import User
+
+logger = logging.getLogger(__name__)
+
+
+def _fill_daily_counts(
+    rows: list[tuple], days: int, value_key: str = "count"
+) -> list[dict]:
+    """Fill gaps so every date in the range has an entry (zero if missing).
+
+    ``rows`` must be an iterable of objects with ``.date`` and a numeric
+    attribute whose name matches *value_key*. Returns a list of dicts
+    sorted by date ascending with keys ``date`` (str) and *value_key*.
+    """
+    today = date.today()
+    start = today - timedelta(days=days - 1)
+
+    lookup: dict[str, int] = {}
+    for r in rows:
+        # r.date may already be a date object or a string
+        d = str(r.date) if not isinstance(r.date, str) else r.date
+        lookup[d] = getattr(r, value_key, 0) or 0
+
+    result: list[dict] = []
+    current = start
+    while current <= today:
+        key = current.isoformat()
+        result.append({"date": key, value_key: lookup.get(key, 0)})
+        current += timedelta(days=1)
+    return result
+
+
+class AdminStatsService:
+    def __init__(self, db: AsyncSession):
+        self.db = db
+
+    async def get_overview(self) -> dict:
+        now = datetime.now(timezone.utc)
+        start_of_day = now.replace(hour=0, minute=0, second=0, microsecond=0)
+
+        # Total users
+        total_result = await self.db.execute(select(func.count(User.id)))
+        total_users = total_result.scalar() or 0
+
+        # Active users by time window (users with usage records)
+        active_today = await self._count_active_users(start_of_day)
+        active_7d = await self._count_active_users(now - timedelta(days=7))
+        active_30d = await self._count_active_users(now - timedelta(days=30))
+
+        # Plan counts
+        pro_result = await self.db.execute(
+            select(func.count(User.id)).where(User.plan == "pro")
+        )
+        pro_subscribers = pro_result.scalar() or 0
+
+        max_result = await self.db.execute(
+            select(func.count(User.id)).where(User.plan == "max")
+        )
+        max_subscribers = max_result.scalar() or 0
+
+        # Estimated MRR (Pro=$20, Max=$50)
+        mrr = (pro_subscribers * 20) + (max_subscribers * 50)
+
+        # Conversations created today
+        conv_result = await self.db.execute(
+            select(func.count(Conversation.id)).where(Conversation.created_at >= start_of_day)
+        )
+        conversations_today = conv_result.scalar() or 0
+
+        # Images generated today
+        img_result = await self.db.execute(
+            select(func.count(ImageGeneration.id)).where(ImageGeneration.created_at >= start_of_day)
+        )
+        images_today = img_result.scalar() or 0
+
+        return {
+            "total_users": total_users,
+            "active_today": active_today,
+            "active_7d": active_7d,
+            "active_30d": active_30d,
+            "pro_subscribers": pro_subscribers,
+            "max_subscribers": max_subscribers,
+            "mrr": mrr,
+            "conversations_today": conversations_today,
+            "images_today": images_today,
+        }
+
+    async def _count_active_users(self, since: datetime) -> int:
+        result = await self.db.execute(
+            select(func.count(func.distinct(UsageRecord.user_id))).where(
+                UsageRecord.created_at >= since
+            )
+        )
+        return result.scalar() or 0
+
+    async def get_user_growth(self, days: int = 30) -> list[dict]:
+        since = datetime.now(timezone.utc) - timedelta(days=days)
+        stmt = (
+            select(
+                func.date(User.created_at).label("date"),
+                func.count(User.id).label("signups"),
+            )
+            .where(User.created_at >= since)
+            .group_by(func.date(User.created_at))
+            .order_by(func.date(User.created_at))
+        )
+        result = await self.db.execute(stmt)
+        return [{"date": str(r.date), "signups": r.signups} for r in result.all()]
+
+    async def get_usage_stats(self, days: int = 30) -> list[dict]:
+        since = datetime.now(timezone.utc) - timedelta(days=days)
+        stmt = (
+            select(
+                func.date(UsageRecord.created_at).label("date"),
+                func.sum(UsageRecord.prompt_tokens + UsageRecord.completion_tokens).label("total_tokens"),
+            )
+            .where(UsageRecord.created_at >= since)
+            .group_by(func.date(UsageRecord.created_at))
+            .order_by(func.date(UsageRecord.created_at))
+        )
+        result = await self.db.execute(stmt)
+        return [{"date": str(r.date), "total_tokens": r.total_tokens or 0} for r in result.all()]
+
+    async def get_revenue_stats(self) -> dict:
+        free_result = await self.db.execute(
+            select(func.count(User.id)).where(User.plan == "free")
+        )
+        pro_result = await self.db.execute(
+            select(func.count(User.id)).where(User.plan == "pro")
+        )
+        max_result = await self.db.execute(
+            select(func.count(User.id)).where(User.plan == "max")
+        )
+        return {
+            "free_count": free_result.scalar() or 0,
+            "pro_count": pro_result.scalar() or 0,
+            "max_count": max_result.scalar() or 0,
+        }
+
+    async def get_top_users(self, limit: int = 20) -> list[dict]:
+        stmt = (
+            select(
+                UsageRecord.user_id,
+                User.email,
+                func.sum(UsageRecord.prompt_tokens + UsageRecord.completion_tokens).label("total_tokens"),
+            )
+            .join(User, UsageRecord.user_id == User.id)
+            .group_by(UsageRecord.user_id, User.email)
+            .order_by(func.sum(UsageRecord.prompt_tokens + UsageRecord.completion_tokens).desc())
+            .limit(limit)
+        )
+        result = await self.db.execute(stmt)
+        return [
+            {"user_id": r.user_id, "email": r.email, "total_tokens": r.total_tokens or 0}
+            for r in result.all()
+        ]
+
+    # ------------------------------------------------------------------ #
+    # Analytics endpoints                                                 #
+    # ------------------------------------------------------------------ #
+
+    async def get_daily_signups(self, days: int = 30) -> list[dict]:
+        """Daily signup counts for the past *days* days, zero-filled."""
+        since = datetime.now(timezone.utc) - timedelta(days=days - 1)
+        stmt = (
+            select(
+                func.date(User.created_at).label("date"),
+                func.count(User.id).label("count"),
+            )
+            .where(User.created_at >= since)
+            .group_by(func.date(User.created_at))
+            .order_by(func.date(User.created_at))
+        )
+        result = await self.db.execute(stmt)
+        return _fill_daily_counts(result.all(), days, "count")
+
+    async def get_daily_active_users(self, days: int = 30) -> list[dict]:
+        """Daily active user counts (distinct users with usage records)."""
+        since = datetime.now(timezone.utc) - timedelta(days=days - 1)
+        stmt = (
+            select(
+                func.date(UsageRecord.created_at).label("date"),
+                func.count(func.distinct(UsageRecord.user_id)).label("count"),
+            )
+            .where(UsageRecord.created_at >= since)
+            .group_by(func.date(UsageRecord.created_at))
+            .order_by(func.date(UsageRecord.created_at))
+        )
+        result = await self.db.execute(stmt)
+        return _fill_daily_counts(result.all(), days, "count")
+
+    async def get_usage_over_time(self, days: int = 30) -> list[dict]:
+        """Daily total tokens (prompt + completion) for the past *days* days."""
+        since = datetime.now(timezone.utc) - timedelta(days=days - 1)
+        stmt = (
+            select(
+                func.date(UsageRecord.created_at).label("date"),
+                func.sum(
+                    UsageRecord.prompt_tokens + UsageRecord.completion_tokens
+                ).label("tokens"),
+            )
+            .where(UsageRecord.created_at >= since)
+            .group_by(func.date(UsageRecord.created_at))
+            .order_by(func.date(UsageRecord.created_at))
+        )
+        result = await self.db.execute(stmt)
+        return _fill_daily_counts(result.all(), days, "tokens")
+
+    async def get_plan_distribution(self) -> list[dict]:
+        """Current count of users grouped by plan."""
+        stmt = (
+            select(
+                User.plan.label("plan"),
+                func.count(User.id).label("count"),
+            )
+            .group_by(User.plan)
+            .order_by(func.count(User.id).desc())
+        )
+        result = await self.db.execute(stmt)
+        return [{"plan": r.plan, "count": r.count} for r in result.all()]
+
+    async def get_top_users_analytics(self, limit: int = 20) -> list[dict]:
+        """Top N users by total token usage with conversation and image counts.
+
+        Uses correlated scalar sub-selects for conversations and images to
+        avoid row multiplication from multiple JOINs.
+        """
+        # Subquery: conversation count per user
+        conv_sub = (
+            select(func.count(Conversation.id))
+            .where(Conversation.user_id == UsageRecord.user_id)
+            .correlate(UsageRecord)
+            .scalar_subquery()
+        )
+        # Subquery: image generation count per user
+        img_sub = (
+            select(func.count(ImageGeneration.id))
+            .where(ImageGeneration.user_id == UsageRecord.user_id)
+            .correlate(UsageRecord)
+            .scalar_subquery()
+        )
+
+        stmt = (
+            select(
+                User.email,
+                User.plan,
+                func.sum(
+                    UsageRecord.prompt_tokens + UsageRecord.completion_tokens
+                ).label("total_tokens"),
+                conv_sub.label("conversations"),
+                img_sub.label("images"),
+            )
+            .join(User, UsageRecord.user_id == User.id)
+            .group_by(UsageRecord.user_id, User.email, User.plan)
+            .order_by(
+                func.sum(
+                    UsageRecord.prompt_tokens + UsageRecord.completion_tokens
+                ).desc()
+            )
+            .limit(limit)
+        )
+        result = await self.db.execute(stmt)
+        return [
+            {
+                "email": r.email,
+                "plan": r.plan,
+                "total_tokens": r.total_tokens or 0,
+                "conversations": r.conversations or 0,
+                "images": r.images or 0,
+            }
+            for r in result.all()
+        ]
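`_fill_daily_counts` is the only pure helper in the module, so its behavior can be checked in isolation. In the sketch below, `Row` merely stands in for a SQLAlchemy result row exposing `.date` and the chosen value attribute, and the import path assumes the same `backend` root used inside the file.

from collections import namedtuple
from datetime import date, timedelta

from backend.services.admin.stats_service import _fill_daily_counts

Row = namedtuple("Row", ["date", "count"])

today = date.today()
rows = [Row(date=today - timedelta(days=2), count=4), Row(date=today, count=7)]

# With days=3 the missing middle day is zero-filled and dates come back as ISO strings
series = _fill_daily_counts(rows, days=3, value_key="count")
assert [point["count"] for point in series] == [4, 0, 7]
assert series[0]["date"] == (today - timedelta(days=2)).isoformat()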
kairo/backend/services/admin/system_service.py
@@ -0,0 +1,126 @@
+import logging
+import time
+from datetime import datetime, timezone
+
+import httpx
+from sqlalchemy import select, text
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from backend.config import settings
+from backend.models.feature_flag import FeatureFlag
+
+logger = logging.getLogger(__name__)
+
+
+class AdminSystemService:
+    def __init__(self, db: AsyncSession):
+        self.db = db
+
+    async def get_health(self) -> dict:
+        services = []
+
+        # Database check
+        try:
+            t0 = time.monotonic()
+            await self.db.execute(text("SELECT 1"))
+            latency = int((time.monotonic() - t0) * 1000)
+            services.append({"name": "Database", "status": "healthy", "latency_ms": latency})
+        except Exception as e:
+            services.append({"name": "Database", "status": "down", "message": str(e)[:100]})
+
+        async with httpx.AsyncClient(timeout=5.0) as client:
+            # vLLM check
+            try:
+                t0 = time.monotonic()
+                resp = await client.get(f"{settings.VLLM_BASE_URL}/health")
+                latency = int((time.monotonic() - t0) * 1000)
+                status = "healthy" if resp.status_code == 200 else "degraded"
+                services.append({"name": "vLLM", "status": status, "latency_ms": latency})
+            except Exception:
+                services.append({"name": "vLLM", "status": "down", "message": "Connection failed"})
+
+            # vLLM Lite check
+            if settings.VLLM_LITE_BASE_URL:
+                try:
+                    t0 = time.monotonic()
+                    resp = await client.get(f"{settings.VLLM_LITE_BASE_URL}/health")
+                    latency = int((time.monotonic() - t0) * 1000)
+                    status = "healthy" if resp.status_code == 200 else "degraded"
+                    services.append({"name": "vLLM Lite", "status": status, "latency_ms": latency})
+                except Exception:
+                    services.append({"name": "vLLM Lite", "status": "down", "message": "Connection failed"})
+
+            # FLUX check
+            if settings.FLUX_BASE_URL:
+                try:
+                    t0 = time.monotonic()
+                    resp = await client.get(f"{settings.FLUX_BASE_URL}/health")
+                    latency = int((time.monotonic() - t0) * 1000)
+                    status = "healthy" if resp.status_code == 200 else "degraded"
+                    services.append({"name": "FLUX Image", "status": status, "latency_ms": latency})
+                except Exception:
+                    services.append({"name": "FLUX Image", "status": "down", "message": "Connection failed"})
+
+        # Determine overall status
+        statuses = [s["status"] for s in services]
+        if all(s == "healthy" for s in statuses):
+            overall = "healthy"
+        elif any(s == "down" for s in statuses):
+            overall = "degraded"
+        else:
+            overall = "degraded"
+
+        return {
+            "overall": overall,
+            "services": services,
+            "checked_at": datetime.now(timezone.utc).isoformat(),
+        }
+
+    async def get_feature_flags(self) -> list[FeatureFlag]:
+        stmt = select(FeatureFlag).order_by(FeatureFlag.key)
+        result = await self.db.execute(stmt)
+        return list(result.scalars().all())
+
+    async def toggle_feature_flag(
+        self, key: str, enabled: bool, admin_user_id: str
+    ) -> FeatureFlag | None:
+        stmt = select(FeatureFlag).where(FeatureFlag.key == key)
+        result = await self.db.execute(stmt)
+        flag = result.scalar_one_or_none()
+        if not flag:
+            return None
+        flag.enabled = enabled
+        flag.updated_by = admin_user_id
+        flag.updated_at = datetime.now(timezone.utc)
+        await self.db.commit()
+        await self.db.refresh(flag)
+        logger.info("Feature flag %s set to %s by %s", key, enabled, admin_user_id)
+        return flag
+
+    async def get_config(self) -> list[dict]:
+        """Return allowlisted non-secret config values."""
+        allowlist = [
+            "VLLM_BASE_URL",
+            "VLLM_LITE_BASE_URL",
+            "HOST",
+            "PORT",
+            "LOG_LEVEL",
+            "JWT_EXPIRE_HOURS",
+            "DEFAULT_DAILY_TOKEN_LIMIT",
+            "DEFAULT_MONTHLY_TOKEN_LIMIT",
+            "PRO_DAILY_TOKEN_LIMIT",
+            "PRO_MONTHLY_TOKEN_LIMIT",
+            "MAX_DAILY_TOKEN_LIMIT",
+            "MAX_MONTHLY_TOKEN_LIMIT",
+            "PRO_DAILY_IMAGE_LIMIT",
+            "PRO_MONTHLY_IMAGE_LIMIT",
+            "MAX_DAILY_IMAGE_LIMIT",
+            "MAX_MONTHLY_IMAGE_LIMIT",
+            "AGENT_OFFLINE_THRESHOLD_SECONDS",
+            "S3_IMAGES_REGION",
+        ]
+        entries = []
+        for key in allowlist:
+            if hasattr(settings, key):
+                entries.append({"key": key, "value": str(getattr(settings, key))})
+        return entries
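And a final calling sketch for the system service, under the same placeholder session-factory assumption. Note that in the overall-status logic above, a "down" dependency and a merely "degraded" one both map to an overall status of "degraded".

from backend.services.admin.system_service import AdminSystemService

async def print_system_health(async_session_factory) -> None:
    async with async_session_factory() as db:
        report = await AdminSystemService(db).get_health()
        print("overall:", report["overall"])
        for svc in report["services"]:
            # latency_ms is absent when a check fails, so fall back to "n/a"
            print(f'{svc["name"]}: {svc["status"]} ({svc.get("latency_ms", "n/a")} ms)')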