kairo_code-0.1.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- image-service/main.py +178 -0
- infra/chat/app/main.py +84 -0
- kairo/backend/__init__.py +0 -0
- kairo/backend/api/__init__.py +0 -0
- kairo/backend/api/admin/__init__.py +23 -0
- kairo/backend/api/admin/audit.py +54 -0
- kairo/backend/api/admin/content.py +142 -0
- kairo/backend/api/admin/incidents.py +148 -0
- kairo/backend/api/admin/stats.py +125 -0
- kairo/backend/api/admin/system.py +87 -0
- kairo/backend/api/admin/users.py +279 -0
- kairo/backend/api/agents.py +94 -0
- kairo/backend/api/api_keys.py +85 -0
- kairo/backend/api/auth.py +116 -0
- kairo/backend/api/billing.py +41 -0
- kairo/backend/api/chat.py +72 -0
- kairo/backend/api/conversations.py +125 -0
- kairo/backend/api/device_auth.py +100 -0
- kairo/backend/api/files.py +83 -0
- kairo/backend/api/health.py +36 -0
- kairo/backend/api/images.py +80 -0
- kairo/backend/api/openai_compat.py +225 -0
- kairo/backend/api/projects.py +102 -0
- kairo/backend/api/usage.py +32 -0
- kairo/backend/api/webhooks.py +79 -0
- kairo/backend/app.py +297 -0
- kairo/backend/config.py +179 -0
- kairo/backend/core/__init__.py +0 -0
- kairo/backend/core/admin_auth.py +24 -0
- kairo/backend/core/api_key_auth.py +55 -0
- kairo/backend/core/database.py +28 -0
- kairo/backend/core/dependencies.py +70 -0
- kairo/backend/core/logging.py +23 -0
- kairo/backend/core/rate_limit.py +73 -0
- kairo/backend/core/security.py +29 -0
- kairo/backend/models/__init__.py +19 -0
- kairo/backend/models/agent.py +30 -0
- kairo/backend/models/api_key.py +25 -0
- kairo/backend/models/api_usage.py +29 -0
- kairo/backend/models/audit_log.py +26 -0
- kairo/backend/models/conversation.py +48 -0
- kairo/backend/models/device_code.py +30 -0
- kairo/backend/models/feature_flag.py +21 -0
- kairo/backend/models/image_generation.py +24 -0
- kairo/backend/models/incident.py +28 -0
- kairo/backend/models/project.py +28 -0
- kairo/backend/models/uptime_record.py +24 -0
- kairo/backend/models/usage.py +24 -0
- kairo/backend/models/user.py +49 -0
- kairo/backend/schemas/__init__.py +0 -0
- kairo/backend/schemas/admin/__init__.py +0 -0
- kairo/backend/schemas/admin/audit.py +28 -0
- kairo/backend/schemas/admin/content.py +53 -0
- kairo/backend/schemas/admin/stats.py +77 -0
- kairo/backend/schemas/admin/system.py +44 -0
- kairo/backend/schemas/admin/users.py +48 -0
- kairo/backend/schemas/agent.py +42 -0
- kairo/backend/schemas/api_key.py +30 -0
- kairo/backend/schemas/auth.py +57 -0
- kairo/backend/schemas/chat.py +26 -0
- kairo/backend/schemas/conversation.py +39 -0
- kairo/backend/schemas/device_auth.py +40 -0
- kairo/backend/schemas/image.py +15 -0
- kairo/backend/schemas/openai_compat.py +76 -0
- kairo/backend/schemas/project.py +21 -0
- kairo/backend/schemas/status.py +81 -0
- kairo/backend/schemas/usage.py +15 -0
- kairo/backend/services/__init__.py +0 -0
- kairo/backend/services/admin/__init__.py +0 -0
- kairo/backend/services/admin/audit_service.py +78 -0
- kairo/backend/services/admin/content_service.py +119 -0
- kairo/backend/services/admin/incident_service.py +94 -0
- kairo/backend/services/admin/stats_service.py +281 -0
- kairo/backend/services/admin/system_service.py +126 -0
- kairo/backend/services/admin/user_service.py +157 -0
- kairo/backend/services/agent_service.py +107 -0
- kairo/backend/services/api_key_service.py +66 -0
- kairo/backend/services/api_usage_service.py +126 -0
- kairo/backend/services/auth_service.py +101 -0
- kairo/backend/services/chat_service.py +501 -0
- kairo/backend/services/conversation_service.py +264 -0
- kairo/backend/services/device_auth_service.py +193 -0
- kairo/backend/services/email_service.py +55 -0
- kairo/backend/services/image_service.py +181 -0
- kairo/backend/services/llm_service.py +186 -0
- kairo/backend/services/project_service.py +109 -0
- kairo/backend/services/status_service.py +167 -0
- kairo/backend/services/stripe_service.py +78 -0
- kairo/backend/services/usage_service.py +150 -0
- kairo/backend/services/web_search_service.py +96 -0
- kairo/migrations/env.py +60 -0
- kairo/migrations/versions/001_initial.py +55 -0
- kairo/migrations/versions/002_usage_tracking_and_indexes.py +66 -0
- kairo/migrations/versions/003_username_to_email.py +21 -0
- kairo/migrations/versions/004_add_plans_and_verification.py +67 -0
- kairo/migrations/versions/005_add_projects.py +52 -0
- kairo/migrations/versions/006_add_image_generation.py +63 -0
- kairo/migrations/versions/007_add_admin_portal.py +107 -0
- kairo/migrations/versions/008_add_device_code_auth.py +76 -0
- kairo/migrations/versions/009_add_status_page.py +65 -0
- kairo/tools/extract_claude_data.py +465 -0
- kairo/tools/filter_claude_data.py +303 -0
- kairo/tools/generate_curated_data.py +157 -0
- kairo/tools/mix_training_data.py +295 -0
- kairo_code/__init__.py +3 -0
- kairo_code/agents/__init__.py +25 -0
- kairo_code/agents/architect.py +98 -0
- kairo_code/agents/audit.py +100 -0
- kairo_code/agents/base.py +463 -0
- kairo_code/agents/coder.py +155 -0
- kairo_code/agents/database.py +77 -0
- kairo_code/agents/docs.py +88 -0
- kairo_code/agents/explorer.py +62 -0
- kairo_code/agents/guardian.py +80 -0
- kairo_code/agents/planner.py +66 -0
- kairo_code/agents/reviewer.py +91 -0
- kairo_code/agents/security.py +94 -0
- kairo_code/agents/terraform.py +88 -0
- kairo_code/agents/testing.py +97 -0
- kairo_code/agents/uiux.py +88 -0
- kairo_code/auth.py +232 -0
- kairo_code/config.py +172 -0
- kairo_code/conversation.py +173 -0
- kairo_code/heartbeat.py +63 -0
- kairo_code/llm.py +291 -0
- kairo_code/logging_config.py +156 -0
- kairo_code/main.py +818 -0
- kairo_code/router.py +217 -0
- kairo_code/sandbox.py +248 -0
- kairo_code/settings.py +183 -0
- kairo_code/tools/__init__.py +51 -0
- kairo_code/tools/analysis.py +509 -0
- kairo_code/tools/base.py +417 -0
- kairo_code/tools/code.py +58 -0
- kairo_code/tools/definitions.py +617 -0
- kairo_code/tools/files.py +315 -0
- kairo_code/tools/review.py +390 -0
- kairo_code/tools/search.py +185 -0
- kairo_code/ui.py +418 -0
- kairo_code-0.1.0.dist-info/METADATA +13 -0
- kairo_code-0.1.0.dist-info/RECORD +144 -0
- kairo_code-0.1.0.dist-info/WHEEL +5 -0
- kairo_code-0.1.0.dist-info/entry_points.txt +2 -0
- kairo_code-0.1.0.dist-info/top_level.txt +4 -0

kairo/backend/services/web_search_service.py
ADDED
@@ -0,0 +1,96 @@
import logging
import re
from html import unescape

import httpx

logger = logging.getLogger(__name__)


async def web_search(query: str, max_results: int = 5) -> list[dict]:
    """Search the web via DuckDuckGo HTML and return results."""
    results = []

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            resp = await client.post(
                "https://html.duckduckgo.com/html/",
                data={"q": query, "b": ""},
                headers={
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
                },
            )
            resp.raise_for_status()
            html = resp.text

        # Parse results using regex (avoid BeautifulSoup dependency)
        # DuckDuckGo HTML results have class="result__a" for links and class="result__snippet" for snippets
        link_pattern = re.compile(
            r'<a[^>]+class="result__a"[^>]*href="([^"]*)"[^>]*>(.*?)</a>',
            re.DOTALL,
        )
        snippet_pattern = re.compile(
            r'<a[^>]+class="result__snippet"[^>]*>(.*?)</a>',
            re.DOTALL,
        )

        links = link_pattern.findall(html)
        snippets = snippet_pattern.findall(html)

        for i, (url, title) in enumerate(links[:max_results]):
            # Clean HTML tags and decode entities
            clean_title = unescape(re.sub(r"<[^>]+>", "", title)).strip()
            clean_snippet = ""
            if i < len(snippets):
                clean_snippet = unescape(re.sub(r"<[^>]+>", "", snippets[i])).strip()

            # DuckDuckGo uses redirect URLs - extract actual URL
            actual_url = url
            uddg_match = re.search(r"uddg=([^&]+)", url)
            if uddg_match:
                from urllib.parse import unquote
                actual_url = unquote(uddg_match.group(1))

            if clean_title and actual_url:
                results.append({
                    "title": clean_title,
                    "url": actual_url,
                    "snippet": clean_snippet,
                })

    except Exception as e:
        logger.warning("Web search failed for '%s': %s", query, e)

    return results


def format_search_results(results: list[dict]) -> str:
    """Format search results for injection into the LLM context."""
    if not results:
        return ""

    parts = [
        "=== WEB SEARCH RESULTS (AUTHORITATIVE — USE THESE AS YOUR PRIMARY SOURCE) ===",
        "",
    ]
    for i, r in enumerate(results, 1):
        parts.append(f"Result {i}: {r['title']}")
        if r.get("snippet"):
            parts.append(f"  Summary: {r['snippet']}")
        parts.append(f"  URL: {r['url']}")
        parts.append("")

    parts.append("=== END OF SEARCH RESULTS ===")
    parts.append("")
    parts.append(
        "CRITICAL INSTRUCTIONS FOR USING THESE RESULTS:\n"
        "1. Base your ENTIRE answer on the search results above. They are FRESH and AUTHORITATIVE.\n"
        "2. Your training data is OUTDATED and UNRELIABLE for this topic. Do NOT fall back to it.\n"
        "3. If search results say something is FREE or does NOT require signup, trust them — do NOT claim otherwise.\n"
        "4. If search results show an API works a certain way, describe it EXACTLY as shown — do NOT invent parameters, "
        "endpoints, headers, or authentication requirements that are not mentioned in the results.\n"
        "5. Do NOT fabricate URLs. Only share URLs that appear in the search results above.\n"
        "6. If the search results do not contain enough information, say so honestly. Do NOT guess or fill gaps with training data.\n"
        "7. Integrate the information naturally into your response. Do NOT use [Source N] citations."
    )
    return "\n".join(parts)
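
For context, a minimal driver for the two helpers above, offered only as a sketch: the import path is inferred from the file location in this diff, and the query string and result count are illustrative, not values used by the package.

import asyncio

# Assumption: the module is importable under the path shown in the file list above.
from kairo.backend.services.web_search_service import format_search_results, web_search


async def main() -> None:
    # Illustrative query; web_search returns [] on any network or parsing failure.
    results = await web_search("alembic async migrations", max_results=3)
    block = format_search_results(results)
    print(block or "No results returned.")


if __name__ == "__main__":
    asyncio.run(main())
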
kairo/migrations/env.py
ADDED
@@ -0,0 +1,60 @@
import asyncio
import os
from logging.config import fileConfig

from alembic import context
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config

from backend.core.database import Base
from backend.models import Conversation, Message, User, UsageRecord, Project, ImageGeneration  # noqa: F401

config = context.config
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override with env var if available
db_url = os.environ.get("KAIRO_DATABASE_URL")
if db_url:
    config.set_main_option("sqlalchemy.url", db_url)

target_metadata = Base.metadata


def run_migrations_offline() -> None:
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection):
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online() -> None:
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
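
For reference, a minimal sketch of invoking this env.py through the Alembic API; the alembic.ini location and the example database URL are assumptions for illustration, not values taken from the package.

import os

from alembic import command
from alembic.config import Config

# Placeholder URL; env.py above overrides sqlalchemy.url with KAIRO_DATABASE_URL when set.
os.environ["KAIRO_DATABASE_URL"] = "postgresql+asyncpg://kairo:kairo@localhost/kairo"

cfg = Config("alembic.ini")   # assumed ini path at the project root
command.upgrade(cfg, "head")  # applies revisions 001 through 008 in order
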

kairo/migrations/versions/001_initial.py
ADDED
@@ -0,0 +1,55 @@
"""Initial schema

Revision ID: 001
Revises:
Create Date: 2025-01-01 00:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "001"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "users",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("username", sa.String(), unique=True, nullable=False),
        sa.Column("hashed_password", sa.String(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
    )

    op.create_table(
        "conversations",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("title", sa.String(), nullable=False),
        sa.Column("model", sa.String(), nullable=False),
        sa.Column("user_id", sa.String(), sa.ForeignKey("users.id"), nullable=True),
        sa.Column("summary", sa.Text(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
    )

    op.create_table(
        "messages",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column(
            "conversation_id",
            sa.String(),
            sa.ForeignKey("conversations.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("role", sa.String(), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
    )


def downgrade() -> None:
    op.drop_table("messages")
    op.drop_table("conversations")
    op.drop_table("users")

kairo/migrations/versions/002_usage_tracking_and_indexes.py
ADDED
@@ -0,0 +1,66 @@
"""Usage tracking and indexes

Revision ID: 002
Revises: 001
Create Date: 2025-01-15 00:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "002"
down_revision = "001"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # usage_records table
    op.create_table(
        "usage_records",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column(
            "user_id",
            sa.String(),
            sa.ForeignKey("users.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "conversation_id",
            sa.String(),
            sa.ForeignKey("conversations.id", ondelete="SET NULL"),
            nullable=True,
        ),
        sa.Column("model", sa.String(), nullable=False),
        sa.Column("prompt_tokens", sa.Integer(), nullable=False),
        sa.Column("completion_tokens", sa.Integer(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
    )

    # Add limit columns to users
    op.add_column(
        "users",
        sa.Column("daily_token_limit", sa.Integer(), nullable=False, server_default="100000"),
    )
    op.add_column(
        "users",
        sa.Column("monthly_token_limit", sa.Integer(), nullable=False, server_default="2000000"),
    )

    # Performance indexes
    op.create_index("ix_conversations_user_id", "conversations", ["user_id"])
    op.create_index("ix_messages_conversation_id", "messages", ["conversation_id"])
    op.create_index("ix_messages_created_at", "messages", ["created_at"])
    op.create_index("ix_usage_records_user_id", "usage_records", ["user_id"])
    op.create_index("ix_usage_records_user_date", "usage_records", ["user_id", "created_at"])


def downgrade() -> None:
    op.drop_index("ix_usage_records_user_date")
    op.drop_index("ix_usage_records_user_id")
    op.drop_index("ix_messages_created_at")
    op.drop_index("ix_messages_conversation_id")
    op.drop_index("ix_conversations_user_id")
    op.drop_column("users", "monthly_token_limit")
    op.drop_column("users", "daily_token_limit")
    op.drop_table("usage_records")

kairo/migrations/versions/003_username_to_email.py
ADDED
@@ -0,0 +1,21 @@
"""Rename username to email

Revision ID: 003
Revises: 002
Create Date: 2026-01-27 00:00:00.000000
"""

from alembic import op

revision = "003"
down_revision = "002"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.alter_column("users", "username", new_column_name="email")


def downgrade() -> None:
    op.alter_column("users", "email", new_column_name="username")

kairo/migrations/versions/004_add_plans_and_verification.py
ADDED
@@ -0,0 +1,67 @@
"""Add plans, stripe, email verification, password reset

Revision ID: 004
Revises: 003
Create Date: 2026-01-28 00:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "004"
down_revision = "003"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column(
        "users",
        sa.Column("plan", sa.String(), nullable=False, server_default="free"),
    )
    op.add_column(
        "users",
        sa.Column("stripe_customer_id", sa.String(), nullable=True),
    )
    op.add_column(
        "users",
        sa.Column("stripe_subscription_id", sa.String(), nullable=True),
    )
    op.add_column(
        "users",
        sa.Column("email_verified", sa.Boolean(), nullable=False, server_default="false"),
    )
    op.add_column(
        "users",
        sa.Column("email_verification_token", sa.String(), nullable=True),
    )
    op.add_column(
        "users",
        sa.Column("password_reset_token", sa.String(), nullable=True),
    )
    op.add_column(
        "users",
        sa.Column("password_reset_expires", sa.DateTime(timezone=True), nullable=True),
    )
    op.create_index(
        "ix_users_stripe_customer_id", "users", ["stripe_customer_id"], unique=True,
    )
    op.create_index(
        "ix_users_email_verification_token", "users", ["email_verification_token"],
    )
    op.create_index(
        "ix_users_password_reset_token", "users", ["password_reset_token"],
    )


def downgrade() -> None:
    op.drop_index("ix_users_password_reset_token", table_name="users")
    op.drop_index("ix_users_email_verification_token", table_name="users")
    op.drop_index("ix_users_stripe_customer_id", table_name="users")
    op.drop_column("users", "password_reset_expires")
    op.drop_column("users", "password_reset_token")
    op.drop_column("users", "email_verification_token")
    op.drop_column("users", "email_verified")
    op.drop_column("users", "stripe_subscription_id")
    op.drop_column("users", "stripe_customer_id")
    op.drop_column("users", "plan")

kairo/migrations/versions/005_add_projects.py
ADDED
@@ -0,0 +1,52 @@
"""Add projects table and link conversations to projects

Revision ID: 005
Revises: 004
Create Date: 2026-01-29 00:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "005"
down_revision = "004"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.create_table(
        "projects",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("instructions", sa.Text(), nullable=True),
        sa.Column(
            "user_id",
            sa.String(),
            sa.ForeignKey("users.id"),
            nullable=False,
        ),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
    )
    op.create_index("ix_projects_user_id", "projects", ["user_id"])

    op.add_column(
        "conversations",
        sa.Column(
            "project_id",
            sa.String(),
            sa.ForeignKey("projects.id", ondelete="SET NULL"),
            nullable=True,
        ),
    )
    op.create_index(
        "ix_conversations_project_id", "conversations", ["project_id"]
    )


def downgrade() -> None:
    op.drop_index("ix_conversations_project_id", table_name="conversations")
    op.drop_column("conversations", "project_id")
    op.drop_index("ix_projects_user_id", table_name="projects")
    op.drop_table("projects")

kairo/migrations/versions/006_add_image_generation.py
ADDED
@@ -0,0 +1,63 @@
"""Add image generation support

Revision ID: 006
Revises: 005
Create Date: 2026-01-29 12:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "006"
down_revision = "005"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Add image_url to messages
    op.add_column(
        "messages",
        sa.Column("image_url", sa.String(), nullable=True),
    )

    # Create image_generations table
    op.create_table(
        "image_generations",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column(
            "user_id",
            sa.String(),
            sa.ForeignKey("users.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "conversation_id",
            sa.String(),
            sa.ForeignKey("conversations.id", ondelete="SET NULL"),
            nullable=True,
        ),
        sa.Column("prompt", sa.Text(), nullable=False),
        sa.Column("image_url", sa.String(), nullable=False),
        sa.Column("width", sa.Integer(), nullable=False, server_default="1024"),
        sa.Column("height", sa.Integer(), nullable=False, server_default="1024"),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
    )
    op.create_index(
        "ix_image_generations_user_id", "image_generations", ["user_id"]
    )
    op.create_index(
        "ix_image_generations_created_at", "image_generations", ["created_at"]
    )


def downgrade() -> None:
    op.drop_index("ix_image_generations_created_at", table_name="image_generations")
    op.drop_index("ix_image_generations_user_id", table_name="image_generations")
    op.drop_table("image_generations")
    op.drop_column("messages", "image_url")

kairo/migrations/versions/007_add_admin_portal.py
ADDED
@@ -0,0 +1,107 @@
"""Add admin portal: role, status, audit_logs, feature_flags

Revision ID: 007
Revises: 006
Create Date: 2026-01-30 00:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "007"
down_revision = "006"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Add role and status to users
    op.add_column(
        "users",
        sa.Column("role", sa.String(), nullable=False, server_default="user"),
    )
    op.add_column(
        "users",
        sa.Column("status", sa.String(), nullable=False, server_default="active"),
    )
    op.create_index("ix_users_role", "users", ["role"])
    op.create_index("ix_users_status", "users", ["status"])

    # Create audit_logs table
    op.create_table(
        "audit_logs",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column(
            "admin_user_id",
            sa.String(),
            sa.ForeignKey("users.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("action", sa.String(), nullable=False),
        sa.Column("target_type", sa.String(), nullable=False),
        sa.Column("target_id", sa.String(), nullable=True),
        sa.Column("details", sa.JSON(), nullable=True),
        sa.Column("result", sa.String(), nullable=False, server_default="success"),
        sa.Column("ip_address", sa.String(), nullable=True),
        sa.Column("user_agent", sa.Text(), nullable=True),
        sa.Column("session_id", sa.String(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
    )
    op.create_index("ix_audit_logs_admin_user_id", "audit_logs", ["admin_user_id"])
    op.create_index("ix_audit_logs_action", "audit_logs", ["action"])
    op.create_index("ix_audit_logs_target_type", "audit_logs", ["target_type"])
    op.create_index("ix_audit_logs_created_at", "audit_logs", ["created_at"])

    # Create feature_flags table
    op.create_table(
        "feature_flags",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("key", sa.String(), nullable=False, unique=True),
        sa.Column("enabled", sa.Boolean(), nullable=False, server_default="false"),
        sa.Column(
            "updated_by",
            sa.String(),
            sa.ForeignKey("users.id", ondelete="SET NULL"),
            nullable=True,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
    )

    # Seed initial feature flags
    import uuid

    op.execute(
        sa.text(
            "INSERT INTO feature_flags (id, key, enabled) VALUES "
            "(:id1, 'FEATURE_KAIRO_API_ENABLED', false), "
            "(:id2, 'FEATURE_KAIRO_AGENTS_ENABLED', false), "
            "(:id3, 'FEATURE_IMAGE_GEN_ENABLED', false)"
        ).bindparams(
            id1=str(uuid.uuid4()),
            id2=str(uuid.uuid4()),
            id3=str(uuid.uuid4()),
        )
    )


def downgrade() -> None:
    op.drop_table("feature_flags")
    op.drop_index("ix_audit_logs_created_at", table_name="audit_logs")
    op.drop_index("ix_audit_logs_target_type", table_name="audit_logs")
    op.drop_index("ix_audit_logs_action", table_name="audit_logs")
    op.drop_index("ix_audit_logs_admin_user_id", table_name="audit_logs")
    op.drop_table("audit_logs")
    op.drop_index("ix_users_status", table_name="users")
    op.drop_index("ix_users_role", table_name="users")
    op.drop_column("users", "status")
    op.drop_column("users", "role")

kairo/migrations/versions/008_add_device_code_auth.py
ADDED
@@ -0,0 +1,76 @@
"""Add device code auth flow (RFC 8628)

Revision ID: 008
Revises: 007
Create Date: 2026-01-31 00:00:00.000000
"""

from alembic import op
import sqlalchemy as sa

revision = "008"
down_revision = "007"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Create device_codes table
    op.create_table(
        "device_codes",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("device_code", sa.String(), nullable=False, unique=True),
        sa.Column("user_code", sa.String(), nullable=False, unique=True),
        sa.Column(
            "user_id",
            sa.String(),
            sa.ForeignKey("users.id", ondelete="CASCADE"),
            nullable=True,
        ),
        sa.Column("status", sa.String(), nullable=False, server_default="pending"),
        sa.Column(
            "cli_api_key_id",
            sa.String(),
            sa.ForeignKey("api_keys.id", ondelete="SET NULL"),
            nullable=True,
        ),
        sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
        sa.Column("approved_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("client_name", sa.String(), nullable=True),
        sa.Column("cli_token", sa.String(), nullable=True),
        sa.Column("interval", sa.Integer(), nullable=False, server_default="5"),
    )
    op.create_index("ix_device_codes_device_code", "device_codes", ["device_code"], unique=True)
    op.create_index("ix_device_codes_user_code", "device_codes", ["user_code"], unique=True)
    op.create_index("ix_device_codes_user_id", "device_codes", ["user_id"])
    op.create_index("ix_device_codes_status", "device_codes", ["status"])

    # Add key_type to api_keys
    op.add_column(
        "api_keys",
        sa.Column("key_type", sa.String(), nullable=False, server_default="api"),
    )

    # Add source to usage_records
    op.add_column(
        "usage_records",
        sa.Column("source", sa.String(), nullable=False, server_default="chat"),
    )
    op.create_index("ix_usage_records_source", "usage_records", ["source"])


def downgrade() -> None:
    op.drop_index("ix_usage_records_source", table_name="usage_records")
    op.drop_column("usage_records", "source")
    op.drop_column("api_keys", "key_type")
    op.drop_index("ix_device_codes_status", table_name="device_codes")
    op.drop_index("ix_device_codes_user_id", table_name="device_codes")
    op.drop_index("ix_device_codes_user_code", table_name="device_codes")
    op.drop_index("ix_device_codes_device_code", table_name="device_codes")
    op.drop_table("device_codes")