nao-core 0.0.38__py3-none-manylinux2014_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nao_core/__init__.py +2 -0
- nao_core/__init__.py.bak +2 -0
- nao_core/bin/build-info.json +5 -0
- nao_core/bin/fastapi/main.py +268 -0
- nao_core/bin/fastapi/test_main.py +156 -0
- nao_core/bin/migrations-postgres/0000_user_auth_and_chat_tables.sql +98 -0
- nao_core/bin/migrations-postgres/0001_message_feedback.sql +9 -0
- nao_core/bin/migrations-postgres/0002_chat_message_stop_reason_and_error_message.sql +2 -0
- nao_core/bin/migrations-postgres/0003_handle_slack_with_thread.sql +2 -0
- nao_core/bin/migrations-postgres/0004_input_and_output_tokens.sql +8 -0
- nao_core/bin/migrations-postgres/0005_add_project_tables.sql +39 -0
- nao_core/bin/migrations-postgres/0006_llm_model_ids.sql +4 -0
- nao_core/bin/migrations-postgres/0007_chat_message_llm_info.sql +2 -0
- nao_core/bin/migrations-postgres/meta/0000_snapshot.json +707 -0
- nao_core/bin/migrations-postgres/meta/0001_snapshot.json +766 -0
- nao_core/bin/migrations-postgres/meta/0002_snapshot.json +778 -0
- nao_core/bin/migrations-postgres/meta/0003_snapshot.json +799 -0
- nao_core/bin/migrations-postgres/meta/0004_snapshot.json +847 -0
- nao_core/bin/migrations-postgres/meta/0005_snapshot.json +1129 -0
- nao_core/bin/migrations-postgres/meta/0006_snapshot.json +1141 -0
- nao_core/bin/migrations-postgres/meta/_journal.json +62 -0
- nao_core/bin/migrations-sqlite/0000_user_auth_and_chat_tables.sql +98 -0
- nao_core/bin/migrations-sqlite/0001_message_feedback.sql +8 -0
- nao_core/bin/migrations-sqlite/0002_chat_message_stop_reason_and_error_message.sql +2 -0
- nao_core/bin/migrations-sqlite/0003_handle_slack_with_thread.sql +2 -0
- nao_core/bin/migrations-sqlite/0004_input_and_output_tokens.sql +8 -0
- nao_core/bin/migrations-sqlite/0005_add_project_tables.sql +38 -0
- nao_core/bin/migrations-sqlite/0006_llm_model_ids.sql +4 -0
- nao_core/bin/migrations-sqlite/0007_chat_message_llm_info.sql +2 -0
- nao_core/bin/migrations-sqlite/meta/0000_snapshot.json +674 -0
- nao_core/bin/migrations-sqlite/meta/0001_snapshot.json +735 -0
- nao_core/bin/migrations-sqlite/meta/0002_snapshot.json +749 -0
- nao_core/bin/migrations-sqlite/meta/0003_snapshot.json +763 -0
- nao_core/bin/migrations-sqlite/meta/0004_snapshot.json +819 -0
- nao_core/bin/migrations-sqlite/meta/0005_snapshot.json +1086 -0
- nao_core/bin/migrations-sqlite/meta/0006_snapshot.json +1100 -0
- nao_core/bin/migrations-sqlite/meta/_journal.json +62 -0
- nao_core/bin/nao-chat-server +0 -0
- nao_core/bin/public/assets/code-block-F6WJLWQG-CV0uOmNJ.js +153 -0
- nao_core/bin/public/assets/index-DcbndLHo.css +1 -0
- nao_core/bin/public/assets/index-t1hZI3nl.js +560 -0
- nao_core/bin/public/favicon.ico +0 -0
- nao_core/bin/public/index.html +18 -0
- nao_core/bin/rg +0 -0
- nao_core/commands/__init__.py +6 -0
- nao_core/commands/chat.py +225 -0
- nao_core/commands/debug.py +158 -0
- nao_core/commands/init.py +358 -0
- nao_core/commands/sync/__init__.py +124 -0
- nao_core/commands/sync/accessors.py +290 -0
- nao_core/commands/sync/cleanup.py +156 -0
- nao_core/commands/sync/providers/__init__.py +32 -0
- nao_core/commands/sync/providers/base.py +113 -0
- nao_core/commands/sync/providers/databases/__init__.py +17 -0
- nao_core/commands/sync/providers/databases/bigquery.py +79 -0
- nao_core/commands/sync/providers/databases/databricks.py +79 -0
- nao_core/commands/sync/providers/databases/duckdb.py +78 -0
- nao_core/commands/sync/providers/databases/postgres.py +79 -0
- nao_core/commands/sync/providers/databases/provider.py +129 -0
- nao_core/commands/sync/providers/databases/snowflake.py +79 -0
- nao_core/commands/sync/providers/notion/__init__.py +5 -0
- nao_core/commands/sync/providers/notion/provider.py +205 -0
- nao_core/commands/sync/providers/repositories/__init__.py +5 -0
- nao_core/commands/sync/providers/repositories/provider.py +134 -0
- nao_core/commands/sync/registry.py +23 -0
- nao_core/config/__init__.py +30 -0
- nao_core/config/base.py +100 -0
- nao_core/config/databases/__init__.py +55 -0
- nao_core/config/databases/base.py +85 -0
- nao_core/config/databases/bigquery.py +99 -0
- nao_core/config/databases/databricks.py +79 -0
- nao_core/config/databases/duckdb.py +41 -0
- nao_core/config/databases/postgres.py +83 -0
- nao_core/config/databases/snowflake.py +125 -0
- nao_core/config/exceptions.py +7 -0
- nao_core/config/llm/__init__.py +19 -0
- nao_core/config/notion/__init__.py +8 -0
- nao_core/config/repos/__init__.py +3 -0
- nao_core/config/repos/base.py +11 -0
- nao_core/config/slack/__init__.py +12 -0
- nao_core/context/__init__.py +54 -0
- nao_core/context/base.py +57 -0
- nao_core/context/git.py +177 -0
- nao_core/context/local.py +59 -0
- nao_core/main.py +13 -0
- nao_core/templates/__init__.py +41 -0
- nao_core/templates/context.py +193 -0
- nao_core/templates/defaults/databases/columns.md.j2 +23 -0
- nao_core/templates/defaults/databases/description.md.j2 +32 -0
- nao_core/templates/defaults/databases/preview.md.j2 +22 -0
- nao_core/templates/defaults/databases/profiling.md.j2 +34 -0
- nao_core/templates/engine.py +133 -0
- nao_core/templates/render.py +196 -0
- nao_core-0.0.38.dist-info/METADATA +150 -0
- nao_core-0.0.38.dist-info/RECORD +98 -0
- nao_core-0.0.38.dist-info/WHEEL +4 -0
- nao_core-0.0.38.dist-info/entry_points.txt +2 -0
- nao_core-0.0.38.dist-info/licenses/LICENSE +22 -0
nao_core/__init__.py
ADDED
nao_core/__init__.py.bak
ADDED
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
from contextlib import asynccontextmanager
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import numpy as np
|
|
8
|
+
import uvicorn
|
|
9
|
+
from dotenv import load_dotenv
|
|
10
|
+
from fastapi import FastAPI, HTTPException
|
|
11
|
+
from fastapi.middleware.cors import CORSMiddleware
|
|
12
|
+
from pydantic import BaseModel
|
|
13
|
+
|
|
14
|
+
# Load environment variables from a local .env file before anything reads them.
load_dotenv()

# NOTE(review): presumably makes the sibling "cli" folder importable when the
# server runs from the source tree — confirm against the repository layout.
cli_path = Path(__file__).parent.parent.parent / "cli"
sys.path.insert(0, str(cli_path))

# Imported after the sys.path tweak so nao_core resolves in-tree.
from nao_core.config import NaoConfig
from nao_core.context import get_context_provider

# HTTP port for uvicorn; defaults to 8005 when PORT is unset.
port = int(os.environ.get("PORT", 8005))

# Global scheduler instance
scheduler = None
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application lifespan - setup scheduler on startup.

    If NAO_REFRESH_SCHEDULE contains a cron expression, a background
    APScheduler job periodically refreshes the context; the scheduler is
    shut down again when the application exits.
    """
    global scheduler

    # Setup periodic refresh if configured
    refresh_schedule = os.environ.get("NAO_REFRESH_SCHEDULE")
    if refresh_schedule:
        # Imported lazily so apscheduler is only required when scheduling is on.
        from apscheduler.schedulers.asyncio import AsyncIOScheduler
        from apscheduler.triggers.cron import CronTrigger

        scheduler = AsyncIOScheduler()

        try:
            trigger = CronTrigger.from_crontab(refresh_schedule)
            scheduler.add_job(
                _refresh_context_task,
                trigger,
                id="context_refresh",
                name="Periodic context refresh",
            )
            scheduler.start()
            print(f"[Scheduler] Periodic refresh enabled: {refresh_schedule}")
        except ValueError as e:
            print(f"[Scheduler] Invalid cron expression '{refresh_schedule}': {e}")

    yield

    # Shutdown scheduler. Guard on .running: when the cron expression was
    # invalid, the scheduler object exists but was never started, and calling
    # shutdown() on a non-running scheduler raises SchedulerNotRunningError.
    if scheduler and scheduler.running:
        scheduler.shutdown(wait=False)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
async def _refresh_context_task():
    """Background task for scheduled context refresh."""
    try:
        provider = get_context_provider()
        if provider.refresh():
            print(f"[Scheduler] Context refreshed at {datetime.now().isoformat()}")
        else:
            print(f"[Scheduler] Context already up-to-date at {datetime.now().isoformat()}")
    except Exception as e:
        # Never let a failed refresh propagate out of the scheduler job.
        print(f"[Scheduler] Failed to refresh context: {e}")
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
# Application instance; startup/shutdown behavior lives in lifespan() above.
app = FastAPI(lifespan=lifespan)

# NOTE(review): wildcard CORS (any origin, method, header) is wide open.
# Fine for a local development server — confirm before exposing publicly.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# =============================================================================
|
|
85
|
+
# Request/Response Models
|
|
86
|
+
# =============================================================================
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
class ExecuteSQLRequest(BaseModel):
    # Request body for POST /execute_sql.
    sql: str  # arbitrary SQL to run (CTEs supported)
    nao_project_folder: str  # folder containing nao_config.yaml
    database_id: str | None = None  # required when several databases are configured
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class ExecuteSQLResponse(BaseModel):
    # Result set for POST /execute_sql.
    data: list[dict]  # one dict per row, JSON-serializable values
    row_count: int  # len(data)
    columns: list[str]  # column names in result order
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
class RefreshResponse(BaseModel):
    # Result of POST /api/refresh.
    status: str  # "ok" on success (failures raise HTTP 500 instead)
    updated: bool  # True when the refresh pulled new content
    message: str  # human-readable outcome
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class HealthResponse(BaseModel):
    # Payload for GET /health.
    status: str  # "ok" or "error"
    context_source: str  # NAO_CONTEXT_SOURCE env var, default "local"
    context_initialized: bool  # whether the context provider reports it is ready
    refresh_schedule: str | None  # NAO_REFRESH_SCHEDULE cron expression, if any
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
# =============================================================================
|
|
115
|
+
# API Endpoints
|
|
116
|
+
# =============================================================================
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
@app.get("/health", response_model=HealthResponse)
|
|
120
|
+
async def health_check():
|
|
121
|
+
"""Health check endpoint with context status."""
|
|
122
|
+
try:
|
|
123
|
+
provider = get_context_provider()
|
|
124
|
+
context_source = os.environ.get("NAO_CONTEXT_SOURCE", "local")
|
|
125
|
+
return HealthResponse(
|
|
126
|
+
status="ok",
|
|
127
|
+
context_source=context_source,
|
|
128
|
+
context_initialized=provider.is_initialized(),
|
|
129
|
+
refresh_schedule=os.environ.get("NAO_REFRESH_SCHEDULE"),
|
|
130
|
+
)
|
|
131
|
+
except Exception:
|
|
132
|
+
return HealthResponse(
|
|
133
|
+
status="error",
|
|
134
|
+
context_source=os.environ.get("NAO_CONTEXT_SOURCE", "local"),
|
|
135
|
+
context_initialized=False,
|
|
136
|
+
refresh_schedule=os.environ.get("NAO_REFRESH_SCHEDULE"),
|
|
137
|
+
)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
@app.post("/api/refresh", response_model=RefreshResponse)
|
|
141
|
+
async def refresh_context():
|
|
142
|
+
"""Trigger a context refresh (git pull if using git source).
|
|
143
|
+
|
|
144
|
+
This endpoint can be called by:
|
|
145
|
+
- CI/CD pipelines after pushing new context
|
|
146
|
+
- Webhooks when data schemas change
|
|
147
|
+
- Manual triggers for immediate updates
|
|
148
|
+
"""
|
|
149
|
+
try:
|
|
150
|
+
provider = get_context_provider()
|
|
151
|
+
updated = provider.refresh()
|
|
152
|
+
|
|
153
|
+
if updated:
|
|
154
|
+
return RefreshResponse(
|
|
155
|
+
status="ok",
|
|
156
|
+
updated=True,
|
|
157
|
+
message="Context updated successfully",
|
|
158
|
+
)
|
|
159
|
+
else:
|
|
160
|
+
return RefreshResponse(
|
|
161
|
+
status="ok",
|
|
162
|
+
updated=False,
|
|
163
|
+
message="Context already up-to-date",
|
|
164
|
+
)
|
|
165
|
+
except Exception as e:
|
|
166
|
+
raise HTTPException(
|
|
167
|
+
status_code=500,
|
|
168
|
+
detail=f"Failed to refresh context: {str(e)}",
|
|
169
|
+
)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
@app.post("/execute_sql", response_model=ExecuteSQLResponse)
|
|
173
|
+
async def execute_sql(request: ExecuteSQLRequest):
|
|
174
|
+
try:
|
|
175
|
+
# Load the nao config from the project folder
|
|
176
|
+
project_path = Path(request.nao_project_folder)
|
|
177
|
+
os.chdir(project_path)
|
|
178
|
+
config = NaoConfig.try_load(project_path)
|
|
179
|
+
|
|
180
|
+
if config is None:
|
|
181
|
+
raise HTTPException(
|
|
182
|
+
status_code=400,
|
|
183
|
+
detail=f"Could not load nao_config.yaml from {request.nao_project_folder}",
|
|
184
|
+
)
|
|
185
|
+
|
|
186
|
+
if len(config.databases) == 0:
|
|
187
|
+
raise HTTPException(
|
|
188
|
+
status_code=400,
|
|
189
|
+
detail="No databases configured in nao_config.yaml",
|
|
190
|
+
)
|
|
191
|
+
|
|
192
|
+
# Determine which database to use
|
|
193
|
+
if len(config.databases) == 1:
|
|
194
|
+
db_config = config.databases[0]
|
|
195
|
+
elif request.database_id:
|
|
196
|
+
# Find the database by name
|
|
197
|
+
db_config = next(
|
|
198
|
+
(db for db in config.databases if db.name == request.database_id),
|
|
199
|
+
None,
|
|
200
|
+
)
|
|
201
|
+
if db_config is None:
|
|
202
|
+
available_databases = [db.name for db in config.databases]
|
|
203
|
+
raise HTTPException(
|
|
204
|
+
status_code=400,
|
|
205
|
+
detail={
|
|
206
|
+
"message": f"Database '{request.database_id}' not found",
|
|
207
|
+
"available_databases": available_databases,
|
|
208
|
+
},
|
|
209
|
+
)
|
|
210
|
+
else:
|
|
211
|
+
# Multiple databases and no database_id specified
|
|
212
|
+
available_databases = [db.name for db in config.databases]
|
|
213
|
+
raise HTTPException(
|
|
214
|
+
status_code=400,
|
|
215
|
+
detail={
|
|
216
|
+
"message": "Multiple databases configured. Please specify database_id.",
|
|
217
|
+
"available_databases": available_databases,
|
|
218
|
+
},
|
|
219
|
+
)
|
|
220
|
+
|
|
221
|
+
connection = db_config.connect()
|
|
222
|
+
|
|
223
|
+
# Use raw_sql to execute arbitrary SQL (including CTEs)
|
|
224
|
+
cursor = connection.raw_sql(request.sql)
|
|
225
|
+
|
|
226
|
+
# Handle different cursor types from different backends
|
|
227
|
+
if hasattr(cursor, "fetchdf"):
|
|
228
|
+
# DuckDB returns a cursor with fetchdf()
|
|
229
|
+
df = cursor.fetchdf()
|
|
230
|
+
elif hasattr(cursor, "to_dataframe"):
|
|
231
|
+
# Some backends return cursors with to_dataframe()
|
|
232
|
+
df = cursor.to_dataframe()
|
|
233
|
+
else:
|
|
234
|
+
# Fallback: try to use pandas read_sql or fetchall
|
|
235
|
+
import pandas as pd
|
|
236
|
+
|
|
237
|
+
columns = [desc[0] for desc in cursor.description]
|
|
238
|
+
df = pd.DataFrame(cursor.fetchall(), columns=columns)
|
|
239
|
+
|
|
240
|
+
def convert_value(v):
|
|
241
|
+
if isinstance(v, (np.integer,)):
|
|
242
|
+
return int(v)
|
|
243
|
+
if isinstance(v, (np.floating,)):
|
|
244
|
+
return float(v)
|
|
245
|
+
if isinstance(v, np.ndarray):
|
|
246
|
+
return v.tolist()
|
|
247
|
+
if hasattr(v, "item"): # numpy scalar
|
|
248
|
+
return v.item()
|
|
249
|
+
return v
|
|
250
|
+
|
|
251
|
+
data = [{k: convert_value(v) for k, v in row.items()} for row in df.to_dict(orient="records")]
|
|
252
|
+
|
|
253
|
+
return ExecuteSQLResponse(
|
|
254
|
+
data=data,
|
|
255
|
+
row_count=len(data),
|
|
256
|
+
columns=[str(c) for c in df.columns.tolist()],
|
|
257
|
+
)
|
|
258
|
+
except HTTPException:
|
|
259
|
+
raise
|
|
260
|
+
except Exception as e:
|
|
261
|
+
raise HTTPException(status_code=500, detail=str(e))
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
if __name__ == "__main__":
|
|
265
|
+
nao_project_folder = os.getenv("NAO_DEFAULT_PROJECT_PATH")
|
|
266
|
+
if nao_project_folder:
|
|
267
|
+
os.chdir(nao_project_folder)
|
|
268
|
+
uvicorn.run("main:app", host="0.0.0.0", port=port, reload=True)
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
import tempfile
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
import pytest
|
|
5
|
+
import yaml
|
|
6
|
+
from fastapi.testclient import TestClient
|
|
7
|
+
|
|
8
|
+
from main import app
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def assert_sql_result(data: dict, *, row_count: int, columns: list[str], expected_data: list[dict]):
    """Assert that SQL response data matches expected values."""
    rows = data["data"]
    assert data["row_count"] == row_count
    assert data["columns"] == columns
    assert len(rows) == row_count
    assert rows == expected_data
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@pytest.fixture
def duckdb_project_folder():
    """Create a temporary project folder with a DuckDB config."""
    with tempfile.TemporaryDirectory() as tmpdir:
        database = {
            "name": "test-duckdb",
            "type": "duckdb",
            "path": ":memory:",
        }
        config = {
            "project_name": "test-project",
            "databases": [database],
        }
        config_path = Path(tmpdir) / "nao_config.yaml"
        config_path.write_text(yaml.dump(config))
        yield tmpdir
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def test_execute_sql_simple_duckdb(duckdb_project_folder):
    """Test execute_sql endpoint with a DuckDB in-memory database."""
    client = TestClient(app)

    payload = {
        "sql": "SELECT 1 AS id, 'hello' AS message",
        "nao_project_folder": duckdb_project_folder,
    }
    response = client.post("/execute_sql", json=payload)

    assert response.status_code == 200
    assert_sql_result(
        response.json(),
        row_count=1,
        columns=["id", "message"],
        expected_data=[{"id": 1, "message": "hello"}],
    )
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def test_execute_sql_with_cte_duckdb(duckdb_project_folder):
    """Test execute_sql endpoint with a DuckDB in-memory database."""
    client = TestClient(app)

    payload = {
        "sql": "WITH test AS (SELECT 1 AS id, 'hello' AS message) SELECT * FROM test",
        "nao_project_folder": duckdb_project_folder,
    }
    response = client.post("/execute_sql", json=payload)

    assert response.status_code == 200
    assert_sql_result(
        response.json(),
        row_count=1,
        columns=["id", "message"],
        expected_data=[{"id": 1, "message": "hello"}],
    )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
# BigQuery tests (requires SSO authentication)
|
|
82
|
+
|
|
83
|
+
@pytest.fixture
def bigquery_project_folder():
    """Create a temporary project folder with a BigQuery config using SSO."""
    with tempfile.TemporaryDirectory() as tmpdir:
        database = {
            "name": "nao-bigquery",
            "type": "bigquery",
            "project_id": "nao-corp",
            "sso": True,
        }
        config = {
            "project_name": "test-project",
            "databases": [database],
        }
        config_path = Path(tmpdir) / "nao_config.yaml"
        config_path.write_text(yaml.dump(config))
        yield tmpdir
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def test_execute_sql_simple_bigquery(bigquery_project_folder):
    """Test execute_sql endpoint with BigQuery using SSO."""
    client = TestClient(app)

    payload = {
        "sql": "SELECT 1 AS id, 'hello' AS message",
        "nao_project_folder": bigquery_project_folder,
    }
    response = client.post("/execute_sql", json=payload)

    assert response.status_code == 200
    assert_sql_result(
        response.json(),
        row_count=1,
        columns=["id", "message"],
        expected_data=[{"id": 1, "message": "hello"}],
    )
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def test_execute_sql_with_cte_bigquery(bigquery_project_folder):
    """Test execute_sql endpoint with a CTE query on BigQuery."""
    client = TestClient(app)

    cte_sql = """
    WITH users AS (
        SELECT 1 AS id, 'Alice' AS name
        UNION ALL SELECT 2, 'Bob'
        UNION ALL SELECT 3, 'Charlie'
    )
    SELECT * FROM users
    """

    payload = {
        "sql": cte_sql,
        "nao_project_folder": bigquery_project_folder,
    }
    response = client.post("/execute_sql", json=payload)

    assert response.status_code == 200
    assert_sql_result(
        response.json(),
        row_count=3,
        columns=["id", "name"],
        expected_data=[
            {"id": 1, "name": "Alice"},
            {"id": 2, "name": "Bob"},
            {"id": 3, "name": "Charlie"},
        ],
    )
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
CREATE TABLE "account" (
|
|
2
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
3
|
+
"account_id" text NOT NULL,
|
|
4
|
+
"provider_id" text NOT NULL,
|
|
5
|
+
"user_id" text NOT NULL,
|
|
6
|
+
"access_token" text,
|
|
7
|
+
"refresh_token" text,
|
|
8
|
+
"id_token" text,
|
|
9
|
+
"access_token_expires_at" timestamp,
|
|
10
|
+
"refresh_token_expires_at" timestamp,
|
|
11
|
+
"scope" text,
|
|
12
|
+
"password" text,
|
|
13
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
14
|
+
"updated_at" timestamp NOT NULL
|
|
15
|
+
);
|
|
16
|
+
--> statement-breakpoint
|
|
17
|
+
CREATE TABLE "chat" (
|
|
18
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
19
|
+
"user_id" text NOT NULL,
|
|
20
|
+
"title" text DEFAULT 'New Conversation' NOT NULL,
|
|
21
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
22
|
+
"updated_at" timestamp DEFAULT now() NOT NULL
|
|
23
|
+
);
|
|
24
|
+
--> statement-breakpoint
|
|
25
|
+
CREATE TABLE "chat_message" (
|
|
26
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
27
|
+
"chat_id" text NOT NULL,
|
|
28
|
+
"role" text NOT NULL,
|
|
29
|
+
"created_at" timestamp DEFAULT now() NOT NULL
|
|
30
|
+
);
|
|
31
|
+
--> statement-breakpoint
|
|
32
|
+
CREATE TABLE "message_part" (
|
|
33
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
34
|
+
"message_id" text NOT NULL,
|
|
35
|
+
"order" integer NOT NULL,
|
|
36
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
37
|
+
"type" text NOT NULL,
|
|
38
|
+
"text" text,
|
|
39
|
+
"reasoning_text" text,
|
|
40
|
+
"tool_call_id" text,
|
|
41
|
+
"tool_name" text,
|
|
42
|
+
"tool_state" text,
|
|
43
|
+
"tool_error_text" text,
|
|
44
|
+
"tool_input" jsonb,
|
|
45
|
+
"tool_output" jsonb,
|
|
46
|
+
"tool_approval_id" text,
|
|
47
|
+
"tool_approval_approved" boolean,
|
|
48
|
+
"tool_approval_reason" text,
|
|
49
|
+
CONSTRAINT "text_required_if_type_is_text" CHECK (CASE WHEN "message_part"."type" = 'text' THEN "message_part"."text" IS NOT NULL ELSE TRUE END),
|
|
50
|
+
CONSTRAINT "reasoning_text_required_if_type_is_reasoning" CHECK (CASE WHEN "message_part"."type" = 'reasoning' THEN "message_part"."reasoning_text" IS NOT NULL ELSE TRUE END),
|
|
51
|
+
CONSTRAINT "tool_call_fields_required" CHECK (CASE WHEN "message_part"."type" LIKE 'tool-%' THEN "message_part"."tool_call_id" IS NOT NULL AND "message_part"."tool_state" IS NOT NULL ELSE TRUE END)
|
|
52
|
+
);
|
|
53
|
+
--> statement-breakpoint
|
|
54
|
+
CREATE TABLE "session" (
|
|
55
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
56
|
+
"expires_at" timestamp NOT NULL,
|
|
57
|
+
"token" text NOT NULL,
|
|
58
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
59
|
+
"updated_at" timestamp NOT NULL,
|
|
60
|
+
"ip_address" text,
|
|
61
|
+
"user_agent" text,
|
|
62
|
+
"user_id" text NOT NULL,
|
|
63
|
+
CONSTRAINT "session_token_unique" UNIQUE("token")
|
|
64
|
+
);
|
|
65
|
+
--> statement-breakpoint
|
|
66
|
+
CREATE TABLE "user" (
|
|
67
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
68
|
+
"name" text NOT NULL,
|
|
69
|
+
"email" text NOT NULL,
|
|
70
|
+
"email_verified" boolean DEFAULT false NOT NULL,
|
|
71
|
+
"image" text,
|
|
72
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
73
|
+
"updated_at" timestamp DEFAULT now() NOT NULL,
|
|
74
|
+
CONSTRAINT "user_email_unique" UNIQUE("email")
|
|
75
|
+
);
|
|
76
|
+
--> statement-breakpoint
|
|
77
|
+
CREATE TABLE "verification" (
|
|
78
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
79
|
+
"identifier" text NOT NULL,
|
|
80
|
+
"value" text NOT NULL,
|
|
81
|
+
"expires_at" timestamp NOT NULL,
|
|
82
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
83
|
+
"updated_at" timestamp DEFAULT now() NOT NULL
|
|
84
|
+
);
|
|
85
|
+
--> statement-breakpoint
|
|
86
|
+
ALTER TABLE "account" ADD CONSTRAINT "account_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
87
|
+
ALTER TABLE "chat" ADD CONSTRAINT "chat_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
88
|
+
ALTER TABLE "chat_message" ADD CONSTRAINT "chat_message_chat_id_chat_id_fk" FOREIGN KEY ("chat_id") REFERENCES "public"."chat"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
89
|
+
ALTER TABLE "message_part" ADD CONSTRAINT "message_part_message_id_chat_message_id_fk" FOREIGN KEY ("message_id") REFERENCES "public"."chat_message"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
90
|
+
ALTER TABLE "session" ADD CONSTRAINT "session_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
91
|
+
CREATE INDEX "account_userId_idx" ON "account" USING btree ("user_id");--> statement-breakpoint
|
|
92
|
+
CREATE INDEX "chat_userId_idx" ON "chat" USING btree ("user_id");--> statement-breakpoint
|
|
93
|
+
CREATE INDEX "chat_message_chatId_idx" ON "chat_message" USING btree ("chat_id");--> statement-breakpoint
|
|
94
|
+
CREATE INDEX "chat_message_createdAt_idx" ON "chat_message" USING btree ("created_at");--> statement-breakpoint
|
|
95
|
+
CREATE INDEX "parts_message_id_idx" ON "message_part" USING btree ("message_id");--> statement-breakpoint
|
|
96
|
+
CREATE INDEX "parts_message_id_order_idx" ON "message_part" USING btree ("message_id","order");--> statement-breakpoint
|
|
97
|
+
CREATE INDEX "session_userId_idx" ON "session" USING btree ("user_id");--> statement-breakpoint
|
|
98
|
+
CREATE INDEX "verification_identifier_idx" ON "verification" USING btree ("identifier");
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
CREATE TABLE "message_feedback" (
|
|
2
|
+
"message_id" text PRIMARY KEY NOT NULL,
|
|
3
|
+
"vote" text NOT NULL,
|
|
4
|
+
"explanation" text,
|
|
5
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
6
|
+
"updated_at" timestamp DEFAULT now() NOT NULL
|
|
7
|
+
);
|
|
8
|
+
--> statement-breakpoint
|
|
9
|
+
ALTER TABLE "message_feedback" ADD CONSTRAINT "message_feedback_message_id_chat_message_id_fk" FOREIGN KEY ("message_id") REFERENCES "public"."chat_message"("id") ON DELETE cascade ON UPDATE no action;
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
ALTER TABLE "message_part" ADD COLUMN "input_total_tokens" integer;--> statement-breakpoint
|
|
2
|
+
ALTER TABLE "message_part" ADD COLUMN "input_no_cache_tokens" integer;--> statement-breakpoint
|
|
3
|
+
ALTER TABLE "message_part" ADD COLUMN "input_cache_read_tokens" integer;--> statement-breakpoint
|
|
4
|
+
ALTER TABLE "message_part" ADD COLUMN "input_cache_write_tokens" integer;--> statement-breakpoint
|
|
5
|
+
ALTER TABLE "message_part" ADD COLUMN "output_total_tokens" integer;--> statement-breakpoint
|
|
6
|
+
ALTER TABLE "message_part" ADD COLUMN "output_text_tokens" integer;--> statement-breakpoint
|
|
7
|
+
ALTER TABLE "message_part" ADD COLUMN "output_reasoning_tokens" integer;--> statement-breakpoint
|
|
8
|
+
ALTER TABLE "message_part" ADD COLUMN "total_tokens" integer;
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
CREATE TABLE "project" (
|
|
2
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
3
|
+
"name" text NOT NULL,
|
|
4
|
+
"type" text NOT NULL,
|
|
5
|
+
"path" text,
|
|
6
|
+
"slack_bot_token" text,
|
|
7
|
+
"slack_signing_secret" text,
|
|
8
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
9
|
+
"updated_at" timestamp DEFAULT now() NOT NULL,
|
|
10
|
+
CONSTRAINT "local_project_path_required" CHECK (CASE WHEN "project"."type" = 'local' THEN "project"."path" IS NOT NULL ELSE TRUE END)
|
|
11
|
+
);
|
|
12
|
+
--> statement-breakpoint
|
|
13
|
+
CREATE TABLE "project_llm_config" (
|
|
14
|
+
"id" text PRIMARY KEY NOT NULL,
|
|
15
|
+
"project_id" text NOT NULL,
|
|
16
|
+
"provider" text NOT NULL,
|
|
17
|
+
"api_key" text NOT NULL,
|
|
18
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
19
|
+
"updated_at" timestamp DEFAULT now() NOT NULL,
|
|
20
|
+
CONSTRAINT "project_llm_config_unique" UNIQUE("id","project_id","provider")
|
|
21
|
+
);
|
|
22
|
+
--> statement-breakpoint
|
|
23
|
+
CREATE TABLE "project_member" (
|
|
24
|
+
"project_id" text NOT NULL,
|
|
25
|
+
"user_id" text NOT NULL,
|
|
26
|
+
"role" text NOT NULL,
|
|
27
|
+
"created_at" timestamp DEFAULT now() NOT NULL,
|
|
28
|
+
CONSTRAINT "project_member_project_id_user_id_pk" PRIMARY KEY("project_id","user_id")
|
|
29
|
+
);
|
|
30
|
+
--> statement-breakpoint
|
|
31
|
+
ALTER TABLE "chat" ADD COLUMN "project_id" text NOT NULL;--> statement-breakpoint
|
|
32
|
+
ALTER TABLE "project_llm_config" ADD CONSTRAINT "project_llm_config_project_id_project_id_fk" FOREIGN KEY ("project_id") REFERENCES "public"."project"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
33
|
+
ALTER TABLE "project_member" ADD CONSTRAINT "project_member_project_id_project_id_fk" FOREIGN KEY ("project_id") REFERENCES "public"."project"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
34
|
+
ALTER TABLE "project_member" ADD CONSTRAINT "project_member_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
35
|
+
CREATE INDEX "project_llm_config_projectId_idx" ON "project_llm_config" USING btree ("project_id");--> statement-breakpoint
|
|
36
|
+
CREATE INDEX "project_member_userId_idx" ON "project_member" USING btree ("user_id");--> statement-breakpoint
|
|
37
|
+
DELETE FROM "chat";--> statement-breakpoint
|
|
38
|
+
ALTER TABLE "chat" ADD CONSTRAINT "chat_project_id_project_id_fk" FOREIGN KEY ("project_id") REFERENCES "public"."project"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
|
39
|
+
CREATE INDEX "chat_projectId_idx" ON "chat" USING btree ("project_id");
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
ALTER TABLE "project_llm_config" DROP CONSTRAINT "project_llm_config_unique";--> statement-breakpoint
|
|
2
|
+
ALTER TABLE "project_llm_config" ADD COLUMN "enabled_models" jsonb DEFAULT '[]'::jsonb NOT NULL;--> statement-breakpoint
|
|
3
|
+
ALTER TABLE "project_llm_config" ADD COLUMN "base_url" text;--> statement-breakpoint
|
|
4
|
+
ALTER TABLE "project_llm_config" ADD CONSTRAINT "project_llm_config_project_provider" UNIQUE("project_id","provider");
|