qalita 2.3.2-py3-none-any.whl → 2.5.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qalita/__main__.py +213 -9
- qalita/commands/{agent.py → worker.py} +89 -89
- qalita/internal/config.py +26 -19
- qalita/internal/utils.py +1 -1
- qalita/web/app.py +97 -14
- qalita/web/blueprints/context.py +13 -60
- qalita/web/blueprints/dashboard.py +35 -76
- qalita/web/blueprints/helpers.py +154 -63
- qalita/web/blueprints/sources.py +29 -61
- qalita/web/blueprints/{agents.py → workers.py} +108 -185
- qalita-2.5.2.dist-info/METADATA +66 -0
- qalita-2.5.2.dist-info/RECORD +24 -0
- {qalita-2.3.2.dist-info → qalita-2.5.2.dist-info}/WHEEL +1 -1
- qalita-2.5.2.dist-info/entry_points.txt +2 -0
- qalita/web/blueprints/studio.py +0 -1294
- qalita/web/public/chatgpt.svg +0 -3
- qalita/web/public/claude.png +0 -0
- qalita/web/public/favicon.ico +0 -0
- qalita/web/public/gemini.png +0 -0
- qalita/web/public/logo-no-slogan.png +0 -0
- qalita/web/public/logo-white-no-slogan.svg +0 -11
- qalita/web/public/mistral.svg +0 -1
- qalita/web/public/noise.webp +0 -0
- qalita/web/public/ollama.png +0 -0
- qalita/web/public/platform.png +0 -0
- qalita/web/public/sources-logos/alloy-db.png +0 -0
- qalita/web/public/sources-logos/amazon-athena.png +0 -0
- qalita/web/public/sources-logos/amazon-rds.png +0 -0
- qalita/web/public/sources-logos/api.svg +0 -2
- qalita/web/public/sources-logos/avro.svg +0 -20
- qalita/web/public/sources-logos/azure-database-mysql.png +0 -0
- qalita/web/public/sources-logos/azure-database-postgresql.png +0 -0
- qalita/web/public/sources-logos/azure-sql-database.png +0 -0
- qalita/web/public/sources-logos/azure-sql-managed-instance.png +0 -0
- qalita/web/public/sources-logos/azure-synapse-analytics.png +0 -0
- qalita/web/public/sources-logos/azure_blob.svg +0 -1
- qalita/web/public/sources-logos/bigquery.png +0 -0
- qalita/web/public/sources-logos/cassandra.svg +0 -254
- qalita/web/public/sources-logos/clickhouse.png +0 -0
- qalita/web/public/sources-logos/cloud-sql.png +0 -0
- qalita/web/public/sources-logos/cockroach-db.png +0 -0
- qalita/web/public/sources-logos/csv.svg +0 -1
- qalita/web/public/sources-logos/database.svg +0 -3
- qalita/web/public/sources-logos/databricks.png +0 -0
- qalita/web/public/sources-logos/duckdb.png +0 -0
- qalita/web/public/sources-logos/elasticsearch.svg +0 -1
- qalita/web/public/sources-logos/excel.svg +0 -1
- qalita/web/public/sources-logos/file.svg +0 -1
- qalita/web/public/sources-logos/folder.svg +0 -6
- qalita/web/public/sources-logos/gcs.png +0 -0
- qalita/web/public/sources-logos/hdfs.svg +0 -1
- qalita/web/public/sources-logos/ibm-db2.png +0 -0
- qalita/web/public/sources-logos/json.png +0 -0
- qalita/web/public/sources-logos/maria-db.png +0 -0
- qalita/web/public/sources-logos/mongodb.svg +0 -1
- qalita/web/public/sources-logos/mssql.svg +0 -1
- qalita/web/public/sources-logos/mysql.svg +0 -7
- qalita/web/public/sources-logos/oracle.svg +0 -4
- qalita/web/public/sources-logos/parquet.svg +0 -16
- qalita/web/public/sources-logos/picture.png +0 -0
- qalita/web/public/sources-logos/postgresql.svg +0 -22
- qalita/web/public/sources-logos/questdb.png +0 -0
- qalita/web/public/sources-logos/redshift.png +0 -0
- qalita/web/public/sources-logos/s3.svg +0 -34
- qalita/web/public/sources-logos/sap-hana.png +0 -0
- qalita/web/public/sources-logos/sftp.png +0 -0
- qalita/web/public/sources-logos/single-store.png +0 -0
- qalita/web/public/sources-logos/snowflake.png +0 -0
- qalita/web/public/sources-logos/sqlite.svg +0 -104
- qalita/web/public/sources-logos/sqlserver.png +0 -0
- qalita/web/public/sources-logos/starburst.png +0 -0
- qalita/web/public/sources-logos/stream.png +0 -0
- qalita/web/public/sources-logos/teradata.png +0 -0
- qalita/web/public/sources-logos/timescale.png +0 -0
- qalita/web/public/sources-logos/xls.svg +0 -1
- qalita/web/public/sources-logos/xlsx.svg +0 -1
- qalita/web/public/sources-logos/yugabyte-db.png +0 -0
- qalita/web/public/studio-logo.svg +0 -10
- qalita/web/public/studio.css +0 -304
- qalita/web/public/studio.png +0 -0
- qalita/web/public/styles.css +0 -682
- qalita/web/templates/dashboard.html +0 -373
- qalita/web/templates/navbar.html +0 -40
- qalita/web/templates/sources/added.html +0 -57
- qalita/web/templates/sources/edit.html +0 -411
- qalita/web/templates/sources/select-source.html +0 -128
- qalita/web/templates/studio/agent-panel.html +0 -828
- qalita/web/templates/studio/context-panel.html +0 -300
- qalita/web/templates/studio/index.html +0 -79
- qalita/web/templates/studio/navbar.html +0 -14
- qalita/web/templates/studio/view-panel.html +0 -529
- qalita-2.3.2.dist-info/METADATA +0 -58
- qalita-2.3.2.dist-info/RECORD +0 -101
- qalita-2.3.2.dist-info/entry_points.txt +0 -3
- {qalita-2.3.2.dist-info → qalita-2.5.2.dist-info}/licenses/LICENSE +0 -0
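Since entry_points.txt and the agent → worker modules change between these versions, a consumer upgrading may want to confirm which console scripts the installed wheel actually exposes. The snippet below is a minimal, standard-library sketch and is not part of the package; the only assumption taken from this diff is the distribution name "qalita", and whatever it prints depends on the version installed locally.

from importlib.metadata import PackageNotFoundError, distribution

try:
    dist = distribution("qalita")  # distribution name taken from this diff
    print(dist.metadata["Name"], dist.version)
    for ep in dist.entry_points:
        if ep.group == "console_scripts":
            # prints: script-name -> module:function
            print(f"{ep.name} -> {ep.value}")
except PackageNotFoundError:
    print("qalita is not installed in this environment")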
qalita/web/blueprints/studio.py
DELETED
@@ -1,1294 +0,0 @@
"""
# QALITA (c) COPYRIGHT 2025 - ALL RIGHTS RESERVED -
"""

import os
import json
import yaml
import requests
from datetime import datetime
from flask import Blueprint, render_template, jsonify, request, current_app, Response
from flask import stream_with_context


bp = Blueprint("studio", __name__)


@bp.get("/")
def studio_home():
    return render_template("studio/index.html")


# ---- Config management ----


def _qalita_home():
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    try:
        return cfg.qalita_home  # type: ignore[attr-defined]
    except Exception:
        return os.path.expanduser("~/.qalita")


def _studio_config_path() -> str:
    root = _qalita_home()
    try:
        os.makedirs(root, exist_ok=True)
    except Exception:
        pass
    return os.path.join(root, ".studio")


def _qalita_home() -> str:
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    try:
        return getattr(cfg, "qalita_home")
    except Exception:
        return os.path.expanduser("~/.qalita")


def _read_qalita_conf() -> dict:
    try:
        path = os.path.join(_qalita_home(), "sources-conf.yaml")
        if not os.path.isfile(path):
            return {}
        with open(path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
        return data if isinstance(data, dict) else {}
    except Exception:
        return {}


def _find_source_by_id(conf: dict, source_id: str) -> dict | None:
    try:
        items = conf.get("sources") if isinstance(conf.get("sources"), list) else []
        for s in items:
            if isinstance(s, dict) and str(s.get("id", "")) == str(source_id):
                return s
    except Exception:
        return None
    return None


def _redact_sensitive(obj: dict) -> dict:
    try:
        SENSITIVE = {"password", "secret", "token", "access_key", "secret_key", "connection_string", "credentials", "api_key"}
        def scrub(v):
            if isinstance(v, dict):
                return {k: ("***" if k.lower() in SENSITIVE else scrub(v2)) for k, v2 in v.items()}
            if isinstance(v, list):
                return [scrub(it) for it in v]
            return v
        return scrub(dict(obj)) if isinstance(obj, dict) else {}
    except Exception:
        return {}


def _augment_prompt_with_context(prompt: str, issue_id: str | None, source_id: str | None, issue_details: dict | None = None, source_details: dict | None = None) -> str:
    base = prompt or ""
    meta_parts: list[str] = []
    if issue_id:
        meta_parts.append(f"Issue: {issue_id}")
    if source_id:
        meta_parts.append(f"Source: {source_id}")
    # Attach compact JSON of issue details if present
    if issue_details:
        try:
            snip = json.dumps(issue_details, ensure_ascii=False)[:800]
            meta_parts.append(f"IssueDetails: {snip}")
        except Exception:
            pass
    if source_details:
        try:
            red = _redact_sensitive(source_details)
            snip = json.dumps(red, ensure_ascii=False)[:800]
            meta_parts.append(f"SourceDetails: {snip}")
        except Exception:
            pass
    if not meta_parts:
        return base
    meta = "\n\n[Context]\n" + " | ".join(meta_parts) + "\n"  # lightweight hint
    return meta + base


def _cloud_enabled() -> bool:
    """Return whether Studio cloud providers are enabled via env flag.

    Env: QALITA_STUDIO_ENABLE_CLOUD = 1|true|yes|on to enable. Default: disabled.
    """
    try:
        raw = str(os.getenv("QALITA_STUDIO_ENABLE_CLOUD", "0") or "").strip().lower()
        return raw in ("1", "true", "yes", "on")
    except Exception:
        return False

def _studio_conv_dir() -> str:
    """Return the conversations directory, ensuring it exists."""
    root = _qalita_home()
    conv_dir = os.path.join(root, "studio_conversations")
    try:
        os.makedirs(conv_dir, exist_ok=True)
    except Exception:
        pass
    return conv_dir


def _safe_conv_id(raw: str) -> str:
    """Sanitize a conversation id to be filesystem-safe."""
    s = (raw or "").strip()
    if not s:
        s = datetime.utcnow().strftime("conv_%Y%m%d_%H%M%S")
    # allow alnum, dash, underscore only
    out = []
    for ch in s:
        if ch.isalnum() or ch in ("-", "_"):
            out.append(ch)
    s2 = "".join(out)
    return s2 or datetime.utcnow().strftime("conv_%Y%m%d_%H%M%S")


def _studio_conv_file_for(conv_id: str) -> str:
    conv_dir = _studio_conv_dir()
    safe_id = _safe_conv_id(conv_id)
    return os.path.join(conv_dir, f"{safe_id}.jsonl")


def _studio_conv_write(conv_id: str, record: dict) -> None:
    """Append one JSONL record to the studio conversations log.

    Errors are swallowed to avoid impacting the main request flow.
    """
    try:
        path = _studio_conv_file_for(conv_id)
        record = dict(record or {})
        if "ts" not in record:
            record["ts"] = datetime.utcnow().isoformat() + "Z"
        with open(path, "a", encoding="utf-8") as f:
            f.write(json.dumps(record, ensure_ascii=False))
            f.write("\n")
    except Exception:
        pass


@bp.get("/conversations")
def conversations_list():
    """List available conversations (one file per conversation)."""
    conv_dir = _studio_conv_dir()
    items: list[dict] = []
    try:
        for name in os.listdir(conv_dir):
            if not name.endswith(".jsonl"):
                continue
            path = os.path.join(conv_dir, name)
            try:
                st = os.stat(path)
                # count lines may be expensive, do bounded scan
                count = 0
                with open(path, "r", encoding="utf-8") as f:
                    for _ in f:
                        count += 1
                        if count > 10000:
                            break
                items.append(
                    {
                        "id": name[:-6],
                        "file": name,
                        "size": st.st_size,
                        "mtime": datetime.utcfromtimestamp(st.st_mtime).isoformat()
                        + "Z",
                        "lines": count,
                    }
                )
            except Exception:
                continue
        # Sort by mtime desc
        items.sort(key=lambda x: x.get("mtime", ""), reverse=True)
    except Exception:
        items = []
    return jsonify({"ok": True, "items": items})


@bp.get("/conversation")
def conversation_get():
    """Return a conversation's messages from its id."""
    conv_id = _safe_conv_id(request.args.get("id", ""))
    if not conv_id:
        return jsonify({"ok": False, "message": "Missing id"}), 400
    path = _studio_conv_file_for(conv_id)
    if not os.path.isfile(path):
        return jsonify({"ok": False, "message": "Not found"}), 404
    messages: list[dict] = []
    try:
        with open(path, "r", encoding="utf-8") as f:
            for raw in f:
                line = (raw or "").strip()
                if not line:
                    continue
                try:
                    obj = json.loads(line)
                except Exception:
                    continue
                # New format uses role/text
                if isinstance(obj, dict) and obj.get("role") and obj.get("text") is not None:
                    messages.append(
                        {
                            "role": obj.get("role"),
                            "text": obj.get("text"),
                            "ts": obj.get("ts"),
                        }
                    )
                # Back-compat: prompt/response record
                elif isinstance(obj, dict) and obj.get("prompt") is not None:
                    messages.append({"role": "user", "text": obj.get("prompt"), "ts": obj.get("ts")})
                    if obj.get("response") is not None:
                        messages.append(
                            {"role": "assistant", "text": obj.get("response"), "ts": obj.get("ts")}
                        )
    except Exception as exc:
        return jsonify({"ok": False, "message": str(exc)}), 500
    return jsonify({"ok": True, "id": conv_id, "messages": messages})


@bp.get("/status")
def studio_status():
    p = _studio_config_path()
    exists = os.path.isfile(p)
    data: dict | None = None
    if exists:
        try:
            with open(p, "r", encoding="utf-8") as f:
                raw = f.read().strip()
                if raw:
                    data = json.loads(raw)
        except Exception:
            data = None
    # Surface current provider quickly for the UI
    current_provider = None
    if isinstance(data, dict):
        current_provider = data.get("current_provider")
        if not current_provider and isinstance(data.get("providers"), dict):
            # Pick one deterministically for display
            try:
                current_provider = next(iter(data["providers"].keys()))
            except Exception:
                current_provider = None
    # Enforce local-only when cloud is disabled
    if not _cloud_enabled() and current_provider and current_provider != "local":
        current_provider = "local"
    return jsonify(
        {
            "configured": exists,
            "config": data,
            "current_provider": current_provider,
            "cloud_enabled": _cloud_enabled(),
        }
    )


@bp.post("/config")
def studio_save_config():
    payload = request.get_json(silent=True) or {}
    p = _studio_config_path()
    # Merge semantics to support multiple providers while remaining backward compatible
    try:
        current: dict = {}
        if os.path.isfile(p):
            try:
                with open(p, "r", encoding="utf-8") as rf:
                    raw = (rf.read() or "").strip()
                    if raw:
                        current = json.loads(raw)
                        if not isinstance(current, dict):
                            current = {}
            except Exception:
                current = {}
        # Normalize base structure
        if "providers" not in current or not isinstance(current.get("providers"), dict):
            current["providers"] = {}
        providers: dict = current["providers"]  # type: ignore[assignment]
        # Path A: structured provider update
        provider = (payload.get("provider") or "").strip()
        conf = (
            payload.get("config") if isinstance(payload.get("config"), dict) else None
        )
        set_current = bool(payload.get("set_current"))
        if provider and conf is not None:
            # Block saving non-local provider when cloud is disabled
            if provider != "local" and not _cloud_enabled():
                return (
                    jsonify({
                        "ok": False,
                        "message": "Cloud providers are disabled. Set QALITA_STUDIO_ENABLE_CLOUD=1 to enable.",
                    }),
                    403,
                )
            providers[provider] = conf
            if set_current:
                current["current_provider"] = provider
        else:
            # Path B: legacy flat payload (e.g., { "model": "gpt-oss:20b" })
            # Interpret as local provider settings
            if "model" in payload and isinstance(payload.get("model"), str):
                local_conf = (
                    providers.get("local", {})
                    if isinstance(providers.get("local"), dict)
                    else {}
                )
                local_conf["model"] = (payload.get("model") or "").strip()
                providers["local"] = local_conf
                # Prefer local as current if not already chosen
                if not current.get("current_provider"):
                    current["current_provider"] = "local"
            else:
                # Fallback: overwrite with provided payload (explicit user intent)
                current = payload
                if "providers" not in current:
                    current = {
                        "providers": {"legacy": payload},
                        "current_provider": current.get("current_provider", "legacy"),
                    }
        # Persist
        with open(p, "w", encoding="utf-8") as f:
            f.write(json.dumps(current, ensure_ascii=False, indent=2))
        return jsonify({"ok": True, "saved": True})
    except Exception as exc:
        return jsonify({"ok": False, "message": str(exc)}), 500


@bp.get("/check-ollama")
def check_ollama():
    url = "http://127.0.0.1:11434/api/tags"
    try:
        r = requests.get(url, timeout=2)
        ok = r.status_code == 200
        return jsonify({"ok": ok})
    except Exception:
        return jsonify({"ok": False})


@bp.get("/providers")
def list_providers():
    """Return available agent provider types and current selection from .studio config."""
    p = _studio_config_path()
    data: dict = {}
    try:
        if os.path.isfile(p):
            with open(p, "r", encoding="utf-8") as f:
                raw = (f.read() or "").strip()
                if raw:
                    data = json.loads(raw)
        if not isinstance(data, dict):
            data = {}
    except Exception:
        data = {}
    providers = data.get("providers") if isinstance(data.get("providers"), dict) else {}
    current = (
        data.get("current_provider")
        if isinstance(data.get("current_provider"), str)
        else None
    )
    # Static list for now; can be extended later or discovered dynamically
    available = [
        {"id": "local", "name": "Local Agent", "logo": "/static/ollama.png"},
        {"id": "openai", "name": "ChatGPT", "logo": "/static/chatgpt.svg"},
        {"id": "mistral", "name": "Mistral", "logo": "/static/mistral.svg"},
        {"id": "claude", "name": "Claude", "logo": "/static/sources-logos/api.svg"},
        {"id": "gemini", "name": "Gemini", "logo": "/static/sources-logos/api.svg"},
    ]
    if not _cloud_enabled():
        available = [it for it in available if it.get("id") == "local"]
    return jsonify(
        {
            "available": available,
            "current": current,
            "configs": providers,
            "cloud_enabled": _cloud_enabled(),
        }
    )


@bp.post("/check-remote")
def check_remote():
    """Best-effort connectivity check for remote AI providers (OpenAI, Mistral).

    Body: { "provider": "openai"|"mistral", "api_key": "...", "model": "..." }
    Returns: { ok: bool, message?: str, provider: str }
    """
    if not _cloud_enabled():
        return (
            jsonify({
                "ok": False,
                "message": "Cloud providers are disabled. Set QALITA_STUDIO_ENABLE_CLOUD=1 to enable.",
            }),
            403,
        )
    data = request.get_json(silent=True) or {}
    provider = (data.get("provider") or "").strip().lower()
    api_key = (data.get("api_key") or "").strip()
    model = (data.get("model") or "").strip()
    if not provider or not api_key:
        return (
            jsonify(
                {
                    "ok": False,
                    "message": "Missing provider or API key",
                    "provider": provider,
                }
            ),
            400,
        )
    try:
        if provider == "openai":
            # Lightweight models list call
            url = "https://api.openai.com/v1/models"
            headers = {"Authorization": f"Bearer {api_key}"}
            r = requests.get(url, headers=headers, timeout=8)
            if 200 <= r.status_code < 300:
                return jsonify({"ok": True, "provider": provider})
            try:
                body = r.json()
            except Exception:
                body = {"detail": r.text[:200]}
            return (
                jsonify(
                    {
                        "ok": False,
                        "provider": provider,
                        "status": r.status_code,
                        "error": body,
                    }
                ),
                200,
            )
        if provider == "mistral":
            # Mistral whoami endpoint
            url = "https://api.mistral.ai/v1/models"
            headers = {"Authorization": f"Bearer {api_key}"}
            r = requests.get(url, headers=headers, timeout=8)
            if 200 <= r.status_code < 300:
                return jsonify({"ok": True, "provider": provider})
            try:
                body = r.json()
            except Exception:
                body = {"detail": r.text[:200]}
            return (
                jsonify(
                    {
                        "ok": False,
                        "provider": provider,
                        "status": r.status_code,
                        "error": body,
                    }
                ),
                200,
            )
        if provider == "claude":
            # Anthropic models list requires API key header and version header
            url = "https://api.anthropic.com/v1/models"
            headers = {"x-api-key": api_key, "anthropic-version": "2023-06-01"}
            r = requests.get(url, headers=headers, timeout=8)
            if 200 <= r.status_code < 300:
                return jsonify({"ok": True, "provider": provider})
            try:
                body = r.json()
            except Exception:
                body = {"detail": r.text[:200]}
            return (
                jsonify(
                    {
                        "ok": False,
                        "provider": provider,
                        "status": r.status_code,
                        "error": body,
                    }
                ),
                200,
            )
        if provider == "gemini":
            # Google Generative Language API models list with key in query
            url = f"https://generativelanguage.googleapis.com/v1/models?key={api_key}"
            r = requests.get(url, timeout=8)
            if 200 <= r.status_code < 300:
                return jsonify({"ok": True, "provider": provider})
            try:
                body = r.json()
            except Exception:
                body = {"detail": r.text[:200]}
            return (
                jsonify(
                    {
                        "ok": False,
                        "provider": provider,
                        "status": r.status_code,
                        "error": body,
                    }
                ),
                200,
            )
        return (
            jsonify(
                {"ok": False, "message": "Unsupported provider", "provider": provider}
            ),
            400,
        )
    except Exception as exc:
        return jsonify({"ok": False, "message": str(exc), "provider": provider}), 200


@bp.get("/check-backend")
def check_backend():
    """Proxy healthcheck against the remote backend URL from current context.
    Avoids CORS issues in the browser and standardizes the response shape.
    """
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    backend_url: str | None = None
    token_value: str | None = None
    try:
        backend_url = getattr(cfg, "url", None)
        token_value = getattr(cfg, "token", None)
    except Exception:
        backend_url = None
        token_value = None
    # Fallback: read selected env pointer and parse URL from env file
    try:
        if not backend_url:
            home = _qalita_home()
            pointer = os.path.join(home, ".current_env")
            if os.path.isfile(pointer):
                with open(pointer, "r", encoding="utf-8") as f:
                    env_path = (f.read() or "").strip()
                if env_path and os.path.isfile(env_path):
                    with open(env_path, "r", encoding="utf-8") as ef:
                        for raw in ef.readlines():
                            line = (raw or "").strip()
                            if not line or line.startswith("#") or "=" not in line:
                                continue
                            k, v = line.split("=", 1)
                            k = (k or "").strip().upper()
                            v = (v or "").strip().strip('"').strip("'")
                            if k in (
                                "QALITA_AGENT_ENDPOINT",
                                "AGENT_ENDPOINT",
                                "QALITA_URL",
                                "URL",
                            ):
                                backend_url = v
                            if (
                                k in ("QALITA_AGENT_TOKEN", "QALITA_TOKEN", "TOKEN")
                                and not token_value
                            ):
                                token_value = v
                            # no break: we want to scan whole file to capture both url and token
    except Exception:
        pass
    # Compute readiness flags
    endpoint_present = bool(backend_url)
    token_present = bool(token_value)
    configured = endpoint_present and token_present
    if not backend_url:
        return (
            jsonify(
                {
                    "ok": False,
                    "status": None,
                    "url": None,
                    "endpoint_present": endpoint_present,
                    "token_present": token_present,
                    "configured": configured,
                }
            ),
            200,
        )
    try:
        url = str(backend_url).rstrip("/") + "/api/v1/healthcheck"
    except Exception:
        url = str(backend_url) + "/api/v1/healthcheck"
    try:
        r = requests.get(url, timeout=3)
        ok = 200 <= r.status_code < 300
        return jsonify(
            {
                "ok": ok,
                "status": r.status_code,
                "url": str(backend_url).rstrip("/"),
                "endpoint_present": endpoint_present,
                "token_present": token_present,
                "configured": configured,
            }
        )
    except Exception:
        return (
            jsonify(
                {
                    "ok": False,
                    "status": None,
                    "url": str(backend_url).rstrip("/"),
                    "endpoint_present": endpoint_present,
                    "token_present": token_present,
                    "configured": configured,
                }
            ),
            200,
        )


@bp.get("/projects")
def studio_projects():
    """Proxy projects list against the remote backend URL from current context.
    Standardizes response to { ok: bool, items: [...] } and avoids CORS.
    """
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    backend_url: str | None = None
    token_value: str | None = None
    try:
        backend_url = getattr(cfg, "url", None)
        token_value = getattr(cfg, "token", None)
    except Exception:
        backend_url = None
        token_value = None
    # Fallback to env file like in check_backend
    try:
        if not backend_url:
            home = _qalita_home()
            pointer = os.path.join(home, ".current_env")
            if os.path.isfile(pointer):
                with open(pointer, "r", encoding="utf-8") as f:
                    env_path = (f.read() or "").strip()
                if env_path and os.path.isfile(env_path):
                    with open(env_path, "r", encoding="utf-8") as ef:
                        for raw in ef.readlines():
                            line = (raw or "").strip()
                            if not line or line.startswith("#") or "=" not in line:
                                continue
                            k, v = line.split("=", 1)
                            k = (k or "").strip().upper()
                            v = (v or "").strip().strip('"').strip("'")
                            if k in (
                                "QALITA_AGENT_ENDPOINT",
                                "AGENT_ENDPOINT",
                                "QALITA_URL",
                                "URL",
                            ):
                                backend_url = v
                            if (
                                k in ("QALITA_AGENT_TOKEN", "QALITA_TOKEN", "TOKEN")
                                and not token_value
                            ):
                                token_value = v
    except Exception:
        pass
    if not backend_url:
        return jsonify({"ok": False, "items": [], "message": "Missing backend URL"}), 200
    try:
        url = str(backend_url).rstrip("/") + "/api/v2/projects"
    except Exception:
        url = str(backend_url) + "/api/v2/projects"
    headers = {"Accept": "application/json"}
    if token_value:
        headers["Authorization"] = f"Bearer {token_value}"
    try:
        r = requests.get(url, headers=headers, timeout=8)
        # Normalize response shapes
        try:
            body = r.json()
        except Exception:
            body = None
        def _normalize_projects(j):
            try:
                if not j:
                    return []
                if isinstance(j, list):
                    return j
                if isinstance(j, dict):
                    if isinstance(j.get("items"), list):
                        return j["items"]
                    if isinstance(j.get("data"), list):
                        return j["data"]
                    if isinstance(j.get("results"), list):
                        return j["results"]
                    if isinstance(j.get("projects"), list):
                        return j["projects"]
                    if isinstance(j.get("data"), dict) and isinstance(j["data"].get("items"), list):
                        return j["data"]["items"]
                    # Single object
                    if (j.get("id") is not None) or (j.get("name") is not None):
                        return [j]
            except Exception:
                return []
            return []
        if 200 <= r.status_code < 300:
            items = _normalize_projects(body)
            return jsonify({"ok": True, "items": items})
        # Error passthrough (without failing the request status)
        return jsonify({"ok": False, "status": r.status_code, "error": body}), 200
    except Exception as exc:
        return jsonify({"ok": False, "items": [], "message": str(exc)}), 200


@bp.get("/sources")
def studio_sources():
    """Proxy sources list against the remote backend URL from current context.
    Enrich with local presence and validation flags from ~/.qalita/sources-conf.yaml.
    Response shape: { ok: bool, items: [ { ..., local_present, local_validate } ] }
    Optional query passthrough: project_id
    """
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    backend_url: str | None = None
    token_value: str | None = None
    try:
        backend_url = getattr(cfg, "url", None)
        token_value = getattr(cfg, "token", None)
    except Exception:
        backend_url = None
        token_value = None
    # Fallback to env file like in check_backend
    try:
        if not backend_url:
            home = _qalita_home()
            pointer = os.path.join(home, ".current_env")
            if os.path.isfile(pointer):
                with open(pointer, "r", encoding="utf-8") as f:
                    env_path = (f.read() or "").strip()
                if env_path and os.path.isfile(env_path):
                    with open(env_path, "r", encoding="utf-8") as ef:
                        for raw in ef.readlines():
                            line = (raw or "").strip()
                            if not line or line.startswith("#") or "=" not in line:
                                continue
                            k, v = line.split("=", 1)
                            k = (k or "").strip().upper()
                            v = (v or "").strip().strip('"').strip("'")
                            if k in (
                                "QALITA_AGENT_ENDPOINT",
                                "AGENT_ENDPOINT",
                                "QALITA_URL",
                                "URL",
                            ):
                                backend_url = v
                            if (
                                k in ("QALITA_AGENT_TOKEN", "QALITA_TOKEN", "TOKEN")
                                and not token_value
                            ):
                                token_value = v
    except Exception:
        pass
    if not backend_url:
        return jsonify({"ok": False, "items": [], "message": "Missing backend URL"}), 200
    try:
        base = str(backend_url).rstrip("/") + "/api/v2/sources"
    except Exception:
        base = str(backend_url) + "/api/v2/sources"
    # Optional filters passthrough
    params = {}
    project_id = (request.args.get("project_id") or "").strip()
    if project_id:
        params["project_id"] = project_id
    headers = {"Accept": "application/json"}
    if token_value:
        headers["Authorization"] = f"Bearer {token_value}"
    try:
        r = requests.get(base, headers=headers, params=params, timeout=8)
        try:
            body = r.json()
        except Exception:
            body = None
        def _normalize_sources(j):
            try:
                if not j:
                    return []
                if isinstance(j, list):
                    return j
                if isinstance(j, dict):
                    if isinstance(j.get("items"), list):
                        return j["items"]
                    if isinstance(j.get("data"), list):
                        return j["data"]
                    if isinstance(j.get("results"), list):
                        return j["results"]
                    if isinstance(j.get("data"), dict) and isinstance(j["data"].get("items"), list):
                        return j["data"]["items"]
                    if isinstance(j.get("sources"), list):
                        return j["sources"]
                    # Single object
                    if (j.get("id") is not None) or (j.get("name") is not None):
                        return [j]
            except Exception:
                return []
            return []
        if 200 <= r.status_code < 300:
            items = _normalize_sources(body)
            # Enrich with local conf presence and validate flag
            conf = _read_qalita_conf()
            local_sources = conf.get("sources") if isinstance(conf.get("sources"), list) else []
            local_by_id: dict[str, dict] = {}
            try:
                for s in local_sources:
                    if isinstance(s, dict) and s.get("id") is not None:
                        local_by_id[str(s.get("id"))] = s
            except Exception:
                local_by_id = {}
            enriched = []
            seen_ids: set[str] = set()
            for it in items:
                try:
                    obj = dict(it) if isinstance(it, dict) else {"value": it}
                except Exception:
                    obj = {"value": it}
                sid = str(obj.get("id", ""))
                if sid:
                    seen_ids.add(sid)
                lobj = local_by_id.get(sid)
                obj["local_present"] = bool(lobj is not None)
                if isinstance(lobj, dict):
                    val = lobj.get("validate")
                    try:
                        obj["local_validate"] = (str(val).lower() if val is not None else None)
                    except Exception:
                        obj["local_validate"] = None
                else:
                    obj["local_validate"] = None
                enriched.append(obj)
            # Add local-only sources (not present in backend response)
            try:
                for sid, lobj in local_by_id.items():
                    if sid in seen_ids:
                        continue
                    try:
                        name = lobj.get("name") or (
                            lobj.get("source", {}).get("name") if isinstance(lobj.get("source"), dict) else None
                        ) or f"Source {sid}"
                        stype = lobj.get("type") or (
                            lobj.get("source", {}).get("type") if isinstance(lobj.get("source"), dict) else None
                        )
                    except Exception:
                        name = f"Source {sid}"
                        stype = None
                    val = lobj.get("validate")
                    try:
                        vnorm = (str(val).lower() if val is not None else None)
                    except Exception:
                        vnorm = None
                    enriched.append({
                        "id": sid,
                        "name": name,
                        "type": stype,
                        "local_present": True,
                        "local_validate": vnorm,
                    })
            except Exception:
                pass
            return jsonify({"ok": True, "items": enriched})
        return jsonify({"ok": False, "status": r.status_code, "error": body}), 200
    except Exception as exc:
        return jsonify({"ok": False, "items": [], "message": str(exc)}), 200


@bp.get("/sync-conversations")
def sync_conversations():
    """Ensure local conversations for a given issue are present by pulling from backend if missing.

    Query: issue_id
    """
    issue_id = (request.args.get("issue_id") or "").strip()
    if not issue_id:
        return jsonify({"ok": False, "message": "Missing issue_id"}), 400
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    backend_url: str | None = None
    token_value: str | None = None
    try:
        backend_url = getattr(cfg, "url", None)
        token_value = getattr(cfg, "token", None)
    except Exception:
        backend_url = None
        token_value = None
    # Try env file fallback
    try:
        if not backend_url:
            home = _qalita_home()
            pointer = os.path.join(home, ".current_env")
            if os.path.isfile(pointer):
                with open(pointer, "r", encoding="utf-8") as f:
                    env_path = (f.read() or "").strip()
                if env_path and os.path.isfile(env_path):
                    with open(env_path, "r", encoding="utf-8") as ef:
                        for raw in ef.readlines():
                            line = (raw or "").strip()
                            if not line or line.startswith("#") or "=" not in line:
                                continue
                            k, v = line.split("=", 1)
                            k = (k or "").strip().upper()
                            v = (v or "").strip().strip('"').strip("'")
                            if k in ("QALITA_AGENT_ENDPOINT", "AGENT_ENDPOINT", "QALITA_URL", "URL"):
                                backend_url = v
                            if k in ("QALITA_AGENT_TOKEN", "QALITA_TOKEN", "TOKEN") and not token_value:
                                token_value = v
    except Exception:
        pass
    if not backend_url:
        return jsonify({"ok": False, "message": "Missing backend URL"}), 200
    headers = {"Accept": "application/json"}
    if token_value:
        headers["Authorization"] = f"Bearer {token_value}"
    # List conversations
    try:
        base = str(backend_url).rstrip("/") + f"/api/v1/issues/{issue_id}/studio_conversations"
    except Exception:
        base = str(backend_url) + f"/api/v1/issues/{issue_id}/studio_conversations"
    try:
        r = requests.get(base, headers=headers, timeout=10)
        items = r.json() if r.ok else []
    except Exception:
        items = []
    # Download any missing files
    conv_dir = _studio_conv_dir()
    downloaded = 0
    try:
        for it in (items or []):
            try:
                fname = (it.get("filename") or (it.get("conv_id", "") + ".jsonl")).strip()
            except Exception:
                fname = None
            if not fname:
                continue
            local_path = os.path.join(conv_dir, fname)
            if os.path.isfile(local_path):
                continue
            # fetch download
            try:
                did = it.get("id")
                url = str(backend_url).rstrip("/") + f"/api/v1/issues/{issue_id}/studio_conversations/{did}/download"
            except Exception:
                continue
            try:
                dr = requests.get(url, headers=headers, timeout=20)
                if dr.status_code >= 400:
                    continue
                os.makedirs(conv_dir, exist_ok=True)
                with open(local_path, "wb") as f:
                    f.write(dr.content or b"")
                downloaded += 1
            except Exception:
                continue
    except Exception:
        pass
    return jsonify({"ok": True, "downloaded": downloaded})


@bp.post("/upload-conversation")
def upload_conversation():
    """Upload a local conversation file for an issue to the backend.

    Body: { conv_id: str, issue_id: str }
    """
    data = request.get_json(silent=True) or {}
    conv_id = _safe_conv_id((data.get("conv_id") or "").strip())
    issue_id = (data.get("issue_id") or "").strip()
    if not conv_id or not issue_id:
        return jsonify({"ok": False, "message": "Missing conv_id or issue_id"}), 400
    path = _studio_conv_file_for(conv_id)
    if not os.path.isfile(path):
        return jsonify({"ok": False, "message": "Local conversation not found"}), 404
    # Backend context
    cfg = current_app.config.get("QALITA_CONFIG_OBJ")
    backend_url: str | None = None
    token_value: str | None = None
    try:
        backend_url = getattr(cfg, "url", None)
        token_value = getattr(cfg, "token", None)
    except Exception:
        backend_url = None
        token_value = None
    # Try env file fallback
    try:
        if not backend_url:
            home = _qalita_home()
            pointer = os.path.join(home, ".current_env")
            if os.path.isfile(pointer):
                with open(pointer, "r", encoding="utf-8") as f:
                    env_path = (f.read() or "").strip()
                if env_path and os.path.isfile(env_path):
                    with open(env_path, "r", encoding="utf-8") as ef:
                        for raw in ef.readlines():
                            line = (raw or "").strip()
                            if not line or line.startswith("#") or "=" not in line:
                                continue
                            k, v = line.split("=", 1)
                            k = (k or "").strip().upper()
                            v = (v or "").strip().strip('"').strip("'")
                            if k in ("QALITA_AGENT_ENDPOINT", "AGENT_ENDPOINT", "QALITA_URL", "URL"):
                                backend_url = v
                            if k in ("QALITA_AGENT_TOKEN", "QALITA_TOKEN", "TOKEN") and not token_value:
                                token_value = v
    except Exception:
        pass
    if not backend_url:
        return jsonify({"ok": False, "message": "Missing backend URL"}), 200
    try:
        with open(path, "rb") as f:
            files = {
                "file": (f"{conv_id}.jsonl", f, "text/plain"),
            }
            data_form = {
                "conv_id": conv_id,
                "filename": f"{conv_id}.jsonl",
            }
            headers = {}
            if token_value:
                headers["Authorization"] = f"Bearer {token_value}"
            url = str(backend_url).rstrip("/") + f"/api/v1/issues/{issue_id}/studio_conversations"
            r = requests.post(url, headers=headers, files=files, data=data_form, timeout=30)
        if r.status_code >= 400:
            try:
                body = r.json()
            except Exception:
                body = {"detail": r.text[:200]}
            return jsonify({"ok": False, "status": r.status_code, "error": body}), 200
        return jsonify({"ok": True})
    except Exception as exc:
        return jsonify({"ok": False, "message": str(exc)}), 200


@bp.post("/chat")
def studio_chat():
    data = request.get_json(silent=True) or {}
    prompt = (data.get("prompt") or "").strip()
    conv_id = _safe_conv_id((data.get("conv_id") or "").strip())
    issue_id = (data.get("issue_id") or "").strip()
    source_id = (data.get("source_id") or "").strip()
    issue_details = data.get("issue_details") if isinstance(data.get("issue_details"), dict) else None
    # Prefer model from request; else fall back to saved Studio config; else default
    model = (data.get("model") or "").strip()
    if not model:
        try:
            cfg_path = _studio_config_path()
            if os.path.isfile(cfg_path):
                with open(cfg_path, "r", encoding="utf-8") as f:
                    raw = f.read().strip()
                    if raw:
                        cfg = json.loads(raw)
                        model = (cfg.get("model") or "").strip()
        except Exception:
            # Ignore config read errors and continue to use default below
            pass
    if not model:
        model = "gpt-oss:20b"
    if not prompt:
        return jsonify({"ok": False, "message": "Missing prompt"}), 400
    # Streaming toggle via query or body
    stream_flag_raw = (
        (request.args.get("stream") or data.get("stream") or "").strip().lower()
    )
    stream_enabled = stream_flag_raw in ("1", "true", "yes", "on")
    if stream_enabled:

        def generate_stream():
            req = None
            accumulated = ""
            logged = False
            try:
                # Log user message at start of request
                try:
                    _studio_conv_write(conv_id, {"role": "user", "text": prompt, "model": model, "issue_id": issue_id or None, "source_id": source_id or None, "issue_details": issue_details or None})
                except Exception:
                    pass
                # Try attach source details when present
                src_details = None
                try:
                    if source_id:
                        conf = _read_qalita_conf()
                        src_obj = _find_source_by_id(conf, source_id)
                        if isinstance(src_obj, dict):
                            src_details = src_obj
                except Exception:
                    src_details = None

                req = requests.post(
                    "http://127.0.0.1:11434/api/generate",
                    json={"model": model, "prompt": _augment_prompt_with_context(prompt, issue_id, source_id, issue_details, src_details), "stream": True},
                    stream=True,
                    timeout=300,
                )
                if req.status_code != 200:
                    try:
                        body = req.json()
                        msg = (
                            (body.get("error") if isinstance(body, dict) else None)
                            or (body.get("message") if isinstance(body, dict) else None)
                            or str(body)
                        )
                    except Exception:
                        msg = f"Ollama error: {req.status_code}"
                    try:
                        _studio_conv_write(
                            conv_id,
                            {
                                "role": "assistant",
                                "text": accumulated or f"[ERROR] {msg}",
                                "model": model,
                                "ok": False,
                                "status": req.status_code,
                                "error": msg,
                                "stream": True,
                                "issue_id": issue_id or None,
                                "source_id": source_id or None,
                            },
                        )
                        logged = True
                    except Exception:
                        pass
                    yield f"[ERROR] {msg}"
                    return
                for line in req.iter_lines(decode_unicode=True):
                    if not line:
                        continue
                    try:
                        obj = json.loads(line)
                        if obj.get("response"):
                            piece = obj["response"]
                            accumulated += piece
                            yield piece
                        if obj.get("done"):
                            break
                    except Exception:
                        # Fallback: passthrough raw line
                        accumulated += line
                        yield line
            except GeneratorExit:
                # Client disconnected/aborted
                if req is not None:
                    try:
                        req.close()
                    except Exception:
                        pass
                try:
                    if not logged:
                        _studio_conv_write(
                            conv_id,
                            {
                                "role": "assistant",
                                "text": accumulated,
                                "model": model,
                                "ok": True,
                                "interrupted": True,
                                "stream": True,
                                "issue_id": issue_id or None,
                                "source_id": source_id or None,
                            },
                        )
                        logged = True
                except Exception:
                    pass
                raise
            except Exception as exc:
                try:
                    if not logged:
                        _studio_conv_write(
                            conv_id,
                            {
                                "role": "assistant",
                                "text": accumulated or f"[ERROR] Failed to reach Ollama: {exc}",
                                "model": model,
                                "ok": False,
                                "error": str(exc),
                                "stream": True,
                                "issue_id": issue_id or None,
                                "source_id": source_id or None,
                            },
                        )
                        logged = True
                except Exception:
                    pass
                yield f"[ERROR] Failed to reach Ollama: {exc}"
            finally:
                if req is not None:
                    try:
                        req.close()
                    except Exception:
                        pass
                try:
                    if not logged:
                        _studio_conv_write(
                            conv_id,
                            {
                                "role": "assistant",
                                "text": accumulated,
                                "model": model,
                                "ok": True,
                                "stream": True,
                                "issue_id": issue_id or None,
                                "source_id": source_id or None,
                            },
                        )
                        logged = True
                except Exception:
                    pass

        return Response(stream_with_context(generate_stream()), mimetype="text/plain; charset=utf-8")
    try:
        # Log user message for non-streaming
        try:
            _studio_conv_write(conv_id, {"role": "user", "text": prompt, "model": model, "issue_id": issue_id or None, "source_id": source_id or None, "issue_details": issue_details or None})
        except Exception:
            pass
        # Try attach source details when present
        src_details = None
        try:
            if source_id:
                conf = _read_qalita_conf()
                src_obj = _find_source_by_id(conf, source_id)
                if isinstance(src_obj, dict):
                    src_details = src_obj
        except Exception:
            src_details = None

        r = requests.post(
            "http://127.0.0.1:11434/api/generate",
            json={"model": model, "prompt": _augment_prompt_with_context(prompt, issue_id, source_id, issue_details, src_details), "stream": False},
            timeout=60,
        )
        if r.status_code == 200:
            out = r.json().get("response", "")
            try:
                _studio_conv_write(conv_id, {"role": "assistant", "text": out, "model": model, "ok": True, "stream": False, "issue_id": issue_id or None, "source_id": source_id or None})
            except Exception:
                pass
            return jsonify({"ok": True, "response": out, "conv_id": conv_id})
        if r.status_code == 404:
            try:
                _studio_conv_write(conv_id, {"role": "assistant", "text": "", "model": model, "ok": False, "status": r.status_code, "error": "model_not_found", "stream": False})
            except Exception:
                pass
            return (
                jsonify(
                    {
                        "ok": False,
                        "message": f"Model not found in Ollama: '{model}'. Install it with 'ollama pull {model}' or update your Studio model.",
                    }
                ),
                500,
            )
        # Try to surface error body if available
        try:
            err_body = r.json()
        except Exception:
            err_body = {"detail": r.text[:200]}
        try:
            _studio_conv_write(conv_id, {"role": "assistant", "text": "", "model": model, "ok": False, "status": r.status_code, "error": err_body, "stream": False})
        except Exception:
            pass
        return (
            jsonify(
                {
                    "ok": False,
                    "message": f"Ollama error: {r.status_code}",
                    "error": err_body,
                }
            ),
            500,
        )
    except Exception as exc:
        try:
            _studio_conv_write(conv_id, {"role": "assistant", "text": "", "model": model, "ok": False, "error": str(exc), "stream": False})
        except Exception:
            pass
        return jsonify({"ok": False, "message": f"Failed to reach Ollama: {exc}"}), 502
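For reference, the removed studio_chat handler above is a thin wrapper around Ollama's local /api/generate endpoint. The snippet below is a standalone sketch of the same non-streaming call outside Flask; it assumes an Ollama daemon on 127.0.0.1:11434 with the model already pulled, and the model name simply mirrors the default hard-coded in the deleted code ("gpt-oss:20b").

import requests

resp = requests.post(
    "http://127.0.0.1:11434/api/generate",
    # Same payload shape the deleted handler sent: model, prompt, stream flag.
    json={"model": "gpt-oss:20b", "prompt": "Say hello", "stream": False},
    timeout=60,
)
resp.raise_for_status()
print(resp.json().get("response", ""))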
|