ocerebro 0.3.2 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cerebro/index/entities.db +0 -0
- package/package.json +1 -1
- package/pyproject.toml +3 -1
- package/src/dashboard/__init__.py +1 -0
- package/src/dashboard/api.py +404 -0
- package/src/dashboard/server.py +179 -0
- package/src/dashboard/static/index.html +519 -0
- package/src/dashboard/static/style.css +579 -0
- package/src/index/entities_db.py +5 -1
- package/src/mcp/server.py +95 -32
|
Binary file
|
package/package.json
CHANGED
package/pyproject.toml
CHANGED
|
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
|
|
4
4
|
|
|
5
5
|
[project]
|
|
6
6
|
name = "ocerebro"
|
|
7
|
-
version = "0.
|
|
7
|
+
version = "0.4.0"
|
|
8
8
|
description = "OCerebro - Sistema de Memoria para Agentes (Claude Code/MCP)"
|
|
9
9
|
readme = "README.md"
|
|
10
10
|
requires-python = ">=3.10"
|
|
@@ -38,6 +38,8 @@ dependencies = [
|
|
|
38
38
|
"mcp>=1.0.0",
|
|
39
39
|
"anthropic>=0.40.0",
|
|
40
40
|
"spacy>=3.5.0",
|
|
41
|
+
"fastapi>=0.109.0",
|
|
42
|
+
"uvicorn>=0.27.0",
|
|
41
43
|
]
|
|
42
44
|
|
|
43
45
|
[project.optional-dependencies]
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Dashboard package
|
|
@@ -0,0 +1,404 @@
|
|
|
1
|
+
"""API endpoints para o Dashboard do OCerebro"""
|
|
2
|
+
|
|
3
|
+
from fastapi import APIRouter, HTTPException, Query
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, Dict, List, Optional
|
|
6
|
+
import sqlite3
|
|
7
|
+
import json
|
|
8
|
+
from datetime import datetime
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def create_router(
    metadata_db,
    embeddings_db,
    entities_db,
    cerebro_path: Path
) -> APIRouter:
    """Create the router with all dashboard API endpoints.

    Args:
        metadata_db: metadata database; must expose ``_connect()`` (returning a
            sqlite3-style connection whose rows are addressable by column name)
            and ``get_by_id()``.
        embeddings_db: embeddings database handle. Not used by any endpoint
            yet; kept so the factory signature stays stable.
        entities_db: entities database; must expose ``_connect()`` and
            ``get_stats()``.
        cerebro_path: path to the ``.cerebro`` directory containing the
            ``official/``, ``working/`` and ``raw/`` memory stores.

    Returns:
        An ``APIRouter`` with every endpoint registered under the ``/api``
        prefix.
    """
    router = APIRouter(prefix="/api")

    # Stash the handles on the router object so the endpoint closures can
    # reach them without module-level globals.
    router.metadata_db = metadata_db
    router.embeddings_db = embeddings_db
    router.entities_db = entities_db
    router.cerebro_path = cerebro_path

    @router.get("/status")
    async def get_status():
        """Return overall system status (counts, graph stats, last activity)."""
        try:
            conn = router.metadata_db._connect()
            try:
                # One connection for all metadata queries (the previous
                # version opened and closed the connection twice), and a
                # `finally` so it cannot leak on a failed query.
                total_memories = conn.execute(
                    "SELECT COUNT(*) FROM memories"
                ).fetchone()[0]

                last_activity = conn.execute(
                    "SELECT created_at FROM memories ORDER BY created_at DESC LIMIT 1"
                ).fetchone()
                last_activity = last_activity[0] if last_activity else None

                projects = conn.execute(
                    "SELECT COUNT(DISTINCT project) FROM memories"
                ).fetchone()[0]
            finally:
                conn.close()

            graph_stats = router.entities_db.get_stats()

            return {
                "total_memories": total_memories,
                "total_entities": graph_stats["total_entities"],
                "total_relationships": graph_stats["total_relationships"],
                "projects": projects,
                "last_activity": last_activity
            }
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @router.get("/projects")
    async def get_projects():
        """Return the list of projects with per-project memory counts."""
        try:
            conn = router.metadata_db._connect()
            try:  # ensure the connection is closed even when a query fails
                rows = conn.execute("""
                    SELECT project, COUNT(*) as memory_count
                    FROM memories
                    GROUP BY project
                    ORDER BY memory_count DESC
                """).fetchall()

                projects = []
                for row in rows:
                    project = row["project"]

                    # Per-type breakdown for this project.
                    types_rows = conn.execute("""
                        SELECT type, COUNT(*) as count
                        FROM memories
                        WHERE project = ?
                        GROUP BY type
                    """, (project,)).fetchall()

                    projects.append({
                        "name": project,
                        "memory_count": row["memory_count"],
                        "types": {r["type"]: r["count"] for r in types_rows}
                    })
            finally:
                conn.close()

            return projects
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @router.get("/graph")
    async def get_graph(
        project: Optional[str] = Query(None),
        types: Optional[str] = Query(None)
    ):
        """Return the entity graph in Cytoscape.js format.

        Args:
            project: optional project name to restrict entities to.
            types: optional comma-separated list of entity types.
        """
        try:
            type_list = types.split(",") if types else None

            conn = router.entities_db._connect()
            try:
                # Build the entity query once instead of four near-identical
                # copies. Parameter order (type list first, then project)
                # matches the original implementation.
                conditions = []
                params = []
                if type_list:
                    placeholders = ",".join("?" * len(type_list))
                    conditions.append(f"e.entity_type IN ({placeholders})")
                    params.extend(type_list)
                if project:
                    # NOTE(review): this subquery assumes the `memories` table
                    # is visible on the *entities* DB connection (same file or
                    # ATTACHed) — confirm against entities_db._connect().
                    conditions.append(
                        "EXISTS (SELECT 1 FROM memories m "
                        "WHERE m.id = e.memory_id AND m.project = ?)"
                    )
                    params.append(project)

                where = f"WHERE {' AND '.join(conditions)}" if conditions else ""
                entities = conn.execute(
                    f"""
                    SELECT DISTINCT e.id, e.entity_name, e.entity_type, e.memory_id
                    FROM entities e
                    {where}
                    LIMIT 100
                    """,
                    params
                ).fetchall()

                # Build nodes; collect names for the relationship lookup.
                nodes = []
                entity_ids = set()
                for e in entities:
                    entity_ids.add(e["entity_name"])
                    nodes.append({
                        "data": {
                            "id": e["entity_name"],
                            "label": e["entity_name"],
                            "type": e["entity_type"],
                            "memory_id": e["memory_id"]
                        }
                    })

                # Fetch relationships touching any selected entity.
                edges = []
                if entity_ids:
                    placeholders = ",".join("?" * len(entity_ids))
                    rels = conn.execute(
                        f"""
                        SELECT source_entity, target_entity, relationship_type
                        FROM entity_relationships
                        WHERE source_entity IN ({placeholders})
                           OR target_entity IN ({placeholders})
                        LIMIT 500
                        """,
                        list(entity_ids) + list(entity_ids)
                    ).fetchall()

                    for r in rels:
                        # NOTE(review): the edge id ignores relationship_type,
                        # so two different relations between the same pair get
                        # the same id. Kept as-is for frontend compatibility —
                        # confirm whether Cytoscape needs unique edge ids here.
                        edges.append({
                            "data": {
                                "id": f"{r['source_entity']}_{r['target_entity']}",
                                "source": r["source_entity"],
                                "target": r["target_entity"],
                                "label": r["relationship_type"]
                            }
                        })
            finally:
                conn.close()

            return {"nodes": nodes, "edges": edges}
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @router.get("/memories")
    async def get_memories(
        project: Optional[str] = Query(None),
        mem_type: Optional[str] = Query(None),
        q: Optional[str] = Query(None),
        limit: int = Query(50, ge=1, le=200)
    ):
        """Return a list of memories with metadata and a GC-risk score.

        Args:
            project: optional project filter.
            mem_type: optional memory-type filter.
            q: optional full-text query; when given, the other filters are
                ignored (matching the original behavior).
            limit: maximum number of rows (1-200).
        """
        try:
            # Hoisted out of the per-row loop: the import is loop-invariant.
            from src.forgetting.gc import calculate_rfms_score

            conn = router.metadata_db._connect()
            try:
                if q:
                    # Full-text search path.
                    # NOTE(review): `content MATCH ?` requires `memories` to be
                    # an FTS table — confirm against the metadata schema.
                    rows = conn.execute("""
                        SELECT id, title, type, project, tags, created_at, updated_at
                        FROM memories
                        WHERE content MATCH ?
                        LIMIT ?
                    """, (q, limit)).fetchall()
                else:
                    # Filtered listing path; build WHERE clauses incrementally.
                    query = "SELECT id, title, type, project, tags, created_at, updated_at FROM memories WHERE 1=1"
                    params = []

                    if project:
                        query += " AND project = ?"
                        params.append(project)

                    if mem_type:
                        query += " AND type = ?"
                        params.append(mem_type)

                    query += " ORDER BY created_at DESC LIMIT ?"
                    params.append(limit)

                    rows = conn.execute(query, params).fetchall()
            finally:
                conn.close()

            memories = []
            for row in rows:
                memory_dict = dict(row)
                # GC risk is the complement of the RFMS retention score.
                gc_risk = 1.0 - calculate_rfms_score(memory_dict)

                memories.append({
                    "id": memory_dict["id"],
                    "title": memory_dict["title"] or memory_dict["id"],
                    "type": memory_dict["type"],
                    "project": memory_dict["project"],
                    "tags": memory_dict["tags"].split(",") if memory_dict["tags"] else [],
                    "created_at": memory_dict["created_at"],
                    "updated_at": memory_dict["updated_at"],
                    "gc_risk": round(gc_risk, 2)
                })

            return memories
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @router.get("/memory/{memory_id}")
    async def get_memory(memory_id: str):
        """Return the full content of a single memory.

        Looks for the memory's backing file in order: ``official/``
        (markdown), ``working/`` (yaml), ``raw/`` (jsonl event logs), then
        the auto-memory store.

        Raises:
            HTTPException: 404 when the id is unknown, 500 on unexpected
                errors.
        """
        try:
            memory = router.metadata_db.get_by_id(memory_id)
            if not memory:
                raise HTTPException(status_code=404, detail="Memória não encontrada")

            content = ""

            # 1) official/ — rendered markdown. (The original contained two
            # no-op placeholder loops here; removed as dead code.)
            official_path = router.cerebro_path / "official"
            if official_path.exists():
                for md_file in official_path.rglob(f"{memory_id}.md"):
                    content = md_file.read_text(encoding="utf-8")
                    break

            # 2) working/ — yaml drafts.
            if not content:
                working_path = router.cerebro_path / "working"
                for yaml_file in working_path.rglob(f"{memory_id}.yaml"):
                    content = yaml_file.read_text(encoding="utf-8")
                    break

            # 3) raw/ — jsonl event logs; render the matching event as markdown.
            if not content:
                raw_path = router.cerebro_path / "raw"
                for jsonl_file in raw_path.rglob("*.jsonl"):
                    with open(jsonl_file, "r", encoding="utf-8") as f:
                        for line in f:
                            try:
                                event = json.loads(line)
                            except json.JSONDecodeError:
                                # A corrupt line should not 500 the request.
                                continue
                            if event.get("id") == memory_id:
                                content = f"# {memory_id}\n\n```json\n{json.dumps(event, indent=2)}\n```"
                                break
                    if content:
                        # Stop scanning further files once found; the old code
                        # kept iterating and a later file could overwrite the
                        # match.
                        break

            # 4) Fallback: auto-memory store.
            if not content:
                from src.core.paths import get_auto_mem_path
                auto_path = get_auto_mem_path()
                for md_file in auto_path.rglob(f"{memory_id}.md"):
                    content = md_file.read_text(encoding="utf-8")
                    break

            return {
                "id": memory_id,
                "content": content,
                "metadata": memory
            }
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    @router.get("/timeline")
    async def get_timeline(
        project: Optional[str] = Query(None),
        days: int = Query(30, ge=1, le=90)
    ):
        """Return per-day memory counts formatted for Chart.js.

        Args:
            project: optional project filter.
            days: how many days back to include (1-90).
        """
        try:
            conn = router.metadata_db._connect()
            try:
                # One query template; the project filter is the only variant
                # (the original duplicated the whole SQL string).
                project_filter = "WHERE project = ? AND" if project else "WHERE"
                query = f"""
                    SELECT DATE(created_at) as date, type, COUNT(*) as count
                    FROM memories
                    {project_filter} created_at >= DATE('now', ?)
                    GROUP BY DATE(created_at), type
                    ORDER BY date
                """
                params = (project, f"-{days} days") if project else (f"-{days} days",)
                rows = conn.execute(query, params).fetchall()
            finally:
                conn.close()

            # Pivot rows into {date: {type: count}}.
            by_date = {}
            types_set = set()
            for row in rows:
                by_date.setdefault(row["date"], {})[row["type"]] = row["count"]
                types_set.add(row["type"])

            labels = sorted(by_date.keys())

            colors = {
                "decision": "#3B82F6",
                "error": "#EF4444",
                "reference": "#10B981",
                "feedback": "#F59E0B",
                "default": "#6366F1"
            }

            datasets = []
            for mem_type in sorted(types_set):
                color = colors.get(mem_type, colors["default"])
                datasets.append({
                    "label": mem_type,
                    "data": [by_date.get(date, {}).get(mem_type, 0) for date in labels],
                    "borderColor": color,
                    "backgroundColor": color + "40",  # hex alpha suffix ≈ 25% opacity
                    "tension": 0.3,
                    "fill": True
                })

            return {"labels": labels, "datasets": datasets}
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))

    return router
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
"""Servidor web do Dashboard do OCerebro"""
|
|
2
|
+
|
|
3
|
+
import socket
|
|
4
|
+
import threading
|
|
5
|
+
import webbrowser
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any, Optional
|
|
8
|
+
|
|
9
|
+
from fastapi import FastAPI
|
|
10
|
+
from fastapi.middleware.cors import CORSMiddleware
|
|
11
|
+
from fastapi.staticfiles import StaticFiles
|
|
12
|
+
import uvicorn
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class DashboardServer:
|
|
16
|
+
"""
|
|
17
|
+
Servidor FastAPI para o dashboard do OCerebro.
|
|
18
|
+
|
|
19
|
+
Responsabilidades:
|
|
20
|
+
- Montar app FastAPI com static files e API
|
|
21
|
+
- Iniciar servidor uvicorn em thread daemon
|
|
22
|
+
- Verificar se já está rodando
|
|
23
|
+
- Abrir browser automaticamente
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
def __init__(
|
|
27
|
+
self,
|
|
28
|
+
cerebro_path: Path,
|
|
29
|
+
metadata_db,
|
|
30
|
+
embeddings_db,
|
|
31
|
+
entities_db
|
|
32
|
+
):
|
|
33
|
+
"""
|
|
34
|
+
Inicializa o servidor do dashboard.
|
|
35
|
+
|
|
36
|
+
Args:
|
|
37
|
+
cerebro_path: Path para o diretório .cerebro
|
|
38
|
+
metadata_db: Instância do MetadataDB
|
|
39
|
+
embeddings_db: Instância do EmbeddingsDB
|
|
40
|
+
entities_db: Instância do EntitiesDB
|
|
41
|
+
"""
|
|
42
|
+
self.cerebro_path = cerebro_path
|
|
43
|
+
self.metadata_db = metadata_db
|
|
44
|
+
self.embeddings_db = embeddings_db
|
|
45
|
+
self.entities_db = entities_db
|
|
46
|
+
|
|
47
|
+
self.app = self._create_app()
|
|
48
|
+
self._server_thread: Optional[threading.Thread] = None
|
|
49
|
+
self._port: Optional[int] = None
|
|
50
|
+
|
|
51
|
+
def _create_app(self) -> FastAPI:
|
|
52
|
+
"""Cria e configura o app FastAPI"""
|
|
53
|
+
app = FastAPI(
|
|
54
|
+
title="OCerebro Dashboard",
|
|
55
|
+
description="Dashboard visual para memória do OCerebro",
|
|
56
|
+
version="0.3.0"
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
# CORS para permitir requests do browser local
|
|
60
|
+
app.add_middleware(
|
|
61
|
+
CORSMiddleware,
|
|
62
|
+
allow_origins=["*"],
|
|
63
|
+
allow_credentials=True,
|
|
64
|
+
allow_methods=["*"],
|
|
65
|
+
allow_headers=["*"],
|
|
66
|
+
)
|
|
67
|
+
|
|
68
|
+
# Monta static files
|
|
69
|
+
static_path = Path(__file__).parent / "static"
|
|
70
|
+
if static_path.exists():
|
|
71
|
+
app.mount("/static", StaticFiles(directory=str(static_path)), name="static")
|
|
72
|
+
|
|
73
|
+
# Monta API router
|
|
74
|
+
from src.dashboard.api import create_router
|
|
75
|
+
router = create_router(
|
|
76
|
+
self.metadata_db,
|
|
77
|
+
self.embeddings_db,
|
|
78
|
+
self.entities_db,
|
|
79
|
+
self.cerebro_path
|
|
80
|
+
)
|
|
81
|
+
app.include_router(router)
|
|
82
|
+
|
|
83
|
+
# Página principal
|
|
84
|
+
@app.get("/")
|
|
85
|
+
async def root():
|
|
86
|
+
from fastapi.responses import FileResponse
|
|
87
|
+
index_path = static_path / "index.html"
|
|
88
|
+
if index_path.exists():
|
|
89
|
+
return FileResponse(str(index_path))
|
|
90
|
+
return {"error": "index.html not found"}
|
|
91
|
+
|
|
92
|
+
return app
|
|
93
|
+
|
|
94
|
+
def is_running(self, port: int = 7999) -> bool:
|
|
95
|
+
"""
|
|
96
|
+
Verifica se o servidor já está rodando na porta.
|
|
97
|
+
|
|
98
|
+
Args:
|
|
99
|
+
port: Porta para verificar
|
|
100
|
+
|
|
101
|
+
Returns:
|
|
102
|
+
True se já estiver rodando
|
|
103
|
+
"""
|
|
104
|
+
try:
|
|
105
|
+
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
|
106
|
+
sock.settimeout(1)
|
|
107
|
+
result = sock.connect_ex(('127.0.0.1', port))
|
|
108
|
+
sock.close()
|
|
109
|
+
return result == 0
|
|
110
|
+
except Exception:
|
|
111
|
+
return False
|
|
112
|
+
|
|
113
|
+
def start(self, port: int = 7999) -> bool:
|
|
114
|
+
"""
|
|
115
|
+
Inicia o servidor em thread daemon.
|
|
116
|
+
|
|
117
|
+
Args:
|
|
118
|
+
port: Porta para escutar
|
|
119
|
+
|
|
120
|
+
Returns:
|
|
121
|
+
True se iniciado com sucesso
|
|
122
|
+
"""
|
|
123
|
+
if self.is_running(port):
|
|
124
|
+
return True
|
|
125
|
+
|
|
126
|
+
try:
|
|
127
|
+
self._port = port
|
|
128
|
+
|
|
129
|
+
def run_server():
|
|
130
|
+
uvicorn.run(
|
|
131
|
+
self.app,
|
|
132
|
+
host="127.0.0.1",
|
|
133
|
+
port=port,
|
|
134
|
+
log_level="error",
|
|
135
|
+
access_log=False
|
|
136
|
+
)
|
|
137
|
+
|
|
138
|
+
self._server_thread = threading.Thread(
|
|
139
|
+
target=run_server,
|
|
140
|
+
daemon=True,
|
|
141
|
+
name="dashboard-server"
|
|
142
|
+
)
|
|
143
|
+
self._server_thread.start()
|
|
144
|
+
|
|
145
|
+
# Aguarda servidor estar pronto
|
|
146
|
+
import time
|
|
147
|
+
for _ in range(50): # 5 segundos max
|
|
148
|
+
time.sleep(0.1)
|
|
149
|
+
if self.is_running(port):
|
|
150
|
+
return True
|
|
151
|
+
|
|
152
|
+
return False
|
|
153
|
+
except Exception:
|
|
154
|
+
return False
|
|
155
|
+
|
|
156
|
+
def open_browser(self, port: int = 7999) -> bool:
|
|
157
|
+
"""
|
|
158
|
+
Abre o dashboard no browser padrão.
|
|
159
|
+
|
|
160
|
+
Args:
|
|
161
|
+
port: Porta do servidor
|
|
162
|
+
|
|
163
|
+
Returns:
|
|
164
|
+
True se abriu com sucesso
|
|
165
|
+
"""
|
|
166
|
+
try:
|
|
167
|
+
url = f"http://localhost:{port}"
|
|
168
|
+
webbrowser.open(url)
|
|
169
|
+
return True
|
|
170
|
+
except Exception:
|
|
171
|
+
return False
|
|
172
|
+
|
|
173
|
+
def get_status(self) -> dict:
|
|
174
|
+
"""Retorna status do servidor"""
|
|
175
|
+
return {
|
|
176
|
+
"running": self.is_running(self._port) if self._port else False,
|
|
177
|
+
"port": self._port,
|
|
178
|
+
"thread_alive": self._server_thread.is_alive() if self._server_thread else False
|
|
179
|
+
}
|