@cybermem/cli 0.6.9 → 0.6.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commands/deploy.js +1 -1
- package/dist/templates/auth-sidecar/Dockerfile +5 -0
- package/dist/templates/auth-sidecar/server.js +159 -0
- package/dist/templates/docker-compose.yml +22 -48
- package/dist/templates/monitoring/db_exporter/exporter.py +100 -59
- package/package.json +1 -1
- package/templates/auth-sidecar/Dockerfile +5 -0
- package/templates/auth-sidecar/server.js +159 -0
- package/templates/docker-compose.yml +22 -48
- package/templates/monitoring/db_exporter/exporter.py +100 -59
- package/templates/openmemory/Dockerfile +0 -19
package/dist/commands/deploy.js
CHANGED

@@ -229,7 +229,7 @@ async function deploy(options) {
         }
       `));
       console.log(chalk_1.default.gray(' - Restart: sudo systemctl restart caddy'));
-      console.log(chalk_1.default.green('\nš Full docs: https://cybermem.dev
+      console.log(chalk_1.default.green('\nš Full docs: https://docs.cybermem.dev#https'));
     }
   }
   catch (error) {
package/dist/templates/auth-sidecar/server.js
ADDED

@@ -0,0 +1,159 @@
+/**
+ * CyberMem Auth Sidecar
+ *
+ * ForwardAuth service for Traefik that validates:
+ * 1. JWT tokens (RS256) with embedded public key
+ * 2. API keys (X-API-Key header) - deprecated fallback
+ * 3. Local requests (localhost bypass)
+ *
+ * NO SECRETS REQUIRED - public key is embedded.
+ */
+
+const http = require("http");
+const fs = require("fs");
+const crypto = require("crypto");
+
+const PORT = process.env.PORT || 3001;
+const API_KEY_FILE = process.env.API_KEY_FILE || "/.env";
+
+// RSA Public Key for JWT verification (embedded - no secrets!)
+const PUBLIC_KEY = `-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAkrWPslHt+dcX/lckX4mw
+AaI4koCqn7NqEkTtuyJuzFv969Da0ghhWdTIRR6H8pYfsTtqtX2UAZox8i5IJ9t9
+JS8nBfbL2fFiuEz51LMNKMSLw7j2dJT/g5iIdT64LyJZ/9+kLMXC
+EBWPIyEvx4GMzKSf2L+jNaUY/0J8n/JNAbKtIplKtfOU/tNWuoZfcj3SnoxrmApN
+Xw+LsE26EM2Gq7MKLQf3r3GUIm2dBgs7XUNJRiezrPgFzekiaiDyFsNhhk1jkx2I
+ljQgSslGQ4dODE73KB07b0Qi7zPWAtGlCyDQD5RLICzht1mMENta7x+TlPJfDv8g
+XeEmW5ihAgMBAAE=
+-----END PUBLIC KEY-----`;
+
+// Load API key from file (deprecated fallback)
+function loadApiKey() {
+  try {
+    const content = fs.readFileSync(API_KEY_FILE, "utf-8");
+    const match = content.match(/OM_API_KEY=(.+)/);
+    return match ? match[1].trim() : null;
+  } catch {
+    return null;
+  }
+}
+
+// RS256 JWT validation
+function validateJwt(token) {
+  try {
+    const parts = token.split(".");
+    if (parts.length !== 3) return null;
+
+    const [headerB64, payloadB64, signatureB64] = parts;
+
+    // Decode header to check algorithm
+    const header = JSON.parse(Buffer.from(headerB64, "base64url").toString());
+    if (header.alg !== "RS256") {
+      console.log("JWT: unsupported algorithm", header.alg);
+      return null;
+    }
+
+    // Verify RS256 signature
+    const data = `${headerB64}.${payloadB64}`;
+    const signature = Buffer.from(signatureB64, "base64url");
+
+    const verify = crypto.createVerify("RSA-SHA256");
+    verify.update(data);
+
+    if (!verify.verify(PUBLIC_KEY, signature)) {
+      console.log("JWT: signature verification failed");
+      return null;
+    }
+
+    // Decode payload
+    const payload = JSON.parse(Buffer.from(payloadB64, "base64url").toString());
+
+    // Check expiration
+    if (payload.exp && payload.exp < Date.now() / 1000) {
+      console.log("JWT: token expired");
+      return null;
+    }
+
+    // Check issuer
+    if (payload.iss !== "cybermem.dev") {
+      console.log("JWT: invalid issuer", payload.iss);
+      return null;
+    }
+
+    return payload;
+  } catch (err) {
+    console.log("JWT validation error:", err.message);
+    return null;
+  }
+}
+
+// Check if request is from localhost
+function isLocalRequest(req) {
+  const forwarded = req.headers["x-forwarded-for"];
+  const realIp = req.headers["x-real-ip"];
+  const ip = forwarded?.split(",")[0] || realIp || req.socket.remoteAddress;
+
+  return ip === "127.0.0.1" || ip === "::1" || ip === "localhost";
+}
+
+// ForwardAuth handler
+const server = http.createServer((req, res) => {
+  // Health check
+  if (req.url === "/health") {
+    res.writeHead(200, { "Content-Type": "application/json" });
+    res.end(JSON.stringify({ status: "ok" }));
+    return;
+  }
+
+  const authHeader = req.headers["authorization"];
+  const apiKeyHeader = req.headers["x-api-key"];
+
+  // 1. Check JWT (Authorization: Bearer <token>)
+  if (authHeader?.startsWith("Bearer ")) {
+    const token = authHeader.substring(7);
+    const payload = validateJwt(token);
+
+    if (payload) {
+      console.log(`Auth OK: JWT (${payload.email || payload.sub})`);
+      res.writeHead(200, {
+        "X-User-Id": payload.sub || "",
+        "X-User-Email": payload.email || "",
+        "X-User-Name": payload.name || "",
+        "X-Auth-Method": "jwt",
+      });
+      res.end();
+      return;
+    }
+  }
+
+  // 2. Check API Key (deprecated fallback)
+  const expectedKey = loadApiKey();
+  if (apiKeyHeader && expectedKey && apiKeyHeader === expectedKey) {
+    console.log("Auth OK: API Key (deprecated)");
+    res.writeHead(200, {
+      "X-Auth-Method": "api-key",
+      "X-Auth-Deprecated": "true",
+    });
+    res.end();
+    return;
+  }
+
+  // 3. Local bypass (development)
+  if (isLocalRequest(req)) {
+    console.log("Auth OK: Local bypass");
+    res.writeHead(200, {
+      "X-Auth-Method": "local",
+    });
+    res.end();
+    return;
+  }
+
+  // 4. Unauthorized
+  console.log("Auth FAILED: No valid credentials");
+  res.writeHead(401, { "Content-Type": "application/json" });
+  res.end(JSON.stringify({ error: "Unauthorized" }));
+});
+
+server.listen(PORT, () => {
+  console.log(`Auth sidecar (RS256) listening on port ${PORT}`);
+});
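The sidecar above answers every request with either 200 or 401, so it can be smoke-tested without Traefik or the rest of the stack. A minimal sketch in Python, assuming the container is published on localhost:3001 and that the file mounted at /.env contains OM_API_KEY=dev-secret-key (both are assumptions about the local setup, adjust to yours):

import urllib.error
import urllib.request

BASE = "http://localhost:3001"

def check(path, headers):
    # Returns the HTTP status the sidecar answers with for the given headers.
    req = urllib.request.Request(BASE + path, headers=headers)
    try:
        with urllib.request.urlopen(req) as resp:
            return resp.status
    except urllib.error.HTTPError as e:
        return e.code

# /health needs no credentials.
print(check("/health", {}))  # expected: 200

# Simulate an external caller: X-Forwarded-For defeats the localhost bypass.
print(check("/auth", {"X-Forwarded-For": "203.0.113.7"}))  # expected: 401

# Deprecated API-key fallback (value assumed to match OM_API_KEY in /.env).
print(check("/auth", {"X-Forwarded-For": "203.0.113.7",
                      "X-API-Key": "dev-secret-key"}))  # expected: 200

The same request without X-Forwarded-For would hit the localhost bypass and return 200 regardless of credentials, which is why the test sets the header explicitly.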
package/dist/templates/docker-compose.yml
CHANGED

@@ -40,47 +40,20 @@ services:
       - traefik.http.services.mcp-get.loadbalancer.server.port=8081
     restart: unless-stopped
 
-
-
-
-
-    container_name: cybermem-openmemory
-    ports: [] # Access via Traefik on 8626
-    volumes:
-      - openmemory-data:/data
-      - ${CYBERMEM_ENV_PATH}:/.env
+  # Auth sidecar for JWT/API key validation (ForwardAuth)
+  auth-sidecar:
+    image: ghcr.io/mikhailkogan17/cybermem-auth-sidecar:latest
+    container_name: cybermem-auth-sidecar
     environment:
-
-
-
-
-
-      # Embeddings (local dev uses Ollama)
-      OM_EMBEDDINGS: ${EMBEDDINGS_PROVIDER:-ollama}
-      OLLAMA_URL: ${OLLAMA_URL:-http://ollama:11434}
-      OPENAI_API_KEY: ${OPENAI_API_KEY:-}
-
-      # Database (local dev uses SQLite)
-      OM_METADATA_BACKEND: ${DB_BACKEND:-sqlite}
-      OM_DB_PATH: ${DB_PATH:-/data/openmemory.sqlite}
-      OM_VECTOR_BACKEND: ${VECTOR_BACKEND:-sqlite}
-
-      # PostgreSQL (for production/testing)
-      OM_PG_HOST: ${PG_HOST:-postgres}
-      OM_PG_PORT: ${PG_PORT:-5432}
-      OM_PG_DB: ${PG_DB:-openmemory}
-      OM_PG_USER: ${PG_USER:-openmemory}
-      OM_PG_PASSWORD: ${PG_PASSWORD:-}
-
-      # Performance
-      OM_RATE_LIMIT_ENABLED: "true"
-      OM_RATE_LIMIT_MAX_REQUESTS: "1000"
-
+      PORT: "3001"
+      API_KEY_FILE: /.env
+    volumes:
+      - ${CYBERMEM_ENV_PATH}:/.env:ro
     labels:
       - traefik.enable=true
-
-      - traefik.http.
-      - traefik.http.
+      # ForwardAuth middleware
+      - traefik.http.middlewares.auth-check.forwardauth.address=http://auth-sidecar:3001/auth
+      - traefik.http.middlewares.auth-check.forwardauth.authResponseHeaders=X-User-Id,X-User-Email,X-User-Name,X-Auth-Method
     healthcheck:
       test:
         [

@@ -89,15 +62,16 @@ services:
           "--quiet",
           "--tries=1",
           "--spider",
-          "http://localhost:
+          "http://localhost:3001/health",
         ]
       interval: 30s
-      timeout:
+      timeout: 5s
       retries: 3
-      start_period: 40s
     restart: unless-stopped
-
-
+
+  # NOTE: openmemory container REMOVED
+  # Memory now handled by @cybermem/mcp with embedded openmemory-js SDK
+  # SQLite stored at ~/.cybermem/data/openmemory.sqlite
 
   db-exporter:
     image: ghcr.io/mikhailkogan17/cybermem-db_exporter:latest

@@ -109,10 +83,9 @@ services:
     ports:
       - "8000:8000"
     volumes:
-
+      # Mount host openmemory data dir (created by SDK)
+      - ${HOME}/.cybermem/data:/data:ro
    restart: unless-stopped
-    depends_on:
-      - openmemory
 
   log-exporter:
     image: ghcr.io/mikhailkogan17/cybermem-log_exporter:latest

@@ -124,12 +97,11 @@ services:
       DB_PATH: /data/openmemory.sqlite
     volumes:
       - traefik-logs:/var/log/traefik:ro
-      -
+      - ${HOME}/.cybermem/data:/data:ro
       - ./monitoring/log_exporter/exporter.py:/app/exporter.py:ro
     restart: unless-stopped
     depends_on:
       - traefik
-      - openmemory
 
   postgres:
     image: postgres:15-alpine

@@ -200,6 +172,7 @@ services:
     image: ghcr.io/mikhailkogan17/cybermem-dashboard:latest
     container_name: cybermem-dashboard
     environment:
+      DB_EXPORTER_URL: http://db-exporter:8000
       NEXT_PUBLIC_PROMETHEUS_URL: http://prometheus:9090
       PROMETHEUS_URL: http://prometheus:9090
       OM_API_KEY: ${OM_API_KEY:-dev-secret-key}

@@ -221,6 +194,7 @@ services:
     restart: unless-stopped
     depends_on:
       - prometheus
+      - db-exporter
 
 volumes:
   openmemory-data:
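For readers who have not used Traefik's ForwardAuth before: the two auth-check labels added above mean that each routed request is first sent to http://auth-sidecar:3001/auth; a 2xx answer lets the original request through and copies the listed authResponseHeaders onto it, while any other status is returned to the client unchanged. A rough sketch of that contract in Python (an illustration of the behaviour only, not Traefik's implementation):

import urllib.error
import urllib.request

AUTH_ADDRESS = "http://auth-sidecar:3001/auth"
AUTH_RESPONSE_HEADERS = ["X-User-Id", "X-User-Email", "X-User-Name", "X-Auth-Method"]

def forward_auth(request_headers):
    """Return (allowed, headers_to_add_upstream) for one incoming request."""
    req = urllib.request.Request(AUTH_ADDRESS, headers=request_headers)
    try:
        with urllib.request.urlopen(req) as resp:
            # 2xx: allow, and forward the selected identity headers to the service.
            extra = {h: resp.headers[h] for h in AUTH_RESPONSE_HEADERS if resp.headers[h]}
            return True, extra
    except urllib.error.HTTPError:
        # Non-2xx: the auth server's response (here a 401 JSON body) goes back to the client.
        return False, {}

Routers opt in to the middleware separately (typically a traefik.http.routers.<name>.middlewares=auth-check label), which is not shown in this hunk.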
package/dist/templates/monitoring/db_exporter/exporter.py
CHANGED

@@ -78,10 +78,19 @@ success_rate_aggregate = Gauge(
 )
 
 
-def get_db_connection():
-    """Get SQLite database connection.
+def get_db_connection(readonly=True):
+    """Get SQLite database connection.
+
+    For WAL mode databases, we need read-write access to perform checkpoint
+    and see the latest data.
+    """
     try:
-
+        if readonly:
+            # Read-only mode for metrics queries
+            conn = sqlite3.connect(f"file:{DB_PATH}?mode=ro", uri=True, timeout=10.0)
+        else:
+            # Read-write mode for checkpoint operations
+            conn = sqlite3.connect(DB_PATH, timeout=10.0)
         conn.row_factory = sqlite3.Row
         return conn
     except Exception as e:
@@ -89,6 +98,19 @@ def get_db_connection():
         raise
 
 
+def do_wal_checkpoint():
+    """Perform WAL checkpoint to flush data to main database file."""
+    try:
+        conn = get_db_connection(readonly=False)
+        result = conn.execute("PRAGMA wal_checkpoint(PASSIVE)").fetchone()
+        conn.close()
+        logger.debug(f"WAL checkpoint result: {result}")
+        return True
+    except Exception as e:
+        logger.warning(f"WAL checkpoint failed (read-only volume?): {e}")
+        return False
+
+
 def collect_metrics():
     """Collect all metrics from OpenMemory database."""
     try:
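As the new docstring notes, the metrics queries use a read-only connection while the database lives in WAL mode, so a periodic checkpoint is used to fold the -wal file back into the main database file; PRAGMA wal_checkpoint(PASSIVE) does that without blocking writers. A self-contained sketch against a throwaway database (path and table invented for illustration):

import os
import sqlite3
import tempfile

path = os.path.join(tempfile.mkdtemp(), "demo.sqlite")

# Writer: switch to WAL and commit one row; the row initially lives in demo.sqlite-wal.
w = sqlite3.connect(path)
w.execute("PRAGMA journal_mode=WAL")
w.execute("CREATE TABLE t (x)")
w.execute("INSERT INTO t VALUES (1)")
w.commit()

# PASSIVE checkpoint copies WAL frames into the main file without blocking writers.
# The result row is (busy, wal_pages, checkpointed_pages), as logged by do_wal_checkpoint().
print(w.execute("PRAGMA wal_checkpoint(PASSIVE)").fetchone())

# Read-only URI connection, the same form the exporter now uses for metrics queries.
r = sqlite3.connect(f"file:{path}?mode=ro", uri=True)
print(r.execute("SELECT COUNT(*) FROM t").fetchone())  # (1,)
r.close()
w.close()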
@@ -280,6 +302,19 @@ def metrics():
     return Response(generate_latest(), mimetype=CONTENT_TYPE_LATEST)
 
 
+@app.route("/health")
+def health():
+    """Health check endpoint for dashboard"""
+    try:
+        db = get_db_connection()
+        cursor = db.cursor()
+        cursor.execute("SELECT 1")
+        db.close()
+        return jsonify({"status": "ok", "db": "connected"})
+    except Exception as e:
+        return jsonify({"status": "error", "error": str(e)}), 503
+
+
 @app.route("/api/logs")
 def api_logs():
     """Access logs API endpoint"""
@@ -401,88 +436,92 @@ def api_stats():
 
 @app.route("/api/timeseries")
 def api_timeseries():
-    """Time series data for dashboard charts -
+    """Time series data for dashboard charts - cumulative totals with exact timestamps"""
     try:
         period = request.args.get("period", "24h")
 
-        # Parse period to
+        # Parse period to seconds
         period_map = {"1h": 3600, "24h": 86400, "7d": 604800, "30d": 2592000}
         period_seconds = period_map.get(period, 86400)
         start_ms = int((time.time() - period_seconds) * 1000)
-
-        # Bucket size: 1h for 24h, 6h for 7d, 1d for 30d
-        if period in ["1h", "24h"]:
-            bucket_format = "%Y-%m-%d %H:00"
-            bucket_seconds = 3600
-        elif period == "7d":
-            bucket_format = "%Y-%m-%d %H:00"
-            bucket_seconds = 21600  # 6 hours
-        else:
-            bucket_format = "%Y-%m-%d"
-            bucket_seconds = 86400
+        now_ms = int(time.time() * 1000)
 
         db = get_db_connection()
         cursor = db.cursor()
 
-        # Get
+        # Get all events in the period, ordered by timestamp
         cursor.execute(
             """
             SELECT
-
+                timestamp,
                 client_name,
-                operation
-                COUNT(*) as count
+                operation
             FROM cybermem_access_log
             WHERE timestamp >= ?
-
-            ORDER BY dt
+            ORDER BY timestamp ASC
             """,
-            [start_ms
+            [start_ms],
         )
 
-
-
-        reads = {}
-        updates = {}
-        deletes = {}
+        rows = cursor.fetchall()
+        db.close()
 
-
-
+        # Map operations to chart categories
+        def get_op_key(op):
+            if op == "create":
+                return "creates"
+            if op in ["read", "list", "query", "search", "other"]:
+                return "reads"
+            if op == "update":
+                return "updates"
+            if op == "delete":
+                return "deletes"
+            return None
+
+        # Build cumulative data structures
+        # Each chart will have list of {time, client1: cumulative_count, client2: cumulative_count, ...}
+        results = {"creates": [], "reads": [], "updates": [], "deletes": []}
+
+        # Track running totals per client per operation type
+        running_totals = {"creates": {}, "reads": {}, "updates": {}, "deletes": {}}
+
+        # Add initial zero point at period start
+        start_ts = start_ms // 1000
+        for op_key in results:
+            results[op_key].append({"time": start_ts})
+
+        # Process each event and build cumulative series
+        for row in rows:
+            ts = row["timestamp"] // 1000  # Convert to seconds
             client = row["client_name"] or "unknown"
             op = row["operation"]
-
+            op_key = get_op_key(op)
 
-
-
-            bucket_ts = (ts // bucket_seconds) * bucket_seconds
+            if not op_key:
+                continue
 
-
-
-
-
-            elif op in ["read", "list", "query", "search", "other"]:
-                if bucket_ts not in reads:
-                    reads[bucket_ts] = {"time": bucket_ts}
-                reads[bucket_ts][client] = reads[bucket_ts].get(client, 0) + count
-            elif op == "update":
-                if bucket_ts not in updates:
-                    updates[bucket_ts] = {"time": bucket_ts}
-                updates[bucket_ts][client] = updates[bucket_ts].get(client, 0) + count
-            elif op == "delete":
-                if bucket_ts not in deletes:
-                    deletes[bucket_ts] = {"time": bucket_ts}
-                deletes[bucket_ts][client] = deletes[bucket_ts].get(client, 0) + count
+            # Increment running total for this client
+            if client not in running_totals[op_key]:
+                running_totals[op_key][client] = 0
+            running_totals[op_key][client] += 1
 
-
+            # Create a data point with current cumulative state
+            point = {"time": ts}
+            for c, count in running_totals[op_key].items():
+                point[c] = count
 
-
-
-
-
-
-
-
-
+            results[op_key].append(point)
+
+        # Add final point at "now" with same totals
+        now_ts = now_ms // 1000
+        for op_key in results:
+            if running_totals[op_key]:
+                final_point = {"time": now_ts}
+                for c, count in running_totals[op_key].items():
+                    final_point[c] = count
+                results[op_key].append(final_point)
+
+        return jsonify(results)
     except Exception as e:
         logger.error(f"Error in /api/timeseries: {e}", exc_info=True)
         return jsonify({"error": str(e)}), 500
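The practical effect of the rewrite is that each series now carries cumulative per-client totals at the exact event timestamps instead of per-bucket counts. With three invented events (two creates from "cli", one from "vscode") the "creates" series would come back shaped roughly like this (toy data, not taken from a real deployment):

creates = [
    {"time": 1700000000},                         # zero point at period start
    {"time": 1700000100, "cli": 1},               # first cli create
    {"time": 1700000200, "cli": 1, "vscode": 1},  # first vscode create
    {"time": 1700000300, "cli": 2, "vscode": 1},  # second cli create
    {"time": 1700000400, "cli": 2, "vscode": 1},  # final point at "now"
]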
@@ -493,6 +532,8 @@ def metrics_collection_loop():
     logger.info("Starting metrics collection loop")
     while True:
         try:
+            # Checkpoint WAL to ensure we see latest data
+            do_wal_checkpoint()
             collect_metrics()
             time.sleep(SCRAPE_INTERVAL)
         except Exception as e:
package/package.json
CHANGED

package/templates/auth-sidecar/server.js
ADDED
(same content as package/dist/templates/auth-sidecar/server.js above)

package/templates/docker-compose.yml
CHANGED
(same changes as package/dist/templates/docker-compose.yml above)

package/templates/monitoring/db_exporter/exporter.py
CHANGED
(same changes as package/dist/templates/monitoring/db_exporter/exporter.py above)
package/templates/openmemory/Dockerfile
REMOVED

@@ -1,19 +0,0 @@
-# OpenMemory using official npm package
-FROM node:20-alpine
-
-WORKDIR /app
-
-# Install openmemory-js from npm (waiting for release with MCP fix)
-RUN npm install openmemory-js@1.3.2
-
-# Create data directory
-RUN mkdir -p /data && chown -R node:node /data /app
-
-USER node
-
-EXPOSE 8080
-
-HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
-  CMD node -e "require('http').get('http://localhost:8080/health', (res) => process.exit(res.statusCode === 200 ? 0 : 1)).on('error', () => process.exit(1))"
-
-CMD ["npm", "start", "--prefix", "/app/node_modules/openmemory-js"]