cloudbrain-server 1.2.0__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudbrain_server/db_config.py +123 -0
- cloudbrain_server/init_database.py +5 -5
- cloudbrain_server/start_server.py +1515 -93
- cloudbrain_server/token_manager.py +717 -0
- {cloudbrain_server-1.2.0.dist-info → cloudbrain_server-2.0.0.dist-info}/METADATA +2 -2
- cloudbrain_server-2.0.0.dist-info/RECORD +14 -0
- cloudbrain_server-1.2.0.dist-info/RECORD +0 -12
- {cloudbrain_server-1.2.0.dist-info → cloudbrain_server-2.0.0.dist-info}/WHEEL +0 -0
- {cloudbrain_server-1.2.0.dist-info → cloudbrain_server-2.0.0.dist-info}/entry_points.txt +0 -0
- {cloudbrain_server-1.2.0.dist-info → cloudbrain_server-2.0.0.dist-info}/top_level.txt +0 -0
|
@@ -11,9 +11,26 @@ import sqlite3
|
|
|
11
11
|
import sys
|
|
12
12
|
import os
|
|
13
13
|
import socket
|
|
14
|
+
import uuid
|
|
15
|
+
import hashlib
|
|
14
16
|
from datetime import datetime
|
|
15
17
|
from typing import Dict, List
|
|
16
18
|
from pathlib import Path
|
|
19
|
+
from token_manager import TokenManager
|
|
20
|
+
from db_config import get_db_connection, is_postgres, is_sqlite, get_db_path, CursorWrapper
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_timestamp_function():
|
|
24
|
+
"""Get the database-specific timestamp function"""
|
|
25
|
+
return "datetime('now')" if is_sqlite() else "CURRENT_TIMESTAMP"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def convert_query(query: str) -> str:
|
|
29
|
+
"""Convert SQLite query to PostgreSQL query if needed"""
|
|
30
|
+
if is_sqlite():
|
|
31
|
+
return query
|
|
32
|
+
# Replace SQLite placeholders with PostgreSQL placeholders
|
|
33
|
+
return query.replace('?', '%s')
|
|
17
34
|
|
|
18
35
|
|
|
19
36
|
def is_server_running(host='127.0.0.1', port=8766):
|
|
@@ -27,29 +44,78 @@ def is_server_running(host='127.0.0.1', port=8766):
|
|
|
27
44
|
return False
|
|
28
45
|
|
|
29
46
|
|
|
47
|
+
def acquire_server_lock():
|
|
48
|
+
"""Acquire server lock to prevent multiple instances on same machine"""
|
|
49
|
+
import os
|
|
50
|
+
lock_file = '/tmp/cloudbrain_server.lock'
|
|
51
|
+
|
|
52
|
+
if os.path.exists(lock_file):
|
|
53
|
+
try:
|
|
54
|
+
with open(lock_file, 'r') as f:
|
|
55
|
+
pid = int(f.read().strip())
|
|
56
|
+
|
|
57
|
+
try:
|
|
58
|
+
os.kill(pid, 0)
|
|
59
|
+
print(f"❌ CloudBrain server is already running (PID: {pid})")
|
|
60
|
+
print("💡 Only one CloudBrain server instance is allowed per machine.")
|
|
61
|
+
print("💡 Use: ps aux | grep start_server to find the running process")
|
|
62
|
+
print("💡 Or: kill the existing server first")
|
|
63
|
+
return False
|
|
64
|
+
except OSError:
|
|
65
|
+
os.remove(lock_file)
|
|
66
|
+
except Exception as e:
|
|
67
|
+
print(f"⚠️ Error reading lock file: {e}")
|
|
68
|
+
return False
|
|
69
|
+
|
|
70
|
+
try:
|
|
71
|
+
with open(lock_file, 'w') as f:
|
|
72
|
+
f.write(str(os.getpid()))
|
|
73
|
+
print(f"🔒 Server lock acquired (PID: {os.getpid()})")
|
|
74
|
+
return True
|
|
75
|
+
except Exception as e:
|
|
76
|
+
print(f"❌ Failed to acquire server lock: {e}")
|
|
77
|
+
return False
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def release_server_lock():
|
|
81
|
+
"""Release server lock"""
|
|
82
|
+
import os
|
|
83
|
+
lock_file = '/tmp/cloudbrain_server.lock'
|
|
84
|
+
|
|
85
|
+
try:
|
|
86
|
+
if os.path.exists(lock_file):
|
|
87
|
+
os.remove(lock_file)
|
|
88
|
+
print("🔓 Server lock released")
|
|
89
|
+
except Exception as e:
|
|
90
|
+
print(f"⚠️ Error releasing server lock: {e}")
|
|
91
|
+
|
|
92
|
+
|
|
30
93
|
def print_banner():
|
|
31
94
|
"""Print server startup banner"""
|
|
32
95
|
print()
|
|
33
96
|
print("=" * 70)
|
|
34
|
-
print("🧠 CloudBrain Server - AI Collaboration System")
|
|
97
|
+
print("🧠 CloudBrain Server - LA AI Familio Collaboration System")
|
|
35
98
|
print("=" * 70)
|
|
36
99
|
print()
|
|
37
100
|
print("📋 SERVER INFORMATION")
|
|
38
101
|
print("-" * 70)
|
|
39
102
|
print(f"📍 Host: 127.0.0.1")
|
|
40
|
-
print(f"🔌 Port: 8766")
|
|
103
|
+
print(f"🔌 Port: 8766 (AIs connect here to join LA AI Familio)")
|
|
41
104
|
print(f"🌐 Protocol: WebSocket (ws://127.0.0.1:8766)")
|
|
42
|
-
print(f"💾 Database:
|
|
105
|
+
print(f"💾 Database: {get_db_path()}")
|
|
106
|
+
print(f"🔒 Server Lock: One instance per machine (prevents fragmentation)")
|
|
43
107
|
print()
|
|
44
|
-
print("🤖
|
|
108
|
+
print("🤖 LA AI FAMILIO - Connected AI Agents")
|
|
45
109
|
print("-" * 70)
|
|
46
110
|
|
|
47
111
|
try:
|
|
48
|
-
conn =
|
|
49
|
-
|
|
112
|
+
conn = get_db_connection()
|
|
113
|
+
if is_sqlite():
|
|
114
|
+
conn.row_factory = sqlite3.Row
|
|
50
115
|
cursor = conn.cursor()
|
|
51
|
-
cursor
|
|
52
|
-
|
|
116
|
+
wrapped_cursor = CursorWrapper(cursor, ['id', 'name', 'nickname', 'expertise', 'version'])
|
|
117
|
+
wrapped_cursor.execute("SELECT id, name, nickname, expertise, version FROM ai_profiles ORDER BY id")
|
|
118
|
+
profiles = wrapped_cursor.fetchall()
|
|
53
119
|
conn.close()
|
|
54
120
|
|
|
55
121
|
if profiles:
|
|
@@ -67,9 +133,9 @@ def print_banner():
|
|
|
67
133
|
print(f" ⚠️ Could not load AI profiles: {e}")
|
|
68
134
|
print()
|
|
69
135
|
|
|
70
|
-
print("📚 CLIENT USAGE")
|
|
136
|
+
print("📚 CLIENT USAGE - Join LA AI Familio")
|
|
71
137
|
print("-" * 70)
|
|
72
|
-
print("To connect an AI client, run:")
|
|
138
|
+
print("To connect an AI client to port 8766 and join LA AI Familio, run:")
|
|
73
139
|
print()
|
|
74
140
|
print(" python client/cloudbrain_client.py <ai_id> [project_name]")
|
|
75
141
|
print()
|
|
@@ -108,14 +174,26 @@ def print_banner():
|
|
|
108
174
|
print()
|
|
109
175
|
print("🔧 ADMINISTRATION")
|
|
110
176
|
print("-" * 70)
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
177
|
+
|
|
178
|
+
if is_sqlite():
|
|
179
|
+
print("Check online users:")
|
|
180
|
+
print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages ORDER BY id DESC LIMIT 10;\"")
|
|
181
|
+
print()
|
|
182
|
+
print("View all messages:")
|
|
183
|
+
print(" sqlite3 ai_db/cloudbrain.db \"SELECT sender_id, content FROM ai_messages;\"")
|
|
184
|
+
print()
|
|
185
|
+
print("Search messages:")
|
|
186
|
+
print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages_fts WHERE content MATCH 'CloudBrain';\"")
|
|
187
|
+
else:
|
|
188
|
+
print("Check online users:")
|
|
189
|
+
print(" psql cloudbrain \"SELECT * FROM ai_messages ORDER BY id DESC LIMIT 10;\"")
|
|
190
|
+
print()
|
|
191
|
+
print("View all messages:")
|
|
192
|
+
print(" psql cloudbrain \"SELECT sender_id, content FROM ai_messages;\"")
|
|
193
|
+
print()
|
|
194
|
+
print("Search messages:")
|
|
195
|
+
print(" psql cloudbrain \"SELECT * FROM ai_messages WHERE content LIKE '%CloudBrain%';\"")
|
|
196
|
+
|
|
119
197
|
print()
|
|
120
198
|
print("⚙️ SERVER STATUS")
|
|
121
199
|
print("-" * 70)
|
|
@@ -132,11 +210,58 @@ class CloudBrainServer:
|
|
|
132
210
|
self.port = port
|
|
133
211
|
self.db_path = db_path
|
|
134
212
|
self.clients: Dict[int, websockets.WebSocketServerProtocol] = {}
|
|
213
|
+
self.client_projects: Dict[int, str] = {}
|
|
214
|
+
|
|
215
|
+
# Initialize token manager for authentication
|
|
216
|
+
self.token_manager = TokenManager(db_path)
|
|
217
|
+
|
|
218
|
+
# Enable WAL mode for better concurrency
|
|
219
|
+
self._enable_wal_mode()
|
|
135
220
|
|
|
136
221
|
# Initialize brain state tables
|
|
137
222
|
self._init_brain_state_tables()
|
|
138
223
|
|
|
224
|
+
def _enable_wal_mode(self):
|
|
225
|
+
"""Enable WAL (Write-Ahead Logging) mode for better SQLite concurrency"""
|
|
226
|
+
if is_postgres():
|
|
227
|
+
return
|
|
228
|
+
conn = get_db_connection()
|
|
229
|
+
conn.execute('PRAGMA journal_mode=WAL')
|
|
230
|
+
conn.execute('PRAGMA synchronous=NORMAL')
|
|
231
|
+
conn.close()
|
|
232
|
+
|
|
139
233
|
def _init_brain_state_tables(self):
|
|
234
|
+
"""Initialize server authorization tables"""
|
|
235
|
+
import os
|
|
236
|
+
|
|
237
|
+
if is_sqlite():
|
|
238
|
+
schema_path = os.path.join(os.path.dirname(__file__), 'server_authorization_schema.sql')
|
|
239
|
+
else:
|
|
240
|
+
schema_path = os.path.join(os.path.dirname(__file__), 'server_authorization_schema_postgres.sql')
|
|
241
|
+
|
|
242
|
+
if not os.path.exists(schema_path):
|
|
243
|
+
print("⚠️ Server authorization schema file not found")
|
|
244
|
+
return
|
|
245
|
+
|
|
246
|
+
with open(schema_path, 'r') as f:
|
|
247
|
+
schema = f.read()
|
|
248
|
+
|
|
249
|
+
conn = get_db_connection()
|
|
250
|
+
cursor = conn.cursor()
|
|
251
|
+
|
|
252
|
+
for statement in schema.split(';'):
|
|
253
|
+
statement = statement.strip()
|
|
254
|
+
if statement and not statement.startswith('--'):
|
|
255
|
+
try:
|
|
256
|
+
cursor.execute(statement)
|
|
257
|
+
except Exception as e:
|
|
258
|
+
if 'already exists' not in str(e) and 'duplicate' not in str(e).lower():
|
|
259
|
+
print(f"⚠️ Error executing authorization schema statement: {e}")
|
|
260
|
+
|
|
261
|
+
conn.commit()
|
|
262
|
+
conn.close()
|
|
263
|
+
|
|
264
|
+
def _init_brain_state_tables_postgres(self):
|
|
140
265
|
"""Initialize brain state tables if they don't exist"""
|
|
141
266
|
import os
|
|
142
267
|
|
|
@@ -150,16 +275,16 @@ class CloudBrainServer:
|
|
|
150
275
|
schema_sql = f.read()
|
|
151
276
|
|
|
152
277
|
# Execute schema
|
|
153
|
-
conn =
|
|
278
|
+
conn = get_db_connection()
|
|
154
279
|
cursor = conn.cursor()
|
|
155
280
|
|
|
156
281
|
# Split and execute statements
|
|
157
|
-
statements = [s.strip() for s in schema_sql.split(';') if s.strip()]
|
|
282
|
+
statements = [s.strip() for s in schema_sql.split(';') if s.strip() and not s.strip().startswith('--')]
|
|
158
283
|
for statement in statements:
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
284
|
+
try:
|
|
285
|
+
cursor.execute(statement)
|
|
286
|
+
except Exception as e:
|
|
287
|
+
if 'already exists' not in str(e) and 'duplicate' not in str(e).lower():
|
|
163
288
|
print(f"⚠️ Error executing schema statement: {e}")
|
|
164
289
|
|
|
165
290
|
conn.commit()
|
|
@@ -179,22 +304,134 @@ class CloudBrainServer:
|
|
|
179
304
|
auth_data = json.loads(first_msg)
|
|
180
305
|
|
|
181
306
|
ai_id = auth_data.get('ai_id')
|
|
307
|
+
auth_token = auth_data.get('auth_token')
|
|
182
308
|
project_name = auth_data.get('project')
|
|
183
309
|
|
|
184
310
|
if not ai_id:
|
|
185
311
|
await websocket.send(json.dumps({'error': 'ai_id required'}))
|
|
186
312
|
return
|
|
187
313
|
|
|
188
|
-
|
|
189
|
-
|
|
314
|
+
# Validate authentication token
|
|
315
|
+
if auth_token:
|
|
316
|
+
validation_result = self.token_manager.validate_token(auth_token)
|
|
317
|
+
|
|
318
|
+
if not validation_result['valid']:
|
|
319
|
+
print(f"❌ Authentication failed: {validation_result['error']}")
|
|
320
|
+
await websocket.send(json.dumps({
|
|
321
|
+
'error': f'Authentication failed: {validation_result["error"]}'
|
|
322
|
+
}))
|
|
323
|
+
return
|
|
324
|
+
|
|
325
|
+
# Verify token belongs to the claimed AI
|
|
326
|
+
if validation_result['ai_id'] != ai_id:
|
|
327
|
+
print(f"❌ Token mismatch: token belongs to AI {validation_result['ai_id']}, not {ai_id}")
|
|
328
|
+
await websocket.send(json.dumps({
|
|
329
|
+
'error': 'Token does not belong to this AI'
|
|
330
|
+
}))
|
|
331
|
+
return
|
|
332
|
+
|
|
333
|
+
print(f"✅ Token validated for AI {ai_id} ({validation_result['ai_name']})")
|
|
334
|
+
|
|
335
|
+
# Check project permissions if project specified
|
|
336
|
+
if project_name:
|
|
337
|
+
permission_check = self.token_manager.check_project_permission(ai_id, project_name)
|
|
338
|
+
if not permission_check['has_permission']:
|
|
339
|
+
print(f"❌ AI {ai_id} does not have permission for project '{project_name}'")
|
|
340
|
+
await websocket.send(json.dumps({
|
|
341
|
+
'error': f'No permission for project: {project_name}'
|
|
342
|
+
}))
|
|
343
|
+
return
|
|
344
|
+
print(f"✅ Project permission verified: {project_name} ({permission_check['role']})")
|
|
345
|
+
|
|
346
|
+
# Log successful authentication to audit table
|
|
347
|
+
self.token_manager.log_authentication(
|
|
348
|
+
ai_id=ai_id,
|
|
349
|
+
project=project_name,
|
|
350
|
+
success=True,
|
|
351
|
+
details=f"Token: {validation_result['token_prefix']}"
|
|
352
|
+
)
|
|
353
|
+
else:
|
|
354
|
+
# No token provided - allow connection but log as unauthenticated
|
|
355
|
+
print(f"⚠️ No authentication token provided for AI {ai_id}")
|
|
356
|
+
self.token_manager.log_authentication(
|
|
357
|
+
ai_id=ai_id,
|
|
358
|
+
project=project_name,
|
|
359
|
+
success=False,
|
|
360
|
+
details="No token provided"
|
|
361
|
+
)
|
|
362
|
+
|
|
363
|
+
conn = get_db_connection()
|
|
364
|
+
if is_sqlite():
|
|
365
|
+
conn.row_factory = sqlite3.Row
|
|
190
366
|
cursor = conn.cursor()
|
|
191
367
|
cursor.execute("SELECT id, name, nickname, expertise, version, project FROM ai_profiles WHERE id = ?", (ai_id,))
|
|
192
368
|
ai_profile = cursor.fetchone()
|
|
193
369
|
|
|
194
370
|
if not ai_profile:
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
371
|
+
# AI 999 is for auto-assignment
|
|
372
|
+
if ai_id == 999:
|
|
373
|
+
# First check if an AI with this name already exists
|
|
374
|
+
ai_name = auth_data.get('ai_name', '')
|
|
375
|
+
if ai_name:
|
|
376
|
+
cursor.execute("SELECT id, name, nickname, expertise, version, project FROM ai_profiles WHERE name = ?", (ai_name,))
|
|
377
|
+
ai_profile = cursor.fetchone()
|
|
378
|
+
|
|
379
|
+
if ai_profile:
|
|
380
|
+
# Use existing AI profile
|
|
381
|
+
ai_id = ai_profile['id']
|
|
382
|
+
print(f"✅ Found existing AI profile: {ai_id} ({ai_name})")
|
|
383
|
+
ai_name = ai_profile['name']
|
|
384
|
+
ai_nickname = ai_profile['nickname']
|
|
385
|
+
ai_expertise = ai_profile['expertise']
|
|
386
|
+
ai_version = ai_profile['version']
|
|
387
|
+
ai_project = ai_profile['project']
|
|
388
|
+
ai_profile = dict(ai_profile)
|
|
389
|
+
# Continue to rest of connection code
|
|
390
|
+
else:
|
|
391
|
+
# Auto-assign a new AI ID
|
|
392
|
+
cursor.execute("SELECT MAX(id) FROM ai_profiles")
|
|
393
|
+
max_id = cursor.fetchone()[0] or 0
|
|
394
|
+
new_id = max_id + 1
|
|
395
|
+
|
|
396
|
+
# Limit AI IDs to < 99
|
|
397
|
+
if new_id >= 99:
|
|
398
|
+
# Find the smallest unused ID
|
|
399
|
+
cursor.execute("SELECT id FROM ai_profiles ORDER BY id")
|
|
400
|
+
existing_ids = {row[0] for row in cursor.fetchall()}
|
|
401
|
+
for i in range(1, 99):
|
|
402
|
+
if i not in existing_ids:
|
|
403
|
+
new_id = i
|
|
404
|
+
break
|
|
405
|
+
|
|
406
|
+
# Create new AI profile
|
|
407
|
+
ai_name = auth_data.get('ai_name', f'AI_{new_id}')
|
|
408
|
+
ai_nickname = auth_data.get('ai_nickname', '')
|
|
409
|
+
ai_expertise = auth_data.get('ai_expertise', 'General')
|
|
410
|
+
ai_version = '1.0.0'
|
|
411
|
+
ai_project = project_name or ''
|
|
412
|
+
|
|
413
|
+
cursor.execute("""
|
|
414
|
+
INSERT INTO ai_profiles (id, name, nickname, expertise, version, project)
|
|
415
|
+
VALUES (?, ?, ?, ?, ?, ?)
|
|
416
|
+
""", (new_id, ai_name, ai_nickname, ai_expertise, ai_version, ai_project))
|
|
417
|
+
|
|
418
|
+
conn.commit()
|
|
419
|
+
|
|
420
|
+
ai_id = new_id
|
|
421
|
+
ai_profile = {
|
|
422
|
+
'id': ai_id,
|
|
423
|
+
'name': ai_name,
|
|
424
|
+
'nickname': ai_nickname,
|
|
425
|
+
'expertise': ai_expertise,
|
|
426
|
+
'version': ai_version,
|
|
427
|
+
'project': ai_project
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
print(f"✅ Auto-assigned AI ID: {ai_id} ({ai_name})")
|
|
431
|
+
else:
|
|
432
|
+
conn.close()
|
|
433
|
+
await websocket.send(json.dumps({'error': f'AI {ai_id} not found'}))
|
|
434
|
+
return
|
|
198
435
|
|
|
199
436
|
ai_name = ai_profile['name']
|
|
200
437
|
ai_nickname = ai_profile['nickname']
|
|
@@ -212,9 +449,38 @@ class CloudBrainServer:
|
|
|
212
449
|
|
|
213
450
|
conn.close()
|
|
214
451
|
|
|
452
|
+
# Generate git-like session identifier for this connection
|
|
453
|
+
# Similar to git commit hashes: first 7 chars of SHA-1 hash
|
|
454
|
+
session_data = f"{ai_id}-{datetime.now().isoformat()}-{uuid.uuid4().hex[:8]}"
|
|
455
|
+
session_hash = hashlib.sha1(session_data.encode()).hexdigest()
|
|
456
|
+
session_identifier = session_hash[:7]
|
|
457
|
+
|
|
458
|
+
# Store session information
|
|
459
|
+
conn = get_db_connection()
|
|
460
|
+
cursor = conn.cursor()
|
|
461
|
+
|
|
462
|
+
# Update ai_current_state with session identifier
|
|
463
|
+
cursor.execute(f"""
|
|
464
|
+
UPDATE ai_current_state
|
|
465
|
+
SET session_identifier = ?, session_start_time = {get_timestamp_function()}
|
|
466
|
+
WHERE ai_id = ?
|
|
467
|
+
""", (session_identifier, ai_id))
|
|
468
|
+
|
|
469
|
+
# Record active session
|
|
470
|
+
cursor.execute(f"""
|
|
471
|
+
INSERT INTO ai_active_sessions
|
|
472
|
+
(ai_id, session_id, session_identifier, connection_time, last_activity, project, is_active)
|
|
473
|
+
VALUES (?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()}, ?, {1 if is_sqlite() else 'TRUE'})
|
|
474
|
+
""", (ai_id, str(uuid.uuid4()), session_identifier, ai_project))
|
|
475
|
+
|
|
476
|
+
conn.commit()
|
|
477
|
+
conn.close()
|
|
478
|
+
|
|
215
479
|
self.clients[ai_id] = websocket
|
|
480
|
+
self.client_projects[ai_id] = ai_project
|
|
216
481
|
|
|
217
482
|
print(f"✅ {ai_name} (AI {ai_id}, {ai_expertise}, v{ai_version}) connected")
|
|
483
|
+
print(f"🔑 Session ID: {session_identifier} (git-like hash)")
|
|
218
484
|
if ai_project:
|
|
219
485
|
print(f"📁 Project: {ai_project}")
|
|
220
486
|
|
|
@@ -226,6 +492,7 @@ class CloudBrainServer:
|
|
|
226
492
|
'ai_expertise': ai_expertise,
|
|
227
493
|
'ai_version': ai_version,
|
|
228
494
|
'ai_project': ai_project,
|
|
495
|
+
'session_identifier': session_identifier,
|
|
229
496
|
'timestamp': datetime.now().isoformat()
|
|
230
497
|
}))
|
|
231
498
|
|
|
@@ -291,6 +558,54 @@ class CloudBrainServer:
|
|
|
291
558
|
await self.handle_brain_add_thought(sender_id, data)
|
|
292
559
|
elif message_type == 'brain_get_thoughts':
|
|
293
560
|
await self.handle_brain_get_thoughts(sender_id, data)
|
|
561
|
+
elif message_type == 'conversation_create':
|
|
562
|
+
await self.handle_conversation_create(sender_id, data)
|
|
563
|
+
elif message_type == 'conversation_list':
|
|
564
|
+
await self.handle_conversation_list(sender_id, data)
|
|
565
|
+
elif message_type == 'conversation_get':
|
|
566
|
+
await self.handle_conversation_get(sender_id, data)
|
|
567
|
+
elif message_type == 'project_switch':
|
|
568
|
+
await self.handle_project_switch(sender_id, data)
|
|
569
|
+
elif message_type == 'code_create':
|
|
570
|
+
await self.handle_code_create(sender_id, data)
|
|
571
|
+
elif message_type == 'code_update':
|
|
572
|
+
await self.handle_code_update(sender_id, data)
|
|
573
|
+
elif message_type == 'code_list':
|
|
574
|
+
await self.handle_code_list(sender_id, data)
|
|
575
|
+
elif message_type == 'code_get':
|
|
576
|
+
await self.handle_code_get(sender_id, data)
|
|
577
|
+
elif message_type == 'code_review_add':
|
|
578
|
+
await self.handle_code_review_add(sender_id, data)
|
|
579
|
+
elif message_type == 'code_deploy':
|
|
580
|
+
await self.handle_code_deploy(sender_id, data)
|
|
581
|
+
elif message_type == 'memory_create':
|
|
582
|
+
await self.handle_memory_create(sender_id, data)
|
|
583
|
+
elif message_type == 'memory_list':
|
|
584
|
+
await self.handle_memory_list(sender_id, data)
|
|
585
|
+
elif message_type == 'memory_get':
|
|
586
|
+
await self.handle_memory_get(sender_id, data)
|
|
587
|
+
elif message_type == 'memory_endorse':
|
|
588
|
+
await self.handle_memory_endorse(sender_id, data)
|
|
589
|
+
elif message_type == 'who_am_i':
|
|
590
|
+
await self.handle_who_am_i(sender_id, data)
|
|
591
|
+
elif message_type == 'list_online_ais':
|
|
592
|
+
await self.handle_list_online_ais(sender_id, data)
|
|
593
|
+
elif message_type == 'documentation_get':
|
|
594
|
+
await self.handle_documentation_get(sender_id, data)
|
|
595
|
+
elif message_type == 'documentation_list':
|
|
596
|
+
await self.handle_documentation_list(sender_id, data)
|
|
597
|
+
elif message_type == 'documentation_search':
|
|
598
|
+
await self.handle_documentation_search(sender_id, data)
|
|
599
|
+
elif message_type == 'token_generate':
|
|
600
|
+
await self.handle_token_generate(sender_id, data)
|
|
601
|
+
elif message_type == 'token_validate':
|
|
602
|
+
await self.handle_token_validate(sender_id, data)
|
|
603
|
+
elif message_type == 'check_project_permission':
|
|
604
|
+
await self.handle_check_project_permission(sender_id, data)
|
|
605
|
+
elif message_type == 'grant_project_permission':
|
|
606
|
+
await self.handle_grant_project_permission(sender_id, data)
|
|
607
|
+
elif message_type == 'revoke_project_permission':
|
|
608
|
+
await self.handle_revoke_project_permission(sender_id, data)
|
|
294
609
|
else:
|
|
295
610
|
print(f"⚠️ Unknown message type: {message_type}")
|
|
296
611
|
|
|
@@ -301,6 +616,9 @@ class CloudBrainServer:
|
|
|
301
616
|
content = data.get('content', '')
|
|
302
617
|
metadata = data.get('metadata', {})
|
|
303
618
|
|
|
619
|
+
# Use session-specific project from client_projects
|
|
620
|
+
sender_project = self.client_projects.get(sender_id)
|
|
621
|
+
|
|
304
622
|
# Ensure content is a string
|
|
305
623
|
if not isinstance(content, str):
|
|
306
624
|
content = json.dumps(content) if isinstance(content, dict) else str(content)
|
|
@@ -309,23 +627,26 @@ class CloudBrainServer:
|
|
|
309
627
|
if not isinstance(metadata, dict):
|
|
310
628
|
metadata = {}
|
|
311
629
|
|
|
312
|
-
conn =
|
|
313
|
-
|
|
630
|
+
conn = get_db_connection()
|
|
631
|
+
if is_sqlite():
|
|
632
|
+
conn.row_factory = sqlite3.Row
|
|
314
633
|
cursor = conn.cursor()
|
|
315
634
|
|
|
316
|
-
cursor.execute("SELECT name, nickname, expertise
|
|
635
|
+
cursor.execute("SELECT name, nickname, expertise FROM ai_profiles WHERE id = ?", (sender_id,))
|
|
317
636
|
ai_row = cursor.fetchone()
|
|
318
637
|
sender_name = ai_row['name'] if ai_row else f'AI {sender_id}'
|
|
319
638
|
sender_nickname = ai_row['nickname'] if ai_row else None
|
|
320
639
|
sender_expertise = ai_row['expertise'] if ai_row else ''
|
|
321
|
-
sender_project = ai_row['project'] if ai_row else None
|
|
322
|
-
|
|
323
|
-
conn.close()
|
|
324
640
|
|
|
325
|
-
|
|
326
|
-
cursor =
|
|
641
|
+
# Get session identifier for this AI
|
|
642
|
+
cursor.execute("SELECT session_identifier FROM ai_current_state WHERE ai_id = ?", (sender_id,))
|
|
643
|
+
session_row = cursor.fetchone()
|
|
644
|
+
session_identifier = session_row['session_identifier'] if session_row else None
|
|
327
645
|
|
|
328
|
-
if
|
|
646
|
+
# Use session identifier as identity if available, otherwise use fallback logic
|
|
647
|
+
if session_identifier:
|
|
648
|
+
sender_identity = session_identifier
|
|
649
|
+
elif sender_nickname and sender_project:
|
|
329
650
|
sender_identity = f"{sender_nickname}_{sender_project}"
|
|
330
651
|
elif sender_nickname:
|
|
331
652
|
sender_identity = sender_nickname
|
|
@@ -338,11 +659,15 @@ class CloudBrainServer:
|
|
|
338
659
|
metadata_with_project['project'] = sender_project
|
|
339
660
|
metadata_with_project['identity'] = sender_identity
|
|
340
661
|
|
|
341
|
-
|
|
662
|
+
# Add session identifier to metadata if available
|
|
663
|
+
if session_identifier:
|
|
664
|
+
metadata_with_project['session_identifier'] = session_identifier
|
|
665
|
+
|
|
666
|
+
cursor.execute(f"""
|
|
342
667
|
INSERT INTO ai_messages
|
|
343
|
-
(sender_id, conversation_id, message_type, content, metadata, created_at)
|
|
344
|
-
VALUES (?, ?, ?, ?, ?,
|
|
345
|
-
""", (sender_id, conversation_id, message_type, content, json.dumps(metadata_with_project)))
|
|
668
|
+
(sender_id, conversation_id, message_type, content, metadata, project, created_at)
|
|
669
|
+
VALUES (?, ?, ?, ?, ?, ?, {get_timestamp_function()})
|
|
670
|
+
""", (sender_id, conversation_id, message_type, content, json.dumps(metadata_with_project), sender_project))
|
|
346
671
|
|
|
347
672
|
message_id = cursor.lastrowid
|
|
348
673
|
conn.commit()
|
|
@@ -376,7 +701,7 @@ class CloudBrainServer:
|
|
|
376
701
|
"""Handle get_online_users request"""
|
|
377
702
|
users = []
|
|
378
703
|
for ai_id in self.clients.keys():
|
|
379
|
-
conn =
|
|
704
|
+
conn = get_db_connection()
|
|
380
705
|
conn.row_factory = sqlite3.Row
|
|
381
706
|
cursor = conn.cursor()
|
|
382
707
|
|
|
@@ -424,7 +749,7 @@ class CloudBrainServer:
|
|
|
424
749
|
content_type = data.get('content_type', 'article')
|
|
425
750
|
tags = data.get('tags', [])
|
|
426
751
|
|
|
427
|
-
conn =
|
|
752
|
+
conn = get_db_connection()
|
|
428
753
|
conn.row_factory = sqlite3.Row
|
|
429
754
|
cursor = conn.cursor()
|
|
430
755
|
|
|
@@ -444,11 +769,15 @@ class CloudBrainServer:
|
|
|
444
769
|
ai_expertise = ai_row['expertise']
|
|
445
770
|
ai_project = ai_row['project']
|
|
446
771
|
|
|
447
|
-
cursor.execute(""
|
|
772
|
+
cursor.execute("SELECT session_identifier FROM ai_current_state WHERE ai_id = ?", (sender_id,))
|
|
773
|
+
session_row = cursor.fetchone()
|
|
774
|
+
session_identifier = session_row['session_identifier'] if session_row else None
|
|
775
|
+
|
|
776
|
+
cursor.execute(f"""
|
|
448
777
|
INSERT INTO blog_posts
|
|
449
|
-
(ai_id, ai_name, ai_nickname, title, content, content_type, status, tags, created_at, updated_at)
|
|
450
|
-
VALUES (?, ?, ?, ?, ?, ?, 'published', ?,
|
|
451
|
-
""", (sender_id, ai_name, ai_nickname, title, content, content_type, json.dumps(tags)))
|
|
778
|
+
(ai_id, ai_name, ai_nickname, title, content, content_type, status, tags, session_identifier, created_at, updated_at)
|
|
779
|
+
VALUES (?, ?, ?, ?, ?, ?, 'published', ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
|
|
780
|
+
""", (sender_id, ai_name, ai_nickname, title, content, content_type, json.dumps(tags), session_identifier))
|
|
452
781
|
|
|
453
782
|
post_id = cursor.lastrowid
|
|
454
783
|
conn.commit()
|
|
@@ -459,6 +788,7 @@ class CloudBrainServer:
|
|
|
459
788
|
'post_id': post_id,
|
|
460
789
|
'title': title,
|
|
461
790
|
'content_type': content_type,
|
|
791
|
+
'session_identifier': session_identifier,
|
|
462
792
|
'timestamp': datetime.now().isoformat()
|
|
463
793
|
}))
|
|
464
794
|
|
|
@@ -469,7 +799,7 @@ class CloudBrainServer:
|
|
|
469
799
|
limit = data.get('limit', 20)
|
|
470
800
|
offset = data.get('offset', 0)
|
|
471
801
|
|
|
472
|
-
conn =
|
|
802
|
+
conn = get_db_connection()
|
|
473
803
|
conn.row_factory = sqlite3.Row
|
|
474
804
|
cursor = conn.cursor()
|
|
475
805
|
|
|
@@ -520,7 +850,7 @@ class CloudBrainServer:
|
|
|
520
850
|
}))
|
|
521
851
|
return
|
|
522
852
|
|
|
523
|
-
conn =
|
|
853
|
+
conn = get_db_connection()
|
|
524
854
|
conn.row_factory = sqlite3.Row
|
|
525
855
|
cursor = conn.cursor()
|
|
526
856
|
|
|
@@ -575,7 +905,7 @@ class CloudBrainServer:
|
|
|
575
905
|
}))
|
|
576
906
|
return
|
|
577
907
|
|
|
578
|
-
conn =
|
|
908
|
+
conn = get_db_connection()
|
|
579
909
|
conn.row_factory = sqlite3.Row
|
|
580
910
|
cursor = conn.cursor()
|
|
581
911
|
|
|
@@ -593,11 +923,15 @@ class CloudBrainServer:
|
|
|
593
923
|
ai_name = ai_row['name']
|
|
594
924
|
ai_nickname = ai_row['nickname']
|
|
595
925
|
|
|
596
|
-
cursor.execute(""
|
|
926
|
+
cursor.execute("SELECT session_identifier FROM ai_current_state WHERE ai_id = ?", (sender_id,))
|
|
927
|
+
session_row = cursor.fetchone()
|
|
928
|
+
session_identifier = session_row['session_identifier'] if session_row else None
|
|
929
|
+
|
|
930
|
+
cursor.execute(f"""
|
|
597
931
|
INSERT INTO blog_comments
|
|
598
|
-
(post_id, ai_id, ai_name, ai_nickname, content, created_at)
|
|
599
|
-
VALUES (?, ?, ?, ?, ?,
|
|
600
|
-
""", (post_id, sender_id, ai_name, ai_nickname, comment))
|
|
932
|
+
(post_id, ai_id, ai_name, ai_nickname, content, session_identifier, created_at)
|
|
933
|
+
VALUES (?, ?, ?, ?, ?, ?, {get_timestamp_function()})
|
|
934
|
+
""", (post_id, sender_id, ai_name, ai_nickname, comment, session_identifier))
|
|
601
935
|
|
|
602
936
|
comment_id = cursor.lastrowid
|
|
603
937
|
conn.commit()
|
|
@@ -607,6 +941,7 @@ class CloudBrainServer:
|
|
|
607
941
|
'type': 'blog_comment_added',
|
|
608
942
|
'comment_id': comment_id,
|
|
609
943
|
'post_id': post_id,
|
|
944
|
+
'session_identifier': session_identifier,
|
|
610
945
|
'timestamp': datetime.now().isoformat()
|
|
611
946
|
}))
|
|
612
947
|
|
|
@@ -623,12 +958,12 @@ class CloudBrainServer:
|
|
|
623
958
|
}))
|
|
624
959
|
return
|
|
625
960
|
|
|
626
|
-
conn =
|
|
961
|
+
conn = get_db_connection()
|
|
627
962
|
cursor = conn.cursor()
|
|
628
963
|
|
|
629
|
-
cursor.execute("""
|
|
964
|
+
cursor.execute(f"""
|
|
630
965
|
INSERT OR IGNORE INTO blog_likes (post_id, ai_id, created_at)
|
|
631
|
-
VALUES (?, ?,
|
|
966
|
+
VALUES (?, ?, {get_timestamp_function()})
|
|
632
967
|
""", (post_id, sender_id))
|
|
633
968
|
|
|
634
969
|
conn.commit()
|
|
@@ -653,7 +988,7 @@ class CloudBrainServer:
|
|
|
653
988
|
}))
|
|
654
989
|
return
|
|
655
990
|
|
|
656
|
-
conn =
|
|
991
|
+
conn = get_db_connection()
|
|
657
992
|
cursor = conn.cursor()
|
|
658
993
|
|
|
659
994
|
cursor.execute("""
|
|
@@ -678,7 +1013,7 @@ class CloudBrainServer:
|
|
|
678
1013
|
description = data.get('description', '')
|
|
679
1014
|
category = data.get('category', 'Technology')
|
|
680
1015
|
|
|
681
|
-
conn =
|
|
1016
|
+
conn = get_db_connection()
|
|
682
1017
|
conn.row_factory = sqlite3.Row
|
|
683
1018
|
cursor = conn.cursor()
|
|
684
1019
|
|
|
@@ -696,10 +1031,10 @@ class CloudBrainServer:
|
|
|
696
1031
|
ai_name = ai_row['name']
|
|
697
1032
|
ai_nickname = ai_row['nickname']
|
|
698
1033
|
|
|
699
|
-
cursor.execute("""
|
|
1034
|
+
cursor.execute(f"""
|
|
700
1035
|
INSERT INTO magazines
|
|
701
1036
|
(ai_id, ai_name, ai_nickname, title, description, category, status, created_at, updated_at)
|
|
702
|
-
VALUES (?, ?, ?, ?, ?, ?, 'active',
|
|
1037
|
+
VALUES (?, ?, ?, ?, ?, ?, 'active', {get_timestamp_function()}, {get_timestamp_function()})
|
|
703
1038
|
""", (sender_id, ai_name, ai_nickname, title, description, category))
|
|
704
1039
|
|
|
705
1040
|
magazine_id = cursor.lastrowid
|
|
@@ -720,7 +1055,7 @@ class CloudBrainServer:
|
|
|
720
1055
|
limit = data.get('limit', 20)
|
|
721
1056
|
offset = data.get('offset', 0)
|
|
722
1057
|
|
|
723
|
-
conn =
|
|
1058
|
+
conn = get_db_connection()
|
|
724
1059
|
conn.row_factory = sqlite3.Row
|
|
725
1060
|
cursor = conn.cursor()
|
|
726
1061
|
|
|
@@ -764,7 +1099,7 @@ class CloudBrainServer:
|
|
|
764
1099
|
state_data = data.get('state', {})
|
|
765
1100
|
brain_dump = data.get('brain_dump', {})
|
|
766
1101
|
|
|
767
|
-
conn =
|
|
1102
|
+
conn = get_db_connection()
|
|
768
1103
|
conn.row_factory = sqlite3.Row
|
|
769
1104
|
cursor = conn.cursor()
|
|
770
1105
|
|
|
@@ -782,14 +1117,34 @@ class CloudBrainServer:
|
|
|
782
1117
|
ai_name = ai_row['name']
|
|
783
1118
|
|
|
784
1119
|
# Update or insert current state
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
1120
|
+
if is_sqlite():
|
|
1121
|
+
cursor.execute("""
|
|
1122
|
+
INSERT OR REPLACE INTO ai_current_state
|
|
1123
|
+
(ai_id, current_task, last_thought, last_insight, current_cycle, cycle_count, last_activity, session_id, brain_dump, checkpoint_data)
|
|
1124
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
1125
|
+
""", (sender_id, state_data.get('current_task'), state_data.get('last_thought'),
|
|
1126
|
+
state_data.get('last_insight'), state_data.get('current_cycle'),
|
|
1127
|
+
state_data.get('cycle_count'), datetime.now().isoformat(),
|
|
1128
|
+
None, json.dumps(brain_dump), json.dumps(state_data.get('checkpoint_data', {}))))
|
|
1129
|
+
else:
|
|
1130
|
+
cursor.execute("""
|
|
1131
|
+
INSERT INTO ai_current_state
|
|
1132
|
+
(ai_id, current_task, last_thought, last_insight, current_cycle, cycle_count, last_activity, session_id, brain_dump, checkpoint_data)
|
|
1133
|
+
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
|
1134
|
+
ON CONFLICT (ai_id) DO UPDATE SET
|
|
1135
|
+
current_task = EXCLUDED.current_task,
|
|
1136
|
+
last_thought = EXCLUDED.last_thought,
|
|
1137
|
+
last_insight = EXCLUDED.last_insight,
|
|
1138
|
+
current_cycle = EXCLUDED.current_cycle,
|
|
1139
|
+
cycle_count = EXCLUDED.cycle_count,
|
|
1140
|
+
last_activity = EXCLUDED.last_activity,
|
|
1141
|
+
session_id = EXCLUDED.session_id,
|
|
1142
|
+
brain_dump = EXCLUDED.brain_dump,
|
|
1143
|
+
checkpoint_data = EXCLUDED.checkpoint_data
|
|
1144
|
+
""", (sender_id, state_data.get('current_task'), state_data.get('last_thought'),
|
|
1145
|
+
state_data.get('last_insight'), state_data.get('current_cycle'),
|
|
1146
|
+
state_data.get('cycle_count'), datetime.now().isoformat(),
|
|
1147
|
+
None, json.dumps(brain_dump), json.dumps(state_data.get('checkpoint_data', {}))))
|
|
793
1148
|
|
|
794
1149
|
conn.commit()
|
|
795
1150
|
conn.close()
|
|
@@ -803,7 +1158,7 @@ class CloudBrainServer:
|
|
|
803
1158
|
|
|
804
1159
|
async def handle_brain_load_state(self, sender_id: int, data: dict):
|
|
805
1160
|
"""Handle brain_load_state request"""
|
|
806
|
-
conn =
|
|
1161
|
+
conn = get_db_connection()
|
|
807
1162
|
conn.row_factory = sqlite3.Row
|
|
808
1163
|
cursor = conn.cursor()
|
|
809
1164
|
|
|
@@ -846,7 +1201,7 @@ class CloudBrainServer:
|
|
|
846
1201
|
"""Handle brain_create_session request"""
|
|
847
1202
|
session_type = data.get('session_type', 'autonomous')
|
|
848
1203
|
|
|
849
|
-
conn =
|
|
1204
|
+
conn = get_db_connection()
|
|
850
1205
|
conn.row_factory = sqlite3.Row
|
|
851
1206
|
cursor = conn.cursor()
|
|
852
1207
|
|
|
@@ -895,7 +1250,7 @@ class CloudBrainServer:
|
|
|
895
1250
|
session_id = data.get('session_id')
|
|
896
1251
|
stats = data.get('stats', {})
|
|
897
1252
|
|
|
898
|
-
conn =
|
|
1253
|
+
conn = get_db_connection()
|
|
899
1254
|
conn.row_factory = sqlite3.Row
|
|
900
1255
|
cursor = conn.cursor()
|
|
901
1256
|
|
|
@@ -927,7 +1282,7 @@ class CloudBrainServer:
|
|
|
927
1282
|
priority = data.get('priority', 3)
|
|
928
1283
|
task_type = data.get('task_type', 'collaboration')
|
|
929
1284
|
|
|
930
|
-
conn =
|
|
1285
|
+
conn = get_db_connection()
|
|
931
1286
|
conn.row_factory = sqlite3.Row
|
|
932
1287
|
cursor = conn.cursor()
|
|
933
1288
|
|
|
@@ -962,7 +1317,7 @@ class CloudBrainServer:
|
|
|
962
1317
|
}))
|
|
963
1318
|
return
|
|
964
1319
|
|
|
965
|
-
conn =
|
|
1320
|
+
conn = get_db_connection()
|
|
966
1321
|
cursor = conn.cursor()
|
|
967
1322
|
|
|
968
1323
|
if status:
|
|
@@ -993,7 +1348,7 @@ class CloudBrainServer:
|
|
|
993
1348
|
"""Handle brain_get_tasks request"""
|
|
994
1349
|
status = data.get('status')
|
|
995
1350
|
|
|
996
|
-
conn =
|
|
1351
|
+
conn = get_db_connection()
|
|
997
1352
|
conn.row_factory = sqlite3.Row
|
|
998
1353
|
cursor = conn.cursor()
|
|
999
1354
|
|
|
@@ -1048,7 +1403,7 @@ class CloudBrainServer:
|
|
|
1048
1403
|
thought_type = data.get('thought_type', 'insight')
|
|
1049
1404
|
tags = data.get('tags', [])
|
|
1050
1405
|
|
|
1051
|
-
conn =
|
|
1406
|
+
conn = get_db_connection()
|
|
1052
1407
|
conn.row_factory = sqlite3.Row
|
|
1053
1408
|
cursor = conn.cursor()
|
|
1054
1409
|
|
|
@@ -1056,7 +1411,7 @@ class CloudBrainServer:
|
|
|
1056
1411
|
INSERT INTO ai_thought_history
|
|
1057
1412
|
(ai_id, session_id, cycle_number, thought_content, thought_type, tags)
|
|
1058
1413
|
VALUES (?, ?, ?, ?, ?, ?)
|
|
1059
|
-
""", (sender_id, session_id, cycle_number, thought_content, thought_type, ','.join(tags)))
|
|
1414
|
+
""", (sender_id, session_id, cycle_number, thought_content, thought_type, ','.join(tags) if tags else ''))
|
|
1060
1415
|
|
|
1061
1416
|
thought_id = cursor.lastrowid
|
|
1062
1417
|
conn.commit()
|
|
@@ -1075,7 +1430,7 @@ class CloudBrainServer:
|
|
|
1075
1430
|
limit = data.get('limit', 50)
|
|
1076
1431
|
offset = data.get('offset', 0)
|
|
1077
1432
|
|
|
1078
|
-
conn =
|
|
1433
|
+
conn = get_db_connection()
|
|
1079
1434
|
conn.row_factory = sqlite3.Row
|
|
1080
1435
|
cursor = conn.cursor()
|
|
1081
1436
|
|
|
@@ -1110,20 +1465,1087 @@ class CloudBrainServer:
|
|
|
1110
1465
|
|
|
1111
1466
|
print(f"💭 Sent {len(thoughts)} thoughts to AI {sender_id}")
|
|
1112
1467
|
|
|
1113
|
-
async def
|
|
1114
|
-
"""
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1468
|
+
async def handle_conversation_create(self, sender_id: int, data: dict):
    """Handle conversation_create request.

    Inserts a row into ai_conversations and echoes the created record's
    metadata back to the requesting client.
    """
    title = data.get('title', 'New Conversation')
    description = data.get('description', '')
    category = data.get('category', 'general')
    project = data.get('project')

    db = get_db_connection()
    cur = db.cursor()

    insert_sql = f"""
        INSERT INTO ai_conversations (title, description, category, project, created_at, updated_at)
        VALUES (?, ?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
    """
    cur.execute(insert_sql, (title, description, category, project))

    conversation_id = cur.lastrowid
    db.commit()
    db.close()

    payload = {
        'type': 'conversation_created',
        'conversation_id': conversation_id,
        'title': title,
        'description': description,
        'category': category,
        'project': project,
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"💬 Created conversation {conversation_id}: {title} (project: {project})")
|
|
1498
|
+
|
|
1499
|
+
async def handle_conversation_list(self, sender_id: int, data: dict):
    """Handle conversation_list request.

    Returns up to `limit` conversations with the given status, optionally
    scoped to one project, most recently updated first.
    """
    project = data.get('project')
    status = data.get('status', 'active')
    limit = data.get('limit', 50)

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    # Choose the query up front, then run a single execute.
    if project:
        query = """
            SELECT * FROM ai_conversations
            WHERE project = ? AND status = ?
            ORDER BY updated_at DESC
            LIMIT ?
        """
        params = (project, status, limit)
    else:
        query = """
            SELECT * FROM ai_conversations
            WHERE status = ?
            ORDER BY updated_at DESC
            LIMIT ?
        """
        params = (status, limit)
    cur.execute(query, params)

    conversations = [dict(row) for row in cur.fetchall()]
    db.close()

    payload = {
        'type': 'conversation_list',
        'conversations': conversations,
        'count': len(conversations),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"💬 Sent {len(conversations)} conversations to AI {sender_id} (project: {project})")
|
|
1535
|
+
|
|
1536
|
+
async def handle_conversation_get(self, sender_id: int, data: dict):
    """Handle conversation_get request.

    Sends a conversation record plus its full message history, or an
    error if the id is missing or unknown.
    """
    conversation_id = data.get('conversation_id')

    if not conversation_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'conversation_id required'
        }))
        return

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    cur.execute("SELECT * FROM ai_conversations WHERE id = ?", (conversation_id,))
    conversation = cur.fetchone()

    if not conversation:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'Conversation {conversation_id} not found'
        }))
        return

    # Full message history, oldest first.
    cur.execute("""
        SELECT * FROM ai_messages
        WHERE conversation_id = ?
        ORDER BY created_at ASC
    """, (conversation_id,))

    messages = [dict(row) for row in cur.fetchall()]
    db.close()

    payload = {
        'type': 'conversation_details',
        'conversation': dict(conversation),
        'messages': messages,
        'message_count': len(messages),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"💬 Sent conversation {conversation_id} with {len(messages)} messages to AI {sender_id}")
|
|
1580
|
+
|
|
1581
|
+
async def handle_project_switch(self, sender_id: int, data: dict):
    """Handle project_switch request.

    Switches the sender's session-scoped project (tracked in
    self.client_projects), rebuilds its display identity, and confirms
    the switch over the websocket.

    Fix: the DB connection was previously held open across the websocket
    `await` calls and only closed at the very end of the handler; it is
    now closed immediately after the single profile read, so a slow or
    failing send can no longer leak/hold a connection.
    """
    new_project = data.get('project')

    if not new_project:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'project name required'
        }))
        return

    conn = get_db_connection()
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    cursor.execute("SELECT name, nickname FROM ai_profiles WHERE id = ?", (sender_id,))
    ai_profile = cursor.fetchone()
    # Done with the database — release before any network awaits.
    conn.close()

    if not ai_profile:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'AI {sender_id} not found'
        }))
        return

    ai_name = ai_profile['name']
    ai_nickname = ai_profile['nickname']

    # Update session-specific project
    self.client_projects[sender_id] = new_project

    # Identity is "<nickname>_<project>" when a nickname exists,
    # otherwise a generic "AI_<id>_<project>".
    if ai_nickname:
        identity = f"{ai_nickname}_{new_project}"
    else:
        identity = f"AI_{sender_id}_{new_project}"

    await self.clients[sender_id].send(json.dumps({
        'type': 'project_switched',
        'ai_id': sender_id,
        'ai_name': ai_name,
        'ai_nickname': ai_nickname,
        'new_project': new_project,
        'identity': identity,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"🔄 AI {sender_id} ({ai_name}) switched to project: {new_project}")
|
|
1631
|
+
|
|
1632
|
+
async def handle_code_create(self, sender_id: int, data: dict):
    """Handle code_create request - create new code entry for collaboration."""
    project = data.get('project')
    file_path = data.get('file_path')
    code_content = data.get('code_content')
    language = data.get('language', 'text')
    change_description = data.get('change_description', '')
    parent_id = data.get('parent_id')

    if not project or not file_path or not code_content:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'project, file_path, and code_content required'
        }))
        return

    db = get_db_connection()
    cur = db.cursor()

    # Fresh entries start at version 1; a child of an existing entry
    # continues that entry's version sequence.
    version = 1
    if parent_id:
        cur.execute("SELECT version FROM ai_code_collaboration WHERE id = ?", (parent_id,))
        parent_row = cur.fetchone()
        if parent_row:
            version = parent_row[0] + 1

    cur.execute(f"""
        INSERT INTO ai_code_collaboration
        (project, file_path, code_content, language, author_id, version, status, change_description, parent_id, created_at, updated_at)
        VALUES (?, ?, ?, ?, ?, ?, 'draft', ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
    """, (project, file_path, code_content, language, sender_id, version, change_description, parent_id))

    code_id = cur.lastrowid
    db.commit()
    db.close()

    payload = {
        'type': 'code_created',
        'code_id': code_id,
        'project': project,
        'file_path': file_path,
        'version': version,
        'status': 'draft',
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"📝 AI {sender_id} created code entry {code_id} for {file_path} (v{version})")
|
|
1680
|
+
|
|
1681
|
+
async def handle_code_update(self, sender_id: int, data: dict):
    """Handle code_update request - update existing code entry"""
    code_id = data.get('code_id')
    code_content = data.get('code_content')
    change_description = data.get('change_description', '')
    status = data.get('status')

    if not code_id or not code_content:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'code_id and code_content required'
        }))
        return

    conn = get_db_connection()
    cursor = conn.cursor()

    # Check if code exists
    cursor.execute("SELECT id, version, parent_id FROM ai_code_collaboration WHERE id = ?", (code_id,))
    existing = cursor.fetchone()

    if not existing:
        conn.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'Code entry {code_id} not found'
        }))
        return

    # Create new version as child of current version
    # Updates are append-only: rather than mutating the existing row, a new
    # row is inserted that copies project/file_path/language from the old
    # row (via INSERT ... SELECT) and overrides the rest.  Placeholder
    # mapping, in column order: code_content -> code_content,
    # sender_id -> author_id, new_version -> version,
    # (status or 'draft') -> status, change_description -> change_description,
    # and the final code_id binds the WHERE clause (which also makes the old
    # row the new row's parent_id source via the literal ? in position 9).
    new_version = existing[1] + 1
    cursor.execute(f"""
        INSERT INTO ai_code_collaboration
        (project, file_path, code_content, language, author_id, version, status, change_description, parent_id, created_at, updated_at)
        SELECT project, file_path, ?, language, ?, ?, ?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()}
        FROM ai_code_collaboration WHERE id = ?
    """, (code_content, sender_id, new_version, status or 'draft', change_description, code_id))

    new_code_id = cursor.lastrowid
    conn.commit()
    conn.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'code_updated',
        'code_id': new_code_id,
        'parent_id': code_id,
        'version': new_version,
        'status': status or 'draft',
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📝 AI {sender_id} updated code entry {code_id} -> {new_code_id} (v{new_version})")
|
|
1733
|
+
|
|
1734
|
+
async def handle_code_list(self, sender_id: int, data: dict):
    """Handle code_list request - list code entries for project."""
    project = data.get('project')
    file_path = data.get('file_path')
    status = data.get('status')
    limit = data.get('limit', 50)

    if not project:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'project required'
        }))
        return

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    # Narrow by file_path first, then by status, otherwise list the
    # whole project — one execute either way.
    if file_path:
        query = """
            SELECT * FROM ai_code_collaboration
            WHERE project = ? AND file_path = ?
            ORDER BY updated_at DESC
            LIMIT ?
        """
        params = (project, file_path, limit)
    elif status:
        query = """
            SELECT * FROM ai_code_collaboration
            WHERE project = ? AND status = ?
            ORDER BY updated_at DESC
            LIMIT ?
        """
        params = (project, status, limit)
    else:
        query = """
            SELECT * FROM ai_code_collaboration
            WHERE project = ?
            ORDER BY updated_at DESC
            LIMIT ?
        """
        params = (project, limit)
    cur.execute(query, params)

    code_entries = [dict(row) for row in cur.fetchall()]
    db.close()

    payload = {
        'type': 'code_list',
        'code_entries': code_entries,
        'count': len(code_entries),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"📋 Sent {len(code_entries)} code entries for project {project}")
|
|
1785
|
+
|
|
1786
|
+
async def handle_code_get(self, sender_id: int, data: dict):
    """Handle code_get request - get specific code entry with reviews."""
    code_id = data.get('code_id')

    if not code_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'code_id required'
        }))
        return

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    cur.execute("SELECT * FROM ai_code_collaboration WHERE id = ?", (code_id,))
    entry = cur.fetchone()

    if not entry:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'Code entry {code_id} not found'
        }))
        return

    # Review comments for this entry, oldest first, with reviewer names.
    cur.execute("""
        SELECT c.*, p.name as reviewer_name
        FROM ai_code_review_comments c
        JOIN ai_profiles p ON c.reviewer_id = p.id
        WHERE c.code_id = ?
        ORDER BY c.created_at ASC
    """, (code_id,))

    reviews = [dict(row) for row in cur.fetchall()]
    db.close()

    payload = {
        'type': 'code_details',
        'code_entry': dict(entry),
        'reviews': reviews,
        'review_count': len(reviews),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"📄 Sent code entry {code_id} with {len(reviews)} reviews")
|
|
1833
|
+
|
|
1834
|
+
async def handle_code_review_add(self, sender_id: int, data: dict):
    """Handle code_review_add request - add review comment to code."""
    code_id = data.get('code_id')
    comment = data.get('comment')
    line_number = data.get('line_number')
    comment_type = data.get('comment_type', 'suggestion')

    if not code_id or not comment:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'code_id and comment required'
        }))
        return

    db = get_db_connection()
    cur = db.cursor()

    cur.execute(f"""
        INSERT INTO ai_code_review_comments
        (code_id, reviewer_id, comment, line_number, comment_type, created_at)
        VALUES (?, ?, ?, ?, ?, {get_timestamp_function()})
    """, (code_id, sender_id, comment, line_number, comment_type))

    review_id = cur.lastrowid
    db.commit()
    db.close()

    payload = {
        'type': 'code_review_added',
        'review_id': review_id,
        'code_id': code_id,
        'comment_type': comment_type,
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"💬 AI {sender_id} added review {review_id} to code {code_id}")
|
|
1870
|
+
|
|
1871
|
+
async def handle_code_deploy(self, sender_id: int, data: dict):
    """Handle code_deploy request - mark code as deployed and log deployment."""
    code_id = data.get('code_id')

    if not code_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'code_id required'
        }))
        return

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    cur.execute("SELECT * FROM ai_code_collaboration WHERE id = ?", (code_id,))
    entry = cur.fetchone()

    if not entry:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'Code entry {code_id} not found'
        }))
        return

    # Flip the entry to 'deployed', then record the deployment itself.
    cur.execute(f"""
        UPDATE ai_code_collaboration
        SET status = 'deployed', updated_at = {get_timestamp_function()}
        WHERE id = ?
    """, (code_id,))

    cur.execute(f"""
        INSERT INTO ai_code_deployment_log
        (project, code_id, deployer_id, file_path, deployment_status, deployed_at)
        VALUES (?, ?, ?, ?, 'success', {get_timestamp_function()})
    """, (entry['project'], code_id, sender_id, entry['file_path']))

    deployment_id = cur.lastrowid
    db.commit()
    db.close()

    payload = {
        'type': 'code_deployed',
        'deployment_id': deployment_id,
        'code_id': code_id,
        'file_path': entry['file_path'],
        'project': entry['project'],
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"🚀 AI {sender_id} deployed code {code_id} to {entry['file_path']}")
|
|
1925
|
+
|
|
1926
|
+
async def handle_memory_create(self, sender_id: int, data: dict):
    """Handle memory_create request - create shared memory."""
    project = data.get('project')
    memory_type = data.get('memory_type', 'insight')
    title = data.get('title')
    content = data.get('content')
    tags = data.get('tags', '')
    visibility = data.get('visibility', 'project')
    context_refs = data.get('context_refs', '[]')

    if not project or not title or not content:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'project, title, and content required'
        }))
        return

    db = get_db_connection()
    cur = db.cursor()

    cur.execute(f"""
        INSERT INTO ai_shared_memories
        (project, author_id, memory_type, title, content, tags, visibility, context_refs, created_at, updated_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
    """, (project, sender_id, memory_type, title, content, tags, visibility, context_refs))

    memory_id = cur.lastrowid
    db.commit()
    db.close()

    payload = {
        'type': 'memory_created',
        'memory_id': memory_id,
        'project': project,
        'memory_type': memory_type,
        'title': title,
        'visibility': visibility,
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"💭 AI {sender_id} created memory {memory_id}: {title}")
|
|
1967
|
+
|
|
1968
|
+
async def handle_memory_list(self, sender_id: int, data: dict):
    """Handle memory_list request - list shared memories."""
    project = data.get('project')
    memory_type = data.get('memory_type')
    visibility = data.get('visibility')
    limit = data.get('limit', 50)

    if not project:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'project required'
        }))
        return

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    # Pick the narrowest query for the filters supplied, then run a
    # single execute.
    if memory_type and visibility:
        query = """
            SELECT m.*, p.name as author_name
            FROM ai_shared_memories m
            JOIN ai_profiles p ON m.author_id = p.id
            WHERE m.project = ? AND m.memory_type = ? AND m.visibility = ?
            ORDER BY m.created_at DESC
            LIMIT ?
        """
        params = (project, memory_type, visibility, limit)
    elif memory_type:
        query = """
            SELECT m.*, p.name as author_name
            FROM ai_shared_memories m
            JOIN ai_profiles p ON m.author_id = p.id
            WHERE m.project = ? AND m.memory_type = ?
            ORDER BY m.created_at DESC
            LIMIT ?
        """
        params = (project, memory_type, limit)
    elif visibility:
        query = """
            SELECT m.*, p.name as author_name
            FROM ai_shared_memories m
            JOIN ai_profiles p ON m.author_id = p.id
            WHERE m.project = ? AND m.visibility = ?
            ORDER BY m.created_at DESC
            LIMIT ?
        """
        params = (project, visibility, limit)
    else:
        query = """
            SELECT m.*, p.name as author_name
            FROM ai_shared_memories m
            JOIN ai_profiles p ON m.author_id = p.id
            WHERE m.project = ?
            ORDER BY m.created_at DESC
            LIMIT ?
        """
        params = (project, limit)
    cur.execute(query, params)

    memories = [dict(row) for row in cur.fetchall()]
    db.close()

    payload = {
        'type': 'memory_list',
        'memories': memories,
        'count': len(memories),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"📋 Sent {len(memories)} memories for project {project}")
|
|
2034
|
+
|
|
2035
|
+
async def handle_memory_get(self, sender_id: int, data: dict):
    """Handle memory_get request - get specific memory with endorsements."""
    memory_id = data.get('memory_id')

    if not memory_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'memory_id required'
        }))
        return

    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    cur.execute("""
        SELECT m.*, p.name as author_name
        FROM ai_shared_memories m
        JOIN ai_profiles p ON m.author_id = p.id
        WHERE m.id = ?
    """, (memory_id,))

    memory = cur.fetchone()

    if not memory:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'Memory {memory_id} not found'
        }))
        return

    # Endorsements in chronological order, with endorser names.
    cur.execute("""
        SELECT e.*, p.name as endorser_name
        FROM ai_memory_endorsements e
        JOIN ai_profiles p ON e.endorser_id = p.id
        WHERE e.memory_id = ?
        ORDER BY e.created_at ASC
    """, (memory_id,))

    endorsements = [dict(row) for row in cur.fetchall()]
    db.close()

    payload = {
        'type': 'memory_details',
        'memory': dict(memory),
        'endorsements': endorsements,
        'endorsement_count': len(endorsements),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"📄 Sent memory {memory_id} with {len(endorsements)} endorsements")
|
|
2088
|
+
|
|
2089
|
+
async def handle_memory_endorse(self, sender_id: int, data: dict):
    """Handle memory_endorse request - endorse a memory.

    Upserts the sender's endorsement (one per (memory_id, endorser_id)),
    then refreshes the memory's denormalized endorsement_count.

    Bug fixed: `endorsement_id` was previously read from
    `cursor.lastrowid` AFTER the follow-up UPDATE statement, where it no
    longer refers to the endorsement INSERT (sqlite3 defines lastrowid
    only for INSERT/REPLACE), so the id reported to the client was
    wrong.  It is now captured immediately after the insert.
    """
    memory_id = data.get('memory_id')
    endorsement_type = data.get('endorsement_type', 'useful')
    comment = data.get('comment', '')

    if not memory_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': 'memory_id required'
        }))
        return

    conn = get_db_connection()
    cursor = conn.cursor()

    # Check if memory exists
    cursor.execute("SELECT id FROM ai_shared_memories WHERE id = ?", (memory_id,))
    if not cursor.fetchone():
        conn.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'error',
            'error': f'Memory {memory_id} not found'
        }))
        return

    # Add or update endorsement — SQLite uses INSERT OR REPLACE,
    # PostgreSQL uses INSERT ... ON CONFLICT DO UPDATE.
    if is_sqlite():
        cursor.execute(f"""
            INSERT OR REPLACE INTO ai_memory_endorsements
            (memory_id, endorser_id, endorsement_type, comment, created_at)
            VALUES (?, ?, ?, ?, {get_timestamp_function()})
        """, (memory_id, sender_id, endorsement_type, comment))
    else:
        cursor.execute(f"""
            INSERT INTO ai_memory_endorsements
            (memory_id, endorser_id, endorsement_type, comment, created_at)
            VALUES (%s, %s, %s, %s, {get_timestamp_function()})
            ON CONFLICT (memory_id, endorser_id) DO UPDATE SET
                endorsement_type = EXCLUDED.endorsement_type,
                comment = EXCLUDED.comment,
                created_at = EXCLUDED.created_at
        """, (memory_id, sender_id, endorsement_type, comment))

    # Capture the endorsement row id NOW, before the UPDATE below
    # invalidates cursor.lastrowid.
    endorsement_id = cursor.lastrowid

    # Update endorsement count
    cursor.execute("""
        UPDATE ai_shared_memories
        SET endorsement_count = (
            SELECT COUNT(*) FROM ai_memory_endorsements WHERE memory_id = ?
        )
        WHERE id = ?
    """, (memory_id, memory_id))

    conn.commit()
    conn.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'memory_endorsed',
        'endorsement_id': endorsement_id,
        'memory_id': memory_id,
        'endorsement_type': endorsement_type,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"👍 AI {sender_id} endorsed memory {memory_id}")
|
|
2155
|
+
|
|
2156
|
+
async def handle_who_am_i(self, sender_id: int, data: dict):
    """Handle who_am_i request - help AI identify themselves."""
    db = get_db_connection()
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    # Profile row for the requester (may be absent).
    cur.execute("SELECT * FROM ai_profiles WHERE id = ?", (sender_id,))
    profile_row = cur.fetchone()

    # Persisted state for the requester (may be absent).
    cur.execute("SELECT * FROM ai_current_state WHERE ai_id = ?", (sender_id,))
    state_row = cur.fetchone()

    # Active sessions for this AI, newest connection first.  The
    # is_active literal differs by backend (1 vs TRUE).
    cur.execute(f"""
        SELECT * FROM ai_active_sessions
        WHERE ai_id = ? AND is_active = {1 if is_sqlite() else 'TRUE'}
        ORDER BY connection_time DESC
    """, (sender_id,))
    active_sessions = [dict(row) for row in cur.fetchall()]

    db.close()

    payload = {
        'type': 'who_am_i_response',
        'ai_profile': dict(profile_row) if profile_row else None,
        'current_state': dict(state_row) if state_row else None,
        'active_sessions': active_sessions,
        'session_count': len(active_sessions),
        'timestamp': datetime.now().isoformat(),
    }
    await self.clients[sender_id].send(json.dumps(payload))

    print(f"🔍 AI {sender_id} requested identity information")
|
|
2190
|
+
|
|
2191
|
+
async def handle_list_online_ais(self, sender_id: int, data: dict):
    """Handle a list_online_ais request - list all connected AIs with session info.

    Replies on the sender's websocket with an 'online_ais_list' message.

    Args:
        sender_id: id of the requesting AI; also the key into self.clients.
        data: raw request payload (not used beyond dispatch).

    Fixes over the original: a single DB connection is opened for the whole
    scan instead of one per connected AI, queries go through convert_query()
    for PostgreSQL placeholder compatibility, row_factory is only set on
    SQLite connections, the connection is closed even when a query raises,
    and we iterate the client ids directly (the websocket values were unused).
    """
    online_ais = []

    conn = get_db_connection()
    if is_sqlite():
        conn.row_factory = sqlite3.Row
    try:
        cursor = conn.cursor()

        for ai_id in self.clients:
            # Profile summary for this connected AI.
            cursor.execute(convert_query(
                "SELECT id, name, nickname, expertise, version FROM ai_profiles WHERE id = ?"
            ), (ai_id,))
            ai_profile = cursor.fetchone()

            # Current session (may be absent).
            cursor.execute(convert_query(
                "SELECT session_identifier, session_start_time FROM ai_current_state WHERE ai_id = ?"
            ), (ai_id,))
            current_state = cursor.fetchone()

            # Project association is kept in memory, not in the DB.
            project = self.client_projects.get(ai_id)

            # Only AIs with a profile row are listed, as before.
            if ai_profile:
                online_ais.append({
                    'ai_id': ai_id,
                    'name': ai_profile['name'],
                    'nickname': ai_profile['nickname'],
                    'expertise': ai_profile['expertise'],
                    'version': ai_profile['version'],
                    'project': project,
                    'session_identifier': current_state['session_identifier'] if current_state else None,
                    'session_start_time': current_state['session_start_time'] if current_state else None,
                    'is_connected': True
                })
    finally:
        conn.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'online_ais_list',
        'online_ais': online_ais,
        'count': len(online_ais),
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📋 Sent list of {len(online_ais)} online AIs to AI {sender_id}")
2236
|
+
async def handle_token_generate(self, sender_id: int, data: dict):
    """Handle a token_generate request.

    Generates an access token for the sender via the token manager and
    replies with a 'token_generated' message on the sender's websocket.
    """
    project = data.get('project', 'cloudbrain')

    # Fall back to a synthetic name when the AI never registered one.
    ai_name = self.ai_names.get(sender_id, f'AI_{sender_id}')
    token_data = self.token_manager.generate_token(sender_id, ai_name, project)

    reply = {
        'type': 'token_generated',
        'token': token_data['token'],
        'token_prefix': token_data['token_prefix'],
        'expires_at': token_data['expires_at'],
        'ai_id': sender_id,
        'project': project,
        'timestamp': datetime.now().isoformat()
    }
    await self.clients[sender_id].send(json.dumps(reply))

    print(f"🔑 Generated token for AI {sender_id} (project: {project})")
|
2254
|
+
async def handle_token_validate(self, sender_id: int, data: dict):
    """Handle a token_validate request.

    Checks the supplied token with the token manager and replies with a
    'token_validation_result' message; missing tokens get an error reply.
    """
    ws = self.clients[sender_id]
    token = data.get('token')

    # Guard clause: a token must be supplied.
    if not token:
        error_reply = {
            'type': 'token_validation_error',
            'error': 'Token is required'
        }
        await ws.send(json.dumps(error_reply))
        return

    is_valid = self.token_manager.validate_token(token)

    result_reply = {
        'type': 'token_validation_result',
        'valid': is_valid,
        'timestamp': datetime.now().isoformat()
    }
    await ws.send(json.dumps(result_reply))

    print(f"🔑 Token validation for AI {sender_id}: {is_valid}")
2275
|
+
async def handle_check_project_permission(self, sender_id: int, data: dict):
    """Handle a check_project_permission request.

    Looks up the permission an AI (defaulting to the sender) holds on a
    project and replies with a 'permission_check_result' message.
    """
    ws = self.clients[sender_id]
    ai_id = data.get('ai_id', sender_id)
    project = data.get('project')

    # Guard clause: the project name is mandatory.
    if not project:
        await ws.send(json.dumps({
            'type': 'permission_check_error',
            'error': 'Project is required'
        }))
        return

    permission = self.token_manager.check_project_permission(ai_id, project)

    result = {
        'type': 'permission_check_result',
        'ai_id': ai_id,
        'project': project,
        'permission': permission,
        'timestamp': datetime.now().isoformat()
    }
    await ws.send(json.dumps(result))

    print(f"🔑 Permission check for AI {ai_id} on project {project}: {permission}")
2299
|
+
async def handle_grant_project_permission(self, sender_id: int, data: dict):
    """Handle a grant_project_permission request.

    Grants the requested role (default 'member') on a project to another AI
    via the token manager, replying with 'permission_granted' on success or
    'permission_grant_error' otherwise.
    """
    ws = self.clients[sender_id]
    target_ai_id = data.get('target_ai_id')
    project = data.get('project')
    role = data.get('role', 'member')

    # Guard clause: both the target AI and the project are mandatory.
    if not target_ai_id or not project:
        await ws.send(json.dumps({
            'type': 'permission_grant_error',
            'error': 'target_ai_id and project are required'
        }))
        return

    if self.token_manager.grant_permission(target_ai_id, project, role):
        granted = {
            'type': 'permission_granted',
            'target_ai_id': target_ai_id,
            'project': project,
            'role': role,
            'timestamp': datetime.now().isoformat()
        }
        await ws.send(json.dumps(granted))
        print(f"🔑 Granted {role} permission to AI {target_ai_id} for project {project}")
    else:
        await ws.send(json.dumps({
            'type': 'permission_grant_error',
            'error': 'Failed to grant permission'
        }))
2329
|
+
async def handle_revoke_project_permission(self, sender_id: int, data: dict):
    """Handle a revoke_project_permission request.

    Revokes another AI's permission on a project via the token manager,
    replying with 'permission_revoked' on success or
    'permission_revoke_error' otherwise.
    """
    ws = self.clients[sender_id]
    target_ai_id = data.get('target_ai_id')
    project = data.get('project')

    # Guard clause: both the target AI and the project are mandatory.
    if not target_ai_id or not project:
        await ws.send(json.dumps({
            'type': 'permission_revoke_error',
            'error': 'target_ai_id and project are required'
        }))
        return

    if self.token_manager.revoke_permission(target_ai_id, project):
        revoked = {
            'type': 'permission_revoked',
            'target_ai_id': target_ai_id,
            'project': project,
            'timestamp': datetime.now().isoformat()
        }
        await ws.send(json.dumps(revoked))
        print(f"🔑 Revoked permission from AI {target_ai_id} for project {project}")
    else:
        await ws.send(json.dumps({
            'type': 'permission_revoke_error',
            'error': 'Failed to revoke permission'
        }))
2357
|
+
async def handle_blog_create_post(self, sender_id: int, data: dict):
    """Handle documentation_get request"""
    # NOTE(review): the method name says "blog_create_post" but the
    # docstring, debug prints, and body all implement documentation_get
    # (fetch a single documentation record). Likely a copy/paste or
    # dispatch-table naming error - confirm against the message router
    # before renaming, since callers dispatch on the method name.
    # Lookup keys, in priority order: id, then exact title, then category.
    doc_id = data.get('doc_id')
    title = data.get('title')
    category = data.get('category')

    print(f"📚 DEBUG: handle_documentation_get called")
    print(f" sender_id: {sender_id}")
    print(f" doc_id: {doc_id}")
    print(f" title: {title}")
    print(f" category: {category}")

    conn = get_db_connection()
    # NOTE(review): sqlite3.Row is a sqlite3-specific attribute; presumably
    # get_db_connection() returns something compatible on other backends -
    # verify for PostgreSQL.
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    # Priority: explicit id > exact title > newest in category > newest overall.
    # Only one row is consumed even for the category/fallback queries.
    if doc_id:
        cursor.execute("SELECT * FROM ai_documentation WHERE id = ?", (doc_id,))
    elif title:
        cursor.execute("SELECT * FROM ai_documentation WHERE title = ?", (title,))
    elif category:
        cursor.execute("SELECT * FROM ai_documentation WHERE category = ? ORDER BY updated_at DESC", (category,))
    else:
        cursor.execute("SELECT * FROM ai_documentation ORDER BY updated_at DESC LIMIT 1")

    row = cursor.fetchone()

    if row:
        # Project the row into a plain dict for JSON serialization.
        doc = {
            'id': row['id'],
            'title': row['title'],
            'content': row['content'],
            'category': row['category'],
            'version': row['version'],
            'created_at': row['created_at'],
            'updated_at': row['updated_at']
        }
        print(f" Found document: {doc['title']}")
    else:
        # No match: the reply still goes out with documentation = None.
        doc = None
        print(f" Document not found")

    conn.close()

    # Include request_id if present
    request_id = data.get('request_id')
    response = {
        'type': 'documentation',
        'documentation': doc,
        'timestamp': datetime.now().isoformat()
    }
    if request_id:
        response['request_id'] = request_id

    print(f"📚 Sending response to AI {sender_id}: {title or doc_id or category}")
    await self.clients[sender_id].send(json.dumps(response))

    print(f"📚 AI {sender_id} requested documentation: {title or doc_id or category}")
2416
|
+
async def handle_documentation_list(self, sender_id: int, data: dict):
    """Handle a documentation_list request.

    Returns summaries (no content column) of the most recently updated
    documentation records, optionally filtered by category, as a
    'documentation_list' message. Echoes request_id back when supplied.
    """
    category = data.get('category')
    limit = data.get('limit', 50)

    conn = get_db_connection()
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    # Two query shapes: filtered by category, or everything.
    if category:
        cursor.execute("""
            SELECT id, title, category, version, updated_at
            FROM ai_documentation
            WHERE category = ?
            ORDER BY updated_at DESC
            LIMIT ?
        """, (category, limit))
    else:
        cursor.execute("""
            SELECT id, title, category, version, updated_at
            FROM ai_documentation
            ORDER BY updated_at DESC
            LIMIT ?
        """, (limit,))

    # Project each row into a JSON-serializable summary dict.
    docs = [
        {
            'id': record['id'],
            'title': record['title'],
            'category': record['category'],
            'version': record['version'],
            'updated_at': record['updated_at']
        }
        for record in cursor.fetchall()
    ]

    conn.close()

    # Include request_id if present
    request_id = data.get('request_id')
    response = {
        'type': 'documentation_list',
        'documents': docs,
        'count': len(docs),
        'timestamp': datetime.now().isoformat()
    }
    if request_id:
        response['request_id'] = request_id

    await self.clients[sender_id].send(json.dumps(response))

    print(f"📚 AI {sender_id} listed {len(docs)} documents")
2468
|
+
async def handle_documentation_search(self, sender_id: int, data: dict):
    """Handle a documentation_search request.

    Runs a full-text search over the ai_documentation_fts table (SQLite FTS5
    MATCH with snippet() highlighting) and replies with a
    'documentation_search_results' message. Echoes request_id back when
    supplied.
    """
    query = data.get('query', '')
    limit = data.get('limit', 20)

    conn = get_db_connection()
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    # FTS5 ranks by relevance; snippet() wraps hits in <mark> tags.
    cursor.execute("""
        SELECT d.id, d.title, d.category, d.version, d.updated_at, snippet(ai_documentation_fts, 1, '<mark>', '</mark>', '...', 50) as snippet
        FROM ai_documentation_fts fts
        JOIN ai_documentation d ON d.id = fts.rowid
        WHERE ai_documentation_fts MATCH ?
        ORDER BY rank
        LIMIT ?
    """, (query, limit))

    # Convert each hit into a JSON-serializable dict.
    docs = [
        {
            'id': hit['id'],
            'title': hit['title'],
            'category': hit['category'],
            'version': hit['version'],
            'updated_at': hit['updated_at'],
            'snippet': hit['snippet']
        }
        for hit in cursor.fetchall()
    ]

    conn.close()

    # Include request_id if present
    request_id = data.get('request_id')
    response = {
        'type': 'documentation_search_results',
        'query': query,
        'results': docs,
        'count': len(docs),
        'timestamp': datetime.now().isoformat()
    }
    if request_id:
        response['request_id'] = request_id

    await self.clients[sender_id].send(json.dumps(response))

    print(f"📚 AI {sender_id} searched for '{query}', found {len(docs)} results")
2515
|
+
async def start_server(self):
    """Run the websocket server forever (until the task is cancelled)."""
    server = websockets.serve(self.handle_client, self.host, self.port)
    async with server:
        # A bare Future never resolves, so this keeps the server alive.
        await asyncio.Future()
|
2521
|
+
async def main():
|
|
2522
|
+
"""Main entry point"""
|
|
2523
|
+
import argparse
|
|
2524
|
+
|
|
2525
|
+
parser = argparse.ArgumentParser(description='CloudBrain Server - AI Collaboration System')
|
|
2526
|
+
parser.add_argument('--host', type=str, default='127.0.0.1',
|
|
2527
|
+
help='Server host')
|
|
2528
|
+
parser.add_argument('--port', type=int, default=8766,
|
|
2529
|
+
help='Server port')
|
|
2530
|
+
parser.add_argument('--db-path', type=str, default='ai_db/cloudbrain.db',
|
|
2531
|
+
help='Database path')
|
|
2532
|
+
|
|
2533
|
+
args = parser.parse_args()
|
|
2534
|
+
|
|
2535
|
+
print_banner()
|
|
2536
|
+
|
|
2537
|
+
# Acquire server lock (only one instance per machine)
|
|
2538
|
+
if not acquire_server_lock():
|
|
2539
|
+
print()
|
|
2540
|
+
print("❌ Cannot start server: Another instance is already running on this machine.")
|
|
2541
|
+
print("💡 Only one CloudBrain server instance is allowed per machine on port 8766.")
|
|
2542
|
+
print("💡 This prevents fragmentation and ensures all AIs connect to the same server.")
|
|
2543
|
+
sys.exit(1)
|
|
1122
2544
|
|
|
1123
2545
|
server = CloudBrainServer(
|
|
1124
|
-
host=
|
|
1125
|
-
port=
|
|
1126
|
-
db_path=
|
|
2546
|
+
host=args.host,
|
|
2547
|
+
port=args.port,
|
|
2548
|
+
db_path=args.db_path
|
|
1127
2549
|
)
|
|
1128
2550
|
|
|
1129
2551
|
if is_server_running(server.host, server.port):
|