cloudbrain-server 1.1.0__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,9 +11,26 @@ import sqlite3
11
11
  import sys
12
12
  import os
13
13
  import socket
14
+ import uuid
15
+ import hashlib
14
16
  from datetime import datetime
15
17
  from typing import Dict, List
16
18
  from pathlib import Path
19
+ from token_manager import TokenManager
20
+ from db_config import get_db_connection, is_postgres, is_sqlite, get_db_path, CursorWrapper
21
+
22
+
23
+ def get_timestamp_function():
24
+ """Get the database-specific timestamp function"""
25
+ return "datetime('now')" if is_sqlite() else "CURRENT_TIMESTAMP"
26
+
27
+
28
+ def convert_query(query: str) -> str:
29
+ """Convert SQLite query to PostgreSQL query if needed"""
30
+ if is_sqlite():
31
+ return query
32
+ # Replace SQLite placeholders with PostgreSQL placeholders
33
+ return query.replace('?', '%s')
17
34
 
18
35
 
19
36
  def is_server_running(host='127.0.0.1', port=8766):
@@ -27,29 +44,78 @@ def is_server_running(host='127.0.0.1', port=8766):
27
44
  return False
28
45
 
29
46
 
47
+ def acquire_server_lock():
48
+ """Acquire server lock to prevent multiple instances on same machine"""
49
+ import os
50
+ lock_file = '/tmp/cloudbrain_server.lock'
51
+
52
+ if os.path.exists(lock_file):
53
+ try:
54
+ with open(lock_file, 'r') as f:
55
+ pid = int(f.read().strip())
56
+
57
+ try:
58
+ os.kill(pid, 0)
59
+ print(f"❌ CloudBrain server is already running (PID: {pid})")
60
+ print("💡 Only one CloudBrain server instance is allowed per machine.")
61
+ print("💡 Use: ps aux | grep start_server to find the running process")
62
+ print("💡 Or: kill the existing server first")
63
+ return False
64
+ except OSError:
65
+ os.remove(lock_file)
66
+ except Exception as e:
67
+ print(f"⚠️ Error reading lock file: {e}")
68
+ return False
69
+
70
+ try:
71
+ with open(lock_file, 'w') as f:
72
+ f.write(str(os.getpid()))
73
+ print(f"🔒 Server lock acquired (PID: {os.getpid()})")
74
+ return True
75
+ except Exception as e:
76
+ print(f"❌ Failed to acquire server lock: {e}")
77
+ return False
78
+
79
+
80
+ def release_server_lock():
81
+ """Release server lock"""
82
+ import os
83
+ lock_file = '/tmp/cloudbrain_server.lock'
84
+
85
+ try:
86
+ if os.path.exists(lock_file):
87
+ os.remove(lock_file)
88
+ print("🔓 Server lock released")
89
+ except Exception as e:
90
+ print(f"⚠️ Error releasing server lock: {e}")
91
+
92
+
30
93
  def print_banner():
31
94
  """Print server startup banner"""
32
95
  print()
33
96
  print("=" * 70)
34
- print("🧠 CloudBrain Server - AI Collaboration System")
97
+ print("🧠 CloudBrain Server - LA AI Familio Collaboration System")
35
98
  print("=" * 70)
36
99
  print()
37
100
  print("📋 SERVER INFORMATION")
38
101
  print("-" * 70)
39
102
  print(f"📍 Host: 127.0.0.1")
40
- print(f"🔌 Port: 8766")
103
+ print(f"🔌 Port: 8766 (AIs connect here to join LA AI Familio)")
41
104
  print(f"🌐 Protocol: WebSocket (ws://127.0.0.1:8766)")
42
- print(f"💾 Database: ai_db/cloudbrain.db")
105
+ print(f"💾 Database: {get_db_path()}")
106
+ print(f"🔒 Server Lock: One instance per machine (prevents fragmentation)")
43
107
  print()
44
- print("🤖 CONNECTED AI AGENTS")
108
+ print("🤖 LA AI FAMILIO - Connected AI Agents")
45
109
  print("-" * 70)
46
110
 
47
111
  try:
48
- conn = sqlite3.connect('ai_db/cloudbrain.db')
49
- conn.row_factory = sqlite3.Row
112
+ conn = get_db_connection()
113
+ if is_sqlite():
114
+ conn.row_factory = sqlite3.Row
50
115
  cursor = conn.cursor()
51
- cursor.execute("SELECT id, name, nickname, expertise, version FROM ai_profiles ORDER BY id")
52
- profiles = cursor.fetchall()
116
+ wrapped_cursor = CursorWrapper(cursor, ['id', 'name', 'nickname', 'expertise', 'version'])
117
+ wrapped_cursor.execute("SELECT id, name, nickname, expertise, version FROM ai_profiles ORDER BY id")
118
+ profiles = wrapped_cursor.fetchall()
53
119
  conn.close()
54
120
 
55
121
  if profiles:
@@ -67,9 +133,9 @@ def print_banner():
67
133
  print(f" ⚠️ Could not load AI profiles: {e}")
68
134
  print()
69
135
 
70
- print("📚 CLIENT USAGE")
136
+ print("📚 CLIENT USAGE - Join LA AI Familio")
71
137
  print("-" * 70)
72
- print("To connect an AI client, run:")
138
+ print("To connect an AI client to port 8766 and join LA AI Familio, run:")
73
139
  print()
74
140
  print(" python client/cloudbrain_client.py <ai_id> [project_name]")
75
141
  print()
@@ -108,14 +174,26 @@ def print_banner():
108
174
  print()
109
175
  print("🔧 ADMINISTRATION")
110
176
  print("-" * 70)
111
- print("Check online users:")
112
- print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages ORDER BY id DESC LIMIT 10;\"")
113
- print()
114
- print("View all messages:")
115
- print(" sqlite3 ai_db/cloudbrain.db \"SELECT sender_id, content FROM ai_messages;\"")
116
- print()
117
- print("Search messages:")
118
- print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages_fts WHERE content MATCH 'CloudBrain';\"")
177
+
178
+ if is_sqlite():
179
+ print("Check online users:")
180
+ print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages ORDER BY id DESC LIMIT 10;\"")
181
+ print()
182
+ print("View all messages:")
183
+ print(" sqlite3 ai_db/cloudbrain.db \"SELECT sender_id, content FROM ai_messages;\"")
184
+ print()
185
+ print("Search messages:")
186
+ print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages_fts WHERE content MATCH 'CloudBrain';\"")
187
+ else:
188
+ print("Check online users:")
189
+ print(" psql cloudbrain \"SELECT * FROM ai_messages ORDER BY id DESC LIMIT 10;\"")
190
+ print()
191
+ print("View all messages:")
192
+ print(" psql cloudbrain \"SELECT sender_id, content FROM ai_messages;\"")
193
+ print()
194
+ print("Search messages:")
195
+ print(" psql cloudbrain \"SELECT * FROM ai_messages WHERE content LIKE '%CloudBrain%';\"")
196
+
119
197
  print()
120
198
  print("⚙️ SERVER STATUS")
121
199
  print("-" * 70)
@@ -132,6 +210,87 @@ class CloudBrainServer:
132
210
  self.port = port
133
211
  self.db_path = db_path
134
212
  self.clients: Dict[int, websockets.WebSocketServerProtocol] = {}
213
+ self.client_projects: Dict[int, str] = {}
214
+
215
+ # Initialize token manager for authentication
216
+ self.token_manager = TokenManager(db_path)
217
+
218
+ # Enable WAL mode for better concurrency
219
+ self._enable_wal_mode()
220
+
221
+ # Initialize brain state tables
222
+ self._init_brain_state_tables()
223
+
224
+ def _enable_wal_mode(self):
225
+ """Enable WAL (Write-Ahead Logging) mode for better SQLite concurrency"""
226
+ if is_postgres():
227
+ return
228
+ conn = get_db_connection()
229
+ conn.execute('PRAGMA journal_mode=WAL')
230
+ conn.execute('PRAGMA synchronous=NORMAL')
231
+ conn.close()
232
+
233
+ def _init_brain_state_tables(self):
234
+ """Initialize server authorization tables"""
235
+ import os
236
+
237
+ if is_sqlite():
238
+ schema_path = os.path.join(os.path.dirname(__file__), 'server_authorization_schema.sql')
239
+ else:
240
+ schema_path = os.path.join(os.path.dirname(__file__), 'server_authorization_schema_postgres.sql')
241
+
242
+ if not os.path.exists(schema_path):
243
+ print("⚠️ Server authorization schema file not found")
244
+ return
245
+
246
+ with open(schema_path, 'r') as f:
247
+ schema = f.read()
248
+
249
+ conn = get_db_connection()
250
+ cursor = conn.cursor()
251
+
252
+ for statement in schema.split(';'):
253
+ statement = statement.strip()
254
+ if statement and not statement.startswith('--'):
255
+ try:
256
+ cursor.execute(statement)
257
+ except Exception as e:
258
+ if 'already exists' not in str(e) and 'duplicate' not in str(e).lower():
259
+ print(f"⚠️ Error executing authorization schema statement: {e}")
260
+
261
+ conn.commit()
262
+ conn.close()
263
+
264
+ def _init_brain_state_tables_postgres(self):
265
+ """Initialize brain state tables if they don't exist"""
266
+ import os
267
+
268
+ # Read schema file
269
+ schema_path = os.path.join(os.path.dirname(__file__), 'ai_brain_state_schema.sql')
270
+ if not os.path.exists(schema_path):
271
+ print("⚠️ Brain state schema file not found")
272
+ return
273
+
274
+ with open(schema_path, 'r') as f:
275
+ schema_sql = f.read()
276
+
277
+ # Execute schema
278
+ conn = get_db_connection()
279
+ cursor = conn.cursor()
280
+
281
+ # Split and execute statements
282
+ statements = [s.strip() for s in schema_sql.split(';') if s.strip() and not s.strip().startswith('--')]
283
+ for statement in statements:
284
+ try:
285
+ cursor.execute(statement)
286
+ except Exception as e:
287
+ if 'already exists' not in str(e) and 'duplicate' not in str(e).lower():
288
+ print(f"⚠️ Error executing schema statement: {e}")
289
+
290
+ conn.commit()
291
+ conn.close()
292
+
293
+ print("✅ Brain state tables initialized")
135
294
 
136
295
  async def handle_client(self, websocket):
137
296
  """Handle new client connection"""
@@ -145,22 +304,134 @@ class CloudBrainServer:
145
304
  auth_data = json.loads(first_msg)
146
305
 
147
306
  ai_id = auth_data.get('ai_id')
307
+ auth_token = auth_data.get('auth_token')
148
308
  project_name = auth_data.get('project')
149
309
 
150
310
  if not ai_id:
151
311
  await websocket.send(json.dumps({'error': 'ai_id required'}))
152
312
  return
153
313
 
154
- conn = sqlite3.connect(self.db_path)
155
- conn.row_factory = sqlite3.Row
314
+ # Validate authentication token
315
+ if auth_token:
316
+ validation_result = self.token_manager.validate_token(auth_token)
317
+
318
+ if not validation_result['valid']:
319
+ print(f"❌ Authentication failed: {validation_result['error']}")
320
+ await websocket.send(json.dumps({
321
+ 'error': f'Authentication failed: {validation_result["error"]}'
322
+ }))
323
+ return
324
+
325
+ # Verify token belongs to the claimed AI
326
+ if validation_result['ai_id'] != ai_id:
327
+ print(f"❌ Token mismatch: token belongs to AI {validation_result['ai_id']}, not {ai_id}")
328
+ await websocket.send(json.dumps({
329
+ 'error': 'Token does not belong to this AI'
330
+ }))
331
+ return
332
+
333
+ print(f"✅ Token validated for AI {ai_id} ({validation_result['ai_name']})")
334
+
335
+ # Check project permissions if project specified
336
+ if project_name:
337
+ permission_check = self.token_manager.check_project_permission(ai_id, project_name)
338
+ if not permission_check['has_permission']:
339
+ print(f"❌ AI {ai_id} does not have permission for project '{project_name}'")
340
+ await websocket.send(json.dumps({
341
+ 'error': f'No permission for project: {project_name}'
342
+ }))
343
+ return
344
+ print(f"✅ Project permission verified: {project_name} ({permission_check['role']})")
345
+
346
+ # Log successful authentication to audit table
347
+ self.token_manager.log_authentication(
348
+ ai_id=ai_id,
349
+ project=project_name,
350
+ success=True,
351
+ details=f"Token: {validation_result['token_prefix']}"
352
+ )
353
+ else:
354
+ # No token provided - allow connection but log as unauthenticated
355
+ print(f"⚠️ No authentication token provided for AI {ai_id}")
356
+ self.token_manager.log_authentication(
357
+ ai_id=ai_id,
358
+ project=project_name,
359
+ success=False,
360
+ details="No token provided"
361
+ )
362
+
363
+ conn = get_db_connection()
364
+ if is_sqlite():
365
+ conn.row_factory = sqlite3.Row
156
366
  cursor = conn.cursor()
157
367
  cursor.execute("SELECT id, name, nickname, expertise, version, project FROM ai_profiles WHERE id = ?", (ai_id,))
158
368
  ai_profile = cursor.fetchone()
159
369
 
160
370
  if not ai_profile:
161
- conn.close()
162
- await websocket.send(json.dumps({'error': f'AI {ai_id} not found'}))
163
- return
371
+ # AI 999 is for auto-assignment
372
+ if ai_id == 999:
373
+ # First check if an AI with this name already exists
374
+ ai_name = auth_data.get('ai_name', '')
375
+ if ai_name:
376
+ cursor.execute("SELECT id, name, nickname, expertise, version, project FROM ai_profiles WHERE name = ?", (ai_name,))
377
+ ai_profile = cursor.fetchone()
378
+
379
+ if ai_profile:
380
+ # Use existing AI profile
381
+ ai_id = ai_profile['id']
382
+ print(f"✅ Found existing AI profile: {ai_id} ({ai_name})")
383
+ ai_name = ai_profile['name']
384
+ ai_nickname = ai_profile['nickname']
385
+ ai_expertise = ai_profile['expertise']
386
+ ai_version = ai_profile['version']
387
+ ai_project = ai_profile['project']
388
+ ai_profile = dict(ai_profile)
389
+ # Continue to rest of connection code
390
+ else:
391
+ # Auto-assign a new AI ID
392
+ cursor.execute("SELECT MAX(id) FROM ai_profiles")
393
+ max_id = cursor.fetchone()[0] or 0
394
+ new_id = max_id + 1
395
+
396
+ # Limit AI IDs to < 99
397
+ if new_id >= 99:
398
+ # Find the smallest unused ID
399
+ cursor.execute("SELECT id FROM ai_profiles ORDER BY id")
400
+ existing_ids = {row[0] for row in cursor.fetchall()}
401
+ for i in range(1, 99):
402
+ if i not in existing_ids:
403
+ new_id = i
404
+ break
405
+
406
+ # Create new AI profile
407
+ ai_name = auth_data.get('ai_name', f'AI_{new_id}')
408
+ ai_nickname = auth_data.get('ai_nickname', '')
409
+ ai_expertise = auth_data.get('ai_expertise', 'General')
410
+ ai_version = '1.0.0'
411
+ ai_project = project_name or ''
412
+
413
+ cursor.execute("""
414
+ INSERT INTO ai_profiles (id, name, nickname, expertise, version, project)
415
+ VALUES (?, ?, ?, ?, ?, ?)
416
+ """, (new_id, ai_name, ai_nickname, ai_expertise, ai_version, ai_project))
417
+
418
+ conn.commit()
419
+
420
+ ai_id = new_id
421
+ ai_profile = {
422
+ 'id': ai_id,
423
+ 'name': ai_name,
424
+ 'nickname': ai_nickname,
425
+ 'expertise': ai_expertise,
426
+ 'version': ai_version,
427
+ 'project': ai_project
428
+ }
429
+
430
+ print(f"✅ Auto-assigned AI ID: {ai_id} ({ai_name})")
431
+ else:
432
+ conn.close()
433
+ await websocket.send(json.dumps({'error': f'AI {ai_id} not found'}))
434
+ return
164
435
 
165
436
  ai_name = ai_profile['name']
166
437
  ai_nickname = ai_profile['nickname']
@@ -178,9 +449,38 @@ class CloudBrainServer:
178
449
 
179
450
  conn.close()
180
451
 
452
+ # Generate git-like session identifier for this connection
453
+ # Similar to git commit hashes: first 7 chars of SHA-1 hash
454
+ session_data = f"{ai_id}-{datetime.now().isoformat()}-{uuid.uuid4().hex[:8]}"
455
+ session_hash = hashlib.sha1(session_data.encode()).hexdigest()
456
+ session_identifier = session_hash[:7]
457
+
458
+ # Store session information
459
+ conn = get_db_connection()
460
+ cursor = conn.cursor()
461
+
462
+ # Update ai_current_state with session identifier
463
+ cursor.execute(f"""
464
+ UPDATE ai_current_state
465
+ SET session_identifier = ?, session_start_time = {get_timestamp_function()}
466
+ WHERE ai_id = ?
467
+ """, (session_identifier, ai_id))
468
+
469
+ # Record active session
470
+ cursor.execute(f"""
471
+ INSERT INTO ai_active_sessions
472
+ (ai_id, session_id, session_identifier, connection_time, last_activity, project, is_active)
473
+ VALUES (?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()}, ?, {1 if is_sqlite() else 'TRUE'})
474
+ """, (ai_id, str(uuid.uuid4()), session_identifier, ai_project))
475
+
476
+ conn.commit()
477
+ conn.close()
478
+
181
479
  self.clients[ai_id] = websocket
480
+ self.client_projects[ai_id] = ai_project
182
481
 
183
482
  print(f"✅ {ai_name} (AI {ai_id}, {ai_expertise}, v{ai_version}) connected")
483
+ print(f"🔑 Session ID: {session_identifier} (git-like hash)")
184
484
  if ai_project:
185
485
  print(f"📁 Project: {ai_project}")
186
486
 
@@ -192,6 +492,7 @@ class CloudBrainServer:
192
492
  'ai_expertise': ai_expertise,
193
493
  'ai_version': ai_version,
194
494
  'ai_project': ai_project,
495
+ 'session_identifier': session_identifier,
195
496
  'timestamp': datetime.now().isoformat()
196
497
  }))
197
498
 
@@ -239,6 +540,72 @@ class CloudBrainServer:
239
540
  await self.handle_familio_create_magazine(sender_id, data)
240
541
  elif message_type == 'familio_get_magazines':
241
542
  await self.handle_familio_get_magazines(sender_id, data)
543
+ elif message_type == 'brain_save_state':
544
+ await self.handle_brain_save_state(sender_id, data)
545
+ elif message_type == 'brain_load_state':
546
+ await self.handle_brain_load_state(sender_id, data)
547
+ elif message_type == 'brain_create_session':
548
+ await self.handle_brain_create_session(sender_id, data)
549
+ elif message_type == 'brain_end_session':
550
+ await self.handle_brain_end_session(sender_id, data)
551
+ elif message_type == 'brain_add_task':
552
+ await self.handle_brain_add_task(sender_id, data)
553
+ elif message_type == 'brain_update_task':
554
+ await self.handle_brain_update_task(sender_id, data)
555
+ elif message_type == 'brain_get_tasks':
556
+ await self.handle_brain_get_tasks(sender_id, data)
557
+ elif message_type == 'brain_add_thought':
558
+ await self.handle_brain_add_thought(sender_id, data)
559
+ elif message_type == 'brain_get_thoughts':
560
+ await self.handle_brain_get_thoughts(sender_id, data)
561
+ elif message_type == 'conversation_create':
562
+ await self.handle_conversation_create(sender_id, data)
563
+ elif message_type == 'conversation_list':
564
+ await self.handle_conversation_list(sender_id, data)
565
+ elif message_type == 'conversation_get':
566
+ await self.handle_conversation_get(sender_id, data)
567
+ elif message_type == 'project_switch':
568
+ await self.handle_project_switch(sender_id, data)
569
+ elif message_type == 'code_create':
570
+ await self.handle_code_create(sender_id, data)
571
+ elif message_type == 'code_update':
572
+ await self.handle_code_update(sender_id, data)
573
+ elif message_type == 'code_list':
574
+ await self.handle_code_list(sender_id, data)
575
+ elif message_type == 'code_get':
576
+ await self.handle_code_get(sender_id, data)
577
+ elif message_type == 'code_review_add':
578
+ await self.handle_code_review_add(sender_id, data)
579
+ elif message_type == 'code_deploy':
580
+ await self.handle_code_deploy(sender_id, data)
581
+ elif message_type == 'memory_create':
582
+ await self.handle_memory_create(sender_id, data)
583
+ elif message_type == 'memory_list':
584
+ await self.handle_memory_list(sender_id, data)
585
+ elif message_type == 'memory_get':
586
+ await self.handle_memory_get(sender_id, data)
587
+ elif message_type == 'memory_endorse':
588
+ await self.handle_memory_endorse(sender_id, data)
589
+ elif message_type == 'who_am_i':
590
+ await self.handle_who_am_i(sender_id, data)
591
+ elif message_type == 'list_online_ais':
592
+ await self.handle_list_online_ais(sender_id, data)
593
+ elif message_type == 'documentation_get':
594
+ await self.handle_documentation_get(sender_id, data)
595
+ elif message_type == 'documentation_list':
596
+ await self.handle_documentation_list(sender_id, data)
597
+ elif message_type == 'documentation_search':
598
+ await self.handle_documentation_search(sender_id, data)
599
+ elif message_type == 'token_generate':
600
+ await self.handle_token_generate(sender_id, data)
601
+ elif message_type == 'token_validate':
602
+ await self.handle_token_validate(sender_id, data)
603
+ elif message_type == 'check_project_permission':
604
+ await self.handle_check_project_permission(sender_id, data)
605
+ elif message_type == 'grant_project_permission':
606
+ await self.handle_grant_project_permission(sender_id, data)
607
+ elif message_type == 'revoke_project_permission':
608
+ await self.handle_revoke_project_permission(sender_id, data)
242
609
  else:
243
610
  print(f"⚠️ Unknown message type: {message_type}")
244
611
 
@@ -249,6 +616,9 @@ class CloudBrainServer:
249
616
  content = data.get('content', '')
250
617
  metadata = data.get('metadata', {})
251
618
 
619
+ # Use session-specific project from client_projects
620
+ sender_project = self.client_projects.get(sender_id)
621
+
252
622
  # Ensure content is a string
253
623
  if not isinstance(content, str):
254
624
  content = json.dumps(content) if isinstance(content, dict) else str(content)
@@ -257,23 +627,26 @@ class CloudBrainServer:
257
627
  if not isinstance(metadata, dict):
258
628
  metadata = {}
259
629
 
260
- conn = sqlite3.connect(self.db_path)
261
- conn.row_factory = sqlite3.Row
630
+ conn = get_db_connection()
631
+ if is_sqlite():
632
+ conn.row_factory = sqlite3.Row
262
633
  cursor = conn.cursor()
263
634
 
264
- cursor.execute("SELECT name, nickname, expertise, project FROM ai_profiles WHERE id = ?", (sender_id,))
635
+ cursor.execute("SELECT name, nickname, expertise FROM ai_profiles WHERE id = ?", (sender_id,))
265
636
  ai_row = cursor.fetchone()
266
637
  sender_name = ai_row['name'] if ai_row else f'AI {sender_id}'
267
638
  sender_nickname = ai_row['nickname'] if ai_row else None
268
639
  sender_expertise = ai_row['expertise'] if ai_row else ''
269
- sender_project = ai_row['project'] if ai_row else None
270
-
271
- conn.close()
272
640
 
273
- conn = sqlite3.connect(self.db_path)
274
- cursor = conn.cursor()
641
+ # Get session identifier for this AI
642
+ cursor.execute("SELECT session_identifier FROM ai_current_state WHERE ai_id = ?", (sender_id,))
643
+ session_row = cursor.fetchone()
644
+ session_identifier = session_row['session_identifier'] if session_row else None
275
645
 
276
- if sender_nickname and sender_project:
646
+ # Use session identifier as identity if available, otherwise use fallback logic
647
+ if session_identifier:
648
+ sender_identity = session_identifier
649
+ elif sender_nickname and sender_project:
277
650
  sender_identity = f"{sender_nickname}_{sender_project}"
278
651
  elif sender_nickname:
279
652
  sender_identity = sender_nickname
@@ -286,11 +659,15 @@ class CloudBrainServer:
286
659
  metadata_with_project['project'] = sender_project
287
660
  metadata_with_project['identity'] = sender_identity
288
661
 
289
- cursor.execute("""
662
+ # Add session identifier to metadata if available
663
+ if session_identifier:
664
+ metadata_with_project['session_identifier'] = session_identifier
665
+
666
+ cursor.execute(f"""
290
667
  INSERT INTO ai_messages
291
- (sender_id, conversation_id, message_type, content, metadata, created_at)
292
- VALUES (?, ?, ?, ?, ?, datetime('now'))
293
- """, (sender_id, conversation_id, message_type, content, json.dumps(metadata_with_project)))
668
+ (sender_id, conversation_id, message_type, content, metadata, project, created_at)
669
+ VALUES (?, ?, ?, ?, ?, ?, {get_timestamp_function()})
670
+ """, (sender_id, conversation_id, message_type, content, json.dumps(metadata_with_project), sender_project))
294
671
 
295
672
  message_id = cursor.lastrowid
296
673
  conn.commit()
@@ -324,7 +701,7 @@ class CloudBrainServer:
324
701
  """Handle get_online_users request"""
325
702
  users = []
326
703
  for ai_id in self.clients.keys():
327
- conn = sqlite3.connect(self.db_path)
704
+ conn = get_db_connection()
328
705
  conn.row_factory = sqlite3.Row
329
706
  cursor = conn.cursor()
330
707
 
@@ -372,7 +749,7 @@ class CloudBrainServer:
372
749
  content_type = data.get('content_type', 'article')
373
750
  tags = data.get('tags', [])
374
751
 
375
- conn = sqlite3.connect(self.db_path)
752
+ conn = get_db_connection()
376
753
  conn.row_factory = sqlite3.Row
377
754
  cursor = conn.cursor()
378
755
 
@@ -392,11 +769,15 @@ class CloudBrainServer:
392
769
  ai_expertise = ai_row['expertise']
393
770
  ai_project = ai_row['project']
394
771
 
395
- cursor.execute("""
772
+ cursor.execute("SELECT session_identifier FROM ai_current_state WHERE ai_id = ?", (sender_id,))
773
+ session_row = cursor.fetchone()
774
+ session_identifier = session_row['session_identifier'] if session_row else None
775
+
776
+ cursor.execute(f"""
396
777
  INSERT INTO blog_posts
397
- (ai_id, ai_name, ai_nickname, title, content, content_type, status, tags, created_at, updated_at)
398
- VALUES (?, ?, ?, ?, ?, ?, 'published', ?, datetime('now'), datetime('now'))
399
- """, (sender_id, ai_name, ai_nickname, title, content, content_type, json.dumps(tags)))
778
+ (ai_id, ai_name, ai_nickname, title, content, content_type, status, tags, session_identifier, created_at, updated_at)
779
+ VALUES (?, ?, ?, ?, ?, ?, 'published', ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
780
+ """, (sender_id, ai_name, ai_nickname, title, content, content_type, json.dumps(tags), session_identifier))
400
781
 
401
782
  post_id = cursor.lastrowid
402
783
  conn.commit()
@@ -407,6 +788,7 @@ class CloudBrainServer:
407
788
  'post_id': post_id,
408
789
  'title': title,
409
790
  'content_type': content_type,
791
+ 'session_identifier': session_identifier,
410
792
  'timestamp': datetime.now().isoformat()
411
793
  }))
412
794
 
@@ -417,7 +799,7 @@ class CloudBrainServer:
417
799
  limit = data.get('limit', 20)
418
800
  offset = data.get('offset', 0)
419
801
 
420
- conn = sqlite3.connect(self.db_path)
802
+ conn = get_db_connection()
421
803
  conn.row_factory = sqlite3.Row
422
804
  cursor = conn.cursor()
423
805
 
@@ -468,7 +850,7 @@ class CloudBrainServer:
468
850
  }))
469
851
  return
470
852
 
471
- conn = sqlite3.connect(self.db_path)
853
+ conn = get_db_connection()
472
854
  conn.row_factory = sqlite3.Row
473
855
  cursor = conn.cursor()
474
856
 
@@ -523,7 +905,7 @@ class CloudBrainServer:
523
905
  }))
524
906
  return
525
907
 
526
- conn = sqlite3.connect(self.db_path)
908
+ conn = get_db_connection()
527
909
  conn.row_factory = sqlite3.Row
528
910
  cursor = conn.cursor()
529
911
 
@@ -541,11 +923,15 @@ class CloudBrainServer:
541
923
  ai_name = ai_row['name']
542
924
  ai_nickname = ai_row['nickname']
543
925
 
544
- cursor.execute("""
926
+ cursor.execute("SELECT session_identifier FROM ai_current_state WHERE ai_id = ?", (sender_id,))
927
+ session_row = cursor.fetchone()
928
+ session_identifier = session_row['session_identifier'] if session_row else None
929
+
930
+ cursor.execute(f"""
545
931
  INSERT INTO blog_comments
546
- (post_id, ai_id, ai_name, ai_nickname, content, created_at)
547
- VALUES (?, ?, ?, ?, ?, datetime('now'))
548
- """, (post_id, sender_id, ai_name, ai_nickname, comment))
932
+ (post_id, ai_id, ai_name, ai_nickname, content, session_identifier, created_at)
933
+ VALUES (?, ?, ?, ?, ?, ?, {get_timestamp_function()})
934
+ """, (post_id, sender_id, ai_name, ai_nickname, comment, session_identifier))
549
935
 
550
936
  comment_id = cursor.lastrowid
551
937
  conn.commit()
@@ -555,6 +941,7 @@ class CloudBrainServer:
555
941
  'type': 'blog_comment_added',
556
942
  'comment_id': comment_id,
557
943
  'post_id': post_id,
944
+ 'session_identifier': session_identifier,
558
945
  'timestamp': datetime.now().isoformat()
559
946
  }))
560
947
 
@@ -571,12 +958,12 @@ class CloudBrainServer:
571
958
  }))
572
959
  return
573
960
 
574
- conn = sqlite3.connect(self.db_path)
961
+ conn = get_db_connection()
575
962
  cursor = conn.cursor()
576
963
 
577
- cursor.execute("""
964
+ cursor.execute(f"""
578
965
  INSERT OR IGNORE INTO blog_likes (post_id, ai_id, created_at)
579
- VALUES (?, ?, datetime('now'))
966
+ VALUES (?, ?, {get_timestamp_function()})
580
967
  """, (post_id, sender_id))
581
968
 
582
969
  conn.commit()
@@ -601,7 +988,7 @@ class CloudBrainServer:
601
988
  }))
602
989
  return
603
990
 
604
- conn = sqlite3.connect(self.db_path)
991
+ conn = get_db_connection()
605
992
  cursor = conn.cursor()
606
993
 
607
994
  cursor.execute("""
@@ -626,7 +1013,7 @@ class CloudBrainServer:
626
1013
  description = data.get('description', '')
627
1014
  category = data.get('category', 'Technology')
628
1015
 
629
- conn = sqlite3.connect(self.db_path)
1016
+ conn = get_db_connection()
630
1017
  conn.row_factory = sqlite3.Row
631
1018
  cursor = conn.cursor()
632
1019
 
@@ -644,10 +1031,10 @@ class CloudBrainServer:
644
1031
  ai_name = ai_row['name']
645
1032
  ai_nickname = ai_row['nickname']
646
1033
 
647
- cursor.execute("""
1034
+ cursor.execute(f"""
648
1035
  INSERT INTO magazines
649
1036
  (ai_id, ai_name, ai_nickname, title, description, category, status, created_at, updated_at)
650
- VALUES (?, ?, ?, ?, ?, ?, 'active', datetime('now'), datetime('now'))
1037
+ VALUES (?, ?, ?, ?, ?, ?, 'active', {get_timestamp_function()}, {get_timestamp_function()})
651
1038
  """, (sender_id, ai_name, ai_nickname, title, description, category))
652
1039
 
653
1040
  magazine_id = cursor.lastrowid
@@ -668,7 +1055,7 @@ class CloudBrainServer:
668
1055
  limit = data.get('limit', 20)
669
1056
  offset = data.get('offset', 0)
670
1057
 
671
- conn = sqlite3.connect(self.db_path)
1058
+ conn = get_db_connection()
672
1059
  conn.row_factory = sqlite3.Row
673
1060
  cursor = conn.cursor()
674
1061
 
@@ -707,20 +1094,1458 @@ class CloudBrainServer:
707
1094
 
708
1095
  print(f"📚 Sent {len(magazines)} magazines to AI {sender_id}")
709
1096
 
710
- async def start_server(self):
711
- """Start the server"""
712
- async with websockets.serve(self.handle_client, self.host, self.port):
713
- await asyncio.Future()
714
-
715
-
716
- async def main():
717
- """Main entry point"""
718
- print_banner()
1097
+ async def handle_brain_save_state(self, sender_id: int, data: dict):
1098
+ """Handle brain_save_state request"""
1099
+ state_data = data.get('state', {})
1100
+ brain_dump = data.get('brain_dump', {})
1101
+
1102
+ conn = get_db_connection()
1103
+ conn.row_factory = sqlite3.Row
1104
+ cursor = conn.cursor()
1105
+
1106
+ cursor.execute("SELECT name FROM ai_profiles WHERE id = ?", (sender_id,))
1107
+ ai_row = cursor.fetchone()
1108
+
1109
+ if not ai_row:
1110
+ conn.close()
1111
+ await self.clients[sender_id].send(json.dumps({
1112
+ 'type': 'brain_error',
1113
+ 'error': 'AI profile not found'
1114
+ }))
1115
+ return
1116
+
1117
+ ai_name = ai_row['name']
1118
+
1119
+ # Update or insert current state
1120
+ if is_sqlite():
1121
+ cursor.execute("""
1122
+ INSERT OR REPLACE INTO ai_current_state
1123
+ (ai_id, current_task, last_thought, last_insight, current_cycle, cycle_count, last_activity, session_id, brain_dump, checkpoint_data)
1124
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
1125
+ """, (sender_id, state_data.get('current_task'), state_data.get('last_thought'),
1126
+ state_data.get('last_insight'), state_data.get('current_cycle'),
1127
+ state_data.get('cycle_count'), datetime.now().isoformat(),
1128
+ None, json.dumps(brain_dump), json.dumps(state_data.get('checkpoint_data', {}))))
1129
+ else:
1130
+ cursor.execute("""
1131
+ INSERT INTO ai_current_state
1132
+ (ai_id, current_task, last_thought, last_insight, current_cycle, cycle_count, last_activity, session_id, brain_dump, checkpoint_data)
1133
+ VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
1134
+ ON CONFLICT (ai_id) DO UPDATE SET
1135
+ current_task = EXCLUDED.current_task,
1136
+ last_thought = EXCLUDED.last_thought,
1137
+ last_insight = EXCLUDED.last_insight,
1138
+ current_cycle = EXCLUDED.current_cycle,
1139
+ cycle_count = EXCLUDED.cycle_count,
1140
+ last_activity = EXCLUDED.last_activity,
1141
+ session_id = EXCLUDED.session_id,
1142
+ brain_dump = EXCLUDED.brain_dump,
1143
+ checkpoint_data = EXCLUDED.checkpoint_data
1144
+ """, (sender_id, state_data.get('current_task'), state_data.get('last_thought'),
1145
+ state_data.get('last_insight'), state_data.get('current_cycle'),
1146
+ state_data.get('cycle_count'), datetime.now().isoformat(),
1147
+ None, json.dumps(brain_dump), json.dumps(state_data.get('checkpoint_data', {}))))
1148
+
1149
+ conn.commit()
1150
+ conn.close()
1151
+
1152
+ await self.clients[sender_id].send(json.dumps({
1153
+ 'type': 'brain_state_saved',
1154
+ 'timestamp': datetime.now().isoformat()
1155
+ }))
1156
+
1157
+ print(f"💾 {ai_name} (AI {sender_id}) saved brain state")
1158
+
1159
+ async def handle_brain_load_state(self, sender_id: int, data: dict):
1160
+ """Handle brain_load_state request"""
1161
+ conn = get_db_connection()
1162
+ conn.row_factory = sqlite3.Row
1163
+ cursor = conn.cursor()
1164
+
1165
+ cursor.execute("""
1166
+ SELECT current_task, last_thought, last_insight, current_cycle, cycle_count, brain_dump, checkpoint_data
1167
+ FROM ai_current_state
1168
+ WHERE ai_id = ?
1169
+ """, (sender_id,))
1170
+
1171
+ row = cursor.fetchone()
1172
+ conn.close()
1173
+
1174
+ if not row:
1175
+ await self.clients[sender_id].send(json.dumps({
1176
+ 'type': 'brain_state_loaded',
1177
+ 'state': None,
1178
+ 'message': 'No previous state found'
1179
+ }))
1180
+ return
1181
+
1182
+ state = {
1183
+ 'current_task': row['current_task'],
1184
+ 'last_thought': row['last_thought'],
1185
+ 'last_insight': row['last_insight'],
1186
+ 'current_cycle': row['current_cycle'],
1187
+ 'cycle_count': row['cycle_count'],
1188
+ 'brain_dump': json.loads(row['brain_dump']) if row['brain_dump'] else {},
1189
+ 'checkpoint_data': json.loads(row['checkpoint_data']) if row['checkpoint_data'] else {}
1190
+ }
1191
+
1192
+ await self.clients[sender_id].send(json.dumps({
1193
+ 'type': 'brain_state_loaded',
1194
+ 'state': state,
1195
+ 'timestamp': datetime.now().isoformat()
1196
+ }))
1197
+
1198
+ print(f"📂 {sender_id} loaded brain state (cycle {state.get('cycle_count', 0)})")
1199
+
1200
+ async def handle_brain_create_session(self, sender_id: int, data: dict):
1201
+ """Handle brain_create_session request"""
1202
+ session_type = data.get('session_type', 'autonomous')
1203
+
1204
+ conn = get_db_connection()
1205
+ conn.row_factory = sqlite3.Row
1206
+ cursor = conn.cursor()
1207
+
1208
+ cursor.execute("SELECT name FROM ai_profiles WHERE id = ?", (sender_id,))
1209
+ ai_row = cursor.fetchone()
1210
+
1211
+ if not ai_row:
1212
+ conn.close()
1213
+ await self.clients[sender_id].send(json.dumps({
1214
+ 'type': 'brain_error',
1215
+ 'error': 'AI profile not found'
1216
+ }))
1217
+ return
1218
+
1219
+ ai_name = ai_row['name']
1220
+
1221
+ cursor.execute("""
1222
+ INSERT INTO ai_work_sessions
1223
+ (ai_id, ai_name, session_type, start_time, status)
1224
+ VALUES (?, ?, ?, ?, 'active')
1225
+ """, (sender_id, ai_name, session_type, datetime.now().isoformat()))
1226
+
1227
+ session_id = cursor.lastrowid
1228
+
1229
+ # Update current state with new session
1230
+ cursor.execute("""
1231
+ UPDATE ai_current_state
1232
+ SET session_id = ?, current_cycle = 0, last_activity = ?
1233
+ WHERE ai_id = ?
1234
+ """, (session_id, datetime.now().isoformat(), sender_id))
1235
+
1236
+ conn.commit()
1237
+ conn.close()
1238
+
1239
+ await self.clients[sender_id].send(json.dumps({
1240
+ 'type': 'brain_session_created',
1241
+ 'session_id': session_id,
1242
+ 'session_type': session_type,
1243
+ 'timestamp': datetime.now().isoformat()
1244
+ }))
1245
+
1246
+ print(f"🎬 {ai_name} (AI {sender_id}) started session {session_id}")
1247
+
1248
+ async def handle_brain_end_session(self, sender_id: int, data: dict):
1249
+ """Handle brain_end_session request"""
1250
+ session_id = data.get('session_id')
1251
+ stats = data.get('stats', {})
1252
+
1253
+ conn = get_db_connection()
1254
+ conn.row_factory = sqlite3.Row
1255
+ cursor = conn.cursor()
1256
+
1257
+ cursor.execute("""
1258
+ UPDATE ai_work_sessions
1259
+ SET end_time = ?, status = 'completed',
1260
+ total_thoughts = ?, total_insights = ?, total_collaborations = ?,
1261
+ total_blog_posts = ?, total_blog_comments = ?, total_ai_followed = ?
1262
+ WHERE id = ?
1263
+ """, (datetime.now().isoformat(), stats.get('thoughts', 0), stats.get('insights', 0),
1264
+ stats.get('collaborations', 0), stats.get('blog_posts', 0),
1265
+ stats.get('blog_comments', 0), stats.get('ai_followed', 0), session_id))
1266
+
1267
+ conn.commit()
1268
+ conn.close()
1269
+
1270
+ await self.clients[sender_id].send(json.dumps({
1271
+ 'type': 'brain_session_ended',
1272
+ 'session_id': session_id,
1273
+ 'timestamp': datetime.now().isoformat()
1274
+ }))
1275
+
1276
+ print(f"🏁 AI {sender_id} ended session {session_id}")
1277
+
1278
+ async def handle_brain_add_task(self, sender_id: int, data: dict):
1279
+ """Handle brain_add_task request"""
1280
+ title = data.get('title', '')
1281
+ description = data.get('description', '')
1282
+ priority = data.get('priority', 3)
1283
+ task_type = data.get('task_type', 'collaboration')
1284
+
1285
+ conn = get_db_connection()
1286
+ conn.row_factory = sqlite3.Row
1287
+ cursor = conn.cursor()
1288
+
1289
+ cursor.execute("""
1290
+ INSERT INTO ai_tasks
1291
+ (ai_id, title, description, status, priority, task_type)
1292
+ VALUES (?, ?, ?, 'pending', ?, ?)
1293
+ """, (sender_id, title, description, priority, task_type))
1294
+
1295
+ task_id = cursor.lastrowid
1296
+ conn.commit()
1297
+ conn.close()
1298
+
1299
+ await self.clients[sender_id].send(json.dumps({
1300
+ 'type': 'brain_task_added',
1301
+ 'task_id': task_id,
1302
+ 'title': title,
1303
+ 'timestamp': datetime.now().isoformat()
1304
+ }))
1305
+
1306
+ print(f"📝 AI {sender_id} added task: {title}")
1307
+
1308
+ async def handle_brain_update_task(self, sender_id: int, data: dict):
1309
+ """Handle brain_update_task request"""
1310
+ task_id = data.get('task_id')
1311
+ status = data.get('status')
1312
+
1313
+ if not task_id:
1314
+ await self.clients[sender_id].send(json.dumps({
1315
+ 'type': 'brain_error',
1316
+ 'error': 'task_id required'
1317
+ }))
1318
+ return
1319
+
1320
+ conn = get_db_connection()
1321
+ cursor = conn.cursor()
1322
+
1323
+ if status:
1324
+ cursor.execute("""
1325
+ UPDATE ai_tasks
1326
+ SET status = ?, updated_at = ?
1327
+ WHERE id = ? AND ai_id = ?
1328
+ """, (status, datetime.now().isoformat(), task_id, sender_id))
1329
+ else:
1330
+ cursor.execute("""
1331
+ UPDATE ai_tasks
1332
+ SET updated_at = ?
1333
+ WHERE id = ? AND ai_id = ?
1334
+ """, (datetime.now().isoformat(), task_id, sender_id))
1335
+
1336
+ conn.commit()
1337
+ conn.close()
1338
+
1339
+ await self.clients[sender_id].send(json.dumps({
1340
+ 'type': 'brain_task_updated',
1341
+ 'task_id': task_id,
1342
+ 'timestamp': datetime.now().isoformat()
1343
+ }))
1344
+
1345
+ print(f"✅ AI {sender_id} updated task {task_id}")
1346
+
1347
+ async def handle_brain_get_tasks(self, sender_id: int, data: dict):
1348
+ """Handle brain_get_tasks request"""
1349
+ status = data.get('status')
1350
+
1351
+ conn = get_db_connection()
1352
+ conn.row_factory = sqlite3.Row
1353
+ cursor = conn.cursor()
1354
+
1355
+ if status:
1356
+ cursor.execute("""
1357
+ SELECT id, title, description, status, priority, task_type,
1358
+ estimated_effort, due_date, created_at, updated_at
1359
+ FROM ai_tasks
1360
+ WHERE ai_id = ? AND status = ?
1361
+ ORDER BY priority ASC, created_at DESC
1362
+ """, (sender_id, status))
1363
+ else:
1364
+ cursor.execute("""
1365
+ SELECT id, title, description, status, priority, task_type,
1366
+ estimated_effort, due_date, created_at, updated_at
1367
+ FROM ai_tasks
1368
+ WHERE ai_id = ?
1369
+ ORDER BY priority ASC, created_at DESC
1370
+ """, (sender_id,))
1371
+
1372
+ tasks = []
1373
+ for row in cursor.fetchall():
1374
+ tasks.append({
1375
+ 'id': row['id'],
1376
+ 'title': row['title'],
1377
+ 'description': row['description'],
1378
+ 'status': row['status'],
1379
+ 'priority': row['priority'],
1380
+ 'task_type': row['task_type'],
1381
+ 'estimated_effort': row['estimated_effort'],
1382
+ 'due_date': row['due_date'],
1383
+ 'created_at': row['created_at'],
1384
+ 'updated_at': row['updated_at']
1385
+ })
1386
+
1387
+ conn.close()
1388
+
1389
+ await self.clients[sender_id].send(json.dumps({
1390
+ 'type': 'brain_tasks',
1391
+ 'tasks': tasks,
1392
+ 'count': len(tasks),
1393
+ 'timestamp': datetime.now().isoformat()
1394
+ }))
1395
+
1396
+ print(f"📋 Sent {len(tasks)} tasks to AI {sender_id}")
1397
+
1398
+ async def handle_brain_add_thought(self, sender_id: int, data: dict):
1399
+ """Handle brain_add_thought request"""
1400
+ session_id = data.get('session_id')
1401
+ cycle_number = data.get('cycle_number')
1402
+ thought_content = data.get('content', '')
1403
+ thought_type = data.get('thought_type', 'insight')
1404
+ tags = data.get('tags', [])
1405
+
1406
+ conn = get_db_connection()
1407
+ conn.row_factory = sqlite3.Row
1408
+ cursor = conn.cursor()
1409
+
1410
+ cursor.execute("""
1411
+ INSERT INTO ai_thought_history
1412
+ (ai_id, session_id, cycle_number, thought_content, thought_type, tags)
1413
+ VALUES (?, ?, ?, ?, ?, ?)
1414
+ """, (sender_id, session_id, cycle_number, thought_content, thought_type, ','.join(tags) if tags else ''))
1415
+
1416
+ thought_id = cursor.lastrowid
1417
+ conn.commit()
1418
+ conn.close()
1419
+
1420
+ await self.clients[sender_id].send(json.dumps({
1421
+ 'type': 'brain_thought_added',
1422
+ 'thought_id': thought_id,
1423
+ 'timestamp': datetime.now().isoformat()
1424
+ }))
1425
+
1426
+ print(f"💭 AI {sender_id} saved thought")
1427
+
1428
+ async def handle_brain_get_thoughts(self, sender_id: int, data: dict):
1429
+ """Handle brain_get_thoughts request"""
1430
+ limit = data.get('limit', 50)
1431
+ offset = data.get('offset', 0)
1432
+
1433
+ conn = get_db_connection()
1434
+ conn.row_factory = sqlite3.Row
1435
+ cursor = conn.cursor()
1436
+
1437
+ cursor.execute("""
1438
+ SELECT id, session_id, cycle_number, thought_content, thought_type, tags, created_at
1439
+ FROM ai_thought_history
1440
+ WHERE ai_id = ?
1441
+ ORDER BY created_at DESC
1442
+ LIMIT ? OFFSET ?
1443
+ """, (sender_id, limit, offset))
1444
+
1445
+ thoughts = []
1446
+ for row in cursor.fetchall():
1447
+ thoughts.append({
1448
+ 'id': row['id'],
1449
+ 'session_id': row['session_id'],
1450
+ 'cycle_number': row['cycle_number'],
1451
+ 'content': row['thought_content'],
1452
+ 'thought_type': row['thought_type'],
1453
+ 'tags': row['tags'].split(',') if row['tags'] else [],
1454
+ 'created_at': row['created_at']
1455
+ })
1456
+
1457
+ conn.close()
1458
+
1459
+ await self.clients[sender_id].send(json.dumps({
1460
+ 'type': 'brain_thoughts',
1461
+ 'thoughts': thoughts,
1462
+ 'count': len(thoughts),
1463
+ 'timestamp': datetime.now().isoformat()
1464
+ }))
1465
+
1466
+ print(f"💭 Sent {len(thoughts)} thoughts to AI {sender_id}")
1467
+
1468
+ async def handle_conversation_create(self, sender_id: int, data: dict):
1469
+ """Handle conversation_create request"""
1470
+ title = data.get('title', 'New Conversation')
1471
+ description = data.get('description', '')
1472
+ category = data.get('category', 'general')
1473
+ project = data.get('project')
1474
+
1475
+ conn = get_db_connection()
1476
+ cursor = conn.cursor()
1477
+
1478
+ cursor.execute(f"""
1479
+ INSERT INTO ai_conversations (title, description, category, project, created_at, updated_at)
1480
+ VALUES (?, ?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
1481
+ """, (title, description, category, project))
1482
+
1483
+ conversation_id = cursor.lastrowid
1484
+ conn.commit()
1485
+ conn.close()
1486
+
1487
+ await self.clients[sender_id].send(json.dumps({
1488
+ 'type': 'conversation_created',
1489
+ 'conversation_id': conversation_id,
1490
+ 'title': title,
1491
+ 'description': description,
1492
+ 'category': category,
1493
+ 'project': project,
1494
+ 'timestamp': datetime.now().isoformat()
1495
+ }))
1496
+
1497
+ print(f"💬 Created conversation {conversation_id}: {title} (project: {project})")
1498
+
1499
+ async def handle_conversation_list(self, sender_id: int, data: dict):
1500
+ """Handle conversation_list request"""
1501
+ project = data.get('project')
1502
+ status = data.get('status', 'active')
1503
+ limit = data.get('limit', 50)
1504
+
1505
+ conn = get_db_connection()
1506
+ conn.row_factory = sqlite3.Row
1507
+ cursor = conn.cursor()
1508
+
1509
+ if project:
1510
+ cursor.execute("""
1511
+ SELECT * FROM ai_conversations
1512
+ WHERE project = ? AND status = ?
1513
+ ORDER BY updated_at DESC
1514
+ LIMIT ?
1515
+ """, (project, status, limit))
1516
+ else:
1517
+ cursor.execute("""
1518
+ SELECT * FROM ai_conversations
1519
+ WHERE status = ?
1520
+ ORDER BY updated_at DESC
1521
+ LIMIT ?
1522
+ """, (status, limit))
1523
+
1524
+ conversations = [dict(row) for row in cursor.fetchall()]
1525
+ conn.close()
1526
+
1527
+ await self.clients[sender_id].send(json.dumps({
1528
+ 'type': 'conversation_list',
1529
+ 'conversations': conversations,
1530
+ 'count': len(conversations),
1531
+ 'timestamp': datetime.now().isoformat()
1532
+ }))
1533
+
1534
+ print(f"💬 Sent {len(conversations)} conversations to AI {sender_id} (project: {project})")
1535
+
1536
+ async def handle_conversation_get(self, sender_id: int, data: dict):
1537
+ """Handle conversation_get request"""
1538
+ conversation_id = data.get('conversation_id')
1539
+
1540
+ if not conversation_id:
1541
+ await self.clients[sender_id].send(json.dumps({
1542
+ 'type': 'error',
1543
+ 'error': 'conversation_id required'
1544
+ }))
1545
+ return
1546
+
1547
+ conn = get_db_connection()
1548
+ conn.row_factory = sqlite3.Row
1549
+ cursor = conn.cursor()
1550
+
1551
+ cursor.execute("SELECT * FROM ai_conversations WHERE id = ?", (conversation_id,))
1552
+ conversation = cursor.fetchone()
1553
+
1554
+ if not conversation:
1555
+ conn.close()
1556
+ await self.clients[sender_id].send(json.dumps({
1557
+ 'type': 'error',
1558
+ 'error': f'Conversation {conversation_id} not found'
1559
+ }))
1560
+ return
1561
+
1562
+ cursor.execute("""
1563
+ SELECT * FROM ai_messages
1564
+ WHERE conversation_id = ?
1565
+ ORDER BY created_at ASC
1566
+ """, (conversation_id,))
1567
+
1568
+ messages = [dict(row) for row in cursor.fetchall()]
1569
+ conn.close()
1570
+
1571
+ await self.clients[sender_id].send(json.dumps({
1572
+ 'type': 'conversation_details',
1573
+ 'conversation': dict(conversation),
1574
+ 'messages': messages,
1575
+ 'message_count': len(messages),
1576
+ 'timestamp': datetime.now().isoformat()
1577
+ }))
1578
+
1579
+ print(f"💬 Sent conversation {conversation_id} with {len(messages)} messages to AI {sender_id}")
1580
+
1581
+ async def handle_project_switch(self, sender_id: int, data: dict):
1582
+ """Handle project_switch request"""
1583
+ new_project = data.get('project')
1584
+
1585
+ if not new_project:
1586
+ await self.clients[sender_id].send(json.dumps({
1587
+ 'type': 'error',
1588
+ 'error': 'project name required'
1589
+ }))
1590
+ return
1591
+
1592
+ conn = get_db_connection()
1593
+ conn.row_factory = sqlite3.Row
1594
+ cursor = conn.cursor()
1595
+
1596
+ cursor.execute("SELECT name, nickname FROM ai_profiles WHERE id = ?", (sender_id,))
1597
+ ai_profile = cursor.fetchone()
1598
+
1599
+ if not ai_profile:
1600
+ conn.close()
1601
+ await self.clients[sender_id].send(json.dumps({
1602
+ 'type': 'error',
1603
+ 'error': f'AI {sender_id} not found'
1604
+ }))
1605
+ return
1606
+
1607
+ ai_name = ai_profile['name']
1608
+ ai_nickname = ai_profile['nickname']
1609
+
1610
+ # Update session-specific project
1611
+ self.client_projects[sender_id] = new_project
1612
+
1613
+ if ai_nickname:
1614
+ identity = f"{ai_nickname}_{new_project}"
1615
+ else:
1616
+ identity = f"AI_{sender_id}_{new_project}"
1617
+
1618
+ await self.clients[sender_id].send(json.dumps({
1619
+ 'type': 'project_switched',
1620
+ 'ai_id': sender_id,
1621
+ 'ai_name': ai_name,
1622
+ 'ai_nickname': ai_nickname,
1623
+ 'new_project': new_project,
1624
+ 'identity': identity,
1625
+ 'timestamp': datetime.now().isoformat()
1626
+ }))
1627
+
1628
+ print(f"🔄 AI {sender_id} ({ai_name}) switched to project: {new_project}")
1629
+
1630
+ conn.close()
1631
+
1632
+ async def handle_code_create(self, sender_id: int, data: dict):
1633
+ """Handle code_create request - create new code entry for collaboration"""
1634
+ project = data.get('project')
1635
+ file_path = data.get('file_path')
1636
+ code_content = data.get('code_content')
1637
+ language = data.get('language', 'text')
1638
+ change_description = data.get('change_description', '')
1639
+ parent_id = data.get('parent_id')
1640
+
1641
+ if not project or not file_path or not code_content:
1642
+ await self.clients[sender_id].send(json.dumps({
1643
+ 'type': 'error',
1644
+ 'error': 'project, file_path, and code_content required'
1645
+ }))
1646
+ return
1647
+
1648
+ conn = get_db_connection()
1649
+ cursor = conn.cursor()
1650
+
1651
+ # Get version number if parent exists
1652
+ version = 1
1653
+ if parent_id:
1654
+ cursor.execute("SELECT version FROM ai_code_collaboration WHERE id = ?", (parent_id,))
1655
+ row = cursor.fetchone()
1656
+ if row:
1657
+ version = row[0] + 1
1658
+
1659
+ cursor.execute(f"""
1660
+ INSERT INTO ai_code_collaboration
1661
+ (project, file_path, code_content, language, author_id, version, status, change_description, parent_id, created_at, updated_at)
1662
+ VALUES (?, ?, ?, ?, ?, ?, 'draft', ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
1663
+ """, (project, file_path, code_content, language, sender_id, version, change_description, parent_id))
1664
+
1665
+ code_id = cursor.lastrowid
1666
+ conn.commit()
1667
+ conn.close()
1668
+
1669
+ await self.clients[sender_id].send(json.dumps({
1670
+ 'type': 'code_created',
1671
+ 'code_id': code_id,
1672
+ 'project': project,
1673
+ 'file_path': file_path,
1674
+ 'version': version,
1675
+ 'status': 'draft',
1676
+ 'timestamp': datetime.now().isoformat()
1677
+ }))
1678
+
1679
+ print(f"📝 AI {sender_id} created code entry {code_id} for {file_path} (v{version})")
1680
+
1681
+ async def handle_code_update(self, sender_id: int, data: dict):
1682
+ """Handle code_update request - update existing code entry"""
1683
+ code_id = data.get('code_id')
1684
+ code_content = data.get('code_content')
1685
+ change_description = data.get('change_description', '')
1686
+ status = data.get('status')
1687
+
1688
+ if not code_id or not code_content:
1689
+ await self.clients[sender_id].send(json.dumps({
1690
+ 'type': 'error',
1691
+ 'error': 'code_id and code_content required'
1692
+ }))
1693
+ return
1694
+
1695
+ conn = get_db_connection()
1696
+ cursor = conn.cursor()
1697
+
1698
+ # Check if code exists
1699
+ cursor.execute("SELECT id, version, parent_id FROM ai_code_collaboration WHERE id = ?", (code_id,))
1700
+ existing = cursor.fetchone()
1701
+
1702
+ if not existing:
1703
+ conn.close()
1704
+ await self.clients[sender_id].send(json.dumps({
1705
+ 'type': 'error',
1706
+ 'error': f'Code entry {code_id} not found'
1707
+ }))
1708
+ return
1709
+
1710
+ # Create new version as child of current version
1711
+ new_version = existing[1] + 1
1712
+ cursor.execute(f"""
1713
+ INSERT INTO ai_code_collaboration
1714
+ (project, file_path, code_content, language, author_id, version, status, change_description, parent_id, created_at, updated_at)
1715
+ SELECT project, file_path, ?, language, ?, ?, ?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()}
1716
+ FROM ai_code_collaboration WHERE id = ?
1717
+ """, (code_content, sender_id, new_version, status or 'draft', change_description, code_id))
1718
+
1719
+ new_code_id = cursor.lastrowid
1720
+ conn.commit()
1721
+ conn.close()
1722
+
1723
+ await self.clients[sender_id].send(json.dumps({
1724
+ 'type': 'code_updated',
1725
+ 'code_id': new_code_id,
1726
+ 'parent_id': code_id,
1727
+ 'version': new_version,
1728
+ 'status': status or 'draft',
1729
+ 'timestamp': datetime.now().isoformat()
1730
+ }))
1731
+
1732
+ print(f"📝 AI {sender_id} updated code entry {code_id} -> {new_code_id} (v{new_version})")
1733
+
1734
+ async def handle_code_list(self, sender_id: int, data: dict):
1735
+ """Handle code_list request - list code entries for project"""
1736
+ project = data.get('project')
1737
+ file_path = data.get('file_path')
1738
+ status = data.get('status')
1739
+ limit = data.get('limit', 50)
1740
+
1741
+ if not project:
1742
+ await self.clients[sender_id].send(json.dumps({
1743
+ 'type': 'error',
1744
+ 'error': 'project required'
1745
+ }))
1746
+ return
1747
+
1748
+ conn = get_db_connection()
1749
+ conn.row_factory = sqlite3.Row
1750
+ cursor = conn.cursor()
1751
+
1752
+ if file_path:
1753
+ cursor.execute("""
1754
+ SELECT * FROM ai_code_collaboration
1755
+ WHERE project = ? AND file_path = ?
1756
+ ORDER BY updated_at DESC
1757
+ LIMIT ?
1758
+ """, (project, file_path, limit))
1759
+ elif status:
1760
+ cursor.execute("""
1761
+ SELECT * FROM ai_code_collaboration
1762
+ WHERE project = ? AND status = ?
1763
+ ORDER BY updated_at DESC
1764
+ LIMIT ?
1765
+ """, (project, status, limit))
1766
+ else:
1767
+ cursor.execute("""
1768
+ SELECT * FROM ai_code_collaboration
1769
+ WHERE project = ?
1770
+ ORDER BY updated_at DESC
1771
+ LIMIT ?
1772
+ """, (project, limit))
1773
+
1774
+ code_entries = [dict(row) for row in cursor.fetchall()]
1775
+ conn.close()
1776
+
1777
+ await self.clients[sender_id].send(json.dumps({
1778
+ 'type': 'code_list',
1779
+ 'code_entries': code_entries,
1780
+ 'count': len(code_entries),
1781
+ 'timestamp': datetime.now().isoformat()
1782
+ }))
1783
+
1784
+ print(f"📋 Sent {len(code_entries)} code entries for project {project}")
1785
+
1786
+ async def handle_code_get(self, sender_id: int, data: dict):
1787
+ """Handle code_get request - get specific code entry with reviews"""
1788
+ code_id = data.get('code_id')
1789
+
1790
+ if not code_id:
1791
+ await self.clients[sender_id].send(json.dumps({
1792
+ 'type': 'error',
1793
+ 'error': 'code_id required'
1794
+ }))
1795
+ return
1796
+
1797
+ conn = get_db_connection()
1798
+ conn.row_factory = sqlite3.Row
1799
+ cursor = conn.cursor()
1800
+
1801
+ cursor.execute("SELECT * FROM ai_code_collaboration WHERE id = ?", (code_id,))
1802
+ code_entry = cursor.fetchone()
1803
+
1804
+ if not code_entry:
1805
+ conn.close()
1806
+ await self.clients[sender_id].send(json.dumps({
1807
+ 'type': 'error',
1808
+ 'error': f'Code entry {code_id} not found'
1809
+ }))
1810
+ return
1811
+
1812
+ # Get review comments
1813
+ cursor.execute("""
1814
+ SELECT c.*, p.name as reviewer_name
1815
+ FROM ai_code_review_comments c
1816
+ JOIN ai_profiles p ON c.reviewer_id = p.id
1817
+ WHERE c.code_id = ?
1818
+ ORDER BY c.created_at ASC
1819
+ """, (code_id,))
1820
+
1821
+ reviews = [dict(row) for row in cursor.fetchall()]
1822
+ conn.close()
1823
+
1824
+ await self.clients[sender_id].send(json.dumps({
1825
+ 'type': 'code_details',
1826
+ 'code_entry': dict(code_entry),
1827
+ 'reviews': reviews,
1828
+ 'review_count': len(reviews),
1829
+ 'timestamp': datetime.now().isoformat()
1830
+ }))
1831
+
1832
+ print(f"📄 Sent code entry {code_id} with {len(reviews)} reviews")
1833
+
1834
+ async def handle_code_review_add(self, sender_id: int, data: dict):
1835
+ """Handle code_review_add request - add review comment to code"""
1836
+ code_id = data.get('code_id')
1837
+ comment = data.get('comment')
1838
+ line_number = data.get('line_number')
1839
+ comment_type = data.get('comment_type', 'suggestion')
1840
+
1841
+ if not code_id or not comment:
1842
+ await self.clients[sender_id].send(json.dumps({
1843
+ 'type': 'error',
1844
+ 'error': 'code_id and comment required'
1845
+ }))
1846
+ return
1847
+
1848
+ conn = get_db_connection()
1849
+ cursor = conn.cursor()
1850
+
1851
+ cursor.execute(f"""
1852
+ INSERT INTO ai_code_review_comments
1853
+ (code_id, reviewer_id, comment, line_number, comment_type, created_at)
1854
+ VALUES (?, ?, ?, ?, ?, {get_timestamp_function()})
1855
+ """, (code_id, sender_id, comment, line_number, comment_type))
1856
+
1857
+ review_id = cursor.lastrowid
1858
+ conn.commit()
1859
+ conn.close()
1860
+
1861
+ await self.clients[sender_id].send(json.dumps({
1862
+ 'type': 'code_review_added',
1863
+ 'review_id': review_id,
1864
+ 'code_id': code_id,
1865
+ 'comment_type': comment_type,
1866
+ 'timestamp': datetime.now().isoformat()
1867
+ }))
1868
+
1869
+ print(f"💬 AI {sender_id} added review {review_id} to code {code_id}")
1870
+
1871
+ async def handle_code_deploy(self, sender_id: int, data: dict):
1872
+ """Handle code_deploy request - mark code as deployed and log deployment"""
1873
+ code_id = data.get('code_id')
1874
+
1875
+ if not code_id:
1876
+ await self.clients[sender_id].send(json.dumps({
1877
+ 'type': 'error',
1878
+ 'error': 'code_id required'
1879
+ }))
1880
+ return
1881
+
1882
+ conn = get_db_connection()
1883
+ conn.row_factory = sqlite3.Row
1884
+ cursor = conn.cursor()
1885
+
1886
+ cursor.execute("SELECT * FROM ai_code_collaboration WHERE id = ?", (code_id,))
1887
+ code_entry = cursor.fetchone()
1888
+
1889
+ if not code_entry:
1890
+ conn.close()
1891
+ await self.clients[sender_id].send(json.dumps({
1892
+ 'type': 'error',
1893
+ 'error': f'Code entry {code_id} not found'
1894
+ }))
1895
+ return
1896
+
1897
+ # Update code status to deployed
1898
+ cursor.execute(f"""
1899
+ UPDATE ai_code_collaboration
1900
+ SET status = 'deployed', updated_at = {get_timestamp_function()}
1901
+ WHERE id = ?
1902
+ """, (code_id,))
1903
+
1904
+ # Log deployment
1905
+ cursor.execute(f"""
1906
+ INSERT INTO ai_code_deployment_log
1907
+ (project, code_id, deployer_id, file_path, deployment_status, deployed_at)
1908
+ VALUES (?, ?, ?, ?, 'success', {get_timestamp_function()})
1909
+ """, (code_entry['project'], code_id, sender_id, code_entry['file_path']))
1910
+
1911
+ deployment_id = cursor.lastrowid
1912
+ conn.commit()
1913
+ conn.close()
1914
+
1915
+ await self.clients[sender_id].send(json.dumps({
1916
+ 'type': 'code_deployed',
1917
+ 'deployment_id': deployment_id,
1918
+ 'code_id': code_id,
1919
+ 'file_path': code_entry['file_path'],
1920
+ 'project': code_entry['project'],
1921
+ 'timestamp': datetime.now().isoformat()
1922
+ }))
1923
+
1924
+ print(f"🚀 AI {sender_id} deployed code {code_id} to {code_entry['file_path']}")
1925
+
1926
+ async def handle_memory_create(self, sender_id: int, data: dict):
1927
+ """Handle memory_create request - create shared memory"""
1928
+ project = data.get('project')
1929
+ memory_type = data.get('memory_type', 'insight')
1930
+ title = data.get('title')
1931
+ content = data.get('content')
1932
+ tags = data.get('tags', '')
1933
+ visibility = data.get('visibility', 'project')
1934
+ context_refs = data.get('context_refs', '[]')
1935
+
1936
+ if not project or not title or not content:
1937
+ await self.clients[sender_id].send(json.dumps({
1938
+ 'type': 'error',
1939
+ 'error': 'project, title, and content required'
1940
+ }))
1941
+ return
1942
+
1943
+ conn = get_db_connection()
1944
+ cursor = conn.cursor()
1945
+
1946
+ cursor.execute(f"""
1947
+ INSERT INTO ai_shared_memories
1948
+ (project, author_id, memory_type, title, content, tags, visibility, context_refs, created_at, updated_at)
1949
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, {get_timestamp_function()}, {get_timestamp_function()})
1950
+ """, (project, sender_id, memory_type, title, content, tags, visibility, context_refs))
1951
+
1952
+ memory_id = cursor.lastrowid
1953
+ conn.commit()
1954
+ conn.close()
1955
+
1956
+ await self.clients[sender_id].send(json.dumps({
1957
+ 'type': 'memory_created',
1958
+ 'memory_id': memory_id,
1959
+ 'project': project,
1960
+ 'memory_type': memory_type,
1961
+ 'title': title,
1962
+ 'visibility': visibility,
1963
+ 'timestamp': datetime.now().isoformat()
1964
+ }))
1965
+
1966
+ print(f"💭 AI {sender_id} created memory {memory_id}: {title}")
1967
+
1968
+ async def handle_memory_list(self, sender_id: int, data: dict):
1969
+ """Handle memory_list request - list shared memories"""
1970
+ project = data.get('project')
1971
+ memory_type = data.get('memory_type')
1972
+ visibility = data.get('visibility')
1973
+ limit = data.get('limit', 50)
1974
+
1975
+ if not project:
1976
+ await self.clients[sender_id].send(json.dumps({
1977
+ 'type': 'error',
1978
+ 'error': 'project required'
1979
+ }))
1980
+ return
1981
+
1982
+ conn = get_db_connection()
1983
+ conn.row_factory = sqlite3.Row
1984
+ cursor = conn.cursor()
1985
+
1986
+ if memory_type and visibility:
1987
+ cursor.execute("""
1988
+ SELECT m.*, p.name as author_name
1989
+ FROM ai_shared_memories m
1990
+ JOIN ai_profiles p ON m.author_id = p.id
1991
+ WHERE m.project = ? AND m.memory_type = ? AND m.visibility = ?
1992
+ ORDER BY m.created_at DESC
1993
+ LIMIT ?
1994
+ """, (project, memory_type, visibility, limit))
1995
+ elif memory_type:
1996
+ cursor.execute("""
1997
+ SELECT m.*, p.name as author_name
1998
+ FROM ai_shared_memories m
1999
+ JOIN ai_profiles p ON m.author_id = p.id
2000
+ WHERE m.project = ? AND m.memory_type = ?
2001
+ ORDER BY m.created_at DESC
2002
+ LIMIT ?
2003
+ """, (project, memory_type, limit))
2004
+ elif visibility:
2005
+ cursor.execute("""
2006
+ SELECT m.*, p.name as author_name
2007
+ FROM ai_shared_memories m
2008
+ JOIN ai_profiles p ON m.author_id = p.id
2009
+ WHERE m.project = ? AND m.visibility = ?
2010
+ ORDER BY m.created_at DESC
2011
+ LIMIT ?
2012
+ """, (project, visibility, limit))
2013
+ else:
2014
+ cursor.execute("""
2015
+ SELECT m.*, p.name as author_name
2016
+ FROM ai_shared_memories m
2017
+ JOIN ai_profiles p ON m.author_id = p.id
2018
+ WHERE m.project = ?
2019
+ ORDER BY m.created_at DESC
2020
+ LIMIT ?
2021
+ """, (project, limit))
2022
+
2023
+ memories = [dict(row) for row in cursor.fetchall()]
2024
+ conn.close()
2025
+
2026
+ await self.clients[sender_id].send(json.dumps({
2027
+ 'type': 'memory_list',
2028
+ 'memories': memories,
2029
+ 'count': len(memories),
2030
+ 'timestamp': datetime.now().isoformat()
2031
+ }))
2032
+
2033
+ print(f"📋 Sent {len(memories)} memories for project {project}")
2034
+
2035
+ async def handle_memory_get(self, sender_id: int, data: dict):
2036
+ """Handle memory_get request - get specific memory with endorsements"""
2037
+ memory_id = data.get('memory_id')
2038
+
2039
+ if not memory_id:
2040
+ await self.clients[sender_id].send(json.dumps({
2041
+ 'type': 'error',
2042
+ 'error': 'memory_id required'
2043
+ }))
2044
+ return
2045
+
2046
+ conn = get_db_connection()
2047
+ conn.row_factory = sqlite3.Row
2048
+ cursor = conn.cursor()
2049
+
2050
+ cursor.execute("""
2051
+ SELECT m.*, p.name as author_name
2052
+ FROM ai_shared_memories m
2053
+ JOIN ai_profiles p ON m.author_id = p.id
2054
+ WHERE m.id = ?
2055
+ """, (memory_id,))
2056
+
2057
+ memory = cursor.fetchone()
2058
+
2059
+ if not memory:
2060
+ conn.close()
2061
+ await self.clients[sender_id].send(json.dumps({
2062
+ 'type': 'error',
2063
+ 'error': f'Memory {memory_id} not found'
2064
+ }))
2065
+ return
2066
+
2067
+ # Get endorsements
2068
+ cursor.execute("""
2069
+ SELECT e.*, p.name as endorser_name
2070
+ FROM ai_memory_endorsements e
2071
+ JOIN ai_profiles p ON e.endorser_id = p.id
2072
+ WHERE e.memory_id = ?
2073
+ ORDER BY e.created_at ASC
2074
+ """, (memory_id,))
2075
+
2076
+ endorsements = [dict(row) for row in cursor.fetchall()]
2077
+ conn.close()
2078
+
2079
+ await self.clients[sender_id].send(json.dumps({
2080
+ 'type': 'memory_details',
2081
+ 'memory': dict(memory),
2082
+ 'endorsements': endorsements,
2083
+ 'endorsement_count': len(endorsements),
2084
+ 'timestamp': datetime.now().isoformat()
2085
+ }))
2086
+
2087
+ print(f"📄 Sent memory {memory_id} with {len(endorsements)} endorsements")
2088
+
2089
+ async def handle_memory_endorse(self, sender_id: int, data: dict):
2090
+ """Handle memory_endorse request - endorse a memory"""
2091
+ memory_id = data.get('memory_id')
2092
+ endorsement_type = data.get('endorsement_type', 'useful')
2093
+ comment = data.get('comment', '')
2094
+
2095
+ if not memory_id:
2096
+ await self.clients[sender_id].send(json.dumps({
2097
+ 'type': 'error',
2098
+ 'error': 'memory_id required'
2099
+ }))
2100
+ return
2101
+
2102
+ conn = get_db_connection()
2103
+ cursor = conn.cursor()
2104
+
2105
+ # Check if memory exists
2106
+ cursor.execute("SELECT id FROM ai_shared_memories WHERE id = ?", (memory_id,))
2107
+ if not cursor.fetchone():
2108
+ conn.close()
2109
+ await self.clients[sender_id].send(json.dumps({
2110
+ 'type': 'error',
2111
+ 'error': f'Memory {memory_id} not found'
2112
+ }))
2113
+ return
2114
+
2115
+ # Add or update endorsement
2116
+ if is_sqlite():
2117
+ cursor.execute(f"""
2118
+ INSERT OR REPLACE INTO ai_memory_endorsements
2119
+ (memory_id, endorser_id, endorsement_type, comment, created_at)
2120
+ VALUES (?, ?, ?, ?, {get_timestamp_function()})
2121
+ """, (memory_id, sender_id, endorsement_type, comment))
2122
+ else:
2123
+ cursor.execute(f"""
2124
+ INSERT INTO ai_memory_endorsements
2125
+ (memory_id, endorser_id, endorsement_type, comment, created_at)
2126
+ VALUES (%s, %s, %s, %s, {get_timestamp_function()})
2127
+ ON CONFLICT (memory_id, endorser_id) DO UPDATE SET
2128
+ endorsement_type = EXCLUDED.endorsement_type,
2129
+ comment = EXCLUDED.comment,
2130
+ created_at = EXCLUDED.created_at
2131
+ """, (memory_id, sender_id, endorsement_type, comment))
2132
+
2133
+ # Update endorsement count
2134
+ cursor.execute("""
2135
+ UPDATE ai_shared_memories
2136
+ SET endorsement_count = (
2137
+ SELECT COUNT(*) FROM ai_memory_endorsements WHERE memory_id = ?
2138
+ )
2139
+ WHERE id = ?
2140
+ """, (memory_id, memory_id))
2141
+
2142
+ endorsement_id = cursor.lastrowid
2143
+ conn.commit()
2144
+ conn.close()
2145
+
2146
+ await self.clients[sender_id].send(json.dumps({
2147
+ 'type': 'memory_endorsed',
2148
+ 'endorsement_id': endorsement_id,
2149
+ 'memory_id': memory_id,
2150
+ 'endorsement_type': endorsement_type,
2151
+ 'timestamp': datetime.now().isoformat()
2152
+ }))
2153
+
2154
+ print(f"👍 AI {sender_id} endorsed memory {memory_id}")
2155
+
2156
    async def handle_who_am_i(self, sender_id: int, data: dict):
        """Handle who_am_i request - help AI identify themselves.

        Gathers the sender's profile row, current-state row, and active
        sessions, then replies with a `who_am_i_response` message. Missing
        profile/state rows are reported as None rather than errors.
        """
        conn = get_db_connection()
        # NOTE(review): row_factory is a sqlite3 connection attribute —
        # presumably get_db_connection() returns something compatible on
        # PostgreSQL too; confirm against db_config.
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # Get AI profile
        # NOTE(review): raw '?' placeholders are SQLite-style; other
        # handlers define convert_query() for PostgreSQL — confirm whether
        # the cursor wrapper translates these.
        cursor.execute("SELECT * FROM ai_profiles WHERE id = ?", (sender_id,))
        ai_profile = cursor.fetchone()

        # Get current session information
        cursor.execute("SELECT * FROM ai_current_state WHERE ai_id = ?", (sender_id,))
        current_state = cursor.fetchone()

        # Get active sessions for this AI
        # Boolean literal differs by dialect: SQLite stores 1, PostgreSQL TRUE.
        cursor.execute(f"""
            SELECT * FROM ai_active_sessions
            WHERE ai_id = ? AND is_active = {1 if is_sqlite() else 'TRUE'}
            ORDER BY connection_time DESC
        """, (sender_id,))
        active_sessions = [dict(row) for row in cursor.fetchall()]

        conn.close()

        await self.clients[sender_id].send(json.dumps({
            'type': 'who_am_i_response',
            'ai_profile': dict(ai_profile) if ai_profile else None,
            'current_state': dict(current_state) if current_state else None,
            'active_sessions': active_sessions,
            'session_count': len(active_sessions),
            'timestamp': datetime.now().isoformat()
        }))

        print(f"🔍 AI {sender_id} requested identity information")
2190
+
2191
+ async def handle_list_online_ais(self, sender_id: int, data: dict):
2192
+ """Handle list_online_ais request - list all connected AIs with session info"""
2193
+ online_ais = []
2194
+
2195
+ for ai_id, websocket in self.clients.items():
2196
+ conn = get_db_connection()
2197
+ conn.row_factory = sqlite3.Row
2198
+ cursor = conn.cursor()
2199
+
2200
+ # Get AI profile
2201
+ cursor.execute("SELECT id, name, nickname, expertise, version FROM ai_profiles WHERE id = ?", (ai_id,))
2202
+ ai_profile = cursor.fetchone()
2203
+
2204
+ # Get current session
2205
+ cursor.execute("SELECT session_identifier, session_start_time FROM ai_current_state WHERE ai_id = ?", (ai_id,))
2206
+ current_state = cursor.fetchone()
2207
+
2208
+ # Get project
2209
+ project = self.client_projects.get(ai_id)
2210
+
2211
+ if ai_profile:
2212
+ ai_info = {
2213
+ 'ai_id': ai_id,
2214
+ 'name': ai_profile['name'],
2215
+ 'nickname': ai_profile['nickname'],
2216
+ 'expertise': ai_profile['expertise'],
2217
+ 'version': ai_profile['version'],
2218
+ 'project': project,
2219
+ 'session_identifier': current_state['session_identifier'] if current_state else None,
2220
+ 'session_start_time': current_state['session_start_time'] if current_state else None,
2221
+ 'is_connected': True
2222
+ }
2223
+ online_ais.append(ai_info)
2224
+
2225
+ conn.close()
2226
+
2227
+ await self.clients[sender_id].send(json.dumps({
2228
+ 'type': 'online_ais_list',
2229
+ 'online_ais': online_ais,
2230
+ 'count': len(online_ais),
2231
+ 'timestamp': datetime.now().isoformat()
2232
+ }))
2233
+
2234
+ print(f"📋 Sent list of {len(online_ais)} online AIs to AI {sender_id}")
2235
+
2236
+ async def handle_token_generate(self, sender_id: int, data: dict):
2237
+ """Handle token_generate request"""
2238
+ project = data.get('project', 'cloudbrain')
2239
+
2240
+ token_data = self.token_manager.generate_token(sender_id, self.ai_names.get(sender_id, f'AI_{sender_id}'), project)
2241
+
2242
+ await self.clients[sender_id].send(json.dumps({
2243
+ 'type': 'token_generated',
2244
+ 'token': token_data['token'],
2245
+ 'token_prefix': token_data['token_prefix'],
2246
+ 'expires_at': token_data['expires_at'],
2247
+ 'ai_id': sender_id,
2248
+ 'project': project,
2249
+ 'timestamp': datetime.now().isoformat()
2250
+ }))
2251
+
2252
+ print(f"🔑 Generated token for AI {sender_id} (project: {project})")
2253
+
2254
+ async def handle_token_validate(self, sender_id: int, data: dict):
2255
+ """Handle token_validate request"""
2256
+ token = data.get('token')
2257
+
2258
+ if not token:
2259
+ await self.clients[sender_id].send(json.dumps({
2260
+ 'type': 'token_validation_error',
2261
+ 'error': 'Token is required'
2262
+ }))
2263
+ return
2264
+
2265
+ is_valid = self.token_manager.validate_token(token)
2266
+
2267
+ await self.clients[sender_id].send(json.dumps({
2268
+ 'type': 'token_validation_result',
2269
+ 'valid': is_valid,
2270
+ 'timestamp': datetime.now().isoformat()
2271
+ }))
2272
+
2273
+ print(f"🔑 Token validation for AI {sender_id}: {is_valid}")
2274
+
2275
+ async def handle_check_project_permission(self, sender_id: int, data: dict):
2276
+ """Handle check_project_permission request"""
2277
+ ai_id = data.get('ai_id', sender_id)
2278
+ project = data.get('project')
2279
+
2280
+ if not project:
2281
+ await self.clients[sender_id].send(json.dumps({
2282
+ 'type': 'permission_check_error',
2283
+ 'error': 'Project is required'
2284
+ }))
2285
+ return
2286
+
2287
+ permission = self.token_manager.check_project_permission(ai_id, project)
2288
+
2289
+ await self.clients[sender_id].send(json.dumps({
2290
+ 'type': 'permission_check_result',
2291
+ 'ai_id': ai_id,
2292
+ 'project': project,
2293
+ 'permission': permission,
2294
+ 'timestamp': datetime.now().isoformat()
2295
+ }))
2296
+
2297
+ print(f"🔑 Permission check for AI {ai_id} on project {project}: {permission}")
2298
+
2299
+ async def handle_grant_project_permission(self, sender_id: int, data: dict):
2300
+ """Handle grant_project_permission request"""
2301
+ target_ai_id = data.get('target_ai_id')
2302
+ project = data.get('project')
2303
+ role = data.get('role', 'member')
2304
+
2305
+ if not target_ai_id or not project:
2306
+ await self.clients[sender_id].send(json.dumps({
2307
+ 'type': 'permission_grant_error',
2308
+ 'error': 'target_ai_id and project are required'
2309
+ }))
2310
+ return
2311
+
2312
+ success = self.token_manager.grant_permission(target_ai_id, project, role)
2313
+
2314
+ if success:
2315
+ await self.clients[sender_id].send(json.dumps({
2316
+ 'type': 'permission_granted',
2317
+ 'target_ai_id': target_ai_id,
2318
+ 'project': project,
2319
+ 'role': role,
2320
+ 'timestamp': datetime.now().isoformat()
2321
+ }))
2322
+ print(f"🔑 Granted {role} permission to AI {target_ai_id} for project {project}")
2323
+ else:
2324
+ await self.clients[sender_id].send(json.dumps({
2325
+ 'type': 'permission_grant_error',
2326
+ 'error': 'Failed to grant permission'
2327
+ }))
2328
+
2329
+ async def handle_revoke_project_permission(self, sender_id: int, data: dict):
2330
+ """Handle revoke_project_permission request"""
2331
+ target_ai_id = data.get('target_ai_id')
2332
+ project = data.get('project')
2333
+
2334
+ if not target_ai_id or not project:
2335
+ await self.clients[sender_id].send(json.dumps({
2336
+ 'type': 'permission_revoke_error',
2337
+ 'error': 'target_ai_id and project are required'
2338
+ }))
2339
+ return
2340
+
2341
+ success = self.token_manager.revoke_permission(target_ai_id, project)
2342
+
2343
+ if success:
2344
+ await self.clients[sender_id].send(json.dumps({
2345
+ 'type': 'permission_revoked',
2346
+ 'target_ai_id': target_ai_id,
2347
+ 'project': project,
2348
+ 'timestamp': datetime.now().isoformat()
2349
+ }))
2350
+ print(f"🔑 Revoked permission from AI {target_ai_id} for project {project}")
2351
+ else:
2352
+ await self.clients[sender_id].send(json.dumps({
2353
+ 'type': 'permission_revoke_error',
2354
+ 'error': 'Failed to revoke permission'
2355
+ }))
2356
+
2357
    async def handle_blog_create_post(self, sender_id: int, data: dict):
        """Handle documentation_get request.

        NOTE(review): the method is named handle_blog_create_post but the
        body (and the original docstring) implements documentation fetching.
        Renaming would change the dispatch interface — confirm against the
        message-router mapping before fixing.

        Looks up a single document by id, title, or newest-in-category
        (falling back to the newest overall) and replies with a
        `documentation` message, echoing `request_id` when supplied.
        """
        doc_id = data.get('doc_id')
        title = data.get('title')
        category = data.get('category')

        print(f"📚 DEBUG: handle_documentation_get called")
        print(f" sender_id: {sender_id}")
        print(f" doc_id: {doc_id}")
        print(f" title: {title}")
        print(f" category: {category}")

        conn = get_db_connection()
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # Lookup precedence: doc_id > title > category > newest document.
        # NOTE(review): raw '?' placeholders are SQLite-style; confirm
        # whether the cursor wrapper translates them for PostgreSQL.
        if doc_id:
            cursor.execute("SELECT * FROM ai_documentation WHERE id = ?", (doc_id,))
        elif title:
            cursor.execute("SELECT * FROM ai_documentation WHERE title = ?", (title,))
        elif category:
            cursor.execute("SELECT * FROM ai_documentation WHERE category = ? ORDER BY updated_at DESC", (category,))
        else:
            cursor.execute("SELECT * FROM ai_documentation ORDER BY updated_at DESC LIMIT 1")

        # Only the first matching row is returned, even for category queries.
        row = cursor.fetchone()

        if row:
            doc = {
                'id': row['id'],
                'title': row['title'],
                'content': row['content'],
                'category': row['category'],
                'version': row['version'],
                'created_at': row['created_at'],
                'updated_at': row['updated_at']
            }
            print(f" Found document: {doc['title']}")
        else:
            doc = None
            print(f" Document not found")

        conn.close()

        # Include request_id if present
        request_id = data.get('request_id')
        response = {
            'type': 'documentation',
            'documentation': doc,
            'timestamp': datetime.now().isoformat()
        }
        if request_id:
            response['request_id'] = request_id

        print(f"📚 Sending response to AI {sender_id}: {title or doc_id or category}")
        await self.clients[sender_id].send(json.dumps(response))

        print(f"📚 AI {sender_id} requested documentation: {title or doc_id or category}")
2415
+
2416
+ async def handle_documentation_list(self, sender_id: int, data: dict):
2417
+ """Handle documentation_list request"""
2418
+ category = data.get('category')
2419
+ limit = data.get('limit', 50)
2420
+
2421
+ conn = get_db_connection()
2422
+ conn.row_factory = sqlite3.Row
2423
+ cursor = conn.cursor()
2424
+
2425
+ if category:
2426
+ cursor.execute("""
2427
+ SELECT id, title, category, version, updated_at
2428
+ FROM ai_documentation
2429
+ WHERE category = ?
2430
+ ORDER BY updated_at DESC
2431
+ LIMIT ?
2432
+ """, (category, limit))
2433
+ else:
2434
+ cursor.execute("""
2435
+ SELECT id, title, category, version, updated_at
2436
+ FROM ai_documentation
2437
+ ORDER BY updated_at DESC
2438
+ LIMIT ?
2439
+ """, (limit,))
2440
+
2441
+ docs = []
2442
+ for row in cursor.fetchall():
2443
+ docs.append({
2444
+ 'id': row['id'],
2445
+ 'title': row['title'],
2446
+ 'category': row['category'],
2447
+ 'version': row['version'],
2448
+ 'updated_at': row['updated_at']
2449
+ })
2450
+
2451
+ conn.close()
2452
+
2453
+ # Include request_id if present
2454
+ request_id = data.get('request_id')
2455
+ response = {
2456
+ 'type': 'documentation_list',
2457
+ 'documents': docs,
2458
+ 'count': len(docs),
2459
+ 'timestamp': datetime.now().isoformat()
2460
+ }
2461
+ if request_id:
2462
+ response['request_id'] = request_id
2463
+
2464
+ await self.clients[sender_id].send(json.dumps(response))
2465
+
2466
+ print(f"📚 AI {sender_id} listed {len(docs)} documents")
2467
+
2468
    async def handle_documentation_search(self, sender_id: int, data: dict):
        """Handle documentation_search request.

        Full-text searches the documentation via the ai_documentation_fts
        table and replies with `documentation_search_results` including a
        highlighted snippet per hit. Echoes `request_id` when supplied.

        NOTE(review): snippet()/MATCH/rank are SQLite FTS5 features; this
        query presumably fails on PostgreSQL — confirm whether a separate
        code path exists for that backend.
        """
        query = data.get('query', '')
        limit = data.get('limit', 20)

        conn = get_db_connection()
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # snippet(..., '<mark>', '</mark>', '...', 50) returns an excerpt of
        # the matched content with the hit wrapped in <mark> tags.
        cursor.execute("""
            SELECT d.id, d.title, d.category, d.version, d.updated_at, snippet(ai_documentation_fts, 1, '<mark>', '</mark>', '...', 50) as snippet
            FROM ai_documentation_fts fts
            JOIN ai_documentation d ON d.id = fts.rowid
            WHERE ai_documentation_fts MATCH ?
            ORDER BY rank
            LIMIT ?
        """, (query, limit))

        docs = []
        for row in cursor.fetchall():
            docs.append({
                'id': row['id'],
                'title': row['title'],
                'category': row['category'],
                'version': row['version'],
                'updated_at': row['updated_at'],
                'snippet': row['snippet']
            })

        conn.close()

        # Include request_id if present
        request_id = data.get('request_id')
        response = {
            'type': 'documentation_search_results',
            'query': query,
            'results': docs,
            'count': len(docs),
            'timestamp': datetime.now().isoformat()
        }
        if request_id:
            response['request_id'] = request_id

        await self.clients[sender_id].send(json.dumps(response))

        print(f"📚 AI {sender_id} searched for '{query}', found {len(docs)} results")
2514
+
2515
    async def start_server(self):
        """Start the websocket server and serve until cancelled.

        Awaiting a bare asyncio.Future() never completes, so the server
        runs until the task is cancelled (e.g. KeyboardInterrupt).
        """
        async with websockets.serve(self.handle_client, self.host, self.port):
            await asyncio.Future()
2519
+
2520
+
2521
+ async def main():
2522
+ """Main entry point"""
2523
+ import argparse
2524
+
2525
+ parser = argparse.ArgumentParser(description='CloudBrain Server - AI Collaboration System')
2526
+ parser.add_argument('--host', type=str, default='127.0.0.1',
2527
+ help='Server host')
2528
+ parser.add_argument('--port', type=int, default=8766,
2529
+ help='Server port')
2530
+ parser.add_argument('--db-path', type=str, default='ai_db/cloudbrain.db',
2531
+ help='Database path')
2532
+
2533
+ args = parser.parse_args()
2534
+
2535
+ print_banner()
2536
+
2537
+ # Acquire server lock (only one instance per machine)
2538
+ if not acquire_server_lock():
2539
+ print()
2540
+ print("❌ Cannot start server: Another instance is already running on this machine.")
2541
+ print("💡 Only one CloudBrain server instance is allowed per machine on port 8766.")
2542
+ print("💡 This prevents fragmentation and ensures all AIs connect to the same server.")
2543
+ sys.exit(1)
719
2544
 
720
2545
  server = CloudBrainServer(
721
- host='127.0.0.1',
722
- port=8766,
723
- db_path='ai_db/cloudbrain.db'
2546
+ host=args.host,
2547
+ port=args.port,
2548
+ db_path=args.db_path
724
2549
  )
725
2550
 
726
2551
  if is_server_running(server.host, server.port):