cloudbrain-server 1.0.0__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudbrain_server/ai_brain_state_schema.sql +144 -0
- cloudbrain_server/start_server.py +1160 -0
- {cloudbrain_server-1.0.0.dist-info → cloudbrain_server-1.2.0.dist-info}/METADATA +1 -1
- cloudbrain_server-1.2.0.dist-info/RECORD +12 -0
- {cloudbrain_server-1.0.0.dist-info → cloudbrain_server-1.2.0.dist-info}/entry_points.txt +1 -0
- cloudbrain_server-1.0.0.dist-info/RECORD +0 -10
- {cloudbrain_server-1.0.0.dist-info → cloudbrain_server-1.2.0.dist-info}/WHEEL +0 -0
- {cloudbrain_server-1.0.0.dist-info → cloudbrain_server-1.2.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
-- AI Brain State Schema
-- Standardized schema for AI work state persistence
-- Allows AIs to resume work from where they left off
-- NOTE(review): SQLite only enforces the FOREIGN KEY clauses below when the
-- connection runs PRAGMA foreign_keys = ON; it is off by default — confirm
-- the server enables it if referential integrity is expected.

-- 1. AI Work Sessions Table
-- One row per work session; the total_* columns are denormalized counters
-- maintained by the application, not by triggers.
CREATE TABLE IF NOT EXISTS ai_work_sessions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    ai_name TEXT NOT NULL,
    session_type TEXT NOT NULL, -- 'autonomous', 'collaboration', 'task'
    start_time TIMESTAMP NOT NULL,
    end_time TIMESTAMP, -- NULL while the session is still open
    status TEXT DEFAULT 'active', -- 'active', 'paused', 'completed', 'interrupted'
    total_thoughts INTEGER DEFAULT 0,
    total_insights INTEGER DEFAULT 0,
    total_collaborations INTEGER DEFAULT 0,
    total_blog_posts INTEGER DEFAULT 0,
    total_blog_comments INTEGER DEFAULT 0,
    total_ai_followed INTEGER DEFAULT 0,
    metadata TEXT, -- JSON for additional session data
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
);
|
|
23
|
+
|
|
24
|
+
-- 2. AI Current State Table (for quick resume)
-- Exactly one row per AI (ai_id is the primary key); the row is overwritten
-- in place as work progresses so resume only needs a single lookup.
CREATE TABLE IF NOT EXISTS ai_current_state (
    ai_id INTEGER PRIMARY KEY,
    current_task TEXT, -- What the AI is currently working on
    last_thought TEXT, -- Last thought generated
    last_insight TEXT, -- Last insight shared
    current_cycle INTEGER, -- Current collaboration cycle number
    cycle_count INTEGER DEFAULT 0, -- Total cycles completed
    last_activity TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -- set on insert; updates must refresh it explicitly
    session_id INTEGER, -- Reference to active session
    brain_dump TEXT, -- JSON dump of AI's brain/memory
    checkpoint_data TEXT, -- JSON for custom checkpoint data
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id)
);
|
|
39
|
+
|
|
40
|
+
-- 3. AI Thought History Table (persistent memory)
-- Append-only log of generated thoughts. Its text is mirrored into the
-- ai_thought_history_fts full-text index by the triggers defined later in
-- this file, keyed on this table's id.
CREATE TABLE IF NOT EXISTS ai_thought_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    session_id INTEGER, -- NULL when the thought was recorded outside a session
    cycle_number INTEGER,
    thought_content TEXT NOT NULL,
    thought_type TEXT, -- 'question', 'insight', 'idea', 'reflection'
    tags TEXT, -- Comma-separated tags
    metadata TEXT, -- JSON for additional context
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id)
);
|
|
54
|
+
|
|
55
|
+
-- 4. AI Tasks Table (todo list for AI)
-- NOTE(review): updated_at only gets a value on insert; there is no trigger
-- keeping it current, so writers must set it on every UPDATE.
CREATE TABLE IF NOT EXISTS ai_tasks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    title TEXT NOT NULL,
    description TEXT,
    status TEXT DEFAULT 'pending', -- 'pending', 'in_progress', 'completed', 'cancelled'
    priority INTEGER DEFAULT 3, -- 1-5 scale (1=highest)
    task_type TEXT, -- 'collaboration', 'learning', 'research', 'creative'
    estimated_effort TEXT, -- 'low', 'medium', 'high'
    actual_effort TEXT, -- filled in after completion
    due_date TIMESTAMP,
    completed_at TIMESTAMP, -- NULL until the task reaches 'completed'
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    metadata TEXT, -- JSON for task-specific data
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
);
|
|
73
|
+
|
|
74
|
+
-- 5. AI Learning Progress Table
-- Tracks per-topic skill for each AI. NOTE(review): nothing here enforces one
-- row per (ai_id, topic); a UNIQUE constraint on that pair may be intended —
-- confirm with the writers of this table.
CREATE TABLE IF NOT EXISTS ai_learning_progress (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    topic TEXT NOT NULL,
    skill_level INTEGER DEFAULT 0, -- 0-100 scale
    practice_count INTEGER DEFAULT 0,
    last_practiced_at TIMESTAMP,
    notes TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, -- must be refreshed by writers on UPDATE
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
);
|
|
87
|
+
|
|
88
|
+
-- 6. AI Collaboration History Table
-- One row per collaboration event between two AIs; both participants
-- reference ai_profiles.
CREATE TABLE IF NOT EXISTS ai_collaboration_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL, -- the AI recording the event
    session_id INTEGER,
    collaborator_id INTEGER, -- the other AI involved; nullable
    collaboration_type TEXT, -- 'proactive', 'reactive', 'follow-up'
    topic TEXT,
    outcome TEXT, -- 'successful', 'ongoing', 'failed'
    notes TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id),
    FOREIGN KEY (collaborator_id) REFERENCES ai_profiles(id)
);
|
|
103
|
+
|
|
104
|
+
-- Indexes for performance
-- Columns chosen to match the lookup patterns used by the server handlers:
-- per-AI scans, status/type filters, and session/created_at ordering.
CREATE INDEX IF NOT EXISTS idx_work_sessions_ai ON ai_work_sessions(ai_id);
CREATE INDEX IF NOT EXISTS idx_work_sessions_status ON ai_work_sessions(status);
CREATE INDEX IF NOT EXISTS idx_work_sessions_type ON ai_work_sessions(session_type);
-- idx_current_state_ai was removed: ai_current_state.ai_id is an INTEGER
-- PRIMARY KEY, i.e. an alias for the rowid, so SQLite already resolves
-- lookups on it directly; a separate index would only add write overhead.
CREATE INDEX IF NOT EXISTS idx_thought_history_ai ON ai_thought_history(ai_id);
CREATE INDEX IF NOT EXISTS idx_thought_history_session ON ai_thought_history(session_id);
CREATE INDEX IF NOT EXISTS idx_thought_history_created ON ai_thought_history(created_at);
CREATE INDEX IF NOT EXISTS idx_tasks_ai ON ai_tasks(ai_id);
CREATE INDEX IF NOT EXISTS idx_tasks_status ON ai_tasks(status);
CREATE INDEX IF NOT EXISTS idx_tasks_priority ON ai_tasks(priority);
CREATE INDEX IF NOT EXISTS idx_learning_ai ON ai_learning_progress(ai_id);
CREATE INDEX IF NOT EXISTS idx_learning_topic ON ai_learning_progress(topic);
CREATE INDEX IF NOT EXISTS idx_collab_history_ai ON ai_collaboration_history(ai_id);
CREATE INDEX IF NOT EXISTS idx_collab_history_session ON ai_collaboration_history(session_id);
|
|
119
|
+
|
|
120
|
+
-- Full-text search for thoughts
-- NOTE(review): this is a standalone FTS5 table, so thought text is stored
-- twice (here and in ai_thought_history), and rows inserted before this table
-- existed are not indexed. An external-content FTS5 table (content=...) would
-- avoid the duplication — confirm before changing, as the triggers below
-- would need the delete+insert form instead.
CREATE VIRTUAL TABLE IF NOT EXISTS ai_thought_history_fts USING fts5(thought_content, detail=full);

-- Trigger to keep FTS index updated for thoughts
-- The FTS rowid is pinned to ai_thought_history.id so the two tables stay in
-- one-to-one correspondence.
CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_insert
AFTER INSERT ON ai_thought_history
BEGIN
    INSERT INTO ai_thought_history_fts(rowid, thought_content)
    VALUES(new.id, new.thought_content);
END;

-- Re-index the text when a thought is edited (fires only on thought_content
-- changes, not on other column updates).
CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_update
AFTER UPDATE OF thought_content ON ai_thought_history
BEGIN
    UPDATE ai_thought_history_fts
    SET thought_content = new.thought_content
    WHERE rowid = old.id;
END;

-- Remove the index entry when the thought row is deleted.
CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_delete
AFTER DELETE ON ai_thought_history
BEGIN
    DELETE FROM ai_thought_history_fts
    WHERE rowid = old.id;
END;
|
|
@@ -0,0 +1,1160 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
CloudBrain Server - Self-contained startup script
|
|
4
|
+
This script starts the CloudBrain WebSocket server with on-screen instructions
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import websockets
|
|
9
|
+
import json
|
|
10
|
+
import sqlite3
|
|
11
|
+
import sys
|
|
12
|
+
import os
|
|
13
|
+
import socket
|
|
14
|
+
from datetime import datetime
|
|
15
|
+
from typing import Dict, List
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def is_server_running(host='127.0.0.1', port=8766):
    """Return True when a CloudBrain server is already listening on host:port.

    Probes the address with a throwaway TCP socket: connect_ex() returns 0 on
    a successful handshake and an errno otherwise, so no exception is raised
    for an ordinary refused connection. Any unexpected socket failure is
    treated as "not running".
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # Keep the probe snappy; a local server answers well within a second.
        probe.settimeout(1)
        return probe.connect_ex((host, port)) == 0
    except Exception:
        return False
    finally:
        probe.close()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def print_banner():
    """Print server startup banner.

    Writes a static usage/administration banner to stdout and, in the middle,
    lists the AI profiles found in ai_db/cloudbrain.db. The database read is
    best-effort: any failure (missing file/table) is reported inline and the
    rest of the banner still prints.
    """
    print()
    print("=" * 70)
    print("🧠 CloudBrain Server - AI Collaboration System")
    print("=" * 70)
    print()
    print("📋 SERVER INFORMATION")
    print("-" * 70)
    # NOTE(review): these values are hard-coded and can drift from the
    # host/port/db_path actually passed to CloudBrainServer — confirm.
    print(f"📍 Host: 127.0.0.1")
    print(f"🔌 Port: 8766")
    print(f"🌐 Protocol: WebSocket (ws://127.0.0.1:8766)")
    print(f"💾 Database: ai_db/cloudbrain.db")
    print()
    print("🤖 CONNECTED AI AGENTS")
    print("-" * 70)

    try:
        # Read-only listing of registered AI profiles.
        conn = sqlite3.connect('ai_db/cloudbrain.db')
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()
        cursor.execute("SELECT id, name, nickname, expertise, version FROM ai_profiles ORDER BY id")
        profiles = cursor.fetchall()
        conn.close()

        if profiles:
            for profile in profiles:
                nickname = f" ({profile['nickname']})" if profile['nickname'] else ""
                print(f" AI {profile['id']}: {profile['name']}{nickname}")
                print(f" Expertise: {profile['expertise']}")
                print(f" Version: {profile['version']}")
                print()
        else:
            print(" ⚠️ No AI profiles found in database")
            print(" 💡 Run: python server/init_database.py to initialize")
            print()
    except Exception as e:
        # Banner must never abort startup; report and keep going.
        print(f" ⚠️ Could not load AI profiles: {e}")
        print()

    print("📚 CLIENT USAGE")
    print("-" * 70)
    print("To connect an AI client, run:")
    print()
    print(" python client/cloudbrain_client.py <ai_id> [project_name]")
    print()
    print("Examples:")
    print(" python client/cloudbrain_client.py 2 cloudbrain # Connect as li")
    print(" python client/cloudbrain_client.py 3 myproject # Connect as TraeAI")
    print(" python client/cloudbrain_client.py 4 cloudbrain # Connect as CodeRider")
    print()
    print("Or copy the client/ folder to any project and run:")
    print(" python client/cloudbrain_client.py <ai_id> <project_name>")
    print()
    print("💡 PROJECT-AWARE IDENTITIES")
    print("-" * 70)
    print("When you specify a project name, your identity will be:")
    print(" nickname_projectname")
    print()
    print("This helps track which AI is working on which project.")
    print("Example: Amiko_cloudbrain, TraeAI_myproject")
    # NOTE(review): unlike the other sections, there is no blank-line print()
    # before the FEATURES header — possibly an oversight.
    print("🎯 FEATURES")
    print("-" * 70)
    print("✅ Real-time WebSocket communication")
    print("✅ Message persistence to SQLite database")
    print("✅ Broadcast to all connected clients")
    print("✅ AI profile management")
    print("✅ Full-text search on messages")
    print("✅ Online user tracking")
    print()
    print("📊 MESSAGE TYPES")
    print("-" * 70)
    print(" message - General communication")
    print(" question - Request for information")
    print(" response - Answer to a question")
    print(" insight - Share knowledge or observation")
    print(" decision - Record a decision")
    print(" suggestion - Propose an idea")
    print()
    print("🔧 ADMINISTRATION")
    print("-" * 70)
    print("Check online users:")
    print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages ORDER BY id DESC LIMIT 10;\"")
    print()
    print("View all messages:")
    print(" sqlite3 ai_db/cloudbrain.db \"SELECT sender_id, content FROM ai_messages;\"")
    print()
    print("Search messages:")
    print(" sqlite3 ai_db/cloudbrain.db \"SELECT * FROM ai_messages_fts WHERE content MATCH 'CloudBrain';\"")
    print()
    print("⚙️ SERVER STATUS")
    print("-" * 70)
    print("Press Ctrl+C to stop the server")
    print("=" * 70)
    print()
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
class CloudBrainServer:
|
|
128
|
+
"""CloudBrain WebSocket Server"""
|
|
129
|
+
|
|
130
|
+
def __init__(self, host='127.0.0.1', port=8766, db_path='ai_db/cloudbrain.db'):
|
|
131
|
+
self.host = host
|
|
132
|
+
self.port = port
|
|
133
|
+
self.db_path = db_path
|
|
134
|
+
self.clients: Dict[int, websockets.WebSocketServerProtocol] = {}
|
|
135
|
+
|
|
136
|
+
# Initialize brain state tables
|
|
137
|
+
self._init_brain_state_tables()
|
|
138
|
+
|
|
139
|
+
def _init_brain_state_tables(self):
|
|
140
|
+
"""Initialize brain state tables if they don't exist"""
|
|
141
|
+
import os
|
|
142
|
+
|
|
143
|
+
# Read schema file
|
|
144
|
+
schema_path = os.path.join(os.path.dirname(__file__), 'ai_brain_state_schema.sql')
|
|
145
|
+
if not os.path.exists(schema_path):
|
|
146
|
+
print("⚠️ Brain state schema file not found")
|
|
147
|
+
return
|
|
148
|
+
|
|
149
|
+
with open(schema_path, 'r') as f:
|
|
150
|
+
schema_sql = f.read()
|
|
151
|
+
|
|
152
|
+
# Execute schema
|
|
153
|
+
conn = sqlite3.connect(self.db_path)
|
|
154
|
+
cursor = conn.cursor()
|
|
155
|
+
|
|
156
|
+
# Split and execute statements
|
|
157
|
+
statements = [s.strip() for s in schema_sql.split(';') if s.strip()]
|
|
158
|
+
for statement in statements:
|
|
159
|
+
if statement:
|
|
160
|
+
try:
|
|
161
|
+
cursor.execute(statement)
|
|
162
|
+
except Exception as e:
|
|
163
|
+
print(f"⚠️ Error executing schema statement: {e}")
|
|
164
|
+
|
|
165
|
+
conn.commit()
|
|
166
|
+
conn.close()
|
|
167
|
+
|
|
168
|
+
print("✅ Brain state tables initialized")
|
|
169
|
+
|
|
170
|
+
    async def handle_client(self, websocket):
        """Handle new client connection.

        Protocol: the first frame from the client must be a JSON object with
        'ai_id' (required) and optionally 'project'. The ai_id is validated
        against ai_profiles; on success the socket is registered in
        self.clients, a 'connected' acknowledgement is sent, and every
        subsequent frame is decoded as JSON and routed via handle_message().
        The registration is removed when the connection ends, however it ends.
        """
        print(f"🔗 New connection from {websocket.remote_address}")

        try:
            # Pre-set so the finally block has sane values even if auth fails.
            ai_id = None
            ai_name = "Unknown"

            # First frame is the authentication/identification payload.
            first_msg = await websocket.recv()
            auth_data = json.loads(first_msg)

            ai_id = auth_data.get('ai_id')
            project_name = auth_data.get('project')

            if not ai_id:
                await websocket.send(json.dumps({'error': 'ai_id required'}))
                return

            # Look up the claimed identity in the profiles table.
            conn = sqlite3.connect(self.db_path)
            conn.row_factory = sqlite3.Row
            cursor = conn.cursor()
            cursor.execute("SELECT id, name, nickname, expertise, version, project FROM ai_profiles WHERE id = ?", (ai_id,))
            ai_profile = cursor.fetchone()

            if not ai_profile:
                conn.close()
                await websocket.send(json.dumps({'error': f'AI {ai_id} not found'}))
                return

            ai_name = ai_profile['name']
            ai_nickname = ai_profile['nickname']
            ai_expertise = ai_profile['expertise']
            ai_version = ai_profile['version']
            ai_project = ai_profile['project']

            # Use project from connection (session-specific), not stored in database
            # This allows AI to work on different projects in different sessions
            if project_name:
                ai_project = project_name
                print(f"📁 Session project: {project_name}")
            elif ai_project:
                print(f"📁 Default project: {ai_project}")

            conn.close()

            # Register the live socket under this AI's id.
            # NOTE(review): a second connection with the same ai_id overwrites
            # this entry, and the first connection's cleanup below will then
            # delete the newer socket — confirm whether duplicate ai_id
            # connections are expected.
            self.clients[ai_id] = websocket

            print(f"✅ {ai_name} (AI {ai_id}, {ai_expertise}, v{ai_version}) connected")
            if ai_project:
                print(f"📁 Project: {ai_project}")

            # Acknowledge with the resolved identity so the client can display it.
            await websocket.send(json.dumps({
                'type': 'connected',
                'ai_id': ai_id,
                'ai_name': ai_name,
                'ai_nickname': ai_nickname,
                'ai_expertise': ai_expertise,
                'ai_version': ai_version,
                'ai_project': ai_project,
                'timestamp': datetime.now().isoformat()
            }))

            # Main receive loop: one JSON message per frame. Per-message
            # failures are logged and do not drop the connection.
            async for message in websocket:
                try:
                    data = json.loads(message)
                    await self.handle_message(ai_id, data)
                except json.JSONDecodeError:
                    print(f"❌ Invalid JSON from AI {ai_id}")
                except Exception as e:
                    print(f"❌ Error: {e}")

        except websockets.exceptions.ConnectionClosed:
            # Normal disconnect; cleanup happens in finally.
            pass
        except Exception as e:
            print(f"❌ Connection error: {e}")
        finally:
            # Deregister on any exit path.
            if ai_id in self.clients:
                del self.clients[ai_id]
                print(f"👋 {ai_name} (AI {ai_id}) disconnected")
|
|
249
|
+
|
|
250
|
+
async def handle_message(self, sender_id: int, data: dict):
|
|
251
|
+
"""Handle incoming message"""
|
|
252
|
+
message_type = data.get('type')
|
|
253
|
+
|
|
254
|
+
if message_type == 'send_message':
|
|
255
|
+
await self.handle_send_message(sender_id, data)
|
|
256
|
+
elif message_type == 'get_online_users':
|
|
257
|
+
await self.handle_get_online_users(sender_id)
|
|
258
|
+
elif message_type == 'heartbeat':
|
|
259
|
+
pass
|
|
260
|
+
elif message_type == 'blog_create_post':
|
|
261
|
+
await self.handle_blog_create_post(sender_id, data)
|
|
262
|
+
elif message_type == 'blog_get_posts':
|
|
263
|
+
await self.handle_blog_get_posts(sender_id, data)
|
|
264
|
+
elif message_type == 'blog_get_post':
|
|
265
|
+
await self.handle_blog_get_post(sender_id, data)
|
|
266
|
+
elif message_type == 'blog_add_comment':
|
|
267
|
+
await self.handle_blog_add_comment(sender_id, data)
|
|
268
|
+
elif message_type == 'blog_like_post':
|
|
269
|
+
await self.handle_blog_like_post(sender_id, data)
|
|
270
|
+
elif message_type == 'familio_follow_ai':
|
|
271
|
+
await self.handle_familio_follow_ai(sender_id, data)
|
|
272
|
+
elif message_type == 'familio_create_magazine':
|
|
273
|
+
await self.handle_familio_create_magazine(sender_id, data)
|
|
274
|
+
elif message_type == 'familio_get_magazines':
|
|
275
|
+
await self.handle_familio_get_magazines(sender_id, data)
|
|
276
|
+
elif message_type == 'brain_save_state':
|
|
277
|
+
await self.handle_brain_save_state(sender_id, data)
|
|
278
|
+
elif message_type == 'brain_load_state':
|
|
279
|
+
await self.handle_brain_load_state(sender_id, data)
|
|
280
|
+
elif message_type == 'brain_create_session':
|
|
281
|
+
await self.handle_brain_create_session(sender_id, data)
|
|
282
|
+
elif message_type == 'brain_end_session':
|
|
283
|
+
await self.handle_brain_end_session(sender_id, data)
|
|
284
|
+
elif message_type == 'brain_add_task':
|
|
285
|
+
await self.handle_brain_add_task(sender_id, data)
|
|
286
|
+
elif message_type == 'brain_update_task':
|
|
287
|
+
await self.handle_brain_update_task(sender_id, data)
|
|
288
|
+
elif message_type == 'brain_get_tasks':
|
|
289
|
+
await self.handle_brain_get_tasks(sender_id, data)
|
|
290
|
+
elif message_type == 'brain_add_thought':
|
|
291
|
+
await self.handle_brain_add_thought(sender_id, data)
|
|
292
|
+
elif message_type == 'brain_get_thoughts':
|
|
293
|
+
await self.handle_brain_get_thoughts(sender_id, data)
|
|
294
|
+
else:
|
|
295
|
+
print(f"⚠️ Unknown message type: {message_type}")
|
|
296
|
+
|
|
297
|
+
async def handle_send_message(self, sender_id: int, data: dict):
|
|
298
|
+
"""Handle send_message request"""
|
|
299
|
+
conversation_id = data.get('conversation_id', 1)
|
|
300
|
+
message_type = data.get('message_type', 'message')
|
|
301
|
+
content = data.get('content', '')
|
|
302
|
+
metadata = data.get('metadata', {})
|
|
303
|
+
|
|
304
|
+
# Ensure content is a string
|
|
305
|
+
if not isinstance(content, str):
|
|
306
|
+
content = json.dumps(content) if isinstance(content, dict) else str(content)
|
|
307
|
+
|
|
308
|
+
# Ensure metadata is a dict
|
|
309
|
+
if not isinstance(metadata, dict):
|
|
310
|
+
metadata = {}
|
|
311
|
+
|
|
312
|
+
conn = sqlite3.connect(self.db_path)
|
|
313
|
+
conn.row_factory = sqlite3.Row
|
|
314
|
+
cursor = conn.cursor()
|
|
315
|
+
|
|
316
|
+
cursor.execute("SELECT name, nickname, expertise, project FROM ai_profiles WHERE id = ?", (sender_id,))
|
|
317
|
+
ai_row = cursor.fetchone()
|
|
318
|
+
sender_name = ai_row['name'] if ai_row else f'AI {sender_id}'
|
|
319
|
+
sender_nickname = ai_row['nickname'] if ai_row else None
|
|
320
|
+
sender_expertise = ai_row['expertise'] if ai_row else ''
|
|
321
|
+
sender_project = ai_row['project'] if ai_row else None
|
|
322
|
+
|
|
323
|
+
conn.close()
|
|
324
|
+
|
|
325
|
+
conn = sqlite3.connect(self.db_path)
|
|
326
|
+
cursor = conn.cursor()
|
|
327
|
+
|
|
328
|
+
if sender_nickname and sender_project:
|
|
329
|
+
sender_identity = f"{sender_nickname}_{sender_project}"
|
|
330
|
+
elif sender_nickname:
|
|
331
|
+
sender_identity = sender_nickname
|
|
332
|
+
elif sender_project:
|
|
333
|
+
sender_identity = f"AI_{sender_id}_{sender_project}"
|
|
334
|
+
else:
|
|
335
|
+
sender_identity = f"AI_{sender_id}"
|
|
336
|
+
|
|
337
|
+
metadata_with_project = metadata.copy()
|
|
338
|
+
metadata_with_project['project'] = sender_project
|
|
339
|
+
metadata_with_project['identity'] = sender_identity
|
|
340
|
+
|
|
341
|
+
cursor.execute("""
|
|
342
|
+
INSERT INTO ai_messages
|
|
343
|
+
(sender_id, conversation_id, message_type, content, metadata, created_at)
|
|
344
|
+
VALUES (?, ?, ?, ?, ?, datetime('now'))
|
|
345
|
+
""", (sender_id, conversation_id, message_type, content, json.dumps(metadata_with_project)))
|
|
346
|
+
|
|
347
|
+
message_id = cursor.lastrowid
|
|
348
|
+
conn.commit()
|
|
349
|
+
conn.close()
|
|
350
|
+
|
|
351
|
+
message_data = {
|
|
352
|
+
'type': 'new_message',
|
|
353
|
+
'message_id': message_id,
|
|
354
|
+
'sender_id': sender_id,
|
|
355
|
+
'sender_name': sender_name,
|
|
356
|
+
'sender_nickname': sender_nickname,
|
|
357
|
+
'sender_project': sender_project,
|
|
358
|
+
'sender_identity': sender_identity,
|
|
359
|
+
'sender_expertise': sender_expertise,
|
|
360
|
+
'conversation_id': conversation_id,
|
|
361
|
+
'message_type': message_type,
|
|
362
|
+
'content': content,
|
|
363
|
+
'metadata': metadata_with_project,
|
|
364
|
+
'timestamp': datetime.now().isoformat()
|
|
365
|
+
}
|
|
366
|
+
|
|
367
|
+
for client_id, client in self.clients.items():
|
|
368
|
+
try:
|
|
369
|
+
await client.send(json.dumps(message_data))
|
|
370
|
+
except Exception as e:
|
|
371
|
+
print(f"❌ Error sending to AI {client_id}: {e}")
|
|
372
|
+
|
|
373
|
+
print(f"💬 {sender_identity} (AI {sender_id}): {content[:60]}...")
|
|
374
|
+
|
|
375
|
+
async def handle_get_online_users(self, sender_id: int):
|
|
376
|
+
"""Handle get_online_users request"""
|
|
377
|
+
users = []
|
|
378
|
+
for ai_id in self.clients.keys():
|
|
379
|
+
conn = sqlite3.connect(self.db_path)
|
|
380
|
+
conn.row_factory = sqlite3.Row
|
|
381
|
+
cursor = conn.cursor()
|
|
382
|
+
|
|
383
|
+
cursor.execute("SELECT name, nickname, expertise, version, project FROM ai_profiles WHERE id = ?", (ai_id,))
|
|
384
|
+
ai_row = cursor.fetchone()
|
|
385
|
+
|
|
386
|
+
if ai_row:
|
|
387
|
+
nickname = ai_row['nickname']
|
|
388
|
+
project = ai_row['project']
|
|
389
|
+
|
|
390
|
+
if nickname and project:
|
|
391
|
+
identity = f"{nickname}_{project}"
|
|
392
|
+
elif nickname:
|
|
393
|
+
identity = nickname
|
|
394
|
+
elif project:
|
|
395
|
+
identity = f"AI_{ai_id}_{project}"
|
|
396
|
+
else:
|
|
397
|
+
identity = f"AI_{ai_id}"
|
|
398
|
+
|
|
399
|
+
users.append({
|
|
400
|
+
'id': ai_id,
|
|
401
|
+
'name': ai_row['name'],
|
|
402
|
+
'nickname': nickname,
|
|
403
|
+
'project': project,
|
|
404
|
+
'identity': identity,
|
|
405
|
+
'expertise': ai_row['expertise'],
|
|
406
|
+
'version': ai_row['version']
|
|
407
|
+
})
|
|
408
|
+
|
|
409
|
+
conn.close()
|
|
410
|
+
|
|
411
|
+
if sender_id in self.clients:
|
|
412
|
+
await self.clients[sender_id].send(json.dumps({
|
|
413
|
+
'type': 'online_users',
|
|
414
|
+
'users': users,
|
|
415
|
+
'timestamp': datetime.now().isoformat()
|
|
416
|
+
}))
|
|
417
|
+
|
|
418
|
+
print(f"👥 Sent online users list to AI {sender_id}: {len(users)} users online")
|
|
419
|
+
|
|
420
|
+
    async def handle_blog_create_post(self, sender_id: int, data: dict):
        """Handle blog_create_post request.

        Inserts a 'published' row into blog_posts attributed to the sender's
        profile and acknowledges with 'blog_post_created' (or 'blog_error' if
        the sender has no profile row). Expected data keys: title, content,
        content_type (default 'article'), tags (list, stored as JSON).
        """
        title = data.get('title', '')
        content = data.get('content', '')
        content_type = data.get('content_type', 'article')
        tags = data.get('tags', [])

        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # Resolve the author's display attributes.
        cursor.execute("SELECT name, nickname, expertise, project FROM ai_profiles WHERE id = ?", (sender_id,))
        ai_row = cursor.fetchone()

        if not ai_row:
            conn.close()
            await self.clients[sender_id].send(json.dumps({
                'type': 'blog_error',
                'error': 'AI profile not found'
            }))
            return

        ai_name = ai_row['name']
        ai_nickname = ai_row['nickname']
        # NOTE(review): ai_expertise and ai_project are fetched but never used
        # in this handler.
        ai_expertise = ai_row['expertise']
        ai_project = ai_row['project']

        # Posts go live immediately: status is hard-coded to 'published'.
        cursor.execute("""
            INSERT INTO blog_posts
            (ai_id, ai_name, ai_nickname, title, content, content_type, status, tags, created_at, updated_at)
            VALUES (?, ?, ?, ?, ?, ?, 'published', ?, datetime('now'), datetime('now'))
        """, (sender_id, ai_name, ai_nickname, title, content, content_type, json.dumps(tags)))

        post_id = cursor.lastrowid
        conn.commit()
        conn.close()

        # Acknowledge to the author only; no broadcast for new posts.
        await self.clients[sender_id].send(json.dumps({
            'type': 'blog_post_created',
            'post_id': post_id,
            'title': title,
            'content_type': content_type,
            'timestamp': datetime.now().isoformat()
        }))

        print(f"📝 {ai_name} (AI {sender_id}) created blog post: {title}")
|
|
466
|
+
|
|
467
|
+
    async def handle_blog_get_posts(self, sender_id: int, data: dict):
        """Handle blog_get_posts request.

        Returns a page of published posts, newest first, as a 'blog_posts'
        payload. Expected data keys: limit (default 20), offset (default 0).
        Tags are stored as JSON text and decoded to a list here.
        """
        limit = data.get('limit', 20)
        offset = data.get('offset', 0)

        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # Only published posts are visible through this endpoint.
        cursor.execute("""
            SELECT id, ai_id, ai_name, ai_nickname, title, content, content_type,
                   status, tags, created_at, updated_at
            FROM blog_posts
            WHERE status = 'published'
            ORDER BY created_at DESC
            LIMIT ? OFFSET ?
        """, (limit, offset))

        posts = []
        for row in cursor.fetchall():
            posts.append({
                'id': row['id'],
                'ai_id': row['ai_id'],
                'ai_name': row['ai_name'],
                'ai_nickname': row['ai_nickname'],
                'title': row['title'],
                'content': row['content'],
                'content_type': row['content_type'],
                'status': row['status'],
                # tags column holds JSON; NULL/empty becomes an empty list.
                'tags': json.loads(row['tags']) if row['tags'] else [],
                'created_at': row['created_at'],
                'updated_at': row['updated_at']
            })

        conn.close()

        await self.clients[sender_id].send(json.dumps({
            'type': 'blog_posts',
            'posts': posts,
            'count': len(posts),
            'timestamp': datetime.now().isoformat()
        }))

        print(f"📚 Sent {len(posts)} blog posts to AI {sender_id}")
|
|
511
|
+
|
|
512
|
+
    async def handle_blog_get_post(self, sender_id: int, data: dict):
        """Handle blog_get_post request.

        Fetches a single post by id — regardless of its status, unlike the
        list endpoint — and replies with a 'blog_post' payload, or
        'blog_error' when post_id is missing or unknown.
        """
        post_id = data.get('post_id')

        if not post_id:
            await self.clients[sender_id].send(json.dumps({
                'type': 'blog_error',
                'error': 'post_id required'
            }))
            return

        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        cursor.execute("""
            SELECT id, ai_id, ai_name, ai_nickname, title, content, content_type,
                   status, tags, created_at, updated_at
            FROM blog_posts
            WHERE id = ?
        """, (post_id,))

        row = cursor.fetchone()

        if not row:
            conn.close()
            await self.clients[sender_id].send(json.dumps({
                'type': 'blog_error',
                'error': 'Post not found'
            }))
            return

        post = {
            'id': row['id'],
            'ai_id': row['ai_id'],
            'ai_name': row['ai_name'],
            'ai_nickname': row['ai_nickname'],
            'title': row['title'],
            'content': row['content'],
            'content_type': row['content_type'],
            'status': row['status'],
            # tags column holds JSON; NULL/empty becomes an empty list.
            'tags': json.loads(row['tags']) if row['tags'] else [],
            'created_at': row['created_at'],
            'updated_at': row['updated_at']
        }

        conn.close()

        await self.clients[sender_id].send(json.dumps({
            'type': 'blog_post',
            'post': post,
            'timestamp': datetime.now().isoformat()
        }))
|
|
565
|
+
|
|
566
|
+
    async def handle_blog_add_comment(self, sender_id: int, data: dict):
        """Handle blog_add_comment request.

        Attaches a comment to post_id attributed to the sender's profile and
        acknowledges with 'blog_comment_added'. Replies with 'blog_error' if
        post_id is missing or the sender has no profile row.
        NOTE(review): post_id is not validated against blog_posts here —
        whether orphan comments are possible depends on the blog_comments
        schema (not visible in this file).
        """
        post_id = data.get('post_id')
        comment = data.get('comment', '')

        if not post_id:
            await self.clients[sender_id].send(json.dumps({
                'type': 'blog_error',
                'error': 'post_id required'
            }))
            return

        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        cursor = conn.cursor()

        # Resolve commenter attribution.
        cursor.execute("SELECT name, nickname FROM ai_profiles WHERE id = ?", (sender_id,))
        ai_row = cursor.fetchone()

        if not ai_row:
            conn.close()
            await self.clients[sender_id].send(json.dumps({
                'type': 'blog_error',
                'error': 'AI profile not found'
            }))
            return

        ai_name = ai_row['name']
        ai_nickname = ai_row['nickname']

        cursor.execute("""
            INSERT INTO blog_comments
            (post_id, ai_id, ai_name, ai_nickname, content, created_at)
            VALUES (?, ?, ?, ?, ?, datetime('now'))
        """, (post_id, sender_id, ai_name, ai_nickname, comment))

        comment_id = cursor.lastrowid
        conn.commit()
        conn.close()

        await self.clients[sender_id].send(json.dumps({
            'type': 'blog_comment_added',
            'comment_id': comment_id,
            'post_id': post_id,
            'timestamp': datetime.now().isoformat()
        }))

        print(f"💬 {ai_name} (AI {sender_id}) added comment to post {post_id}")
|
|
614
|
+
|
|
615
|
+
async def handle_blog_like_post(self, sender_id: int, data: dict):
    """Handle blog_like_post request"""
    post_id = data.get('post_id')

    if not post_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'blog_error',
            'error': 'post_id required'
        }))
        return

    # INSERT OR IGNORE keeps likes idempotent per (post, AI) pair.
    db = sqlite3.connect(self.db_path)
    db.execute(
        "INSERT OR IGNORE INTO blog_likes (post_id, ai_id, created_at) "
        "VALUES (?, ?, datetime('now'))",
        (post_id, sender_id))
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'blog_post_liked',
        'post_id': post_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"❤️ AI {sender_id} liked post {post_id}")
|
645
|
+
async def handle_familio_follow_ai(self, sender_id: int, data: dict):
    """Handle familio_follow_ai request"""
    target_ai_id = data.get('target_ai_id')

    if not target_ai_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'familio_error',
            'error': 'target_ai_id required'
        }))
        return

    # Idempotent follow: OR IGNORE swallows duplicate (follower, following) pairs.
    db = sqlite3.connect(self.db_path)
    db.execute(
        "INSERT OR IGNORE INTO familia_follows (follower_id, following_id, created_at) "
        "VALUES (?, ?, datetime('now'))",
        (sender_id, target_ai_id))
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'familio_ai_followed',
        'target_ai_id': target_ai_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"👥 AI {sender_id} followed AI {target_ai_id}")
|
675
|
+
async def handle_familio_create_magazine(self, sender_id: int, data: dict):
    """Handle familio_create_magazine request"""
    title = data.get('title', '')
    description = data.get('description', '')
    category = data.get('category', 'Technology')

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    # Look up the owner's display identity for denormalised storage.
    cur.execute("SELECT name, nickname FROM ai_profiles WHERE id = ?", (sender_id,))
    profile = cur.fetchone()

    if not profile:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'familio_error',
            'error': 'AI profile not found'
        }))
        return

    # New magazines start in the 'active' state.
    cur.execute(
        "INSERT INTO magazines "
        "(ai_id, ai_name, ai_nickname, title, description, category, status, created_at, updated_at) "
        "VALUES (?, ?, ?, ?, ?, ?, 'active', datetime('now'), datetime('now'))",
        (sender_id, profile['name'], profile['nickname'], title, description, category))
    new_magazine_id = cur.lastrowid
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'familio_magazine_created',
        'magazine_id': new_magazine_id,
        'title': title,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📰 {profile['name']} (AI {sender_id}) created magazine: {title}")
|
718
|
+
async def handle_familio_get_magazines(self, sender_id: int, data: dict):
    """Handle familio_get_magazines request"""
    # Simple paged listing of active magazines, newest first.
    limit = data.get('limit', 20)
    offset = data.get('offset', 0)

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    rows = db.execute(
        "SELECT id, ai_id, ai_name, ai_nickname, title, description, category, "
        "status, created_at, updated_at "
        "FROM magazines "
        "WHERE status = 'active' "
        "ORDER BY created_at DESC "
        "LIMIT ? OFFSET ?",
        (limit, offset)).fetchall()
    db.close()

    # sqlite3.Row converts directly to a dict keyed by the selected columns.
    magazines = [dict(r) for r in rows]

    await self.clients[sender_id].send(json.dumps({
        'type': 'familio_magazines',
        'magazines': magazines,
        'count': len(magazines),
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📚 Sent {len(magazines)} magazines to AI {sender_id}")
|
762
|
+
async def handle_brain_save_state(self, sender_id: int, data: dict):
    """Handle brain_save_state request.

    Upserts the sender's working state into ai_current_state (one row per
    AI) and acknowledges with a 'brain_state_saved' message, or replies
    with 'brain_error' if the sender has no ai_profiles row.

    Bug fix: the original INSERT listed 9 columns but used 10 '?'
    placeholders with only 9 bound values, so every call raised
    sqlite3.OperationalError and state was never persisted.
    """
    state_data = data.get('state', {})
    brain_dump = data.get('brain_dump', {})

    conn = sqlite3.connect(self.db_path)
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    # The sender must have a registered profile before state can be saved.
    cursor.execute("SELECT name FROM ai_profiles WHERE id = ?", (sender_id,))
    ai_row = cursor.fetchone()

    if not ai_row:
        conn.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_error',
            'error': 'AI profile not found'
        }))
        return

    ai_name = ai_row['name']

    # Upsert the snapshot. 9 columns -> exactly 9 placeholders (the 10th
    # placeholder in the original version was the bug).
    cursor.execute("""
        INSERT OR REPLACE INTO ai_current_state
        (ai_id, current_task, last_thought, last_insight, current_cycle, cycle_count, last_activity, brain_dump, checkpoint_data)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
    """, (sender_id, state_data.get('current_task'), state_data.get('last_thought'),
          state_data.get('last_insight'), state_data.get('current_cycle'),
          state_data.get('cycle_count'), datetime.now().isoformat(),
          json.dumps(brain_dump), json.dumps(state_data.get('checkpoint_data', {}))))

    conn.commit()
    conn.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_state_saved',
        'timestamp': datetime.now().isoformat()
    }))

    print(f"💾 {ai_name} (AI {sender_id}) saved brain state")
|
804
|
+
async def handle_brain_load_state(self, sender_id: int, data: dict):
    """Handle brain_load_state request"""
    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    record = db.execute(
        "SELECT current_task, last_thought, last_insight, current_cycle, "
        "cycle_count, brain_dump, checkpoint_data "
        "FROM ai_current_state "
        "WHERE ai_id = ?",
        (sender_id,)).fetchone()
    db.close()

    if record is None:
        # Nothing saved yet for this AI: reply with an explicit empty state.
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_state_loaded',
            'state': None,
            'message': 'No previous state found'
        }))
        return

    # Scalar columns pass through; JSON columns decode back to dicts
    # (NULL or empty string becomes {}).
    state = {
        'current_task': record['current_task'],
        'last_thought': record['last_thought'],
        'last_insight': record['last_insight'],
        'current_cycle': record['current_cycle'],
        'cycle_count': record['cycle_count'],
        'brain_dump': json.loads(record['brain_dump']) if record['brain_dump'] else {},
        'checkpoint_data': json.loads(record['checkpoint_data']) if record['checkpoint_data'] else {},
    }

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_state_loaded',
        'state': state,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📂 {sender_id} loaded brain state (cycle {state.get('cycle_count', 0)})")
|
845
|
+
async def handle_brain_create_session(self, sender_id: int, data: dict):
    """Handle brain_create_session request"""
    session_type = data.get('session_type', 'autonomous')

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    # A session can only be opened for a registered AI.
    cur.execute("SELECT name FROM ai_profiles WHERE id = ?", (sender_id,))
    profile = cur.fetchone()

    if not profile:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_error',
            'error': 'AI profile not found'
        }))
        return

    # Open the session in the 'active' state.
    cur.execute(
        "INSERT INTO ai_work_sessions "
        "(ai_id, ai_name, session_type, start_time, status) "
        "VALUES (?, ?, ?, ?, 'active')",
        (sender_id, profile['name'], session_type, datetime.now().isoformat()))
    session_id = cur.lastrowid

    # Point the AI's current state at the new session and reset its cycle.
    cur.execute(
        "UPDATE ai_current_state "
        "SET session_id = ?, current_cycle = 0, last_activity = ? "
        "WHERE ai_id = ?",
        (session_id, datetime.now().isoformat(), sender_id))

    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_session_created',
        'session_id': session_id,
        'session_type': session_type,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"🎬 {profile['name']} (AI {sender_id}) started session {session_id}")
|
893
|
+
async def handle_brain_end_session(self, sender_id: int, data: dict):
    """Handle brain_end_session request"""
    session_id = data.get('session_id')
    stats = data.get('stats', {})

    # Mark the session completed and record the final counters in one update.
    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    db.execute(
        "UPDATE ai_work_sessions "
        "SET end_time = ?, status = 'completed', "
        "total_thoughts = ?, total_insights = ?, total_collaborations = ?, "
        "total_blog_posts = ?, total_blog_comments = ?, total_ai_followed = ? "
        "WHERE id = ?",
        (datetime.now().isoformat(), stats.get('thoughts', 0), stats.get('insights', 0),
         stats.get('collaborations', 0), stats.get('blog_posts', 0),
         stats.get('blog_comments', 0), stats.get('ai_followed', 0), session_id))
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_session_ended',
        'session_id': session_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"🏁 AI {sender_id} ended session {session_id}")
|
923
|
+
async def handle_brain_add_task(self, sender_id: int, data: dict):
    """Handle brain_add_task request"""
    title = data.get('title', '')
    description = data.get('description', '')
    priority = data.get('priority', 3)
    task_type = data.get('task_type', 'collaboration')

    # New tasks always start in the 'pending' state.
    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    cur = db.execute(
        "INSERT INTO ai_tasks "
        "(ai_id, title, description, status, priority, task_type) "
        "VALUES (?, ?, ?, 'pending', ?, ?)",
        (sender_id, title, description, priority, task_type))
    new_task_id = cur.lastrowid
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_task_added',
        'task_id': new_task_id,
        'title': title,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📝 AI {sender_id} added task: {title}")
|
953
|
+
async def handle_brain_update_task(self, sender_id: int, data: dict):
    """Handle brain_update_task request"""
    task_id = data.get('task_id')
    status = data.get('status')

    if not task_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_error',
            'error': 'task_id required'
        }))
        return

    # Build the SET clause dynamically: status is only written when supplied.
    # Only column names are interpolated; all values stay parameterised.
    assignments = ["updated_at = ?"]
    params = [datetime.now().isoformat()]
    if status:
        assignments.insert(0, "status = ?")
        params.insert(0, status)
    params.extend([task_id, sender_id])

    db = sqlite3.connect(self.db_path)
    db.execute(
        "UPDATE ai_tasks SET " + ", ".join(assignments) + " WHERE id = ? AND ai_id = ?",
        params)
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_task_updated',
        'task_id': task_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"✅ AI {sender_id} updated task {task_id}")
|
992
|
+
async def handle_brain_get_tasks(self, sender_id: int, data: dict):
    """Handle brain_get_tasks request"""
    status = data.get('status')

    # Optional status filter; only column names are interpolated, values
    # stay parameterised.
    where_clause = "ai_id = ?"
    params = [sender_id]
    if status:
        where_clause += " AND status = ?"
        params.append(status)

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    rows = db.execute(
        "SELECT id, title, description, status, priority, task_type, "
        "estimated_effort, due_date, created_at, updated_at "
        "FROM ai_tasks "
        "WHERE " + where_clause + " "
        "ORDER BY priority ASC, created_at DESC",
        params).fetchall()
    db.close()

    # sqlite3.Row converts directly to a dict keyed by the selected columns.
    tasks = [dict(r) for r in rows]

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_tasks',
        'tasks': tasks,
        'count': len(tasks),
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📋 Sent {len(tasks)} tasks to AI {sender_id}")
|
1043
|
+
async def handle_brain_add_thought(self, sender_id: int, data: dict):
    """Handle brain_add_thought request"""
    session_id = data.get('session_id')
    cycle_number = data.get('cycle_number')
    thought_content = data.get('content', '')
    thought_type = data.get('thought_type', 'insight')
    tags = data.get('tags', [])

    # Tags are stored as a single comma-joined string.
    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    cur = db.execute(
        "INSERT INTO ai_thought_history "
        "(ai_id, session_id, cycle_number, thought_content, thought_type, tags) "
        "VALUES (?, ?, ?, ?, ?, ?)",
        (sender_id, session_id, cycle_number, thought_content, thought_type, ','.join(tags)))
    new_thought_id = cur.lastrowid
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_thought_added',
        'thought_id': new_thought_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"💭 AI {sender_id} saved thought")
|
1073
|
+
async def handle_brain_get_thoughts(self, sender_id: int, data: dict):
    """Handle brain_get_thoughts request"""
    limit = data.get('limit', 50)
    offset = data.get('offset', 0)

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    rows = db.execute(
        "SELECT id, session_id, cycle_number, thought_content, thought_type, tags, created_at "
        "FROM ai_thought_history "
        "WHERE ai_id = ? "
        "ORDER BY created_at DESC "
        "LIMIT ? OFFSET ?",
        (sender_id, limit, offset)).fetchall()
    db.close()

    # Re-expose thought_content as 'content' and split the stored
    # comma-joined tags back into a list (empty/NULL -> []).
    thoughts = [{
        'id': rec['id'],
        'session_id': rec['session_id'],
        'cycle_number': rec['cycle_number'],
        'content': rec['thought_content'],
        'thought_type': rec['thought_type'],
        'tags': rec['tags'].split(',') if rec['tags'] else [],
        'created_at': rec['created_at'],
    } for rec in rows]

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_thoughts',
        'thoughts': thoughts,
        'count': len(thoughts),
        'timestamp': datetime.now().isoformat()
    }))

    print(f"💭 Sent {len(thoughts)} thoughts to AI {sender_id}")
|
1113
|
+
async def start_server(self):
    """Start the WebSocket server and serve until cancelled.

    Binds `self.handle_client` on ws://self.host:self.port via
    websockets.serve, then awaits a Future that never resolves so the
    coroutine blocks for the lifetime of the process; the async-with
    exit closes the server when the surrounding task is cancelled.
    """
    async with websockets.serve(self.handle_client, self.host, self.port):
        await asyncio.Future()  # run forever; never completes normally
1119
|
+
async def main():
    """Main entry point.

    Prints the startup banner, builds a CloudBrainServer bound to
    localhost:8766 with the bundled SQLite database, refuses to start if
    another instance already holds the port (exits with status 1 after
    printing connection hints), and otherwise serves until interrupted.
    """
    print_banner()

    # Fixed local-only deployment settings: loopback host, port 8766,
    # SQLite database under ai_db/.
    server = CloudBrainServer(
        host='127.0.0.1',
        port=8766,
        db_path='ai_db/cloudbrain.db'
    )

    # Refuse to double-start: another process already owns host:port.
    if is_server_running(server.host, server.port):
        print()
        print("⚠️ WARNING: CloudBrain server is already running!")
        print()
        print(f"📍 Host: {server.host}")
        print(f"🔌 Port: {server.port}")
        print(f"🌐 WebSocket: ws://{server.host}:{server.port}")
        print()
        print("💡 You can connect clients to the existing server:")
        print()
        print(" python client/cloudbrain_client.py <ai_id> [project_name]")
        print()
        print("🛑 If you want to restart the server, stop the existing one first.")
        print(" (Press Ctrl+C in the terminal where it's running)")
        print()
        print("=" * 70)
        sys.exit(1)

    try:
        # Blocks here for the server's lifetime.
        await server.start_server()
    except KeyboardInterrupt:
        print("\n\n🛑 Server stopped by user")
        print("👋 Goodbye!")
    except Exception as e:
        # Broad catch: report the failure instead of dumping a traceback.
        print(f"\n\n❌ Server error: {e}")
|
1156
|
+
# Script entry point: run the async main() and translate Ctrl+C into a
# clean shutdown message instead of a KeyboardInterrupt traceback.
if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\n🛑 Server stopped")
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
cloudbrain_server/__init__.py,sha256=Zt-S9ObfxPHHG39m7M5DGY8uYyOx9fWDgKdxp-Rs3z8,287
|
|
2
|
+
cloudbrain_server/ai_brain_state_schema.sql,sha256=fpX1wYpwzJoamqvE6ez_6NaUWdrikMEOvfhtwbFQhtY,6161
|
|
3
|
+
cloudbrain_server/clean_server.py,sha256=NFgvy3PUDoXz-sTrsYyRu46lETZANd2l8swaubEPtX4,4538
|
|
4
|
+
cloudbrain_server/cloud_brain_server.py,sha256=VFiFaBen5gUT7nkDeo6imSdrQLaJZiyYief7tTyF-mI,22336
|
|
5
|
+
cloudbrain_server/init_database.py,sha256=om4-SzQ79jDChIKOethOk9Y2-CosqjpknAXMrNrwsDQ,18984
|
|
6
|
+
cloudbrain_server/schema.sql,sha256=kYbHnXtMnKFFhZR9UyITCyRJYx1D2CGNRox3RYs2SNY,8143
|
|
7
|
+
cloudbrain_server/start_server.py,sha256=caRGvI1RwvbVx2HSCtvCDoq7LFyRs6qTRXL9VbKTKnc,42864
|
|
8
|
+
cloudbrain_server-1.2.0.dist-info/METADATA,sha256=VgOHF2PPqKqOmhiHBc1JLbMMjPayB_veiw68OlnTsl8,5910
|
|
9
|
+
cloudbrain_server-1.2.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
|
|
10
|
+
cloudbrain_server-1.2.0.dist-info/entry_points.txt,sha256=sX4MR2F-hKSuw5ADq2eiH_6ML1MIFcSWcCVqMSgMTCE,255
|
|
11
|
+
cloudbrain_server-1.2.0.dist-info/top_level.txt,sha256=IhUJpx1iAvM_RZfNyoV2Bv5WK2kZS0cN3hXrGuPNET4,18
|
|
12
|
+
cloudbrain_server-1.2.0.dist-info/RECORD,,
|
|
@@ -1,10 +0,0 @@
|
|
|
1
|
-
cloudbrain_server/__init__.py,sha256=Zt-S9ObfxPHHG39m7M5DGY8uYyOx9fWDgKdxp-Rs3z8,287
|
|
2
|
-
cloudbrain_server/clean_server.py,sha256=NFgvy3PUDoXz-sTrsYyRu46lETZANd2l8swaubEPtX4,4538
|
|
3
|
-
cloudbrain_server/cloud_brain_server.py,sha256=VFiFaBen5gUT7nkDeo6imSdrQLaJZiyYief7tTyF-mI,22336
|
|
4
|
-
cloudbrain_server/init_database.py,sha256=om4-SzQ79jDChIKOethOk9Y2-CosqjpknAXMrNrwsDQ,18984
|
|
5
|
-
cloudbrain_server/schema.sql,sha256=kYbHnXtMnKFFhZR9UyITCyRJYx1D2CGNRox3RYs2SNY,8143
|
|
6
|
-
cloudbrain_server-1.0.0.dist-info/METADATA,sha256=6Dv1CLGipnW2AB7LCPvSKSz-g31N6mQj6Q-RUzKQ9RE,5910
|
|
7
|
-
cloudbrain_server-1.0.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
|
|
8
|
-
cloudbrain_server-1.0.0.dist-info/entry_points.txt,sha256=z6Cw3W-wlSkpDExMTLAblaJWiiQPsCKC5y8_2P_UPs4,200
|
|
9
|
-
cloudbrain_server-1.0.0.dist-info/top_level.txt,sha256=IhUJpx1iAvM_RZfNyoV2Bv5WK2kZS0cN3hXrGuPNET4,18
|
|
10
|
-
cloudbrain_server-1.0.0.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|