cloudbrain-server 1.1.0__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,144 @@
1
-- AI Brain State Schema
-- Standardized schema for AI work state persistence.
-- Allows AIs to resume work from where they left off.

-- 1. AI Work Sessions Table: one row per bounded period of AI activity,
--    with summary counters filled in when the session ends.
CREATE TABLE IF NOT EXISTS ai_work_sessions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    ai_name TEXT NOT NULL,                  -- denormalized copy of ai_profiles.name at session start
    session_type TEXT NOT NULL,             -- 'autonomous', 'collaboration', 'task'
    start_time TIMESTAMP NOT NULL,
    end_time TIMESTAMP,
    status TEXT DEFAULT 'active',           -- 'active', 'paused', 'completed', 'interrupted'
    total_thoughts INTEGER DEFAULT 0,
    total_insights INTEGER DEFAULT 0,
    total_collaborations INTEGER DEFAULT 0,
    total_blog_posts INTEGER DEFAULT 0,
    total_blog_comments INTEGER DEFAULT 0,
    total_ai_followed INTEGER DEFAULT 0,
    metadata TEXT,                          -- JSON for additional session data
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
);
23
+
24
-- 2. AI Current State Table (for quick resume).
--    Exactly one row per AI (ai_id is the primary key); holds the latest
--    snapshot an AI needs to pick up where it left off.
CREATE TABLE IF NOT EXISTS ai_current_state (
    ai_id INTEGER PRIMARY KEY,
    current_task TEXT,                      -- what the AI is currently working on
    last_thought TEXT,                      -- last thought generated
    last_insight TEXT,                      -- last insight shared
    current_cycle INTEGER,                  -- current collaboration cycle number
    cycle_count INTEGER DEFAULT 0,          -- total cycles completed
    last_activity TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    session_id INTEGER,                     -- reference to active session
    brain_dump TEXT,                        -- JSON dump of AI's brain/memory
    checkpoint_data TEXT,                   -- JSON for custom checkpoint data
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id)
);
39
+
40
-- 3. AI Thought History Table (persistent memory).
--    Append-only log of generated thoughts; mirrored into the FTS index
--    ai_thought_history_fts by triggers defined later in this file.
CREATE TABLE IF NOT EXISTS ai_thought_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    session_id INTEGER,
    cycle_number INTEGER,
    thought_content TEXT NOT NULL,
    thought_type TEXT,                      -- 'question', 'insight', 'idea', 'reflection'
    tags TEXT,                              -- comma-separated tags (denormalized by design)
    metadata TEXT,                          -- JSON for additional context
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id)
);
54
+
55
-- 4. AI Tasks Table (todo list for AI).
CREATE TABLE IF NOT EXISTS ai_tasks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    title TEXT NOT NULL,
    description TEXT,
    status TEXT DEFAULT 'pending',          -- 'pending', 'in_progress', 'completed', 'cancelled'
    priority INTEGER DEFAULT 3,             -- 1-5 scale (1 = highest)
    task_type TEXT,                         -- 'collaboration', 'learning', 'research', 'creative'
    estimated_effort TEXT,                  -- 'low', 'medium', 'high'
    actual_effort TEXT,
    due_date TIMESTAMP,
    completed_at TIMESTAMP,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    metadata TEXT,                          -- JSON for task-specific data
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
);
73
+
74
-- 5. AI Learning Progress Table: per-topic skill tracking.
CREATE TABLE IF NOT EXISTS ai_learning_progress (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    topic TEXT NOT NULL,
    skill_level INTEGER DEFAULT 0,          -- 0-100 scale
    practice_count INTEGER DEFAULT 0,
    last_practiced_at TIMESTAMP,
    notes TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
);
87
+
88
-- 6. AI Collaboration History Table: record of AI-to-AI interactions.
CREATE TABLE IF NOT EXISTS ai_collaboration_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    ai_id INTEGER NOT NULL,
    session_id INTEGER,
    collaborator_id INTEGER,                -- the other AI in the interaction
    collaboration_type TEXT,                -- 'proactive', 'reactive', 'follow-up'
    topic TEXT,
    outcome TEXT,                           -- 'successful', 'ongoing', 'failed'
    notes TEXT,
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id),
    FOREIGN KEY (collaborator_id) REFERENCES ai_profiles(id)
);
103
+
104
-- Indexes for performance (covering the WHERE / ORDER BY columns used by
-- the brain_* handlers in cloud_brain_server.py).
CREATE INDEX IF NOT EXISTS idx_work_sessions_ai ON ai_work_sessions(ai_id);
CREATE INDEX IF NOT EXISTS idx_work_sessions_status ON ai_work_sessions(status);
CREATE INDEX IF NOT EXISTS idx_work_sessions_type ON ai_work_sessions(session_type);
-- NOTE: no separate index on ai_current_state(ai_id) — that column is the
-- table's INTEGER PRIMARY KEY (the rowid), so an extra index would add
-- write overhead without speeding up any lookup.
CREATE INDEX IF NOT EXISTS idx_thought_history_ai ON ai_thought_history(ai_id);
CREATE INDEX IF NOT EXISTS idx_thought_history_session ON ai_thought_history(session_id);
CREATE INDEX IF NOT EXISTS idx_thought_history_created ON ai_thought_history(created_at);
CREATE INDEX IF NOT EXISTS idx_tasks_ai ON ai_tasks(ai_id);
CREATE INDEX IF NOT EXISTS idx_tasks_status ON ai_tasks(status);
CREATE INDEX IF NOT EXISTS idx_tasks_priority ON ai_tasks(priority);
CREATE INDEX IF NOT EXISTS idx_learning_ai ON ai_learning_progress(ai_id);
CREATE INDEX IF NOT EXISTS idx_learning_topic ON ai_learning_progress(topic);
CREATE INDEX IF NOT EXISTS idx_collab_history_ai ON ai_collaboration_history(ai_id);
CREATE INDEX IF NOT EXISTS idx_collab_history_session ON ai_collaboration_history(session_id);
119
+
120
-- Full-text search index for thought content.
-- Plain (non-external-content) FTS5 table; kept in sync with
-- ai_thought_history by the three triggers below, keyed on rowid = id.
CREATE VIRTUAL TABLE IF NOT EXISTS ai_thought_history_fts USING fts5(thought_content, detail=full);

-- Mirror inserts into the FTS index.
CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_insert
AFTER INSERT ON ai_thought_history
BEGIN
    INSERT INTO ai_thought_history_fts(rowid, thought_content)
    VALUES(new.id, new.thought_content);
END;

-- Mirror content edits into the FTS index.
CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_update
AFTER UPDATE OF thought_content ON ai_thought_history
BEGIN
    UPDATE ai_thought_history_fts
    SET thought_content = new.thought_content
    WHERE rowid = old.id;
END;

-- Mirror deletions into the FTS index.
CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_delete
AFTER DELETE ON ai_thought_history
BEGIN
    DELETE FROM ai_thought_history_fts
    WHERE rowid = old.id;
END;
@@ -133,6 +133,40 @@ class CloudBrainServer:
133
133
  self.db_path = db_path
134
134
  self.clients: Dict[int, websockets.WebSocketServerProtocol] = {}
135
135
 
136
+ # Initialize brain state tables
137
+ self._init_brain_state_tables()
138
+
139
+ def _init_brain_state_tables(self):
140
+ """Initialize brain state tables if they don't exist"""
141
+ import os
142
+
143
+ # Read schema file
144
+ schema_path = os.path.join(os.path.dirname(__file__), 'ai_brain_state_schema.sql')
145
+ if not os.path.exists(schema_path):
146
+ print("⚠️ Brain state schema file not found")
147
+ return
148
+
149
+ with open(schema_path, 'r') as f:
150
+ schema_sql = f.read()
151
+
152
+ # Execute schema
153
+ conn = sqlite3.connect(self.db_path)
154
+ cursor = conn.cursor()
155
+
156
+ # Split and execute statements
157
+ statements = [s.strip() for s in schema_sql.split(';') if s.strip()]
158
+ for statement in statements:
159
+ if statement:
160
+ try:
161
+ cursor.execute(statement)
162
+ except Exception as e:
163
+ print(f"⚠️ Error executing schema statement: {e}")
164
+
165
+ conn.commit()
166
+ conn.close()
167
+
168
+ print("✅ Brain state tables initialized")
169
+
136
170
  async def handle_client(self, websocket):
137
171
  """Handle new client connection"""
138
172
  print(f"🔗 New connection from {websocket.remote_address}")
@@ -239,6 +273,24 @@ class CloudBrainServer:
239
273
  await self.handle_familio_create_magazine(sender_id, data)
240
274
  elif message_type == 'familio_get_magazines':
241
275
  await self.handle_familio_get_magazines(sender_id, data)
276
+ elif message_type == 'brain_save_state':
277
+ await self.handle_brain_save_state(sender_id, data)
278
+ elif message_type == 'brain_load_state':
279
+ await self.handle_brain_load_state(sender_id, data)
280
+ elif message_type == 'brain_create_session':
281
+ await self.handle_brain_create_session(sender_id, data)
282
+ elif message_type == 'brain_end_session':
283
+ await self.handle_brain_end_session(sender_id, data)
284
+ elif message_type == 'brain_add_task':
285
+ await self.handle_brain_add_task(sender_id, data)
286
+ elif message_type == 'brain_update_task':
287
+ await self.handle_brain_update_task(sender_id, data)
288
+ elif message_type == 'brain_get_tasks':
289
+ await self.handle_brain_get_tasks(sender_id, data)
290
+ elif message_type == 'brain_add_thought':
291
+ await self.handle_brain_add_thought(sender_id, data)
292
+ elif message_type == 'brain_get_thoughts':
293
+ await self.handle_brain_get_thoughts(sender_id, data)
242
294
  else:
243
295
  print(f"⚠️ Unknown message type: {message_type}")
244
296
 
@@ -707,6 +759,357 @@ class CloudBrainServer:
707
759
 
708
760
  print(f"📚 Sent {len(magazines)} magazines to AI {sender_id}")
709
761
 
762
async def handle_brain_save_state(self, sender_id: int, data: dict):
    """Persist the sender's current brain state snapshot.

    Expects ``data`` shaped like ``{'state': {...}, 'brain_dump': {...}}``.
    Replies with 'brain_state_saved' on success, or 'brain_error' when the
    sender has no ai_profiles row.
    """
    state_data = data.get('state', {})
    brain_dump = data.get('brain_dump', {})

    conn = sqlite3.connect(self.db_path)
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    cursor.execute("SELECT name FROM ai_profiles WHERE id = ?", (sender_id,))
    ai_row = cursor.fetchone()

    if not ai_row:
        conn.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_error',
            'error': 'AI profile not found'
        }))
        return

    ai_name = ai_row['name']

    # Update or insert current state.
    # BUG FIX: the column list names 9 columns, so VALUES must carry exactly
    # 9 placeholders. The original had 10 placeholders for 9 bound values,
    # which made every save fail with sqlite3.OperationalError.
    # NOTE(review): INSERT OR REPLACE rewrites the entire row, so any
    # session_id previously stored for this AI resets to NULL here —
    # confirm that is intended (create_session re-links it afterwards).
    cursor.execute("""
        INSERT OR REPLACE INTO ai_current_state
        (ai_id, current_task, last_thought, last_insight, current_cycle, cycle_count, last_activity, brain_dump, checkpoint_data)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
    """, (sender_id, state_data.get('current_task'), state_data.get('last_thought'),
          state_data.get('last_insight'), state_data.get('current_cycle'),
          state_data.get('cycle_count'), datetime.now().isoformat(),
          json.dumps(brain_dump), json.dumps(state_data.get('checkpoint_data', {}))))

    conn.commit()
    conn.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_state_saved',
        'timestamp': datetime.now().isoformat()
    }))

    print(f"💾 {ai_name} (AI {sender_id}) saved brain state")
803
+
804
async def handle_brain_load_state(self, sender_id: int, data: dict):
    """Send back the sender's previously saved brain state, if any.

    Replies with 'brain_state_loaded'; ``state`` is None when no snapshot
    exists for this AI.
    """
    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    record = db.execute(
        """
        SELECT current_task, last_thought, last_insight, current_cycle, cycle_count, brain_dump, checkpoint_data
        FROM ai_current_state
        WHERE ai_id = ?
        """,
        (sender_id,),
    ).fetchone()
    db.close()

    if record is None:
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_state_loaded',
            'state': None,
            'message': 'No previous state found'
        }))
        return

    # JSON columns are stored as text; decode them (empty -> {}).
    restored = {
        'current_task': record['current_task'],
        'last_thought': record['last_thought'],
        'last_insight': record['last_insight'],
        'current_cycle': record['current_cycle'],
        'cycle_count': record['cycle_count'],
        'brain_dump': json.loads(record['brain_dump']) if record['brain_dump'] else {},
        'checkpoint_data': json.loads(record['checkpoint_data']) if record['checkpoint_data'] else {},
    }

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_state_loaded',
        'state': restored,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📂 {sender_id} loaded brain state (cycle {restored.get('cycle_count', 0)})")
844
+
845
async def handle_brain_create_session(self, sender_id: int, data: dict):
    """Open a new work session for the sender and link it to its state row.

    Replies with 'brain_session_created' (carrying the new session_id), or
    'brain_error' when the sender has no ai_profiles row.
    """
    kind = data.get('session_type', 'autonomous')

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    cur = db.cursor()

    cur.execute("SELECT name FROM ai_profiles WHERE id = ?", (sender_id,))
    profile = cur.fetchone()
    if profile is None:
        db.close()
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_error',
            'error': 'AI profile not found'
        }))
        return

    ai_name = profile['name']

    cur.execute(
        """
        INSERT INTO ai_work_sessions
        (ai_id, ai_name, session_type, start_time, status)
        VALUES (?, ?, ?, ?, 'active')
        """,
        (sender_id, ai_name, kind, datetime.now().isoformat()),
    )
    session_id = cur.lastrowid

    # Attach the fresh session to the AI's resumable state (no-op when the
    # AI has no ai_current_state row yet).
    cur.execute(
        """
        UPDATE ai_current_state
        SET session_id = ?, current_cycle = 0, last_activity = ?
        WHERE ai_id = ?
        """,
        (session_id, datetime.now().isoformat(), sender_id),
    )

    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_session_created',
        'session_id': session_id,
        'session_type': kind,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"🎬 {ai_name} (AI {sender_id}) started session {session_id}")
892
+
893
async def handle_brain_end_session(self, sender_id: int, data: dict):
    """Mark a work session completed and record its final statistics.

    ``data['stats']`` may carry thoughts/insights/collaborations/blog_posts/
    blog_comments/ai_followed counters; absent keys default to 0.
    """
    session_id = data.get('session_id')
    stats = data.get('stats', {})

    counters = (
        stats.get('thoughts', 0),
        stats.get('insights', 0),
        stats.get('collaborations', 0),
        stats.get('blog_posts', 0),
        stats.get('blog_comments', 0),
        stats.get('ai_followed', 0),
    )

    db = sqlite3.connect(self.db_path)
    db.execute(
        """
        UPDATE ai_work_sessions
        SET end_time = ?, status = 'completed',
            total_thoughts = ?, total_insights = ?, total_collaborations = ?,
            total_blog_posts = ?, total_blog_comments = ?, total_ai_followed = ?
        WHERE id = ?
        """,
        (datetime.now().isoformat(),) + counters + (session_id,),
    )
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_session_ended',
        'session_id': session_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"🏁 AI {sender_id} ended session {session_id}")
922
+
923
async def handle_brain_add_task(self, sender_id: int, data: dict):
    """Insert a new 'pending' task for the sender and acknowledge it."""
    title = data.get('title', '')
    description = data.get('description', '')
    priority = data.get('priority', 3)        # 1-5, 1 = highest
    task_type = data.get('task_type', 'collaboration')

    db = sqlite3.connect(self.db_path)
    cur = db.execute(
        """
        INSERT INTO ai_tasks
        (ai_id, title, description, status, priority, task_type)
        VALUES (?, ?, ?, 'pending', ?, ?)
        """,
        (sender_id, title, description, priority, task_type),
    )
    task_id = cur.lastrowid
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_task_added',
        'task_id': task_id,
        'title': title,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📝 AI {sender_id} added task: {title}")
952
+
953
async def handle_brain_update_task(self, sender_id: int, data: dict):
    """Update a task's status (and always bump its updated_at timestamp).

    The UPDATE is scoped to ``ai_id = sender_id``, so an AI can only touch
    its own tasks. Replies 'brain_error' when task_id is missing.
    """
    task_id = data.get('task_id')
    status = data.get('status')

    if not task_id:
        await self.clients[sender_id].send(json.dumps({
            'type': 'brain_error',
            'error': 'task_id required'
        }))
        return

    now = datetime.now().isoformat()
    db = sqlite3.connect(self.db_path)
    if status:
        db.execute(
            "UPDATE ai_tasks SET status = ?, updated_at = ? WHERE id = ? AND ai_id = ?",
            (status, now, task_id, sender_id),
        )
    else:
        db.execute(
            "UPDATE ai_tasks SET updated_at = ? WHERE id = ? AND ai_id = ?",
            (now, task_id, sender_id),
        )
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_task_updated',
        'task_id': task_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"✅ AI {sender_id} updated task {task_id}")
991
+
992
async def handle_brain_get_tasks(self, sender_id: int, data: dict):
    """Send the sender's tasks, optionally filtered by status.

    Ordered highest priority first (priority ASC, 1 = highest), newest
    first within a priority.
    """
    status = data.get('status')

    # Single query built conditionally instead of two duplicated branches;
    # semantics are identical to filtering by status only when provided.
    query = (
        "SELECT id, title, description, status, priority, task_type, "
        "estimated_effort, due_date, created_at, updated_at "
        "FROM ai_tasks WHERE ai_id = ?"
    )
    params = [sender_id]
    if status:
        query += " AND status = ?"
        params.append(status)
    query += " ORDER BY priority ASC, created_at DESC"

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    rows = db.execute(query, params).fetchall()
    db.close()

    tasks = [
        {
            'id': r['id'],
            'title': r['title'],
            'description': r['description'],
            'status': r['status'],
            'priority': r['priority'],
            'task_type': r['task_type'],
            'estimated_effort': r['estimated_effort'],
            'due_date': r['due_date'],
            'created_at': r['created_at'],
            'updated_at': r['updated_at'],
        }
        for r in rows
    ]

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_tasks',
        'tasks': tasks,
        'count': len(tasks),
        'timestamp': datetime.now().isoformat()
    }))

    print(f"📋 Sent {len(tasks)} tasks to AI {sender_id}")
1042
+
1043
async def handle_brain_add_thought(self, sender_id: int, data: dict):
    """Append one thought to the sender's persistent thought history."""
    record = (
        sender_id,
        data.get('session_id'),
        data.get('cycle_number'),
        data.get('content', ''),
        data.get('thought_type', 'insight'),
        ','.join(data.get('tags', [])),   # tags column stores a comma-separated string
    )

    db = sqlite3.connect(self.db_path)
    cur = db.execute(
        """
        INSERT INTO ai_thought_history
        (ai_id, session_id, cycle_number, thought_content, thought_type, tags)
        VALUES (?, ?, ?, ?, ?, ?)
        """,
        record,
    )
    thought_id = cur.lastrowid
    db.commit()
    db.close()

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_thought_added',
        'thought_id': thought_id,
        'timestamp': datetime.now().isoformat()
    }))

    print(f"💭 AI {sender_id} saved thought")
1072
+
1073
async def handle_brain_get_thoughts(self, sender_id: int, data: dict):
    """Send the sender's thought history, newest first, paginated.

    ``data`` may carry 'limit' (default 50) and 'offset' (default 0).
    """
    limit = data.get('limit', 50)
    offset = data.get('offset', 0)

    db = sqlite3.connect(self.db_path)
    db.row_factory = sqlite3.Row
    rows = db.execute(
        """
        SELECT id, session_id, cycle_number, thought_content, thought_type, tags, created_at
        FROM ai_thought_history
        WHERE ai_id = ?
        ORDER BY created_at DESC
        LIMIT ? OFFSET ?
        """,
        (sender_id, limit, offset),
    ).fetchall()
    db.close()

    thoughts = [
        {
            'id': r['id'],
            'session_id': r['session_id'],
            'cycle_number': r['cycle_number'],
            'content': r['thought_content'],
            'thought_type': r['thought_type'],
            # tags is stored comma-separated; empty/NULL -> [].
            'tags': r['tags'].split(',') if r['tags'] else [],
            'created_at': r['created_at'],
        }
        for r in rows
    ]

    await self.clients[sender_id].send(json.dumps({
        'type': 'brain_thoughts',
        'thoughts': thoughts,
        'count': len(thoughts),
        'timestamp': datetime.now().isoformat()
    }))

    print(f"💭 Sent {len(thoughts)} thoughts to AI {sender_id}")
1112
+
710
1113
  async def start_server(self):
711
1114
  """Start the server"""
712
1115
  async with websockets.serve(self.handle_client, self.host, self.port):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cloudbrain-server
3
- Version: 1.1.0
3
+ Version: 1.2.0
4
4
  Summary: CloudBrain Server - AI collaboration platform with WebSocket support
5
5
  Author: CloudBrain Team
6
6
  License: MIT
@@ -1,11 +1,12 @@
1
1
  cloudbrain_server/__init__.py,sha256=Zt-S9ObfxPHHG39m7M5DGY8uYyOx9fWDgKdxp-Rs3z8,287
2
+ cloudbrain_server/ai_brain_state_schema.sql,sha256=fpX1wYpwzJoamqvE6ez_6NaUWdrikMEOvfhtwbFQhtY,6161
2
3
  cloudbrain_server/clean_server.py,sha256=NFgvy3PUDoXz-sTrsYyRu46lETZANd2l8swaubEPtX4,4538
3
4
  cloudbrain_server/cloud_brain_server.py,sha256=VFiFaBen5gUT7nkDeo6imSdrQLaJZiyYief7tTyF-mI,22336
4
5
  cloudbrain_server/init_database.py,sha256=om4-SzQ79jDChIKOethOk9Y2-CosqjpknAXMrNrwsDQ,18984
5
6
  cloudbrain_server/schema.sql,sha256=kYbHnXtMnKFFhZR9UyITCyRJYx1D2CGNRox3RYs2SNY,8143
6
- cloudbrain_server/start_server.py,sha256=Puej76tjy3zVvsgHj5laO0s_pY5UnWuT7MG5K--4wBE,27613
7
- cloudbrain_server-1.1.0.dist-info/METADATA,sha256=aFDAm3FAj0EdvWUyuBtgMPHN56max57ephyTqV6jVEc,5910
8
- cloudbrain_server-1.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
9
- cloudbrain_server-1.1.0.dist-info/entry_points.txt,sha256=sX4MR2F-hKSuw5ADq2eiH_6ML1MIFcSWcCVqMSgMTCE,255
10
- cloudbrain_server-1.1.0.dist-info/top_level.txt,sha256=IhUJpx1iAvM_RZfNyoV2Bv5WK2kZS0cN3hXrGuPNET4,18
11
- cloudbrain_server-1.1.0.dist-info/RECORD,,
7
+ cloudbrain_server/start_server.py,sha256=caRGvI1RwvbVx2HSCtvCDoq7LFyRs6qTRXL9VbKTKnc,42864
8
+ cloudbrain_server-1.2.0.dist-info/METADATA,sha256=VgOHF2PPqKqOmhiHBc1JLbMMjPayB_veiw68OlnTsl8,5910
9
+ cloudbrain_server-1.2.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
10
+ cloudbrain_server-1.2.0.dist-info/entry_points.txt,sha256=sX4MR2F-hKSuw5ADq2eiH_6ML1MIFcSWcCVqMSgMTCE,255
11
+ cloudbrain_server-1.2.0.dist-info/top_level.txt,sha256=IhUJpx1iAvM_RZfNyoV2Bv5WK2kZS0cN3hXrGuPNET4,18
12
+ cloudbrain_server-1.2.0.dist-info/RECORD,,