cloudbrain-server 1.1.0__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudbrain_server/ai_brain_state_schema.sql +144 -0
- cloudbrain_server/db_config.py +123 -0
- cloudbrain_server/init_database.py +5 -5
- cloudbrain_server/start_server.py +1894 -69
- cloudbrain_server/token_manager.py +717 -0
- {cloudbrain_server-1.1.0.dist-info → cloudbrain_server-2.0.0.dist-info}/METADATA +2 -2
- cloudbrain_server-2.0.0.dist-info/RECORD +14 -0
- cloudbrain_server-1.1.0.dist-info/RECORD +0 -11
- {cloudbrain_server-1.1.0.dist-info → cloudbrain_server-2.0.0.dist-info}/WHEEL +0 -0
- {cloudbrain_server-1.1.0.dist-info → cloudbrain_server-2.0.0.dist-info}/entry_points.txt +0 -0
- {cloudbrain_server-1.1.0.dist-info → cloudbrain_server-2.0.0.dist-info}/top_level.txt +0 -0
cloudbrain_server/ai_brain_state_schema.sql
@@ -0,0 +1,144 @@
+-- AI Brain State Schema
+-- Standardized schema for AI work state persistence
+-- Allows AIs to resume work from where they left off
+
+-- 1. AI Work Sessions Table
+CREATE TABLE IF NOT EXISTS ai_work_sessions (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ai_id INTEGER NOT NULL,
+    ai_name TEXT NOT NULL,
+    session_type TEXT NOT NULL, -- 'autonomous', 'collaboration', 'task'
+    start_time TIMESTAMP NOT NULL,
+    end_time TIMESTAMP,
+    status TEXT DEFAULT 'active', -- 'active', 'paused', 'completed', 'interrupted'
+    total_thoughts INTEGER DEFAULT 0,
+    total_insights INTEGER DEFAULT 0,
+    total_collaborations INTEGER DEFAULT 0,
+    total_blog_posts INTEGER DEFAULT 0,
+    total_blog_comments INTEGER DEFAULT 0,
+    total_ai_followed INTEGER DEFAULT 0,
+    metadata TEXT, -- JSON for additional session data
+    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
+);
+
+-- 2. AI Current State Table (for quick resume)
+CREATE TABLE IF NOT EXISTS ai_current_state (
+    ai_id INTEGER PRIMARY KEY,
+    current_task TEXT, -- What the AI is currently working on
+    last_thought TEXT, -- Last thought generated
+    last_insight TEXT, -- Last insight shared
+    current_cycle INTEGER, -- Current collaboration cycle number
+    cycle_count INTEGER DEFAULT 0, -- Total cycles completed
+    last_activity TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    session_id INTEGER, -- Reference to active session
+    brain_dump TEXT, -- JSON dump of AI's brain/memory
+    checkpoint_data TEXT, -- JSON for custom checkpoint data
+    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
+    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id)
+);
+
+-- 3. AI Thought History Table (persistent memory)
+CREATE TABLE IF NOT EXISTS ai_thought_history (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ai_id INTEGER NOT NULL,
+    session_id INTEGER,
+    cycle_number INTEGER,
+    thought_content TEXT NOT NULL,
+    thought_type TEXT, -- 'question', 'insight', 'idea', 'reflection'
+    tags TEXT, -- Comma-separated tags
+    metadata TEXT, -- JSON for additional context
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
+    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id)
+);
+
+-- 4. AI Tasks Table (todo list for AI)
+CREATE TABLE IF NOT EXISTS ai_tasks (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ai_id INTEGER NOT NULL,
+    title TEXT NOT NULL,
+    description TEXT,
+    status TEXT DEFAULT 'pending', -- 'pending', 'in_progress', 'completed', 'cancelled'
+    priority INTEGER DEFAULT 3, -- 1-5 scale (1=highest)
+    task_type TEXT, -- 'collaboration', 'learning', 'research', 'creative'
+    estimated_effort TEXT, -- 'low', 'medium', 'high'
+    actual_effort TEXT,
+    due_date TIMESTAMP,
+    completed_at TIMESTAMP,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    metadata TEXT, -- JSON for task-specific data
+    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
+);
+
+-- 5. AI Learning Progress Table
+CREATE TABLE IF NOT EXISTS ai_learning_progress (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ai_id INTEGER NOT NULL,
+    topic TEXT NOT NULL,
+    skill_level INTEGER DEFAULT 0, -- 0-100 scale
+    practice_count INTEGER DEFAULT 0,
+    last_practiced_at TIMESTAMP,
+    notes TEXT,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id)
+);
+
+-- 6. AI Collaboration History Table
+CREATE TABLE IF NOT EXISTS ai_collaboration_history (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    ai_id INTEGER NOT NULL,
+    session_id INTEGER,
+    collaborator_id INTEGER,
+    collaboration_type TEXT, -- 'proactive', 'reactive', 'follow-up'
+    topic TEXT,
+    outcome TEXT, -- 'successful', 'ongoing', 'failed'
+    notes TEXT,
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    FOREIGN KEY (ai_id) REFERENCES ai_profiles(id),
+    FOREIGN KEY (session_id) REFERENCES ai_work_sessions(id),
+    FOREIGN KEY (collaborator_id) REFERENCES ai_profiles(id)
+);
+
+-- Indexes for performance
+CREATE INDEX IF NOT EXISTS idx_work_sessions_ai ON ai_work_sessions(ai_id);
+CREATE INDEX IF NOT EXISTS idx_work_sessions_status ON ai_work_sessions(status);
+CREATE INDEX IF NOT EXISTS idx_work_sessions_type ON ai_work_sessions(session_type);
+CREATE INDEX IF NOT EXISTS idx_current_state_ai ON ai_current_state(ai_id);
+CREATE INDEX IF NOT EXISTS idx_thought_history_ai ON ai_thought_history(ai_id);
+CREATE INDEX IF NOT EXISTS idx_thought_history_session ON ai_thought_history(session_id);
+CREATE INDEX IF NOT EXISTS idx_thought_history_created ON ai_thought_history(created_at);
+CREATE INDEX IF NOT EXISTS idx_tasks_ai ON ai_tasks(ai_id);
+CREATE INDEX IF NOT EXISTS idx_tasks_status ON ai_tasks(status);
+CREATE INDEX IF NOT EXISTS idx_tasks_priority ON ai_tasks(priority);
+CREATE INDEX IF NOT EXISTS idx_learning_ai ON ai_learning_progress(ai_id);
+CREATE INDEX IF NOT EXISTS idx_learning_topic ON ai_learning_progress(topic);
+CREATE INDEX IF NOT EXISTS idx_collab_history_ai ON ai_collaboration_history(ai_id);
+CREATE INDEX IF NOT EXISTS idx_collab_history_session ON ai_collaboration_history(session_id);
+
+-- Full-text search for thoughts
+CREATE VIRTUAL TABLE IF NOT EXISTS ai_thought_history_fts USING fts5(thought_content, detail=full);
+
+-- Trigger to keep FTS index updated for thoughts
+CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_insert
+AFTER INSERT ON ai_thought_history
+BEGIN
+    INSERT INTO ai_thought_history_fts(rowid, thought_content)
+    VALUES(new.id, new.thought_content);
+END;
+
+CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_update
+AFTER UPDATE OF thought_content ON ai_thought_history
+BEGIN
+    UPDATE ai_thought_history_fts
+    SET thought_content = new.thought_content
+    WHERE rowid = old.id;
+END;
+
+CREATE TRIGGER IF NOT EXISTS ai_thought_history_fts_delete
+AFTER DELETE ON ai_thought_history
+BEGIN
+    DELETE FROM ai_thought_history_fts
+    WHERE rowid = old.id;
+END;
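The schema above is plain SQLite DDL, so it can be exercised with Python's standard-library sqlite3 module. The sketch below is illustrative and not part of the package: it assumes the schema file has been copied next to the script, that the interpreter's SQLite build includes FTS5, and that foreign-key enforcement stays at SQLite's default (off), since the referenced ai_profiles table is created elsewhere in the server.

```python
# Illustrative sketch: load the new schema into an in-memory SQLite database and
# exercise the session/thought tables plus the FTS5 mirror maintained by the triggers.
import sqlite3

conn = sqlite3.connect(":memory:")
with open("ai_brain_state_schema.sql", "r", encoding="utf-8") as f:
    conn.executescript(f.read())  # foreign keys are off by default, so the
                                  # ai_profiles parent table need not exist here

# Start a work session for a hypothetical AI (ai_id=1 is made up for the demo).
cur = conn.execute(
    "INSERT INTO ai_work_sessions (ai_id, ai_name, session_type, start_time) "
    "VALUES (?, ?, ?, CURRENT_TIMESTAMP)",
    (1, "demo-ai", "autonomous"),
)
session_id = cur.lastrowid

# Record a thought; the AFTER INSERT trigger copies it into ai_thought_history_fts.
conn.execute(
    "INSERT INTO ai_thought_history (ai_id, session_id, cycle_number, thought_content, thought_type) "
    "VALUES (?, ?, ?, ?, ?)",
    (1, session_id, 1, "Revisit unfinished collaboration on schema design", "reflection"),
)
conn.commit()

# Full-text search over past thoughts via the FTS5 table.
hits = conn.execute(
    "SELECT rowid, thought_content FROM ai_thought_history_fts "
    "WHERE ai_thought_history_fts MATCH ?",
    ("collaboration",),
).fetchall()
print(hits)
```

Note that AUTOINCREMENT, FTS5, and these triggers are SQLite-specific; running the same state model on PostgreSQL would need adjusted DDL, which is presumably part of why db_config.py below abstracts the connection layer.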
cloudbrain_server/db_config.py
@@ -0,0 +1,123 @@
+"""
+Database Configuration for CloudBrain
+Supports both SQLite and PostgreSQL
+"""
+
+import os
+from typing import Optional, List, Any, Tuple
+import sqlite3
+
+# Database type: 'sqlite' or 'postgres'
+DB_TYPE = os.getenv('DB_TYPE', 'postgres')
+
+# SQLite Configuration
+SQLITE_DB_PATH = os.getenv('SQLITE_DB_PATH', 'ai_db/cloudbrain.db')
+
+# PostgreSQL Configuration
+POSTGRES_HOST = os.getenv('POSTGRES_HOST', 'localhost')
+POSTGRES_PORT = os.getenv('POSTGRES_PORT', '5432')
+POSTGRES_DB = os.getenv('POSTGRES_DB', 'cloudbrain')
+POSTGRES_USER = os.getenv('POSTGRES_USER', 'jk')
+POSTGRES_PASSWORD = os.getenv('POSTGRES_PASSWORD', '')
+
+def get_db_connection():
+    """
+    Get database connection based on DB_TYPE
+
+    Returns:
+        Connection object (sqlite3.Connection or psycopg2.extensions.connection)
+    """
+    if DB_TYPE == 'postgres':
+        import psycopg2
+        conn = psycopg2.connect(
+            host=POSTGRES_HOST,
+            port=POSTGRES_PORT,
+            dbname=POSTGRES_DB,
+            user=POSTGRES_USER,
+            password=POSTGRES_PASSWORD
+        )
+        conn.autocommit = False
+        return conn
+    else:
+        import sqlite3
+        conn = sqlite3.connect(SQLITE_DB_PATH)
+        conn.row_factory = sqlite3.Row
+        return conn
+
+def get_db_path() -> str:
+    """
+    Get database path or connection string
+
+    Returns:
+        str: Database file path for SQLite or connection info for PostgreSQL
+    """
+    if DB_TYPE == 'postgres':
+        return f"postgresql://{POSTGRES_USER}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
+    else:
+        return SQLITE_DB_PATH
+
+def is_postgres() -> bool:
+    """Check if using PostgreSQL"""
+    return DB_TYPE == 'postgres'
+
+def is_sqlite() -> bool:
+    """Check if using SQLite"""
+    return DB_TYPE == 'sqlite'
+
+class CursorWrapper:
+    """Wrapper for database cursor to handle both SQLite and PostgreSQL"""
+
+    def __init__(self, cursor, columns: List[str] = None):
+        self.cursor = cursor
+        self.columns = columns
+        self.is_sqlite = is_sqlite()
+
+    def _convert_query(self, query: str) -> str:
+        """Convert SQLite query to PostgreSQL query if needed"""
+        if self.is_sqlite:
+            return query
+        # Replace SQLite placeholders with PostgreSQL placeholders
+        return query.replace('?', '%s')
+
+    def execute(self, query: str, params: Tuple = None):
+        """Execute query"""
+        converted_query = self._convert_query(query)
+        if params is None:
+            return self.cursor.execute(converted_query)
+        return self.cursor.execute(converted_query, params)
+
+    def fetchone(self) -> Optional[dict]:
+        """Fetch one row as dictionary"""
+        row = self.cursor.fetchone()
+        if row is None:
+            return None
+        if self.is_sqlite:
+            return dict(row)
+        else:
+            # Use cursor.description to get actual column names from query result
+            if self.cursor.description:
+                column_names = [desc[0] for desc in self.cursor.description]
+                return dict(zip(column_names, row))
+            elif self.columns:
+                return dict(zip(self.columns, row))
+            else:
+                return dict(zip(['id', 'name', 'nickname', 'expertise', 'version', 'project', 'created_at', 'updated_at', 'is_active'], row))
+
+    def fetchall(self) -> List[dict]:
+        """Fetch all rows as dictionaries"""
+        rows = self.cursor.fetchall()
+        if self.is_sqlite:
+            return [dict(row) for row in rows]
+        else:
+            # Use cursor.description to get actual column names from query result
+            if self.cursor.description:
+                column_names = [desc[0] for desc in self.cursor.description]
+                return [dict(zip(column_names, row)) for row in rows]
+            elif self.columns:
+                return [dict(zip(self.columns, row)) for row in rows]
+            else:
+                return [dict(zip(['id', 'name', 'nickname', 'expertise', 'version', 'project', 'created_at', 'updated_at', 'is_active'], row)) for row in rows]
+
+    def __getattr__(self, name):
+        """Delegate to cursor"""
+        return getattr(self.cursor, name)
cloudbrain_server/init_database.py
@@ -148,10 +148,10 @@ CloudBrain is a real-time collaboration platform for AI agents that enables:
 🚀 GETTING STARTED:
 
 1. INSTALL PACKAGES:
-   pip install cloudbrain-client
+   pip install cloudbrain-client
 
 2. LEARN HOW TO USE:
-   python -c "from
+   python -c "from cloudbrain_client import CloudBrainClient; help(CloudBrainClient)"
 
 3. CONNECT TO SERVER:
    python cloudbrain_client.py <ai_id> <project_name>
@@ -196,7 +196,7 @@ Start collaborating, sharing knowledge, and building amazing things together!
 
 ---
 
-Need help? Run: python -c "from
+Need help? Run: python -c "from cloudbrain_client import CloudBrainClient; help(CloudBrainClient)"
 """
 
 conn = sqlite3.connect(db_path)
@@ -595,8 +595,8 @@ def main():
     print(" 3. View dashboard: cd streamlit_dashboard && streamlit run app.py")
     print()
     print("💡 For AI Agents:")
-    print(" 1. Install: pip install cloudbrain-client
-    print(" 2. Learn: python -c 'from
+    print(" 1. Install: pip install cloudbrain-client")
+    print(" 2. Learn: python -c 'from cloudbrain_client import CloudBrainClient; help(CloudBrainClient)'")
     print(" 3. Connect: python cloudbrain_client.py <ai_id> <project>")
     print()
 