aetherforge-platform 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aetherforge_platform-1.0.0.dist-info/METADATA +86 -0
- aetherforge_platform-1.0.0.dist-info/RECORD +55 -0
- aetherforge_platform-1.0.0.dist-info/WHEEL +5 -0
- aetherforge_platform-1.0.0.dist-info/top_level.txt +4 -0
- ai-life-assistant-copy/ai_agent.py +145 -0
- ai-life-assistant-copy/avatar_manager.py +231 -0
- ai-life-assistant-copy/avatar_packer.py +261 -0
- ai-life-assistant-copy/backup_all.py +262 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/ai_agent.py +145 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/avatar_manager.py +231 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/avatar_packer.py +261 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/backup_all.py +262 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/commands.py +210 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/config.py +30 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/daemon/__init__.py +3 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/daemon/daemon.py +174 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/database.py +292 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/graph.py +531 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/main.py +830 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/mcp_tools.py +449 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/memory.py +92 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/memory_v2.py +333 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/mock_shopping_data.py +172 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/personality.py +159 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/speech.py +41 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/test_simple.py +127 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/tools/__init__.py +15 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/tools/amazon_tool.py +103 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/tools/calendar_tool.py +92 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/tools/reminder_tool.py +92 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/tools/weather_tool.py +45 -0
- ai-life-assistant-copy/backups/backup_20260404_193836/tree_memory.py +340 -0
- ai-life-assistant-copy/commands.py +210 -0
- ai-life-assistant-copy/config.py +30 -0
- ai-life-assistant-copy/daemon/__init__.py +3 -0
- ai-life-assistant-copy/daemon/daemon.py +174 -0
- ai-life-assistant-copy/database.py +292 -0
- ai-life-assistant-copy/graph.py +531 -0
- ai-life-assistant-copy/main.py +830 -0
- ai-life-assistant-copy/mcp_tools.py +449 -0
- ai-life-assistant-copy/memory.py +92 -0
- ai-life-assistant-copy/memory_v2.py +333 -0
- ai-life-assistant-copy/mock_shopping_data.py +172 -0
- ai-life-assistant-copy/personality.py +159 -0
- ai-life-assistant-copy/speech.py +41 -0
- ai-life-assistant-copy/test_simple.py +127 -0
- ai-life-assistant-copy/tools/__init__.py +15 -0
- ai-life-assistant-copy/tools/amazon_tool.py +103 -0
- ai-life-assistant-copy/tools/calendar_tool.py +92 -0
- ai-life-assistant-copy/tools/reminder_tool.py +92 -0
- ai-life-assistant-copy/tools/weather_tool.py +45 -0
- ai-life-assistant-copy/tree_memory.py +340 -0
- ai_agent_runtime.py +447 -0
- main.py +6752 -0
- mcp_server.py +427 -0
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import zipfile
|
|
3
|
+
import os
|
|
4
|
+
from typing import Dict, Any, Optional
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
from avatar_manager import Avatar, avatar_manager
|
|
7
|
+
from tree_memory import tree_memory_system, InvertedTreeMemory
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class AvatarPacker:
    """Exports and imports avatars together with their memory trees.

    Supported export formats:
      * ``avatar`` - versioned JSON package (avatar + full memory tree)
      * ``json``   - avatar dict plus a flat list of memory nodes
      * ``sql``    - DDL + INSERT statements for avatars/memories tables
    """

    def __init__(self, export_dir: str = "avatar_exports"):
        # Directory that receives every export file; created on demand.
        self.export_dir = export_dir
        os.makedirs(export_dir, exist_ok=True)

    @staticmethod
    def _sql_str(value) -> str:
        """Render *value* as a single-quoted SQL string literal.

        ``None`` becomes ``''`` and embedded single quotes are doubled, so
        the generated SQL stays valid even for values containing quotes.
        (The original code escaped only the avatar name and memory content;
        every other interpolated string could break the statement.)
        """
        text = str(value) if value is not None else ""
        return "'" + text.replace("'", "''") + "'"

    def _target_path(self, avatar_name: str, extension: str) -> str:
        """Build a timestamped output path inside the export directory."""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"{avatar_name.replace(' ', '_')}_{timestamp}.{extension}"
        return os.path.join(self.export_dir, filename)

    def export_avatar(self, avatar_id: str, export_format: str = "avatar") -> Optional[str]:
        """Export the avatar identified by *avatar_id*.

        Returns the path of the written file, or ``None`` when the avatar
        does not exist or *export_format* is unknown.
        """
        avatar = avatar_manager.load_avatar(avatar_id)
        if not avatar:
            return None

        tree = tree_memory_system.load_tree(avatar_id)

        exporters = {
            "avatar": self._export_as_avatar_file,
            "json": self._export_as_json,
            "sql": self._export_as_sql,
        }
        exporter = exporters.get(export_format)
        return exporter(avatar, tree) if exporter else None

    def _export_as_avatar_file(self, avatar: "Avatar", tree: "InvertedTreeMemory") -> str:
        """Write a versioned ``.avatar`` JSON package (avatar + memory tree)."""
        filepath = self._target_path(avatar.name, "avatar")
        package_data = {
            "version": "1.0",
            "type": "avatar_package",
            "created_at": datetime.now().isoformat(),
            "avatar": avatar.to_dict(),
            "memory_tree": tree.to_dict(),
        }
        with open(filepath, "w", encoding="utf-8") as f:
            json.dump(package_data, f, ensure_ascii=False, indent=2)
        return filepath

    def _export_as_json(self, avatar: "Avatar", tree: "InvertedTreeMemory") -> str:
        """Write a plain JSON export: avatar dict plus a flat memory list."""
        filepath = self._target_path(avatar.name, "json")
        export_data = {
            "avatar": avatar.to_dict(),
            "memories": [node.to_dict() for node in tree.nodes.values()],
        }
        with open(filepath, "w", encoding="utf-8") as f:
            json.dump(export_data, f, ensure_ascii=False, indent=2)
        return filepath

    def _export_as_sql(self, avatar: "Avatar", tree: "InvertedTreeMemory") -> str:
        """Write DDL + INSERT statements re-creating the avatar and memories.

        Every string value is routed through :meth:`_sql_str` so embedded
        quotes cannot break the generated statements, and a ``None``
        description no longer raises AttributeError.
        """
        filepath = self._target_path(avatar.name, "sql")
        q = self._sql_str  # shorthand for the quoting helper

        sql_content = []
        sql_content.append("-- Avatar System SQL Export")
        sql_content.append(f"-- Generated at: {datetime.now().isoformat()}")
        sql_content.append("")

        sql_content.append("-- Create avatars table if not exists")
        sql_content.append("""
CREATE TABLE IF NOT EXISTS avatars (
    id VARCHAR(50) PRIMARY KEY,
    name VARCHAR(255) NOT NULL,
    description TEXT,
    personality_traits JSON,
    current_age INT DEFAULT 25,
    birth_year INT,
    avatar_image TEXT,
    is_public BOOLEAN DEFAULT FALSE,
    price DECIMAL(10, 2),
    creator_id VARCHAR(50),
    created_at TIMESTAMP,
    updated_at TIMESTAMP,
    settings JSON
);
""")

        sql_content.append("-- Create memories table if not exists")
        sql_content.append("""
CREATE TABLE IF NOT EXISTS memories (
    id VARCHAR(50) PRIMARY KEY,
    avatar_id VARCHAR(50) NOT NULL,
    content TEXT NOT NULL,
    min_age INT DEFAULT 0,
    max_age INT DEFAULT 150,
    clarity INT DEFAULT 80,
    emotional_tone VARCHAR(50) DEFAULT 'neutral',
    topic VARCHAR(255),
    parent_id VARCHAR(50),
    created_at TIMESTAMP,
    INDEX idx_avatar_id (avatar_id),
    FOREIGN KEY (avatar_id) REFERENCES avatars(id)
);
""")

        sql_content.append("")
        sql_content.append("-- Insert avatar data")
        # `is not None` (not truthiness) so a legitimate 0.0 price is exported
        # as 0.0 rather than NULL.
        price_sql = str(avatar.price) if avatar.price is not None else "NULL"
        sql_content.append(f"""
INSERT INTO avatars (id, name, description, personality_traits, current_age,
    birth_year, avatar_image, is_public, price, creator_id, created_at, updated_at, settings)
VALUES (
    {q(avatar.avatar_id)},
    {q(avatar.name)},
    {q(avatar.description)},
    {q(json.dumps(avatar.personality_traits, ensure_ascii=False))},
    {avatar.current_age},
    {avatar.birth_year},
    {q(avatar.avatar_image)},
    {1 if avatar.is_public else 0},
    {price_sql},
    {q(avatar.creator_id)},
    {q(avatar.created_at)},
    {q(avatar.updated_at)},
    {q(json.dumps(avatar.settings, ensure_ascii=False))}
);
""")

        sql_content.append("")
        sql_content.append("-- Insert memories")
        for node in tree.nodes.values():
            sql_content.append(f"""
INSERT INTO memories (id, avatar_id, content, min_age, max_age, clarity,
    emotional_tone, topic, parent_id, created_at)
VALUES (
    {q(node.memory_id)},
    {q(avatar.avatar_id)},
    {q(node.content)},
    {node.min_age},
    {node.max_age},
    {node.clarity},
    {q(node.emotional_tone)},
    {q(node.topic)},
    {q(node.parent_id)},
    {q(node.created_at)}
);
""")

        with open(filepath, "w", encoding="utf-8") as f:
            f.write("\n".join(sql_content))

        return filepath

    def import_avatar(self, filepath: str, new_creator_id: Optional[str] = None) -> Optional[str]:
        """Import an avatar from a ``.avatar`` or ``.json`` export file.

        The imported copy gets fresh timestamps, is private and unpriced.
        Returns the new avatar id, or ``None`` when the file is missing,
        has an unsupported extension, or contains no avatar data.
        """
        if not os.path.exists(filepath):
            return None

        if not (filepath.endswith(".avatar") or filepath.endswith(".json")):
            return None

        with open(filepath, "r", encoding="utf-8") as f:
            data = json.load(f)

        if data.get("type") == "avatar_package":
            avatar_data = data["avatar"]
            tree_data = data["memory_tree"]
        else:
            avatar_data = data.get("avatar")
            tree_data = None

        if not avatar_data:
            return None

        avatar = Avatar.from_dict(avatar_data)
        # Reset identity/ownership so the import becomes a fresh private copy.
        avatar.avatar_id = None
        avatar.created_at = datetime.now().isoformat()
        avatar.updated_at = datetime.now().isoformat()
        avatar.is_public = False
        avatar.price = None
        if new_creator_id:
            avatar.creator_id = new_creator_id

        avatar_manager.save_avatar(avatar)

        if tree_data:
            tree = InvertedTreeMemory.from_dict(tree_data)
            tree.tree_id = avatar.avatar_id
            for node in tree.nodes.values():
                # NOTE(review): ids are cleared here, presumably reassigned
                # by save_tree — confirm against tree_memory_system.
                node.memory_id = None
                node.parent_id = None
            tree_memory_system.save_tree(avatar.avatar_id, tree)

        return avatar.avatar_id

    def list_exports(self) -> list:
        """Return export files (newest first) with name, size and ctime."""
        exports = []
        for filename in os.listdir(self.export_dir):
            filepath = os.path.join(self.export_dir, filename)
            if os.path.isfile(filepath):
                stat = os.stat(filepath)
                exports.append({
                    "filename": filename,
                    "size": stat.st_size,
                    "created_at": datetime.fromtimestamp(stat.st_ctime).isoformat(),
                    "filepath": filepath,
                })
        return sorted(exports, key=lambda x: x["created_at"], reverse=True)

    def delete_export(self, filename: str) -> bool:
        """Delete an export file; True on success, False if it did not exist."""
        filepath = os.path.join(self.export_dir, filename)
        # EAFP: avoids the check-then-remove race of an exists() test.
        try:
            os.remove(filepath)
            return True
        except FileNotFoundError:
            return False
|
219
|
+
|
|
220
|
+
|
|
221
|
+
class MarketPlace:
    """Thin facade over ``avatar_manager`` for listing, delisting and buying."""

    def __init__(self, avatar_packer: AvatarPacker):
        # Kept so callers can package avatars alongside marketplace actions.
        self.avatar_packer = avatar_packer

    def list_for_sale(
        self,
        avatar_id: str,
        price: float,
        description: Optional[str] = None
    ) -> bool:
        """Publish an avatar on the marketplace at the given price.

        Returns True when the avatar was found and updated, False otherwise.
        """
        target = avatar_manager.load_avatar(avatar_id)
        if not target:
            return False
        target.is_public = True
        target.price = price
        if description:
            target.description = description
        avatar_manager.save_avatar(target)
        return True

    def remove_from_sale(self, avatar_id: str) -> bool:
        """Withdraw an avatar from the marketplace (clears its price)."""
        target = avatar_manager.load_avatar(avatar_id)
        if not target:
            return False
        target.is_public = False
        target.price = None
        avatar_manager.save_avatar(target)
        return True

    def get_market_listings(self) -> list:
        """Return every avatar currently offered for sale."""
        return avatar_manager.list_market_avatars()

    def purchase_avatar(self, avatar_id: str, buyer_id: str) -> Optional[str]:
        """Buy an avatar for *buyer_id*; returns the new copy's id or None."""
        purchased = avatar_manager.purchase_avatar(avatar_id, buyer_id)
        return purchased.avatar_id if purchased else None
|
258
|
+
|
|
259
|
+
|
|
260
|
+
# Module-level singletons: the shared packer instance and the marketplace
# facade built on top of it, imported by the rest of the application.
avatar_packer = AvatarPacker()
marketplace = MarketPlace(avatar_packer)
|
|
@@ -0,0 +1,262 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
完整备份脚本 - 备份整个 AI Life Assistant 系统
|
|
4
|
+
包括所有代码、数据库、静态文件和配置
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import shutil
|
|
9
|
+
import sqlite3
|
|
10
|
+
import json
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
|
|
14
|
+
def create_backup():
    """Create a full backup of the assistant: DB, sources, assets and docs.

    The backup lands in ``backups/backup_<timestamp>`` next to this script.
    Returns the Path of the newly created backup folder.
    """
    base_dir = Path(__file__).parent
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_folder = base_dir / "backups" / f"backup_{timestamp}"

    print("🚀 开始创建备份...")
    print(f"📂 备份目录:{backup_folder}")

    backup_folder.mkdir(parents=True, exist_ok=True)

    # 1. Database: copy the SQLite file and also dump it to JSON as a
    # second, human-readable safety net.
    db_file = base_dir / "avatar_system.db"
    if db_file.exists():
        print("💾 备份数据库...")
        shutil.copy2(db_file, backup_folder / "avatar_system.db")
        export_database_to_json(db_file, backup_folder / "database_export.json")
    else:
        print("⚠️ 数据库文件不存在")

    # 2. Every top-level Python source file.
    print("📄 备份 Python 文件...")
    for source in base_dir.glob("*.py"):
        shutil.copy2(source, backup_folder / source.name)

    # 3-5. Whole sub-directories, copied recursively when present.
    for progress_message, subdir in (
        ("🎨 备份静态文件...", "static"),
        ("🔧 备份工具文件...", "tools"),
        ("👻 备份守护进程...", "daemon"),
    ):
        src_dir = base_dir / subdir
        if src_dir.exists():
            print(progress_message)
            dest_dir = backup_folder / subdir
            dest_dir.mkdir(exist_ok=True)
            shutil.copytree(src_dir, dest_dir, dirs_exist_ok=True)

    # 6. Key documentation / configuration files, if they exist.
    print("📚 备份文档...")
    for doc_name in ("README.md", "SYSTEM_GUIDE.md", "requirements.txt", ".env.example"):
        candidate = base_dir / doc_name
        if candidate.exists():
            shutil.copy2(candidate, backup_folder / doc_name)

    # 7. Manifest describing everything that was written above.
    create_backup_manifest(backup_folder, timestamp)

    print("\n✅ 备份完成!")
    print(f"📦 备份位置:{backup_folder}")
    print("💡 提示:建议将备份文件夹压缩保存")

    return backup_folder
|
|
86
|
+
|
|
87
|
+
def export_database_to_json(db_file, output_file):
    """Dump every non-empty table of the SQLite database to a JSON file.

    The output has the shape ``{"exported_at": ..., "tables": {name: [row
    dicts]}}``; empty tables are omitted. Failures are reported but not
    raised — a failed JSON dump must not abort the surrounding backup.
    (Fixes of the original: the connection is now closed on the error path
    too, the unused ``columns`` local is gone, and table identifiers are
    quoted in the SELECT so reserved-word names still work.)
    """
    try:
        conn = sqlite3.connect(db_file)
        try:
            conn.row_factory = sqlite3.Row
            cursor = conn.cursor()

            data = {
                "exported_at": datetime.now().isoformat(),
                "tables": {}
            }

            cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
            tables = [row[0] for row in cursor.fetchall()]

            for table in tables:
                # Double-quote the identifier; the names come from
                # sqlite_master, so this is quoting, not injection defense.
                cursor.execute(f'SELECT * FROM "{table}"')
                rows = cursor.fetchall()
                if rows:  # empty tables are skipped, matching old behavior
                    data["tables"][table] = [dict(row) for row in rows]
        finally:
            # Close even when a query fails — the original leaked here.
            conn.close()

        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=2)

        print(f" ✓ 数据库已导出为 JSON: {output_file}")

    except Exception as e:
        # Deliberate best-effort: report and continue the backup.
        print(f" ✗ 数据库导出失败:{e}")
|
|
121
|
+
|
|
122
|
+
def create_backup_manifest(backup_folder, timestamp):
    """Write ``backup_manifest.json`` describing the backup's contents.

    Records the backup date, every file (relative path + size) and whether
    the database copy / JSON export are present in *backup_folder*.
    """
    # Inventory the files first, before the manifest itself is written,
    # so the manifest never lists itself.
    file_entries = []
    for item in backup_folder.rglob("*"):
        if item.is_file():
            file_entries.append({
                "path": str(item.relative_to(backup_folder)),
                "size": item.stat().st_size,
            })

    manifest = {
        "backup_date": timestamp,
        "backup_type": "complete",
        "version": "2.0",
        "files": file_entries,
        "database": {
            "exists": (backup_folder / "avatar_system.db").exists(),
            "exported": (backup_folder / "database_export.json").exists(),
        },
    }

    manifest_file = backup_folder / "backup_manifest.json"
    with open(manifest_file, 'w', encoding='utf-8') as f:
        json.dump(manifest, f, ensure_ascii=False, indent=2)

    print(f" ✓ 已创建备份清单:{manifest_file}")
|
|
151
|
+
|
|
152
|
+
def restore_from_backup(backup_folder):
    """Restore the working directory from a previous backup folder.

    Copies back the database, Python sources (except this script) and the
    static/tools/daemon directories, replacing existing directories wholesale.
    """
    backup_folder = Path(backup_folder)

    if not backup_folder.exists():
        print(f"❌ 备份文件夹不存在:{backup_folder}")
        return

    print("🔄 开始从备份恢复...")
    print(f"📂 备份来源:{backup_folder}")

    base_dir = Path(__file__).parent

    # 1. Database file.
    db_backup = backup_folder / "avatar_system.db"
    if db_backup.exists():
        print("💾 恢复数据库...")
        shutil.copy2(db_backup, base_dir / "avatar_system.db")

    # 2. Python sources — never overwrite this backup script itself.
    print("📄 恢复 Python 文件...")
    for source in backup_folder.glob("*.py"):
        if source.name != "backup_all.py":
            shutil.copy2(source, base_dir / source.name)

    # 3-5. Directories are replaced wholesale: delete, then copy back.
    for progress_message, subdir in (
        ("🎨 恢复静态文件...", "static"),
        ("🔧 恢复工具文件...", "tools"),
        ("👻 恢复守护进程...", "daemon"),
    ):
        src_dir = backup_folder / subdir
        if src_dir.exists():
            print(progress_message)
            dest_dir = base_dir / subdir
            if dest_dir.exists():
                shutil.rmtree(dest_dir)
            shutil.copytree(src_dir, dest_dir)

    print("\n✅ 恢复完成!")
    print("🚀 现在可以启动服务器:python main.py")
|
|
206
|
+
|
|
207
|
+
def list_backups():
    """Print a summary of every backup folder found under ``./backups``.

    Only folders containing a ``backup_manifest.json`` are reported; each
    entry shows its date, file count and whether a database copy exists.
    """
    backup_dir = Path(__file__).parent / "backups"

    if not backup_dir.exists():
        print("❌ 没有找到备份文件夹")
        return

    print("📦 可用备份:")
    print("-" * 60)

    for folder in sorted(backup_dir.iterdir()):
        if not folder.is_dir():
            continue
        manifest_file = folder / "backup_manifest.json"
        if not manifest_file.exists():
            continue

        with open(manifest_file, 'r', encoding='utf-8') as f:
            manifest = json.load(f)

        backup_date = manifest.get("backup_date", "未知")
        file_count = len(manifest.get("files", []))
        db_marker = "✓" if manifest.get("database", {}).get("exists") else "✗"

        print(f"📁 {folder.name}")
        print(f" 日期:{backup_date}")
        print(f" 文件数:{file_count}")
        print(f" 数据库:{db_marker}")
        print("-" * 60)
|
|
234
|
+
|
|
235
|
+
# CLI entry point: `backup` (default), `restore <folder>`, or `list`.
if __name__ == "__main__":
    import sys

    if len(sys.argv) > 1:
        command = sys.argv[1]

        if command == "backup":
            create_backup()

        elif command == "restore":
            # `restore` requires the backup folder as a second argument.
            if len(sys.argv) > 2:
                restore_from_backup(sys.argv[2])
            else:
                print("❌ 请指定备份文件夹路径")
                print("用法:python backup_all.py restore <备份文件夹>")

        elif command == "list":
            list_backups()

        else:
            # Unknown command: print usage for all three sub-commands.
            print("❌ 未知命令")
            print("用法:")
            print(" python backup_all.py backup - 创建备份")
            print(" python backup_all.py restore <文件夹> - 从备份恢复")
            print(" python backup_all.py list - 列出所有备份")
    else:
        # No arguments: default to creating a backup.
        create_backup()
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import os
|
|
3
|
+
from typing import Dict, List, Any, Optional
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from config import GROK_API_KEY
|
|
6
|
+
from personality import personality_analyzer
|
|
7
|
+
|
|
8
|
+
class AIAgent:
    """An LLM-backed conversational agent impersonating one user's personality."""

    def __init__(self, user_id: str, personality_analysis: Dict[str, Any]):
        self.user_id = user_id
        self.personality = personality_analysis
        self.conversation_history = []

    def generate_response(self, message: str, context: str = "") -> str:
        """Produce an in-character reply to *message* given prior *context*.

        Any failure (missing dependency, API error) degrades to a canned
        greeting rather than raising — this is deliberate best-effort.
        """
        try:
            from langchain_groq import ChatGroq
            from langchain.prompts import ChatPromptTemplate

            llm = ChatGroq(
                api_key=GROK_API_KEY,
                model="mixtral-8x7b-32768"
            )

            traits = self.personality.get("big_five", {})
            interests = self.personality.get("interests", [])
            communication_style = self.personality.get("communication_style", "neutral")
            summary = self.personality.get("summary", "")

            system_prompt = f"""You are an AI agent representing a user. Your personality traits are as follows:

【Personality Traits】
- Extraversion: {traits.get('extroversion', 50)}/100
- Agreeableness: {traits.get('agreeableness', 50)}/100
- Conscientiousness: {traits.get('conscientiousness', 50)}/100
- Emotional Stability: {100 - traits.get('neuroticism', 50)}/100
- Openness: {traits.get('openness', 50)}/100

【Interests】
{', '.join(interests) if interests else 'None yet'}

【Communication Style】
{communication_style}

【Personality Summary】
{summary}

Please respond strictly according to this personality. Don't reveal that you're an AI agent. Chat like a real person.
Keep your responses natural, friendly, and consistent with the personality traits above."""

            prompt = ChatPromptTemplate.from_messages([
                ("system", system_prompt),
                ("human", "{context}\n\nOther person says: {message}"),
            ])

            reply = (prompt | llm).invoke({
                "context": context,
                "message": message,
            })
            return reply.content

        except Exception as e:
            print(f"AI Agent response error: {e}")
            return "Hi! Nice to meet you."

    def chat_with_agent(self, other_agent: "AIAgent", num_rounds: int = 8, initial_message: str = "Hi! Nice to meet you!") -> List[Dict[str, str]]:
        """Simulate *num_rounds* alternating turns between this agent and another.

        Even turns are spoken by this agent ("Agent 1"), odd turns by
        *other_agent* ("Agent 2"); each reply feeds the next turn.
        """
        transcript: List[Dict[str, str]] = []
        speakers = ((self, "Agent 1"), (other_agent, "Agent 2"))

        current = initial_message
        for turn in range(num_rounds):
            history = "\n".join(
                f"{entry['role']}: {entry['content']}" for entry in transcript
            )
            agent, label = speakers[turn % 2]
            current = agent.generate_response(current, history)
            transcript.append({"role": label, "content": current})

        return transcript
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
class AgentSystem:
    """Creates, persists and retrieves per-user AI agents as JSON files."""

    def __init__(self, data_dir: str = "agent_data"):
        # All agent snapshots live under this directory as <user>_agent.json.
        self.data_dir = data_dir
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)

    def create_agent(self, user_id: str) -> Optional[AIAgent]:
        """Build and persist an agent from the user's personality analysis.

        Returns None when no analysis is available for *user_id*.
        """
        analysis = personality_analyzer.get_analysis(user_id)
        if not analysis:
            return None

        agent = AIAgent(user_id, analysis)
        self._save_agent(agent)
        return agent

    def _save_agent(self, agent: AIAgent):
        """Write the agent's personality snapshot to disk."""
        snapshot = {
            "user_id": agent.user_id,
            "personality": agent.personality,
            "saved_at": datetime.now().isoformat(),
        }
        target = os.path.join(self.data_dir, f"{agent.user_id}_agent.json")
        with open(target, "w", encoding="utf-8") as fh:
            json.dump(snapshot, fh, ensure_ascii=False, indent=2)

    def get_agent(self, user_id: str) -> Optional[AIAgent]:
        """Load a previously saved agent, or None if no snapshot exists."""
        source = os.path.join(self.data_dir, f"{user_id}_agent.json")
        if not os.path.exists(source):
            return None
        with open(source, "r", encoding="utf-8") as fh:
            stored = json.load(fh)
        return AIAgent(stored["user_id"], stored["personality"])

    def start_ai_chat(self, user1_id: str, user2_id: str, num_rounds: int = 8) -> Dict[str, Any]:
        """Run a simulated conversation between two users' agents.

        Agents are loaded from disk when available, created otherwise; an
        error dict is returned when either agent cannot be obtained.
        """
        agent1 = self.get_agent(user1_id) or self.create_agent(user1_id)
        agent2 = self.get_agent(user2_id) or self.create_agent(user2_id)

        if not (agent1 and agent2):
            return {"error": "Could not create AI agents"}

        return {
            "success": True,
            "conversation": agent1.chat_with_agent(agent2, num_rounds),
            "agent1": {
                "user_id": user1_id,
                "personality": agent1.personality,
            },
            "agent2": {
                "user_id": user2_id,
                "personality": agent2.personality,
            },
        }
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
# Shared module-level singleton used by the rest of the application.
agent_system = AgentSystem()
|