AstrBot 4.10.2__py3-none-any.whl → 4.10.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- astrbot/builtin_stars/astrbot/long_term_memory.py +186 -0
- astrbot/builtin_stars/astrbot/main.py +128 -0
- astrbot/builtin_stars/astrbot/metadata.yaml +4 -0
- astrbot/builtin_stars/astrbot/process_llm_request.py +245 -0
- astrbot/builtin_stars/builtin_commands/commands/__init__.py +31 -0
- astrbot/builtin_stars/builtin_commands/commands/admin.py +77 -0
- astrbot/builtin_stars/builtin_commands/commands/alter_cmd.py +173 -0
- astrbot/builtin_stars/builtin_commands/commands/conversation.py +366 -0
- astrbot/builtin_stars/builtin_commands/commands/help.py +88 -0
- astrbot/builtin_stars/builtin_commands/commands/llm.py +20 -0
- astrbot/builtin_stars/builtin_commands/commands/persona.py +142 -0
- astrbot/builtin_stars/builtin_commands/commands/plugin.py +120 -0
- astrbot/builtin_stars/builtin_commands/commands/provider.py +329 -0
- astrbot/builtin_stars/builtin_commands/commands/setunset.py +36 -0
- astrbot/builtin_stars/builtin_commands/commands/sid.py +36 -0
- astrbot/builtin_stars/builtin_commands/commands/t2i.py +23 -0
- astrbot/builtin_stars/builtin_commands/commands/tool.py +31 -0
- astrbot/builtin_stars/builtin_commands/commands/tts.py +36 -0
- astrbot/builtin_stars/builtin_commands/commands/utils/rst_scene.py +26 -0
- astrbot/builtin_stars/builtin_commands/main.py +237 -0
- astrbot/builtin_stars/builtin_commands/metadata.yaml +4 -0
- astrbot/builtin_stars/python_interpreter/main.py +537 -0
- astrbot/builtin_stars/python_interpreter/metadata.yaml +4 -0
- astrbot/builtin_stars/python_interpreter/requirements.txt +1 -0
- astrbot/builtin_stars/python_interpreter/shared/api.py +22 -0
- astrbot/builtin_stars/reminder/main.py +266 -0
- astrbot/builtin_stars/reminder/metadata.yaml +4 -0
- astrbot/builtin_stars/session_controller/main.py +114 -0
- astrbot/builtin_stars/session_controller/metadata.yaml +5 -0
- astrbot/builtin_stars/web_searcher/engines/__init__.py +111 -0
- astrbot/builtin_stars/web_searcher/engines/bing.py +30 -0
- astrbot/builtin_stars/web_searcher/engines/sogo.py +52 -0
- astrbot/builtin_stars/web_searcher/main.py +436 -0
- astrbot/builtin_stars/web_searcher/metadata.yaml +4 -0
- astrbot/cli/__init__.py +1 -1
- astrbot/core/agent/message.py +9 -0
- astrbot/core/agent/runners/tool_loop_agent_runner.py +2 -1
- astrbot/core/backup/__init__.py +26 -0
- astrbot/core/backup/constants.py +77 -0
- astrbot/core/backup/exporter.py +476 -0
- astrbot/core/backup/importer.py +761 -0
- astrbot/core/config/default.py +1 -1
- astrbot/core/log.py +1 -1
- astrbot/core/pipeline/process_stage/method/agent_sub_stages/internal.py +1 -1
- astrbot/core/pipeline/waking_check/stage.py +2 -1
- astrbot/core/provider/entities.py +32 -9
- astrbot/core/provider/provider.py +3 -1
- astrbot/core/provider/sources/anthropic_source.py +80 -27
- astrbot/core/provider/sources/fishaudio_tts_api_source.py +14 -6
- astrbot/core/provider/sources/gemini_source.py +75 -26
- astrbot/core/provider/sources/openai_source.py +68 -25
- astrbot/core/star/context.py +1 -1
- astrbot/core/star/star_manager.py +11 -13
- astrbot/core/utils/astrbot_path.py +34 -0
- astrbot/dashboard/routes/__init__.py +2 -0
- astrbot/dashboard/routes/backup.py +589 -0
- astrbot/dashboard/routes/log.py +44 -10
- astrbot/dashboard/server.py +8 -1
- {astrbot-4.10.2.dist-info → astrbot-4.10.3.dist-info}/METADATA +1 -1
- {astrbot-4.10.2.dist-info → astrbot-4.10.3.dist-info}/RECORD +63 -24
- {astrbot-4.10.2.dist-info → astrbot-4.10.3.dist-info}/WHEEL +0 -0
- {astrbot-4.10.2.dist-info → astrbot-4.10.3.dist-info}/entry_points.txt +0 -0
- {astrbot-4.10.2.dist-info → astrbot-4.10.3.dist-info}/licenses/LICENSE +0 -0
astrbot/core/backup/exporter.py (new file)
@@ -0,0 +1,476 @@
"""AstrBot data exporter

Exports all data into a ZIP backup file.
The export format is JSON, a database-agnostic approach that allows future migration to MySQL/PostgreSQL.
"""

import hashlib
import json
import os
import zipfile
from datetime import datetime, timezone
from pathlib import Path
from typing import TYPE_CHECKING, Any

from sqlalchemy import select

from astrbot.core import logger
from astrbot.core.config.default import VERSION
from astrbot.core.db import BaseDatabase
from astrbot.core.utils.astrbot_path import (
    get_astrbot_backups_path,
    get_astrbot_data_path,
)

# Imported from the shared constants module
from .constants import (
    BACKUP_MANIFEST_VERSION,
    KB_METADATA_MODELS,
    MAIN_DB_MODELS,
    get_backup_directories,
)

if TYPE_CHECKING:
    from astrbot.core.knowledge_base.kb_mgr import KnowledgeBaseManager

CMD_CONFIG_FILE_PATH = os.path.join(get_astrbot_data_path(), "cmd_config.json")


class AstrBotExporter:
    """AstrBot data exporter

    Exported content:
    - All tables of the main database (data/data_v4.db)
    - Knowledge base metadata (data/knowledge_base/kb.db)
    - Vector document data of each knowledge base
    - Configuration file (data/cmd_config.json)
    - Attachment files
    - Knowledge base media files
    - Plugin directory (data/plugins)
    - Plugin data directory (data/plugin_data)
    - Config directory (data/config)
    - T2I template directory (data/t2i_templates)
    - WebChat data directory (data/webchat)
    - Temporary file directory (data/temp)
    """

    def __init__(
        self,
        main_db: BaseDatabase,
        kb_manager: "KnowledgeBaseManager | None" = None,
        config_path: str = CMD_CONFIG_FILE_PATH,
    ):
        self.main_db = main_db
        self.kb_manager = kb_manager
        self.config_path = config_path
        self._checksums: dict[str, str] = {}

    async def export_all(
        self,
        output_dir: str | None = None,
        progress_callback: Any | None = None,
    ) -> str:
        """Export all data into a ZIP file

        Args:
            output_dir: output directory
            progress_callback: progress callback; receives (stage, current, total, message)

        Returns:
            str: path of the generated ZIP file
        """
        if output_dir is None:
            output_dir = get_astrbot_backups_path()

        # Make sure the output directory exists
        Path(output_dir).mkdir(parents=True, exist_ok=True)

        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        zip_filename = f"astrbot_backup_{timestamp}.zip"
        zip_path = os.path.join(output_dir, zip_filename)

        logger.info(f"开始导出备份到 {zip_path}")

        try:
            with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
                # 1. Export the main database
                if progress_callback:
                    await progress_callback("main_db", 0, 100, "正在导出主数据库...")
                main_data = await self._export_main_database()
                main_db_json = json.dumps(
                    main_data, ensure_ascii=False, indent=2, default=str
                )
                zf.writestr("databases/main_db.json", main_db_json)
                self._add_checksum("databases/main_db.json", main_db_json)
                if progress_callback:
                    await progress_callback("main_db", 100, 100, "主数据库导出完成")

                # 2. Export knowledge base data
                kb_meta_data: dict[str, Any] = {
                    "knowledge_bases": [],
                    "kb_documents": [],
                    "kb_media": [],
                }
                if self.kb_manager:
                    if progress_callback:
                        await progress_callback(
                            "kb_metadata", 0, 100, "正在导出知识库元数据..."
                        )
                    kb_meta_data = await self._export_kb_metadata()
                    kb_meta_json = json.dumps(
                        kb_meta_data, ensure_ascii=False, indent=2, default=str
                    )
                    zf.writestr("databases/kb_metadata.json", kb_meta_json)
                    self._add_checksum("databases/kb_metadata.json", kb_meta_json)
                    if progress_callback:
                        await progress_callback(
                            "kb_metadata", 100, 100, "知识库元数据导出完成"
                        )

                    # Export the document data of each knowledge base
                    kb_insts = self.kb_manager.kb_insts
                    total_kbs = len(kb_insts)
                    for idx, (kb_id, kb_helper) in enumerate(kb_insts.items()):
                        if progress_callback:
                            await progress_callback(
                                "kb_documents",
                                idx,
                                total_kbs,
                                f"正在导出知识库 {kb_helper.kb.kb_name} 的文档数据...",
                            )
                        doc_data = await self._export_kb_documents(kb_helper)
                        doc_json = json.dumps(
                            doc_data, ensure_ascii=False, indent=2, default=str
                        )
                        doc_path = f"databases/kb_{kb_id}/documents.json"
                        zf.writestr(doc_path, doc_json)
                        self._add_checksum(doc_path, doc_json)

                        # Export the FAISS index file
                        await self._export_faiss_index(zf, kb_helper, kb_id)

                        # Export the knowledge base media files
                        await self._export_kb_media_files(zf, kb_helper, kb_id)

                    if progress_callback:
                        await progress_callback(
                            "kb_documents", total_kbs, total_kbs, "知识库文档导出完成"
                        )

                # 3. Export the configuration file
                if progress_callback:
                    await progress_callback("config", 0, 100, "正在导出配置文件...")
                if os.path.exists(self.config_path):
                    with open(self.config_path, encoding="utf-8") as f:
                        config_content = f.read()
                    zf.writestr("config/cmd_config.json", config_content)
                    self._add_checksum("config/cmd_config.json", config_content)
                if progress_callback:
                    await progress_callback("config", 100, 100, "配置文件导出完成")

                # 4. Export attachment files
                if progress_callback:
                    await progress_callback("attachments", 0, 100, "正在导出附件...")
                await self._export_attachments(zf, main_data.get("attachments", []))
                if progress_callback:
                    await progress_callback("attachments", 100, 100, "附件导出完成")

                # 5. Export plugins and other directories
                if progress_callback:
                    await progress_callback(
                        "directories", 0, 100, "正在导出插件和数据目录..."
                    )
                dir_stats = await self._export_directories(zf)
                if progress_callback:
                    await progress_callback("directories", 100, 100, "目录导出完成")

                # 6. Generate the manifest
                if progress_callback:
                    await progress_callback("manifest", 0, 100, "正在生成清单...")
                manifest = self._generate_manifest(main_data, kb_meta_data, dir_stats)
                manifest_json = json.dumps(manifest, ensure_ascii=False, indent=2)
                zf.writestr("manifest.json", manifest_json)
                if progress_callback:
                    await progress_callback("manifest", 100, 100, "清单生成完成")

            logger.info(f"备份导出完成: {zip_path}")
            return zip_path

        except Exception as e:
            logger.error(f"备份导出失败: {e}")
            # Clean up the partially written file
            if os.path.exists(zip_path):
                os.remove(zip_path)
            raise

    async def _export_main_database(self) -> dict[str, list[dict]]:
        """Export all tables of the main database"""
        export_data: dict[str, list[dict]] = {}

        async with self.main_db.get_db() as session:
            for table_name, model_class in MAIN_DB_MODELS.items():
                try:
                    result = await session.execute(select(model_class))
                    records = result.scalars().all()
                    export_data[table_name] = [
                        self._model_to_dict(record) for record in records
                    ]
                    logger.debug(
                        f"导出表 {table_name}: {len(export_data[table_name])} 条记录"
                    )
                except Exception as e:
                    logger.warning(f"导出表 {table_name} 失败: {e}")
                    export_data[table_name] = []

        return export_data

    async def _export_kb_metadata(self) -> dict[str, list[dict]]:
        """Export the knowledge base metadata database"""
        if not self.kb_manager:
            return {"knowledge_bases": [], "kb_documents": [], "kb_media": []}

        export_data: dict[str, list[dict]] = {}

        async with self.kb_manager.kb_db.get_db() as session:
            for table_name, model_class in KB_METADATA_MODELS.items():
                try:
                    result = await session.execute(select(model_class))
                    records = result.scalars().all()
                    export_data[table_name] = [
                        self._model_to_dict(record) for record in records
                    ]
                    logger.debug(
                        f"导出知识库表 {table_name}: {len(export_data[table_name])} 条记录"
                    )
                except Exception as e:
                    logger.warning(f"导出知识库表 {table_name} 失败: {e}")
                    export_data[table_name] = []

        return export_data

    async def _export_kb_documents(self, kb_helper: Any) -> dict[str, Any]:
        """Export the document chunks of a knowledge base"""
        try:
            from astrbot.core.db.vec_db.faiss_impl.vec_db import FaissVecDB

            vec_db: FaissVecDB = kb_helper.vec_db
            if not vec_db or not vec_db.document_storage:
                return {"documents": []}

            # Fetch all documents
            docs = await vec_db.document_storage.get_documents(
                metadata_filters={},
                offset=0,
                limit=None,  # fetch everything
            )

            return {"documents": docs}
        except Exception as e:
            logger.warning(f"导出知识库文档失败: {e}")
            return {"documents": []}

    async def _export_faiss_index(
        self,
        zf: zipfile.ZipFile,
        kb_helper: Any,
        kb_id: str,
    ) -> None:
        """Export the FAISS index file"""
        try:
            index_path = kb_helper.kb_dir / "index.faiss"
            if index_path.exists():
                archive_path = f"databases/kb_{kb_id}/index.faiss"
                zf.write(str(index_path), archive_path)
                logger.debug(f"导出 FAISS 索引: {archive_path}")
        except Exception as e:
            logger.warning(f"导出 FAISS 索引失败: {e}")

    async def _export_kb_media_files(
        self, zf: zipfile.ZipFile, kb_helper: Any, kb_id: str
    ) -> None:
        """Export the media files of a knowledge base"""
        try:
            media_dir = kb_helper.kb_medias_dir
            if not media_dir.exists():
                return

            for root, _, files in os.walk(media_dir):
                for file in files:
                    file_path = Path(root) / file
                    # Compute the path relative to the knowledge base directory
                    rel_path = file_path.relative_to(kb_helper.kb_dir)
                    archive_path = f"files/kb_media/{kb_id}/{rel_path}"
                    zf.write(str(file_path), archive_path)
        except Exception as e:
            logger.warning(f"导出知识库媒体文件失败: {e}")

    async def _export_directories(
        self, zf: zipfile.ZipFile
    ) -> dict[str, dict[str, int]]:
        """Export plugin and other data directories

        Returns:
            dict: per-directory statistics {dir_name: {"files": count, "size": bytes}}
        """
        stats: dict[str, dict[str, int]] = {}
        backup_directories = get_backup_directories()

        for dir_name, dir_path in backup_directories.items():
            full_path = Path(dir_path)
            if not full_path.exists():
                logger.debug(f"目录不存在,跳过: {full_path}")
                continue

            file_count = 0
            total_size = 0

            try:
                for root, dirs, files in os.walk(full_path):
                    # Skip __pycache__ directories
                    dirs[:] = [d for d in dirs if d != "__pycache__"]

                    for file in files:
                        # Skip .pyc files
                        if file.endswith(".pyc"):
                            continue

                        file_path = Path(root) / file
                        try:
                            # Compute the relative path
                            rel_path = file_path.relative_to(full_path)
                            archive_path = f"directories/{dir_name}/{rel_path}"
                            zf.write(str(file_path), archive_path)
                            file_count += 1
                            total_size += file_path.stat().st_size
                        except Exception as e:
                            logger.warning(f"导出文件 {file_path} 失败: {e}")

                stats[dir_name] = {"files": file_count, "size": total_size}
                logger.debug(
                    f"导出目录 {dir_name}: {file_count} 个文件, {total_size} 字节"
                )
            except Exception as e:
                logger.warning(f"导出目录 {dir_path} 失败: {e}")
                stats[dir_name] = {"files": 0, "size": 0}

        return stats

    async def _export_attachments(
        self, zf: zipfile.ZipFile, attachments: list[dict]
    ) -> None:
        """Export attachment files"""
        for attachment in attachments:
            try:
                file_path = attachment.get("path", "")
                if file_path and os.path.exists(file_path):
                    # Use attachment_id as the file name
                    attachment_id = attachment.get("attachment_id", "")
                    ext = os.path.splitext(file_path)[1]
                    archive_path = f"files/attachments/{attachment_id}{ext}"
                    zf.write(file_path, archive_path)
            except Exception as e:
                logger.warning(f"导出附件失败: {e}")

    def _model_to_dict(self, record: Any) -> dict:
        """Convert a SQLModel instance into a dict

        This is a database-agnostic serialization approach that supports future migration to other databases.
        """
        # Use SQLModel's built-in model_dump method when available
        if hasattr(record, "model_dump"):
            data = record.model_dump(mode="python")
            # Normalize datetime values
            for key, value in data.items():
                if isinstance(value, datetime):
                    data[key] = value.isoformat()
            return data

        # Fall back to manual extraction
        data = {}
        # Use inspect to obtain the table information
        from sqlalchemy import inspect as sa_inspect

        mapper = sa_inspect(record.__class__)
        for column in mapper.columns:
            value = getattr(record, column.name)
            # Normalize datetime values to ISO format strings
            if isinstance(value, datetime):
                value = value.isoformat()
            data[column.name] = value
        return data

    def _add_checksum(self, path: str, content: str | bytes) -> None:
        """Compute and record a file checksum"""
        if isinstance(content, str):
            content = content.encode("utf-8")
        checksum = hashlib.sha256(content).hexdigest()
        self._checksums[path] = f"sha256:{checksum}"

    def _generate_manifest(
        self,
        main_data: dict[str, list[dict]],
        kb_meta_data: dict[str, list[dict]],
        dir_stats: dict[str, dict[str, int]] | None = None,
    ) -> dict:
        """Generate the backup manifest"""
        if dir_stats is None:
            dir_stats = {}
        # Collect knowledge base IDs
        kb_document_tables = {}
        if self.kb_manager:
            for kb_id in self.kb_manager.kb_insts.keys():
                kb_document_tables[kb_id] = "documents"

        # Collect the list of attachment files
        attachment_files = []
        for attachment in main_data.get("attachments", []):
            attachment_id = attachment.get("attachment_id", "")
            path = attachment.get("path", "")
            if attachment_id and path:
                ext = os.path.splitext(path)[1]
                attachment_files.append(f"{attachment_id}{ext}")

        # Collect knowledge base media files
        kb_media_files: dict[str, list[str]] = {}
        if self.kb_manager:
            for kb_id, kb_helper in self.kb_manager.kb_insts.items():
                media_files: list[str] = []
                media_dir = kb_helper.kb_medias_dir
                if media_dir.exists():
                    for root, _, files in os.walk(media_dir):
                        for file in files:
                            media_files.append(file)
                if media_files:
                    kb_media_files[kb_id] = media_files

        manifest = {
            "version": BACKUP_MANIFEST_VERSION,
            "astrbot_version": VERSION,
            "exported_at": datetime.now(timezone.utc).isoformat(),
            "schema_version": {
                "main_db": "v4",
                "kb_db": "v1",
            },
            "tables": {
                "main_db": list(main_data.keys()),
                "kb_metadata": list(kb_meta_data.keys()),
                "kb_documents": kb_document_tables,
            },
            "files": {
                "attachments": attachment_files,
                "kb_media": kb_media_files,
            },
            "directories": list(dir_stats.keys()),
            "checksums": self._checksums,
            "statistics": {
                "main_db": {
                    table: len(records) for table, records in main_data.items()
                },
                "kb_metadata": {
                    table: len(records) for table, records in kb_meta_data.items()
                },
                "directories": dir_stats,
            },
        }

        return manifest