@poolzin/pool-bot 2026.3.23 → 2026.3.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +57 -0
- package/dist/.buildstamp +1 -1
- package/dist/acp/policy.js +52 -0
- package/dist/agents/btw.js +280 -0
- package/dist/agents/fast-mode.js +24 -0
- package/dist/agents/live-model-errors.js +23 -0
- package/dist/agents/model-auth-env-vars.js +44 -0
- package/dist/agents/model-auth-markers.js +69 -0
- package/dist/agents/models-config.providers.discovery.js +180 -0
- package/dist/agents/models-config.providers.static.js +480 -0
- package/dist/auto-reply/reply/typing-policy.js +15 -0
- package/dist/build-info.json +3 -3
- package/dist/channels/account-snapshot-fields.js +176 -0
- package/dist/channels/draft-stream-controls.js +89 -0
- package/dist/channels/inbound-debounce-policy.js +28 -0
- package/dist/channels/typing-lifecycle.js +39 -0
- package/dist/cli/program/command-registry.js +52 -0
- package/dist/commands/agent-binding.js +123 -0
- package/dist/commands/agents.commands.bind.js +280 -0
- package/dist/commands/backup-shared.js +186 -0
- package/dist/commands/backup-verify.js +236 -0
- package/dist/commands/backup.js +166 -0
- package/dist/commands/channel-account-context.js +15 -0
- package/dist/commands/channel-account.js +190 -0
- package/dist/commands/gateway-install-token.js +117 -0
- package/dist/commands/oauth-tls-preflight.js +121 -0
- package/dist/commands/ollama-setup.js +402 -0
- package/dist/commands/self-hosted-provider-setup.js +207 -0
- package/dist/commands/session-store-targets.js +12 -0
- package/dist/commands/sessions-cleanup.js +97 -0
- package/dist/cron/heartbeat-policy.js +26 -0
- package/dist/gateway/hooks-mapping.js +46 -7
- package/dist/hooks/module-loader.js +28 -0
- package/dist/infra/agent-command-binding.js +144 -0
- package/dist/infra/backup.js +328 -0
- package/dist/infra/channel-account-context.js +173 -0
- package/dist/infra/session-cleanup.js +143 -0
- package/package.json +1 -1
|
@@ -0,0 +1,328 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Backup System for Pool Bot
|
|
3
|
+
*
|
|
4
|
+
* Creates and restores backups of sessions, config, and credentials.
|
|
5
|
+
* Implemented from scratch for Pool Bot architecture.
|
|
6
|
+
*/
|
|
7
|
+
import fs from "node:fs/promises";
|
|
8
|
+
import path from "node:path";
|
|
9
|
+
// Directory layout under <cwd>/.poolbot used by the backup system.
const BACKUP_DIR = path.join(process.cwd(), ".poolbot", "backups");
// Root of all Pool Bot state (config files live directly inside it).
const CONFIG_DIR = path.join(process.cwd(), ".poolbot");
// Per-session JSON files copied by createBackup when includeSessions is set.
const SESSIONS_DIR = path.join(CONFIG_DIR, "sessions");
// Stored credential files copied when includeCredentials is set.
const CREDENTIALS_DIR = path.join(CONFIG_DIR, "credentials");
|
|
13
|
+
/**
 * Create a backup of Pool Bot data.
 *
 * Copies session files, config files, and credentials into a timestamped
 * directory under `destinationDir`, together with a metadata.json manifest.
 * Contents are staged in a temp directory and renamed into place at the end.
 *
 * @param {object} [options]
 * @param {boolean} [options.includeSessions=true] - Copy session *.json files.
 * @param {boolean} [options.includeConfig=true] - Copy config.json / poolbot.json.
 * @param {boolean} [options.includeCredentials=true] - Copy credential files.
 * @param {string} [options.destinationDir=BACKUP_DIR] - Where to place the backup.
 * @returns {Promise<object>} Report with `success`, `backupPath`, `metadata`,
 *     counters, `totalSize` and `duration`; `success:false` plus `error` on failure.
 */
export async function createBackup(options = {}) {
    const startTime = Date.now();
    const { includeSessions = true, includeConfig = true, includeCredentials = true, destinationDir = BACKUP_DIR, } = options;
    try {
        // Ensure backup directory exists
        await fs.mkdir(destinationDir, { recursive: true });
        // Manifest describing what this backup contains.
        const metadata = {
            version: "1.0",
            createdAt: startTime,
            poolBotVersion: process.env.npm_package_version || "unknown",
            includesSessions: includeSessions,
            includesConfig: includeConfig,
            includesCredentials: includeCredentials,
        };
        // Stage everything in a temp directory so a failed backup never leaves
        // a half-written "poolbot-backup-*" directory behind.
        const tempDir = path.join(destinationDir, `backup-${Date.now()}-temp`);
        await fs.mkdir(tempDir, { recursive: true });
        let totalSize = 0;
        let sessionsBackedUp = 0;
        // Backup sessions
        if (includeSessions) {
            const sessionsBackupDir = path.join(tempDir, "sessions");
            await fs.mkdir(sessionsBackupDir, { recursive: true });
            try {
                const sessionFiles = await fs.readdir(SESSIONS_DIR);
                for (const file of sessionFiles) {
                    if (file.endsWith(".json")) {
                        const srcPath = path.join(SESSIONS_DIR, file);
                        const destPath = path.join(sessionsBackupDir, file);
                        await fs.copyFile(srcPath, destPath);
                        // Fix: session file sizes were previously omitted from totalSize.
                        const stats = await fs.stat(destPath);
                        totalSize += stats.size;
                        sessionsBackedUp++;
                    }
                }
                metadata.sessionCount = sessionsBackedUp;
            }
            catch (error) {
                if (error.code !== "ENOENT") {
                    throw error;
                }
                // Sessions directory doesn't exist yet - that's ok
                metadata.sessionCount = 0;
            }
        }
        // Backup config
        if (includeConfig) {
            const configFiles = ["config.json", "poolbot.json"];
            const configBackupDir = path.join(tempDir, "config");
            await fs.mkdir(configBackupDir, { recursive: true });
            for (const configFile of configFiles) {
                const srcPath = path.join(CONFIG_DIR, configFile);
                const destPath = path.join(configBackupDir, configFile);
                try {
                    await fs.copyFile(srcPath, destPath);
                    const stats = await fs.stat(destPath);
                    totalSize += stats.size;
                    // Fix: accumulate across both config files instead of
                    // overwriting configSize with the last file's size.
                    metadata.configSize = (metadata.configSize || 0) + stats.size;
                }
                catch (error) {
                    if (error.code !== "ENOENT") {
                        throw error;
                    }
                }
            }
        }
        // Backup credentials
        if (includeCredentials) {
            const credentialsBackupDir = path.join(tempDir, "credentials");
            await fs.mkdir(credentialsBackupDir, { recursive: true });
            try {
                const credFiles = await fs.readdir(CREDENTIALS_DIR);
                for (const file of credFiles) {
                    const srcPath = path.join(CREDENTIALS_DIR, file);
                    const destPath = path.join(credentialsBackupDir, file);
                    await fs.copyFile(srcPath, destPath);
                    const stats = await fs.stat(destPath);
                    totalSize += stats.size;
                    metadata.credentialsSize = (metadata.credentialsSize || 0) + stats.size;
                }
            }
            catch (error) {
                if (error.code !== "ENOENT") {
                    throw error;
                }
                // Credentials directory doesn't exist yet - that's ok
            }
        }
        // Write metadata
        const metadataPath = path.join(tempDir, "metadata.json");
        await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
        const metadataStats = await fs.stat(metadataPath);
        totalSize += metadataStats.size;
        // Name carries a filesystem-safe ISO timestamp (":" and "." replaced).
        const timestamp = new Date(startTime).toISOString().replace(/[:.]/g, "-");
        const backupFileName = `poolbot-backup-${timestamp}.tar.gz`;
        const backupPath = path.join(destinationDir, backupFileName);
        // NOTE(review): not actually compressed — the staged directory is
        // renamed into place; a real tar.gz would need a tar library.
        const finalPath = backupPath.replace(".tar.gz", "");
        await fs.rename(tempDir, finalPath);
        const duration = Date.now() - startTime;
        return {
            success: true,
            backupPath: finalPath,
            metadata,
            sessionsBackedUp,
            configBackedUp: includeConfig,
            credentialsBackedUp: includeCredentials,
            totalSize,
            duration,
        };
    }
    catch (error) {
        const duration = Date.now() - startTime;
        return {
            success: false,
            backupPath: "",
            metadata: {
                version: "1.0",
                createdAt: startTime,
                poolBotVersion: process.env.npm_package_version || "unknown",
                includesSessions: false,
                includesConfig: false,
                includesCredentials: false,
            },
            totalSize: 0,
            duration,
            error: error.message,
        };
    }
}
|
|
146
|
+
/**
 * Restore from a backup.
 *
 * Copies sessions, config, and credentials out of a backup directory (as
 * produced by createBackup) back into the live .poolbot tree, overwriting
 * existing files of the same name.
 *
 * @param {object} options
 * @param {string} options.backupPath - Backup directory; a trailing ".tar.gz"
 *     suffix is stripped (createBackup reports that name but writes a directory).
 * @param {boolean} [options.restoreSessions=true]
 * @param {boolean} [options.restoreConfig=true]
 * @param {boolean} [options.restoreCredentials=true]
 * @param {boolean} [options.dryRun=false] - Report what would be restored without writing.
 * @returns {Promise<object>} Restore report; `success:false` plus `error` on failure.
 */
export async function restoreBackup(options) {
    const startTime = Date.now();
    const { backupPath, restoreSessions = true, restoreConfig = true, restoreCredentials = true, dryRun = false, } = options;
    try {
        // Verify backup exists
        const backupDir = backupPath.endsWith(".tar.gz")
            ? backupPath.replace(".tar.gz", "")
            : backupPath;
        const metadataPath = path.join(backupDir, "metadata.json");
        try {
            await fs.access(metadataPath);
        }
        catch {
            throw new Error(`Invalid backup: metadata.json not found at ${metadataPath}`);
        }
        // Read metadata
        const metadataContent = await fs.readFile(metadataPath, "utf-8");
        const metadata = JSON.parse(metadataContent);
        const report = {
            success: true,
            backupPath,
            dryRun,
            duration: 0,
            sessionsRestored: 0,
            configRestored: false,
            credentialsRestored: false,
        };
        if (dryRun) {
            // Just report what would be restored
            if (restoreSessions && metadata.includesSessions) {
                report.sessionsRestored = metadata.sessionCount || 0;
            }
            if (restoreConfig && metadata.includesConfig) {
                report.configRestored = true;
            }
            if (restoreCredentials && metadata.includesCredentials) {
                report.credentialsRestored = true;
            }
            report.duration = Date.now() - startTime;
            return report;
        }
        // Restore sessions
        if (restoreSessions && metadata.includesSessions) {
            const sessionsBackupDir = path.join(backupDir, "sessions");
            try {
                await fs.mkdir(SESSIONS_DIR, { recursive: true });
                const sessionFiles = await fs.readdir(sessionsBackupDir);
                for (const file of sessionFiles) {
                    if (file.endsWith(".json")) {
                        const srcPath = path.join(sessionsBackupDir, file);
                        const destPath = path.join(SESSIONS_DIR, file);
                        await fs.copyFile(srcPath, destPath);
                        report.sessionsRestored = (report.sessionsRestored || 0) + 1;
                    }
                }
            }
            catch (error) {
                // Missing sessions subdir in the backup is tolerated.
                if (error.code !== "ENOENT") {
                    throw error;
                }
            }
        }
        // Restore config
        if (restoreConfig && metadata.includesConfig) {
            const configBackupDir = path.join(backupDir, "config");
            try {
                const configFiles = await fs.readdir(configBackupDir);
                for (const file of configFiles) {
                    const srcPath = path.join(configBackupDir, file);
                    const destPath = path.join(CONFIG_DIR, file);
                    await fs.copyFile(srcPath, destPath);
                    report.configRestored = true;
                }
            }
            catch (error) {
                if (error.code !== "ENOENT") {
                    throw error;
                }
            }
        }
        // Restore credentials
        if (restoreCredentials && metadata.includesCredentials) {
            const credentialsBackupDir = path.join(backupDir, "credentials");
            try {
                await fs.mkdir(CREDENTIALS_DIR, { recursive: true });
                const credFiles = await fs.readdir(credentialsBackupDir);
                for (const file of credFiles) {
                    const srcPath = path.join(credentialsBackupDir, file);
                    const destPath = path.join(CREDENTIALS_DIR, file);
                    await fs.copyFile(srcPath, destPath);
                    report.credentialsRestored = true;
                }
            }
            catch (error) {
                if (error.code !== "ENOENT") {
                    throw error;
                }
            }
        }
        report.duration = Date.now() - startTime;
        return report;
    }
    catch (error) {
        const duration = Date.now() - startTime;
        // Fix: error report now carries the same counter fields as the success
        // report so callers can read them unconditionally.
        return {
            success: false,
            backupPath,
            dryRun,
            duration,
            sessionsRestored: 0,
            configRestored: false,
            credentialsRestored: false,
            error: error.message,
        };
    }
}
|
|
262
|
+
/**
 * List available backups.
 *
 * Scans `backupDir` for "poolbot-backup-*" directories containing a readable
 * metadata.json; entries with missing or corrupt metadata are skipped.
 *
 * @param {string} [backupDir=BACKUP_DIR] - Directory to scan (created if absent).
 * @returns {Promise<object[]>} Backups sorted newest-first by metadata.createdAt;
 *     empty array on any top-level failure.
 */
export async function listBackups(backupDir = BACKUP_DIR) {
    try {
        await fs.mkdir(backupDir, { recursive: true });
        const dirEntries = await fs.readdir(backupDir, { withFileTypes: true });
        const found = [];
        for (const dirEntry of dirEntries) {
            const isBackupDir = dirEntry.isDirectory() && dirEntry.name.startsWith("poolbot-backup-");
            if (!isBackupDir) {
                continue;
            }
            const backupPath = path.join(backupDir, dirEntry.name);
            try {
                const rawMetadata = await fs.readFile(path.join(backupPath, "metadata.json"), "utf-8");
                const metadata = JSON.parse(rawMetadata);
                // Sum the sizes of everything inside the backup directory.
                let totalSize = 0;
                const contained = await fs.readdir(backupPath, { recursive: true });
                for (const name of contained) {
                    try {
                        const stats = await fs.stat(path.join(backupPath, name));
                        totalSize += stats.size;
                    }
                    catch {
                        // File might have been deleted mid-scan; ignore it.
                    }
                }
                found.push({
                    path: backupPath,
                    metadata,
                    size: totalSize,
                    createdAt: metadata.createdAt,
                });
            }
            catch {
                // Skip invalid backups (unreadable or malformed metadata.json).
            }
        }
        // Newest first.
        return found.sort((a, b) => b.createdAt - a.createdAt);
    }
    catch {
        return [];
    }
}
|
|
309
|
+
/**
 * Delete a backup.
 *
 * Removes the backup directory (or file) at `backupPath` recursively.
 *
 * @param {string} backupPath - Path previously returned by createBackup/listBackups.
 * @returns {Promise<boolean>} true if it existed and was removed; false otherwise.
 */
export async function deleteBackup(backupPath) {
    try {
        // Probe first so a missing path reports false rather than "deleted".
        await fs.access(backupPath);
        await fs.rm(backupPath, { recursive: true, force: true });
    }
    catch {
        return false;
    }
    return true;
}
|
|
323
|
+
/**
 * Get backup directory
 *
 * @returns {string} The default backup directory (<cwd>/.poolbot/backups),
 *     as used by createBackup/listBackups when no override is given.
 */
export function getBackupDir() {
    return BACKUP_DIR;
}
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Channel Account Context System
|
|
3
|
+
*
|
|
4
|
+
* Manage multiple accounts per channel with context switching.
|
|
5
|
+
* Implemented from scratch for Pool Bot architecture.
|
|
6
|
+
*/
|
|
7
|
+
import fs from "node:fs/promises";
|
|
8
|
+
import path from "node:path";
|
|
9
|
+
// Single JSON store holding every channel account plus the per-channel
// active-account index ({ accounts: {...}, activeAccounts: {...} }).
const ACCOUNTS_FILE = path.join(process.cwd(), ".poolbot", "channel-accounts.json");
|
|
10
|
+
/**
 * Load channel accounts from file.
 *
 * @returns {Promise<{accounts: object, activeAccounts: object}>} The persisted
 *     store. A missing file yields an empty store; a file missing one of the
 *     top-level maps is normalized so both always exist (callers index into
 *     `store.accounts` and `store.activeAccounts` unconditionally).
 * @throws Rethrows any read error other than ENOENT.
 */
export async function loadChannelAccounts() {
    try {
        const data = await fs.readFile(ACCOUNTS_FILE, "utf-8");
        const parsed = JSON.parse(data);
        // Fix: normalize shape so a hand-edited or partially-written file
        // without `accounts`/`activeAccounts` cannot crash every caller.
        return {
            ...parsed,
            accounts: parsed.accounts || {},
            activeAccounts: parsed.activeAccounts || {},
        };
    }
    catch (error) {
        if (error.code === "ENOENT") {
            return { accounts: {}, activeAccounts: {} };
        }
        throw error;
    }
}
|
|
25
|
+
/**
 * Save channel accounts to file.
 *
 * Ensures the parent directory exists, then writes the whole store as
 * pretty-printed JSON.
 *
 * @param {{accounts: object, activeAccounts: object}} store
 */
export async function saveChannelAccounts(store) {
    const storeDir = path.dirname(ACCOUNTS_FILE);
    await fs.mkdir(storeDir, { recursive: true });
    const serialized = JSON.stringify(store, null, 2);
    await fs.writeFile(ACCOUNTS_FILE, serialized);
}
|
|
32
|
+
/**
 * Add a new channel account.
 *
 * Generates a unique account id, stores the record under the composite key
 * "channelId:accountId", and persists the store.
 *
 * @param {object} params - { channelId, channelType, accountName, credentials?, metadata? }
 * @returns {Promise<object>} The newly created (inactive) account record.
 */
export async function addChannelAccount(params) {
    const { channelId, channelType, accountName, credentials, metadata } = params;
    const createdAt = Date.now();
    // Id combines channel type, a timestamp, and a short random suffix.
    const randomSuffix = Math.random().toString(36).substring(2, 9);
    const accountId = `${channelType}-${Date.now()}-${randomSuffix}`;
    const store = await loadChannelAccounts();
    const record = {
        id: accountId,
        channelId,
        channelType,
        accountName,
        credentials: credentials || {},
        metadata: metadata || {},
        isActive: false,
        createdAt,
        updatedAt: createdAt,
    };
    store.accounts[`${channelId}:${accountId}`] = record;
    await saveChannelAccounts(store);
    return record;
}
|
|
56
|
+
/**
 * Remove a channel account.
 *
 * Deletes the record and, if it was the channel's active account, clears the
 * active-account index entry as well.
 *
 * @returns {Promise<boolean>} false when the account does not exist.
 */
export async function removeChannelAccount(channelId, accountId) {
    const store = await loadChannelAccounts();
    const key = `${channelId}:${accountId}`;
    if (!store.accounts[key]) {
        return false;
    }
    delete store.accounts[key];
    // Clear the active pointer if it referenced this account.
    if (store.activeAccounts[channelId] === accountId) {
        delete store.activeAccounts[channelId];
    }
    await saveChannelAccounts(store);
    return true;
}
|
|
73
|
+
/**
 * Get a channel account.
 *
 * @returns {Promise<object|undefined>} The stored record, or undefined if absent.
 */
export async function getChannelAccount(channelId, accountId) {
    const store = await loadChannelAccounts();
    return store.accounts[`${channelId}:${accountId}`];
}
|
|
81
|
+
/**
 * Get all accounts for a channel.
 *
 * @returns {Promise<object[]>} Every account record whose channelId matches.
 */
export async function getChannelAccounts(channelId) {
    const { accounts } = await loadChannelAccounts();
    const allRecords = Object.values(accounts);
    return allRecords.filter((candidate) => candidate.channelId === channelId);
}
|
|
88
|
+
/**
 * Get active account for a channel.
 *
 * @returns {Promise<object|undefined>} The active record, or undefined when
 *     no account is marked active for this channel.
 */
export async function getActiveChannelAccount(channelId) {
    const store = await loadChannelAccounts();
    const activeId = store.activeAccounts[channelId];
    if (!activeId) {
        return undefined;
    }
    return store.accounts[`${channelId}:${activeId}`];
}
|
|
100
|
+
/**
 * Set active account for a channel.
 *
 * Deactivates every account on the channel, then marks the chosen one active,
 * stamps its usage times, and records it in the activeAccounts index.
 *
 * @returns {Promise<boolean>} false when the account does not exist.
 */
export async function setActiveChannelAccount(channelId, accountId) {
    const store = await loadChannelAccounts();
    const key = `${channelId}:${accountId}`;
    const target = store.accounts[key];
    if (!target) {
        return false;
    }
    // Deactivate every account belonging to this channel first.
    for (const account of Object.values(store.accounts)) {
        if (account.channelId === channelId) {
            account.isActive = false;
            account.updatedAt = Date.now();
        }
    }
    // Then flip the chosen one on and index it.
    target.isActive = true;
    target.updatedAt = Date.now();
    target.lastUsedAt = Date.now();
    store.activeAccounts[channelId] = accountId;
    await saveChannelAccounts(store);
    return true;
}
|
|
124
|
+
/**
 * Update channel account.
 *
 * Applies any provided fields; `credentials` and `metadata` are shallow-merged
 * into the existing objects rather than replaced.
 *
 * @param {object} params - { channelId, accountId, accountName?, credentials?, metadata? }
 * @returns {Promise<object|undefined>} The updated record, or undefined if absent.
 */
export async function updateChannelAccount(params) {
    const { channelId, accountId, accountName, credentials, metadata } = params;
    const store = await loadChannelAccounts();
    const key = `${channelId}:${accountId}`;
    const existing = store.accounts[key];
    if (!existing) {
        return undefined;
    }
    if (accountName !== undefined) {
        existing.accountName = accountName;
    }
    if (credentials !== undefined) {
        // Merge so untouched credential keys survive the update.
        existing.credentials = { ...existing.credentials, ...credentials };
    }
    if (metadata !== undefined) {
        existing.metadata = { ...existing.metadata, ...metadata };
    }
    existing.updatedAt = Date.now();
    store.accounts[key] = existing;
    await saveChannelAccounts(store);
    return existing;
}
|
|
149
|
+
/**
 * List all channel accounts.
 *
 * @returns {Promise<object[]>} Every stored account record, across all channels.
 */
export async function listAllChannelAccounts() {
    const { accounts } = await loadChannelAccounts();
    return Object.values(accounts);
}
|
|
156
|
+
/**
 * Get account usage statistics.
 *
 * @returns {Promise<object>} Totals: account count, distinct channels with
 *     accounts, active-account count, and a per-channelType histogram.
 */
export async function getChannelAccountStats() {
    const store = await loadChannelAccounts();
    const accounts = Object.values(store.accounts);
    const byType = {};
    const channelIds = new Set();
    let activeCount = 0;
    // Single pass over all records collects every aggregate.
    for (const account of accounts) {
        channelIds.add(account.channelId);
        if (account.isActive) {
            activeCount += 1;
        }
        byType[account.channelType] = (byType[account.channelType] || 0) + 1;
    }
    return {
        totalAccounts: accounts.length,
        channelsWithAccounts: channelIds.size,
        activeAccounts: activeCount,
        accountsByChannelType: byType,
    };
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Session Cleanup System
|
|
3
|
+
*
|
|
4
|
+
* Automatically cleans up old sessions based on disk budget.
|
|
5
|
+
* Implemented from scratch for Pool Bot architecture.
|
|
6
|
+
*/
|
|
7
|
+
import fs from "node:fs/promises";
|
|
8
|
+
import path from "node:path";
|
|
9
|
+
// Default session storage location and cleanup budget.
const DEFAULT_SESSIONS_DIR = path.join(process.cwd(), ".poolbot", "sessions");
const DEFAULT_MAX_DISK_USAGE_MB = 1000; // 1GB
// cleanupSessions never deletes below this many retained sessions.
const DEFAULT_MIN_SESSIONS_TO_KEEP = 5;
|
|
12
|
+
/**
 * Get all sessions with metadata.
 *
 * Lists *.json files in `sessionsDir` (created if absent) with their size and
 * timestamps.
 *
 * @param {string} [sessionsDir=DEFAULT_SESSIONS_DIR]
 * @returns {Promise<object[]>} Sessions sorted oldest-first by mtime; empty
 *     array on any failure.
 */
export async function getSessions(sessionsDir = DEFAULT_SESSIONS_DIR) {
    try {
        await fs.mkdir(sessionsDir, { recursive: true });
        const dirEntries = await fs.readdir(sessionsDir, { withFileTypes: true });
        const results = [];
        for (const dirEntry of dirEntries) {
            const isSessionFile = dirEntry.isFile() && dirEntry.name.endsWith(".json");
            if (!isSessionFile) {
                continue;
            }
            const fullPath = path.join(sessionsDir, dirEntry.name);
            const fileStats = await fs.stat(fullPath);
            results.push({
                path: fullPath,
                size: fileStats.size,
                createdAt: fileStats.birthtimeMs,
                modifiedAt: fileStats.mtimeMs,
            });
        }
        // Oldest first, so cleanup can walk the array front-to-back.
        results.sort((first, second) => first.modifiedAt - second.modifiedAt);
        return results;
    }
    catch {
        return [];
    }
}
|
|
39
|
+
/**
 * Calculate total disk usage.
 *
 * @param {Array<{size: number}>} sessions - Session records from getSessions.
 * @returns {Promise<number>} Sum of all session sizes in bytes.
 */
export async function calculateDiskUsage(sessions) {
    let totalBytes = 0;
    for (const { size } of sessions) {
        totalBytes += size;
    }
    return totalBytes;
}
|
|
45
|
+
/**
 * Cleanup old sessions to stay within disk budget.
 *
 * Deletes the oldest session files (by mtime) until total usage fits within
 * `maxDiskUsageMB`, always keeping at least `minSessionsToKeep` sessions.
 *
 * @param {object} [options]
 * @param {string} [options.sessionsDir=DEFAULT_SESSIONS_DIR]
 * @param {number} [options.maxDiskUsageMB=DEFAULT_MAX_DISK_USAGE_MB]
 * @param {number} [options.minSessionsToKeep=DEFAULT_MIN_SESSIONS_TO_KEEP]
 * @param {boolean} [options.dryRun=false] - Report without deleting anything.
 * @returns {Promise<object>} Cleanup report; `success:false` plus `error` on failure.
 */
export async function cleanupSessions(options = {}) {
    const startTime = Date.now();
    const { sessionsDir = DEFAULT_SESSIONS_DIR, maxDiskUsageMB = DEFAULT_MAX_DISK_USAGE_MB, minSessionsToKeep = DEFAULT_MIN_SESSIONS_TO_KEEP, dryRun = false, } = options;
    try {
        const maxDiskUsageBytes = maxDiskUsageMB * 1024 * 1024;
        // Sessions come back sorted oldest-first by mtime.
        const sessions = await getSessions(sessionsDir);
        const currentDiskUsage = await calculateDiskUsage(sessions);
        const report = {
            success: true,
            sessionsDeleted: 0,
            spaceFreedBytes: 0,
            spaceFreedMB: 0,
            sessionsRemaining: sessions.length,
            currentDiskUsageBytes: currentDiskUsage,
            currentDiskUsageMB: currentDiskUsage / (1024 * 1024),
            dryRun,
            duration: 0,
        };
        // If under budget, nothing to do
        if (currentDiskUsage <= maxDiskUsageBytes) {
            report.duration = Date.now() - startTime;
            return report;
        }
        // Select the oldest sessions until enough space would be freed,
        // never dropping below the minimum retained count.
        const sessionsToDelete = [];
        let spaceToFree = currentDiskUsage - maxDiskUsageBytes;
        for (const session of sessions) {
            if (sessions.length - sessionsToDelete.length <= minSessionsToKeep) {
                break;
            }
            sessionsToDelete.push(session);
            spaceToFree -= session.size;
            if (spaceToFree <= 0) {
                break;
            }
        }
        // Delete the selected sessions (or just tally them in dry-run mode).
        let spaceFreed = 0;
        let deletedCount = 0;
        for (const session of sessionsToDelete) {
            if (dryRun) {
                spaceFreed += session.size;
                deletedCount++;
            }
            else {
                try {
                    await fs.unlink(session.path);
                    spaceFreed += session.size;
                    deletedCount++;
                }
                catch (error) {
                    // Best-effort: log and continue with the remaining sessions.
                    console.error(`Failed to delete session ${session.path}:`, error.message);
                }
            }
        }
        // Fix: report only sessions actually deleted. Previously sessionsDeleted
        // and sessionsRemaining counted every candidate even when unlink failed,
        // disagreeing with spaceFreed.
        report.sessionsDeleted = deletedCount;
        report.spaceFreedBytes = spaceFreed;
        report.spaceFreedMB = spaceFreed / (1024 * 1024);
        report.sessionsRemaining = sessions.length - deletedCount;
        report.currentDiskUsageBytes = currentDiskUsage - spaceFreed;
        report.currentDiskUsageMB = (currentDiskUsage - spaceFreed) / (1024 * 1024);
        report.duration = Date.now() - startTime;
        return report;
    }
    catch (error) {
        const duration = Date.now() - startTime;
        return {
            success: false,
            sessionsDeleted: 0,
            spaceFreedBytes: 0,
            spaceFreedMB: 0,
            sessionsRemaining: 0,
            currentDiskUsageBytes: 0,
            currentDiskUsageMB: 0,
            dryRun,
            duration,
            error: error.message,
        };
    }
}
|
|
128
|
+
/**
 * Get session cleanup status.
 *
 * @param {object} [options] - Fix: now defaults to {} so a no-argument call no
 *     longer throws a destructuring TypeError (consistent with cleanupSessions
 *     and getSessions, which already default their arguments).
 * @param {string} [options.sessionsDir=DEFAULT_SESSIONS_DIR]
 * @param {number} [options.maxDiskUsageMB=DEFAULT_MAX_DISK_USAGE_MB]
 * @returns {Promise<object>} Current usage vs. budget summary.
 */
export async function getSessionCleanupStatus(options = {}) {
    const { sessionsDir = DEFAULT_SESSIONS_DIR, maxDiskUsageMB = DEFAULT_MAX_DISK_USAGE_MB } = options;
    const sessions = await getSessions(sessionsDir);
    const currentUsage = await calculateDiskUsage(sessions);
    const maxUsageBytes = maxDiskUsageMB * 1024 * 1024;
    return {
        currentUsageMB: currentUsage / (1024 * 1024),
        maxUsageMB: maxDiskUsageMB,
        usagePercent: (currentUsage / maxUsageBytes) * 100,
        sessionCount: sessions.length,
        overBudget: currentUsage > maxUsageBytes,
    };
}
|