arisa 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +191 -0
- package/README.md +200 -0
- package/SOUL.md +36 -0
- package/bin/arisa.js +85 -0
- package/package.json +43 -0
- package/scripts/test-secrets.ts +22 -0
- package/src/core/attachments.ts +104 -0
- package/src/core/auth.ts +58 -0
- package/src/core/context.ts +30 -0
- package/src/core/file-detector.ts +39 -0
- package/src/core/format.ts +159 -0
- package/src/core/history.ts +193 -0
- package/src/core/index.ts +437 -0
- package/src/core/intent.ts +112 -0
- package/src/core/media.ts +144 -0
- package/src/core/onboarding.ts +115 -0
- package/src/core/processor.ts +268 -0
- package/src/core/router.ts +64 -0
- package/src/core/scheduler.ts +192 -0
- package/src/daemon/agent-cli.ts +119 -0
- package/src/daemon/autofix.ts +116 -0
- package/src/daemon/bridge.ts +162 -0
- package/src/daemon/channels/base.ts +10 -0
- package/src/daemon/channels/telegram.ts +306 -0
- package/src/daemon/fallback.ts +49 -0
- package/src/daemon/index.ts +213 -0
- package/src/daemon/lifecycle.ts +288 -0
- package/src/daemon/setup.ts +79 -0
- package/src/shared/config.ts +130 -0
- package/src/shared/db.ts +304 -0
- package/src/shared/deepbase-secure.ts +39 -0
- package/src/shared/logger.ts +42 -0
- package/src/shared/paths.ts +90 -0
- package/src/shared/ports.ts +98 -0
- package/src/shared/secrets.ts +136 -0
- package/src/shared/types.ts +103 -0
- package/tsconfig.json +19 -0
package/src/shared/db.ts
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module shared/db
|
|
3
|
+
* @role Unified persistence layer using deepbase
|
|
4
|
+
* @responsibilities
|
|
5
|
+
* - Persist scheduled tasks, authorized users, onboarded users, queue messages
|
|
6
|
+
* - Provide type-safe operations with JSON storage
|
|
7
|
+
* - Auto-connect on first operation
|
|
8
|
+
* @dependencies deepbase, shared/types, shared/config
|
|
9
|
+
* @effects Disk I/O (runtime db directory)
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import DeepBase from "deepbase";
|
|
13
|
+
import { config } from "./config";
|
|
14
|
+
import type { ScheduledTask, AttachmentRecord, MessageRecord } from "./types";
|
|
15
|
+
import { DeepbaseSecure } from "./deepbase-secure";
|
|
16
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync } from "fs";
|
|
17
|
+
import { randomBytes } from "crypto";
|
|
18
|
+
import { dirname } from "path";
|
|
19
|
+
|
|
20
|
+
const DB_PATH = `${config.arisaDir}/db`;
|
|
21
|
+
const ARISA_DB_FILE = `${DB_PATH}/arisa.json`;
|
|
22
|
+
const LEGACY_DB_FILE = `${DB_PATH}/tinyclaw.json`;
|
|
23
|
+
|
|
24
|
+
function readDbJson(path: string): Record<string, any> {
|
|
25
|
+
try {
|
|
26
|
+
if (!existsSync(path)) return {};
|
|
27
|
+
const raw = readFileSync(path, "utf8").trim();
|
|
28
|
+
if (!raw) return {};
|
|
29
|
+
const parsed = JSON.parse(raw);
|
|
30
|
+
return parsed && typeof parsed === "object" ? parsed : {};
|
|
31
|
+
} catch {
|
|
32
|
+
return {};
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
function mergeLegacyIntoArisa(): void {
|
|
37
|
+
if (!existsSync(LEGACY_DB_FILE)) return;
|
|
38
|
+
|
|
39
|
+
// If arisa DB doesn't exist yet, seed it from legacy and keep legacy file as backup/history.
|
|
40
|
+
if (!existsSync(ARISA_DB_FILE)) {
|
|
41
|
+
try {
|
|
42
|
+
mkdirSync(DB_PATH, { recursive: true });
|
|
43
|
+
copyFileSync(LEGACY_DB_FILE, ARISA_DB_FILE);
|
|
44
|
+
} catch {
|
|
45
|
+
// Best-effort migration; if copy fails, app can still run on a fresh arisa DB.
|
|
46
|
+
}
|
|
47
|
+
return;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
const arisa = readDbJson(ARISA_DB_FILE);
|
|
51
|
+
const legacy = readDbJson(LEGACY_DB_FILE);
|
|
52
|
+
let changed = false;
|
|
53
|
+
|
|
54
|
+
for (const [collection, legacyCollection] of Object.entries(legacy)) {
|
|
55
|
+
if (!legacyCollection || typeof legacyCollection !== "object") continue;
|
|
56
|
+
|
|
57
|
+
const arisaCollection = arisa[collection];
|
|
58
|
+
if (!arisaCollection || typeof arisaCollection !== "object") {
|
|
59
|
+
arisa[collection] = legacyCollection;
|
|
60
|
+
changed = true;
|
|
61
|
+
continue;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
for (const [key, value] of Object.entries(legacyCollection as Record<string, any>)) {
|
|
65
|
+
if (!(key in arisaCollection)) {
|
|
66
|
+
arisaCollection[key] = value;
|
|
67
|
+
changed = true;
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
if (changed) {
|
|
73
|
+
try {
|
|
74
|
+
writeFileSync(ARISA_DB_FILE, JSON.stringify(arisa, null, 4));
|
|
75
|
+
} catch {
|
|
76
|
+
// Ignore write failures; runtime will continue with current DB state.
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
// Ensure legacy data is available in arisa DB before DeepBase picks a file.
// Runs at import time, so this module's first importer pays the migration cost.
mergeLegacyIntoArisa();

// Initialize deepbase with the storage directory
// (backing file is <DB_PATH>/arisa.json — see ARISA_DB_FILE above).
const db = new DeepBase({
  path: DB_PATH,
  name: "arisa",
});
|
|
89
|
+
|
|
90
|
+
// Initialize encrypted secrets database
|
|
91
|
+
function getOrCreateEncryptionKey(): string {
|
|
92
|
+
const keyPath = `${config.arisaDir}/.encryption_key`;
|
|
93
|
+
const keyDir = dirname(keyPath);
|
|
94
|
+
|
|
95
|
+
if (!existsSync(keyDir)) {
|
|
96
|
+
mkdirSync(keyDir, { recursive: true });
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
if (existsSync(keyPath)) {
|
|
100
|
+
return readFileSync(keyPath, 'utf-8').trim();
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
const key = randomBytes(32).toString('hex');
|
|
104
|
+
writeFileSync(keyPath, key, { mode: 0o600 });
|
|
105
|
+
return key;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
// Encrypted store for API keys; lives next to the main DB as "secrets".
// NOTE(review): shared/secrets.ts constructs its own DeepbaseSecure over what
// looks like the same db dir and name — confirm the two modules agree on the
// stored record shape (this file writes { value } records).
const secretsDb = new DeepbaseSecure({
  path: DB_PATH,
  name: "secrets",
  encryptionKey: getOrCreateEncryptionKey(),
});
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* Helper functions for common operations
|
|
116
|
+
*/
|
|
117
|
+
|
|
118
|
+
// Tasks
|
|
119
|
+
export async function getTasks(): Promise<ScheduledTask[]> {
|
|
120
|
+
const tasks = await db.values("tasks");
|
|
121
|
+
return tasks || [];
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
export async function getTask(id: string): Promise<ScheduledTask | null> {
|
|
125
|
+
return await db.get("tasks", id);
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
export async function addTask(task: ScheduledTask): Promise<void> {
|
|
129
|
+
await db.set("tasks", task.id, task);
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
export async function updateTask(id: string, updates: Partial<ScheduledTask>): Promise<void> {
|
|
133
|
+
const existing = await db.get("tasks", id);
|
|
134
|
+
if (existing) {
|
|
135
|
+
await db.set("tasks", id, { ...existing, ...updates });
|
|
136
|
+
}
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
export async function deleteTask(id: string): Promise<void> {
|
|
140
|
+
await db.del("tasks", id);
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
export async function deleteTasks(filter: Partial<ScheduledTask>): Promise<number> {
|
|
144
|
+
const tasks = await getTasks();
|
|
145
|
+
let deleted = 0;
|
|
146
|
+
|
|
147
|
+
for (const task of tasks) {
|
|
148
|
+
const matches = Object.entries(filter).every(([key, value]) => {
|
|
149
|
+
return task[key as keyof ScheduledTask] === value;
|
|
150
|
+
});
|
|
151
|
+
|
|
152
|
+
if (matches) {
|
|
153
|
+
await db.del("tasks", task.id);
|
|
154
|
+
deleted++;
|
|
155
|
+
}
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
return deleted;
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
// Authorized users
|
|
162
|
+
export async function isAuthorized(userId: string): Promise<boolean> {
|
|
163
|
+
const user = await db.get("authorized", userId);
|
|
164
|
+
return user !== null && user !== undefined;
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
export async function addAuthorized(userId: string): Promise<void> {
|
|
168
|
+
await db.set("authorized", userId, { userId });
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
export async function getAuthorizedUsers(): Promise<string[]> {
|
|
172
|
+
const users = await db.keys("authorized");
|
|
173
|
+
return users || [];
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
// Onboarded users
|
|
177
|
+
export async function isOnboarded(userId: string): Promise<boolean> {
|
|
178
|
+
const user = await db.get("onboarded", userId);
|
|
179
|
+
return user !== null && user !== undefined;
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
export async function addOnboarded(userId: string): Promise<void> {
|
|
183
|
+
await db.set("onboarded", userId, { userId });
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
export async function getOnboardedUsers(): Promise<string[]> {
|
|
187
|
+
const users = await db.keys("onboarded");
|
|
188
|
+
return users || [];
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
// Queue operations
|
|
192
|
+
export async function enqueueMessage(message: {
|
|
193
|
+
id: string;
|
|
194
|
+
chatId: string | number;
|
|
195
|
+
text: string;
|
|
196
|
+
type: "heartbeat" | "message";
|
|
197
|
+
}): Promise<void> {
|
|
198
|
+
await db.set("queue", message.id, {
|
|
199
|
+
...message,
|
|
200
|
+
timestamp: Date.now(),
|
|
201
|
+
});
|
|
202
|
+
}
|
|
203
|
+
|
|
204
|
+
export async function dequeueMessages(limit?: number): Promise<any[]> {
|
|
205
|
+
const messages = await db.values("queue");
|
|
206
|
+
if (!messages || messages.length === 0) return [];
|
|
207
|
+
|
|
208
|
+
const sorted = messages.sort((a: any, b: any) => a.timestamp - b.timestamp);
|
|
209
|
+
const batch = limit ? sorted.slice(0, limit) : sorted;
|
|
210
|
+
|
|
211
|
+
// Delete the dequeued messages
|
|
212
|
+
for (const msg of batch) {
|
|
213
|
+
await db.del("queue", msg.id);
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
return batch;
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
export async function getQueueSize(): Promise<number> {
|
|
220
|
+
const messages = await db.keys("queue");
|
|
221
|
+
return messages ? messages.length : 0;
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
export async function clearQueue(): Promise<void> {
|
|
225
|
+
const keys = await db.keys("queue");
|
|
226
|
+
if (keys) {
|
|
227
|
+
for (const key of keys) {
|
|
228
|
+
await db.del("queue", key);
|
|
229
|
+
}
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
// Settings (auth token, etc.)
|
|
234
|
+
export async function getSetting(key: string): Promise<string | null> {
|
|
235
|
+
const val = await db.get("settings", key);
|
|
236
|
+
return val?.value ?? null;
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
export async function setSetting(key: string, value: string): Promise<void> {
|
|
240
|
+
await db.set("settings", key, { value });
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
// Attachments
|
|
244
|
+
export async function addAttachment(record: AttachmentRecord): Promise<void> {
|
|
245
|
+
await db.set("attachments", record.id, record);
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
export async function getAttachments(chatId?: string): Promise<AttachmentRecord[]> {
|
|
249
|
+
const all = (await db.values("attachments")) || [];
|
|
250
|
+
if (!chatId) return all;
|
|
251
|
+
return all.filter((a: AttachmentRecord) => a.chatId === chatId);
|
|
252
|
+
}
|
|
253
|
+
|
|
254
|
+
export async function getAttachment(id: string): Promise<AttachmentRecord | null> {
|
|
255
|
+
return await db.get("attachments", id);
|
|
256
|
+
}
|
|
257
|
+
|
|
258
|
+
export async function deleteAttachment(id: string): Promise<void> {
|
|
259
|
+
await db.del("attachments", id);
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
export async function getExpiredAttachments(maxAgeDays: number): Promise<AttachmentRecord[]> {
|
|
263
|
+
const all = (await db.values("attachments")) || [];
|
|
264
|
+
const cutoff = Date.now() - maxAgeDays * 24 * 60 * 60 * 1000;
|
|
265
|
+
return all.filter((a: AttachmentRecord) => a.createdAt < cutoff);
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
// Messages (ledger)
|
|
269
|
+
export async function saveMessageRecord(record: MessageRecord): Promise<void> {
|
|
270
|
+
await db.set("messages", record.id, record);
|
|
271
|
+
}
|
|
272
|
+
|
|
273
|
+
export async function getMessageRecord(chatId: string, messageId: number): Promise<MessageRecord | null> {
|
|
274
|
+
return await db.get("messages", `${chatId}_${messageId}`);
|
|
275
|
+
}
|
|
276
|
+
|
|
277
|
+
export async function cleanupOldMessages(maxAgeDays: number): Promise<number> {
|
|
278
|
+
const all = (await db.values("messages")) || [];
|
|
279
|
+
const cutoff = Date.now() - maxAgeDays * 24 * 60 * 60 * 1000;
|
|
280
|
+
let deleted = 0;
|
|
281
|
+
for (const record of all) {
|
|
282
|
+
if ((record as MessageRecord).timestamp < cutoff) {
|
|
283
|
+
await db.del("messages", (record as MessageRecord).id);
|
|
284
|
+
deleted++;
|
|
285
|
+
}
|
|
286
|
+
}
|
|
287
|
+
return deleted;
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
// Secrets (API keys stored encrypted)
|
|
291
|
+
export async function getSecret(key: string): Promise<string | null> {
|
|
292
|
+
const val = await secretsDb.get("secrets", key);
|
|
293
|
+
return val?.value ?? null;
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
export async function setSecret(key: string, value: string): Promise<void> {
|
|
297
|
+
await secretsDb.set("secrets", key, { value });
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
export async function deleteSecret(key: string): Promise<void> {
|
|
301
|
+
await secretsDb.del("secrets", key);
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
export { db, secretsDb };
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import CryptoJS from 'crypto-js';
|
|
2
|
+
import DeepBase from 'deepbase';
|
|
3
|
+
import { JsonDriver } from 'deepbase-json';
|
|
4
|
+
|
|
5
|
+
/** Constructor options: where the JSON file lives and the AES passphrase. */
interface DeepbaseSecureOptions {
  encryptionKey: string;
  path: string;
  name: string;
}

/**
 * DeepBase variant whose on-disk JSON is AES-encrypted via crypto-js.
 * On-disk format: "<ivHex>:<ciphertext>".
 */
export class DeepbaseSecure extends DeepBase {
  constructor(opts: DeepbaseSecureOptions) {
    const encryptionKey = opts.encryptionKey;
    const { path, name } = opts;

    // Create JSON driver with encryption
    const driver = new JsonDriver({
      path,
      name,
      stringify: (obj: any) => {
        // Fresh random 16-byte IV per write.
        const iv = CryptoJS.lib.WordArray.random(128 / 8);
        // NOTE(review): encryptionKey is a string, so crypto-js treats it as a
        // passphrase and derives key + IV itself (OpenSSL EVP_BytesToKey, salt
        // embedded in the ciphertext); the { iv } option is ignored in that
        // mode, making the prepended ivHex effectively decorative. Round-trip
        // still works, but confirm this matches the intended crypto design.
        const encrypted = CryptoJS.AES.encrypt(
          JSON.stringify(obj),
          encryptionKey,
          { iv }
        );
        return iv.toString(CryptoJS.enc.Hex) + ':' + encrypted.toString();
      },
      parse: (encryptedData: string) => {
        // Split the "<ivHex>:<ciphertext>" layout written by stringify above.
        const [ivHex, encrypted] = encryptedData.split(':');
        const iv = CryptoJS.enc.Hex.parse(ivHex);
        const bytes = CryptoJS.AES.decrypt(encrypted, encryptionKey, { iv });
        return JSON.parse(bytes.toString(CryptoJS.enc.Utf8));
      }
    });

    super(driver);
  }
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module shared/logger
|
|
3
|
+
* @role Structured logging to runtime logs dir and stdout.
|
|
4
|
+
* @responsibilities
|
|
5
|
+
* - Create named loggers per component (core, daemon, telegram, scheduler)
|
|
6
|
+
* - Write timestamped log lines to file + console
|
|
7
|
+
* - Ensure log directory exists
|
|
8
|
+
* @dependencies shared/config
|
|
9
|
+
* @effects Writes to disk (logs dir), writes to stdout
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
import { appendFileSync, mkdirSync, existsSync } from "fs";
|
|
13
|
+
import { join } from "path";
|
|
14
|
+
import { config } from "./config";
|
|
15
|
+
|
|
16
|
+
type Level = "DEBUG" | "INFO" | "WARN" | "ERROR";
|
|
17
|
+
|
|
18
|
+
export function createLogger(component: string) {
|
|
19
|
+
const logFile = join(config.logsDir, `${component}.log`);
|
|
20
|
+
|
|
21
|
+
if (!existsSync(config.logsDir)) {
|
|
22
|
+
mkdirSync(config.logsDir, { recursive: true });
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
function write(level: Level, message: string) {
|
|
26
|
+
const timestamp = new Date().toISOString();
|
|
27
|
+
const line = `[${timestamp}] [${level}] ${message}\n`;
|
|
28
|
+
console.log(line.trim());
|
|
29
|
+
try {
|
|
30
|
+
appendFileSync(logFile, line);
|
|
31
|
+
} catch {
|
|
32
|
+
// If we can't write to log file, at least console output happened
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
return {
|
|
37
|
+
debug: (msg: string) => write("DEBUG", msg),
|
|
38
|
+
info: (msg: string) => write("INFO", msg),
|
|
39
|
+
warn: (msg: string) => write("WARN", msg),
|
|
40
|
+
error: (msg: string) => write("ERROR", msg),
|
|
41
|
+
};
|
|
42
|
+
}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module shared/paths
|
|
3
|
+
* @role Resolve project and runtime data directories with migration-safe defaults.
|
|
4
|
+
* @responsibilities
|
|
5
|
+
* - Resolve project directory (supports ARISA_PROJECT_DIR override)
|
|
6
|
+
* - Resolve runtime data directory (prefers ~/.arisa by default)
|
|
7
|
+
* - Migrate legacy project-local dirs (.tinyclaw/.arisa) into ~/.arisa
|
|
8
|
+
* - Support ARISA_DATA_DIR override for advanced deployments
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { cpSync, existsSync, mkdirSync, readdirSync, renameSync, rmSync } from "fs";
|
|
12
|
+
import { homedir } from "os";
|
|
13
|
+
import { isAbsolute, join, resolve } from "path";
|
|
14
|
+
|
|
15
|
+
const DEFAULT_PROJECT_DIR = join(import.meta.dir, "..", "..");
|
|
16
|
+
const PROJECT_DIR = process.env.ARISA_PROJECT_DIR
|
|
17
|
+
? resolve(process.env.ARISA_PROJECT_DIR)
|
|
18
|
+
: DEFAULT_PROJECT_DIR;
|
|
19
|
+
|
|
20
|
+
const HOME_DATA_DIR = join(homedir(), ".arisa");
|
|
21
|
+
const PROJECT_ARISA_DIR = join(PROJECT_DIR, ".arisa");
|
|
22
|
+
const LEGACY_DATA_DIR = join(PROJECT_DIR, ".tinyclaw");
|
|
23
|
+
|
|
24
|
+
function samePath(a: string, b: string): boolean {
|
|
25
|
+
return resolve(a) === resolve(b);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
function hasFiles(dir: string): boolean {
|
|
29
|
+
try {
|
|
30
|
+
return existsSync(dir) && readdirSync(dir).length > 0;
|
|
31
|
+
} catch {
|
|
32
|
+
return false;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
function moveOrMerge(sourceDir: string, targetDir: string): void {
|
|
37
|
+
if (!existsSync(sourceDir) || samePath(sourceDir, targetDir)) return;
|
|
38
|
+
|
|
39
|
+
try {
|
|
40
|
+
if (!existsSync(targetDir)) {
|
|
41
|
+
mkdirSync(resolve(targetDir, ".."), { recursive: true });
|
|
42
|
+
renameSync(sourceDir, targetDir);
|
|
43
|
+
return;
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
// Merge source into target and drop source afterward.
|
|
47
|
+
cpSync(sourceDir, targetDir, { recursive: true, force: false, errorOnExist: false });
|
|
48
|
+
rmSync(sourceDir, { recursive: true, force: true });
|
|
49
|
+
} catch {
|
|
50
|
+
// Do not crash startup if migration has a filesystem issue.
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function resolveOverrideDataDir(): string | null {
|
|
55
|
+
const override = process.env.ARISA_DATA_DIR?.trim();
|
|
56
|
+
if (!override) return null;
|
|
57
|
+
return isAbsolute(override) ? override : resolve(PROJECT_DIR, override);
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
 * Pick the runtime data directory, migrating project-local dirs into ~/.arisa
 * on the way. Order matters: the env override short-circuits everything, and
 * the project-local .arisa dir is preferred over .tinyclaw as a seed source.
 */
function resolveDataDir(): string {
  const overrideDir = resolveOverrideDataDir();
  if (overrideDir) {
    return overrideDir;
  }

  // One-time migration from local runtime dirs to ~/.arisa.
  if (!hasFiles(HOME_DATA_DIR)) {
    if (hasFiles(PROJECT_ARISA_DIR)) {
      moveOrMerge(PROJECT_ARISA_DIR, HOME_DATA_DIR);
    } else if (hasFiles(LEGACY_DATA_DIR)) {
      moveOrMerge(LEGACY_DATA_DIR, HOME_DATA_DIR);
    }
  } else {
    // If ~/.arisa already exists, still merge any local leftovers into it.
    if (hasFiles(PROJECT_ARISA_DIR)) moveOrMerge(PROJECT_ARISA_DIR, HOME_DATA_DIR);
    if (hasFiles(LEGACY_DATA_DIR)) moveOrMerge(LEGACY_DATA_DIR, HOME_DATA_DIR);
  }

  if (!existsSync(HOME_DATA_DIR)) {
    mkdirSync(HOME_DATA_DIR, { recursive: true });
  }

  return HOME_DATA_DIR;
}

// Public path constants. dataDir is resolved eagerly at import time, so merely
// importing this module can perform the migration side effects above.
export const projectDir = PROJECT_DIR;
export const preferredDataDir = HOME_DATA_DIR;
export const projectLocalDataDir = PROJECT_ARISA_DIR;
export const legacyDataDir = LEGACY_DATA_DIR;
export const dataDir = resolveDataDir();
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module shared/ports
|
|
3
|
+
* @role Process cleanup via PID files + /proc scan, retry-aware Bun.serve.
|
|
4
|
+
* @effects Reads/writes runtime pid files, kills processes via SIGKILL
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import { existsSync, readFileSync, readdirSync, writeFileSync, unlinkSync, mkdirSync } from "fs";
|
|
8
|
+
import { join, dirname } from "path";
|
|
9
|
+
import { dataDir } from "./paths";
|
|
10
|
+
|
|
11
|
+
// Runtime dir that holds the <name>.pid files.
const ARISA_DIR = dataDir;

// Location of the PID file for a named process.
function pidPath(name: string): string {
  return join(ARISA_DIR, `${name}.pid`);
}

// Patterns to match in /proc cmdline for each process type
const CMDLINE_PATTERNS: Record<string, string> = {
  daemon: "daemon/index.ts",
  core: "core/index.ts",
};

/**
 * Kill previous instances of a named process, then write our PID.
 * Uses PID file + /proc scan for robustness in containers.
 */
export function claimProcess(name: string): void {
  const myPid = process.pid;

  // 1. Kill from PID file
  const path = pidPath(name);
  if (existsSync(path)) {
    try {
      const oldPid = parseInt(readFileSync(path, "utf8").trim(), 10);
      if (oldPid && oldPid !== myPid) {
        // SIGKILL: the previous instance may be wedged; no graceful shutdown.
        try { process.kill(oldPid, "SIGKILL"); } catch {}
      }
    } catch {}
  }

  // 2. Scan /proc for any matching processes (Linux containers)
  const pattern = CMDLINE_PATTERNS[name];
  if (pattern) {
    killByPattern(pattern, myPid);
  }

  // 3. Write our PID
  if (!existsSync(dirname(path))) mkdirSync(dirname(path), { recursive: true });
  writeFileSync(path, String(myPid));

  // 4. Brief pause to let OS release resources
  Bun.sleepSync(200);
}
|
|
54
|
+
|
|
55
|
+
// Scan /proc cmdline and kill processes matching a pattern (Linux only).
|
|
56
|
+
function killByPattern(pattern: string, excludePid: number): void {
|
|
57
|
+
try {
|
|
58
|
+
if (!existsSync("/proc")) return;
|
|
59
|
+
const dirs = readdirSync("/proc").filter((d) => /^\d+$/.test(d));
|
|
60
|
+
for (const pid of dirs) {
|
|
61
|
+
const numPid = Number(pid);
|
|
62
|
+
if (numPid === excludePid) continue;
|
|
63
|
+
try {
|
|
64
|
+
const cmdline = readFileSync(`/proc/${pid}/cmdline`, "utf8");
|
|
65
|
+
if (cmdline.includes(pattern)) {
|
|
66
|
+
process.kill(numPid, "SIGKILL");
|
|
67
|
+
}
|
|
68
|
+
} catch {}
|
|
69
|
+
}
|
|
70
|
+
} catch {}
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/**
|
|
74
|
+
* Remove our PID file on clean shutdown.
|
|
75
|
+
*/
|
|
76
|
+
export function releaseProcess(name: string): void {
|
|
77
|
+
try { unlinkSync(pidPath(name)); } catch {}
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
/**
|
|
81
|
+
* Bun.serve() with retry — if port is busy, waits for previous process to die.
|
|
82
|
+
*/
|
|
83
|
+
export async function serveWithRetry(
|
|
84
|
+
options: Parameters<typeof Bun.serve>[0],
|
|
85
|
+
retries = 5,
|
|
86
|
+
): Promise<ReturnType<typeof Bun.serve>> {
|
|
87
|
+
for (let i = 0; i < retries; i++) {
|
|
88
|
+
try {
|
|
89
|
+
return Bun.serve(options);
|
|
90
|
+
} catch (e: any) {
|
|
91
|
+
if (e?.code !== "EADDRINUSE" || i === retries - 1) throw e;
|
|
92
|
+
const port = (options as any).port ?? "?";
|
|
93
|
+
console.log(`[ports] Port ${port} busy, retrying (${i + 1}/${retries})...`);
|
|
94
|
+
await new Promise((r) => setTimeout(r, 1000));
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
throw new Error("unreachable");
|
|
98
|
+
}
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module shared/secrets
|
|
3
|
+
* @role Encrypted secrets storage using DeepbaseSecure
|
|
4
|
+
* @responsibilities
|
|
5
|
+
* - Generate/load encryption key
|
|
6
|
+
* - Store API keys encrypted at rest
|
|
7
|
+
* - Provide type-safe getters for secrets
|
|
8
|
+
* @dependencies DeepbaseSecure, crypto-js
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { join } from "path";
|
|
12
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, renameSync } from "fs";
|
|
13
|
+
import CryptoJS from "crypto-js";
|
|
14
|
+
import { DeepbaseSecure } from "./deepbase-secure";
|
|
15
|
+
import { dataDir } from "./paths";
|
|
16
|
+
|
|
17
|
+
const ARISA_DIR = dataDir;
|
|
18
|
+
const ENCRYPTION_KEY_PATH = join(ARISA_DIR, ".encryption_key");
|
|
19
|
+
const SECRETS_DB_PATH = join(ARISA_DIR, "db");
|
|
20
|
+
|
|
21
|
+
// Ensure runtime data and db dirs exist
|
|
22
|
+
mkdirSync(join(ARISA_DIR, "db"), { recursive: true });
|
|
23
|
+
|
|
24
|
+
/**
 * Load or generate encryption key
 */
// Hex-encoded 256-bit key, persisted with mode 0600 (owner-read-only).
function getEncryptionKey(): string {
  if (existsSync(ENCRYPTION_KEY_PATH)) {
    return readFileSync(ENCRYPTION_KEY_PATH, "utf8").trim();
  }

  // Generate random 256-bit key
  const key = CryptoJS.lib.WordArray.random(256 / 8).toString(CryptoJS.enc.Hex);
  writeFileSync(ENCRYPTION_KEY_PATH, key, { mode: 0o600 });
  return key;
}

// Module-level singletons: one key, one (re-creatable) encrypted DB handle.
// secretsDb is `let` so ensureConnected() can swap in a fresh handle after
// corruption recovery. createSecretsDb is hoisted, so calling it here is safe.
const encryptionKey = getEncryptionKey();
let secretsDb = createSecretsDb();

// Build a DeepbaseSecure over <dataDir>/db named "secrets" with the shared key.
function createSecretsDb(): DeepbaseSecure {
  return new DeepbaseSecure({
    path: SECRETS_DB_PATH,
    name: "secrets",
    encryptionKey,
  });
}

// Initialize connection
// connectionPromise memoizes the in-flight/completed connect; recoveredOnce
// limits the corrupt-DB recovery path to a single run per process.
let connectionPromise: Promise<void> | null = null;
let recoveredOnce = false;
|
|
52
|
+
|
|
53
|
+
function looksCorrupted(err: unknown): boolean {
|
|
54
|
+
const msg = err instanceof Error ? `${err.message}\n${err.stack || ""}` : String(err);
|
|
55
|
+
return /Malformed UTF-8 data|Unexpected token|JSON|decrypt|invalid/i.test(msg);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function backupCorruptSecretsDb(): void {
|
|
59
|
+
try {
|
|
60
|
+
const timestamp = Date.now();
|
|
61
|
+
for (const file of readdirSync(SECRETS_DB_PATH)) {
|
|
62
|
+
if (!file.startsWith("secrets")) continue;
|
|
63
|
+
const src = join(SECRETS_DB_PATH, file);
|
|
64
|
+
const dst = join(SECRETS_DB_PATH, `${file}.corrupt.${timestamp}`);
|
|
65
|
+
try {
|
|
66
|
+
renameSync(src, dst);
|
|
67
|
+
} catch {
|
|
68
|
+
// Best-effort backup; ignore per-file failures.
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
} catch {
|
|
72
|
+
// Ignore backup errors; recovery will still retry with a fresh DB handle.
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
// Connect the secrets DB exactly once per process, with one-shot corruption
// recovery: when the first connect fails with a corruption-looking error, back
// up the damaged files, rebuild the handle, and connect again. A failed
// attempt clears connectionPromise so a later caller can retry from scratch.
async function ensureConnected(): Promise<void> {
  if (!connectionPromise) {
    connectionPromise = (async () => {
      try {
        await secretsDb.connect();
      } catch (err) {
        if (!recoveredOnce && looksCorrupted(err)) {
          recoveredOnce = true;
          console.warn("[secrets] Encrypted secrets DB looks corrupted; backing up and recreating it.");
          backupCorruptSecretsDb();
          secretsDb = createSecretsDb();
          await secretsDb.connect();
          return;
        }
        throw err;
      }
    })().catch((err) => {
      // Drop the memoized promise so the next call can start a fresh attempt.
      connectionPromise = null;
      throw err;
    });
  }
  await connectionPromise;
}
|
|
99
|
+
|
|
100
|
+
/**
|
|
101
|
+
* Get a secret by key
|
|
102
|
+
*/
|
|
103
|
+
export async function getSecret(key: string): Promise<string | undefined> {
|
|
104
|
+
try {
|
|
105
|
+
await ensureConnected();
|
|
106
|
+
return await secretsDb.get("secrets", key);
|
|
107
|
+
} catch (err) {
|
|
108
|
+
console.warn(`[secrets] Could not read ${key} from encrypted DB: ${err}`);
|
|
109
|
+
return undefined;
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
/**
|
|
114
|
+
* Set a secret by key
|
|
115
|
+
*/
|
|
116
|
+
export async function setSecret(key: string, value: string): Promise<void> {
|
|
117
|
+
await ensureConnected();
|
|
118
|
+
await secretsDb.set("secrets", key, value);
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
/**
|
|
122
|
+
* Delete a secret by key
|
|
123
|
+
*/
|
|
124
|
+
export async function deleteSecret(key: string): Promise<void> {
|
|
125
|
+
await ensureConnected();
|
|
126
|
+
await secretsDb.del("secrets", key);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Type-safe getters for known secrets
|
|
131
|
+
*/
|
|
132
|
+
export const secrets = {
|
|
133
|
+
telegram: () => getSecret("TELEGRAM_BOT_TOKEN"),
|
|
134
|
+
openai: () => getSecret("OPENAI_API_KEY"),
|
|
135
|
+
elevenlabs: () => getSecret("ELEVENLABS_API_KEY"),
|
|
136
|
+
};
|