@statechange/council 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +207 -0
- package/council/creative/ABOUT.md +23 -0
- package/council/critic/ABOUT.md +23 -0
- package/council/strategist/ABOUT.md +21 -0
- package/dist/backends/anthropic.d.ts +2 -0
- package/dist/backends/anthropic.js +57 -0
- package/dist/backends/anthropic.js.map +1 -0
- package/dist/backends/google.d.ts +2 -0
- package/dist/backends/google.js +68 -0
- package/dist/backends/google.js.map +1 -0
- package/dist/backends/index.d.ts +3 -0
- package/dist/backends/index.js +47 -0
- package/dist/backends/index.js.map +1 -0
- package/dist/backends/ollama.d.ts +2 -0
- package/dist/backends/ollama.js +68 -0
- package/dist/backends/ollama.js.map +1 -0
- package/dist/backends/openai.d.ts +2 -0
- package/dist/backends/openai.js +62 -0
- package/dist/backends/openai.js.map +1 -0
- package/dist/backends/types.d.ts +1 -0
- package/dist/backends/types.js +2 -0
- package/dist/backends/types.js.map +1 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +152 -0
- package/dist/cli.js.map +1 -0
- package/dist/commands/config.d.ts +7 -0
- package/dist/commands/config.js +118 -0
- package/dist/commands/config.js.map +1 -0
- package/dist/commands/counsellor.d.ts +7 -0
- package/dist/commands/counsellor.js +98 -0
- package/dist/commands/counsellor.js.map +1 -0
- package/dist/commands/discuss.d.ts +12 -0
- package/dist/commands/discuss.js +154 -0
- package/dist/commands/discuss.js.map +1 -0
- package/dist/commands/history.d.ts +5 -0
- package/dist/commands/history.js +46 -0
- package/dist/commands/history.js.map +1 -0
- package/dist/commands/list.d.ts +5 -0
- package/dist/commands/list.js +40 -0
- package/dist/commands/list.js.map +1 -0
- package/dist/core/conversation-engine.d.ts +13 -0
- package/dist/core/conversation-engine.js +226 -0
- package/dist/core/conversation-engine.js.map +1 -0
- package/dist/core/counsellor-loader.d.ts +4 -0
- package/dist/core/counsellor-loader.js +97 -0
- package/dist/core/counsellor-loader.js.map +1 -0
- package/dist/core/counsellor-registry.d.ts +12 -0
- package/dist/core/counsellor-registry.js +131 -0
- package/dist/core/counsellor-registry.js.map +1 -0
- package/dist/core/excalidraw-cheatsheet.d.ts +5 -0
- package/dist/core/excalidraw-cheatsheet.js +65 -0
- package/dist/core/excalidraw-cheatsheet.js.map +1 -0
- package/dist/core/history.d.ts +16 -0
- package/dist/core/history.js +74 -0
- package/dist/core/history.js.map +1 -0
- package/dist/core/infographic.d.ts +4 -0
- package/dist/core/infographic.js +81 -0
- package/dist/core/infographic.js.map +1 -0
- package/dist/core/key-scanner.d.ts +8 -0
- package/dist/core/key-scanner.js +79 -0
- package/dist/core/key-scanner.js.map +1 -0
- package/dist/core/logger.d.ts +5 -0
- package/dist/core/logger.js +38 -0
- package/dist/core/logger.js.map +1 -0
- package/dist/core/output-formatter.d.ts +2 -0
- package/dist/core/output-formatter.js +47 -0
- package/dist/core/output-formatter.js.map +1 -0
- package/dist/core/secretary.d.ts +23 -0
- package/dist/core/secretary.js +171 -0
- package/dist/core/secretary.js.map +1 -0
- package/dist/core/skill-loader.d.ts +2 -0
- package/dist/core/skill-loader.js +32 -0
- package/dist/core/skill-loader.js.map +1 -0
- package/dist/electron/ipc-handlers.d.ts +3 -0
- package/dist/electron/ipc-handlers.js +477 -0
- package/dist/electron/ipc-handlers.js.map +1 -0
- package/dist/electron/main.d.ts +1 -0
- package/dist/electron/main.js +85 -0
- package/dist/electron/main.js.map +1 -0
- package/dist/electron/preload.d.ts +1 -0
- package/dist/electron/preload.js +38 -0
- package/dist/electron/preload.js.map +1 -0
- package/dist/types.d.ts +184 -0
- package/dist/types.js +12 -0
- package/dist/types.js.map +1 -0
- package/dist-electron/main.js +1635 -0
- package/package.json +87 -0
- package/skills/council-manage/SKILL.md +214 -0
- package/skills/council-setup-keys/SKILL.md +127 -0
|
@@ -0,0 +1,1635 @@
|
|
|
1
|
+
import { shell, dialog, protocol, app, net, BrowserWindow, ipcMain } from "electron";
|
|
2
|
+
import { join, basename, resolve, dirname } from "node:path";
|
|
3
|
+
import { fileURLToPath, pathToFileURL } from "node:url";
|
|
4
|
+
import { existsSync, writeFileSync, appendFileSync } from "node:fs";
|
|
5
|
+
import { readFile, readdir, stat, mkdir, rm, writeFile, appendFile } from "node:fs/promises";
|
|
6
|
+
import { execFile } from "node:child_process";
|
|
7
|
+
import { promisify } from "node:util";
|
|
8
|
+
import { homedir } from "node:os";
|
|
9
|
+
import matter from "gray-matter";
|
|
10
|
+
import { z } from "zod";
|
|
11
|
+
import Anthropic from "@anthropic-ai/sdk";
|
|
12
|
+
import OpenAI from "openai";
|
|
13
|
+
import { GoogleGenerativeAI } from "@google/generative-ai";
|
|
14
|
+
import { Ollama } from "ollama";
|
|
15
|
+
import { createRequire } from "node:module";
|
|
16
|
+
import "dotenv/config";
|
|
17
|
+
// Zod schema for the YAML frontmatter of a counsellor's ABOUT.md.
// `backend` must be one of the four supported providers; `interests` and
// `skills` default to empty arrays when omitted; `temperature`, when given,
// must lie in the 0–2 range accepted by the chat APIs.
const CounsellorFrontmatterSchema = z.object({
  name: z.string(),
  description: z.string(),
  interests: z.array(z.string()).default([]),
  backend: z.enum(["anthropic", "openai", "google", "ollama"]),
  model: z.string().optional(),
  skills: z.array(z.string()).default([]),
  temperature: z.number().min(0).max(2).optional(),
  avatar: z.string().optional()
});
|
|
27
|
+
// Candidate locations for a skill's SKILL.md, in lookup-priority order:
// the counsellor's own skills dir, then the project-local .claude dir,
// then the user-wide ~/.agents and ~/.claude skill dirs.
const searchPaths = (counsellorDir, skillName) => {
  const tail = ["skills", skillName, "SKILL.md"];
  return [
    join(counsellorDir, ...tail),
    join(process.cwd(), ".claude", ...tail),
    join(homedir(), ".agents", ...tail),
    join(homedir(), ".claude", ...tail)
  ];
};
|
|
33
|
+
// Load the body (frontmatter stripped) of the first SKILL.md found for
// `skillName` across the standard search locations; null when none exists.
async function resolveSkill(skillName, counsellorDir) {
  const hit = searchPaths(counsellorDir, skillName).find((p) => existsSync(p));
  if (!hit) return null;
  const raw = await readFile(hit, "utf-8");
  const { content } = matter(raw);
  return content.trim();
}
|
|
43
|
+
// Resolve each named skill to a markdown section ("## Skill: <name>" header
// plus body); skills that cannot be found are silently omitted. Returns the
// sections joined by blank lines (empty string when nothing resolved).
async function resolveSkills(skillNames, counsellorDir) {
  const parts = [];
  for (const skillName of skillNames) {
    const body = await resolveSkill(skillName, counsellorDir);
    if (body) {
      parts.push(`## Skill: ${skillName}\n\n${body}`);
    }
  }
  return parts.join("\n\n");
}
|
|
55
|
+
// Turn a counsellor's `avatar` frontmatter value into a renderer-loadable URL.
// - undefined/empty -> undefined (no avatar)
// - http(s) URLs pass through unchanged
// - file paths become council-file:// URLs served by the custom protocol
function resolveAvatar(avatar, dirPath) {
  if (!avatar) return void 0;
  if (avatar.startsWith("http://") || avatar.startsWith("https://")) return avatar;
  // resolve() keeps an already-absolute path as-is — including Windows drive
  // paths like "C:\...", which the previous startsWith("/") check missed —
  // and anchors a relative path at the counsellor directory.
  return `council-file://${resolve(dirPath, avatar)}`;
}
|
|
61
|
+
// Expand {{relative/path}} references in ABOUT.md content by splicing in the
// referenced file's trimmed text; unresolvable references become a visible
// "[Reference not found: ...]" marker. Referenced content is NOT re-scanned
// for nested references.
async function resolveReferences(content, counsellorDir) {
  const refPattern = /\{\{(.+?)\}\}/g;
  let resolved = content;
  for (const match of content.matchAll(refPattern)) {
    const refPath = match[1].trim();
    const fullPath = join(counsellorDir, refPath);
    const replacement = existsSync(fullPath)
      ? (await readFile(fullPath, "utf-8")).trim()
      : `[Reference not found: ${refPath}]`;
    // Pass a replacer function so "$&", "$'" etc. inside the referenced file
    // are inserted literally instead of being expanded as String.replace
    // replacement patterns (the original string form corrupted such content).
    resolved = resolved.replace(match[0], () => replacement);
  }
  return resolved;
}
|
|
76
|
+
// Load one counsellor from a directory containing an ABOUT.md.
// Pipeline: parse frontmatter (validated against CounsellorFrontmatterSchema,
// throws ZodError on bad data), expand {{file}} references in the body to
// build the system prompt, then append any resolved skill sections.
// The counsellor id is the directory's basename.
async function loadSingleCounsellor(dirPath) {
  const absPath = resolve(dirPath);
  const aboutPath = join(absPath, "ABOUT.md");
  if (!existsSync(aboutPath)) {
    throw new Error(`No ABOUT.md found in ${absPath}`);
  }
  const raw = await readFile(aboutPath, "utf-8");
  const { data, content } = matter(raw);
  const frontmatter = CounsellorFrontmatterSchema.parse(data);
  let systemPrompt = await resolveReferences(content.trim(), absPath);
  if (frontmatter.skills.length > 0) {
    const skillContent = await resolveSkills(frontmatter.skills, absPath);
    if (skillContent) {
      systemPrompt += "\n\n" + skillContent;
    }
  }
  return {
    id: basename(absPath),
    frontmatter,
    systemPrompt,
    dirPath: absPath,
    avatarUrl: resolveAvatar(frontmatter.avatar, absPath)
  };
}
|
|
100
|
+
// Discover and load every counsellor, merging two sources in priority order:
//   1. explicitly registered directories (paths from the config registry),
//   2. subdirectories of `councilDir` that contain an ABOUT.md.
// Registered entries win id collisions because they are loaded first; seenIds
// dedupes by directory basename. Throws when nothing loadable is found.
async function loadCounsellors(councilDir, registeredPaths) {
  const counsellors = [];
  const seenIds = /* @__PURE__ */ new Set();
  if (registeredPaths?.length) {
    for (const rPath of registeredPaths) {
      // Skip registered paths whose ABOUT.md has disappeared (e.g. a clone
      // deleted out-of-band).
      if (!existsSync(join(rPath, "ABOUT.md"))) continue;
      try {
        const c = await loadSingleCounsellor(rPath);
        if (!seenIds.has(c.id)) {
          counsellors.push(c);
          seenIds.add(c.id);
        }
      } catch {
        // Best-effort: one malformed registered counsellor must not prevent
        // the rest of the council from loading.
      }
    }
  }
  if (existsSync(councilDir)) {
    const entries = await readdir(councilDir);
    for (const entry of entries) {
      const entryPath = join(councilDir, entry);
      const info = await stat(entryPath);
      if (info.isDirectory() && existsSync(join(entryPath, "ABOUT.md"))) {
        if (!seenIds.has(basename(entryPath))) {
          // Note: unlike the registered branch above, a load error here
          // propagates to the caller.
          counsellors.push(await loadSingleCounsellor(entryPath));
          seenIds.add(basename(entryPath));
        }
      }
    }
  }
  if (counsellors.length === 0) {
    throw new Error(`No counsellors found in ${councilDir}. Each counsellor needs a directory with an ABOUT.md file.`);
  }
  return counsellors;
}
|
|
134
|
+
// Promisified child_process.execFile (used below for `git clone`).
const execFileAsync = promisify(execFile);
// User-level registry/config file, and the directory where remote counsellor
// repositories are cloned.
const CONFIG_PATH = join(homedir(), ".ai-council", "config.json");
const CLONES_DIR = join(homedir(), ".ai-council", "counsellors");
|
|
137
|
+
// Read and parse ~/.ai-council/config.json. A missing, unreadable, or
// invalid file falls back to the empty default config.
async function loadConfig$1() {
  let parsed;
  try {
    const raw = await readFile(CONFIG_PATH, "utf-8");
    parsed = JSON.parse(raw);
  } catch {
    parsed = { backends: {} };
  }
  return parsed;
}
|
|
144
|
+
// Persist the config object to ~/.ai-council/config.json, creating the
// directory on first use. Output is pretty-printed for hand editing.
async function saveConfig(config) {
  const configDir = join(homedir(), ".ai-council");
  await mkdir(configDir, { recursive: true });
  const serialized = JSON.stringify(config, null, 2);
  await writeFile(CONFIG_PATH, serialized, "utf-8");
}
|
|
149
|
+
// Collect the filesystem paths of all registered counsellors; empty array
// when the registry is absent.
function getRegisteredPaths(config) {
  const registry = config.counsellors ?? {};
  const paths = [];
  for (const entry of Object.values(registry)) {
    paths.push(entry.path);
  }
  return paths;
}
|
|
152
|
+
// Register an existing local directory as a counsellor in the config
// registry. Validates that ABOUT.md exists and that the id (the directory's
// basename) is not already taken. Returns { id, name } for display.
async function addLocalCounsellor(dirPath) {
  const absPath = resolve(dirPath);
  const aboutPath = join(absPath, "ABOUT.md");
  if (!existsSync(aboutPath)) {
    throw new Error(`No ABOUT.md found in ${absPath}`);
  }
  const id = basename(absPath);
  const config = await loadConfig$1();
  const registry = config.counsellors ?? {};
  if (registry[id]) {
    throw new Error(`Counsellor "${id}" is already registered (path: ${registry[id].path})`);
  }
  const raw = await readFile(aboutPath, "utf-8");
  // Cheap frontmatter scrape: grab the `name:` line with a regex instead of
  // a full YAML parse; falls back to the directory basename.
  const nameMatch = raw.match(/^name:\s*["']?(.+?)["']?\s*$/m);
  const displayName = nameMatch?.[1] ?? id;
  registry[id] = {
    path: absPath,
    source: "local",
    addedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  config.counsellors = registry;
  await saveConfig(config);
  return { id, name: displayName };
}
|
|
176
|
+
// Clone a git repository into CLONES_DIR and register the counsellor(s) it
// contains. Two repo layouts are supported:
//   - the repo IS a counsellor (ABOUT.md at its root) -> registered under
//     the repo name;
//   - the repo is a collection -> each non-hidden child directory with an
//     ABOUT.md is registered individually.
// Returns the list of { id, name } entries that were registered.
async function addRemoteCounsellor(url) {
  await mkdir(CLONES_DIR, { recursive: true });
  // basename(url, ".git") already strips the suffix; the extra replace is a
  // redundant belt-and-braces no-op.
  const repoName = basename(url, ".git").replace(/\.git$/, "");
  const clonePath = join(CLONES_DIR, repoName);
  if (existsSync(clonePath)) {
    throw new Error(`Directory already exists: ${clonePath}. Remove it first or use a different URL.`);
  }
  // Shallow clone — history is not needed.
  await execFileAsync("git", ["clone", "--depth", "1", url, clonePath]);
  const results = [];
  const config = await loadConfig$1();
  const registry = config.counsellors ?? {};
  if (existsSync(join(clonePath, "ABOUT.md"))) {
    const id = repoName;
    if (registry[id]) {
      // NOTE(review): this path leaves the fresh clone on disk, so a retry
      // will hit the "Directory already exists" error above — consider
      // removing clonePath before throwing; confirm intended behavior.
      throw new Error(`Counsellor "${id}" is already registered`);
    }
    const raw = await readFile(join(clonePath, "ABOUT.md"), "utf-8");
    // Cheap frontmatter scrape for the display name (no YAML parse).
    const nameMatch = raw.match(/^name:\s*["']?(.+?)["']?\s*$/m);
    const displayName = nameMatch?.[1] ?? id;
    registry[id] = {
      path: clonePath,
      source: "git",
      url,
      addedAt: (/* @__PURE__ */ new Date()).toISOString()
    };
    results.push({ id, name: displayName });
  } else {
    const entries = await readdir(clonePath);
    for (const entry of entries) {
      if (entry.startsWith(".")) continue;
      const entryPath = join(clonePath, entry);
      const info = await stat(entryPath);
      if (info.isDirectory() && existsSync(join(entryPath, "ABOUT.md"))) {
        const id = entry;
        // In collection repos, already-registered ids are skipped silently
        // rather than treated as an error.
        if (registry[id]) continue;
        const raw = await readFile(join(entryPath, "ABOUT.md"), "utf-8");
        const nameMatch = raw.match(/^name:\s*["']?(.+?)["']?\s*$/m);
        const displayName = nameMatch?.[1] ?? id;
        registry[id] = {
          path: entryPath,
          source: "git",
          url,
          addedAt: (/* @__PURE__ */ new Date()).toISOString()
        };
        results.push({ id, name: displayName });
      }
    }
    if (results.length === 0) {
      // Nothing usable in the clone: delete it so a later attempt can
      // re-clone cleanly.
      await rm(clonePath, { recursive: true, force: true });
      throw new Error(`No counsellors found in cloned repository (no ABOUT.md files)`);
    }
  }
  config.counsellors = registry;
  await saveConfig(config);
  return results;
}
|
|
232
|
+
// Unregister a counsellor by id. When `deleteFiles` is true the on-disk
// clone is also removed, but only for git-sourced entries — local
// directories are never deleted. Throws when the id is unknown.
async function removeCounsellor(id, deleteFiles = false) {
  const config = await loadConfig$1();
  const registry = config.counsellors ?? {};
  const entry = registry[id];
  if (!entry) {
    throw new Error(`Counsellor "${id}" is not registered`);
  }
  const shouldDeleteClone = deleteFiles && entry.source === "git" && existsSync(entry.path);
  if (shouldDeleteClone) {
    await rm(entry.path, { recursive: true, force: true });
  }
  delete registry[id];
  config.counsellors = registry;
  await saveConfig(config);
}
|
|
246
|
+
// Backend adapter for the Anthropic Messages API. The API key comes from
// config or the ANTHROPIC_API_KEY env var; an optional baseUrl override is
// forwarded to the SDK.
function createAnthropicBackend(config) {
  const client = new Anthropic({
    apiKey: config.apiKey ?? process.env.ANTHROPIC_API_KEY,
    ...config.baseUrl ? { baseURL: config.baseUrl } : {}
  });
  return {
    name: "anthropic",
    defaultModel: "claude-sonnet-4-5-20250929",
    // One-shot completion. Returns the first text content block (empty
    // string when none) plus input/output token usage.
    async chat(request) {
      const response = await client.messages.create({
        model: request.model,
        max_tokens: 4096,
        system: request.systemPrompt,
        messages: request.messages.map((m) => ({
          role: m.role,
          content: m.content
        })),
        // Only pass temperature when explicitly set so the API default applies.
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      const textBlock = response.content.find((b) => b.type === "text");
      return {
        content: textBlock?.text ?? "",
        tokenUsage: {
          input: response.usage.input_tokens,
          output: response.usage.output_tokens
        }
      };
    },
    // Streaming variant: yields text deltas as they arrive, then one final
    // empty-delta chunk carrying the token usage totals.
    async *chatStream(request) {
      const stream = client.messages.stream({
        model: request.model,
        max_tokens: 4096,
        system: request.systemPrompt,
        messages: request.messages.map((m) => ({
          role: m.role,
          content: m.content
        })),
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      for await (const event of stream) {
        if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
          yield { delta: event.delta.text };
        }
      }
      const finalMessage = await stream.finalMessage();
      yield {
        delta: "",
        tokenUsage: {
          input: finalMessage.usage.input_tokens,
          output: finalMessage.usage.output_tokens
        }
      };
    }
  };
}
|
|
301
|
+
// Backend adapter for the OpenAI Chat Completions API. The API key comes
// from config or the OPENAI_API_KEY env var; baseUrl (e.g. for proxies or
// compatible servers) is forwarded when set.
function createOpenAIBackend(config) {
  const client = new OpenAI({
    apiKey: config.apiKey ?? process.env.OPENAI_API_KEY,
    ...config.baseUrl ? { baseURL: config.baseUrl } : {}
  });
  return {
    name: "openai",
    defaultModel: "gpt-4o",
    // One-shot completion; the system prompt is prepended as a system message.
    async chat(request) {
      const response = await client.chat.completions.create({
        model: request.model,
        messages: [
          { role: "system", content: request.systemPrompt },
          ...request.messages.map((m) => ({
            role: m.role,
            content: m.content
          }))
        ],
        // Only pass temperature when explicitly set so the API default applies.
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
      });
      const choice = response.choices[0];
      return {
        content: choice?.message?.content ?? "",
        tokenUsage: response.usage ? { input: response.usage.prompt_tokens, output: response.usage.completion_tokens } : void 0
      };
    },
    // Streaming variant. include_usage makes the final chunk carry token
    // usage, which is re-emitted here as an empty-delta chunk.
    async *chatStream(request) {
      const stream = await client.chat.completions.create({
        model: request.model,
        messages: [
          { role: "system", content: request.systemPrompt },
          ...request.messages.map((m) => ({
            role: m.role,
            content: m.content
          }))
        ],
        ...request.temperature !== void 0 ? { temperature: request.temperature } : {},
        stream: true,
        stream_options: { include_usage: true }
      });
      for await (const chunk of stream) {
        const delta = chunk.choices[0]?.delta?.content;
        if (delta) {
          yield { delta };
        }
        if (chunk.usage) {
          yield {
            delta: "",
            tokenUsage: {
              input: chunk.usage.prompt_tokens,
              output: chunk.usage.completion_tokens
            }
          };
        }
      }
    }
  };
}
|
|
359
|
+
// Backend adapter for the Google Gemini API (@google/generative-ai).
// The key comes from config or GOOGLE_API_KEY; the SDK constructor accepts
// an empty string, so a missing key only fails at request time.
function createGoogleBackend(config) {
  const apiKey = config.apiKey ?? process.env.GOOGLE_API_KEY ?? "";
  const genAI = new GoogleGenerativeAI(apiKey);
  return {
    name: "google",
    defaultModel: "gemini-2.0-flash",
    // One-shot completion. Gemini takes prior turns as `history` (with
    // "assistant" mapped to its "model" role) and the latest message
    // separately via sendMessage.
    async chat(request) {
      const model = genAI.getGenerativeModel({
        model: request.model,
        systemInstruction: request.systemPrompt,
        generationConfig: {
          // Only pass temperature when explicitly set.
          ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
        }
      });
      const history = request.messages.slice(0, -1).map((m) => ({
        role: m.role === "assistant" ? "model" : "user",
        parts: [{ text: m.content }]
      }));
      const chat = model.startChat({ history });
      const lastMessage = request.messages[request.messages.length - 1];
      const result = await chat.sendMessage(lastMessage?.content ?? "");
      const response = result.response;
      return {
        content: response.text(),
        tokenUsage: response.usageMetadata ? {
          input: response.usageMetadata.promptTokenCount ?? 0,
          output: response.usageMetadata.candidatesTokenCount ?? 0
        } : void 0
      };
    },
    // Streaming variant: same setup, yields chunk text as it arrives, then
    // an empty-delta chunk with usage once the aggregated response resolves.
    async *chatStream(request) {
      const genModel = genAI.getGenerativeModel({
        model: request.model,
        systemInstruction: request.systemPrompt,
        generationConfig: {
          ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
        }
      });
      const history = request.messages.slice(0, -1).map((m) => ({
        role: m.role === "assistant" ? "model" : "user",
        parts: [{ text: m.content }]
      }));
      const chat = genModel.startChat({ history });
      const lastMessage = request.messages[request.messages.length - 1];
      const result = await chat.sendMessageStream(lastMessage?.content ?? "");
      for await (const chunk of result.stream) {
        const text = chunk.text();
        if (text) {
          yield { delta: text };
        }
      }
      const response = await result.response;
      yield {
        delta: "",
        tokenUsage: response.usageMetadata ? {
          input: response.usageMetadata.promptTokenCount ?? 0,
          output: response.usageMetadata.candidatesTokenCount ?? 0
        } : void 0
      };
    }
  };
}
|
|
421
|
+
// Backend adapter for a local Ollama server (default http://localhost:11434;
// override via config.baseUrl). No API key is needed.
function createOllamaBackend(config) {
  const client = new Ollama({
    host: config.baseUrl ?? "http://localhost:11434"
  });
  return {
    name: "ollama",
    defaultModel: "llama3.2",
    // One-shot completion; the system prompt is prepended as a system message.
    async chat(request) {
      const response = await client.chat({
        model: request.model,
        messages: [
          { role: "system", content: request.systemPrompt },
          ...request.messages.map((m) => ({
            role: m.role,
            content: m.content
          }))
        ],
        options: {
          // Only pass temperature when explicitly set.
          ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
        }
      });
      return {
        content: response.message.content,
        // Ollama reports token counts as prompt_eval_count/eval_count; they
        // may be absent, in which case usage is omitted entirely.
        tokenUsage: response.prompt_eval_count !== void 0 ? {
          input: response.prompt_eval_count ?? 0,
          output: response.eval_count ?? 0
        } : void 0
      };
    },
    // Streaming variant: yields content deltas; the chunk flagged done:true
    // carries the token counts, re-emitted at the end as an empty-delta chunk.
    async *chatStream(request) {
      const response = await client.chat({
        model: request.model,
        messages: [
          { role: "system", content: request.systemPrompt },
          ...request.messages.map((m) => ({
            role: m.role,
            content: m.content
          }))
        ],
        options: {
          ...request.temperature !== void 0 ? { temperature: request.temperature } : {}
        },
        stream: true
      });
      let promptEvalCount;
      let evalCount;
      for await (const chunk of response) {
        if (chunk.message.content) {
          yield { delta: chunk.message.content };
        }
        if (chunk.done) {
          promptEvalCount = chunk.prompt_eval_count;
          evalCount = chunk.eval_count;
        }
      }
      yield {
        delta: "",
        tokenUsage: promptEvalCount !== void 0 ? { input: promptEvalCount ?? 0, output: evalCount ?? 0 } : void 0
      };
    }
  };
}
|
|
483
|
+
// Maps backend names (as used in counsellor frontmatter and the config's
// `backends` section) to their factory functions.
const factories = {
  anthropic: createAnthropicBackend,
  openai: createOpenAIBackend,
  google: createGoogleBackend,
  ollama: createOllamaBackend
};
|
|
489
|
+
// Module-level cache for the parsed config; cleared via clearCaches().
let configCache = null;
// Read ~/.ai-council/config.json once and memoize it. A missing or invalid
// file yields (and caches) the empty default config.
async function loadConfig() {
  if (configCache !== null) return configCache;
  const configPath = join(homedir(), ".ai-council", "config.json");
  let parsed;
  try {
    parsed = JSON.parse(await readFile(configPath, "utf-8"));
  } catch {
    parsed = { backends: {} };
  }
  configCache = parsed;
  return configCache;
}
|
|
501
|
+
// Memoized backend instances, keyed by backend name.
const backendCache = /* @__PURE__ */ new Map();
// Drop both the config and backend caches (e.g. after config changes so the
// next request re-reads config.json and rebuilds clients).
function clearCaches() {
  backendCache.clear();
  configCache = null;
}
|
|
506
|
+
// Return the (memoized) backend instance for `name`, constructing it from
// the matching factory and any per-backend settings in the config file.
// Throws with the list of valid names when `name` is unknown.
async function getBackend(name) {
  const cached = backendCache.get(name);
  if (cached) return cached;
  const factory = factories[name];
  if (!factory) {
    throw new Error(`Unknown backend: "${name}". Available: ${Object.keys(factories).join(", ")}`);
  }
  const config = await loadConfig();
  // Optional chaining: a hand-edited config.json without a "backends" key
  // previously made `config.backends[name]` throw a TypeError here.
  const backendConfig = config.backends?.[name] ?? {};
  const backend = factory(backendConfig);
  backendCache.set(name, backend);
  return backend;
}
|
|
519
|
+
// Returns a prompt snippet teaching the model the Excalidraw JSON element
// format: required fields, per-type extras, a color palette, layout
// heuristics, and a minimal worked example. Backticks inside are escaped
// because the snippet is itself a template literal.
function getExcalidrawCheatsheet() {
  return `
## Excalidraw Element Reference

Output a JSON array of Excalidraw elements. Each element needs these fields:

### Common Fields (all elements)
- \`type\`: "rectangle" | "ellipse" | "diamond" | "text" | "arrow" | "line"
- \`id\`: unique string (e.g. "rect1", "text1", "arrow1")
- \`x\`, \`y\`: number (top-left origin, x increases right, y increases down)
- \`width\`, \`height\`: number
- \`strokeColor\`: hex string (e.g. "#1e1e1e")
- \`backgroundColor\`: hex string or "transparent"
- \`fillStyle\`: "solid" | "hachure" | "cross-hatch"
- \`strokeWidth\`: 1 | 2 | 4
- \`roughness\`: 0 (sharp) | 1 (sketchy)
- \`opacity\`: 100
- \`angle\`: 0
- \`seed\`: any integer (e.g. 1)
- \`version\`: 1
- \`isDeleted\`: false
- \`groupIds\`: []
- \`boundElements\`: null or array of { id: string, type: "text" | "arrow" }
- \`link\`: null
- \`locked\`: false

### Text Elements
Additional fields: \`text\`, \`fontSize\` (16-24), \`fontFamily\` (1=hand, 2=normal, 3=mono), \`textAlign\` ("left"|"center"|"right"), \`verticalAlign\` ("top"|"middle"), \`baseline\`: 0, \`containerId\`: null or parent shape id

### Arrow/Line Elements
Additional fields: \`points\` (array of [x,y] relative to element x,y — first point always [0,0]), \`startBinding\` and \`endBinding\`: null or { elementId: string, focus: 0, gap: 5 }, \`lastCommittedPoint\`: null, \`startArrowhead\`: null, \`endArrowhead\`: "arrow" | null

### Color Palette
- Blue: "#1971c2", Light blue bg: "#a5d8ff"
- Green: "#2f9e44", Light green bg: "#b2f2bb"
- Red: "#e03131", Light red bg: "#ffc9c9"
- Orange: "#e8590c", Light orange bg: "#ffd8a8"
- Purple: "#7048e8", Light purple bg: "#d0bfff"
- Yellow: "#f08c00", Light yellow bg: "#ffec99"
- Gray: "#868e96", Light gray bg: "#dee2e6"
- Dark: "#1e1e1e"

### Layout Tips
- Space shapes ~200px apart horizontally, ~150px vertically
- Typical shape size: 160×80 for rectangles, 120×60 for ellipses
- Center text inside shapes using containerId
- Use arrows to show relationships (agreement, disagreement, influence)

### Compact Example
\`\`\`json
[
{"type":"rectangle","id":"r1","x":50,"y":50,"width":160,"height":80,"strokeColor":"#1971c2","backgroundColor":"#a5d8ff","fillStyle":"solid","strokeWidth":2,"roughness":1,"opacity":100,"angle":0,"seed":1,"version":1,"isDeleted":false,"groupIds":[],"boundElements":[{"id":"t1","type":"text"},{"id":"a1","type":"arrow"}],"link":null,"locked":false},
{"type":"text","id":"t1","x":60,"y":70,"width":140,"height":40,"text":"Counsellor A","fontSize":16,"fontFamily":2,"textAlign":"center","verticalAlign":"middle","baseline":0,"containerId":"r1","strokeColor":"#1e1e1e","backgroundColor":"transparent","fillStyle":"solid","strokeWidth":1,"roughness":0,"opacity":100,"angle":0,"seed":2,"version":1,"isDeleted":false,"groupIds":[],"boundElements":null,"link":null,"locked":false},
{"type":"rectangle","id":"r2","x":350,"y":50,"width":160,"height":80,"strokeColor":"#2f9e44","backgroundColor":"#b2f2bb","fillStyle":"solid","strokeWidth":2,"roughness":1,"opacity":100,"angle":0,"seed":3,"version":1,"isDeleted":false,"groupIds":[],"boundElements":[{"id":"t2","type":"text"},{"id":"a1","type":"arrow"}],"link":null,"locked":false},
{"type":"text","id":"t2","x":360,"y":70,"width":140,"height":40,"text":"Counsellor B","fontSize":16,"fontFamily":2,"textAlign":"center","verticalAlign":"middle","baseline":0,"containerId":"r2","strokeColor":"#1e1e1e","backgroundColor":"transparent","fillStyle":"solid","strokeWidth":1,"roughness":0,"opacity":100,"angle":0,"seed":4,"version":1,"isDeleted":false,"groupIds":[],"boundElements":null,"link":null,"locked":false},
{"type":"arrow","id":"a1","x":210,"y":90,"width":140,"height":0,"points":[[0,0],[140,0]],"startBinding":{"elementId":"r1","focus":0,"gap":5},"endBinding":{"elementId":"r2","focus":0,"gap":5},"startArrowhead":null,"endArrowhead":"arrow","strokeColor":"#1e1e1e","backgroundColor":"transparent","fillStyle":"solid","strokeWidth":2,"roughness":1,"opacity":100,"angle":0,"seed":5,"version":1,"isDeleted":false,"groupIds":[],"boundElements":null,"link":null,"locked":false,"lastCommittedPoint":null}
]
\`\`\`
`.trim();
}
|
|
579
|
+
// Marker line separating the secretary's markdown summary from the
// Excalidraw JSON diagram in its response.
const EXCALIDRAW_DELIMITER = "---EXCALIDRAW---";
// Fallback secretary system prompt, used when no custom systemPrompt is
// configured for the secretary.
const DEFAULT_SYSTEM_PROMPT = `You are the Secretary of a council discussion. Your job is to synthesize a clear, structured summary of the conversation that just took place.

Structure your summary with these sections:

## Individual Positions
Briefly summarize each counsellor's key arguments and stance.

## Points of Convergence
Where did the counsellors agree? What common ground emerged?

## Points of Divergence
Where did they disagree? What are the key tensions?

## Synthesis
What are the most important takeaways? What would you recommend based on the full discussion?

Be concise but thorough. Use markdown formatting.`;
|
|
597
|
+
// Render a discussion result as a plain-text transcript: a short header
// (topic, counsellor names, round count), then each turn grouped under a
// "--- Round N ---" divider as the round number changes.
function buildTranscript(result) {
  const header = [
    `Topic: ${result.topic}`,
    `Counsellors: ${result.counsellors.map((c) => c.name).join(", ")}`,
    `Rounds: ${result.rounds}`,
    ""
  ];
  const body = [];
  let lastRound = 0;
  for (const turn of result.turns) {
    if (turn.round !== lastRound) {
      lastRound = turn.round;
      body.push(`--- Round ${lastRound} ---`, "");
    }
    body.push(`[${turn.counsellorName}]:`, turn.content, "");
  }
  return [...header, ...body].join("\n");
}
|
|
616
|
+
/**
 * Runs the "secretary" model over a finished discussion: streams (or fetches)
 * a markdown summary plus an optional Excalidraw position diagram.
 *
 * @param result  - conversation result passed through buildTranscript
 * @param config  - app config; config.secretary.backend is required
 * @param onChunk - optional callback invoked with each streamed text delta
 * @param signal  - optional AbortSignal; aborting stops consuming the stream
 * @returns {text, diagram?} — diagram is the parsed JSON array, if present/valid
 * @throws Error when no secretary backend is configured
 */
async function runSecretary({
  result,
  config,
  onChunk,
  signal
}) {
  const secretaryConfig = config.secretary;
  if (!secretaryConfig?.backend) {
    throw new Error("No secretary backend configured");
  }
  const backend = await getBackend(secretaryConfig.backend);
  const model = secretaryConfig.model ?? backend.defaultModel;
  // User-configured prompt wins; empty string also falls back to the default.
  const basePrompt = secretaryConfig.systemPrompt || DEFAULT_SYSTEM_PROMPT;
  const cheatsheet = getExcalidrawCheatsheet();
  // Base prompt + Excalidraw element cheatsheet + instructions for the
  // delimiter-separated diagram payload.
  const systemPrompt = `${basePrompt}

${cheatsheet}

After your text summary, output \`${EXCALIDRAW_DELIMITER}\` on its own line, then a JSON array of Excalidraw elements showing a visual map of where each counsellor stands on the topic. Use shapes for each counsellor with their name, arrows to show relationships (agreement/disagreement), and position them to visually represent the discussion dynamics.`;
  const transcript = buildTranscript(result);
  const chatRequest = {
    model,
    systemPrompt,
    messages: [{ role: "user", content: `Please summarize this council discussion and create a position diagram:

${transcript}` }],
    temperature: 0.5
  };
  let fullResponse = "";
  if (backend.chatStream) {
    // Streaming path: accumulate deltas and forward each one to onChunk.
    for await (const chunk of backend.chatStream(chatRequest)) {
      if (signal?.aborted) break;
      fullResponse += chunk.delta;
      if (chunk.delta && onChunk) {
        onChunk(chunk.delta);
      }
    }
  } else {
    // Non-streaming fallback: deliver the whole response as one chunk.
    const response = await backend.chat(chatRequest);
    fullResponse = response.content;
    if (onChunk) onChunk(fullResponse);
  }
  const delimiterIndex = fullResponse.indexOf(EXCALIDRAW_DELIMITER);
  if (delimiterIndex === -1) {
    // Model skipped the diagram; return text only.
    return { text: fullResponse.trim() };
  }
  const text = fullResponse.slice(0, delimiterIndex).trim();
  const diagramRaw = fullResponse.slice(delimiterIndex + EXCALIDRAW_DELIMITER.length).trim();
  let diagram;
  try {
    // Grab the outermost [...] span so surrounding prose/code fences are ignored.
    const jsonMatch = diagramRaw.match(/\[[\s\S]*\]/);
    if (jsonMatch) {
      diagram = JSON.parse(jsonMatch[0]);
    }
  } catch {
    // Malformed diagram JSON is silently dropped; the text summary still ships.
  }
  return { text, diagram };
}
|
|
674
|
+
// System prompt for the short per-round summaries produced in debate mode.
const INTERIM_SYSTEM_PROMPT = `You are the Secretary of a council debate. Briefly summarize this round of discussion. Note emerging agreements, disagreements, and shifts in position. 2-3 paragraphs max. Use markdown formatting.`;
|
|
675
|
+
/**
 * Produces a short summary of a single debate round using the secretary
 * backend. Streams deltas to onChunk when the backend supports streaming.
 *
 * @param result      - in-progress conversation result (turns filtered by round)
 * @param roundNumber - round to summarize; round 1 is labeled "Constructive"
 * @param config      - app config; config.secretary.backend is required
 * @param onChunk     - optional streaming callback
 * @param signal      - optional AbortSignal
 * @returns trimmed summary text
 * @throws Error when no secretary backend is configured
 */
async function runInterimSummary({
  result,
  roundNumber,
  config,
  onChunk,
  signal
}) {
  const secretaryConfig = config.secretary;
  if (!secretaryConfig?.backend) {
    throw new Error("No secretary backend configured");
  }
  const backend = await getBackend(secretaryConfig.backend);
  const model = secretaryConfig.model ?? backend.defaultModel;
  // Only this round's turns are sent, keeping the request small.
  const roundTurns = result.turns.filter((t) => t.round === roundNumber);
  const lines = [];
  lines.push(`Topic: ${result.topic}`);
  lines.push(`Round ${roundNumber}${roundNumber === 1 ? " (Constructive)" : " (Rebuttal)"}`);
  lines.push("");
  for (const turn of roundTurns) {
    lines.push(`[${turn.counsellorName}]:`);
    lines.push(turn.content);
    lines.push("");
  }
  const chatRequest = {
    model,
    systemPrompt: INTERIM_SYSTEM_PROMPT,
    messages: [{ role: "user", content: `Please summarize this round:

${lines.join("\n")}` }],
    temperature: 0.5
  };
  let fullResponse = "";
  if (backend.chatStream) {
    // Streaming path: forward each delta and accumulate the full text.
    for await (const chunk of backend.chatStream(chatRequest)) {
      if (signal?.aborted) break;
      fullResponse += chunk.delta;
      if (chunk.delta && onChunk) {
        onChunk(chunk.delta);
      }
    }
  } else {
    // Non-streaming fallback: one-shot response delivered as a single chunk.
    const response = await backend.chat(chatRequest);
    fullResponse = response.content;
    if (onChunk) onChunk(fullResponse);
  }
  return fullResponse.trim();
}
|
|
722
|
+
/**
 * Asks the secretary model for a concise (max 8 words, per the prompt)
 * discussion title based on the topic and first-round turns.
 * Strips any wrapping quotes and trailing punctuation the model adds.
 *
 * @throws Error when no secretary backend is configured
 */
async function generateTitle({
  topic,
  firstRoundTurns,
  config
}) {
  const secretaryConfig = config.secretary;
  if (!secretaryConfig?.backend) {
    throw new Error("No secretary backend configured");
  }
  const backend = await getBackend(secretaryConfig.backend);
  const model = secretaryConfig.model ?? backend.defaultModel;
  // Clip each turn to 300 chars so the title request stays small.
  const excerpts = firstRoundTurns.map(
    (t) => `[${t.counsellorName}]: ${t.content.slice(0, 300)}`
  );
  const turnsSummary = excerpts.join("\n\n");
  const response = await backend.chat({
    model,
    systemPrompt: "Generate a concise title (max 8 words) for this council discussion. Return only the title, no quotes or punctuation at the end.",
    messages: [
      {
        role: "user",
        content: `Topic: ${topic}

First round:
${turnsSummary}`
      }
    ],
    temperature: 0.3
  });
  // Clean up: drop surrounding quotes, then trailing sentence punctuation.
  const unquoted = response.content.trim().replace(/^["']+|["']+$/g, "");
  return unquoted.replace(/[.!?]+$/, "").trim();
}
|
|
750
|
+
// Location of the app-wide append-only log file (~/.ai-council/council.log).
const LOG_DIR = join(homedir(), ".ai-council");
const LOG_FILE = join(LOG_DIR, "council.log");
// Memoizes ensureDir so mkdir is attempted at most once per process.
let ensured = false;
|
|
753
|
+
// Creates LOG_DIR once per process; subsequent calls are no-ops.
async function ensureDir() {
  if (ensured) return;
  await mkdir(LOG_DIR, { recursive: true });
  ensured = true;
}
|
|
758
|
+
/**
 * Formats one log entry: "[ISO timestamp] LEVEL [context] message", with an
 * optional indented detail section for `extra` (Error → message + stack,
 * string → as-is, anything else → pretty-printed JSON).
 * Returns the entry terminated by a newline.
 */
function formatEntry(level, context, message, extra) {
  const ts = (/* @__PURE__ */ new Date()).toISOString();
  let line = `[${ts}] ${level} [${context}] ${message}`;
  if (extra !== void 0) {
    const detail = extra instanceof Error ? `${extra.message}
${extra.stack ?? ""}` : typeof extra === "string" ? extra : JSON.stringify(extra, null, 2);
    // Indent every detail line so multi-line extras stay visually attached
    // to their entry in the log file.
    line += `
 ${detail.replace(/\n/g, "\n ")}`;
  }
  return line + "\n";
}
|
|
769
|
+
// Appends one formatted entry to the log file. Failures are deliberately
// swallowed: logging must never crash or block the app.
async function write(level, context, message, extra) {
  try {
    await ensureDir();
    await appendFile(LOG_FILE, formatEntry(level, context, message, extra));
  } catch {
  }
}
|
|
776
|
+
// Minimal leveled logger facade over write(); each method returns the
// fire-and-forget promise from the underlying file append.
const log$1 = {
  info: (context, message, extra) => write("INFO", context, message, extra),
  warn: (context, message, extra) => write("WARN", context, message, extra),
  error: (context, message, extra) => write("ERROR", context, message, extra)
};
|
|
781
|
+
/**
 * Builds the freeform-mode chat history for one counsellor: the topic as the
 * opening user message, then every prior turn — the counsellor's own turns as
 * assistant messages, everyone else's as labeled user messages.
 */
function buildMessages(topic, turns, currentCounsellorId) {
  const history = [{ role: "user", content: topic }];
  for (const turn of turns) {
    const isOwnTurn = turn.counsellorId === currentCounsellorId;
    history.push(
      isOwnTurn
        ? { role: "assistant", content: turn.content }
        : {
            role: "user",
            content: `[${turn.counsellorName}, Round ${turn.round}]: ${turn.content}`
          }
    );
  }
  return history;
}
|
|
795
|
+
/**
 * Builds the debate-mode chat history for one counsellor. Round 1 sees only
 * the topic. Later rounds see: all round-1 ("Constructive") turns, then all
 * turns from the immediately previous round (when that round is > 1), then
 * the counsellor's OWN turns from any other earlier middle rounds — other
 * counsellors' middle-round turns are intentionally not replayed.
 */
function buildDebateMessages(topic, turns, currentCounsellorId, currentRound) {
  const messages = [{ role: "user", content: topic }];
  if (currentRound === 1) {
    return messages;
  }
  // Own turns become assistant messages; others are labeled user messages.
  const toMessage = (turn, label) =>
    turn.counsellorId === currentCounsellorId
      ? { role: "assistant", content: turn.content }
      : { role: "user", content: `[${turn.counsellorName}, ${label}]: ${turn.content}` };
  for (const turn of turns) {
    if (turn.round === 1) {
      messages.push(toMessage(turn, "Constructive"));
    }
  }
  const prevRound = currentRound - 1;
  if (prevRound > 1) {
    for (const turn of turns) {
      if (turn.round === prevRound) {
        messages.push(toMessage(turn, `Round ${prevRound}`));
      }
    }
  }
  // Keep the counsellor's own statements from any skipped middle rounds so
  // it does not contradict itself.
  for (const turn of turns) {
    const isOwnMiddleRound =
      turn.counsellorId === currentCounsellorId &&
      turn.round !== 1 &&
      turn.round !== prevRound &&
      turn.round < currentRound;
    if (isOwnMiddleRound) {
      messages.push({ role: "assistant", content: turn.content });
    }
  }
  return messages;
}
|
|
832
|
+
/**
 * Deterministic Fisher–Yates shuffle. The same (array, seed) pair always
 * yields the same order, which is what makes debate speaking order stable
 * and reproducible. The input array is not mutated.
 */
function shuffleWithSeed(array, seed) {
  let state = seed | 0;
  // Small integer-hash PRNG in [0, 1). The exact arithmetic is part of the
  // observable behavior (ordering), so it is reproduced verbatim.
  const nextFloat = () => {
    state = state + 1831565813 | 0;
    let t = Math.imul(state ^ state >>> 15, 1 | state);
    t = t + Math.imul(t ^ t >>> 7, 61 | t) ^ t;
    return ((t ^ t >>> 14) >>> 0) / 4294967296;
  };
  const shuffled = array.slice();
  for (let i = shuffled.length - 1; i > 0; i--) {
    const j = Math.floor(nextFloat() * (i + 1));
    const swap = shuffled[i];
    shuffled[i] = shuffled[j];
    shuffled[j] = swap;
  }
  return shuffled;
}
|
|
847
|
+
/**
 * Assembles the final conversation-result record: topic metadata, a flattened
 * snapshot of each counsellor's frontmatter, all turns, timestamps, and total
 * token usage. `roundSummaries` is attached only when non-empty; `mode` is
 * attached only for debates.
 */
function buildResult(opts, turns, startedAt, totalInput, totalOutput, roundSummaries) {
  const counsellorSnapshots = opts.counsellors.map((c) => ({
    id: c.id,
    name: c.frontmatter.name,
    description: c.frontmatter.description,
    backend: c.frontmatter.backend,
    model: c.frontmatter.model ?? "default",
    avatarUrl: c.avatarUrl
  }));
  const record = {
    topic: opts.topic,
    topicSource: opts.topicSource,
    counsellors: counsellorSnapshots,
    rounds: opts.rounds,
    turns,
    startedAt,
    completedAt: new Date().toISOString(),
    totalTokenUsage: { input: totalInput, output: totalOutput }
  };
  if (roundSummaries && Object.keys(roundSummaries).length > 0) {
    record.roundSummaries = roundSummaries;
  }
  if (opts.mode === "debate") {
    record.mode = "debate";
  }
  return record;
}
|
|
868
|
+
/**
 * Orchestrates a full council conversation. Accepts either the legacy
 * positional form (topic, topicSource, counsellors, rounds, onEvent) or a
 * single options object. Emits UI events via opts.onEvent, supports
 * cancellation via opts.signal, user-injected turns via opts.beforeTurn,
 * and — in debate mode with a secretary configured — per-round summaries.
 * Always resolves to a result built by buildResult, even when aborted.
 */
async function runConversation(topicOrOpts, topicSource, counsellors, rounds, onEvent) {
  let opts;
  if (typeof topicOrOpts === "string") {
    // Legacy positional call — normalize into the options shape.
    opts = {
      topic: topicOrOpts,
      topicSource,
      counsellors,
      rounds,
      onEvent
    };
  } else {
    opts = topicOrOpts;
  }
  const startedAt = (/* @__PURE__ */ new Date()).toISOString();
  const turns = [];
  let totalInput = 0;
  let totalOutput = 0;
  const isDebate = opts.mode === "debate";
  const roundSummaries = {};
  log$1.info("conversation", `Starting ${isDebate ? "debate" : "freeform"} — ${opts.counsellors.length} counsellors, ${opts.rounds} rounds`, {
    counsellors: opts.counsellors.map((c) => `${c.frontmatter.name} (${c.frontmatter.backend}/${c.frontmatter.model ?? "default"})`),
    topic: opts.topic.slice(0, 200)
  });
  for (let round = 1; round <= opts.rounds; round++) {
    // Debates randomize speaking order after round 1, seeded by the round
    // number so the order is reproducible.
    const roundCounsellors = isDebate && round > 1 ? shuffleWithSeed(opts.counsellors, round) : opts.counsellors;
    for (const counsellor of roundCounsellors) {
      if (opts.signal?.aborted) {
        // Abort: return everything accumulated so far as a partial result.
        return buildResult(opts, turns, startedAt, totalInput, totalOutput, roundSummaries);
      }
      if (opts.beforeTurn) {
        // Give the host a chance to inject a turn (e.g. a user message)
        // before this counsellor speaks.
        const injected = await opts.beforeTurn();
        if (injected) {
          turns.push(injected);
          opts.onEvent({ type: "turn_complete", turn: injected });
        }
      }
      opts.onEvent({ type: "turn_start", round, counsellorName: counsellor.frontmatter.name });
      try {
        const backend = await getBackend(counsellor.frontmatter.backend);
        const model = counsellor.frontmatter.model ?? backend.defaultModel;
        const messages = isDebate ? buildDebateMessages(opts.topic, turns, counsellor.id, round) : buildMessages(opts.topic, turns, counsellor.id);
        const chatRequest = {
          model,
          systemPrompt: counsellor.systemPrompt,
          messages,
          temperature: counsellor.frontmatter.temperature
        };
        let content;
        let tokenUsage;
        if (backend.chatStream) {
          // Streaming path: forward deltas as turn_chunk events; the last
          // chunk carrying tokenUsage wins.
          content = "";
          for await (const chunk of backend.chatStream(chatRequest)) {
            if (opts.signal?.aborted) break;
            content += chunk.delta;
            if (chunk.delta) {
              opts.onEvent({ type: "turn_chunk", counsellorName: counsellor.frontmatter.name, delta: chunk.delta });
            }
            if (chunk.tokenUsage) {
              tokenUsage = chunk.tokenUsage;
            }
          }
        } else {
          const response = await backend.chat(chatRequest);
          content = response.content;
          tokenUsage = response.tokenUsage;
        }
        const turn = {
          round,
          counsellorId: counsellor.id,
          counsellorName: counsellor.frontmatter.name,
          content,
          timestamp: (/* @__PURE__ */ new Date()).toISOString(),
          model,
          backend: counsellor.frontmatter.backend,
          tokenUsage,
          avatarUrl: counsellor.avatarUrl
        };
        if (tokenUsage) {
          totalInput += tokenUsage.input;
          totalOutput += tokenUsage.output;
        }
        turns.push(turn);
        opts.onEvent({ type: "turn_complete", turn });
      } catch (err) {
        // One failed counsellor must not kill the whole conversation:
        // log, surface an error event, and move on to the next speaker.
        const message = err instanceof Error ? err.message : String(err);
        log$1.error("conversation", `Turn failed for ${counsellor.frontmatter.name} (round ${round}, model ${counsellor.frontmatter.model ?? "default"}, backend ${counsellor.frontmatter.backend})`, err);
        opts.onEvent({ type: "error", counsellorName: counsellor.frontmatter.name, error: message });
      }
    }
    opts.onEvent({ type: "round_complete", round });
    if (isDebate && opts.config?.secretary?.backend && !opts.signal?.aborted) {
      // Debate mode: stream an interim per-round summary from the secretary.
      try {
        const interimResult = buildResult(opts, turns, startedAt, totalInput, totalOutput, roundSummaries);
        opts.onEvent({ type: "round_summary_start", round });
        const summary = await runInterimSummary({
          result: interimResult,
          roundNumber: round,
          config: opts.config,
          onChunk: (delta) => {
            opts.onEvent({ type: "round_summary_chunk", round, delta });
          },
          signal: opts.signal
        });
        roundSummaries[round] = summary;
        opts.onEvent({ type: "round_summary_complete", round, summary });
      } catch (err) {
        // A failed interim summary is non-fatal; the round stands without one.
        log$1.error("conversation", `Interim summary failed for round ${round}`, err);
      }
    }
  }
  return buildResult(opts, turns, startedAt, totalInput, totalOutput, roundSummaries);
}
|
|
980
|
+
// Directory holding one JSON file per saved conversation (~/.ai-council/history).
const HISTORY_DIR = join(homedir(), ".ai-council", "history");
|
|
981
|
+
/**
 * Converts free text into a filesystem-safe slug: lowercase, runs of
 * non-alphanumerics collapsed to single hyphens, capped at 40 characters.
 * Edge hyphens are stripped AFTER truncation — the previous order could
 * leave an id ending in a dangling "-" when the 40-char cut landed on one.
 */
function slugify(text) {
  return text.toLowerCase().replace(/[^a-z0-9]+/g, "-").slice(0, 40).replace(/^-+|-+$/g, "");
}
|
|
984
|
+
/**
 * Persists a conversation result as pretty-printed JSON under HISTORY_DIR.
 * The file id combines the start timestamp (colon/dot-free, to the second)
 * with a slug of the topic.
 *
 * @returns the generated history id (filename without extension)
 */
async function saveToHistory(result) {
  await mkdir(HISTORY_DIR, { recursive: true });
  const stamp = new Date(result.startedAt).toISOString().replace(/[:.]/g, "-").slice(0, 19);
  const id = `${stamp}-${slugify(result.topic)}`;
  await writeFile(join(HISTORY_DIR, `${id}.json`), JSON.stringify(result, null, 2), "utf-8");
  return id;
}
|
|
993
|
+
/**
 * Lists saved conversations as lightweight index entries (id, topic, title,
 * counsellor names, rounds, timestamps), newest first. Files that fail to
 * read or parse are skipped silently so one corrupt record cannot hide the
 * rest of the history.
 */
async function listHistory() {
  await mkdir(HISTORY_DIR, { recursive: true });
  const files = await readdir(HISTORY_DIR);
  const entries = [];
  for (const file of files) {
    if (!file.endsWith(".json")) continue;
    try {
      const raw = await readFile(join(HISTORY_DIR, file), "utf-8");
      const result = JSON.parse(raw);
      entries.push({
        id: basename(file, ".json"),
        topic: result.topic,
        title: result.title,
        counsellors: result.counsellors.map((c) => c.name),
        rounds: result.rounds,
        startedAt: result.startedAt,
        completedAt: result.completedAt
      });
    } catch {
      // Unreadable/corrupt entry: omit from the listing.
    }
  }
  // ISO-8601 strings sort chronologically; reversed for newest-first.
  return entries.sort((a, b) => b.startedAt.localeCompare(a.startedAt));
}
|
|
1016
|
+
/**
 * Loads one saved conversation by id. Transparently migrates legacy records
 * that stored a single `infographic` into the current `infographics` array.
 */
async function getHistoryEntry(id) {
  const raw = await readFile(join(HISTORY_DIR, `${id}.json`), "utf-8");
  const entry = JSON.parse(raw);
  if (entry.infographic && !entry.infographics) {
    entry.infographics = [entry.infographic];
    delete entry.infographic;
  }
  return entry;
}
|
|
1026
|
+
/**
 * Removes one saved conversation file. Rejects if the file does not exist.
 */
async function deleteHistoryEntry(id) {
  await rm(join(HISTORY_DIR, `${id}.json`));
}
|
|
1030
|
+
/**
 * Appends an infographic record to a saved conversation and rewrites the
 * file. Creates the `infographics` array when the entry has none yet.
 */
async function addInfographicToHistory(id, infographic) {
  const filePath = join(HISTORY_DIR, `${id}.json`);
  const entry = JSON.parse(await readFile(filePath, "utf-8"));
  const existing = entry.infographics || [];
  entry.infographics = [...existing, infographic];
  await writeFile(filePath, JSON.stringify(entry, null, 2), "utf-8");
}
|
|
1038
|
+
/**
 * Removes the infographic at `index` from a saved conversation. Out-of-range
 * indices are ignored, but the file is rewritten either way (matching the
 * original behavior).
 */
async function deleteInfographicFromHistory(id, index) {
  const filePath = join(HISTORY_DIR, `${id}.json`);
  const entry = JSON.parse(await readFile(filePath, "utf-8"));
  const list = entry.infographics;
  const inRange = index >= 0 && list && index < list.length;
  if (inRange) {
    list.splice(index, 1);
  }
  await writeFile(filePath, JSON.stringify(entry, null, 2), "utf-8");
}
|
|
1047
|
+
// CommonJS require bridge for this ESM bundle; used to load the optional
// image-generation SDKs ("openai", "@google/genai") synchronously.
const require$1 = createRequire(import.meta.url);
|
|
1048
|
+
/**
 * Builds the image-generation prompt for a discussion infographic. Prefers
 * the secretary summary when present; otherwise falls back to clipped
 * excerpts of every turn. Topic and key points are length-capped to keep
 * the prompt bounded.
 */
function buildPrompt(result) {
  const panelists = result.counsellors.map((c) => c.name).join(", ");
  const fallback = () => result.turns.map((t) => `${t.counsellorName}: ${t.content.slice(0, 200)}`).join("\n");
  const keyPoints = result.summary ?? fallback();
  const pieces = [
    "Create a professional infographic summarizing a panel discussion.",
    `Topic: ${result.topic.slice(0, 300)}`,
    `Key points: ${keyPoints.slice(0, 1500)}`,
    `Panelists: ${panelists}`,
    "Use a clean, modern design with sections for convergence points, divergence points, and key takeaways.",
    "Include relevant icons and visual hierarchy. Use a horizontal landscape layout."
  ];
  return pieces.join(" ");
}
|
|
1060
|
+
/**
 * Picks the image-generation backend: an explicit config override wins,
 * otherwise Google is preferred over OpenAI based on which API key is
 * available (config or environment). Returns null when neither is configured.
 */
function detectBackend(config) {
  const override = config.infographic?.backend;
  if (override) return override;
  if (config.backends.google?.apiKey || process.env.GOOGLE_API_KEY) return "google";
  if (config.backends.openai?.apiKey || process.env.OPENAI_API_KEY) return "openai";
  return null;
}
|
|
1068
|
+
/**
 * Generates an infographic image via the OpenAI Images API (gpt-image-1,
 * landscape 1536x1024, high quality).
 *
 * @returns base64-encoded image data
 * @throws Error when the response carries no image payload
 */
async function generateViaOpenAI(prompt, config) {
  // Loaded lazily via the CJS bridge so the SDK is only required when used.
  const OpenAI2 = require$1("openai").default;
  const client = new OpenAI2({
    apiKey: config.backends.openai?.apiKey || process.env.OPENAI_API_KEY,
    ...config.backends.openai?.baseUrl ? { baseURL: config.backends.openai.baseUrl } : {}
  });
  const response = await client.images.generate({
    model: "gpt-image-1",
    prompt,
    quality: "high",
    size: "1536x1024"
  });
  const b64 = response.data?.[0]?.b64_json;
  if (!b64) throw new Error("No image data returned from OpenAI");
  return b64;
}
|
|
1084
|
+
/**
 * Generates an infographic image via Google's Gemini image model.
 * Scans the response parts for the first inline image payload.
 *
 * @returns base64-encoded image data (inlineData.data)
 * @throws Error when no API key is configured or no image part is returned
 */
async function generateViaGoogle(prompt, config) {
  // Loaded lazily via the CJS bridge so the SDK is only required when used.
  const { GoogleGenAI } = require$1("@google/genai");
  const apiKey = config.backends.google?.apiKey || process.env.GOOGLE_API_KEY;
  if (!apiKey) throw new Error("No Google API key configured");
  const ai = new GoogleGenAI({ apiKey });
  const response = await ai.models.generateContent({
    model: "gemini-3-pro-image-preview",
    contents: prompt,
    config: {
      // The model may interleave text with the image; request both.
      responseModalities: ["IMAGE", "TEXT"]
    }
  });
  const parts = response.candidates?.[0]?.content?.parts;
  if (!parts) throw new Error("No response parts from Gemini");
  for (const part of parts) {
    if (part.inlineData?.data) {
      return part.inlineData.data;
    }
  }
  throw new Error("No image data in Gemini response");
}
|
|
1105
|
+
/**
 * Generates an infographic for a discussion result, dispatching to whichever
 * image backend is explicitly requested or auto-detected from configuration.
 *
 * @returns base64-encoded image data
 * @throws Error when no image-capable backend is available
 */
async function generateInfographic(result, config, backendOverride) {
  const backend = backendOverride ?? detectBackend(config);
  if (!backend) {
    throw new Error("No image-capable backend configured (need OpenAI or Google API key)");
  }
  const prompt = buildPrompt(result);
  return backend === "openai"
    ? generateViaOpenAI(prompt, config)
    : generateViaGoogle(prompt, config);
}
|
|
1115
|
+
// Default base URLs per backend, used when no baseUrl override is configured.
const DEFAULT_URLS = {
  anthropic: "https://api.anthropic.com",
  openai: "https://api.openai.com/v1",
  google: "https://generativelanguage.googleapis.com",
  ollama: "http://localhost:11434"
};
// Fallback model lists returned by probeBackend when live model listing
// fails or is unavailable (see probeBackend's catch paths).
const KNOWN_ANTHROPIC_MODELS = [
  "claude-opus-4-20250514",
  "claude-sonnet-4-5-20250514",
  "claude-sonnet-4-20250514",
  "claude-haiku-4-20250414",
  "claude-3-5-haiku-20241022"
];
const KNOWN_GOOGLE_MODELS = [
  "gemini-2.5-pro",
  "gemini-2.5-flash",
  "gemini-2.0-flash",
  "gemini-2.0-flash-lite",
  "gemini-1.5-pro",
  "gemini-1.5-flash"
];
|
|
1136
|
+
/**
 * Checks connectivity for one backend and returns its available models.
 * Never throws: every failure path resolves to
 * { connected: false, models: <fallback list>, error }.
 * SDKs are imported dynamically so probing does not force-load them all.
 *
 * @param name   - "ollama" | "openai" | "anthropic" | "google"
 * @param config - backend config: { apiKey?, baseUrl? }
 */
async function probeBackend(name, config) {
  try {
    switch (name) {
      case "ollama": {
        const { Ollama: Ollama2 } = await import("ollama");
        const client = new Ollama2({ host: config.baseUrl || DEFAULT_URLS.ollama });
        const response = await client.list();
        const models = response.models.map((m) => m.name).sort();
        return { connected: true, models };
      }
      case "openai": {
        const { default: OpenAI2 } = await import("openai");
        const client = new OpenAI2({
          apiKey: config.apiKey || process.env.OPENAI_API_KEY,
          ...config.baseUrl ? { baseURL: config.baseUrl } : {}
        });
        const response = await client.models.list();
        // Keep only chat-capable families; the raw list includes embeddings,
        // audio, etc.
        const models = response.data.map((m) => m.id).filter((id) => id.startsWith("gpt-") || id.startsWith("o") || id.startsWith("chatgpt-")).sort();
        return { connected: true, models };
      }
      case "anthropic": {
        const { default: Anthropic2 } = await import("@anthropic-ai/sdk");
        const client = new Anthropic2({
          apiKey: config.apiKey || process.env.ANTHROPIC_API_KEY,
          ...config.baseUrl ? { baseURL: config.baseUrl } : {}
        });
        try {
          const response = await client.models.list({ limit: 100 });
          const models = response.data.map((m) => m.id).sort();
          return { connected: true, models };
        } catch {
          // Listing failed but the client constructed; report connected with
          // the static fallback list.
          return { connected: true, models: KNOWN_ANTHROPIC_MODELS };
        }
      }
      case "google": {
        const apiKey = config.apiKey || process.env.GOOGLE_API_KEY || "";
        if (!apiKey) return { connected: false, models: KNOWN_GOOGLE_MODELS, error: "No API key" };
        // Google is probed via raw REST rather than an SDK.
        const res = await fetch(
          `https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`
        );
        if (!res.ok) {
          const body = await res.json().catch(() => ({}));
          const msg = body?.error?.message || `HTTP ${res.status}`;
          return { connected: false, models: KNOWN_GOOGLE_MODELS, error: msg };
        }
        const data = await res.json();
        // Only gemini models that support generateContent, without the
        // "models/" resource prefix.
        const models = (data.models || []).filter((m) => m.name.includes("gemini") && m.supportedGenerationMethods?.includes("generateContent")).map((m) => m.name.replace("models/", "")).sort();
        return { connected: true, models: models.length > 0 ? models : KNOWN_GOOGLE_MODELS };
      }
      default:
        return { connected: false, models: [], error: `Unknown backend: ${name}` };
    }
  } catch (err) {
    const error = err instanceof Error ? err.message : String(err);
    const fallbackModels = name === "anthropic" ? KNOWN_ANTHROPIC_MODELS : name === "google" ? KNOWN_GOOGLE_MODELS : [];
    return { connected: false, models: fallbackModels, error };
  }
}
|
|
1194
|
+
// At most one discussion runs at a time; starting a new one aborts the previous.
let activeAbortController = null;
// User messages queued from the renderer, consumed one per beforeTurn callback.
let injectionBuffer = [];
|
|
1196
|
+
function registerIpcHandlers(ipcMain2, getWindow) {
|
|
1197
|
+
ipcMain2.handle("counsellors:list", async (_event, councilDir) => {
|
|
1198
|
+
const configPath = join(homedir(), ".ai-council", "config.json");
|
|
1199
|
+
let config = { backends: {} };
|
|
1200
|
+
try {
|
|
1201
|
+
const raw = await readFile(configPath, "utf-8");
|
|
1202
|
+
config = JSON.parse(raw);
|
|
1203
|
+
} catch {
|
|
1204
|
+
}
|
|
1205
|
+
const registeredPaths = getRegisteredPaths(config);
|
|
1206
|
+
const registry = config.counsellors ?? {};
|
|
1207
|
+
const counsellors = await loadCounsellors(councilDir, registeredPaths);
|
|
1208
|
+
return counsellors.map((c) => {
|
|
1209
|
+
const regEntry = registry[c.id];
|
|
1210
|
+
return {
|
|
1211
|
+
id: c.id,
|
|
1212
|
+
dirPath: c.dirPath,
|
|
1213
|
+
name: c.frontmatter.name,
|
|
1214
|
+
description: c.frontmatter.description,
|
|
1215
|
+
backend: c.frontmatter.backend,
|
|
1216
|
+
model: c.frontmatter.model,
|
|
1217
|
+
temperature: c.frontmatter.temperature,
|
|
1218
|
+
interests: c.frontmatter.interests,
|
|
1219
|
+
avatarUrl: c.avatarUrl,
|
|
1220
|
+
source: regEntry?.source,
|
|
1221
|
+
registryUrl: regEntry?.url
|
|
1222
|
+
};
|
|
1223
|
+
});
|
|
1224
|
+
});
|
|
1225
|
+
ipcMain2.handle("counsellors:get", async (_event, dirPath) => {
|
|
1226
|
+
const aboutPath = join(dirPath, "ABOUT.md");
|
|
1227
|
+
const raw = await readFile(aboutPath, "utf-8");
|
|
1228
|
+
const { data, content } = matter(raw);
|
|
1229
|
+
return { frontmatter: data, body: content.trim(), raw };
|
|
1230
|
+
});
|
|
1231
|
+
ipcMain2.handle("counsellors:save", async (_event, dirPath, aboutMd) => {
|
|
1232
|
+
const aboutPath = join(dirPath, "ABOUT.md");
|
|
1233
|
+
await writeFile(aboutPath, aboutMd, "utf-8");
|
|
1234
|
+
return { success: true };
|
|
1235
|
+
});
|
|
1236
|
+
ipcMain2.handle("counsellors:create", async (_event, councilDir, id, aboutMd) => {
|
|
1237
|
+
const dirPath = join(councilDir, id);
|
|
1238
|
+
await mkdir(dirPath, { recursive: true });
|
|
1239
|
+
await writeFile(join(dirPath, "ABOUT.md"), aboutMd, "utf-8");
|
|
1240
|
+
return { success: true, dirPath };
|
|
1241
|
+
});
|
|
1242
|
+
ipcMain2.handle("counsellors:delete", async (_event, dirPath) => {
|
|
1243
|
+
await rm(dirPath, { recursive: true, force: true });
|
|
1244
|
+
return { success: true };
|
|
1245
|
+
});
|
|
1246
|
+
ipcMain2.handle("config:get", async () => {
|
|
1247
|
+
const configPath = join(homedir(), ".ai-council", "config.json");
|
|
1248
|
+
let config = { backends: {} };
|
|
1249
|
+
try {
|
|
1250
|
+
const raw = await readFile(configPath, "utf-8");
|
|
1251
|
+
config = JSON.parse(raw);
|
|
1252
|
+
} catch {
|
|
1253
|
+
}
|
|
1254
|
+
const envStatus = {
|
|
1255
|
+
ANTHROPIC_API_KEY: !!process.env.ANTHROPIC_API_KEY,
|
|
1256
|
+
OPENAI_API_KEY: !!process.env.OPENAI_API_KEY,
|
|
1257
|
+
GOOGLE_API_KEY: !!process.env.GOOGLE_API_KEY
|
|
1258
|
+
};
|
|
1259
|
+
const suffix = (key) => key ? "..." + key.slice(-4) : void 0;
|
|
1260
|
+
const envKeySuffix = {
|
|
1261
|
+
ANTHROPIC_API_KEY: suffix(process.env.ANTHROPIC_API_KEY),
|
|
1262
|
+
OPENAI_API_KEY: suffix(process.env.OPENAI_API_KEY),
|
|
1263
|
+
GOOGLE_API_KEY: suffix(process.env.GOOGLE_API_KEY)
|
|
1264
|
+
};
|
|
1265
|
+
return { config, envStatus, envKeySuffix, defaultUrls: DEFAULT_URLS };
|
|
1266
|
+
});
|
|
1267
|
+
ipcMain2.handle("backend:probe", async (_event, name, config) => {
|
|
1268
|
+
return probeBackend(name, config);
|
|
1269
|
+
});
|
|
1270
|
+
ipcMain2.handle("config:save", async (_event, config) => {
|
|
1271
|
+
const configDir = join(homedir(), ".ai-council");
|
|
1272
|
+
await mkdir(configDir, { recursive: true });
|
|
1273
|
+
await writeFile(join(configDir, "config.json"), JSON.stringify(config, null, 2), "utf-8");
|
|
1274
|
+
clearCaches();
|
|
1275
|
+
return { success: true };
|
|
1276
|
+
});
|
|
1277
|
+
// Run a full council discussion, streaming events to the renderer.
// Params: { topic, topicSource, councilDir, counsellorIds?, rounds, mode,
// infographicBackends? }. Events go out on the "discussion:event" channel.
ipcMain2.handle("discussion:start", async (_event, params) => {
  const win = getWindow();
  if (!win) return { error: "No window" };
  // Cancel any discussion that is still running before starting a new one.
  if (activeAbortController) {
    activeAbortController.abort();
  }
  activeAbortController = new AbortController();
  injectionBuffer = [];
  // Load persisted config; a missing/corrupt file falls back to defaults.
  const configPath = join(homedir(), ".ai-council", "config.json");
  let config = { backends: {} };
  try {
    const raw = await readFile(configPath, "utf-8");
    config = JSON.parse(raw);
  } catch {
    // best-effort: keep the empty default config
  }
  const registeredPaths = getRegisteredPaths(config);
  const allCounsellors = await loadCounsellors(params.councilDir, registeredPaths);
  const counsellors = params.counsellorIds?.length ? allCounsellors.filter((c) => params.counsellorIds.includes(c.id)) : allCounsellors;
  if (counsellors.length === 0) {
    // Fix: this early return skips the `finally` below, so release the
    // controller here — otherwise a dangling controller survives the run.
    activeAbortController = null;
    win.webContents.send("discussion:event", { type: "error", counsellorName: "", error: "No counsellors found" });
    return;
  }
  // Forward engine events to the renderer unless the window was closed.
  const send = (event) => {
    if (!win.isDestroyed()) {
      win.webContents.send("discussion:event", event);
    }
  };
  // Drain one queued user injection per turn, as a synthetic "You" turn.
  const beforeTurn = async () => {
    if (injectionBuffer.length === 0) return null;
    const content = injectionBuffer.shift();
    return {
      round: 0,
      counsellorId: "__user__",
      counsellorName: "You",
      content,
      timestamp: new Date().toISOString(),
      model: "human",
      backend: "human"
    };
  };
  const opts = {
    topic: params.topic,
    topicSource: params.topicSource,
    counsellors,
    rounds: params.rounds,
    onEvent: send,
    beforeTurn,
    signal: activeAbortController.signal,
    mode: params.mode,
    config
  };
  try {
    const result = await runConversation(opts);
    // Secretary post-processing (merged duplicate condition): streamed
    // summary first, then a short generated title.
    if (config.secretary?.backend) {
      try {
        send({ type: "summary_start" });
        const secretaryResult = await runSecretary({
          result,
          config,
          onChunk: (delta) => {
            send({ type: "summary_chunk", delta });
          },
          signal: activeAbortController?.signal
        });
        result.summary = secretaryResult.text;
        if (secretaryResult.diagram) {
          result.diagram = secretaryResult.diagram;
        }
        send({ type: "summary_complete", summary: secretaryResult.text, diagram: secretaryResult.diagram });
      } catch (err) {
        log$1.error("ipc:discussion", "Secretary summary failed", err);
        send({ type: "error", counsellorName: "Secretary", error: err instanceof Error ? err.message : String(err) });
      }
      try {
        const firstRoundTurns = result.turns.filter((t) => t.round === 1);
        const title = await generateTitle({
          topic: result.topic,
          firstRoundTurns,
          config
        });
        result.title = title;
        send({ type: "title_generated", title });
      } catch (err) {
        // Title is cosmetic: log and continue without one.
        log$1.error("ipc:discussion", "Title generation failed", err);
      }
    }
    if (params.infographicBackends?.length) {
      for (const backend of params.infographicBackends) {
        try {
          send({ type: "infographic_start" });
          const infographicData = await generateInfographic(result, config, backend);
          if (!result.infographics) result.infographics = [];
          result.infographics.push(infographicData);
          send({ type: "infographic_complete", infographic: infographicData });
        } catch (err) {
          log$1.error("ipc:discussion", `Infographic generation failed (${backend})`, err);
          send({ type: "infographic_error", error: err instanceof Error ? err.message : String(err) });
        }
      }
    }
    send({ type: "complete", result });
    try {
      await saveToHistory(result);
    } catch (err) {
      log$1.error("ipc:discussion", "Failed to save to history", err);
    }
  } catch (err) {
    log$1.error("ipc:discussion", "Discussion failed", err);
    send({ type: "error", counsellorName: "", error: err instanceof Error ? err.message : String(err) });
  } finally {
    activeAbortController = null;
  }
});
|
|
1392
|
+
// Abort the in-flight discussion, if any, and clear the controller.
ipcMain2.handle("discussion:stop", async () => {
  activeAbortController?.abort();
  activeAbortController = null;
  return { success: true };
});
// Queue a user message to be injected before the next counsellor turn.
ipcMain2.handle("discussion:inject", async (_event, content) => {
  injectionBuffer.push(content);
  return { success: true };
});
// Counsellor registry management.
ipcMain2.handle("registry:add-local", async (_event, dirPath) => addLocalCounsellor(dirPath));
ipcMain2.handle("registry:add-remote", async (_event, url) => addRemoteCounsellor(url));
ipcMain2.handle("registry:remove", async (_event, id, deleteFiles) => {
  await removeCounsellor(id, deleteFiles);
  return { success: true };
});
|
|
1413
|
+
// Reveal the counsellor's ABOUT.md in the OS file manager.
ipcMain2.handle("shell:open-in-finder", async (_event, dirPath) => {
  shell.showItemInFolder(join(dirPath, "ABOUT.md"));
});
// NOTE(review): `open -a Terminal` is macOS-only — presumably this app
// targets macOS; confirm before shipping on other platforms.
ipcMain2.handle("shell:open-in-terminal", async (_event, dirPath) => {
  const run = promisify(execFile);
  await run("open", ["-a", "Terminal", dirPath]);
});
// Prefer VS Code's `code` CLI; fall back to the system default opener.
ipcMain2.handle("shell:open-in-editor", async (_event, dirPath) => {
  const run = promisify(execFile);
  try {
    await run("code", [dirPath]);
  } catch {
    shell.openPath(dirPath);
  }
});
|
|
1428
|
+
// History CRUD: list all entries, fetch one by id, delete one by id.
ipcMain2.handle("history:list", async () => {
  return listHistory();
});
ipcMain2.handle("history:get", async (_event, id) => {
  return getHistoryEntry(id);
});
ipcMain2.handle("history:delete", async (_event, id) => {
  await deleteHistoryEntry(id);
  return { success: true };
});
|
|
1434
|
+
// Generate an infographic for a saved discussion and persist it on the
// history entry. Returns { infographic }.
ipcMain2.handle("infographic:generate", async (_event, historyId, backend) => {
  // Load persisted config; a missing/corrupt file falls back to defaults.
  const configPath = join(homedir(), ".ai-council", "config.json");
  let config = { backends: {} };
  try {
    const raw = await readFile(configPath, "utf-8");
    config = JSON.parse(raw);
  } catch {
    // best-effort: keep the empty default config
  }
  const result = await getHistoryEntry(historyId);
  // Fix: fail with a clear message instead of an opaque downstream error
  // when the history entry does not exist.
  if (!result) {
    throw new Error(`History entry not found: ${historyId}`);
  }
  const infographicData = await generateInfographic(result, config, backend);
  await addInfographicToHistory(historyId, infographicData);
  return { infographic: infographicData };
});
// Remove the infographic at `index` from a history entry.
ipcMain2.handle("infographic:delete", async (_event, historyId, index) => {
  await deleteInfographicFromHistory(historyId, index);
  return { success: true };
});
|
|
1451
|
+
// Read an attached file and return `{ name, content }` as text.
// Plain-text extensions are read directly; office/PDF formats are
// converted through the external `markitdown` CLI; anything else yields
// a bracketed placeholder string instead of throwing.
ipcMain2.handle("file:read-as-text", async (_event, filePath) => {
  const name = basename(filePath);
  // Lowercased extension including the dot, "" when there is no dot.
  const dot = name.lastIndexOf(".");
  const ext = dot === -1 ? "" : name.slice(dot).toLowerCase();
  const textExtensions = new Set(
    (".txt .md .csv .json .yaml .yml .xml .html .htm .js .ts .jsx .tsx " +
     ".py .rb .go .rs .java .c .cpp .h .hpp .css .scss .less .sql .sh " +
     ".bash .zsh .env .toml .ini .cfg .conf .log .svg").split(" ")
  );
  const markitdownExtensions = new Set(
    ".pdf .docx .pptx .xlsx .xls .doc .ppt .epub .rtf".split(" ")
  );
  if (textExtensions.has(ext)) {
    try {
      const content = await readFile(filePath, "utf-8");
      return { name, content };
    } catch (err) {
      return { name, content: `[Error reading file: ${err instanceof Error ? err.message : String(err)}]` };
    }
  }
  if (markitdownExtensions.has(ext)) {
    const run = promisify(execFile);
    try {
      // 30 s timeout, 10 MB output cap for the conversion.
      const { stdout } = await run("markitdown", [filePath], {
        timeout: 3e4,
        maxBuffer: 10 * 1024 * 1024
      });
      return { name, content: stdout };
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      if (msg.includes("ENOENT")) {
        return {
          name,
          content: `[Cannot convert ${ext} file: markitdown is not installed. Run: pip install 'markitdown[all]']`
        };
      }
      return { name, content: `[Error converting file: ${msg}]` };
    }
  }
  return { name, content: `[Unsupported file type: ${name}]` };
});
|
|
1533
|
+
// Report whether the `markitdown` CLI is on PATH, and its version string.
ipcMain2.handle("markitdown:check", async () => {
  const run = promisify(execFile);
  try {
    const { stdout } = await run("markitdown", ["--version"], { timeout: 5e3 });
    return { installed: true, version: stdout.trim() };
  } catch {
    // Any failure (not installed, timeout) reads as "not installed".
    return { installed: false };
  }
});
|
|
1542
|
+
// Install markitdown via pip. Improvement: try `pip`, then fall back to
// `pip3` (on many systems only pip3 is on PATH). Two-minute timeout and
// 10 MB output cap per attempt. Returns { success } or { success, error }.
ipcMain2.handle("markitdown:install", async () => {
  const run = promisify(execFile);
  const opts = { timeout: 12e4, maxBuffer: 10 * 1024 * 1024 };
  let lastErr;
  for (const pip of ["pip", "pip3"]) {
    try {
      await run(pip, ["install", "markitdown[all]"], opts);
      return { success: true };
    } catch (err) {
      lastErr = err;
    }
  }
  return { success: false, error: lastErr instanceof Error ? lastErr.message : String(lastErr) };
});
|
|
1554
|
+
// Native directory picker; resolves to the chosen path, or null when
// there is no window or the user cancels.
ipcMain2.handle("dialog:selectDirectory", async () => {
  const win = getWindow();
  if (!win) return null;
  const { canceled, filePaths } = await dialog.showOpenDialog(win, {
    properties: ["openDirectory"]
  });
  return canceled ? null : filePaths[0];
});
|
|
1562
|
+
}
|
|
1563
|
+
const __filename$1 = fileURLToPath(import.meta.url);
const __dirname$1 = dirname(__filename$1);
// Debug log lives one level above the bundle directory.
const logFile = join(__dirname$1, "..", "electron-debug.log");
// Append one timestamped line to the debug log; non-string args are
// JSON-encoded before joining.
function log(source, ...args) {
  const rendered = args.map((a) => (typeof a === "string" ? a : JSON.stringify(a))).join(" ");
  appendFileSync(logFile, `[${new Date().toISOString()}] [${source}] ${rendered}\n`);
}
// Truncate the log at startup so every run begins with a fresh header.
writeFileSync(logFile, `=== AI Council Electron — started ${new Date().toISOString()} ===\n`);
let mainWindow = null;
|
|
1574
|
+
// Create the main application window, wire renderer diagnostics into the
// debug log, and load either the Vite dev server or the built renderer.
function createWindow() {
  log("main", "Creating BrowserWindow");
  mainWindow = new BrowserWindow({
    width: 1200,
    height: 800,
    minWidth: 800,
    minHeight: 600,
    title: "AI Council",
    webPreferences: {
      contextIsolation: true,
      nodeIntegration: false,
      preload: join(__dirname$1, "preload.mjs")
    }
  });
  // Mirror renderer console output into the debug log.
  mainWindow.webContents.on("console-message", (_event, level, message, line, sourceId) => {
    const levelName = ["DEBUG", "INFO", "WARN", "ERROR"][level] || "LOG";
    log(`renderer:${levelName}`, `${message} (${sourceId}:${line})`);
  });
  // Record renderer crashes and load failures for post-mortem debugging.
  mainWindow.webContents.on("render-process-gone", (_event, details) => {
    log("main:CRASH", "Renderer process gone:", details);
  });
  mainWindow.webContents.on("did-fail-load", (_event, errorCode, errorDescription) => {
    log("main:LOAD_ERROR", `Failed to load: ${errorCode} ${errorDescription}`);
  });
  const url = process.env.VITE_DEV_SERVER_URL;
  if (url) {
    log("main", `Loading dev server URL: ${url}`);
    mainWindow.loadURL(url);
    // Fix: reuse `url` rather than re-reading the env var in a second,
    // separate `if` — devtools are a dev-server-only affordance.
    mainWindow.webContents.openDevTools();
  } else {
    const filePath = join(__dirname$1, "../dist-renderer/index.html");
    log("main", `Loading file: ${filePath}`);
    mainWindow.loadFile(filePath);
  }
  mainWindow.on("closed", () => {
    mainWindow = null;
  });
}
|
|
1614
|
+
// The custom `council-file://` scheme serves local files to the renderer
// (fetch-capable, CSP-exempt). Privileges must be declared before ready.
protocol.registerSchemesAsPrivileged([
  { scheme: "council-file", privileges: { bypassCSP: true, supportFetchAPI: true } }
]);
app.whenReady().then(() => {
  log("main", "App ready, registering IPC handlers");
  // Resolve council-file URLs by fetching the underlying file: URL.
  protocol.handle("council-file", (request) => {
    const localPath = decodeURIComponent(request.url.replace("council-file://", ""));
    return net.fetch(pathToFileURL(localPath).href);
  });
  registerIpcHandlers(ipcMain, () => mainWindow);
  createWindow();
  // macOS: clicking the dock icon with no windows open recreates one.
  app.on("activate", () => {
    const open = BrowserWindow.getAllWindows();
    if (open.length === 0) {
      createWindow();
    }
  });
});
|
|
1631
|
+
// Quit when the last window closes, except on macOS where apps
// conventionally stay alive until explicitly quit.
app.on("window-all-closed", () => {
  if (process.platform === "darwin") return;
  app.quit();
});
|