brainctl 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/cli.d.ts +4 -0
  2. package/dist/cli.js +16 -2
  3. package/dist/commands/mcp.d.ts +2 -0
  4. package/dist/commands/mcp.js +9 -0
  5. package/dist/commands/profile.d.ts +3 -0
  6. package/dist/commands/profile.js +43 -0
  7. package/dist/commands/sync.d.ts +3 -0
  8. package/dist/commands/sync.js +31 -0
  9. package/dist/commands/ui.d.ts +2 -0
  10. package/dist/commands/ui.js +10 -0
  11. package/dist/config.d.ts +9 -1
  12. package/dist/config.js +24 -6
  13. package/dist/context/memory.js +7 -2
  14. package/dist/errors.d.ts +9 -0
  15. package/dist/errors.js +15 -0
  16. package/dist/executor/process.js +2 -0
  17. package/dist/executor/resolver.js +5 -2
  18. package/dist/executor/types.d.ts +1 -0
  19. package/dist/mcp/server.d.ts +7 -0
  20. package/dist/mcp/server.js +183 -0
  21. package/dist/services/config-write-service.d.ts +12 -0
  22. package/dist/services/config-write-service.js +70 -0
  23. package/dist/services/memory-write-service.d.ts +12 -0
  24. package/dist/services/memory-write-service.js +56 -0
  25. package/dist/services/profile-service.d.ts +30 -0
  26. package/dist/services/profile-service.js +190 -0
  27. package/dist/services/run-service.d.ts +5 -1
  28. package/dist/services/run-service.js +3 -2
  29. package/dist/services/sync/agent-writer.d.ts +18 -0
  30. package/dist/services/sync/agent-writer.js +5 -0
  31. package/dist/services/sync/claude-writer.d.ts +2 -0
  32. package/dist/services/sync/claude-writer.js +83 -0
  33. package/dist/services/sync/codex-writer.d.ts +2 -0
  34. package/dist/services/sync/codex-writer.js +116 -0
  35. package/dist/services/sync/gemini-writer.d.ts +2 -0
  36. package/dist/services/sync/gemini-writer.js +83 -0
  37. package/dist/services/sync-service.d.ts +15 -0
  38. package/dist/services/sync-service.js +64 -0
  39. package/dist/types.d.ts +39 -1
  40. package/dist/ui/routes.d.ts +10 -0
  41. package/dist/ui/routes.js +228 -0
  42. package/dist/ui/server.d.ts +14 -0
  43. package/dist/ui/server.js +47 -0
  44. package/dist/ui/streaming.d.ts +3 -0
  45. package/dist/ui/streaming.js +16 -0
  46. package/dist/web/assets/index-CRJ6cM0Q.css +1 -0
  47. package/dist/web/assets/index-Cr8gt3VF.js +9 -0
  48. package/dist/web/index.html +14 -0
  49. package/package.json +17 -3
@@ -0,0 +1,56 @@
1
+ import { lstat, mkdir, realpath, writeFile } from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import { MemoryPathError } from '../errors.js';
4
// Factory for the memory write service: persists memory file content to disk
// while refusing writes that would escape the workspace root (e.g. via
// `..` traversal or symlinks inside request.filePath).
export function createMemoryWriteService() {
    return {
        // Write request.content to request.filePath (resolved against
        // request.cwd, defaulting to process.cwd()); throws MemoryPathError
        // when the resolved target lands outside the workspace root.
        async execute(request) {
            const cwd = request.cwd ?? process.cwd();
            const targetPath = path.resolve(cwd, request.filePath);
            // realpath() the workspace root so a symlinked workspace compares
            // correctly against the symlink-resolved target below.
            const workspaceRoot = await realpath(cwd);
            // Resolve symlinks in the target's nearest existing ancestor so a
            // symlink inside the workspace cannot redirect the write outside it.
            const resolvedTargetPath = await resolvePathForWrite(targetPath);
            if (!isWithinDirectory(workspaceRoot, resolvedTargetPath)) {
                throw new MemoryPathError('Memory files must stay within the workspace root.');
            }
            // Validation passed: create missing parents and write the file.
            await mkdir(path.dirname(targetPath), { recursive: true });
            await writeFile(targetPath, request.content, 'utf8');
            return { filePath: targetPath };
        }
    };
}
20
/**
 * Resolve targetPath for a pending write: realpath() its nearest existing
 * ancestor, then re-append the not-yet-created remainder of the path.
 */
async function resolvePathForWrite(targetPath) {
    const nearestExisting = await findNearestExistingPath(targetPath);
    const realExisting = await realpath(nearestExisting);
    // The whole path already exists: its realpath is the answer.
    if (nearestExisting === targetPath) {
        return realExisting;
    }
    // Otherwise graft the missing suffix onto the resolved ancestor.
    const missingSuffix = path.relative(nearestExisting, targetPath);
    return path.resolve(realExisting, missingSuffix);
}
/**
 * Walk upward from targetPath until an existing filesystem entry is found
 * (lstat, so symlinks themselves count). Throws MemoryPathError when even
 * the filesystem root does not exist.
 */
async function findNearestExistingPath(targetPath) {
    for (let candidate = targetPath; ; ) {
        let exists = true;
        try {
            await lstat(candidate);
        }
        catch (error) {
            // Only ENOENT means "keep climbing"; anything else is a real failure.
            if (!isMissingPathError(error)) {
                throw error;
            }
            exists = false;
        }
        if (exists) {
            return candidate;
        }
        const parent = path.dirname(candidate);
        // dirname() is a fixed point at the root — nothing left to climb.
        if (parent === candidate) {
            throw new MemoryPathError(`Could not resolve filesystem path for ${targetPath}.`);
        }
        candidate = parent;
    }
}
/** True when targetPath is parentDirectory itself or a descendant of it. */
function isWithinDirectory(parentDirectory, targetPath) {
    const rel = path.relative(parentDirectory, targetPath);
    // Identical paths relate as the empty string.
    if (rel === '') {
        return true;
    }
    // Escapes upward out of the parent directory.
    if (rel === '..' || rel.startsWith(`..${path.sep}`)) {
        return false;
    }
    // An absolute relative path means a different root/drive.
    return !path.isAbsolute(rel);
}
/** Narrow an unknown thrown value to a Node ENOENT ("no such file") error. */
function isMissingPathError(error) {
    if (!(error instanceof Error)) {
        return false;
    }
    return 'code' in error && error.code === 'ENOENT';
}
@@ -0,0 +1,30 @@
1
import type { BrainctlMetaConfig, ProfileConfig } from '../types.js';
/**
 * Access to brainctl profiles stored as YAML files under
 * `.brainctl/profiles/` in a workspace.
 */
export interface ProfileService {
    /** List available profile names plus the active one (null when unset). */
    list(options?: {
        cwd?: string;
    }): Promise<{
        profiles: string[];
        activeProfile: string | null;
    }>;
    /** Load and parse one profile by name; rejects when it does not exist. */
    get(options: {
        cwd?: string;
        name: string;
    }): Promise<ProfileConfig>;
    /** Scaffold a new profile file; resolves with the created file's path. */
    create(options: {
        cwd?: string;
        name: string;
        description?: string;
    }): Promise<{
        profilePath: string;
    }>;
    /** Mark a profile as active; resolves with the previously active name. */
    use(options: {
        cwd?: string;
        name: string;
    }): Promise<{
        previousProfile: string | null;
    }>;
    /** Read the workspace-level `.brainctl/config.yaml` meta configuration. */
    getMetaConfig(options?: {
        cwd?: string;
    }): Promise<BrainctlMetaConfig>;
}
export declare function createProfileService(): ProfileService;
@@ -0,0 +1,190 @@
1
+ import { readdir, readFile, writeFile, mkdir, stat } from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import YAML from 'yaml';
4
+ import { ProfileError, ProfileNotFoundError } from '../errors.js';
5
// Workspace-relative locations of brainctl state.
// NOTE(review): BRAINCTL_DIR appears unused within this file — confirm it is
// referenced elsewhere before removing.
const BRAINCTL_DIR = '.brainctl';
const PROFILES_DIR = '.brainctl/profiles';
const META_CONFIG = '.brainctl/config.yaml';
8
// Factory for the profile service: lists, reads, scaffolds and activates
// profiles stored as `<cwd>/.brainctl/profiles/<name>.yaml`.
// NOTE(review): options.name is interpolated into filesystem paths without
// sanitization — a name like "../x" would escape the profiles directory;
// confirm callers validate names before trusting this in untrusted contexts.
export function createProfileService() {
    return {
        // List profile names (file stem of every *.yaml) and the active one.
        async list(options = {}) {
            const cwd = options.cwd ?? process.cwd();
            const profilesDir = path.join(cwd, PROFILES_DIR);
            let files = [];
            try {
                const entries = await readdir(profilesDir);
                files = entries
                    .filter((f) => f.endsWith('.yaml'))
                    .map((f) => f.replace(/\.yaml$/, ''))
                    .sort();
            }
            catch {
                // No profiles directory yet — report an empty list.
            }
            const meta = await loadMetaConfig(cwd);
            return {
                profiles: files,
                // Empty string in the meta config means "no active profile".
                activeProfile: meta.active_profile || null,
            };
        },
        // Read and parse a single profile; any read failure is surfaced as
        // "not found" (ProfileNotFoundError).
        async get(options) {
            const cwd = options.cwd ?? process.cwd();
            const profilePath = path.join(cwd, PROFILES_DIR, `${options.name}.yaml`);
            let source;
            try {
                source = await readFile(profilePath, 'utf8');
            }
            catch {
                throw new ProfileNotFoundError(`Profile "${options.name}" not found at ${profilePath}`);
            }
            return parseProfile(source, options.name);
        },
        // Create a new profile file with an example skill scaffold.
        // Fails if a profile of that name already exists.
        async create(options) {
            const cwd = options.cwd ?? process.cwd();
            const profilesDir = path.join(cwd, PROFILES_DIR);
            const profilePath = path.join(profilesDir, `${options.name}.yaml`);
            if (await pathExists(profilePath)) {
                throw new ProfileError(`Profile "${options.name}" already exists.`);
            }
            const scaffold = {
                name: options.name,
                description: options.description ?? '',
                skills: {
                    example: {
                        description: 'Example skill',
                        prompt: 'Describe what this skill does...',
                    },
                },
                mcps: {},
                memory: {
                    paths: ['./memory'],
                },
            };
            await mkdir(profilesDir, { recursive: true });
            await writeFile(profilePath, YAML.stringify(scaffold), 'utf8');
            return { profilePath };
        },
        // Record a profile as active in .brainctl/config.yaml, returning
        // the previously active profile name (null when none).
        async use(options) {
            const cwd = options.cwd ?? process.cwd();
            // Validate profile exists
            const profilePath = path.join(cwd, PROFILES_DIR, `${options.name}.yaml`);
            if (!(await pathExists(profilePath))) {
                throw new ProfileNotFoundError(`Profile "${options.name}" not found.`);
            }
            const meta = await loadMetaConfig(cwd);
            const previousProfile = meta.active_profile || null;
            meta.active_profile = options.name;
            const metaPath = path.join(cwd, META_CONFIG);
            await mkdir(path.dirname(metaPath), { recursive: true });
            await writeFile(metaPath, YAML.stringify(meta), 'utf8');
            return { previousProfile };
        },
        // Expose the normalized meta config (active profile + agents list).
        async getMetaConfig(options = {}) {
            const cwd = options.cwd ?? process.cwd();
            return loadMetaConfig(cwd);
        },
    };
}
88
/**
 * Read `<cwd>/.brainctl/config.yaml` and return a normalized meta config.
 *
 * A missing/unreadable file and a readable file that omits keys now fall back
 * to the SAME defaults. Previously the missing-`agents`-key branch defaulted
 * to ['claude', 'codex'] while the missing-file branch defaulted to
 * ['claude', 'codex', 'gemini'] — an inconsistency that made the effective
 * agent set depend on whether an (otherwise empty) config file existed.
 */
async function loadMetaConfig(cwd) {
    // Single source of truth for the fallback agent list.
    const defaultAgents = ['claude', 'codex', 'gemini'];
    const metaPath = path.join(cwd, META_CONFIG);
    try {
        const source = await readFile(metaPath, 'utf8');
        // YAML.parse yields null for an empty document; coerce to {}.
        const parsed = YAML.parse(source) ?? {};
        return {
            active_profile: typeof parsed.active_profile === 'string' ? parsed.active_profile : '',
            // Copy so callers mutating the array cannot corrupt the default.
            agents: Array.isArray(parsed.agents) ? parsed.agents : [...defaultAgents],
        };
    }
    catch {
        // No config file (or unreadable/invalid YAML): same defaults.
        return { active_profile: '', agents: [...defaultAgents] };
    }
}
102
/**
 * Parse and validate profile YAML into a normalized profile object.
 * Entries that fail validation are dropped silently; only YAML syntax errors
 * or a non-mapping top level throw ProfileError.
 */
function parseProfile(source, name) {
    let parsed;
    try {
        // YAML.parse returns null for an empty document; coerce to {}.
        parsed = YAML.parse(source) ?? {};
    }
    catch {
        throw new ProfileError(`Profile "${name}" has invalid YAML.`);
    }
    // The top level must be a mapping (not a scalar or a sequence).
    if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) {
        throw new ProfileError(`Profile "${name}" has invalid structure.`);
    }
    const data = parsed;
    // Skills: keep only mapping entries that carry a string `prompt`.
    const skills = {};
    if (data.skills && typeof data.skills === 'object' && !Array.isArray(data.skills)) {
        for (const [key, value] of Object.entries(data.skills)) {
            if (value && typeof value === 'object' && !Array.isArray(value)) {
                const s = value;
                if (typeof s.prompt === 'string') {
                    skills[key] = {
                        prompt: s.prompt,
                        description: typeof s.description === 'string' ? s.description : undefined,
                    };
                }
            }
        }
    }
    // MCP servers: two accepted shapes — `npm` (requires string `package`)
    // and `bundled` (requires string `command`). Anything else is dropped.
    const mcps = {};
    if (data.mcps && typeof data.mcps === 'object' && !Array.isArray(data.mcps)) {
        for (const [key, value] of Object.entries(data.mcps)) {
            if (value && typeof value === 'object' && !Array.isArray(value)) {
                const m = value;
                if (m.type === 'npm' && typeof m.package === 'string') {
                    mcps[key] = {
                        type: 'npm',
                        package: m.package,
                        env: parseEnv(m.env),
                    };
                }
                else if (m.type === 'bundled' && typeof m.command === 'string') {
                    mcps[key] = {
                        type: 'bundled',
                        // Missing `path` defaults to '.' — presumably relative
                        // to the workspace; confirm against the executor.
                        path: typeof m.path === 'string' ? m.path : '.',
                        install: typeof m.install === 'string' ? m.install : undefined,
                        command: m.command,
                        // Coerce every arg to a string (YAML may parse numbers).
                        args: Array.isArray(m.args) ? m.args.map(String) : undefined,
                        env: parseEnv(m.env),
                    };
                }
            }
        }
    }
    // Memory paths: keep only the string entries of memory.paths.
    const memoryPaths = [];
    if (data.memory && typeof data.memory === 'object' && !Array.isArray(data.memory)) {
        const mem = data.memory;
        if (Array.isArray(mem.paths)) {
            for (const p of mem.paths) {
                if (typeof p === 'string') {
                    memoryPaths.push(p);
                }
            }
        }
    }
    return {
        // Fall back to the requested name when the document omits `name`.
        name: typeof data.name === 'string' ? data.name : name,
        description: typeof data.description === 'string' ? data.description : undefined,
        skills,
        mcps,
        memory: { paths: memoryPaths },
    };
}
172
/**
 * Normalize a YAML `env` mapping into a string-to-string record.
 * Non-mapping values (including arrays) and empty mappings yield undefined,
 * so callers can omit the key entirely.
 */
function parseEnv(value) {
    const isPlainObject = Boolean(value) && typeof value === 'object' && !Array.isArray(value);
    if (!isPlainObject) {
        return undefined;
    }
    // YAML may parse values as numbers/booleans; coerce everything to strings.
    const entries = Object.entries(value).map(([key, raw]) => [key, String(raw)]);
    if (entries.length === 0) {
        return undefined;
    }
    return Object.fromEntries(entries);
}
182
/** True when stat() succeeds for targetPath (any existing file/directory). */
async function pathExists(targetPath) {
    return stat(targetPath)
        .then(() => true)
        .catch(() => false);
}
@@ -1,7 +1,11 @@
1
1
  import type { ExecutorResolver } from '../executor/resolver.js';
2
2
  import type { ExecutionStep, ExecutionTrace, RunRequest } from '../types.js';
3
3
  export interface RunService {
4
- execute(request: RunRequest): Promise<ExecutionTrace>;
4
+ execute(request: RunRequest, options?: RunServiceExecuteOptions): Promise<ExecutionTrace>;
5
+ }
6
+ export interface RunServiceExecuteOptions {
7
+ onOutputChunk?: (chunk: string) => void;
8
+ streamOutput?: boolean;
5
9
  }
6
10
  interface RunServiceDependencies {
7
11
  resolver?: ExecutorResolver;
@@ -9,7 +9,7 @@ import { createExecutorResolver } from '../executor/resolver.js';
9
9
  export function createRunService(dependencies = {}) {
10
10
  const resolver = dependencies.resolver ?? createExecutorResolver();
11
11
  return {
12
- async execute(request) {
12
+ async execute(request, options = {}) {
13
13
  const cwd = request.cwd ?? process.cwd();
14
14
  const config = await loadConfig({ cwd });
15
15
  const memory = await loadMemory({ paths: config.memory.paths });
@@ -30,7 +30,8 @@ export function createRunService(dependencies = {}) {
30
30
  fallbackUsed = true;
31
31
  }
32
32
  const result = await executor.instance.run(context, {
33
- streamOutput: true
33
+ streamOutput: options.streamOutput ?? true,
34
+ onOutputChunk: options.onOutputChunk
34
35
  });
35
36
  previousOutput = result.output;
36
37
  results.push({
@@ -0,0 +1,18 @@
1
import type { McpServerConfig } from '../../types.js';
/** Input for an agent config write: servers to sync and the workspace cwd. */
export interface AgentWriteOptions {
    mcpServers: Record<string, McpServerConfig>;
    cwd: string;
}
/** Outcome of a write: target config path and backup path (null when none). */
export interface AgentWriteResult {
    configPath: string;
    backedUpTo: string | null;
}
/**
 * Contract implemented by every per-agent config writer
 * (Claude / Codex / Gemini): write MCP server config, or restore the
 * agent's config file from its most recent backup.
 */
export interface AgentConfigWriter {
    write(options: AgentWriteOptions): Promise<AgentWriteResult>;
    restore(options: {
        cwd: string;
    }): Promise<{
        restoredFrom: string;
    }>;
}
/** Local-time timestamp (YYYYMMDD-HHMMSS) used for backup file suffixes. */
export declare function formatTimestamp(): string;
@@ -0,0 +1,5 @@
1
/** Local-time timestamp formatted as YYYYMMDD-HHMMSS, used in backup names. */
export function formatTimestamp() {
    const now = new Date();
    const two = (value) => String(value).padStart(2, '0');
    const datePart = [now.getFullYear(), two(now.getMonth() + 1), two(now.getDate())].join('');
    const timePart = [two(now.getHours()), two(now.getMinutes()), two(now.getSeconds())].join('');
    return `${datePart}-${timePart}`;
}
@@ -0,0 +1,2 @@
1
import type { AgentConfigWriter } from './agent-writer.js';
/** Writer that syncs MCP server config into ~/.claude.json (with backups). */
export declare function createClaudeWriter(): AgentConfigWriter;
@@ -0,0 +1,83 @@
1
+ import { copyFile, readdir, readFile, rename, writeFile } from 'node:fs/promises';
2
+ import { homedir } from 'node:os';
3
+ import path from 'node:path';
4
+ import { SyncError } from '../../errors.js';
5
+ import { formatTimestamp } from './agent-writer.js';
6
/**
 * Agent config writer for Claude: syncs MCP server definitions into the
 * per-project section of ~/.claude.json, backing the file up before every
 * overwrite.
 */
export function createClaudeWriter() {
    return {
        /**
         * Write options.mcpServers (plus a brainctl entry) under
         * projects[options.cwd].mcpServers, preserving every other project.
         * Returns the config path and the backup path (null when no prior
         * file existed).
         */
        async write(options) {
            const configPath = path.join(homedir(), '.claude.json');
            let existing = {};
            let backedUpTo = null;
            // Read the raw file first so it can be backed up even when its
            // JSON is invalid. Previously a corrupt ~/.claude.json parsed to
            // {} and was then overwritten WITHOUT a backup (the backup branch
            // keyed on Object.keys(existing).length), destroying user data;
            // a file containing `null` would also crash Object.keys().
            let rawSource = null;
            try {
                rawSource = await readFile(configPath, 'utf8');
            }
            catch {
                // No existing config, start fresh
            }
            if (rawSource !== null) {
                const backupPath = `${configPath}.bak.${formatTimestamp()}`;
                await copyFile(configPath, backupPath);
                backedUpTo = backupPath;
                try {
                    const parsed = JSON.parse(rawSource);
                    // Only merge into a plain object; anything else starts fresh.
                    if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
                        existing = parsed;
                    }
                }
                catch {
                    // Invalid JSON: the backup above preserves it; start fresh.
                }
            }
            // Build mcpServers for this project
            const mcpServers = {};
            for (const [name, config] of Object.entries(options.mcpServers)) {
                mcpServers[name] = toClaudeFormat(config);
            }
            // Always include brainctl itself
            mcpServers['brainctl'] = {
                type: 'stdio',
                command: 'npx',
                args: ['-y', 'brainctl', 'mcp'],
            };
            // Merge into existing config (preserve other projects)
            const projects = (existing.projects ?? {});
            const projectConfig = projects[options.cwd] ?? {};
            projectConfig.mcpServers = mcpServers;
            projects[options.cwd] = projectConfig;
            existing.projects = projects;
            // Atomic write: temp file + rename so readers never observe a
            // partially written config.
            const tmpPath = `${configPath}.tmp.${Date.now()}`;
            await writeFile(tmpPath, JSON.stringify(existing, null, 2) + '\n', 'utf8');
            await rename(tmpPath, configPath);
            return { configPath, backedUpTo };
        },
        /** Restore ~/.claude.json from the newest `.bak.*` backup. */
        async restore(options) {
            const configPath = path.join(homedir(), '.claude.json');
            const dir = path.dirname(configPath);
            const base = path.basename(configPath);
            const entries = await readdir(dir);
            // Timestamp suffixes sort lexicographically, so newest is last;
            // reverse() puts it first.
            const backups = entries
                .filter((f) => f.startsWith(`${base}.bak.`))
                .sort()
                .reverse();
            if (backups.length === 0) {
                throw new SyncError('No Claude config backup found.');
            }
            const latestBackup = path.join(dir, backups[0]);
            await copyFile(latestBackup, configPath);
            return { restoredFrom: latestBackup };
        },
    };
}
67
/**
 * Translate a brainctl MCP server config into Claude's stdio server entry.
 * npm configs run via `npx -y <package>`; bundled configs keep their own
 * command and args (args default to []). `env` is attached only when set.
 */
function toClaudeFormat(config) {
    const base = config.type === 'npm'
        ? { type: 'stdio', command: 'npx', args: ['-y', config.package] }
        : { type: 'stdio', command: config.command, args: config.args ?? [] };
    return config.env ? { ...base, env: config.env } : base;
}
@@ -0,0 +1,2 @@
1
import type { AgentConfigWriter } from './agent-writer.js';
/** Writer that syncs MCP server config into ~/.codex/config.toml (with backups). */
export declare function createCodexWriter(): AgentConfigWriter;
@@ -0,0 +1,116 @@
1
+ import { copyFile, mkdir, readdir, readFile, rename, writeFile } from 'node:fs/promises';
2
+ import { homedir } from 'node:os';
3
+ import path from 'node:path';
4
+ import { SyncError } from '../../errors.js';
5
+ import { formatTimestamp } from './agent-writer.js';
6
/**
 * Agent config writer for Codex: rewrites the `[mcp_servers.*]` tables of
 * ~/.codex/config.toml while preserving all other TOML content, backing the
 * file up before every overwrite.
 */
export function createCodexWriter() {
    return {
        // Write options.mcpServers (plus a brainctl entry) as TOML tables.
        // Returns the config path and backup path (null when no prior file).
        async write(options) {
            const configDir = path.join(homedir(), '.codex');
            const configPath = path.join(configDir, 'config.toml');
            let existingContent = '';
            let backedUpTo = null;
            // Read existing config as raw text; a missing file leaves ''.
            try {
                existingContent = await readFile(configPath, 'utf8');
            }
            catch {
                // No existing config
            }
            // Backup any non-empty file — raw text is preserved even if it is
            // invalid TOML (unlike a parse-first approach).
            if (existingContent.length > 0) {
                const backupPath = `${configPath}.bak.${formatTimestamp()}`;
                await copyFile(configPath, backupPath);
                backedUpTo = backupPath;
            }
            // Build MCP servers section
            const allServers = { ...options.mcpServers };
            // Always include brainctl itself
            allServers['brainctl'] = {
                type: 'npm',
                package: 'brainctl',
            };
            const mcpToml = buildMcpToml(allServers);
            // Preserve non-mcp_servers content from existing config
            const existingNonMcp = stripMcpSections(existingContent);
            const finalContent = existingNonMcp.trim().length > 0
                ? `${existingNonMcp.trim()}\n\n${mcpToml}`
                : mcpToml;
            // Atomic write: temp file + rename so readers never see a
            // partially written config.
            await mkdir(configDir, { recursive: true });
            const tmpPath = `${configPath}.tmp.${Date.now()}`;
            await writeFile(tmpPath, finalContent + '\n', 'utf8');
            await rename(tmpPath, configPath);
            return { configPath, backedUpTo };
        },
        // Restore config.toml from the newest `.bak.*` backup; throws
        // SyncError when the directory or a backup is missing.
        async restore(options) {
            const configPath = path.join(homedir(), '.codex', 'config.toml');
            const dir = path.dirname(configPath);
            const base = path.basename(configPath);
            let entries;
            try {
                entries = await readdir(dir);
            }
            catch {
                throw new SyncError('No Codex config directory found.');
            }
            // Timestamp suffixes sort lexicographically; newest first after reverse.
            const backups = entries
                .filter((f) => f.startsWith(`${base}.bak.`))
                .sort()
                .reverse();
            if (backups.length === 0) {
                throw new SyncError('No Codex config backup found.');
            }
            const latestBackup = path.join(dir, backups[0]);
            await copyFile(latestBackup, configPath);
            return { restoredFrom: latestBackup };
        },
    };
}
70
/**
 * Render MCP server configs as TOML `[mcp_servers.*]` tables.
 * npm servers become `npx -y <package>` invocations; bundled servers keep
 * their own command (args emitted only when non-empty). Env vars become a
 * nested `[mcp_servers.<name>.env]` table.
 * NOTE(review): server names are emitted as bare TOML keys — names with
 * dots/spaces would need quoting; confirm upstream validation.
 */
function buildMcpToml(servers) {
    const sections = [];
    for (const [name, config] of Object.entries(servers)) {
        const lines = [`[mcp_servers.${name}]`];
        if (config.type === 'npm') {
            lines.push('command = "npx"', `args = ["-y", ${tomlString(config.package)}]`);
        }
        else {
            lines.push(`command = ${tomlString(config.command)}`);
            if (config.args && config.args.length > 0) {
                lines.push(`args = [${config.args.map(tomlString).join(', ')}]`);
            }
        }
        if (config.env && Object.keys(config.env).length > 0) {
            lines.push('', `[mcp_servers.${name}.env]`);
            for (const [key, value] of Object.entries(config.env)) {
                lines.push(`${key} = ${tomlString(value)}`);
            }
        }
        // Blank line after each section; the final trim drops the last one.
        lines.push('');
        sections.push(lines.join('\n'));
    }
    return sections.join('\n').trim();
}
96
/** Quote a value as a TOML basic string, escaping backslashes and quotes. */
function tomlString(value) {
    const escaped = value.replace(/[\\"]/g, (ch) => `\\${ch}`);
    return `"${escaped}"`;
}
99
/**
 * Remove every `[mcp_servers...]` table (including nested ones such as
 * `[mcp_servers.x.env]`) from TOML content, keeping all other lines verbatim.
 */
function stripMcpSections(content) {
    const isMcpHeader = (line) => /^\[mcp_servers[\].]/.test(line);
    let skipping = false;
    const kept = [];
    for (const line of content.split('\n')) {
        if (isMcpHeader(line)) {
            skipping = true;
            continue;
        }
        // Any other table header ends the skipped region.
        if (skipping && line.startsWith('[')) {
            skipping = false;
        }
        if (!skipping) {
            kept.push(line);
        }
    }
    return kept.join('\n');
}
@@ -0,0 +1,2 @@
1
import type { AgentConfigWriter } from './agent-writer.js';
/** Writer that syncs MCP server config into <cwd>/.gemini/settings.json (with backups). */
export declare function createGeminiWriter(): AgentConfigWriter;
@@ -0,0 +1,83 @@
1
+ import { copyFile, mkdir, readdir, readFile, rename, writeFile } from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import { SyncError } from '../../errors.js';
4
+ import { formatTimestamp } from './agent-writer.js';
5
/**
 * Agent config writer for Gemini: syncs MCP servers into
 * `<cwd>/.gemini/settings.json`, backing the file up before every overwrite.
 */
export function createGeminiWriter() {
    return {
        /**
         * Write options.mcpServers (plus a brainctl entry) into settings.json,
         * preserving all other settings. Returns the config path and the
         * backup path (null when no prior file existed).
         */
        async write(options) {
            const geminiDir = path.join(options.cwd, '.gemini');
            const configPath = path.join(geminiDir, 'settings.json');
            let existing = {};
            let backedUpTo = null;
            // Read the raw file first so it can be backed up even when its
            // JSON is invalid. Previously a corrupt settings.json parsed to {}
            // and was overwritten WITHOUT a backup (the backup branch keyed on
            // Object.keys(existing).length), destroying the user's file; a
            // file containing `null` would also crash Object.keys().
            let rawSource = null;
            try {
                rawSource = await readFile(configPath, 'utf8');
            }
            catch {
                // No existing config, start fresh
            }
            if (rawSource !== null) {
                const backupPath = `${configPath}.bak.${formatTimestamp()}`;
                await copyFile(configPath, backupPath);
                backedUpTo = backupPath;
                try {
                    const parsed = JSON.parse(rawSource);
                    // Only merge into a plain object; anything else starts fresh.
                    if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
                        existing = parsed;
                    }
                }
                catch {
                    // Invalid JSON: the backup above preserves it; start fresh.
                }
            }
            // Build mcpServers
            const mcpServers = {};
            for (const [name, config] of Object.entries(options.mcpServers)) {
                mcpServers[name] = toGeminiFormat(config);
            }
            // Always include brainctl itself
            mcpServers['brainctl'] = {
                command: 'npx',
                args: ['-y', 'brainctl', 'mcp'],
            };
            // Merge into existing config (preserve other settings)
            existing.mcpServers = mcpServers;
            // Atomic write: temp file + rename so readers never observe a
            // partially written config.
            await mkdir(geminiDir, { recursive: true });
            const tmpPath = `${configPath}.tmp.${Date.now()}`;
            await writeFile(tmpPath, JSON.stringify(existing, null, 2) + '\n', 'utf8');
            await rename(tmpPath, configPath);
            return { configPath, backedUpTo };
        },
        /** Restore settings.json from the newest `.bak.*` backup. */
        async restore(options) {
            const configPath = path.join(options.cwd, '.gemini', 'settings.json');
            const dir = path.dirname(configPath);
            const base = path.basename(configPath);
            let entries;
            try {
                entries = await readdir(dir);
            }
            catch {
                throw new SyncError('No Gemini config directory found.');
            }
            // Timestamp suffixes sort lexicographically; newest first after reverse.
            const backups = entries
                .filter((f) => f.startsWith(`${base}.bak.`))
                .sort()
                .reverse();
            if (backups.length === 0) {
                throw new SyncError('No Gemini config backup found.');
            }
            const latestBackup = path.join(dir, backups[0]);
            await copyFile(latestBackup, configPath);
            return { restoredFrom: latestBackup };
        },
    };
}
69
/**
 * Translate a brainctl MCP server config into Gemini's server entry.
 * npm configs run via `npx -y <package>`; bundled configs keep their own
 * command and args (args default to []). `env` is attached only when set.
 */
function toGeminiFormat(config) {
    const base = config.type === 'npm'
        ? { command: 'npx', args: ['-y', config.package] }
        : { command: config.command, args: config.args ?? [] };
    return config.env ? { ...base, env: config.env } : base;
}
@@ -0,0 +1,15 @@
1
import type { AgentName, SyncResult } from '../types.js';
import type { AgentConfigWriter } from './sync/agent-writer.js';
import { type ProfileService } from './profile-service.js';
/**
 * Orchestrates syncing (or restoring) agent configuration files for the
 * configured agents.
 */
export interface SyncService {
    /** Sync agent configs for the workspace; with `restore`, roll back from backups instead. */
    execute(options?: {
        cwd?: string;
        restore?: boolean;
    }): Promise<SyncResult>;
}
/** Injectable collaborators, overridable for testing. */
interface SyncServiceDependencies {
    profileService?: ProfileService;
    writers?: Partial<Record<AgentName, AgentConfigWriter>>;
}
export declare function createSyncService(dependencies?: SyncServiceDependencies): SyncService;
export {};