drybase 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,320 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import * as git from './git.js';
4
+ import * as github from './github.js';
5
+ import * as differ from '../utils/differ.js';
6
+ import * as stateUtil from '../utils/state.js';
7
+ import { runTests } from '../utils/test-runner.js';
8
+ import * as logger from '../utils/logger.js';
9
+
10
/**
 * Maps changed files from base watch paths to target sync path.
 * Strips the watchPath root and prepends syncPath.
 *
 * E.g., watchPaths=["src/shared"], file="src/shared/utils/logger.js", syncPath="lib/base"
 * → "lib/base/utils/logger.js"
 */
export function buildFileMapping(watchPaths, changedFiles, syncPath) {
  // Normalize once up front: a trailing slash on a watch path is ignored.
  const roots = watchPaths.map((wp) => wp.replace(/\/$/, ''));
  const mapping = {};
  for (const file of changedFiles) {
    // First matching watch path wins (same precedence as declaration order).
    const root = roots.find((r) => file === r || file.startsWith(`${r}/`));
    if (root === undefined) continue;
    const rest = file.slice(root.length).replace(/^\//, '');
    // A file equal to the watch path itself maps straight onto syncPath.
    mapping[file] = rest ? path.join(syncPath, rest) : syncPath;
  }
  return mapping;
}
32
+
33
/**
 * Discovers which files from the base repo have changed since the last sync.
 *
 * On the first sync (no recorded state for repoName) every file under the
 * watch paths is returned. Otherwise current file hashes are compared against
 * the hashes recorded at the last sync; only new or modified files are kept.
 *
 * @param {object} config - Loaded drybase config (uses baseRepo.path / watchPaths).
 * @param {object} state - Sync state as loaded by stateUtil.loadState.
 * @param {string} repoName - Target repo name used to look up the last sync.
 * @returns {Promise<string[]>} Paths (relative to the base repo) that need syncing.
 */
export async function getSyncableFiles(config, state, repoName) {
  const baseDir = config.baseRepo.path;
  const watchPaths = config.baseRepo.watchPaths;
  const lastSync = stateUtil.getLastSync(state, repoName);

  // Collect all files under each watchPath. The per-path scans are
  // independent, so run them in parallel (the original awaited each scan
  // sequentially); Promise.all preserves watchPath order in the result.
  const fileLists = await Promise.all(
    watchPaths.map((wp) => collectFiles(path.resolve(baseDir, wp), baseDir)),
  );
  const allFiles = fileLists.flat();

  if (!lastSync) {
    // First sync — all files are new
    return allFiles;
  }

  // Compare current content hashes against those recorded at the last sync.
  const currentHashes = await differ.computeHashMap(baseDir, allFiles);
  const lastHashes = {};
  if (lastSync.files) {
    for (const [relPath, info] of Object.entries(lastSync.files)) {
      lastHashes[relPath] = info.hash;
    }
  }

  // A file is syncable if it was never synced before or its hash changed.
  return allFiles.filter((file) => {
    const lastHash = lastHashes[file];
    return !lastHash || lastHash !== currentHashes[file];
  });
}
73
+
74
/**
 * Recursively collect all files under a directory, returning paths relative to baseDir.
 * Dot-prefixed entries and node_modules are skipped at every level; an
 * unreadable directory contributes no entries instead of throwing.
 */
async function collectFiles(dir, baseDir) {
  let entries;
  try {
    entries = await fs.readdir(dir, { withFileTypes: true });
  } catch {
    // Directory missing or unreadable — treat as empty.
    return [];
  }

  const found = [];
  for (const entry of entries) {
    if (entry.name.startsWith('.') || entry.name === 'node_modules') continue;
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      const nested = await collectFiles(fullPath, baseDir);
      found.push(...nested);
    } else if (entry.isFile()) {
      found.push(path.relative(baseDir, fullPath));
    }
  }
  return found;
}
97
+
98
/**
 * Sync changed files to a single target repo.
 *
 * Pipeline: map source files into the target's syncPath → (unless forced)
 * check the target copies for manual edits since the last sync → copy files →
 * hand off to the strategy matching the repo's syncMode
 * ('direct' | 'auto-if-tests-pass' | 'pr-always', defaulting to 'pr-always').
 *
 * @param {object} config - Loaded drybase config.
 * @param {object} targetRepo - One entry from config.targetRepos.
 * @param {string[]} changedFiles - Base-relative paths needing sync.
 * @param {object} state - Sync state (read for divergence detection; the
 *   chosen strategy persists the updated state).
 * @param {object} [options]
 * @param {boolean} [options.dryRun=false] - Log the mapping, change nothing.
 * @param {boolean} [options.force=false] - Skip divergence detection.
 * @returns {Promise<object>} Result summary { repo, status, ... }.
 */
export async function syncToRepo(config, targetRepo, changedFiles, state, options = {}) {
  const { dryRun = false, force = false } = options;
  const baseDir = config.baseRepo.path;
  const repoName = targetRepo.name;
  const targetDir = targetRepo.localPath;
  // `let` because divergence detection below may upgrade the mode.
  let syncMode = targetRepo.syncMode || 'pr-always';
  const targetBranch = targetRepo.branch || 'main';

  if (!changedFiles.length) {
    logger.info(`No changes to sync to ${repoName}`);
    return { repo: repoName, status: 'no-changes', files: [] };
  }

  // Build file mapping (source path → target path inside the repo)
  const mapping = buildFileMapping(config.baseRepo.watchPaths, changedFiles, targetRepo.syncPath);
  const mappedEntries = Object.entries(mapping);

  logger.info(`Syncing ${mappedEntries.length} file(s) to ${repoName}`);

  // Divergence detection: if a target-side copy no longer matches the hash we
  // recorded when we last wrote it, someone edited it manually. In that case
  // never overwrite silently — force review via a PR.
  if (!force) {
    const lastSync = stateUtil.getLastSync(state, repoName);
    if (lastSync?.files) {
      const targetRelPaths = mappedEntries.map(([, t]) => t);
      const currentTargetHashes = await differ.computeHashMap(targetDir, targetRelPaths);
      const lastSyncTargetHashes = {};
      for (const [srcFile, targetFile] of mappedEntries) {
        const lastFileInfo = lastSync.files[srcFile];
        if (lastFileInfo?.targetHash) {
          lastSyncTargetHashes[targetFile] = lastFileInfo.targetHash;
        }
      }
      const diverged = differ.detectDivergence(lastSyncTargetHashes, currentTargetHashes);
      if (diverged.length) {
        logger.warn(`Divergence detected in ${repoName}: ${diverged.join(', ')}`);
        logger.warn('Target files were manually edited. Upgrading to pr-always mode.');
        syncMode = 'pr-always';
      }
    }
  }

  if (dryRun) {
    logger.info('[DRY RUN] Would sync:');
    for (const [src, target] of mappedEntries) {
      logger.dim(`  ${src} → ${target}`);
    }
    return { repo: repoName, status: 'dry-run', files: changedFiles };
  }

  // Copy files to target (creating intermediate directories as needed)
  for (const [srcRel, targetRel] of mappedEntries) {
    const srcFull = path.resolve(baseDir, srcRel);
    const targetFull = path.resolve(targetDir, targetRel);
    await fs.mkdir(path.dirname(targetFull), { recursive: true });
    await fs.copyFile(srcFull, targetFull);
    logger.debug(`  Copied ${srcRel} → ${targetRel}`);
  }

  // Build commit message from the configurable template ({{files}} placeholder)
  const fileList = mappedEntries.map(([s]) => path.basename(s)).join(', ');
  const template = config.options?.commitMessageTemplate || 'Sync base code: {{files}}';
  const commitMessage = template.replace('{{files}}', fileList);

  // Execute sync mode strategy
  const targetFiles = mappedEntries.map(([, t]) => t);

  if (syncMode === 'direct') {
    return await syncDirect(targetDir, targetFiles, commitMessage, repoName, targetBranch, changedFiles, baseDir, mapping, config, state);
  } else if (syncMode === 'auto-if-tests-pass') {
    return await syncAutoIfTestsPass(targetDir, targetFiles, commitMessage, repoName, targetBranch, changedFiles, baseDir, mapping, config, state);
  } else {
    // pr-always (also the fallback for unknown modes)
    return await syncPRAlways(targetDir, targetFiles, commitMessage, repoName, targetBranch, changedFiles, baseDir, mapping, config, state);
  }
}
176
+
177
/**
 * Builds the per-file records stored in sync state for the files just synced.
 *
 * Each source path maps to { hash, action, targetHash }. This runs right
 * after the copy loop, when the target file is byte-identical to the source,
 * so targetHash reuses the source hash.
 *
 * @param {string} baseDir - Base repo root the mapping keys are relative to.
 * @param {Object<string,string>} mapping - srcRel → targetRel file mapping.
 * @returns {Promise<object>} srcRel → { hash, action, targetHash }.
 */
async function buildFileHashes(baseDir, mapping) {
  // Only the source paths matter here (the original also destructured the
  // unused target path). Hashing is independent per file — run in parallel.
  const entries = await Promise.all(
    Object.keys(mapping).map(async (srcRel) => {
      const hash = await differ.computeFileHash(path.resolve(baseDir, srcRel));
      return [srcRel, { hash, action: 'modified', targetHash: hash }];
    }),
  );
  return Object.fromEntries(entries);
}
185
+
186
/**
 * 'direct' sync mode: commit the copied files straight onto the target branch
 * and push — no branch, no PR, no test gate.
 *
 * Order matters: checkout → commit → push, and state is recorded only after
 * the push, so a failed push is not marked as synced.
 *
 * @returns {Promise<object>} Result summary { repo, status, commitSha, files }.
 */
async function syncDirect(targetDir, targetFiles, commitMessage, repoName, targetBranch, changedFiles, baseDir, mapping, config, state) {
  await git.checkoutBranch(targetDir, targetBranch);
  const commitSha = await git.commitFiles(targetDir, targetFiles, commitMessage);
  await git.pushBranch(targetDir, targetBranch);

  // Persist hashes of the just-synced files so future runs can detect both
  // new changes and manual divergence in the target.
  const fileHashes = await buildFileHashes(baseDir, mapping);
  const newState = stateUtil.recordSync(state, {
    repo: repoName,
    files: fileHashes,
    commitSha,
    status: 'success',
  });
  await stateUtil.saveState(config._configDir, newState);

  logger.success(`Direct sync to ${repoName} complete (${commitSha})`);
  return { repo: repoName, status: 'success', commitSha, files: changedFiles };
}
203
+
204
/**
 * 'auto-if-tests-pass' sync mode: push the synced files on a fresh branch,
 * run the target's test command, and auto-merge into the target branch when
 * tests pass (unless options.autoMergeOnPass is explicitly false). Any other
 * outcome — failing tests, auto-merge disabled, or tests disabled entirely —
 * falls through to opening a PR for manual review.
 *
 * @returns {Promise<object>} Result summary from the merge or PR path.
 */
async function syncAutoIfTestsPass(targetDir, targetFiles, commitMessage, repoName, targetBranch, changedFiles, baseDir, mapping, config, state) {
  const branchPrefix = config.options?.branchPrefix || 'sync-bot';
  const syncBranch = `${branchPrefix}/${Date.now()}`;

  await git.createBranch(targetDir, syncBranch, targetBranch);
  const commitSha = await git.commitFiles(targetDir, targetFiles, commitMessage);
  await git.pushBranch(targetDir, syncBranch);

  // Run tests (enabled unless options.runTests is explicitly false)
  const testCommand = config.options?.testCommand || 'npm test';
  const runTestsEnabled = config.options?.runTests !== false;

  if (runTestsEnabled) {
    const { passed, output } = await runTests(testCommand, targetDir);

    if (passed && config.options?.autoMergeOnPass !== false) {
      // Tests passed — auto merge
      await git.checkoutBranch(targetDir, targetBranch);
      const mergeGit = git.getGit(targetDir);
      await mergeGit.merge([syncBranch]);
      await git.pushBranch(targetDir, targetBranch);

      const fileHashes = await buildFileHashes(baseDir, mapping);
      const newState = stateUtil.recordSync(state, {
        repo: repoName,
        files: fileHashes,
        commitSha,
        status: 'success',
      });
      await stateUtil.saveState(config._configDir, newState);

      logger.success(`Tests passed! Auto-merged sync to ${repoName}`);
      return { repo: repoName, status: 'success', commitSha, files: changedFiles };
    }

    // Tests failed — fall through to PR creation.
    // Fix: the captured test output was previously discarded, leaving no way
    // to diagnose the failure; surface it at debug level.
    logger.warn(`Tests failed for ${repoName}. Creating PR for review.`);
    logger.debug(output);
  }

  // Create PR
  return await createSyncPR(repoName, syncBranch, targetBranch, commitMessage, changedFiles, commitSha, baseDir, mapping, config, state);
}
246
+
247
/**
 * 'pr-always' sync mode: push the synced files on a fresh timestamped branch
 * and open a pull request for review — the target branch is never touched
 * directly.
 */
async function syncPRAlways(targetDir, targetFiles, commitMessage, repoName, targetBranch, changedFiles, baseDir, mapping, config, state) {
  const prefix = config.options?.branchPrefix || 'sync-bot';
  const branchName = `${prefix}/${Date.now()}`;

  await git.createBranch(targetDir, branchName, targetBranch);
  const sha = await git.commitFiles(targetDir, targetFiles, commitMessage);
  await git.pushBranch(targetDir, branchName);

  return await createSyncPR(repoName, branchName, targetBranch, commitMessage, changedFiles, sha, baseDir, mapping, config, state);
}
257
+
258
/**
 * Opens a GitHub pull request for an already-pushed sync branch and records
 * the sync in persistent state with status 'pr-created'.
 */
async function createSyncPR(repoName, syncBranch, targetBranch, commitMessage, changedFiles, commitSha, baseDir, mapping, config, state) {
  // Assemble the PR body section by section (result is identical to the
  // single-template form).
  const body = [
    '## Automated Base Code Sync',
    '',
    '**Files synced:**',
    changedFiles.map((f) => `- \`${f}\``).join('\n'),
    '',
    '*Created by drybase*',
  ].join('\n');

  const pr = await github.createPullRequest({
    repoName,
    title: commitMessage,
    body,
    head: syncBranch,
    base: targetBranch,
    labels: ['automated-sync'],
  });

  const fileHashes = await buildFileHashes(baseDir, mapping);
  const updatedState = stateUtil.recordSync(state, {
    repo: repoName,
    files: fileHashes,
    commitSha,
    status: 'pr-created',
  });
  await stateUtil.saveState(config._configDir, updatedState);

  logger.success(`Created PR #${pr.number} for ${repoName}`);
  return { repo: repoName, status: 'pr-created', prNumber: pr.number, commitSha, files: changedFiles };
}
283
+
284
/**
 * Sync to all (or filtered) target repos.
 *
 * Loads state once, initializes the GitHub client, then syncs each target
 * sequentially. A failure in one repo is logged and recorded in the results
 * but does not abort the remaining repos.
 *
 * @param {object} config - Loaded drybase config.
 * @param {object} [options]
 * @param {string} [options.repo] - Only sync the target with this name.
 * @param {boolean} [options.dryRun=false] - Preview changes without syncing.
 * @param {boolean} [options.force=false] - Skip divergence checks.
 * @returns {Promise<object[]>} One result object per attempted target.
 */
export async function syncAll(config, options = {}) {
  const { repo: repoFilter, dryRun = false, force = false } = options;
  // NOTE(review): `state` is loaded once and the same object is passed to
  // every syncToRepo call, while each successful sync persists an updated
  // state via recordSync/saveState. Presumably recordSync does not mutate
  // `state` in place — confirm later targets don't act on stale data.
  const state = await stateUtil.loadState(config._configDir);

  github.initOctokit(config.github.token);

  const targets = repoFilter
    ? config.targetRepos.filter((r) => r.name === repoFilter)
    : config.targetRepos;

  if (!targets.length) {
    if (repoFilter) {
      logger.error(`No target repo matching "${repoFilter}"`);
    } else {
      logger.warn('No target repos configured');
    }
    return [];
  }

  const results = [];
  for (const target of targets) {
    try {
      const changedFiles = await getSyncableFiles(config, state, target.name);
      const result = await syncToRepo(config, target, changedFiles, state, { dryRun, force });
      results.push(result);
    } catch (err) {
      // One repo failing must not stop the others; record and continue.
      logger.error(`Sync to ${target.name} failed: ${err.message}`);
      logger.debug(err.stack);
      results.push({ repo: target.name, status: 'failed', error: err.message });
    }
  }

  return results;
}
@@ -0,0 +1,92 @@
1
+ import chokidar from 'chokidar';
2
+ import path from 'node:path';
3
+ import * as logger from '../utils/logger.js';
4
+ import { syncAll } from './syncer.js';
5
+
6
// Quiet period after the last file event before a batch sync is triggered.
const DEBOUNCE_MS = 2000;
// Globs the watcher never reports: VCS/dependency noise plus drybase's own
// config and state files (which would otherwise re-trigger syncs).
const IGNORE_PATTERNS = [
  '**/node_modules/**',
  '**/.git/**',
  '**/drybase.json',
  '**/.drybase-state.json',
  '**/.DS_Store',
];
14
+
15
/**
 * Creates a chokidar-based watcher over the base repo's watch paths that
 * debounces file events into batches and runs a full sync per batch.
 *
 * @param {object} config - Loaded drybase config (baseRepo.path / watchPaths).
 * @returns {{close: () => Promise<void>}} Handle for shutting the watcher down.
 */
export function createWatcher(config) {
  const baseDir = config.baseRepo.path;
  // Resolve watch paths to absolute paths for chokidar.
  const watchPaths = config.baseRepo.watchPaths.map((wp) => path.resolve(baseDir, wp));

  // Absolute paths of files changed since the last processed batch.
  const changedFiles = new Set();
  let debounceTimer = null;
  // Guards against overlapping syncs; events arriving mid-sync stay in
  // changedFiles and are rescheduled by processBatch's finally block.
  let syncing = false;

  const watcher = chokidar.watch(watchPaths, {
    ignored: IGNORE_PATTERNS,
    persistent: true,
    ignoreInitial: true,
    // Wait for writes to settle so half-written files aren't synced.
    awaitWriteFinish: { stabilityThreshold: 500 },
  });

  // Drains the accumulated change set and runs one sync pass.
  async function processBatch() {
    if (syncing || changedFiles.size === 0) return;
    syncing = true;

    const files = [...changedFiles];
    changedFiles.clear();

    // Convert absolute paths to relative from baseDir
    const relativePaths = files.map((f) => path.relative(baseDir, f));

    logger.info(`Detected ${relativePaths.length} changed file(s)`);
    for (const f of relativePaths) {
      logger.dim(`  ${f}`);
    }

    // NOTE(review): the batch contents are only logged here — syncAll
    // re-discovers changed files itself. Confirm that is intended.
    try {
      await syncAll(config, {});
    } catch (err) {
      logger.error(`Sync failed: ${err.message}`);
      logger.debug(err.stack);
    } finally {
      syncing = false;
      // Process any files that changed during sync
      if (changedFiles.size > 0) {
        scheduleBatch();
      }
    }
  }

  // (Re)starts the debounce window; the batch runs DEBOUNCE_MS after the most
  // recent file event.
  function scheduleBatch() {
    if (debounceTimer) clearTimeout(debounceTimer);
    debounceTimer = setTimeout(processBatch, DEBOUNCE_MS);
  }

  // Records one file event and re-arms the debounce timer.
  function onFileChange(eventType, filePath) {
    changedFiles.add(filePath);
    logger.debug(`${eventType}: ${filePath}`);
    scheduleBatch();
  }

  watcher
    .on('add', (p) => onFileChange('add', p))
    .on('change', (p) => onFileChange('change', p))
    .on('unlink', (p) => onFileChange('delete', p));

  watcher.on('ready', () => {
    logger.success('Watching for changes...');
    for (const wp of watchPaths) {
      logger.dim(`  ${wp}`);
    }
  });

  watcher.on('error', (err) => {
    logger.error(`Watcher error: ${err.message}`);
  });

  // Cancel any pending batch before closing so no sync fires after shutdown.
  return {
    close() {
      if (debounceTimer) clearTimeout(debounceTimer);
      return watcher.close();
    },
  };
}
package/src/index.js ADDED
@@ -0,0 +1,117 @@
1
// CLI entry point: registers all drybase subcommands on a commander program.
import { Command } from 'commander';
import dotenv from 'dotenv';

// Load .env early so "env:VAR" references in drybase.json can resolve.
dotenv.config();

const program = new Command();

program
  .name('drybase')
  .description('Monitor file changes in a base repository and sync to multiple targets')
  .version('1.0.0');

// Lazy-load commands to keep startup fast
program
  .command('init')
  .description('Initialize a new drybase.json configuration')
  .action(async () => {
    const { initCommand } = await import('./commands/init.js');
    await initCommand();
  });

program
  .command('onboard')
  .description('LLM-powered project analysis and config generation')
  .argument('[path]', 'Path to project to analyze', '.')
  .option('--interactive', 'Interactive mode for accepting/rejecting suggestions')
  .option('--no-llm', 'Skip LLM analysis, use manual mode')
  .action(async (path, options) => {
    const { onboardCommand } = await import('./commands/onboard.js');
    await onboardCommand(path, options);
  });

program
  .command('watch')
  .description('Watch for file changes and sync in foreground')
  .action(async () => {
    const { watchCommand } = await import('./commands/watch.js');
    await watchCommand();
  });

program
  .command('start')
  .description('Start the sync daemon in background')
  .action(async () => {
    const { startCommand } = await import('./commands/start.js');
    await startCommand();
  });

program
  .command('stop')
  .description('Stop the sync daemon')
  .action(async () => {
    const { stopCommand } = await import('./commands/stop.js');
    await stopCommand();
  });

program
  .command('sync')
  .description('Run a one-time sync')
  .option('--repo <name>', 'Sync only to a specific repo (owner/name)')
  .option('--dry-run', 'Preview changes without syncing')
  .option('--force', 'Skip divergence checks')
  .action(async (options) => {
    const { syncCommand } = await import('./commands/sync.js');
    await syncCommand(options);
  });

program
  .command('status')
  .description('Show sync status for all target repos')
  .action(async () => {
    const { statusCommand } = await import('./commands/status.js');
    await statusCommand();
  });

program
  .command('history')
  .description('Show sync history')
  .option('--repo <name>', 'Filter by repo (owner/name)')
  .option('--limit <n>', 'Number of entries to show', '20')
  .action(async (options) => {
    const { historyCommand } = await import('./commands/history.js');
    await historyCommand(options);
  });

program
  .command('rollback')
  .description('Rollback the last sync for a repo')
  .argument('<repo>', 'Target repo name (owner/name)')
  .option('--yes', 'Skip confirmation prompt')
  .action(async (repo, options) => {
    const { rollbackCommand } = await import('./commands/rollback.js');
    await rollbackCommand(repo, options);
  });

program
  .command('validate')
  .description('Validate the drybase.json configuration')
  .action(async () => {
    const { validateCommand } = await import('./commands/validate.js');
    await validateCommand();
  });

// Global error handling: exit non-zero on any unhandled failure; full stack
// traces only when DRYBASE_DEBUG is set.
process.on('unhandledRejection', (err) => {
  console.error('Unhandled error:', err.message || err);
  if (process.env.DRYBASE_DEBUG) console.error(err.stack);
  process.exit(1);
});

process.on('uncaughtException', (err) => {
  console.error('Fatal error:', err.message || err);
  if (process.env.DRYBASE_DEBUG) console.error(err.stack);
  process.exit(1);
});

program.parse();
@@ -0,0 +1,141 @@
1
+ import fs from 'node:fs/promises';
2
+ import path from 'node:path';
3
+ import os from 'node:os';
4
+ import { execSync } from 'node:child_process';
5
+
6
+ const CONFIG_NAME = 'drybase.json';
7
+
8
/**
 * Resolves config values of the form "env:VAR_NAME" to the value of that
 * environment variable. Any other value (including non-strings) is returned
 * unchanged.
 *
 * @param {*} value - Raw config value.
 * @returns {*} The env var's value for "env:" references, else the input.
 * @throws {Error} if the referenced variable is unset (or set to '').
 */
export function resolveEnvValue(value) {
  const isEnvRef = typeof value === 'string' && value.startsWith('env:');
  if (!isEnvRef) {
    return value;
  }
  const varName = value.slice('env:'.length);
  const resolved = process.env[varName];
  // Truthiness check: an empty-string env var is treated the same as unset.
  if (!resolved) {
    throw new Error(`Environment variable ${varName} is not set (referenced as "${value}")`);
  }
  return resolved;
}
19
+
20
/**
 * Locates drybase.json by checking, in order:
 *   1. the DRYBASE_CONFIG env var (throws if set but the file is missing),
 *   2. the current working directory,
 *   3. the git repository root (when inside a repo),
 *   4. the user's home directory.
 *
 * @returns {Promise<string|null>} Absolute path to the config, or null.
 */
export async function findConfigFile() {
  const exists = async (p) => {
    try {
      await fs.access(p);
      return true;
    } catch {
      return false;
    }
  };

  // 1. Environment variable override — if set, it must point at a real file.
  const override = process.env.DRYBASE_CONFIG;
  if (override) {
    if (!(await exists(override))) {
      throw new Error(`Config file not found at DRYBASE_CONFIG path: ${override}`);
    }
    return path.resolve(override);
  }

  // 2. Current working directory
  const cwdConfig = path.resolve(process.cwd(), CONFIG_NAME);
  if (await exists(cwdConfig)) {
    return cwdConfig;
  }

  // 3. Git root traversal (skipped entirely when not inside a git repo)
  try {
    const gitRoot = execSync('git rev-parse --show-toplevel', {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
    }).trim();
    const gitConfig = path.join(gitRoot, CONFIG_NAME);
    if (await exists(gitConfig)) {
      return gitConfig;
    }
  } catch {
    // Not in a git repo — skip
  }

  // 4. Home directory
  const homeConfig = path.join(os.homedir(), CONFIG_NAME);
  return (await exists(homeConfig)) ? homeConfig : null;
}
68
+
69
/**
 * Loads and normalizes the drybase config file.
 *
 * - Resolves the file via `configPath` or findConfigFile(); returns null when
 *   no config can be located.
 * - Records _configPath/_configDir on the returned object.
 * - Resolves baseRepo.path and each targetRepos[].localPath relative to the
 *   config's own directory, so the CLI works from any cwd.
 * - Resolves "env:VAR" indirections for github.token and llm.apiKey.
 *
 * @param {string} [configPath] - Explicit config path; falls back to discovery.
 * @returns {Promise<object|null>} Normalized config object, or null.
 * @throws {Error} on an unreadable file, invalid JSON, or unset env reference.
 */
export async function loadConfig(configPath) {
  const filePath = configPath || (await findConfigFile());
  if (!filePath) {
    return null;
  }

  const raw = await fs.readFile(filePath, 'utf8');
  let config;
  try {
    config = JSON.parse(raw);
  } catch (err) {
    // Fix: name the offending file instead of surfacing a bare SyntaxError.
    throw new Error(`Invalid JSON in config file ${filePath}: ${err.message}`, { cause: err });
  }
  config._configPath = filePath;
  config._configDir = path.dirname(filePath);

  // Resolve relative paths
  if (config.baseRepo?.path) {
    config.baseRepo.path = path.resolve(config._configDir, config.baseRepo.path);
  }
  for (const repo of config.targetRepos || []) {
    if (repo.localPath) {
      repo.localPath = path.resolve(config._configDir, repo.localPath);
    }
  }

  // Resolve env: values for secrets
  if (config.github?.token) {
    config.github.token = resolveEnvValue(config.github.token);
  }
  if (config.llm?.apiKey) {
    config.llm.apiKey = resolveEnvValue(config.llm.apiKey);
  }

  return config;
}
100
+
101
/**
 * Validates the shape of a loaded drybase config.
 *
 * @param {object} config - Config object (possibly null/undefined).
 * @returns {string[]|null} List of human-readable errors, or null when valid.
 */
export function validateConfig(config) {
  if (!config) {
    return ['No configuration provided'];
  }

  const errors = [];

  // baseRepo
  const base = config.baseRepo;
  if (!base) {
    errors.push('Missing "baseRepo" section');
  } else {
    if (!base.path) {
      errors.push('Missing "baseRepo.path"');
    }
    if (!base.watchPaths?.length) {
      errors.push('Missing or empty "baseRepo.watchPaths"');
    }
  }

  // github
  if (!config.github) {
    errors.push('Missing "github" section');
  } else if (!config.github.token) {
    errors.push('Missing "github.token"');
  }

  // targetRepos
  const repos = config.targetRepos;
  if (!repos?.length) {
    errors.push('Missing or empty "targetRepos"');
  } else {
    const validModes = ['direct', 'auto-if-tests-pass', 'pr-always'];
    repos.forEach((repo, i) => {
      const prefix = `targetRepos[${i}]`;
      if (!repo.name) errors.push(`${prefix}: missing "name"`);
      if (!repo.localPath) errors.push(`${prefix}: missing "localPath"`);
      if (!repo.syncPath) errors.push(`${prefix}: missing "syncPath"`);
      if (repo.syncMode && !validModes.includes(repo.syncMode)) {
        errors.push(`${prefix}: invalid syncMode "${repo.syncMode}". Must be direct, auto-if-tests-pass, or pr-always`);
      }
    });
  }

  return errors.length ? errors : null;
}