drybase 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,33 @@
+ import { loadConfig } from '../utils/config.js';
+ import { loadState, getLastSync } from '../utils/state.js';
+ import * as logger from '../utils/logger.js';
+
+ export async function statusCommand() {
+   const config = await loadConfig();
+   if (!config) {
+     logger.error('No drybase.json found. Run "drybase init" to create one.');
+     process.exit(1);
+   }
+
+   const state = await loadState(config._configDir);
+   const rows = [];
+
+   for (const repo of config.targetRepos) {
+     const last = getLastSync(state, repo.name);
+     rows.push({
+       Repo: repo.name,
+       Mode: repo.syncMode || 'pr-always',
+       'Last Sync': last ? new Date(last.timestamp).toLocaleString() : 'Never',
+       Files: last ? Object.keys(last.files).length : 0,
+       Status: last?.status || 'pending',
+     });
+   }
+
+   if (!rows.length) {
+     logger.warn('No target repos configured.');
+     return;
+   }
+
+   logger.info('Sync Status\n');
+   logger.table(rows);
+ }
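
Everything below keys off the same loadConfig() result. A drybase.json consistent with the fields these commands actually read (baseRepo.path and baseRepo.watchPaths appear in the validate command further down; targetRepos[].name must be "owner/repo" per the GitHub helpers; syncMode is optional and defaults to 'pr-always' above) would look roughly like this; the values are illustrative, not taken from the package:

{
  "baseRepo": {
    "path": ".",
    "watchPaths": ["src/utils", "src/middleware"]
  },
  "targetRepos": [
    { "name": "acme/web-app", "syncMode": "pr-always" },
    { "name": "acme/api" }
  ]
}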
@@ -0,0 +1,35 @@
+ import fs from 'node:fs/promises';
+ import path from 'node:path';
+ import os from 'node:os';
+ import * as logger from '../utils/logger.js';
+
+ const PID_FILE = path.join(os.homedir(), '.drybase', 'daemon.pid');
+
+ export async function stopCommand() {
+   let pid;
+   try {
+     pid = parseInt(await fs.readFile(PID_FILE, 'utf8'), 10);
+   } catch {
+     logger.warn('No daemon running (PID file not found).');
+     return;
+   }
+
+   try {
+     process.kill(pid, 'SIGTERM');
+     logger.success(`Daemon stopped (PID ${pid})`);
+   } catch (err) {
+     if (err.code === 'ESRCH') {
+       logger.warn(`Process ${pid} not found. Cleaning up stale PID file.`);
+     } else {
+       logger.error(`Failed to stop daemon: ${err.message}`);
+       // The daemon may still be running; keep its PID file intact.
+       return;
+     }
+   }
+
+   try {
+     await fs.unlink(PID_FILE);
+   } catch {
+     // Already cleaned up
+   }
+ }
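
The daemon's start side is not included in this diff. A minimal sketch of the writer this command assumes, with the writePidFile name being hypothetical:

import fs from 'node:fs/promises';
import path from 'node:path';
import os from 'node:os';

const PID_FILE = path.join(os.homedir(), '.drybase', 'daemon.pid');

// Record our PID so a later `drybase stop` can find and signal this process.
export async function writePidFile() {
  await fs.mkdir(path.dirname(PID_FILE), { recursive: true });
  await fs.writeFile(PID_FILE, String(process.pid), 'utf8');
}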
@@ -0,0 +1,35 @@
+ import { loadConfig, validateConfig } from '../utils/config.js';
+ import { syncAll } from '../core/syncer.js';
+ import * as logger from '../utils/logger.js';
+
+ export async function syncCommand(options) {
+   const config = await loadConfig();
+   if (!config) {
+     logger.error('No drybase.json found. Run "drybase init" to create one.');
+     process.exit(1);
+   }
+
+   const errors = validateConfig(config);
+   if (errors) {
+     logger.error('Invalid configuration:');
+     for (const e of errors) logger.error(` - ${e}`);
+     process.exit(1);
+   }
+
+   if (options.dryRun) {
+     logger.info('[DRY RUN] Previewing sync...\n');
+   }
+
+   const results = await syncAll(config, {
+     repo: options.repo,
+     dryRun: options.dryRun,
+     force: options.force,
+   });
+
+   // Summary
+   logger.info('\nSync Summary:');
+   for (const r of results) {
+     const icon = r.status === 'success' ? ' ✔' : r.status === 'failed' ? ' ✖' : ' ℹ';
+     logger.info(`${icon} ${r.repo}: ${r.status}`);
+   }
+ }
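
syncAll is defined in core/syncer.js, which is not part of this diff. The summary loop only requires that each result carry a repo name and a status string; an illustrative, non-authoritative shape:

const exampleResults = [
  { repo: 'acme/web-app', status: 'success' }, // renders ✔
  { repo: 'acme/api', status: 'failed' },      // renders ✖
  { repo: 'acme/docs', status: 'skipped' },    // any other status renders ℹ
];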
@@ -0,0 +1,34 @@
+ import { loadConfig, validateConfig, findConfigFile } from '../utils/config.js';
+ import * as logger from '../utils/logger.js';
+
+ export async function validateCommand() {
+   const configPath = await findConfigFile();
+   if (!configPath) {
+     logger.error('No drybase.json found. Run "drybase init" to create one.');
+     process.exit(1);
+   }
+
+   logger.info(`Validating ${configPath}`);
+
+   let config;
+   try {
+     config = await loadConfig(configPath);
+   } catch (err) {
+     logger.error(`Failed to load config: ${err.message}`);
+     process.exit(1);
+   }
+
+   const errors = validateConfig(config);
+   if (errors) {
+     logger.error('Configuration errors:');
+     for (const err of errors) {
+       logger.error(` - ${err}`);
+     }
+     process.exit(1);
+   }
+
+   logger.success('Configuration is valid');
+   logger.dim(` Base repo: ${config.baseRepo.path}`);
+   logger.dim(` Watch paths: ${config.baseRepo.watchPaths.join(', ')}`);
+   logger.dim(` Target repos: ${config.targetRepos.map((r) => r.name).join(', ')}`);
+ }
@@ -0,0 +1,33 @@
+ import { loadConfig, validateConfig } from '../utils/config.js';
+ import { createWatcher } from '../core/watcher.js';
+ import * as logger from '../utils/logger.js';
+
+ export async function watchCommand() {
+   const config = await loadConfig();
+   if (!config) {
+     logger.error('No drybase.json found. Run "drybase init" to create one.');
+     process.exit(1);
+   }
+
+   const errors = validateConfig(config);
+   if (errors) {
+     logger.error('Invalid configuration:');
+     for (const e of errors) logger.error(` - ${e}`);
+     process.exit(1);
+   }
+
+   logger.info('Starting file watcher...');
+
+   const watcher = createWatcher(config);
+
+   // Graceful shutdown
+   const cleanup = async () => {
+     logger.info('\nStopping watcher...');
+     await watcher.close();
+     logger.success('Watcher stopped.');
+     process.exit(0);
+   };
+
+   process.on('SIGINT', cleanup);
+   process.on('SIGTERM', cleanup);
+ }
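
createWatcher lives in core/watcher.js, outside this diff; the only contract visible here is that it returns an object whose close() can be awaited. A compatible sketch using chokidar (the dependency and the change handler are assumptions, not confirmed by this diff):

import chokidar from 'chokidar';

export function createWatcher(config) {
  // chokidar's FSWatcher.close() returns a Promise, which satisfies
  // the `await watcher.close()` call in watchCommand above.
  const watcher = chokidar.watch(config.baseRepo.watchPaths, {
    cwd: config.baseRepo.path,
    ignoreInitial: true,
  });
  watcher.on('change', (file) => {
    // The real implementation would queue a sync for the changed file here.
    console.log(`changed: ${file}`);
  });
  return watcher;
}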
@@ -0,0 +1,161 @@
+ import fs from 'node:fs/promises';
+ import path from 'node:path';
+ import { generateLLMText } from '../utils/llm-adapter.js';
+ import * as logger from '../utils/logger.js';
+
+ const IGNORE_DIRS = new Set([
+   'node_modules', '.git', 'vendor', 'dist', 'build', '.next',
+   '__pycache__', '.cache', 'coverage', '.nyc_output',
+ ]);
+
+ const IGNORE_EXTENSIONS = new Set([
+   '.png', '.jpg', '.jpeg', '.gif', '.svg', '.ico', '.woff', '.woff2',
+   '.ttf', '.eot', '.mp4', '.mp3', '.zip', '.tar', '.gz', '.lock',
+ ]);
+
+ const MAX_SAMPLE_LINES = 10;
+ const MAX_FILES_TO_SAMPLE = 50;
+
+ export async function scanProject(projectPath) {
+   const tree = [];
+   await walkDir(projectPath, projectPath, tree);
+   return tree;
+ }
+
+ async function walkDir(dir, rootDir, tree) {
+   let entries;
+   try {
+     entries = await fs.readdir(dir, { withFileTypes: true });
+   } catch {
+     return;
+   }
+
+   for (const entry of entries) {
+     if (entry.name.startsWith('.') || IGNORE_DIRS.has(entry.name)) continue;
+
+     const fullPath = path.join(dir, entry.name);
+     const relativePath = path.relative(rootDir, fullPath);
+
+     if (entry.isDirectory()) {
+       tree.push({ path: relativePath, type: 'dir' });
+       await walkDir(fullPath, rootDir, tree);
+     } else if (entry.isFile()) {
+       const ext = path.extname(entry.name).toLowerCase();
+       if (IGNORE_EXTENSIONS.has(ext)) continue;
+
+       try {
+         const stat = await fs.stat(fullPath);
+         tree.push({
+           path: relativePath,
+           type: 'file',
+           size: stat.size,
+           ext,
+         });
+       } catch {
+         // Skip inaccessible files
+       }
+     }
+   }
+ }
+
+ export async function sampleFiles(projectPath, fileTree) {
+   const files = fileTree.filter((f) => f.type === 'file' && f.size < 50000);
+   const sampled = files.slice(0, MAX_FILES_TO_SAMPLE);
+   const samples = [];
+
+   for (const file of sampled) {
+     try {
+       const content = await fs.readFile(path.join(projectPath, file.path), 'utf8');
+       const lines = content.split('\n');
+       const sample = lines.slice(0, MAX_SAMPLE_LINES).join('\n');
+       samples.push({ path: file.path, sample, totalLines: lines.length });
+     } catch {
+       // Skip unreadable files
+     }
+   }
+
+   return samples;
+ }
+
+ export function buildAnalysisPrompt(fileTree, samples) {
+   const treeStr = fileTree.map((f) => {
+     if (f.type === 'dir') return `📁 ${f.path}/`;
+     return ` ${f.path} (${f.size}b)`;
+   }).join('\n');
+
+   const samplesStr = samples.map((s) =>
+     `--- ${s.path} (${s.totalLines} lines) ---\n${s.sample}\n`
+   ).join('\n');
+
+   return `Analyze this project structure and identify files that are likely "base code" that would be shared across multiple similar projects.
+
+ Base code typically includes:
+ - Utility functions and helpers
+ - Configuration templates
+ - Shared UI components
+ - API clients and wrappers
+ - Common middleware
+ - Validation schemas
+ - Type definitions
+ - Database helpers
+ - Authentication utilities
+
+ Project structure:
+ ${treeStr}
+
+ Sample file contents:
+ ${samplesStr}
+
+ Respond with JSON only (no markdown, no explanation):
+ {
+   "baseFiles": [
+     {
+       "path": "src/utils/logger.js",
+       "confidence": "high|medium|low",
+       "reason": "Generic logging utility with no project-specific code",
+       "category": "utility"
+     }
+   ],
+   "recommendedWatchPaths": ["src/utils", "src/middleware"],
+   "recommendedSyncPaths": {
+     "utils": "src/utils",
+     "config": "config/base"
+   }
+ }`;
+ }
+
+ export async function analyzeProject(projectPath, llmConfig) {
+   logger.info('Scanning project...');
+   const tree = await scanProject(projectPath);
+   logger.info(`Found ${tree.filter((f) => f.type === 'file').length} files`);
+
+   logger.info('Sampling file contents...');
+   const samples = await sampleFiles(projectPath, tree);
+
+   logger.info('Analyzing with LLM...');
+   const prompt = buildAnalysisPrompt(tree, samples);
+
+   const text = await generateLLMText(llmConfig, prompt);
+
+   // Parse JSON, with fallback for markdown-wrapped responses
+   let analysis;
+   try {
+     analysis = JSON.parse(text);
+   } catch {
+     // Try extracting JSON from a markdown code block; guard the second
+     // parse as well so a malformed block degrades to null instead of throwing.
+     const match = text.match(/```(?:json)?\s*([\s\S]*?)```/);
+     try {
+       analysis = match ? JSON.parse(match[1].trim()) : null;
+     } catch {
+       analysis = null;
+     }
+     if (!analysis) {
+       logger.error('Failed to parse LLM response as JSON');
+       logger.debug(text);
+       return null;
+     }
+   }
+
+   return analysis;
+ }
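
Typical wiring of the analyzer, assuming an llmConfig object whose shape is defined by utils/llm-adapter.js (not shown in this diff); the import path and values are illustrative:

import { analyzeProject } from './core/analyzer.js';

const llmConfig = { /* provider settings consumed by generateLLMText */ };
const analysis = await analyzeProject(process.cwd(), llmConfig);
if (analysis) {
  for (const f of analysis.baseFiles) {
    console.log(`[${f.confidence}] ${f.path}: ${f.reason}`);
  }
}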
@@ -0,0 +1,81 @@
+ import simpleGit from 'simple-git';
+ import * as logger from '../utils/logger.js';
+
+ export function getGit(repoPath) {
+   return simpleGit(repoPath);
+ }
+
+ export async function isClean(repoPath) {
+   const git = getGit(repoPath);
+   const status = await git.status();
+   return status.isClean();
+ }
+
+ export async function createBranch(repoPath, branchName, baseBranch) {
+   const git = getGit(repoPath);
+   await git.checkout(baseBranch);
+   await git.pull('origin', baseBranch);
+   await git.checkoutLocalBranch(branchName);
+   logger.debug(`Created branch ${branchName} from ${baseBranch}`);
+   return branchName;
+ }
+
+ export async function checkoutBranch(repoPath, branchName) {
+   const git = getGit(repoPath);
+   await git.checkout(branchName);
+ }
+
+ export async function commitFiles(repoPath, files, message) {
+   const git = getGit(repoPath);
+   await git.add(files);
+   const result = await git.commit(message);
+   logger.debug(`Committed ${files.length} files: ${result.commit}`);
+   return result.commit;
+ }
+
+ export async function pushBranch(repoPath, branchName) {
+   const git = getGit(repoPath);
+   await git.push('origin', branchName, ['--set-upstream']);
+   logger.debug(`Pushed branch ${branchName}`);
+ }
+
+ export async function revertCommit(repoPath, commitSha) {
+   const git = getGit(repoPath);
+   await git.revert(commitSha, ['--no-edit']);
+   const log = await git.log({ n: 1 });
+   const revertSha = log.latest.hash;
+   logger.debug(`Reverted ${commitSha}, new commit: ${revertSha}`);
+   return revertSha;
+ }
+
+ export async function stashAndRestore(repoPath, fn) {
+   const git = getGit(repoPath);
+   const status = await git.status();
+   const needsStash = !status.isClean();
+
+   if (needsStash) {
+     await git.stash(['push', '-m', 'drybase-temp-stash']);
+     logger.debug('Stashed local changes');
+   }
+
+   try {
+     return await fn();
+   } finally {
+     if (needsStash) {
+       await git.stash(['pop']);
+       logger.debug('Restored stashed changes');
+     }
+   }
+ }
+
+ export async function getCurrentBranch(repoPath) {
+   const git = getGit(repoPath);
+   const branch = await git.revparse(['--abbrev-ref', 'HEAD']);
+   return branch.trim();
+ }
+
+ export async function getLatestCommitSha(repoPath) {
+   const git = getGit(repoPath);
+   const log = await git.log({ n: 1 });
+   return log.latest?.hash || null;
+ }
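
These helpers compose into the branch-and-push flow the sync commands imply. A hedged sketch (import path, branch name, and file list are illustrative):

import {
  stashAndRestore, createBranch, commitFiles, pushBranch,
} from './core/git-operations.js';

const repoPath = '/path/to/target-repo';
await stashAndRestore(repoPath, async () => {
  // Runs on a clean tree; any local edits are stashed first and restored after.
  await createBranch(repoPath, 'drybase/sync-example', 'main');
  await commitFiles(repoPath, ['src/utils/logger.js'], 'chore: sync base files');
  await pushBranch(repoPath, 'drybase/sync-example');
});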
@@ -0,0 +1,93 @@
+ import { Octokit } from '@octokit/rest';
+ import * as logger from '../utils/logger.js';
+
+ let _octokit = null;
+
+ export function initOctokit(token) {
+   _octokit = new Octokit({ auth: token });
+   return _octokit;
+ }
+
+ function getOctokit() {
+   if (!_octokit) throw new Error('Octokit not initialized. Call initOctokit(token) first.');
+   return _octokit;
+ }
+
+ export function parseRepoName(name) {
+   const parts = name.split('/');
+   if (parts.length !== 2) throw new Error(`Invalid repo name "${name}". Expected "owner/repo" format.`);
+   return { owner: parts[0], repo: parts[1] };
+ }
+
+ async function withRetry(fn, { maxRetries = 3, baseDelay = 1000 } = {}) {
+   for (let attempt = 0; attempt <= maxRetries; attempt++) {
+     try {
+       return await fn();
+     } catch (err) {
+       // Rate limit handling
+       if (err.status === 403 && err.response?.headers?.['x-ratelimit-remaining'] === '0') {
+         const resetTime = parseInt(err.response.headers['x-ratelimit-reset'], 10) * 1000;
+         const waitMs = Math.max(resetTime - Date.now(), 1000);
+         logger.warn(`Rate limited. Waiting ${Math.ceil(waitMs / 1000)}s...`);
+         await sleep(waitMs);
+         continue;
+       }
+
+       if (attempt < maxRetries && (err.status >= 500 || err.status === 429)) {
+         const delay = baseDelay * Math.pow(2, attempt);
+         logger.debug(`Retrying in ${delay}ms (attempt ${attempt + 1}/${maxRetries})`);
+         await sleep(delay);
+         continue;
+       }
+
+       throw err;
+     }
+   }
+   // Reachable only when the final attempt was consumed by the rate-limit branch.
+   throw new Error('Retry attempts exhausted');
+ }
+
+ function sleep(ms) {
+   return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+
+ export async function createPullRequest({ repoName, title, body, head, base, labels }) {
+   const { owner, repo } = parseRepoName(repoName);
+   const octokit = getOctokit();
+
+   const { data: pr } = await withRetry(() =>
+     octokit.pulls.create({ owner, repo, title, body, head, base })
+   );
+
+   if (labels?.length) {
+     await withRetry(() =>
+       octokit.issues.addLabels({ owner, repo, issue_number: pr.number, labels })
+     );
+   }
+
+   logger.debug(`Created PR #${pr.number}: ${title}`);
+   return pr;
+ }
+
+ export async function mergePullRequest({ repoName, pullNumber, mergeMethod = 'squash' }) {
+   const { owner, repo } = parseRepoName(repoName);
+   const octokit = getOctokit();
+
+   const { data } = await withRetry(() =>
+     octokit.pulls.merge({ owner, repo, pull_number: pullNumber, merge_method: mergeMethod })
+   );
+
+   logger.debug(`Merged PR #${pullNumber}`);
+   return data;
+ }
+
+ export async function addPRComment({ repoName, pullNumber, body }) {
+   const { owner, repo } = parseRepoName(repoName);
+   const octokit = getOctokit();
+
+   const { data } = await withRetry(() =>
+     octokit.issues.createComment({ owner, repo, issue_number: pullNumber, body })
+   );
+
+   return data;
+ }
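
End to end, the GitHub layer is used roughly like this; the token source, import path, and all literal values are illustrative:

import { initOctokit, createPullRequest } from './core/github.js';

initOctokit(process.env.GITHUB_TOKEN);

const pr = await createPullRequest({
  repoName: 'acme/web-app',
  title: 'chore: sync base files',
  body: 'Automated sync from the base repo.',
  head: 'drybase/sync-example',
  base: 'main',
  labels: ['drybase'],
});
console.log(`Opened PR #${pr.number}`);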