drybase 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,76 @@
+ import { createHash } from 'node:crypto';
+ import { createReadStream } from 'node:fs';
+ import fs from 'node:fs/promises';
+ import path from 'node:path';
+
+ export function computeFileHash(filePath) {
+   return new Promise((resolve, reject) => {
+     const hash = createHash('md5');
+     const stream = createReadStream(filePath);
+     stream.on('data', (chunk) => hash.update(chunk));
+     stream.on('end', () => resolve(hash.digest('hex')));
+     stream.on('error', reject);
+   });
+ }
+
+ export async function computeHashMap(basePath, files) {
+   const map = {};
+   for (const file of files) {
+     const fullPath = path.resolve(basePath, file);
+     try {
+       map[file] = await computeFileHash(fullPath);
+     } catch {
+       // File may have been deleted
+       map[file] = null;
+     }
+   }
+   return map;
+ }
+
+ export function detectDivergence(lastSyncHashes, currentHashes) {
+   const diverged = [];
+   for (const [file, currentHash] of Object.entries(currentHashes)) {
+     const lastHash = lastSyncHashes[file];
+     if (lastHash && currentHash && lastHash !== currentHash) {
+       diverged.push(file);
+     }
+   }
+   return diverged;
+ }
+
+ export async function diffFiles(baseDir, targetDir, relativePaths) {
+   const results = { added: [], modified: [], deleted: [], unchanged: [] };
+
+   for (const relPath of relativePaths) {
+     const baseFull = path.resolve(baseDir, relPath);
+     const targetFull = path.resolve(targetDir, relPath);
+
+     const baseExists = await fileExists(baseFull);
+     const targetExists = await fileExists(targetFull);
+
+     if (baseExists && !targetExists) {
+       results.added.push(relPath);
+     } else if (!baseExists && targetExists) {
+       results.deleted.push(relPath);
+     } else if (baseExists && targetExists) {
+       const baseHash = await computeFileHash(baseFull);
+       const targetHash = await computeFileHash(targetFull);
+       if (baseHash !== targetHash) {
+         results.modified.push(relPath);
+       } else {
+         results.unchanged.push(relPath);
+       }
+     }
+   }
+
+   return results;
+ }
+
+ async function fileExists(p) {
+   try {
+     await fs.access(p);
+     return true;
+   } catch {
+     return false;
+   }
+ }
@@ -0,0 +1,60 @@
+ import { generateText } from 'ai';
+ import * as logger from './logger.js';
+
+ function getLLMProvider(llmConfig) {
+   const { provider, model, apiKey, baseURL } = llmConfig;
+
+   const providers = {
+     anthropic: async () => {
+       const { createAnthropic } = await import('@ai-sdk/anthropic');
+       return createAnthropic({ apiKey })(model);
+     },
+     openai: async () => {
+       const { createOpenAI } = await import('@ai-sdk/openai');
+       return createOpenAI({ apiKey })(model);
+     },
+     google: async () => {
+       const { createGoogleGenerativeAI } = await import('@ai-sdk/google');
+       return createGoogleGenerativeAI({ apiKey })(model);
+     },
+     mistral: async () => {
+       const { createMistral } = await import('@ai-sdk/mistral');
+       return createMistral({ apiKey })(model);
+     },
+     ollama: async () => {
+       const { createOpenAI } = await import('@ai-sdk/openai');
+       return createOpenAI({ baseURL: baseURL || 'http://localhost:11434/v1', apiKey: 'ollama' })(model);
+     },
+     'openai-compatible': async () => {
+       const { createOpenAI } = await import('@ai-sdk/openai');
+       return createOpenAI({ apiKey, baseURL })(model);
+     },
+   };
+
+   if (!providers[provider]) {
+     throw new Error(`Unsupported LLM provider: ${provider}. Supported: ${Object.keys(providers).join(', ')}`);
+   }
+
+   return providers[provider]();
+ }
+
+ export async function createLLMClient(llmConfig) {
+   return getLLMProvider(llmConfig);
+ }
+
+ export async function generateLLMText(llmConfig, prompt) {
+   const model = await getLLMProvider(llmConfig);
+   const temperature = llmConfig.options?.temperature ?? 0.3;
+   const maxTokens = llmConfig.options?.maxTokens ?? 4000;
+
+   logger.debug(`LLM request: provider=${llmConfig.provider}, model=${llmConfig.model}`);
+
+   const { text } = await generateText({
+     model,
+     messages: [{ role: 'user', content: prompt }],
+     temperature,
+     maxTokens,
+   });
+
+   return text;
+ }
@@ -0,0 +1,45 @@
+ import chalk from 'chalk';
+
+ export function info(...args) {
+   console.log(chalk.blue('ℹ'), ...args);
+ }
+
+ export function warn(...args) {
+   console.log(chalk.yellow('⚠'), ...args);
+ }
+
+ export function error(...args) {
+   console.error(chalk.red('✖'), ...args);
+ }
+
+ export function success(...args) {
+   console.log(chalk.green('✔'), ...args);
+ }
+
+ export function debug(...args) {
+   if (process.env.DRYBASE_DEBUG) {
+     console.log(chalk.gray('⊙'), ...args);
+   }
+ }
+
+ export function dim(...args) {
+   console.log(chalk.dim(...args));
+ }
+
+ export function table(rows) {
+   if (!rows.length) return;
+   const keys = Object.keys(rows[0]);
+   const widths = keys.map((k) =>
+     Math.max(k.length, ...rows.map((r) => String(r[k] ?? '').length))
+   );
+
+   const header = keys.map((k, i) => k.padEnd(widths[i])).join('  ');
+   const separator = widths.map((w) => '─'.repeat(w)).join('──');
+
+   console.log(chalk.bold(header));
+   console.log(chalk.dim(separator));
+   for (const row of rows) {
+     const line = keys.map((k, i) => String(row[k] ?? '').padEnd(widths[i])).join('  ');
+     console.log(line);
+   }
+ }
@@ -0,0 +1,70 @@
+ import fs from 'node:fs/promises';
+ import path from 'node:path';
+ import crypto from 'node:crypto';
+
+ const STATE_FILE = '.drybase-state.json';
+
+ function emptyState() {
+   return { lastSync: {}, syncHistory: [] };
+ }
+
+ export async function loadState(configDir) {
+   const filePath = path.join(configDir, STATE_FILE);
+   try {
+     const raw = await fs.readFile(filePath, 'utf8');
+     return JSON.parse(raw);
+   } catch {
+     return emptyState();
+   }
+ }
+
+ export async function saveState(configDir, state) {
+   const filePath = path.join(configDir, STATE_FILE);
+   await fs.writeFile(filePath, JSON.stringify(state, null, 2) + '\n', 'utf8');
+ }
+
+ export function getLastSync(state, repoName) {
+   return state.lastSync?.[repoName] || null;
+ }
+
+ export function recordSync(state, { repo, files, commitSha, status }) {
+   const timestamp = new Date().toISOString();
+   const id = `sync-${crypto.randomUUID().slice(0, 8)}`;
+
+   const newLastSync = {
+     ...state.lastSync,
+     [repo]: { timestamp, files, commitSha, status },
+   };
+
+   const entry = {
+     id,
+     timestamp,
+     repo,
+     files: Object.keys(files),
+     commitSha,
+     status,
+   };
+
+   const newHistory = [entry, ...state.syncHistory];
+
+   return { lastSync: newLastSync, syncHistory: newHistory };
+ }
+
+ export function markRolledBack(state, repo) {
+   // Mark only the most recent successful sync for this repo as rolled back
+   let found = false;
+   const rolledHistory = state.syncHistory.map((entry) => {
+     if (!found && entry.repo === repo && entry.status === 'success') {
+       found = true;
+       return { ...entry, status: 'rolled-back' };
+     }
+     return entry;
+   });
+
+   const newLastSync = { ...state.lastSync };
+   if (newLastSync[repo]) {
+     newLastSync[repo] = { ...newLastSync[repo], status: 'rolled-back' };
+   }
+
+   return { lastSync: newLastSync, syncHistory: rolledHistory };
+ }
@@ -0,0 +1,58 @@
+ import { spawn } from 'node:child_process';
+ import * as logger from './logger.js';
+
+ const DEFAULT_TIMEOUT = 5 * 60 * 1000; // 5 minutes
+
+ export function runTests(command, cwd, { timeout = DEFAULT_TIMEOUT } = {}) {
+   return new Promise((resolve) => {
+     const start = Date.now();
+     logger.info(`Running tests: ${command}`);
+
+     const [cmd, ...args] = command.split(' ');
+     const child = spawn(cmd, args, {
+       cwd,
+       shell: true,
+       stdio: 'pipe',
+       env: { ...process.env },
+     });
+
+     let stdout = '';
+     let stderr = '';
+
+     child.stdout.on('data', (data) => {
+       stdout += data.toString();
+     });
+
+     child.stderr.on('data', (data) => {
+       stderr += data.toString();
+     });
+
+     const timer = setTimeout(() => {
+       child.kill('SIGTERM');
+       const duration = Date.now() - start;
+       logger.warn(`Tests timed out after ${Math.round(duration / 1000)}s`);
+       resolve({ passed: false, output: stdout + stderr + '\n[TIMEOUT]', duration });
+     }, timeout);
+
+     child.on('close', (code) => {
+       clearTimeout(timer);
+       const duration = Date.now() - start;
+       const passed = code === 0;
+
+       if (passed) {
+         logger.success(`Tests passed in ${Math.round(duration / 1000)}s`);
+       } else {
+         logger.warn(`Tests failed (exit code ${code})`);
+       }
+
+       resolve({ passed, output: stdout + stderr, duration });
+     });
+
+     child.on('error', (err) => {
+       clearTimeout(timer);
+       const duration = Date.now() - start;
+       logger.error(`Test execution error: ${err.message}`);
+       resolve({ passed: false, output: err.message, duration });
+     });
+   });
+ }