@whitehatd/crag 0.0.1 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,116 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+ const { computeHash, readFrontmatter, isModified } = require('./integrity');
6
+ const { compareVersions } = require('./version-check');
7
+
8
// Skills shipped with this package.
// `srcFile`  — markdown source filename under <package>/skills.
// `installDir` — directory created under <target>/.claude/skills; the
//                installed file inside it is always named SKILL.md.
const SKILLS = [
  { name: 'pre-start-context', srcFile: 'pre-start-context.md', installDir: 'pre-start-context' },
  { name: 'post-start-validation', srcFile: 'post-start-validation.md', installDir: 'post-start-validation' },
];
12
+
13
/**
 * Verify a source file is a regular file (not a symlink) inside the crag package.
 * Protects against symlink attacks where a malicious skill file could be redirected.
 *
 * @param {string} srcPath - Candidate skill source path.
 * @returns {boolean} true only if srcPath is a regular file whose real path
 *   resolves strictly inside <package>/skills; false on any stat/resolve error.
 */
function isTrustedSource(srcPath) {
  try {
    const stats = fs.lstatSync(srcPath);
    if (!stats.isFile() || stats.isSymbolicLink()) {
      return false;
    }

    // The fully-resolved path must live under our own skills directory.
    const skillsRoot = path.resolve(path.join(__dirname, '..', 'skills'));
    const resolved = fs.realpathSync(srcPath);
    const relative = path.relative(skillsRoot, resolved);

    if (relative === '' || path.isAbsolute(relative)) {
      return false;
    }
    return !relative.startsWith('..');
  } catch {
    // lstat/realpath failed (missing file, permissions) — treat as untrusted.
    return false;
  }
}
30
+
31
/** Create a directory (and any missing parents) if it does not already exist. */
function ensureSkillDir(dir) {
  if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
}

/**
 * Mirror a skill source into .agents/workflows, stripping the frontmatter
 * `name:` line (workflow copies are addressed by filename instead).
 */
function writeWorkflowCopy(srcPath, workflowPath) {
  ensureSkillDir(path.dirname(workflowPath));
  const content = fs.readFileSync(srcPath, 'utf-8').replace(/^name:.*\n/m, '');
  fs.writeFileSync(workflowPath, content);
}

/**
 * Sync installed skills with source skills.
 *
 * @param {string} targetDir - Project root; skills install under
 *   <targetDir>/.claude/skills and workflows under <targetDir>/.agents/workflows.
 * @param {{ force?: boolean, dryRun?: boolean }} [options]
 *   force  — overwrite locally-modified installs (a timestamped .bak is kept).
 *   dryRun — report what would change without touching the filesystem.
 * @returns {{ updated: Array, skipped: Array, conflicted: Array }}
 */
function syncSkills(targetDir, options = {}) {
  const srcDir = path.join(__dirname, '..', 'skills');
  const result = { updated: [], skipped: [], conflicted: [] };
  const dryRun = !!options.dryRun;

  for (const skill of SKILLS) {
    const srcPath = path.join(srcDir, skill.srcFile);
    const installPath = path.join(targetDir, '.claude', 'skills', skill.installDir, 'SKILL.md');
    const workflowPath = path.join(targetDir, '.agents', 'workflows', skill.srcFile);

    // Source skill must exist and be trusted (no symlinks to unexpected locations).
    if (!fs.existsSync(srcPath)) continue;
    if (!isTrustedSource(srcPath)) {
      result.skipped.push({ name: skill.name, version: 'unknown', reason: 'untrusted source (symlink or out-of-tree)' });
      continue;
    }

    const srcMeta = readFrontmatter(srcPath);
    const srcVersion = srcMeta?.version || '0.0.0';

    // First-time install: no version comparison or conflict check needed.
    if (!fs.existsSync(installPath)) {
      if (!dryRun) {
        ensureSkillDir(path.dirname(installPath));
        fs.copyFileSync(srcPath, installPath);
        writeWorkflowCopy(srcPath, workflowPath);
      }
      result.updated.push({ name: skill.name, from: 'none', to: srcVersion });
      continue;
    }

    const installedMeta = readFrontmatter(installPath);
    const installedVersion = installedMeta?.version || '0.0.0';

    // Installed copy is current (or newer) — nothing to do.
    if (compareVersions(srcVersion, installedVersion) <= 0) {
      result.skipped.push({ name: skill.name, version: installedVersion, reason: 'current' });
      continue;
    }

    // Newer version available — refuse to clobber local edits unless forced.
    if (!options.force && isModified(installPath)) {
      result.conflicted.push({
        name: skill.name,
        installed: installedVersion,
        available: srcVersion,
        reason: 'locally modified',
      });
      continue;
    }

    if (!dryRun) {
      // Forcing over a locally-modified file: keep a timestamped backup first.
      if (options.force && isModified(installPath)) {
        fs.copyFileSync(installPath, installPath + '.bak.' + Date.now());
      }
      fs.copyFileSync(srcPath, installPath);
      writeWorkflowCopy(srcPath, workflowPath);
    }

    result.updated.push({ name: skill.name, from: installedVersion, to: srcVersion });
  }

  return result;
}
115
+
116
+ module.exports = { syncSkills };
@@ -0,0 +1,156 @@
1
+ 'use strict';
2
+
3
+ const https = require('https');
4
+ const fs = require('fs');
5
+ const path = require('path');
6
+ const os = require('os');
7
+
8
// Fall back to the temp dir so a cache path exists even when neither
// HOME (POSIX) nor USERPROFILE (Windows) is set.
const HOME = process.env.HOME || process.env.USERPROFILE || os.tmpdir();
const CACHE_DIR = path.join(HOME, '.claude', 'crag');
// JSON record of the most recent registry query result.
const CACHE_FILE = path.join(CACHE_DIR, 'update-check.json');
const CACHE_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
const TIMEOUT_MS = 3000; // registry HTTP request timeout
13
+
14
/**
 * Non-blocking update check. Reads from cache if fresh.
 * Prints a one-line notice if a newer version is available.
 * Never blocks, never throws.
 *
 * Opt-out: set CRAG_NO_UPDATE_CHECK=1 in environment.
 */
function checkOnce() {
  if (process.env.CRAG_NO_UPDATE_CHECK === '1') return;

  try {
    let cache = null;
    if (fs.existsSync(CACHE_FILE)) {
      try {
        cache = JSON.parse(fs.readFileSync(CACHE_FILE, 'utf-8'));
      } catch {
        // Corrupt cache — delete and re-fetch.
        try { fs.unlinkSync(CACHE_FILE); } catch {}
      }
    }

    const fresh =
      cache !== null &&
      typeof cache.checkedAt === 'number' &&
      Date.now() - cache.checkedAt < CACHE_TTL_MS;

    if (fresh) {
      // Within TTL: answer from cache, never hit the network.
      if (cache.updateAvailable) {
        const current = require('../../package.json').version;
        console.log(` \x1b[33m↑\x1b[0m crag v${cache.latestVersion} available (you have v${current}). Run: npm update -g @whitehatd/crag`);
      }
      return;
    }

    // Cache is stale, missing, or corrupt — trigger background check.
    checkRegistry();
  } catch {
    // Silent failure — never block CLI.
  }
}
54
+
55
/**
 * Fetch latest version from npm registry with timeout.
 * Writes result to cache file atomically.
 */
function checkRegistry() {
  const currentVersion = require('../../package.json').version;
  const MAX_SIZE = 100 * 1024; // 100KB response cap

  const handleResponse = (res) => {
    // Abort on any non-2xx status.
    if (res.statusCode && (res.statusCode < 200 || res.statusCode >= 300)) {
      res.resume(); // drain so the socket can be reclaimed
      return;
    }

    let body = '';
    let received = 0;

    res.on('data', (chunk) => {
      received += chunk.length;
      if (received > MAX_SIZE) {
        res.destroy();
        return;
      }
      body += chunk;
    });

    res.on('end', () => {
      try {
        const latest = JSON.parse(body).version;
        if (typeof latest !== 'string') return;

        const updateAvailable = compareVersions(latest, currentVersion) > 0;

        if (!fs.existsSync(CACHE_DIR)) {
          fs.mkdirSync(CACHE_DIR, { recursive: true });
        }

        // Atomic write: write to temp file, then rename.
        const tmp = CACHE_FILE + '.tmp.' + process.pid;
        fs.writeFileSync(tmp, JSON.stringify({
          checkedAt: Date.now(),
          latestVersion: latest,
          currentVersion,
          updateAvailable,
        }));
        try {
          fs.renameSync(tmp, CACHE_FILE);
        } catch {
          // Another process wrote first — that's fine.
          try { fs.unlinkSync(tmp); } catch {}
        }

        if (updateAvailable) {
          console.log(` \x1b[33m↑\x1b[0m crag v${latest} available (you have v${currentVersion}). Run: npm update -g @whitehatd/crag`);
        }
      } catch {
        // Malformed response — ignore.
      }
    });

    res.on('error', () => { /* ignore */ });
  };

  const req = https.get('https://registry.npmjs.org/@whitehatd%2Fcrag/latest', { timeout: TIMEOUT_MS }, handleResponse);

  req.on('error', () => { /* Network error — silent */ });
  req.on('timeout', () => { req.destroy(); });

  // Don't let the request keep the process alive.
  if (req.unref) req.unref();
}
126
+
127
/**
 * Compare two semver version strings.
 * Returns >0 if a > b, <0 if a < b, 0 if equal.
 * Handles x.y.z and x.y.z-prerelease formats; build metadata (+…) is ignored.
 * Pre-release versions are less than their release counterparts, and
 * pre-release identifiers are compared per semver §11: dot-separated,
 * numeric identifiers numerically (so beta.2 < beta.10), alphanumeric ones
 * by ASCII order, with numeric < alphanumeric and fewer identifiers < more.
 *
 * @param {string} a
 * @param {string} b
 * @returns {number}
 */
function compareVersions(a, b) {
  const parse = (v) => {
    // Drop build metadata, then split core from the FULL prerelease tag.
    // (indexOf, not split('-', 2): a prerelease may itself contain hyphens,
    // e.g. "1.0.0-beta-1".)
    const s = String(v).split('+', 1)[0];
    const dash = s.indexOf('-');
    const core = dash === -1 ? s : s.slice(0, dash);
    const prerelease = dash === -1 ? null : (s.slice(dash + 1) || null);
    const parts = core.split('.').map((n) => parseInt(n, 10) || 0);
    while (parts.length < 3) parts.push(0);
    return { core: parts.slice(0, 3), prerelease };
  };

  const pa = parse(a);
  const pb = parse(b);

  for (let i = 0; i < 3; i++) {
    if (pa.core[i] !== pb.core[i]) return pa.core[i] - pb.core[i];
  }

  // Core versions equal — pre-release comparison.
  // No prerelease > has prerelease (e.g., 1.0.0 > 1.0.0-beta).
  if (!pa.prerelease && pb.prerelease) return 1;
  if (pa.prerelease && !pb.prerelease) return -1;
  if (!pa.prerelease && !pb.prerelease) return 0;

  // Semver precedence over dot-separated prerelease identifiers.
  const idsA = pa.prerelease.split('.');
  const idsB = pb.prerelease.split('.');
  const len = Math.max(idsA.length, idsB.length);
  for (let i = 0; i < len; i++) {
    const x = idsA[i];
    const y = idsB[i];
    if (x === undefined) return -1; // shorter list has lower precedence
    if (y === undefined) return 1;
    const xNum = /^\d+$/.test(x);
    const yNum = /^\d+$/.test(y);
    if (xNum && yNum) {
      const diff = parseInt(x, 10) - parseInt(y, 10);
      if (diff !== 0) return diff;
    } else if (xNum !== yNum) {
      return xNum ? -1 : 1; // numeric identifiers sort below alphanumeric
    } else if (x !== y) {
      return x < y ? -1 : 1; // ASCII comparison — locale-independent
    }
  }
  return 0;
}
155
+
156
+ module.exports = { checkOnce, checkRegistry, compareVersions, CACHE_FILE };
@@ -0,0 +1,190 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+
6
/**
 * Detect workspace type by walking up from startDir.
 * Returns { type, root, configFile, members: [], warnings: [] }
 *
 * Warnings surface parse errors (vs. missing files) so users know when a
 * workspace config was present but broken.
 *
 * Supported types (in priority order):
 *   pnpm, npm, yarn, cargo, go, gradle, maven, nx, turbo,
 *   bazel, git-submodules, independent-repos, none
 */
function detectWorkspace(startDir) {
  let current = path.resolve(startDir);
  const warnings = [];

  const wrap = (result) => ({ ...result, warnings });

  // Read a UTF-8 file; on failure record a warning and return null so the
  // caller falls through to the next detector.
  const readOrWarn = (filePath, label) => {
    try {
      return fs.readFileSync(filePath, 'utf-8');
    } catch (err) {
      warnings.push(`malformed ${label} at ${filePath}: ${err.message}`);
      return null;
    }
  };

  while (true) {
    // 1. pnpm workspaces
    if (fs.existsSync(path.join(current, 'pnpm-workspace.yaml'))) {
      return wrap({ type: 'pnpm', root: current, configFile: 'pnpm-workspace.yaml', members: [] });
    }

    // 2. npm/yarn workspaces (package.json with "workspaces")
    const pkgPath = path.join(current, 'package.json');
    if (fs.existsSync(pkgPath)) {
      try {
        const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf-8'));
        if (pkg.workspaces) {
          const patterns = Array.isArray(pkg.workspaces) ? pkg.workspaces : (pkg.workspaces.packages || []);
          return wrap({ type: 'npm', root: current, configFile: 'package.json', members: [], patterns });
        }
      } catch (err) {
        warnings.push(`malformed package.json at ${pkgPath}: ${err.message}`);
      }
    }

    // 3. Cargo workspace
    const cargoPath = path.join(current, 'Cargo.toml');
    if (fs.existsSync(cargoPath)) {
      const content = readOrWarn(cargoPath, 'Cargo.toml');
      if (content !== null && content.includes('[workspace]')) {
        return wrap({ type: 'cargo', root: current, configFile: 'Cargo.toml', members: [], patterns: parseCargoMembers(content) });
      }
    }

    // 4. Go workspace
    const goWork = path.join(current, 'go.work');
    if (fs.existsSync(goWork)) {
      const content = readOrWarn(goWork, 'go.work');
      if (content !== null) {
        return wrap({ type: 'go', root: current, configFile: 'go.work', members: [], patterns: parseGoWorkUses(content) });
      }
    }

    // 5. Gradle multi-project (.kts checked first, matching priority order)
    for (const gradleFile of ['settings.gradle.kts', 'settings.gradle']) {
      const gradlePath = path.join(current, gradleFile);
      if (!fs.existsSync(gradlePath)) continue;
      const content = readOrWarn(gradlePath, gradleFile);
      if (content !== null && (content.includes('include(') || content.includes('include '))) {
        return wrap({ type: 'gradle', root: current, configFile: gradleFile, members: [], patterns: parseGradleIncludes(content) });
      }
    }

    // 6. Maven multi-module
    const pomPath = path.join(current, 'pom.xml');
    if (fs.existsSync(pomPath)) {
      const content = readOrWarn(pomPath, 'pom.xml');
      if (content !== null && content.includes('<modules>')) {
        const modules = parseMavenModules(content);
        if (modules.length > 0) {
          return wrap({ type: 'maven', root: current, configFile: 'pom.xml', members: [], patterns: modules });
        }
      }
    }

    // 7. Nx
    if (fs.existsSync(path.join(current, 'nx.json'))) {
      return wrap({ type: 'nx', root: current, configFile: 'nx.json', members: [] });
    }

    // 8. Turborepo
    if (fs.existsSync(path.join(current, 'turbo.json'))) {
      return wrap({ type: 'turbo', root: current, configFile: 'turbo.json', members: [] });
    }

    // 9. Bazel
    for (const bazelFile of ['WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel']) {
      if (fs.existsSync(path.join(current, bazelFile))) {
        return wrap({ type: 'bazel', root: current, configFile: bazelFile, members: [] });
      }
    }

    // 10. Git submodules
    const gitmodulesPath = path.join(current, '.gitmodules');
    if (fs.existsSync(gitmodulesPath)) {
      const content = readOrWarn(gitmodulesPath, '.gitmodules');
      if (content !== null) {
        return wrap({ type: 'git-submodules', root: current, configFile: '.gitmodules', members: [], submodules: parseGitSubmodules(content) });
      }
    }

    // Stop at git root (if reached without finding workspace markers).
    if (fs.existsSync(path.join(current, '.git'))) {
      // Check for independent nested repos (multiple .git dirs in children).
      const nestedRepos = findNestedRepos(current);
      if (nestedRepos.length >= 2) {
        return wrap({ type: 'independent-repos', root: current, configFile: null, members: [], nestedRepos });
      }
      // Single repo, no workspace.
      return wrap({ type: 'none', root: current, configFile: null, members: [] });
    }

    // Move up.
    const parent = path.dirname(current);
    if (parent === current) break; // Filesystem root
    current = parent;
  }

  return wrap({ type: 'none', root: startDir, configFile: null, members: [] });
}

/** Extract member glob patterns from a Cargo.toml `[workspace]` members array. */
function parseCargoMembers(content) {
  const membersMatch = content.match(/members\s*=\s*\[([\s\S]*?)\]/);
  if (!membersMatch) return [];
  return membersMatch[1].match(/"([^"]+)"/g)?.map((m) => m.replace(/"/g, '')) || [];
}

/** Extract module directories from a go.work `use ( ... )` block. */
function parseGoWorkUses(content) {
  const useMatch = content.match(/use\s*\(([\s\S]*?)\)/);
  if (!useMatch) return [];
  return useMatch[1].split('\n').map((l) => l.trim()).filter((l) => l && !l.startsWith('//'));
}

/** Extract project paths from Gradle settings (Kotlin `include("x")` and Groovy `include 'x'`). */
function parseGradleIncludes(content) {
  const includes = [];
  let m;
  const ktsRegex = /include\s*\(\s*["']([^"']+)["']/g;
  while ((m = ktsRegex.exec(content)) !== null) includes.push(m[1].replace(/:/g, '/'));
  const groovyRegex = /include\s+['"]([^'"]+)['"]/g;
  while ((m = groovyRegex.exec(content)) !== null) includes.push(m[1].replace(/:/g, '/'));
  return includes;
}

/** Extract <module> entries from a Maven pom.xml <modules> block. */
function parseMavenModules(content) {
  const modules = [];
  const regex = /<module>([^<]+)<\/module>/g;
  let m;
  while ((m = regex.exec(content)) !== null) modules.push(m[1]);
  return modules;
}

/** Extract { name, path } pairs from a .gitmodules file. */
function parseGitSubmodules(content) {
  const submodules = [];
  const regex = /\[submodule\s+"([^"]+)"\][\s\S]*?path\s*=\s*(.+)/g;
  let m;
  while ((m = regex.exec(content)) !== null) {
    submodules.push({ name: m[1].trim(), path: m[2].trim() });
  }
  return submodules;
}
170
+
171
+ /**
172
+ * Scan immediate child directories for independent git repos.
173
+ */
174
+ function findNestedRepos(dir) {
175
+ const repos = [];
176
+ try {
177
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
178
+ for (const entry of entries) {
179
+ if (!entry.isDirectory()) continue;
180
+ if (entry.name.startsWith('.') || entry.name === 'node_modules') continue;
181
+ const childGit = path.join(dir, entry.name, '.git');
182
+ if (fs.existsSync(childGit)) {
183
+ repos.push({ name: entry.name, path: path.join(dir, entry.name) });
184
+ }
185
+ }
186
+ } catch { /* permission error — skip */ }
187
+ return repos;
188
+ }
189
+
190
+ module.exports = { detectWorkspace, findNestedRepos };