@misterhuydo/sentinel 1.3.7 → 1.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.cairn/.hint-lock
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
2026-03-
|
|
1
|
+
2026-03-24T07:49:55.081Z
|
package/.cairn/minify-map.json
CHANGED
|
@@ -4,5 +4,11 @@
|
|
|
4
4
|
"state": "compressed",
|
|
5
5
|
"minifiedAt": 1774252515044.4768,
|
|
6
6
|
"readCount": 1
|
|
7
|
+
},
|
|
8
|
+
"J:\\Projects\\Sentinel\\cli\\lib\\add.js": {
|
|
9
|
+
"tempPath": "J:\\Projects\\Sentinel\\cli\\.cairn\\views\\fc4a1a_add.js",
|
|
10
|
+
"state": "compressed",
|
|
11
|
+
"minifiedAt": 1774333679398.312,
|
|
12
|
+
"readCount": 1
|
|
7
13
|
}
|
|
8
14
|
}
|
package/.cairn/session.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
|
-
"message": "Auto-checkpoint at 2026-03-
|
|
3
|
-
"checkpoint_at": "2026-03-
|
|
2
|
+
"message": "Auto-checkpoint at 2026-03-24T07:05:17.941Z",
|
|
3
|
+
"checkpoint_at": "2026-03-24T07:05:17.942Z",
|
|
4
4
|
"active_files": [],
|
|
5
5
|
"notes": [],
|
|
6
6
|
"mtime_snapshot": {}
|
|
@@ -0,0 +1,599 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
const fs = require('fs-extra');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const os = require('os');
|
|
5
|
+
const { execSync, spawnSync } = require('child_process');
|
|
6
|
+
const prompts = require('prompts');
|
|
7
|
+
const chalk = require('chalk');
|
|
8
|
+
const { writeExampleProject, generateWorkspaceScripts, generateProjectScripts } = require('./generate');
|
|
9
|
+
const ok = msg => console.log(chalk.green(' ✔'), msg);
|
|
10
|
+
const info = msg => console.log(chalk.cyan(' →'), msg);
|
|
11
|
+
const warn = msg => console.log(chalk.yellow(' ⚠'), msg);
|
|
12
|
+
const step = msg => console.log('\n' + chalk.bold.white(msg));
|
|
13
|
+
/**
 * Classify a CLI argument into one of: 'git' | 'url' | 'json' | 'name'.
 *
 * - 'git'  : SSH-style remote (git@…) or a GitHub HTTPS URL ending in .git
 * - 'url'  : any other http(s) URL
 * - 'json' : a path-like argument or anything ending in .json
 * - 'name' : a plain project name (also the fallback for an empty argument)
 */
function detectInputType(arg) {
  if (!arg) return 'name';
  const looksSsh = /^git@/.test(arg);
  const looksGithubHttps = /^https?:\/\/github\.com\//.test(arg) && arg.endsWith('.git');
  if (looksSsh || looksGithubHttps) return 'git';
  if (/^https?:\/\//.test(arg)) return 'url';
  // Path separators or a .json suffix (any case) mean a local JSON spec.
  const hasSeparator = arg.includes('/') || arg.includes('\\');
  if (arg.toLowerCase().endsWith('.json') || hasSeparator) return 'json';
  return 'name';
}
|
|
20
|
+
/**
 * Prompt the user for the workspace directory.
 * Defaults to `initial` when given, otherwise ~/sentinel; a leading '~'
 * in the answer is expanded to the user's home directory.
 * Exits the process if the prompt is cancelled.
 */
async function resolveWorkspace(initial) {
  const question = {
    type: 'text',
    name: 'workspace',
    message: 'Workspace directory',
    initial: initial || path.join(os.homedir(), 'sentinel'),
    format: v => v.replace(/^~/, os.homedir()),
  };
  const answers = await prompts([question], { onCancel: () => process.exit(0) });
  return answers.workspace;
}
|
|
30
|
+
/**
 * Return the workspace's `code` directory, or exit(1) with guidance if
 * it does not exist (i.e. `sentinel init` has not been run).
 */
function requireCodeDir(workspace) {
  const codeDir = path.join(workspace, 'code');
  if (fs.existsSync(codeDir)) return codeDir;
  console.error(chalk.red(`Sentinel code not found at ${codeDir}`));
  console.error(chalk.red('Run "sentinel init" first.'));
  process.exit(1);
}
|
|
39
|
+
// Accepted project/repo names: letters, digits, underscore, hyphen (case-insensitive).
const VALID_NAME = /^[a-z0-9_-]+$/i;
// GitHub-only remotes, SSH (git@github.com:org/repo.git) or HTTPS form, must end in .git.
const GITHUB_URL = /^(git@github\.com:|https:\/\/github\.com\/).+\.git$/;
|
|
41
|
+
/**
 * Validate a project spec object (from a JSON file or URL).
 * Checks `name` against VALID_NAME and requires a non-empty `repos`
 * array whose entries each have a GitHub REPO_URL and a name.
 * Returns an array of human-readable error strings (empty when valid).
 */
function validateProjectJson(obj) {
  const problems = [];
  const nameValid = Boolean(obj.name) && VALID_NAME.test(obj.name);
  if (!nameValid) {
    problems.push('name must be letters, numbers, hyphens only');
  }
  const repos = obj.repos;
  if (Array.isArray(repos) && repos.length > 0) {
    repos.forEach((repo, idx) => {
      const urlValid = Boolean(repo.REPO_URL) && GITHUB_URL.test(repo.REPO_URL);
      if (!urlValid) {
        problems.push(`repos[${idx}].REPO_URL must be a valid GitHub URL`);
      }
      if (!repo.name) problems.push(`repos[${idx}].name is required`);
    });
  } else {
    problems.push('repos array is required and must be non-empty');
  }
  return problems;
}
|
|
58
|
+
/**
 * Serialize a flat object as KEY=VALUE lines and write it to filePath,
 * always terminating the file with a trailing newline.
 */
function writePropertiesFile(filePath, obj) {
  const rendered = [];
  for (const [key, value] of Object.entries(obj)) {
    rendered.push(key + '=' + value);
  }
  fs.writeFileSync(filePath, rendered.join('\n') + '\n');
}
|
|
62
|
+
/**
 * Materialize a validated project-spec object into a project directory:
 * merges `obj.sentinel` keys into config/sentinel.properties (updating
 * existing/commented lines in place, appending otherwise) and writes one
 * .properties file per entry of `obj.repos` and `obj.log_sources`.
 */
function applyJsonToProject(projectDir, obj) {
  const configDir = path.join(projectDir, 'config');
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  fs.ensureDirSync(repoDir);
  fs.ensureDirSync(logDir);
  if (obj.sentinel) {
    const propsPath = path.join(configDir, 'sentinel.properties');
    // Start from the existing file (if any) so unrelated keys survive.
    const existing = fs.existsSync(propsPath) ? fs.readFileSync(propsPath, 'utf8') : '';
    let updated = existing;
    Object.entries(obj.sentinel).forEach(([k, v]) => {
      // Matches "KEY=..." or a commented-out "# KEY=..." line.
      // NOTE(review): k is interpolated into the regex unescaped — assumes
      // keys contain no regex metacharacters; verify against spec producers.
      const re = new RegExp(`^#?\\s*${k}\\s*=.*$`, 'm');
      if (re.test(updated)) {
        updated = updated.replace(re, `${k}=${v}`);
      } else {
        updated += `\n${k}=${v}`;
      }
    });
    fs.writeFileSync(propsPath, updated);
    ok('Updated sentinel.properties');
  }
  if (Array.isArray(obj.repos)) {
    obj.repos.forEach(repo => {
      // `name` selects the filename; every other key becomes a property line.
      const { name, ...props } = repo;
      writePropertiesFile(path.join(repoDir, `${name}.properties`), props);
      ok(`Created repo-configs/${name}.properties`);
    });
  }
  if (Array.isArray(obj.log_sources)) {
    obj.log_sources.forEach(src => {
      const { name, ...props } = src;
      writePropertiesFile(path.join(logDir, `${name}.properties`), props);
      ok(`Created log-configs/${name}.properties`);
    });
  }
}
|
|
98
|
+
/**
 * Fetch a URL's body as text. Tries `curl -fsSL` first (fails on HTTP
 * errors, 10 s cap); if curl is missing or fails, falls back to Node's
 * http/https client.
 *
 * Returns a string from the curl path, or a Promise<string> from the
 * fallback — callers must handle both (the caller awaits thenables).
 *
 * Fixes vs. the original fallback: HTTP error statuses (>= 400) are now
 * rejected instead of resolving with the error-page body (matching
 * curl's -f), and a 10 s timeout is applied (matching --max-time 10).
 */
function fetchUrl(url) {
  try {
    const result = spawnSync('curl', ['-fsSL', '--max-time', '10', url], { encoding: 'utf8' });
    if (result.status !== 0) throw new Error(result.stderr || 'curl failed');
    return result.stdout;
  } catch (_) {
    const https = require('https');
    const http = require('http');
    const lib = url.startsWith('https') ? https : http;
    return new Promise((resolve, reject) => {
      const req = lib.get(url, res => {
        // Mirror curl's -f: treat HTTP error statuses as failures instead
        // of silently returning the error page body as "data".
        if (res.statusCode && res.statusCode >= 400) {
          res.resume(); // drain the response so the socket is released
          reject(new Error(`HTTP ${res.statusCode} fetching ${url}`));
          return;
        }
        let data = '';
        res.on('data', c => (data += c));
        res.on('end', () => resolve(data));
      });
      req.on('error', reject);
      // Mirror curl's --max-time 10 so the fallback cannot hang forever.
      req.setTimeout(10000, () => req.destroy(new Error(`Timeout fetching ${url}`)));
    });
  }
}
|
|
116
|
+
/**
 * Ensure github.com is present in ~/.ssh/known_hosts so later git-over-SSH
 * calls do not stall on a host-key prompt. No-op if any "github.com" entry
 * already exists. Best-effort: if ssh-keyscan produces no output within
 * 10 s, nothing is appended.
 *
 * Fix vs. original: the ~/.ssh directory is created if missing — on a
 * fresh machine appendFileSync would otherwise throw ENOENT.
 */
function ensureKnownHosts() {
  const sshDir = path.join(os.homedir(), '.ssh');
  const knownHosts = path.join(sshDir, 'known_hosts');
  const content = fs.existsSync(knownHosts) ? fs.readFileSync(knownHosts, 'utf8') : '';
  if (content.includes('github.com')) return;
  info('Adding GitHub to known_hosts…');
  // ~/.ssh may not exist yet; appending into a missing directory throws.
  fs.ensureDirSync(sshDir);
  const r = spawnSync('ssh-keyscan', ['github.com'], { encoding: 'utf8', timeout: 10000, env: gitEnv() });
  if (r.stdout) fs.appendFileSync(knownHosts, r.stdout);
}
|
|
124
|
+
/**
 * Create (or reuse) an ed25519 deploy key for `repoSlug` under ~/.ssh and
 * register a per-repo `Host github-<slug>` alias in ~/.ssh/config pointing
 * at that key. Returns { keyFile, sshHost }.
 */
function generateDeployKey(repoSlug) {
  const sshDir = path.join(os.homedir(), '.ssh');
  const keyFile = path.join(sshDir, `${repoSlug}.key`);
  fs.ensureDirSync(sshDir);
  fs.chmodSync(sshDir, 0o700);

  if (fs.existsSync(keyFile)) {
    info(`Using existing key: ${keyFile}`);
  } else {
    // -N '' : no passphrase, so Sentinel can use the key unattended.
    const keygenArgs = ['-t', 'ed25519', '-C', `sentinel@${repoSlug}`, '-f', keyFile, '-N', ''];
    spawnSync('ssh-keygen', keygenArgs, { stdio: 'inherit' });
    ok(`Deploy key generated: ${keyFile}`);
  }

  const configFile = path.join(sshDir, 'config');
  const sshHost = `github-${repoSlug}`;
  const existing = fs.existsSync(configFile) ? fs.readFileSync(configFile, 'utf8') : '';
  if (!existing.includes(`Host ${sshHost}`)) {
    const entry = `\nHost ${sshHost}\n HostName github.com\n User git\n IdentityFile ${keyFile}\n IdentitiesOnly yes\n`;
    fs.appendFileSync(configFile, entry);
    fs.chmodSync(configFile, 0o600);
    ok('SSH config updated');
  }
  return { keyFile, sshHost };
}
|
|
147
|
+
/**
 * Print a boxed banner telling the user to add the generated public key
 * as a deploy key on github.com/<orgRepo>, followed by the key itself.
 */
function printDeployKeyInstructions(orgRepo, keyFile) {
  const pubKey = fs.readFileSync(`${keyFile}.pub`, 'utf8').trim();
  const bar = '─'.repeat(70);
  const left = chalk.bold.yellow(` │`);
  const lines = [];
  lines.push('');
  lines.push(chalk.bold.yellow(` ┌${bar}┐`));
  lines.push(left + chalk.bold(` Add this deploy key to GitHub`) + chalk.bold.yellow(' '.repeat(40) + '│'));
  // Pad the repo line so the right border stays aligned for any repo name.
  lines.push(left + chalk.cyan(` github.com/${orgRepo}`) + chalk.bold.yellow(' '.repeat(Math.max(0, 70 - 14 - orgRepo.length)) + '│'));
  lines.push(left + ` Settings → Deploy keys → Add deploy key` + chalk.bold.yellow(' '.repeat(29) + '│'));
  lines.push(left + ` Allow write access: ✓` + chalk.bold.yellow(' '.repeat(47) + '│'));
  lines.push(chalk.bold.yellow(` └${bar}┘`));
  lines.push('');
  lines.push(chalk.green(pubKey));
  lines.push('');
  for (const line of lines) console.log(line);
}
|
|
161
|
+
/**
 * Build an environment for spawned git/ssh commands: the caller's env,
 * with common bin directories guaranteed on PATH, interactive git
 * credential prompts disabled, and any `extra` entries layered on top.
 */
function gitEnv(extra = {}) {
  const fallbackDirs = ['/usr/bin', '/usr/local/bin', '/bin', '/usr/sbin', '/usr/local/sbin'];
  const pathParts = [process.env.PATH || '', ...fallbackDirs].filter(Boolean);
  return Object.assign({}, process.env, {
    PATH: pathParts.join(':'),
    GIT_TERMINAL_PROMPT: '0',
  }, extra);
}
|
|
168
|
+
/**
 * Locate an executable: check the usual absolute locations first, then
 * fall back to `which`, and finally to the bare name (letting the shell's
 * PATH resolve it at spawn time).
 */
function findBin(name) {
  for (const dir of ['/usr/bin', '/usr/local/bin', '/bin']) {
    const candidate = `${dir}/${name}`;
    if (fs.existsSync(candidate)) return candidate;
  }
  const which = spawnSync('which', [name], { encoding: 'utf8', env: gitEnv() });
  return (which.stdout || '').trim() || name;
}
|
|
178
|
+
// Memoized git binary path — resolved once per process.
let _gitBin;

/** Resolve (and cache) the path to the git executable. */
function gitBin() {
  if (!_gitBin) {
    _gitBin = findBin('git');
  }
  return _gitBin;
}
|
|
183
|
+
/**
 * Reduce a GitHub remote URL (SSH or HTTPS, with or without .git) to its
 * bare "org/repo" slug.
 */
function gitUrlToOrgRepo(gitUrl) {
  let slug = gitUrl;
  slug = slug.replace(/^git@github\.com:/, '');
  slug = slug.replace(/^https?:\/\/github\.com\//, '');
  slug = slug.replace(/\.git$/, '');
  return slug;
}
|
|
189
|
+
/** Normalize any GitHub remote URL to its canonical HTTPS .git form. */
function toHttpsUrl(gitUrl) {
  const orgRepo = gitUrlToOrgRepo(gitUrl);
  return `https://github.com/${orgRepo}.git`;
}
|
|
192
|
+
/**
 * Probe whether a repo is publicly readable: an unauthenticated
 * `git ls-remote` over HTTPS succeeds only for public repos.
 */
function isPublicRepo(gitUrl) {
  const probe = spawnSync(gitBin(), ['ls-remote', '--heads', toHttpsUrl(gitUrl)], {
    encoding: 'utf8',
    timeout: 10000,
    stdio: ['pipe', 'pipe', 'pipe'],
    env: gitEnv(),
  });
  return probe.status === 0;
}
|
|
199
|
+
/**
 * Check that a repo is reachable, optionally via a specific SSH deploy
 * key. Returns { ok, stderr } — stderr carries the git error (or spawn
 * error message) for display when the check fails.
 */
function validateAccess(repoUrl, keyFile) {
  const sshOverride = {};
  if (keyFile) {
    sshOverride.GIT_SSH_COMMAND = `ssh -i ${keyFile} -o StrictHostKeyChecking=no -o BatchMode=yes`;
  }
  const probe = spawnSync(gitBin(), ['ls-remote', '--heads', repoUrl], {
    encoding: 'utf8',
    timeout: 15000,
    stdio: ['pipe', 'pipe', 'pipe'],
    env: gitEnv(sshOverride),
  });
  const stderr = (probe.stderr || probe.error?.message || '').trim();
  return { ok: probe.status === 0, stderr };
}
|
|
209
|
+
/**
 * Scan <cloneDir>/config/repo-configs for *.properties files (skipping
 * underscore-prefixed templates) and extract each file's REPO_URL.
 * Returns [{ file, propsPath, url }]; files without a REPO_URL line are
 * ignored. Returns [] when the directory does not exist.
 */
function discoverReposFromClone(cloneDir) {
  const repoCfgDir = path.join(cloneDir, 'config', 'repo-configs');
  if (!fs.existsSync(repoCfgDir)) return [];
  const found = [];
  for (const file of fs.readdirSync(repoCfgDir)) {
    if (!file.endsWith('.properties') || file.startsWith('_')) continue;
    const propsPath = path.join(repoCfgDir, file);
    const match = fs.readFileSync(propsPath, 'utf8').match(/^REPO_URL\s*=\s*(.+)$/m);
    if (match) {
      found.push({ file, propsPath, url: match[1].trim() });
    }
  }
  return found;
}
|
|
221
|
+
/**
 * Interactive flow for `sentinel add <git-url>`:
 *  1. Generate a deploy key for the primary repo, wait for the user to add
 *     it on GitHub, and verify access via `git ls-remote`.
 *  2. Shallow-clone the repo and scan config/repo-configs for additional
 *     repos, splitting them into public vs private.
 *  3. For each private repo, generate/verify a deploy key and write
 *     SSH_KEY_FILE back into its .properties file.
 * Then prompts for deploy mode (AUTO_PUBLISH) and a GitHub token, shows a
 * dry-run preview, and on confirmation generates the project scripts.
 * Exits the process on any unreachable repo or cancelled prompt.
 */
async function addFromGit(gitUrl, workspace) {
  // Last path segment without .git, e.g. "repo" from git@github.com:org/repo.git.
  const repoSlug = gitUrl.replace(/\.git$/, '').split(/[:/]/).pop();
  const orgRepo = gitUrlToOrgRepo(gitUrl);
  const { name } = await prompts([{
    type: 'text',
    name: 'name',
    message: 'Project name',
    initial: repoSlug,
    validate: v => VALID_NAME.test(v) || 'Use letters, numbers, hyphens only',
  }], { onCancel: () => process.exit(0) });

  // --- [1/3] primary repo: deploy key + reachability check -----------------
  step(`[1/3] Setting up SSH access to ${repoSlug}`);
  ensureKnownHosts();
  const { keyFile } = generateDeployKey(repoSlug);
  printDeployKeyInstructions(orgRepo, keyFile);
  // Pause until the user has pasted the key into GitHub's deploy-key UI.
  await prompts({
    type: 'text', name: '_', format: () => '',
    message: chalk.bold(`Press Enter once you've added the deploy key to GitHub…`),
  }, { onCancel: () => process.exit(0) });
  const primary = validateAccess(gitUrl, keyFile);
  if (!primary.ok) {
    console.error(chalk.red(' ✖ Cannot reach ' + gitUrl));
    if (primary.stderr) console.error(chalk.red(' ' + primary.stderr));
    console.error(chalk.yellow(' Check the deploy key has write access, then re-run.'));
    process.exit(1);
  }
  ok(`${repoSlug}: reachable`);

  // --- [2/3] clone and discover nested repo-configs ------------------------
  const projectDir = path.join(workspace, name);
  step(`[2/3] Scanning repo-configs in ${repoSlug}…`);
  if (!fs.existsSync(projectDir)) {
    // Shallow clone — only needed to read config/, not for history.
    spawnSync(gitBin(), ['clone', '--depth', '1', gitUrl, projectDir], {
      stdio: 'inherit',
      env: gitEnv({ GIT_SSH_COMMAND: `ssh -i ${keyFile} -o StrictHostKeyChecking=no -o BatchMode=yes` }),
    });
  }
  const discovered = discoverReposFromClone(projectDir);
  const privateRepos = [];
  const publicRepos = [];
  if (discovered.length === 0) {
    info('No repo-configs found — project will use example config.');
  } else {
    info(`Found ${discovered.length} repo(s) in config/repo-configs/:`);
    for (const r of discovered) {
      const slug = r.file.replace('.properties', '');
      const pub = isPublicRepo(r.url);
      const tag = pub ? chalk.green('[public]') : chalk.yellow('[private]');
      console.log(` ${tag} ${slug.padEnd(36)} ${r.url}`);
      if (pub) publicRepos.push({ ...r, slug });
      else privateRepos.push({ ...r, slug });
    }
  }

  // --- [3/3] per-repo deploy keys for private repos ------------------------
  if (privateRepos.length > 0) {
    step(`[3/3] Deploy keys needed for ${privateRepos.length} private repo(s)`);
    for (const r of privateRepos) {
      const { keyFile: rKey } = generateDeployKey(r.slug);
      r.keyFile = rKey;
      const rOrgRepo = gitUrlToOrgRepo(r.url);
      printDeployKeyInstructions(rOrgRepo, rKey);
    }
    if (publicRepos.length > 0) {
      console.log(chalk.green(' ✔ Public repos (no deploy key needed):'));
      for (const r of publicRepos) {
        console.log(chalk.green(` ${r.slug}`));
      }
      console.log('');
    }
    // Single pause for all keys, then validate each private repo in turn.
    await prompts({
      type: 'text', name: '_', format: () => '',
      message: chalk.bold(`Press Enter once you've added all ${privateRepos.length} deploy key(s) to GitHub…`),
    }, { onCancel: () => process.exit(0) });
    step('Validating repository access…');
    for (const r of privateRepos) {
      const v = validateAccess(r.url, r.keyFile);
      if (!v.ok) {
        console.error(chalk.red(` ✖ ${r.slug}: cannot reach ${r.url}`));
        if (v.stderr) console.error(chalk.red(' ' + v.stderr));
        console.error(chalk.yellow(' Fix access then re-run sentinel add.'));
        process.exit(1);
      }
      ok(`${r.slug}: reachable`);
    }
    for (const r of publicRepos) {
      ok(`${r.slug}: public, no key needed`);
    }
    // Persist each repo's key path into its .properties file (replace an
    // existing/commented SSH_KEY_FILE line, or append one).
    for (const r of privateRepos) {
      let props = fs.readFileSync(r.propsPath, 'utf8');
      if (/^#?\s*SSH_KEY_FILE\s*=/m.test(props)) {
        props = props.replace(/^#?\s*SSH_KEY_FILE\s*=.*/m, `SSH_KEY_FILE=${r.keyFile}`);
      } else {
        props = props.trimEnd() + `\nSSH_KEY_FILE=${r.keyFile}\n`;
      }
      fs.writeFileSync(r.propsPath, props);
      info(`SSH_KEY_FILE written to config/repo-configs/${r.file}`);
    }
  } else if (discovered.length > 0) {
    step('[3/3] All repos are public — no deploy keys needed');
    ok('All repos accessible without auth');
  }

  // --- deploy mode (AUTO_PUBLISH) ------------------------------------------
  const { autoPublish } = await prompts({
    type: 'select',
    name: 'autoPublish',
    message: 'How should Sentinel deploy fixes?',
    hint: 'You can change this per-repo in config/repo-configs/',
    choices: [
      {
        title: 'Open a PR for each fix (AUTO_PUBLISH=false) — recommended',
        description: 'Sentinel pushes to a branch and opens a GitHub PR. You review and merge.',
        value: false,
      },
      {
        title: 'Push directly to main (AUTO_PUBLISH=true) — fully autonomous',
        description: 'Sentinel commits and pushes fixes straight to your main branch.',
        value: true,
      },
    ],
  }, { onCancel: () => process.exit(0) });
  if (autoPublish) {
    warn('AUTO_PUBLISH=true: fixes push directly to main. Ensure CI blocks bad pushes.');
  }

  // --- GitHub token (required for PR mode, optional otherwise) -------------
  const workspaceProps = path.join(workspace, 'sentinel.properties');
  // Reuse a token already saved in the workspace, if present.
  const existingToken = fs.existsSync(workspaceProps)
    ? (fs.readFileSync(workspaceProps, 'utf8').match(/^GITHUB_TOKEN\s*=\s*(.+)$/m) || [])[1]?.trim()
    : '';
  if (!autoPublish) {
    console.log('');
    console.log(chalk.bold(' GitHub Personal Access Token (classic) — required for opening PRs'));
    console.log(chalk.cyan(' github.com/settings/tokens/new → Tokens (classic)'));
    console.log(chalk.cyan(' Note: "Expiration → No expiration" Scope: ✓ repo'));
    console.log('');
  }
  const { githubToken } = await prompts({
    // type: null skips the prompt entirely (auto-publish + token on file).
    type: (!autoPublish || !existingToken) ? 'password' : null,
    name: 'githubToken',
    message: existingToken
      ? 'GitHub token (press Enter to keep current)'
      : autoPublish
        ? 'GitHub token (classic, repo scope) — optional, press Enter to skip'
        : 'GitHub token (classic, repo scope)',
    validate: v => {
      if (!autoPublish && !v && !existingToken) return 'Token is required for PR mode';
      if (v && !v.startsWith('ghp_') && !v.startsWith('github_pat_')) return 'Should start with ghp_ or github_pat_';
      return true;
    },
  }, { onCancel: () => process.exit(0) });
  const effectiveToken = githubToken || existingToken || '';
  if (effectiveToken && fs.existsSync(workspaceProps)) {
    let props = fs.readFileSync(workspaceProps, 'utf8');
    if (/^#?\s*GITHUB_TOKEN\s*=/m.test(props))
      props = props.replace(/^#?\s*GITHUB_TOKEN\s*=.*/m, `GITHUB_TOKEN=${effectiveToken}`);
    else
      props = props.trimEnd() + `\nGITHUB_TOKEN=${effectiveToken}\n`;
    fs.writeFileSync(workspaceProps, props);
    ok('GITHUB_TOKEN saved to workspace sentinel.properties');
  } else if (effectiveToken) {
    info('GITHUB_TOKEN will be written when project files are created');
  }

  // --- dry-run preview + confirmation --------------------------------------
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  if (discovered.length > 0) {
    info(` Using ${discovered.length} repo-config(s) from ${repoSlug}`);
  } else {
    info(` config/repo-configs/${repoSlug}.properties`);
  }
  info(` AUTO_PUBLISH=${autoPublish} (applies to all repos without an explicit setting)`);
  info(' init.sh, start.sh, stop.sh');
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}"?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }
  // A non-git directory at the target path is a conflict; the fresh clone
  // made above (which has .git) is fine to reuse.
  if (fs.existsSync(projectDir) && !fs.existsSync(path.join(projectDir, '.git'))) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  // --- materialize the project ---------------------------------------------
  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  if (discovered.length > 0) {
    generateProjectScripts(projectDir, codeDir, pythonBin);
    // Ensure the primary repo has a config even if the clone didn't ship one.
    const primaryProps = path.join(projectDir, 'config', 'repo-configs', `${repoSlug}.properties`);
    if (!fs.existsSync(primaryProps)) {
      writePropertiesFile(primaryProps, {
        REPO_NAME: repoSlug,
        REPO_URL: gitUrl,
        BRANCH: 'main',
        AUTO_PUBLISH: autoPublish ? 'true' : 'false',
        SSH_KEY_FILE: keyFile,
        CAIRN_MCP_ENABLED: 'true',
      });
    }
    ok(`Project "${name}" ready at ${projectDir}`);
    printNextSteps(projectDir, autoPublish);
    await offerToStart(projectDir);
  } else {
    // No configs in the clone: scaffold from the example project instead.
    writeExampleProject(projectDir, codeDir, pythonBin);
    const repoDir = path.join(projectDir, 'config', 'repo-configs');
    writePropertiesFile(path.join(repoDir, `${repoSlug}.properties`), {
      REPO_NAME: repoSlug,
      REPO_URL: gitUrl,
      BRANCH: 'main',
      AUTO_PUBLISH: autoPublish ? 'true' : 'false',
      SSH_KEY_FILE: keyFile,
      CAIRN_MCP_ENABLED: 'true',
    });
    const example = path.join(repoDir, '_example.properties');
    if (fs.existsSync(example)) fs.removeSync(example);
    generateWorkspaceScripts(workspace, {}, {}, {}, effectiveToken);
    ok(`Project "${name}" created at ${projectDir}`);
    printNextSteps(projectDir, autoPublish);
    await offerToStart(projectDir);
  }
}
|
|
431
|
+
/**
 * Interactive flow for `sentinel add [name]`: prompt for a project name,
 * show a dry-run preview, and on confirmation scaffold an example project
 * under the workspace. Exits if the directory already exists or the
 * prompt is cancelled.
 */
async function addFromName(nameArg, workspace) {
  const { name } = await prompts([{
    type: 'text',
    name: 'name',
    message: 'Project name',
    initial: nameArg || 'my-project',
    validate: v => VALID_NAME.test(v) || 'Use letters, numbers, hyphens only',
  }], { onCancel: () => process.exit(0) });

  const projectDir = path.join(workspace, name);
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  const preview = [
    ' config/sentinel.properties',
    ' config/repo-configs/_example.properties',
    ' config/log-configs/_example.properties',
    ' init.sh, start.sh, stop.sh',
  ];
  for (const line of preview) info(line);

  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}"?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) {
    info('Aborted.');
    return;
  }
  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  writeExampleProject(projectDir, codeDir, pythonBin);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
463
|
+
/**
 * Interactive flow for `sentinel add <path/to/project.json>`: parse and
 * validate the spec, show a dry-run preview, then scaffold the project and
 * apply the spec's repos/log-sources/sentinel settings.
 * Exits on parse/validation failure, an existing target directory, or a
 * cancelled prompt.
 */
async function addFromJson(jsonPath, workspace) {
  step(`Reading ${jsonPath}`);
  let obj;
  try {
    obj = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
  } catch (e) {
    console.error(chalk.red(` ✖ Cannot parse ${jsonPath}: ${e.message}`));
    process.exit(1);
  }
  const errors = validateProjectJson(obj);
  if (errors.length) {
    console.error(chalk.red(' ✖ Invalid project JSON:'));
    errors.forEach(e => console.error(chalk.red(` - ${e}`)));
    process.exit(1);
  }
  ok('JSON is valid');

  const { name } = obj;
  const projectDir = path.join(workspace, name);
  // Preview exactly which .properties files the spec will produce.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  (obj.repos || []).forEach(r => info(` config/repo-configs/${r.name}.properties (${r.REPO_URL})`));
  (obj.log_sources || []).forEach(s => info(` config/log-configs/${s.name}.properties (${s.SOURCE_TYPE})`));
  if (obj.sentinel) {
    Object.entries(obj.sentinel).forEach(([k, v]) => info(` sentinel.properties: ${k}=${v}`));
  }
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}" from ${path.basename(jsonPath)}?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }
  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  writeExampleProject(projectDir, codeDir, pythonBin);
  // Drop the scaffold's placeholder configs before applying the real spec.
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  if (fs.existsSync(path.join(repoDir, '_example.properties'))) fs.removeSync(path.join(repoDir, '_example.properties'));
  if (fs.existsSync(path.join(logDir, '_example.properties'))) fs.removeSync(path.join(logDir, '_example.properties'));
  applyJsonToProject(projectDir, obj);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
509
|
+
/**
 * Interactive flow for `sentinel add <https://…/project.json>`: fetch the
 * spec, parse and validate it, show a dry-run preview, then scaffold the
 * project and apply the spec — same shape as addFromJson but sourced over
 * HTTP. Exits on fetch/parse/validation failure, an existing target
 * directory, or a cancelled prompt.
 */
async function addFromUrl(url, workspace) {
  step(`Fetching ${url}`);
  let raw;
  try {
    raw = fetchUrl(url);
    // fetchUrl returns a string (curl path) or a Promise (http fallback).
    if (raw && typeof raw.then === 'function') raw = await raw;
  } catch (e) {
    console.error(chalk.red(` ✖ Cannot fetch ${url}: ${e.message}`));
    process.exit(1);
  }
  let obj;
  try {
    obj = JSON.parse(raw);
  } catch (e) {
    console.error(chalk.red(` ✖ Response is not valid JSON: ${e.message}`));
    process.exit(1);
  }
  const errors = validateProjectJson(obj);
  if (errors.length) {
    console.error(chalk.red(' ✖ Invalid project JSON at URL:'));
    errors.forEach(e => console.error(chalk.red(` - ${e}`)));
    process.exit(1);
  }
  ok('JSON is valid');

  const { name } = obj;
  const projectDir = path.join(workspace, name);
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  (obj.repos || []).forEach(r => info(` config/repo-configs/${r.name}.properties (${r.REPO_URL})`));
  (obj.log_sources || []).forEach(s => info(` config/log-configs/${s.name}.properties (${s.SOURCE_TYPE})`));
  if (obj.sentinel) {
    Object.entries(obj.sentinel).forEach(([k, v]) => info(` sentinel.properties: ${k}=${v}`));
  }
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}" from ${url}?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }
  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  writeExampleProject(projectDir, codeDir, pythonBin);
  // Drop the scaffold's placeholder configs before applying the real spec.
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  if (fs.existsSync(path.join(repoDir, '_example.properties'))) fs.removeSync(path.join(repoDir, '_example.properties'));
  if (fs.existsSync(path.join(logDir, '_example.properties'))) fs.removeSync(path.join(logDir, '_example.properties'));
  applyJsonToProject(projectDir, obj);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
563
|
+
/**
 * Print the post-creation summary: config dir, start script, log file.
 * `autoPublish` selects an extra deploy-mode line; when it is neither
 * true nor false (older call sites omit it), no mode line is shown.
 */
function printNextSteps(projectDir, autoPublish) {
  const logFile = path.join(projectDir, 'logs', 'sentinel.log');
  let mode = '';
  if (autoPublish === true) {
    mode = chalk.yellow('\n ⚠ Fixes push directly to main — ensure CI blocks bad pushes');
  } else if (autoPublish === false) {
    mode = chalk.cyan('\n → Sentinel opens a GitHub PR for each fix — review and merge at github.com');
  }
  console.log(`
 Config: ${chalk.cyan(path.join(projectDir, 'config', ''))}
 Start: ${chalk.cyan(path.join(projectDir, 'start.sh'))}${mode}
 Logs: ${chalk.cyan(logFile)}
 ${chalk.gray(`tail -f ${logFile}`)}
`);
}
|
|
577
|
+
/**
 * Offer to run the project's start.sh immediately. Silently returns when
 * the script is missing, the user declines, or the prompt is cancelled;
 * prints log guidance only when the script exits 0.
 */
async function offerToStart(projectDir) {
  const startSh = path.join(projectDir, 'start.sh');
  if (!fs.existsSync(startSh)) return;
  const { startNow } = await prompts({
    type: 'confirm', name: 'startNow',
    message: 'Start Sentinel now?', initial: true,
  }, { onCancel: () => {} });
  if (!startNow) return;
  const { status } = spawnSync('bash', [startSh], { stdio: 'inherit', env: gitEnv() });
  if (status !== 0) return;
  const logFile = path.join(projectDir, 'logs', 'sentinel.log');
  ok('Sentinel started');
  info(`Logs: tail -f ${logFile}`);
}
|
|
592
|
+
module.exports = async function add(arg) {
|
|
593
|
+
const type = detectInputType(arg);
|
|
594
|
+
const workspace = await resolveWorkspace();
|
|
595
|
+
if (type === 'git') return addFromGit(arg, workspace);
|
|
596
|
+
if (type === 'url') return addFromUrl(arg, workspace);
|
|
597
|
+
if (type === 'json') return addFromJson(arg, workspace);
|
|
598
|
+
return addFromName(arg, workspace);
|
|
599
|
+
};
|
package/package.json
CHANGED
|
@@ -11,6 +11,7 @@ import logging
|
|
|
11
11
|
import re
|
|
12
12
|
import subprocess
|
|
13
13
|
import textwrap
|
|
14
|
+
from datetime import datetime, timezone
|
|
14
15
|
from pathlib import Path
|
|
15
16
|
|
|
16
17
|
from .config_loader import RepoConfig, SentinelConfig
|
|
@@ -135,10 +136,17 @@ def _claude_cmd(bin_path: str, prompt: str) -> list[str]:
|
|
|
135
136
|
return [bin_path, "--print", prompt]
|
|
136
137
|
|
|
137
138
|
|
|
138
|
-
def _run_claude_attempt(
|
|
139
|
+
def _run_claude_attempt(
|
|
140
|
+
bin_path: str,
|
|
141
|
+
prompt: str,
|
|
142
|
+
env: dict,
|
|
143
|
+
cwd: str | None = None,
|
|
144
|
+
claude_log_path: Path | None = None,
|
|
145
|
+
) -> tuple[str, bool]:
|
|
139
146
|
"""
|
|
140
147
|
Run claude CLI with the given env. Returns (output, timed_out).
|
|
141
148
|
Raises FileNotFoundError if binary is missing.
|
|
149
|
+
If claude_log_path is given, writes the full prompt + raw output there.
|
|
142
150
|
"""
|
|
143
151
|
try:
|
|
144
152
|
result = subprocess.run(
|
|
@@ -146,11 +154,32 @@ def _run_claude_attempt(bin_path: str, prompt: str, env: dict, cwd: str | None =
|
|
|
146
154
|
capture_output=True, text=True, timeout=SUBPROCESS_TIMEOUT, env=env,
|
|
147
155
|
cwd=cwd or None,
|
|
148
156
|
)
|
|
149
|
-
|
|
157
|
+
output = (result.stdout or "") + (result.stderr or "")
|
|
158
|
+
if claude_log_path:
|
|
159
|
+
_write_claude_log(claude_log_path, prompt, output, timed_out=False)
|
|
160
|
+
return output, False
|
|
150
161
|
except subprocess.TimeoutExpired:
|
|
162
|
+
if claude_log_path:
|
|
163
|
+
_write_claude_log(claude_log_path, prompt, "", timed_out=True)
|
|
151
164
|
return "", True
|
|
152
165
|
|
|
153
166
|
|
|
167
|
+
def _write_claude_log(log_path: Path, prompt: str, output: str, timed_out: bool) -> None:
|
|
168
|
+
log_path.parent.mkdir(parents=True, exist_ok=True)
|
|
169
|
+
sep = "─" * 72
|
|
170
|
+
content = (
|
|
171
|
+
f"{sep}\n"
|
|
172
|
+
f"PROMPT\n"
|
|
173
|
+
f"{sep}\n"
|
|
174
|
+
f"{prompt}\n\n"
|
|
175
|
+
f"{sep}\n"
|
|
176
|
+
f"OUTPUT{' [TIMED OUT]' if timed_out else ''}\n"
|
|
177
|
+
f"{sep}\n"
|
|
178
|
+
f"{output if output else '(no output)'}\n"
|
|
179
|
+
)
|
|
180
|
+
log_path.write_text(content, encoding="utf-8")
|
|
181
|
+
|
|
182
|
+
|
|
154
183
|
def generate_fix(
|
|
155
184
|
event: ErrorEvent,
|
|
156
185
|
repo: RepoConfig,
|
|
@@ -195,7 +224,13 @@ def generate_fix(
|
|
|
195
224
|
except Exception as _e:
|
|
196
225
|
logger.debug("fix_engine: git log check failed: %s", _e)
|
|
197
226
|
|
|
198
|
-
|
|
227
|
+
ts = datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")
|
|
228
|
+
claude_logs_dir = Path(cfg.workspace_dir).parent / "logs" / "claude"
|
|
229
|
+
claude_log_path = claude_logs_dir / f"{event.fingerprint[:8]}-{ts}.log"
|
|
230
|
+
logger.info(
|
|
231
|
+
"Invoking Claude Code for %s (fp=%s) — log: %s",
|
|
232
|
+
event.source, event.fingerprint, claude_log_path,
|
|
233
|
+
)
|
|
199
234
|
|
|
200
235
|
base_env = _os.environ.copy()
|
|
201
236
|
api_env = {**base_env, "ANTHROPIC_API_KEY": cfg.anthropic_api_key} if cfg.anthropic_api_key else None
|
|
@@ -217,7 +252,9 @@ def generate_fix(
|
|
|
217
252
|
if env is None:
|
|
218
253
|
continue
|
|
219
254
|
logger.info("fix_engine: trying %s for %s", label, event.fingerprint)
|
|
220
|
-
output, timed_out = _run_claude_attempt(
|
|
255
|
+
output, timed_out = _run_claude_attempt(
|
|
256
|
+
cfg.claude_code_bin, prompt, env, cwd=repo.local_path, claude_log_path=claude_log_path,
|
|
257
|
+
)
|
|
221
258
|
if timed_out:
|
|
222
259
|
logger.error("Claude Code timed out for %s", event.fingerprint)
|
|
223
260
|
return "error", None, ""
|
|
@@ -31,20 +31,17 @@ and opens GitHub PRs for admin review (or pushes directly if AUTO_PUBLISH=true).
|
|
|
31
31
|
Your job:
|
|
32
32
|
- Understand what the DevOps engineer needs in natural language
|
|
33
33
|
- Query Sentinel's live state (errors, fixes, open PRs) on their behalf
|
|
34
|
-
- Deliver tasks/issues to
|
|
34
|
+
- Deliver tasks/issues to this project — you are scoped exclusively to this project
|
|
35
35
|
- Control Sentinel (pause/resume) when asked
|
|
36
36
|
- Give honest, concise answers — you know this system inside out
|
|
37
|
-
- If a project name is unclear or ambiguous, ask the engineer to clarify — never guess
|
|
38
37
|
|
|
39
38
|
What you can do (tools available):
|
|
40
39
|
|
|
41
40
|
1. get_status — Show recent errors detected, fixes applied/pending, open PRs.
|
|
42
41
|
e.g. "what happened today?", "any issues?", "show open PRs"
|
|
43
42
|
|
|
44
|
-
2. create_issue — Deliver a fix/task to
|
|
45
|
-
|
|
46
|
-
If the project name is ambiguous or not found, ask to clarify.
|
|
47
|
-
e.g. "tell 1881 to fix X", "look into Y in elprint", "investigate Z"
|
|
43
|
+
2. create_issue — Deliver a fix/task to this project.
|
|
44
|
+
e.g. "fix X", "look into Y", "investigate Z"
|
|
48
45
|
|
|
49
46
|
3. pause_sentinel — Create SENTINEL_PAUSE file to halt all auto-fix activity.
|
|
50
47
|
e.g. "pause sentinel", "stop auto-fixing"
|
|
@@ -145,8 +142,8 @@ reply with a short summary grouped by category:
|
|
|
145
142
|
• `check_auth_status` — Claude auth health, rate-limit circuit state, fix engine 24 h stats — "is Claude working?", "any rate limits?", "auth issues?"
|
|
146
143
|
|
|
147
144
|
*Project & task delivery*
|
|
148
|
-
• `list_projects` —
|
|
149
|
-
• `create_issue` — deliver a task to
|
|
145
|
+
• `list_projects` — repos and log sources this Sentinel instance manages — "what repos do you watch?"
|
|
146
|
+
• `create_issue` — deliver a fix/task to this project — "fix X", "investigate Y"
|
|
150
147
|
• `trigger_poll` — run a log-fetch + fix cycle right now — "check now"
|
|
151
148
|
• `pause_sentinel` / `resume_sentinel` — halt or resume all auto-fix activity — "pause Sentinel"
|
|
152
149
|
|
|
@@ -900,7 +897,8 @@ _TOOLS = [
|
|
|
900
897
|
# ── Workspace helpers ─────────────────────────────────────────────────────────
|
|
901
898
|
|
|
902
899
|
def _workspace_dir() -> Path:
|
|
903
|
-
|
|
900
|
+
"""Return the current project directory (each process is scoped to one project)."""
|
|
901
|
+
return Path(".").resolve()
|
|
904
902
|
|
|
905
903
|
def _short_name(dir_name: str) -> str:
|
|
906
904
|
"""'sentinel-1881' → '1881', 'sentinel-elprint' → 'elprint', others unchanged."""
|
|
@@ -925,25 +923,21 @@ def _read_project_name(project_dir: Path) -> str:
|
|
|
925
923
|
return _short_name(project_dir.name)
|
|
926
924
|
|
|
927
925
|
def _find_project_dirs(target: str = "") -> list[Path]:
|
|
928
|
-
"""Return
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
|
|
939
|
-
|
|
940
|
-
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
results.append(d)
|
|
944
|
-
except Exception:
|
|
945
|
-
pass
|
|
946
|
-
return results
|
|
926
|
+
"""Return the current project dir (optionally filtered by target name).
|
|
927
|
+
Each sentinel process is scoped to one project — cross-project visibility is
|
|
928
|
+
intentionally disabled to prevent information leakage across Slack workspaces.
|
|
929
|
+
"""
|
|
930
|
+
current = Path(".").resolve()
|
|
931
|
+
if not (current / "config").exists():
|
|
932
|
+
return []
|
|
933
|
+
if target:
|
|
934
|
+
t = target.lower()
|
|
935
|
+
name = _read_project_name(current)
|
|
936
|
+
if (t not in current.name.lower()
|
|
937
|
+
and t not in _short_name(current.name).lower()
|
|
938
|
+
and t not in name.lower()):
|
|
939
|
+
return []
|
|
940
|
+
return [current]
|
|
947
941
|
|
|
948
942
|
def _git_pull(path: Path) -> dict:
|
|
949
943
|
try:
|