@misterhuydo/sentinel 1.4.30 → 1.4.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.cairn/.hint-lock
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
2026-03-
|
|
1
|
+
2026-03-25T04:25:39.345Z
|
package/.cairn/session.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
|
-
"message": "Auto-checkpoint at 2026-03-
|
|
3
|
-
"checkpoint_at": "2026-03-
|
|
2
|
+
"message": "Auto-checkpoint at 2026-03-25T04:33:16.269Z",
|
|
3
|
+
"checkpoint_at": "2026-03-25T04:33:16.270Z",
|
|
4
4
|
"active_files": [],
|
|
5
5
|
"notes": [],
|
|
6
6
|
"mtime_snapshot": {}
|
|
@@ -0,0 +1,599 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
const fs = require('fs-extra');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const os = require('os');
|
|
5
|
+
const { execSync, spawnSync } = require('child_process');
|
|
6
|
+
const prompts = require('prompts');
|
|
7
|
+
const chalk = require('chalk');
|
|
8
|
+
const { writeExampleProject, generateWorkspaceScripts, generateProjectScripts } = require('./generate');
|
|
9
|
+
// Tagged console loggers used throughout the CLI: a green check for
// successes, a cyan arrow for informational lines, a yellow warning
// marker, and a bold section header preceded by a blank line.
const ok = (msg) => console.log(chalk.green(' ✔'), msg);
const info = (msg) => console.log(chalk.cyan(' →'), msg);
const warn = (msg) => console.log(chalk.yellow(' ⚠'), msg);
const step = (msg) => console.log('\n' + chalk.bold.white(msg));
|
|
13
|
+
/**
 * Classify the CLI argument so `add` knows which flow to run.
 *
 * @param {string|undefined} arg - raw command-line argument
 * @returns {'git'|'url'|'json'|'name'}
 *   'git'  — SSH remote (git@…) or a github.com HTTPS URL ending in .git
 *   'url'  — any other http(s) URL (expected to serve a JSON spec)
 *   'json' — a filesystem path (.json extension or contains a separator)
 *   'name' — anything else, including no argument at all
 */
function detectInputType(arg) {
  if (!arg) return 'name';
  const sshRemote = /^git@/.test(arg);
  const githubClone = /^https?:\/\/github\.com\//.test(arg) && arg.endsWith('.git');
  if (sshRemote || githubClone) return 'git';
  if (/^https?:\/\//.test(arg)) return 'url';
  const looksLikePath = arg.includes('/') || arg.includes('\\');
  if (looksLikePath || arg.toLowerCase().endsWith('.json')) return 'json';
  return 'name';
}
|
|
20
|
+
/**
 * Ask the user for the workspace directory.
 *
 * @param {string} [initial] - pre-filled default; falls back to ~/sentinel
 * @returns {Promise<string>} the chosen path with a leading "~" expanded
 *   to the home directory. Cancelling the prompt exits the process.
 */
async function resolveWorkspace(initial) {
  const question = {
    type: 'text',
    name: 'workspace',
    message: 'Workspace directory',
    initial: initial || path.join(os.homedir(), 'sentinel'),
    format: v => v.replace(/^~/, os.homedir()),
  };
  const { workspace } = await prompts([question], { onCancel: () => process.exit(0) });
  return workspace;
}
|
|
30
|
+
/**
 * Locate the Sentinel code checkout inside the workspace.
 * Exits the process with status 1 (after printing guidance) when
 * `<workspace>/code` does not exist — i.e. `sentinel init` was never run.
 *
 * @param {string} workspace
 * @returns {string} absolute path to the code directory
 */
function requireCodeDir(workspace) {
  const codeDir = path.join(workspace, 'code');
  if (fs.existsSync(codeDir)) return codeDir;
  console.error(chalk.red(`Sentinel code not found at ${codeDir}`));
  console.error(chalk.red('Run "sentinel init" first.'));
  process.exit(1);
}
|
|
39
|
+
// Project names: letters, digits, underscores, and hyphens (case-insensitive).
const VALID_NAME = /^[a-z0-9_-]+$/i;
// GitHub clone URLs: SSH (git@github.com:…) or HTTPS, ending in ".git".
const GITHUB_URL = /^(git@github\.com:|https:\/\/github\.com\/).+\.git$/;

/**
 * Validate a parsed project-spec object ({ name, repos, … }).
 *
 * @param {object} obj - parsed JSON spec
 * @returns {string[]} human-readable error messages; empty when valid
 */
function validateProjectJson(obj) {
  const errors = [];
  if (!obj.name || !VALID_NAME.test(obj.name)) {
    // FIX: the message previously omitted underscores, which VALID_NAME
    // accepts — keep the message consistent with the actual rule.
    errors.push('name must be letters, numbers, hyphens, or underscores only');
  }
  if (!Array.isArray(obj.repos) || obj.repos.length === 0) {
    errors.push('repos array is required and must be non-empty');
  } else {
    obj.repos.forEach((r, i) => {
      if (!r.REPO_URL || !GITHUB_URL.test(r.REPO_URL)) {
        errors.push(`repos[${i}].REPO_URL must be a valid GitHub URL`);
      }
      if (!r.name) errors.push(`repos[${i}].name is required`);
    });
  }
  return errors;
}
|
|
58
|
+
/**
 * Serialize a flat object to a Java-style properties file:
 * one "key=value" per line, ending with a trailing newline.
 *
 * @param {string} filePath - destination (overwritten if present)
 * @param {object} obj - flat key/value map
 */
function writePropertiesFile(filePath, obj) {
  const body = Object.entries(obj)
    .map(([key, value]) => `${key}=${value}`)
    .join('\n');
  fs.writeFileSync(filePath, `${body}\n`);
}
|
|
62
|
+
/**
 * Materialize a parsed project-JSON spec into a project's config tree:
 * merge `obj.sentinel` keys into sentinel.properties, and write one
 * properties file per entry of `obj.repos` and `obj.log_sources`.
 *
 * @param {string} projectDir - project root (config/ is created as needed)
 * @param {object} obj - validated spec ({ sentinel?, repos?, log_sources? })
 */
function applyJsonToProject(projectDir, obj) {
  const configDir = path.join(projectDir, 'config');
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  fs.ensureDirSync(repoDir);
  fs.ensureDirSync(logDir);
  // FIX: keys are interpolated into a RegExp below; escape metacharacters
  // so a key like "log.level" or "retry[0]" matches literally instead of
  // matching arbitrary characters (or throwing a SyntaxError).
  const escapeRe = s => String(s).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  if (obj.sentinel) {
    const propsPath = path.join(configDir, 'sentinel.properties');
    const existing = fs.existsSync(propsPath) ? fs.readFileSync(propsPath, 'utf8') : '';
    let updated = existing;
    Object.entries(obj.sentinel).forEach(([k, v]) => {
      // Match an existing "KEY=…" line, or a commented-out "# KEY=…" one.
      const re = new RegExp(`^#?\\s*${escapeRe(k)}\\s*=.*$`, 'm');
      if (re.test(updated)) {
        // FIX: use a replacer function so "$"-sequences in the value
        // (e.g. "$&") are inserted literally, not as replacement patterns.
        updated = updated.replace(re, () => `${k}=${v}`);
      } else {
        updated += `\n${k}=${v}`;
      }
    });
    fs.writeFileSync(propsPath, updated);
    ok('Updated sentinel.properties');
  }
  if (Array.isArray(obj.repos)) {
    obj.repos.forEach(repo => {
      // "name" selects the file; every other key becomes a property line.
      const { name, ...props } = repo;
      writePropertiesFile(path.join(repoDir, `${name}.properties`), props);
      ok(`Created repo-configs/${name}.properties`);
    });
  }
  if (Array.isArray(obj.log_sources)) {
    obj.log_sources.forEach(src => {
      const { name, ...props } = src;
      writePropertiesFile(path.join(logDir, `${name}.properties`), props);
      ok(`Created log-configs/${name}.properties`);
    });
  }
}
|
|
98
|
+
/**
 * Fetch a URL's body as text. Tries curl first (synchronous, follows
 * redirects, 10s cap via --max-time); when curl is unavailable or fails,
 * falls back to node's http(s) client and returns a Promise instead.
 *
 * @param {string} url
 * @returns {string|Promise<string>} body text, or a Promise of it
 *   (callers normalize with `if (raw && typeof raw.then === 'function')`).
 */
function fetchUrl(url) {
  try {
    const result = spawnSync('curl', ['-fsSL', '--max-time', '10', url], { encoding: 'utf8' });
    if (result.status !== 0) throw new Error(result.stderr || 'curl failed');
    return result.stdout;
  } catch (_) {
    const https = require('https');
    const http = require('http');
    const lib = url.startsWith('https') ? https : http;
    return new Promise((resolve, reject) => {
      const req = lib.get(url, res => {
        // FIX: previously any status code resolved successfully, so a
        // 404/500 error page was returned as the body and surfaced later
        // as a confusing "not valid JSON" message. Reject instead.
        if (res.statusCode && res.statusCode >= 400) {
          res.resume(); // drain the response so the socket is released
          reject(new Error(`HTTP ${res.statusCode} for ${url}`));
          return;
        }
        let data = '';
        res.on('data', c => (data += c));
        res.on('end', () => resolve(data));
      });
      req.on('error', reject);
      // FIX: mirror curl's --max-time so the fallback cannot hang forever.
      req.setTimeout(10000, () => req.destroy(new Error(`Timeout fetching ${url}`)));
    });
  }
}
|
|
116
|
+
/**
 * Make sure github.com appears in ~/.ssh/known_hosts so later git/ssh
 * operations don't stall on an interactive host-key prompt.
 * Best-effort: keyscan failures are silently ignored.
 */
function ensureKnownHosts() {
  const sshDir = path.join(os.homedir(), '.ssh');
  const knownHosts = path.join(sshDir, 'known_hosts');
  fs.ensureDirSync(sshDir);
  // chmod can fail on some filesystems (e.g. mounted FAT); ignore.
  try { fs.chmodSync(sshDir, 0o700); } catch (_) {}
  const current = fs.existsSync(knownHosts) ? fs.readFileSync(knownHosts, 'utf8') : '';
  if (current.includes('github.com')) return;
  info('Adding GitHub to known_hosts…');
  const scan = spawnSync('ssh-keyscan', ['github.com'], { encoding: 'utf8', timeout: 10000, env: gitEnv() });
  if (scan.stdout) fs.appendFileSync(knownHosts, scan.stdout);
}
|
|
127
|
+
/**
 * Create (or reuse) an ed25519 deploy key for one repository.
 *
 * @param {string} repoSlug - repo short name; becomes the key filename
 * @param {string} [keyDir] - when given, keys are stored in this project
 *   directory; when omitted, keys go to ~/.ssh and an SSH host alias
 *   ("github-<slug>") is appended to ~/.ssh/config.
 * @returns {{keyFile: string}} path to the private key (public key is
 *   alongside it with a ".pub" suffix, written by ssh-keygen)
 */
function generateDeployKey(repoSlug, keyDir) {
  const useProjectDir = !!keyDir;
  const dir = keyDir || path.join(os.homedir(), '.ssh');
  const keyFile = path.join(dir, `${repoSlug}.key`);
  if (useProjectDir) {
    fs.ensureDirSync(dir);
  } else {
    // ~/.ssh must exist with 0700 or ssh refuses to use its contents.
    const sshDir = path.join(os.homedir(), '.ssh');
    fs.ensureDirSync(sshDir);
    fs.chmodSync(sshDir, 0o700);
  }
  if (fs.existsSync(keyFile)) {
    // Idempotent: never overwrite a key the user may have registered.
    info(`Using existing key: ${keyFile}`);
  } else {
    // -N '' → no passphrase, so unattended git operations can use the key.
    spawnSync('ssh-keygen', ['-t', 'ed25519', '-C', `sentinel@${repoSlug}`, '-f', keyFile, '-N', ''],
      { stdio: 'inherit' });
    ok(`Deploy key generated: ${keyFile}`);
  }
  if (!useProjectDir) {
    // Register a per-repo host alias so "git@github-<slug>:…" picks this
    // exact key (IdentitiesOnly prevents the agent offering others).
    const configFile = path.join(os.homedir(), '.ssh', 'config');
    const sshHost = `github-${repoSlug}`;
    const existing = fs.existsSync(configFile) ? fs.readFileSync(configFile, 'utf8') : '';
    if (!existing.includes(`Host ${sshHost}`)) {
      fs.appendFileSync(configFile,
        `\nHost ${sshHost}\n HostName github.com\n User git\n IdentityFile ${keyFile}\n IdentitiesOnly yes\n`);
      fs.chmodSync(configFile, 0o600);
      ok('SSH config updated');
    }
  }
  return { keyFile };
}
|
|
158
|
+
/**
 * Print a boxed banner telling the user to register the deploy key on
 * GitHub, followed by the public key itself (ready to copy-paste).
 *
 * @param {string} orgRepo - "org/repo" slug used to link the settings page
 * @param {string} keyFile - private key path; "<keyFile>.pub" must exist
 */
function printDeployKeyInstructions(orgRepo, keyFile) {
  const pubKey = fs.readFileSync(`${keyFile}.pub`, 'utf8').trim();
  const WIDTH = 70;
  const bar = '─'.repeat(WIDTH);
  // FIX: padding was hard-coded per row (40/29/47 spaces), which drifted
  // out of alignment with the 70-char bar and — for the dynamic orgRepo
  // row — could only avoid a negative repeat count via an ad-hoc
  // Math.max. Compute every row's padding from its content instead.
  const pad = text => ' '.repeat(Math.max(0, WIDTH - text.length));
  const row = (text, paint = s => s) =>
    console.log(chalk.bold.yellow(` │`) + paint(text) + chalk.bold.yellow(pad(text) + '│'));
  console.log('');
  console.log(chalk.bold.yellow(` ┌${bar}┐`));
  row(` Add this deploy key to GitHub`, chalk.bold);
  row(` github.com/${orgRepo}`, chalk.cyan);
  row(` Settings → Deploy keys → Add deploy key`);
  row(` Allow write access: ✓`);
  console.log(chalk.bold.yellow(` └${bar}┘`));
  console.log('');
  console.log(chalk.green(pubKey));
  console.log('');
}
|
|
172
|
+
/**
 * Build an environment for spawned git/ssh processes: guarantees the
 * standard system bin directories are on PATH (cron/launchd contexts
 * often strip it) and disables git's interactive credential prompt.
 *
 * @param {object} [extra] - extra variables; override anything else
 * @returns {object} a fresh env object (process.env is not mutated)
 */
function gitEnv(extra = {}) {
  const dirs = [
    process.env.PATH || '',
    '/usr/bin', '/usr/local/bin', '/bin', '/usr/sbin', '/usr/local/sbin',
  ];
  const PATH = dirs.filter(Boolean).join(':');
  return { ...process.env, PATH, GIT_TERMINAL_PROMPT: '0', ...extra };
}
|
|
179
|
+
/**
 * Locate an executable by name. Checks the usual system directories
 * first (environments with a stripped PATH may lack `which` itself),
 * then falls back to `which`, and finally to the bare name so the OS
 * resolves it via PATH at spawn time.
 *
 * @param {string} name - binary name, e.g. "git"
 * @returns {string} absolute path when found, otherwise the bare name
 */
function findBin(name) {
  for (const dir of ['/usr/bin', '/usr/local/bin', '/bin']) {
    const candidate = `${dir}/${name}`;
    if (fs.existsSync(candidate)) return candidate;
  }
  const lookup = spawnSync('which', [name], { encoding: 'utf8', env: gitEnv() });
  return (lookup.stdout || '').trim() || name;
}
|
|
189
|
+
// Cached path to the git binary; resolved once per process.
let _gitBin;

/** @returns {string} path to git (see findBin), memoized */
function gitBin() {
  _gitBin = _gitBin || findBin('git');
  return _gitBin;
}
|
|
194
|
+
/**
 * Reduce a GitHub clone URL (SSH or HTTPS) to its "org/repo" slug.
 *
 * @param {string} gitUrl
 * @returns {string} e.g. "org/repo"
 */
function gitUrlToOrgRepo(gitUrl) {
  let slug = gitUrl.replace(/^git@github\.com:/, '');
  slug = slug.replace(/^https?:\/\/github\.com\//, '');
  return slug.replace(/\.git$/, '');
}

/**
 * Canonical anonymous HTTPS clone URL for the same repository.
 *
 * @param {string} gitUrl - SSH or HTTPS GitHub URL
 * @returns {string} "https://github.com/org/repo.git"
 */
function toHttpsUrl(gitUrl) {
  return `https://github.com/${gitUrlToOrgRepo(gitUrl)}.git`;
}
|
|
203
|
+
/**
 * Probe whether a repository is publicly readable by running
 * `git ls-remote` over anonymous HTTPS (10s timeout, output captured).
 *
 * @param {string} gitUrl - SSH or HTTPS GitHub URL
 * @returns {boolean} true when the anonymous probe exits 0
 */
function isPublicRepo(gitUrl) {
  const probe = spawnSync(gitBin(), ['ls-remote', '--heads', toHttpsUrl(gitUrl)], {
    encoding: 'utf8',
    timeout: 10000,
    stdio: ['pipe', 'pipe', 'pipe'],
    env: gitEnv(),
  });
  return probe.status === 0;
}
|
|
210
|
+
/**
 * Check that a repository is reachable, optionally with a specific
 * deploy key (forced via GIT_SSH_COMMAND; BatchMode prevents prompts).
 *
 * @param {string} repoUrl - clone URL to probe with `git ls-remote`
 * @param {string} [keyFile] - private key to use; omitted → default auth
 * @returns {{ok: boolean, stderr: string}} exit status plus trimmed
 *   stderr (or the spawn error message) for diagnostics
 */
function validateAccess(repoUrl, keyFile) {
  const sshOverride = keyFile
    ? { GIT_SSH_COMMAND: `ssh -i ${keyFile} -o StrictHostKeyChecking=no -o BatchMode=yes` }
    : {};
  const probe = spawnSync(gitBin(), ['ls-remote', '--heads', repoUrl], {
    encoding: 'utf8',
    timeout: 15000,
    stdio: ['pipe', 'pipe', 'pipe'],
    env: gitEnv(sshOverride),
  });
  return { ok: probe.status === 0, stderr: (probe.stderr || probe.error?.message || '').trim() };
}
|
|
220
|
+
/**
 * Scan a cloned project for repo config files and extract their URLs.
 * Looks at config/repo-configs/*.properties, skipping "_"-prefixed
 * templates and any file without a REPO_URL line.
 *
 * @param {string} cloneDir - project root
 * @returns {{file: string, propsPath: string, url: string}[]}
 */
function discoverReposFromClone(cloneDir) {
  const repoCfgDir = path.join(cloneDir, 'config', 'repo-configs');
  if (!fs.existsSync(repoCfgDir)) return [];
  const results = [];
  for (const file of fs.readdirSync(repoCfgDir)) {
    if (!file.endsWith('.properties') || file.startsWith('_')) continue;
    const propsPath = path.join(repoCfgDir, file);
    const match = fs.readFileSync(propsPath, 'utf8').match(/^REPO_URL\s*=\s*(.+)$/m);
    if (match) results.push({ file, propsPath, url: match[1].trim() });
  }
  return results;
}
|
|
232
|
+
/**
 * Interactive flow: add a project backed by an existing GitHub repo.
 *
 * Phases: (1) generate/verify a deploy key for the config repo itself,
 * (2) shallow-clone it and discover any repo-configs it carries,
 * (3) generate deploy keys for the private ones, then choose the
 * AUTO_PUBLISH mode, collect a GitHub token, preview, and scaffold.
 * Exits the process on any unrecoverable failure or prompt cancel.
 *
 * @param {string} gitUrl - SSH or HTTPS GitHub clone URL (.git suffix)
 * @param {string} workspace - workspace root directory
 */
async function addFromGit(gitUrl, workspace) {
  // "repo" from "git@github.com:org/repo.git" — last path-ish segment.
  const repoSlug = gitUrl.replace(/\.git$/, '').split(/[:/]/).pop();
  const orgRepo = gitUrlToOrgRepo(gitUrl);
  const { name } = await prompts([{
    type: 'text',
    name: 'name',
    message: 'Project name',
    initial: repoSlug,
    validate: v => VALID_NAME.test(v) || 'Use letters, numbers, hyphens only',
  }], { onCancel: () => process.exit(0) });
  // --- Phase 1: SSH access to the project's own repo -------------------
  step(`[1/3] Setting up SSH access to ${repoSlug}`);
  ensureKnownHosts();
  const { keyFile } = generateDeployKey(repoSlug);
  printDeployKeyInstructions(orgRepo, keyFile);
  // Pause until the user has registered the key on GitHub.
  await prompts({
    type: 'text', name: '_', format: () => '',
    message: chalk.bold(`Press Enter once you've added the deploy key to GitHub…`),
  }, { onCancel: () => process.exit(0) });
  const primary = validateAccess(gitUrl, keyFile);
  if (!primary.ok) {
    console.error(chalk.red(' ✖ Cannot reach ' + gitUrl));
    if (primary.stderr) console.error(chalk.red(' ' + primary.stderr));
    console.error(chalk.yellow(' Check the deploy key has write access, then re-run.'));
    process.exit(1);
  }
  ok(`${repoSlug}: reachable`);
  // --- Phase 2: clone and discover nested repo-configs -----------------
  const projectDir = path.join(workspace, name);
  step(`[2/3] Scanning repo-configs in ${repoSlug}…`);
  if (!fs.existsSync(projectDir)) {
    // Shallow clone is enough — only the config tree is inspected.
    spawnSync(gitBin(), ['clone', '--depth', '1', gitUrl, projectDir], {
      stdio: 'inherit',
      env: gitEnv({ GIT_SSH_COMMAND: `ssh -i ${keyFile} -o StrictHostKeyChecking=no -o BatchMode=yes` }),
    });
  }
  const discovered = discoverReposFromClone(projectDir);
  // Keep generated key material out of the user's config repo.
  const gitignorePath = path.join(projectDir, '.gitignore');
  const giContent = fs.existsSync(gitignorePath) ? fs.readFileSync(gitignorePath, 'utf8') : '';
  const giLines = [];
  if (!giContent.includes('*.key')) giLines.push('*.key');
  if (!giContent.includes('*.pub')) giLines.push('*.pub');
  if (giLines.length) {
    fs.appendFileSync(gitignorePath, (giContent.endsWith('\n') ? '' : '\n') + giLines.join('\n') + '\n');
    ok('.gitignore updated: *.key and *.pub excluded from git');
  }
  // Partition discovered repos by anonymous-HTTPS reachability.
  const privateRepos = [];
  const publicRepos = [];
  if (discovered.length === 0) {
    info('No repo-configs found — project will use example config.');
  } else {
    info(`Found ${discovered.length} repo(s) in config/repo-configs/:`);
    for (const r of discovered) {
      const slug = r.file.replace('.properties', '');
      const pub = isPublicRepo(r.url);
      const tag = pub ? chalk.green('[public]') : chalk.yellow('[private]');
      console.log(` ${tag} ${slug.padEnd(36)} ${r.url}`);
      if (pub) publicRepos.push({ ...r, slug });
      else privateRepos.push({ ...r, slug });
    }
  }
  // --- Phase 3: deploy keys for private nested repos -------------------
  if (privateRepos.length > 0) {
    step(`[3/3] Deploy keys needed for ${privateRepos.length} private repo(s)`);
    for (const r of privateRepos) {
      // Keys for nested repos live inside the project dir (gitignored above).
      const { keyFile: rKey } = generateDeployKey(r.slug, projectDir);
      r.keyFile = rKey;
      const rOrgRepo = gitUrlToOrgRepo(r.url);
      printDeployKeyInstructions(rOrgRepo, rKey);
    }
    if (publicRepos.length > 0) {
      console.log(chalk.green(' ✔ Public repos (no deploy key needed):'));
      for (const r of publicRepos) {
        console.log(chalk.green(` ${r.slug}`));
      }
      console.log('');
    }
    await prompts({
      type: 'text', name: '_', format: () => '',
      message: chalk.bold(`Press Enter once you've added all ${privateRepos.length} deploy key(s) to GitHub…`),
    }, { onCancel: () => process.exit(0) });
    step('Validating repository access…');
    for (const r of privateRepos) {
      const v = validateAccess(r.url, r.keyFile);
      if (!v.ok) {
        console.error(chalk.red(` ✖ ${r.slug}: cannot reach ${r.url}`));
        if (v.stderr) console.error(chalk.red(' ' + v.stderr));
        console.error(chalk.yellow(' Fix access then re-run sentinel add.'));
        process.exit(1);
      }
      ok(`${r.slug}: reachable`);
    }
    for (const r of publicRepos) {
      ok(`${r.slug}: public, no key needed`);
    }
    for (const r of privateRepos) {
      info(`Key stored at ${r.keyFile} (auto-discovered, not committed to git)`);
    }
  } else if (discovered.length > 0) {
    step('[3/3] All repos are public — no deploy keys needed');
    ok('All repos accessible without auth');
  }
  // --- Deployment mode: PR-per-fix vs direct push ----------------------
  const { autoPublish } = await prompts({
    type: 'select',
    name: 'autoPublish',
    message: 'How should Sentinel deploy fixes?',
    hint: 'You can change this per-repo in config/repo-configs/',
    choices: [
      {
        title: 'Open a PR for each fix (AUTO_PUBLISH=false) — recommended',
        description: 'Sentinel pushes to a branch and opens a GitHub PR. You review and merge.',
        value: false,
      },
      {
        title: 'Push directly to main (AUTO_PUBLISH=true) — fully autonomous',
        description: 'Sentinel commits and pushes fixes straight to your main branch.',
        value: true,
      },
    ],
  }, { onCancel: () => process.exit(0) });
  if (autoPublish) {
    warn('AUTO_PUBLISH=true: fixes push directly to main. Ensure CI blocks bad pushes.');
  }
  // --- GitHub token (required for PR mode, optional otherwise) ---------
  const workspaceProps = path.join(workspace, 'sentinel.properties');
  const existingToken = fs.existsSync(workspaceProps)
    ? (fs.readFileSync(workspaceProps, 'utf8').match(/^GITHUB_TOKEN\s*=\s*(.+)$/m) || [])[1]?.trim()
    : '';
  if (!autoPublish) {
    console.log('');
    console.log(chalk.bold(' GitHub Personal Access Token (classic) — required for opening PRs'));
    console.log(chalk.cyan(' github.com/settings/tokens/new → Tokens (classic)'));
    console.log(chalk.cyan(' Note: "Expiration → No expiration" Scope: ✓ repo'));
    console.log('');
  }
  const { githubToken } = await prompts({
    // type null skips the prompt entirely (auto-publish + token on file).
    type: (!autoPublish || !existingToken) ? 'password' : null,
    name: 'githubToken',
    message: existingToken
      ? 'GitHub token (press Enter to keep current)'
      : autoPublish
        ? 'GitHub token (classic, repo scope) — optional, press Enter to skip'
        : 'GitHub token (classic, repo scope)',
    validate: v => {
      if (!autoPublish && !v && !existingToken) return 'Token is required for PR mode';
      if (v && !v.startsWith('ghp_') && !v.startsWith('github_pat_')) return 'Should start with ghp_ or github_pat_';
      return true;
    },
  }, { onCancel: () => process.exit(0) });
  const effectiveToken = githubToken || existingToken || '';
  if (effectiveToken && fs.existsSync(workspaceProps)) {
    // Replace an existing (possibly commented) GITHUB_TOKEN line, or append.
    let props = fs.readFileSync(workspaceProps, 'utf8');
    if (/^#?\s*GITHUB_TOKEN\s*=/m.test(props))
      props = props.replace(/^#?\s*GITHUB_TOKEN\s*=.*/m, `GITHUB_TOKEN=${effectiveToken}`);
    else
      props = props.trimEnd() + `\nGITHUB_TOKEN=${effectiveToken}\n`;
    fs.writeFileSync(workspaceProps, props);
    ok('GITHUB_TOKEN saved to workspace sentinel.properties');
  } else if (effectiveToken) {
    info('GITHUB_TOKEN will be written when project files are created');
  }
  // --- Preview and confirmation ----------------------------------------
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  if (discovered.length > 0) {
    info(` Using ${discovered.length} repo-config(s) from ${repoSlug}`);
  } else {
    info(` config/repo-configs/${repoSlug}.properties`);
  }
  info(` AUTO_PUBLISH=${autoPublish} (applies to all repos without an explicit setting)`);
  info(' init.sh, start.sh, stop.sh');
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}"?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }
  // A non-git dir with that name is someone else's data — refuse to touch it.
  if (fs.existsSync(projectDir) && !fs.existsSync(path.join(projectDir, '.git'))) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }
  // --- Scaffold --------------------------------------------------------
  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  if (discovered.length > 0) {
    // Repo already carries its configs — just add the run scripts.
    generateProjectScripts(projectDir, codeDir, pythonBin);
    ok(`Project "${name}" ready at ${projectDir}`);
    printNextSteps(projectDir, autoPublish);
    await offerToStart(projectDir);
  } else {
    // No configs found — scaffold the example project and write a single
    // repo-config pointing back at the source repo.
    writeExampleProject(projectDir, codeDir, pythonBin);
    const repoDir = path.join(projectDir, 'config', 'repo-configs');
    writePropertiesFile(path.join(repoDir, `${repoSlug}.properties`), {
      REPO_NAME: repoSlug,
      REPO_URL: gitUrl,
      BRANCH: 'main',
      AUTO_PUBLISH: autoPublish ? 'true' : 'false',
    });
    const example = path.join(repoDir, '_example.properties');
    if (fs.existsSync(example)) fs.removeSync(example);
    generateWorkspaceScripts(workspace, {}, {}, {}, effectiveToken);
    ok(`Project "${name}" created at ${projectDir}`);
    printNextSteps(projectDir, autoPublish);
    await offerToStart(projectDir);
  }
}
|
|
431
|
+
/**
 * Interactive flow: scaffold a fresh example project by name only.
 * Prompts for the name, previews what will be written, asks for
 * confirmation, then writes the example config tree and run scripts.
 * Exits the process on prompt cancel or name collision.
 *
 * @param {string} [nameArg] - suggested project name
 * @param {string} workspace - workspace root directory
 */
async function addFromName(nameArg, workspace) {
  const { name } = await prompts([{
    type: 'text',
    name: 'name',
    message: 'Project name',
    initial: nameArg || 'my-project',
    validate: v => VALID_NAME.test(v) || 'Use letters, numbers, hyphens only',
  }], { onCancel: () => process.exit(0) });
  const projectDir = path.join(workspace, name);
  // Show what will be written before touching the filesystem.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  const preview = [
    ' config/sentinel.properties',
    ' config/repo-configs/_example.properties',
    ' config/log-configs/_example.properties',
    ' init.sh, start.sh, stop.sh',
  ];
  for (const line of preview) info(line);
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}"?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) {
    info('Aborted.');
    return;
  }
  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }
  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  writeExampleProject(projectDir, codeDir, pythonBin);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
463
|
+
/**
 * Interactive flow: create a project from a local JSON spec file
 * ({ name, repos, log_sources?, sentinel? }). Parses and validates the
 * spec, previews what will be written, confirms, then scaffolds the
 * example project and replaces its template configs with the spec's.
 * Exits the process on parse/validation failure or name collision.
 *
 * @param {string} jsonPath - path to the spec file
 * @param {string} workspace - workspace root directory
 */
async function addFromJson(jsonPath, workspace) {
  step(`Reading ${jsonPath}`);
  let obj;
  try {
    // Covers both unreadable files and malformed JSON.
    obj = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
  } catch (e) {
    console.error(chalk.red(` ✖ Cannot parse ${jsonPath}: ${e.message}`));
    process.exit(1);
  }
  const errors = validateProjectJson(obj);
  if (errors.length) {
    console.error(chalk.red(' ✖ Invalid project JSON:'));
    errors.forEach(e => console.error(chalk.red(` - ${e}`)));
    process.exit(1);
  }
  ok('JSON is valid');
  const { name } = obj;
  const projectDir = path.join(workspace, name);
  // Preview every file the spec will produce before confirming.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  (obj.repos || []).forEach(r => info(` config/repo-configs/${r.name}.properties (${r.REPO_URL})`));
  (obj.log_sources || []).forEach(s => info(` config/log-configs/${s.name}.properties (${s.SOURCE_TYPE})`));
  if (obj.sentinel) {
    Object.entries(obj.sentinel).forEach(([k, v]) => info(` sentinel.properties: ${k}=${v}`));
  }
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}" from ${path.basename(jsonPath)}?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }
  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }
  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  // Scaffold the example project, drop its placeholder configs, then
  // write the configs described by the spec.
  writeExampleProject(projectDir, codeDir, pythonBin);
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  if (fs.existsSync(path.join(repoDir, '_example.properties'))) fs.removeSync(path.join(repoDir, '_example.properties'));
  if (fs.existsSync(path.join(logDir, '_example.properties'))) fs.removeSync(path.join(logDir, '_example.properties'));
  applyJsonToProject(projectDir, obj);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
509
|
+
/**
 * Interactive flow: create a project from a JSON spec fetched over
 * HTTP(S). Identical to addFromJson after the fetch/parse stage.
 * NOTE(review): the validate → preview → confirm → scaffold tail
 * duplicates addFromJson almost line-for-line; a shared helper would
 * keep the two flows in sync — left unchanged here.
 *
 * @param {string} url - http(s) URL serving the JSON spec
 * @param {string} workspace - workspace root directory
 */
async function addFromUrl(url, workspace) {
  step(`Fetching ${url}`);
  let raw;
  try {
    raw = fetchUrl(url);
    // fetchUrl returns a string on the curl path and a Promise on the
    // node-http fallback path — normalize both to a string.
    if (raw && typeof raw.then === 'function') raw = await raw;
  } catch (e) {
    console.error(chalk.red(` ✖ Cannot fetch ${url}: ${e.message}`));
    process.exit(1);
  }
  let obj;
  try {
    obj = JSON.parse(raw);
  } catch (e) {
    console.error(chalk.red(` ✖ Response is not valid JSON: ${e.message}`));
    process.exit(1);
  }
  const errors = validateProjectJson(obj);
  if (errors.length) {
    console.error(chalk.red(' ✖ Invalid project JSON at URL:'));
    errors.forEach(e => console.error(chalk.red(` - ${e}`)));
    process.exit(1);
  }
  ok('JSON is valid');
  const { name } = obj;
  const projectDir = path.join(workspace, name);
  // Preview every file the spec will produce before confirming.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  (obj.repos || []).forEach(r => info(` config/repo-configs/${r.name}.properties (${r.REPO_URL})`));
  (obj.log_sources || []).forEach(s => info(` config/log-configs/${s.name}.properties (${s.SOURCE_TYPE})`));
  if (obj.sentinel) {
    Object.entries(obj.sentinel).forEach(([k, v]) => info(` sentinel.properties: ${k}=${v}`));
  }
  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}" from ${url}?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }
  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }
  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  // Scaffold the example project, drop its placeholder configs, then
  // write the configs described by the spec.
  writeExampleProject(projectDir, codeDir, pythonBin);
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  if (fs.existsSync(path.join(repoDir, '_example.properties'))) fs.removeSync(path.join(repoDir, '_example.properties'));
  if (fs.existsSync(path.join(logDir, '_example.properties'))) fs.removeSync(path.join(logDir, '_example.properties'));
  applyJsonToProject(projectDir, obj);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
563
|
+
/**
 * Print the post-creation summary: config location, start script, log
 * path, and a tail command.
 *
 * @param {string} projectDir - project root
 * @param {boolean} [autoPublish] - true → warn about direct pushes to
 *   main; false → note the PR-based flow; undefined → no mode line.
 */
function printNextSteps(projectDir, autoPublish) {
  const logFile = path.join(projectDir, 'logs', 'sentinel.log');
  const mode = autoPublish === true
    ? chalk.yellow('\n ⚠ Fixes push directly to main — ensure CI blocks bad pushes')
    : autoPublish === false
      ? chalk.cyan('\n → Sentinel opens a GitHub PR for each fix — review and merge at github.com')
      : '';
  console.log(`
Config: ${chalk.cyan(path.join(projectDir, 'config', ''))}
Start: ${chalk.cyan(path.join(projectDir, 'start.sh'))}${mode}
Logs: ${chalk.cyan(logFile)}
${chalk.gray(`tail -f ${logFile}`)}
`);
}
|
|
577
|
+
/**
 * Offer to launch the project's start.sh right away. Does nothing when
 * the script is missing; cancelling the prompt is treated as "no".
 *
 * @param {string} projectDir - project root containing start.sh
 */
async function offerToStart(projectDir) {
  const startSh = path.join(projectDir, 'start.sh');
  if (!fs.existsSync(startSh)) return;
  const { startNow } = await prompts({
    type: 'confirm', name: 'startNow',
    message: 'Start Sentinel now?', initial: true,
  }, { onCancel: () => {} });
  if (!startNow) return;
  const { status } = spawnSync('bash', [startSh], { stdio: 'inherit', env: gitEnv() });
  if (status !== 0) return;
  const logFile = path.join(projectDir, 'logs', 'sentinel.log');
  ok('Sentinel started');
  info(`Logs: tail -f ${logFile}`);
}
|
|
592
|
+
module.exports = async function add(arg) {
|
|
593
|
+
const type = detectInputType(arg);
|
|
594
|
+
const workspace = await resolveWorkspace();
|
|
595
|
+
if (type === 'git') return addFromGit(arg, workspace);
|
|
596
|
+
if (type === 'url') return addFromUrl(arg, workspace);
|
|
597
|
+
if (type === 'json') return addFromJson(arg, workspace);
|
|
598
|
+
return addFromName(arg, workspace);
|
|
599
|
+
};
|
package/package.json
CHANGED
package/python/sentinel/main.py
CHANGED
|
@@ -716,10 +716,8 @@ def _setup_workspace_log() -> None:
|
|
|
716
716
|
fmt = logging.Formatter("%(asctime)s %(levelname)-7s %(name)s — %(message)s")
|
|
717
717
|
handler = logging.FileHandler(workspace_log, mode="w", encoding="utf-8")
|
|
718
718
|
handler.setFormatter(fmt)
|
|
719
|
-
#
|
|
720
|
-
|
|
721
|
-
for name in ("sentinel", "sentinel.sentinel_boss", "sentinel.slack_bot"):
|
|
722
|
-
logging.getLogger(name).addHandler(handler)
|
|
719
|
+
# Add to root sentinel logger only — child loggers propagate up naturally
|
|
720
|
+
logging.getLogger("sentinel").addHandler(handler)
|
|
723
721
|
except Exception as e:
|
|
724
722
|
logger.warning("Could not open workspace log %s: %s", workspace_log, e)
|
|
725
723
|
|