@misterhuydo/sentinel 1.3.9 → 1.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cairn/minify-map.json +0 -6
- package/.cairn/session.json +2 -2
- package/lib/add.js +43 -26
- package/package.json +1 -1
- package/python/sentinel/config_loader.py +5 -0
- package/templates/log-configs/_example.properties +12 -0
- package/templates/repo-configs/_example.properties +6 -0
- package/templates/sentinel.properties +4 -0
- package/templates/workspace-sentinel.properties +11 -0
- package/.cairn/views/fc4a1a_add.js +0 -599
package/.cairn/minify-map.json
CHANGED
|
@@ -4,11 +4,5 @@
|
|
|
4
4
|
"state": "compressed",
|
|
5
5
|
"minifiedAt": 1774252515044.4768,
|
|
6
6
|
"readCount": 1
|
|
7
|
-
},
|
|
8
|
-
"J:\\Projects\\Sentinel\\cli\\lib\\add.js": {
|
|
9
|
-
"tempPath": "J:\\Projects\\Sentinel\\cli\\.cairn\\views\\fc4a1a_add.js",
|
|
10
|
-
"state": "compressed",
|
|
11
|
-
"minifiedAt": 1774333679398.312,
|
|
12
|
-
"readCount": 1
|
|
13
7
|
}
|
|
14
8
|
}
|
package/.cairn/session.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
|
-
"message": "Auto-checkpoint at 2026-03-
|
|
3
|
-
"checkpoint_at": "2026-03-
|
|
2
|
+
"message": "Auto-checkpoint at 2026-03-24T08:09:22.572Z",
|
|
3
|
+
"checkpoint_at": "2026-03-24T08:09:22.573Z",
|
|
4
4
|
"active_files": [],
|
|
5
5
|
"notes": [],
|
|
6
6
|
"mtime_snapshot": {}
|
package/lib/add.js
CHANGED
|
@@ -133,11 +133,20 @@ function ensureKnownHosts() {
|
|
|
133
133
|
if (r.stdout) fs.appendFileSync(knownHosts, r.stdout);
|
|
134
134
|
}
|
|
135
135
|
|
|
136
|
-
function generateDeployKey(repoSlug) {
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
136
|
+
function generateDeployKey(repoSlug, keyDir) {
|
|
137
|
+
// keyDir: project workspace -> key stored there, gitignored, never committed to config repo
|
|
138
|
+
// omitted -> ~/.ssh/ for the bootstrap config-repo key (needs SSH config for git pull)
|
|
139
|
+
const useProjectDir = !!keyDir;
|
|
140
|
+
const dir = keyDir || path.join(os.homedir(), '.ssh');
|
|
141
|
+
const keyFile = path.join(dir, `${repoSlug}.key`);
|
|
142
|
+
|
|
143
|
+
if (useProjectDir) {
|
|
144
|
+
fs.ensureDirSync(dir);
|
|
145
|
+
} else {
|
|
146
|
+
const sshDir = path.join(os.homedir(), '.ssh');
|
|
147
|
+
fs.ensureDirSync(sshDir);
|
|
148
|
+
fs.chmodSync(sshDir, 0o700);
|
|
149
|
+
}
|
|
141
150
|
|
|
142
151
|
if (fs.existsSync(keyFile)) {
|
|
143
152
|
info(`Using existing key: ${keyFile}`);
|
|
@@ -147,17 +156,21 @@ function generateDeployKey(repoSlug) {
|
|
|
147
156
|
ok(`Deploy key generated: ${keyFile}`);
|
|
148
157
|
}
|
|
149
158
|
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
159
|
+
// Only update ~/.ssh/config for the bootstrap key — managed repo keys are picked up
|
|
160
|
+
// via GIT_SSH_COMMAND directly, so no SSH config alias is needed for them
|
|
161
|
+
if (!useProjectDir) {
|
|
162
|
+
const configFile = path.join(os.homedir(), '.ssh', 'config');
|
|
163
|
+
const sshHost = `github-${repoSlug}`;
|
|
164
|
+
const existing = fs.existsSync(configFile) ? fs.readFileSync(configFile, 'utf8') : '';
|
|
165
|
+
if (!existing.includes(`Host ${sshHost}`)) {
|
|
166
|
+
fs.appendFileSync(configFile,
|
|
167
|
+
`\nHost ${sshHost}\n HostName github.com\n User git\n IdentityFile ${keyFile}\n IdentitiesOnly yes\n`);
|
|
168
|
+
fs.chmodSync(configFile, 0o600);
|
|
169
|
+
ok('SSH config updated');
|
|
170
|
+
}
|
|
158
171
|
}
|
|
159
172
|
|
|
160
|
-
return { keyFile
|
|
173
|
+
return { keyFile };
|
|
161
174
|
}
|
|
162
175
|
|
|
163
176
|
function printDeployKeyInstructions(orgRepo, keyFile) {
|
|
@@ -295,6 +308,18 @@ async function addFromGit(gitUrl, workspace) {
|
|
|
295
308
|
|
|
296
309
|
const discovered = discoverReposFromClone(projectDir);
|
|
297
310
|
|
|
311
|
+
// Ensure SSH keys are gitignored — they must never be committed to the config repo
|
|
312
|
+
const gitignorePath = path.join(projectDir, '.gitignore');
|
|
313
|
+
const giContent = fs.existsSync(gitignorePath) ? fs.readFileSync(gitignorePath, 'utf8') : '';
|
|
314
|
+
const giLines = [];
|
|
315
|
+
if (!giContent.includes('*.key')) giLines.push('*.key');
|
|
316
|
+
if (!giContent.includes('*.pub')) giLines.push('*.pub');
|
|
317
|
+
if (giLines.length) {
|
|
318
|
+
fs.appendFileSync(gitignorePath, (giContent.endsWith('\n') ? '' : '\n') + giLines.join('\n') + '\n');
|
|
319
|
+
ok('.gitignore updated: *.key and *.pub excluded from git');
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
|
|
298
323
|
// Classify each discovered repo
|
|
299
324
|
const privateRepos = [];
|
|
300
325
|
const publicRepos = [];
|
|
@@ -318,7 +343,7 @@ async function addFromGit(gitUrl, workspace) {
|
|
|
318
343
|
step(`[3/3] Deploy keys needed for ${privateRepos.length} private repo(s)`);
|
|
319
344
|
|
|
320
345
|
for (const r of privateRepos) {
|
|
321
|
-
const { keyFile: rKey } = generateDeployKey(r.slug);
|
|
346
|
+
const { keyFile: rKey } = generateDeployKey(r.slug, projectDir);
|
|
322
347
|
r.keyFile = rKey;
|
|
323
348
|
const rOrgRepo = gitUrlToOrgRepo(r.url);
|
|
324
349
|
printDeployKeyInstructions(rOrgRepo, rKey);
|
|
@@ -353,16 +378,10 @@ async function addFromGit(gitUrl, workspace) {
|
|
|
353
378
|
ok(`${r.slug}: public, no key needed`);
|
|
354
379
|
}
|
|
355
380
|
|
|
356
|
-
//
|
|
381
|
+
// Keys stored at <projectDir>/<slug>.key — config_loader.py auto-discovers them.
|
|
382
|
+
// SSH_KEY_FILE is NOT written to git-tracked .properties files.
|
|
357
383
|
for (const r of privateRepos) {
|
|
358
|
-
|
|
359
|
-
if (/^#?\s*SSH_KEY_FILE\s*=/m.test(props)) {
|
|
360
|
-
props = props.replace(/^#?\s*SSH_KEY_FILE\s*=.*/m, `SSH_KEY_FILE=${r.keyFile}`);
|
|
361
|
-
} else {
|
|
362
|
-
props = props.trimEnd() + `\nSSH_KEY_FILE=${r.keyFile}\n`;
|
|
363
|
-
}
|
|
364
|
-
fs.writeFileSync(r.propsPath, props);
|
|
365
|
-
info(`SSH_KEY_FILE written to config/repo-configs/${r.file}`);
|
|
384
|
+
info(`Key stored at ${r.keyFile} (auto-discovered, not committed to git)`);
|
|
366
385
|
}
|
|
367
386
|
} else if (discovered.length > 0) {
|
|
368
387
|
step('[3/3] All repos are public — no deploy keys needed');
|
|
@@ -475,7 +494,6 @@ async function addFromGit(gitUrl, workspace) {
|
|
|
475
494
|
REPO_URL: gitUrl,
|
|
476
495
|
BRANCH: 'main',
|
|
477
496
|
AUTO_PUBLISH: autoPublish ? 'true' : 'false',
|
|
478
|
-
SSH_KEY_FILE: keyFile,
|
|
479
497
|
CAIRN_MCP_ENABLED: 'true',
|
|
480
498
|
});
|
|
481
499
|
}
|
|
@@ -491,7 +509,6 @@ async function addFromGit(gitUrl, workspace) {
|
|
|
491
509
|
REPO_URL: gitUrl,
|
|
492
510
|
BRANCH: 'main',
|
|
493
511
|
AUTO_PUBLISH: autoPublish ? 'true' : 'false',
|
|
494
|
-
SSH_KEY_FILE: keyFile,
|
|
495
512
|
CAIRN_MCP_ENABLED: 'true',
|
|
496
513
|
});
|
|
497
514
|
const example = path.join(repoDir, '_example.properties');
|
package/package.json
CHANGED
|
@@ -226,6 +226,11 @@ class ConfigLoader:
|
|
|
226
226
|
r.cicd_token = d.get("CICD_TOKEN", "")
|
|
227
227
|
r.health_url = d.get("HEALTH_URL", "")
|
|
228
228
|
r.ssh_key_file = os.path.expanduser(d.get("SSH_KEY_FILE", ""))
|
|
229
|
+
# Auto-discover key in project dir if not set in properties
|
|
230
|
+
if not r.ssh_key_file:
|
|
231
|
+
auto_key = Path(self.config_dir).parent / f"{r.repo_name}.key"
|
|
232
|
+
if auto_key.exists():
|
|
233
|
+
r.ssh_key_file = str(auto_key)
|
|
229
234
|
self.repos[r.repo_name] = r
|
|
230
235
|
|
|
231
236
|
def _register_sighup(self):
|
|
@@ -38,6 +38,18 @@ GREP_FILTER=WARN|ERROR
|
|
|
38
38
|
# Drop lines matching this regex (grep -iv)
|
|
39
39
|
GREP_EXCLUDE=SSLTool|CommandValidate|hystrix
|
|
40
40
|
|
|
41
|
+
# ── Routing ───────────────────────────────────────────────────────────────────
|
|
42
|
+
|
|
43
|
+
# Which repo-config to route errors from this log source to.
|
|
44
|
+
# The filename stem is the default match (e.g. "MyService.properties" → "MyService" repo-config).
|
|
45
|
+
# Set TARGET_REPO to override: use the exact repo-config filename stem, or "auto" (future).
|
|
46
|
+
# TARGET_REPO=MyService
|
|
47
|
+
|
|
48
|
+
# ── Log sync (optional) ───────────────────────────────────────────────────────
|
|
49
|
+
|
|
50
|
+
# Set to false to disable background rsync for this source (default: true)
|
|
51
|
+
# SYNC_ENABLED=true
|
|
52
|
+
|
|
41
53
|
# ── Cloudflare source (SOURCE_TYPE=cloudflare) ────────────────────────────────
|
|
42
54
|
|
|
43
55
|
# Full URL of the Cloudflare Worker log endpoint
|
|
@@ -19,6 +19,12 @@ BRANCH=main
|
|
|
19
19
|
# true → Sentinel pushes directly to BRANCH and triggers CI/CD
|
|
20
20
|
AUTO_PUBLISH=false
|
|
21
21
|
|
|
22
|
+
# ── Health check (optional) ───────────────────────────────────────────────────
|
|
23
|
+
|
|
24
|
+
# HTTP endpoint returning {"Status": "true"} when healthy.
|
|
25
|
+
# Sentinel polls this after each fix to detect 502/503 before the next log cycle.
|
|
26
|
+
# HEALTH_URL=https://myservice.example.com/health
|
|
27
|
+
|
|
22
28
|
# ── CI/CD (optional) ──────────────────────────────────────────────────────────
|
|
23
29
|
# Leave blank if this repo has no deploy pipeline (e.g. shared libraries)
|
|
24
30
|
|
|
@@ -18,6 +18,10 @@ REPORT_INTERVAL_HOURS=1
|
|
|
18
18
|
# Uncomment here only if this project needs a different token.
|
|
19
19
|
# GITHUB_TOKEN=<github-pat>
|
|
20
20
|
|
|
21
|
+
# Fix confirmation: hours of silence after a fix marker appears in production logs before
|
|
22
|
+
# the fix is declared confirmed. Increase for services that deploy infrequently.
|
|
23
|
+
# MARKER_CONFIRM_HOURS=24
|
|
24
|
+
|
|
21
25
|
# State DB and workspace paths (relative to this project dir)
|
|
22
26
|
STATE_DB=./sentinel.db
|
|
23
27
|
WORKSPACE_DIR=./workspace
|
|
@@ -64,6 +64,17 @@ UPGRADE_CHECK_HOURS=6
|
|
|
64
64
|
# Config repo polling: if the project dir is a git repo, pull for config changes every N seconds
|
|
65
65
|
CONFIG_POLL_INTERVAL=60
|
|
66
66
|
|
|
67
|
+
# Fix confirmation: hours of silence after a fix marker appears in production logs before
|
|
68
|
+
# the fix is declared confirmed. Increase for services that deploy infrequently.
|
|
69
|
+
MARKER_CONFIRM_HOURS=24
|
|
70
|
+
|
|
71
|
+
# Log sync: rsync remote logs to workspace/synced/ for full searchable history
|
|
72
|
+
# Set SYNC_ENABLED=false to disable entirely
|
|
73
|
+
SYNC_ENABLED=true
|
|
74
|
+
SYNC_INTERVAL_SECONDS=300
|
|
75
|
+
SYNC_RETENTION_DAYS=30
|
|
76
|
+
SYNC_MAX_FILE_MB=200
|
|
77
|
+
|
|
67
78
|
# Slack Bot (Sentinel Boss) — shared across all projects
|
|
68
79
|
# SLACK_BOT_TOKEN=xoxb-...
|
|
69
80
|
# SLACK_APP_TOKEN=xapp-...
|
|
@@ -1,599 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
const fs = require('fs-extra');
|
|
3
|
-
const path = require('path');
|
|
4
|
-
const os = require('os');
|
|
5
|
-
const { execSync, spawnSync } = require('child_process');
|
|
6
|
-
const prompts = require('prompts');
|
|
7
|
-
const chalk = require('chalk');
|
|
8
|
-
const { writeExampleProject, generateWorkspaceScripts, generateProjectScripts } = require('./generate');
|
|
9
|
-
const ok = msg => console.log(chalk.green(' ✔'), msg);
|
|
10
|
-
const info = msg => console.log(chalk.cyan(' →'), msg);
|
|
11
|
-
const warn = msg => console.log(chalk.yellow(' ⚠'), msg);
|
|
12
|
-
const step = msg => console.log('\n' + chalk.bold.white(msg));
|
|
13
|
-
function detectInputType(arg) {
|
|
14
|
-
if (!arg) return 'name';
|
|
15
|
-
if (/^git@/.test(arg) || (/^https?:\/\/github\.com\//.test(arg) && arg.endsWith('.git'))) return 'git';
|
|
16
|
-
if (/^https?:\/\//.test(arg)) return 'url';
|
|
17
|
-
if (arg.toLowerCase().endsWith('.json') || arg.includes('/') || arg.includes('\\')) return 'json';
|
|
18
|
-
return 'name';
|
|
19
|
-
}
|
|
20
|
-
async function resolveWorkspace(initial) {
|
|
21
|
-
const ans = await prompts([{
|
|
22
|
-
type: 'text',
|
|
23
|
-
name: 'workspace',
|
|
24
|
-
message: 'Workspace directory',
|
|
25
|
-
initial: initial || path.join(os.homedir(), 'sentinel'),
|
|
26
|
-
format: v => v.replace(/^~/, os.homedir()),
|
|
27
|
-
}], { onCancel: () => process.exit(0) });
|
|
28
|
-
return ans.workspace;
|
|
29
|
-
}
|
|
30
|
-
function requireCodeDir(workspace) {
|
|
31
|
-
const codeDir = path.join(workspace, 'code');
|
|
32
|
-
if (!fs.existsSync(codeDir)) {
|
|
33
|
-
console.error(chalk.red(`Sentinel code not found at ${codeDir}`));
|
|
34
|
-
console.error(chalk.red('Run "sentinel init" first.'));
|
|
35
|
-
process.exit(1);
|
|
36
|
-
}
|
|
37
|
-
return codeDir;
|
|
38
|
-
}
|
|
39
|
-
const VALID_NAME = /^[a-z0-9_-]+$/i;
|
|
40
|
-
const GITHUB_URL = /^(git@github\.com:|https:\/\/github\.com\/).+\.git$/;
|
|
41
|
-
function validateProjectJson(obj) {
|
|
42
|
-
const errors = [];
|
|
43
|
-
if (!obj.name || !VALID_NAME.test(obj.name)) {
|
|
44
|
-
errors.push('name must be letters, numbers, hyphens only');
|
|
45
|
-
}
|
|
46
|
-
if (!Array.isArray(obj.repos) || obj.repos.length === 0) {
|
|
47
|
-
errors.push('repos array is required and must be non-empty');
|
|
48
|
-
} else {
|
|
49
|
-
obj.repos.forEach((r, i) => {
|
|
50
|
-
if (!r.REPO_URL || !GITHUB_URL.test(r.REPO_URL)) {
|
|
51
|
-
errors.push(`repos[${i}].REPO_URL must be a valid GitHub URL`);
|
|
52
|
-
}
|
|
53
|
-
if (!r.name) errors.push(`repos[${i}].name is required`);
|
|
54
|
-
});
|
|
55
|
-
}
|
|
56
|
-
return errors;
|
|
57
|
-
}
|
|
58
|
-
function writePropertiesFile(filePath, obj) {
|
|
59
|
-
const lines = Object.entries(obj).map(([k, v]) => `${k}=${v}`);
|
|
60
|
-
fs.writeFileSync(filePath, lines.join('\n') + '\n');
|
|
61
|
-
}
|
|
62
|
-
function applyJsonToProject(projectDir, obj) {
|
|
63
|
-
const configDir = path.join(projectDir, 'config');
|
|
64
|
-
const repoDir = path.join(projectDir, 'config', 'repo-configs');
|
|
65
|
-
const logDir = path.join(projectDir, 'config', 'log-configs');
|
|
66
|
-
fs.ensureDirSync(repoDir);
|
|
67
|
-
fs.ensureDirSync(logDir);
|
|
68
|
-
if (obj.sentinel) {
|
|
69
|
-
const propsPath = path.join(configDir, 'sentinel.properties');
|
|
70
|
-
const existing = fs.existsSync(propsPath) ? fs.readFileSync(propsPath, 'utf8') : '';
|
|
71
|
-
let updated = existing;
|
|
72
|
-
Object.entries(obj.sentinel).forEach(([k, v]) => {
|
|
73
|
-
const re = new RegExp(`^#?\\s*${k}\\s*=.*$`, 'm');
|
|
74
|
-
if (re.test(updated)) {
|
|
75
|
-
updated = updated.replace(re, `${k}=${v}`);
|
|
76
|
-
} else {
|
|
77
|
-
updated += `\n${k}=${v}`;
|
|
78
|
-
}
|
|
79
|
-
});
|
|
80
|
-
fs.writeFileSync(propsPath, updated);
|
|
81
|
-
ok('Updated sentinel.properties');
|
|
82
|
-
}
|
|
83
|
-
if (Array.isArray(obj.repos)) {
|
|
84
|
-
obj.repos.forEach(repo => {
|
|
85
|
-
const { name, ...props } = repo;
|
|
86
|
-
writePropertiesFile(path.join(repoDir, `${name}.properties`), props);
|
|
87
|
-
ok(`Created repo-configs/${name}.properties`);
|
|
88
|
-
});
|
|
89
|
-
}
|
|
90
|
-
if (Array.isArray(obj.log_sources)) {
|
|
91
|
-
obj.log_sources.forEach(src => {
|
|
92
|
-
const { name, ...props } = src;
|
|
93
|
-
writePropertiesFile(path.join(logDir, `${name}.properties`), props);
|
|
94
|
-
ok(`Created log-configs/${name}.properties`);
|
|
95
|
-
});
|
|
96
|
-
}
|
|
97
|
-
}
|
|
98
|
-
function fetchUrl(url) {
|
|
99
|
-
try {
|
|
100
|
-
const result = spawnSync('curl', ['-fsSL', '--max-time', '10', url], { encoding: 'utf8' });
|
|
101
|
-
if (result.status !== 0) throw new Error(result.stderr || 'curl failed');
|
|
102
|
-
return result.stdout;
|
|
103
|
-
} catch (_) {
|
|
104
|
-
const https = require('https');
|
|
105
|
-
const http = require('http');
|
|
106
|
-
const lib = url.startsWith('https') ? https : http;
|
|
107
|
-
return new Promise((resolve, reject) => {
|
|
108
|
-
lib.get(url, res => {
|
|
109
|
-
let data = '';
|
|
110
|
-
res.on('data', c => (data += c));
|
|
111
|
-
res.on('end', () => resolve(data));
|
|
112
|
-
}).on('error', reject);
|
|
113
|
-
});
|
|
114
|
-
}
|
|
115
|
-
}
|
|
116
|
-
function ensureKnownHosts() {
|
|
117
|
-
const knownHosts = path.join(os.homedir(), '.ssh', 'known_hosts');
|
|
118
|
-
const content = fs.existsSync(knownHosts) ? fs.readFileSync(knownHosts, 'utf8') : '';
|
|
119
|
-
if (content.includes('github.com')) return;
|
|
120
|
-
info('Adding GitHub to known_hosts…');
|
|
121
|
-
const r = spawnSync('ssh-keyscan', ['github.com'], { encoding: 'utf8', timeout: 10000, env: gitEnv() });
|
|
122
|
-
if (r.stdout) fs.appendFileSync(knownHosts, r.stdout);
|
|
123
|
-
}
|
|
124
|
-
function generateDeployKey(repoSlug) {
|
|
125
|
-
const sshDir = path.join(os.homedir(), '.ssh');
|
|
126
|
-
const keyFile = path.join(sshDir, `${repoSlug}.key`);
|
|
127
|
-
fs.ensureDirSync(sshDir);
|
|
128
|
-
fs.chmodSync(sshDir, 0o700);
|
|
129
|
-
if (fs.existsSync(keyFile)) {
|
|
130
|
-
info(`Using existing key: ${keyFile}`);
|
|
131
|
-
} else {
|
|
132
|
-
spawnSync('ssh-keygen', ['-t', 'ed25519', '-C', `sentinel@${repoSlug}`, '-f', keyFile, '-N', ''],
|
|
133
|
-
{ stdio: 'inherit' });
|
|
134
|
-
ok(`Deploy key generated: ${keyFile}`);
|
|
135
|
-
}
|
|
136
|
-
const configFile = path.join(sshDir, 'config');
|
|
137
|
-
const sshHost = `github-${repoSlug}`;
|
|
138
|
-
const existing = fs.existsSync(configFile) ? fs.readFileSync(configFile, 'utf8') : '';
|
|
139
|
-
if (!existing.includes(`Host ${sshHost}`)) {
|
|
140
|
-
fs.appendFileSync(configFile,
|
|
141
|
-
`\nHost ${sshHost}\n HostName github.com\n User git\n IdentityFile ${keyFile}\n IdentitiesOnly yes\n`);
|
|
142
|
-
fs.chmodSync(configFile, 0o600);
|
|
143
|
-
ok('SSH config updated');
|
|
144
|
-
}
|
|
145
|
-
return { keyFile, sshHost };
|
|
146
|
-
}
|
|
147
|
-
function printDeployKeyInstructions(orgRepo, keyFile) {
|
|
148
|
-
const pubKey = fs.readFileSync(`${keyFile}.pub`, 'utf8').trim();
|
|
149
|
-
const bar = '─'.repeat(70);
|
|
150
|
-
console.log('');
|
|
151
|
-
console.log(chalk.bold.yellow(` ┌${bar}┐`));
|
|
152
|
-
console.log(chalk.bold.yellow(` │`) + chalk.bold(` Add this deploy key to GitHub`) + chalk.bold.yellow(' '.repeat(40) + '│'));
|
|
153
|
-
console.log(chalk.bold.yellow(` │`) + chalk.cyan(` github.com/${orgRepo}`) + chalk.bold.yellow(' '.repeat(Math.max(0, 70 - 14 - orgRepo.length)) + '│'));
|
|
154
|
-
console.log(chalk.bold.yellow(` │`) + ` Settings → Deploy keys → Add deploy key` + chalk.bold.yellow(' '.repeat(29) + '│'));
|
|
155
|
-
console.log(chalk.bold.yellow(` │`) + ` Allow write access: ✓` + chalk.bold.yellow(' '.repeat(47) + '│'));
|
|
156
|
-
console.log(chalk.bold.yellow(` └${bar}┘`));
|
|
157
|
-
console.log('');
|
|
158
|
-
console.log(chalk.green(pubKey));
|
|
159
|
-
console.log('');
|
|
160
|
-
}
|
|
161
|
-
function gitEnv(extra = {}) {
|
|
162
|
-
const PATH = [
|
|
163
|
-
process.env.PATH || '',
|
|
164
|
-
'/usr/bin', '/usr/local/bin', '/bin', '/usr/sbin', '/usr/local/sbin',
|
|
165
|
-
].filter(Boolean).join(':');
|
|
166
|
-
return { ...process.env, PATH, GIT_TERMINAL_PROMPT: '0', ...extra };
|
|
167
|
-
}
|
|
168
|
-
function findBin(name) {
|
|
169
|
-
const candidates = [
|
|
170
|
-
`/usr/bin/${name}`, `/usr/local/bin/${name}`, `/bin/${name}`,
|
|
171
|
-
];
|
|
172
|
-
for (const p of candidates) {
|
|
173
|
-
if (fs.existsSync(p)) return p;
|
|
174
|
-
}
|
|
175
|
-
const r = spawnSync('which', [name], { encoding: 'utf8', env: gitEnv() });
|
|
176
|
-
return (r.stdout || '').trim() || name;
|
|
177
|
-
}
|
|
178
|
-
let _gitBin;
|
|
179
|
-
function gitBin() {
|
|
180
|
-
if (!_gitBin) _gitBin = findBin('git');
|
|
181
|
-
return _gitBin;
|
|
182
|
-
}
|
|
183
|
-
function gitUrlToOrgRepo(gitUrl) {
|
|
184
|
-
return gitUrl
|
|
185
|
-
.replace(/^git@github\.com:/, '')
|
|
186
|
-
.replace(/^https?:\/\/github\.com\//, '')
|
|
187
|
-
.replace(/\.git$/, '');
|
|
188
|
-
}
|
|
189
|
-
function toHttpsUrl(gitUrl) {
|
|
190
|
-
return 'https://github.com/' + gitUrlToOrgRepo(gitUrl) + '.git';
|
|
191
|
-
}
|
|
192
|
-
function isPublicRepo(gitUrl) {
|
|
193
|
-
const r = spawnSync(gitBin(), ['ls-remote', '--heads', toHttpsUrl(gitUrl)], {
|
|
194
|
-
encoding: 'utf8', timeout: 10000, stdio: ['pipe', 'pipe', 'pipe'],
|
|
195
|
-
env: gitEnv(),
|
|
196
|
-
});
|
|
197
|
-
return r.status === 0;
|
|
198
|
-
}
|
|
199
|
-
function validateAccess(repoUrl, keyFile) {
|
|
200
|
-
const extra = keyFile
|
|
201
|
-
? { GIT_SSH_COMMAND: `ssh -i ${keyFile} -o StrictHostKeyChecking=no -o BatchMode=yes` }
|
|
202
|
-
: {};
|
|
203
|
-
const r = spawnSync(gitBin(), ['ls-remote', '--heads', repoUrl], {
|
|
204
|
-
encoding: 'utf8', timeout: 15000, stdio: ['pipe', 'pipe', 'pipe'],
|
|
205
|
-
env: gitEnv(extra),
|
|
206
|
-
});
|
|
207
|
-
return { ok: r.status === 0, stderr: (r.stderr || r.error?.message || '').trim() };
|
|
208
|
-
}
|
|
209
|
-
function discoverReposFromClone(cloneDir) {
|
|
210
|
-
const repoCfgDir = path.join(cloneDir, 'config', 'repo-configs');
|
|
211
|
-
if (!fs.existsSync(repoCfgDir)) return [];
|
|
212
|
-
return fs.readdirSync(repoCfgDir)
|
|
213
|
-
.filter(f => f.endsWith('.properties') && !f.startsWith('_'))
|
|
214
|
-
.map(f => {
|
|
215
|
-
const content = fs.readFileSync(path.join(repoCfgDir, f), 'utf8');
|
|
216
|
-
const match = content.match(/^REPO_URL\s*=\s*(.+)$/m);
|
|
217
|
-
return match ? { file: f, propsPath: path.join(repoCfgDir, f), url: match[1].trim() } : null;
|
|
218
|
-
})
|
|
219
|
-
.filter(Boolean);
|
|
220
|
-
}
|
|
221
|
-
async function addFromGit(gitUrl, workspace) {
|
|
222
|
-
const repoSlug = gitUrl.replace(/\.git$/, '').split(/[:/]/).pop();
|
|
223
|
-
const orgRepo = gitUrlToOrgRepo(gitUrl);
|
|
224
|
-
const { name } = await prompts([{
|
|
225
|
-
type: 'text',
|
|
226
|
-
name: 'name',
|
|
227
|
-
message: 'Project name',
|
|
228
|
-
initial: repoSlug,
|
|
229
|
-
validate: v => VALID_NAME.test(v) || 'Use letters, numbers, hyphens only',
|
|
230
|
-
}], { onCancel: () => process.exit(0) });
|
|
231
|
-
step(`[1/3] Setting up SSH access to ${repoSlug}`);
|
|
232
|
-
ensureKnownHosts();
|
|
233
|
-
const { keyFile } = generateDeployKey(repoSlug);
|
|
234
|
-
printDeployKeyInstructions(orgRepo, keyFile);
|
|
235
|
-
await prompts({
|
|
236
|
-
type: 'text', name: '_', format: () => '',
|
|
237
|
-
message: chalk.bold(`Press Enter once you've added the deploy key to GitHub…`),
|
|
238
|
-
}, { onCancel: () => process.exit(0) });
|
|
239
|
-
const primary = validateAccess(gitUrl, keyFile);
|
|
240
|
-
if (!primary.ok) {
|
|
241
|
-
console.error(chalk.red(' ✖ Cannot reach ' + gitUrl));
|
|
242
|
-
if (primary.stderr) console.error(chalk.red(' ' + primary.stderr));
|
|
243
|
-
console.error(chalk.yellow(' Check the deploy key has write access, then re-run.'));
|
|
244
|
-
process.exit(1);
|
|
245
|
-
}
|
|
246
|
-
ok(`${repoSlug}: reachable`);
|
|
247
|
-
const projectDir = path.join(workspace, name);
|
|
248
|
-
step(`[2/3] Scanning repo-configs in ${repoSlug}…`);
|
|
249
|
-
if (!fs.existsSync(projectDir)) {
|
|
250
|
-
spawnSync(gitBin(), ['clone', '--depth', '1', gitUrl, projectDir], {
|
|
251
|
-
stdio: 'inherit',
|
|
252
|
-
env: gitEnv({ GIT_SSH_COMMAND: `ssh -i ${keyFile} -o StrictHostKeyChecking=no -o BatchMode=yes` }),
|
|
253
|
-
});
|
|
254
|
-
}
|
|
255
|
-
const discovered = discoverReposFromClone(projectDir);
|
|
256
|
-
const privateRepos = [];
|
|
257
|
-
const publicRepos = [];
|
|
258
|
-
if (discovered.length === 0) {
|
|
259
|
-
info('No repo-configs found — project will use example config.');
|
|
260
|
-
} else {
|
|
261
|
-
info(`Found ${discovered.length} repo(s) in config/repo-configs/:`);
|
|
262
|
-
for (const r of discovered) {
|
|
263
|
-
const slug = r.file.replace('.properties', '');
|
|
264
|
-
const pub = isPublicRepo(r.url);
|
|
265
|
-
const tag = pub ? chalk.green('[public]') : chalk.yellow('[private]');
|
|
266
|
-
console.log(` ${tag} ${slug.padEnd(36)} ${r.url}`);
|
|
267
|
-
if (pub) publicRepos.push({ ...r, slug });
|
|
268
|
-
else privateRepos.push({ ...r, slug });
|
|
269
|
-
}
|
|
270
|
-
}
|
|
271
|
-
if (privateRepos.length > 0) {
|
|
272
|
-
step(`[3/3] Deploy keys needed for ${privateRepos.length} private repo(s)`);
|
|
273
|
-
for (const r of privateRepos) {
|
|
274
|
-
const { keyFile: rKey } = generateDeployKey(r.slug);
|
|
275
|
-
r.keyFile = rKey;
|
|
276
|
-
const rOrgRepo = gitUrlToOrgRepo(r.url);
|
|
277
|
-
printDeployKeyInstructions(rOrgRepo, rKey);
|
|
278
|
-
}
|
|
279
|
-
if (publicRepos.length > 0) {
|
|
280
|
-
console.log(chalk.green(' ✔ Public repos (no deploy key needed):'));
|
|
281
|
-
for (const r of publicRepos) {
|
|
282
|
-
console.log(chalk.green(` ${r.slug}`));
|
|
283
|
-
}
|
|
284
|
-
console.log('');
|
|
285
|
-
}
|
|
286
|
-
await prompts({
|
|
287
|
-
type: 'text', name: '_', format: () => '',
|
|
288
|
-
message: chalk.bold(`Press Enter once you've added all ${privateRepos.length} deploy key(s) to GitHub…`),
|
|
289
|
-
}, { onCancel: () => process.exit(0) });
|
|
290
|
-
step('Validating repository access…');
|
|
291
|
-
for (const r of privateRepos) {
|
|
292
|
-
const v = validateAccess(r.url, r.keyFile);
|
|
293
|
-
if (!v.ok) {
|
|
294
|
-
console.error(chalk.red(` ✖ ${r.slug}: cannot reach ${r.url}`));
|
|
295
|
-
if (v.stderr) console.error(chalk.red(' ' + v.stderr));
|
|
296
|
-
console.error(chalk.yellow(' Fix access then re-run sentinel add.'));
|
|
297
|
-
process.exit(1);
|
|
298
|
-
}
|
|
299
|
-
ok(`${r.slug}: reachable`);
|
|
300
|
-
}
|
|
301
|
-
for (const r of publicRepos) {
|
|
302
|
-
ok(`${r.slug}: public, no key needed`);
|
|
303
|
-
}
|
|
304
|
-
for (const r of privateRepos) {
|
|
305
|
-
let props = fs.readFileSync(r.propsPath, 'utf8');
|
|
306
|
-
if (/^#?\s*SSH_KEY_FILE\s*=/m.test(props)) {
|
|
307
|
-
props = props.replace(/^#?\s*SSH_KEY_FILE\s*=.*/m, `SSH_KEY_FILE=${r.keyFile}`);
|
|
308
|
-
} else {
|
|
309
|
-
props = props.trimEnd() + `\nSSH_KEY_FILE=${r.keyFile}\n`;
|
|
310
|
-
}
|
|
311
|
-
fs.writeFileSync(r.propsPath, props);
|
|
312
|
-
info(`SSH_KEY_FILE written to config/repo-configs/${r.file}`);
|
|
313
|
-
}
|
|
314
|
-
} else if (discovered.length > 0) {
|
|
315
|
-
step('[3/3] All repos are public — no deploy keys needed');
|
|
316
|
-
ok('All repos accessible without auth');
|
|
317
|
-
}
|
|
318
|
-
const { autoPublish } = await prompts({
|
|
319
|
-
type: 'select',
|
|
320
|
-
name: 'autoPublish',
|
|
321
|
-
message: 'How should Sentinel deploy fixes?',
|
|
322
|
-
hint: 'You can change this per-repo in config/repo-configs/',
|
|
323
|
-
choices: [
|
|
324
|
-
{
|
|
325
|
-
title: 'Open a PR for each fix (AUTO_PUBLISH=false) — recommended',
|
|
326
|
-
description: 'Sentinel pushes to a branch and opens a GitHub PR. You review and merge.',
|
|
327
|
-
value: false,
|
|
328
|
-
},
|
|
329
|
-
{
|
|
330
|
-
title: 'Push directly to main (AUTO_PUBLISH=true) — fully autonomous',
|
|
331
|
-
description: 'Sentinel commits and pushes fixes straight to your main branch.',
|
|
332
|
-
value: true,
|
|
333
|
-
},
|
|
334
|
-
],
|
|
335
|
-
}, { onCancel: () => process.exit(0) });
|
|
336
|
-
if (autoPublish) {
|
|
337
|
-
warn('AUTO_PUBLISH=true: fixes push directly to main. Ensure CI blocks bad pushes.');
|
|
338
|
-
}
|
|
339
|
-
const workspaceProps = path.join(workspace, 'sentinel.properties');
|
|
340
|
-
const existingToken = fs.existsSync(workspaceProps)
|
|
341
|
-
? (fs.readFileSync(workspaceProps, 'utf8').match(/^GITHUB_TOKEN\s*=\s*(.+)$/m) || [])[1]?.trim()
|
|
342
|
-
: '';
|
|
343
|
-
if (!autoPublish) {
|
|
344
|
-
console.log('');
|
|
345
|
-
console.log(chalk.bold(' GitHub Personal Access Token (classic) — required for opening PRs'));
|
|
346
|
-
console.log(chalk.cyan(' github.com/settings/tokens/new → Tokens (classic)'));
|
|
347
|
-
console.log(chalk.cyan(' Note: "Expiration → No expiration" Scope: ✓ repo'));
|
|
348
|
-
console.log('');
|
|
349
|
-
}
|
|
350
|
-
const { githubToken } = await prompts({
|
|
351
|
-
type: (!autoPublish || !existingToken) ? 'password' : null,
|
|
352
|
-
name: 'githubToken',
|
|
353
|
-
message: existingToken
|
|
354
|
-
? 'GitHub token (press Enter to keep current)'
|
|
355
|
-
: autoPublish
|
|
356
|
-
? 'GitHub token (classic, repo scope) — optional, press Enter to skip'
|
|
357
|
-
: 'GitHub token (classic, repo scope)',
|
|
358
|
-
validate: v => {
|
|
359
|
-
if (!autoPublish && !v && !existingToken) return 'Token is required for PR mode';
|
|
360
|
-
if (v && !v.startsWith('ghp_') && !v.startsWith('github_pat_')) return 'Should start with ghp_ or github_pat_';
|
|
361
|
-
return true;
|
|
362
|
-
},
|
|
363
|
-
}, { onCancel: () => process.exit(0) });
|
|
364
|
-
const effectiveToken = githubToken || existingToken || '';
|
|
365
|
-
if (effectiveToken && fs.existsSync(workspaceProps)) {
|
|
366
|
-
let props = fs.readFileSync(workspaceProps, 'utf8');
|
|
367
|
-
if (/^#?\s*GITHUB_TOKEN\s*=/m.test(props))
|
|
368
|
-
props = props.replace(/^#?\s*GITHUB_TOKEN\s*=.*/m, `GITHUB_TOKEN=${effectiveToken}`);
|
|
369
|
-
else
|
|
370
|
-
props = props.trimEnd() + `\nGITHUB_TOKEN=${effectiveToken}\n`;
|
|
371
|
-
fs.writeFileSync(workspaceProps, props);
|
|
372
|
-
ok('GITHUB_TOKEN saved to workspace sentinel.properties');
|
|
373
|
-
} else if (effectiveToken) {
|
|
374
|
-
info('GITHUB_TOKEN will be written when project files are created');
|
|
375
|
-
}
|
|
376
|
-
step('Dry-run preview');
|
|
377
|
-
info(`Will create: ${projectDir}/`);
|
|
378
|
-
if (discovered.length > 0) {
|
|
379
|
-
info(` Using ${discovered.length} repo-config(s) from ${repoSlug}`);
|
|
380
|
-
} else {
|
|
381
|
-
info(` config/repo-configs/${repoSlug}.properties`);
|
|
382
|
-
}
|
|
383
|
-
info(` AUTO_PUBLISH=${autoPublish} (applies to all repos without an explicit setting)`);
|
|
384
|
-
info(' init.sh, start.sh, stop.sh');
|
|
385
|
-
const { confirm } = await prompts({
|
|
386
|
-
type: 'confirm', name: 'confirm',
|
|
387
|
-
message: `Create project "${name}"?`, initial: true,
|
|
388
|
-
}, { onCancel: () => process.exit(0) });
|
|
389
|
-
if (!confirm) { info('Aborted.'); return; }
|
|
390
|
-
if (fs.existsSync(projectDir) && !fs.existsSync(path.join(projectDir, '.git'))) {
|
|
391
|
-
console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
|
|
392
|
-
process.exit(1);
|
|
393
|
-
}
|
|
394
|
-
const codeDir = requireCodeDir(workspace);
|
|
395
|
-
const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
|
|
396
|
-
if (discovered.length > 0) {
|
|
397
|
-
generateProjectScripts(projectDir, codeDir, pythonBin);
|
|
398
|
-
const primaryProps = path.join(projectDir, 'config', 'repo-configs', `${repoSlug}.properties`);
|
|
399
|
-
if (!fs.existsSync(primaryProps)) {
|
|
400
|
-
writePropertiesFile(primaryProps, {
|
|
401
|
-
REPO_NAME: repoSlug,
|
|
402
|
-
REPO_URL: gitUrl,
|
|
403
|
-
BRANCH: 'main',
|
|
404
|
-
AUTO_PUBLISH: autoPublish ? 'true' : 'false',
|
|
405
|
-
SSH_KEY_FILE: keyFile,
|
|
406
|
-
CAIRN_MCP_ENABLED: 'true',
|
|
407
|
-
});
|
|
408
|
-
}
|
|
409
|
-
ok(`Project "${name}" ready at ${projectDir}`);
|
|
410
|
-
printNextSteps(projectDir, autoPublish);
|
|
411
|
-
await offerToStart(projectDir);
|
|
412
|
-
} else {
|
|
413
|
-
writeExampleProject(projectDir, codeDir, pythonBin);
|
|
414
|
-
const repoDir = path.join(projectDir, 'config', 'repo-configs');
|
|
415
|
-
writePropertiesFile(path.join(repoDir, `${repoSlug}.properties`), {
|
|
416
|
-
REPO_NAME: repoSlug,
|
|
417
|
-
REPO_URL: gitUrl,
|
|
418
|
-
BRANCH: 'main',
|
|
419
|
-
AUTO_PUBLISH: autoPublish ? 'true' : 'false',
|
|
420
|
-
SSH_KEY_FILE: keyFile,
|
|
421
|
-
CAIRN_MCP_ENABLED: 'true',
|
|
422
|
-
});
|
|
423
|
-
const example = path.join(repoDir, '_example.properties');
|
|
424
|
-
if (fs.existsSync(example)) fs.removeSync(example);
|
|
425
|
-
generateWorkspaceScripts(workspace, {}, {}, {}, effectiveToken);
|
|
426
|
-
ok(`Project "${name}" created at ${projectDir}`);
|
|
427
|
-
printNextSteps(projectDir, autoPublish);
|
|
428
|
-
await offerToStart(projectDir);
|
|
429
|
-
}
|
|
430
|
-
}
|
|
431
|
-
/**
 * Interactively scaffold an empty example project in the workspace.
 *
 * Prompts for a project name (pre-filled from nameArg), prints a dry-run
 * preview of the files that will be written, asks for confirmation, then
 * materialises the example project layout and workspace scripts.
 * Exits the process on prompt cancel, or with status 1 if the target
 * directory already exists.
 *
 * @param {string|undefined} nameArg - Suggested project name shown as the prompt default.
 * @param {string} workspace - Absolute path of the workspace root.
 */
async function addFromName(nameArg, workspace) {
  const cancelOpts = { onCancel: () => process.exit(0) };

  const { name } = await prompts([{
    type: 'text',
    name: 'name',
    message: 'Project name',
    initial: nameArg || 'my-project',
    validate: v => VALID_NAME.test(v) || 'Use letters, numbers, hyphens only',
  }], cancelOpts);

  const projectDir = path.join(workspace, name);

  // Preview before touching the filesystem.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  info('  config/sentinel.properties');
  info('  config/repo-configs/_example.properties');
  info('  config/log-configs/_example.properties');
  info('  init.sh, start.sh, stop.sh');

  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}"?`, initial: true,
  }, cancelOpts);
  if (!confirm) {
    info('Aborted.');
    return;
  }

  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  // Resolve the Sentinel code checkout and its venv interpreter.
  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');

  writeExampleProject(projectDir, codeDir, pythonBin);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
463
|
-
/**
 * Create a project from a local JSON descriptor file.
 *
 * Parses and validates the descriptor, prints a dry-run preview of every
 * repo config, log-source config, and sentinel.properties override it will
 * write, asks for confirmation, then materialises the project.
 * Exits with status 1 on parse/validation failure or if the project
 * directory already exists; exits with status 0 on prompt cancel.
 *
 * @param {string} jsonPath - Path to the project JSON descriptor.
 * @param {string} workspace - Absolute path of the workspace root.
 */
async function addFromJson(jsonPath, workspace) {
  step(`Reading ${jsonPath}`);

  let obj;
  try {
    obj = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
  } catch (e) {
    console.error(chalk.red(`  ✖ Cannot parse ${jsonPath}: ${e.message}`));
    process.exit(1);
  }

  const errors = validateProjectJson(obj);
  if (errors.length) {
    console.error(chalk.red('  ✖ Invalid project JSON:'));
    for (const e of errors) {
      console.error(chalk.red(`    - ${e}`));
    }
    process.exit(1);
  }
  ok('JSON is valid');

  const { name } = obj;
  const projectDir = path.join(workspace, name);

  // Preview everything the descriptor will produce.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  for (const r of obj.repos || []) {
    info(`  config/repo-configs/${r.name}.properties (${r.REPO_URL})`);
  }
  for (const s of obj.log_sources || []) {
    info(`  config/log-configs/${s.name}.properties (${s.SOURCE_TYPE})`);
  }
  if (obj.sentinel) {
    for (const [k, v] of Object.entries(obj.sentinel)) {
      info(`  sentinel.properties: ${k}=${v}`);
    }
  }

  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}" from ${path.basename(jsonPath)}?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) {
    info('Aborted.');
    return;
  }

  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  writeExampleProject(projectDir, codeDir, pythonBin);

  // Drop the scaffold placeholders before applying the real configs.
  for (const sub of ['repo-configs', 'log-configs']) {
    const example = path.join(projectDir, 'config', sub, '_example.properties');
    if (fs.existsSync(example)) fs.removeSync(example);
  }

  applyJsonToProject(projectDir, obj);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
509
|
-
/**
 * Create a project from a JSON descriptor fetched over HTTP(S).
 *
 * Fetches the URL, parses and validates the JSON body, prints a dry-run
 * preview of every repo config, log-source config, and sentinel.properties
 * override it will write, asks for confirmation, then materialises the
 * project. Exits with status 1 on fetch/parse/validation failure or if the
 * project directory already exists; exits with status 0 on prompt cancel.
 *
 * @param {string} url - HTTP(S) URL of the project JSON descriptor.
 * @param {string} workspace - Absolute path of the workspace root.
 */
async function addFromUrl(url, workspace) {
  step(`Fetching ${url}`);
  let raw;
  try {
    // `await` passes non-thenables through unchanged, so this handles both
    // a synchronous fetchUrl and a Promise-returning one — the previous
    // manual `typeof raw.then === 'function'` probe was redundant.
    raw = await fetchUrl(url);
  } catch (e) {
    console.error(chalk.red(`  ✖ Cannot fetch ${url}: ${e.message}`));
    process.exit(1);
  }

  let obj;
  try {
    obj = JSON.parse(raw);
  } catch (e) {
    console.error(chalk.red(`  ✖ Response is not valid JSON: ${e.message}`));
    process.exit(1);
  }

  const errors = validateProjectJson(obj);
  if (errors.length) {
    console.error(chalk.red('  ✖ Invalid project JSON at URL:'));
    errors.forEach(e => console.error(chalk.red(`    - ${e}`)));
    process.exit(1);
  }
  ok('JSON is valid');

  const { name } = obj;
  const projectDir = path.join(workspace, name);

  // Preview everything the descriptor will produce.
  step('Dry-run preview');
  info(`Will create: ${projectDir}/`);
  (obj.repos || []).forEach(r => info(`  config/repo-configs/${r.name}.properties (${r.REPO_URL})`));
  (obj.log_sources || []).forEach(s => info(`  config/log-configs/${s.name}.properties (${s.SOURCE_TYPE})`));
  if (obj.sentinel) {
    Object.entries(obj.sentinel).forEach(([k, v]) => info(`  sentinel.properties: ${k}=${v}`));
  }

  const { confirm } = await prompts({
    type: 'confirm', name: 'confirm',
    message: `Create project "${name}" from ${url}?`, initial: true,
  }, { onCancel: () => process.exit(0) });
  if (!confirm) { info('Aborted.'); return; }

  if (fs.existsSync(projectDir)) {
    console.error(chalk.yellow(`Project "${name}" already exists at ${projectDir}`));
    process.exit(1);
  }

  const codeDir = requireCodeDir(workspace);
  const pythonBin = path.join(codeDir, '.venv', 'bin', 'python3');
  writeExampleProject(projectDir, codeDir, pythonBin);

  // Drop the scaffold placeholders before applying the real configs.
  const repoDir = path.join(projectDir, 'config', 'repo-configs');
  const logDir = path.join(projectDir, 'config', 'log-configs');
  if (fs.existsSync(path.join(repoDir, '_example.properties'))) fs.removeSync(path.join(repoDir, '_example.properties'));
  if (fs.existsSync(path.join(logDir, '_example.properties'))) fs.removeSync(path.join(logDir, '_example.properties'));

  applyJsonToProject(projectDir, obj);
  generateWorkspaceScripts(workspace);
  ok(`Project "${name}" created at ${projectDir}`);
  printNextSteps(projectDir);
}
|
|
563
|
-
/**
 * Print the post-creation summary: config directory, start script, and a
 * log-tailing hint. The optional autoPublish flag adds a mode-specific
 * note — a direct-push warning when true, a PR-review pointer when false,
 * and nothing when undefined (name-only / JSON flows).
 *
 * @param {string} projectDir - Absolute path of the created project.
 * @param {boolean} [autoPublish] - Publishing mode chosen for the project.
 */
function printNextSteps(projectDir, autoPublish) {
  const logFile = path.join(projectDir, 'logs', 'sentinel.log');

  let mode = '';
  if (autoPublish === true) {
    mode = chalk.yellow('\n  ⚠ Fixes push directly to main — ensure CI blocks bad pushes');
  } else if (autoPublish === false) {
    mode = chalk.cyan('\n  → Sentinel opens a GitHub PR for each fix — review and merge at github.com');
  }

  console.log(`
  Config: ${chalk.cyan(path.join(projectDir, 'config', ''))}
  Start:  ${chalk.cyan(path.join(projectDir, 'start.sh'))}${mode}
  Logs:   ${chalk.cyan(logFile)}
          ${chalk.gray(`tail -f ${logFile}`)}
`);
}
|
|
577
|
-
/**
 * Offer to launch the freshly created project's start.sh immediately.
 *
 * No-op when start.sh is missing or the user declines (cancel is treated
 * as decline). Runs the script via bash with inherited stdio and the git
 * environment; on a zero exit status prints a success line plus a
 * log-tailing hint.
 *
 * @param {string} projectDir - Absolute path of the project directory.
 */
async function offerToStart(projectDir) {
  const startSh = path.join(projectDir, 'start.sh');
  if (!fs.existsSync(startSh)) return;

  const { startNow } = await prompts({
    type: 'confirm', name: 'startNow',
    message: 'Start Sentinel now?', initial: true,
  }, { onCancel: () => {} });  // cancel → startNow undefined → decline
  if (!startNow) return;

  const result = spawnSync('bash', [startSh], { stdio: 'inherit', env: gitEnv() });
  if (result.status === 0) {
    const logFile = path.join(projectDir, 'logs', 'sentinel.log');
    ok('Sentinel started');
    info(`Logs: tail -f ${logFile}`);
  }
}
|
|
592
|
-
module.exports = async function add(arg) {
|
|
593
|
-
const type = detectInputType(arg);
|
|
594
|
-
const workspace = await resolveWorkspace();
|
|
595
|
-
if (type === 'git') return addFromGit(arg, workspace);
|
|
596
|
-
if (type === 'url') return addFromUrl(arg, workspace);
|
|
597
|
-
if (type === 'json') return addFromJson(arg, workspace);
|
|
598
|
-
return addFromName(arg, workspace);
|
|
599
|
-
};
|