cistack 1.0.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +48 -34
- package/bin/ciflow.js +91 -7
- package/package.json +10 -3
- package/src/analyzers/monorepo.js +124 -0
- package/src/analyzers/workflow.js +195 -0
- package/src/config/loader.js +163 -0
- package/src/detectors/env.js +69 -0
- package/src/detectors/framework.js +37 -12
- package/src/detectors/hosting.js +54 -46
- package/src/detectors/release.js +124 -0
- package/src/generators/dependabot.js +155 -0
- package/src/generators/release.js +195 -0
- package/src/generators/workflow.js +402 -125
- package/src/index.js +247 -54
- package/src/utils/helpers.js +146 -9
package/src/index.js
CHANGED
|
@@ -6,23 +6,32 @@ const chalk = require('chalk');
|
|
|
6
6
|
const ora = require('ora');
|
|
7
7
|
const inquirer = require('inquirer');
|
|
8
8
|
|
|
9
|
-
const CodebaseAnalyzer
|
|
10
|
-
const
|
|
9
|
+
const CodebaseAnalyzer = require('./analyzers/codebase');
|
|
10
|
+
const MonorepoAnalyzer = require('./analyzers/monorepo');
|
|
11
|
+
const HostingDetector = require('./detectors/hosting');
|
|
11
12
|
const FrameworkDetector = require('./detectors/framework');
|
|
12
|
-
const LanguageDetector
|
|
13
|
-
const TestingDetector
|
|
13
|
+
const LanguageDetector = require('./detectors/language');
|
|
14
|
+
const TestingDetector = require('./detectors/testing');
|
|
15
|
+
const ReleaseDetector = require('./detectors/release');
|
|
16
|
+
const EnvDetector = require('./detectors/env');
|
|
14
17
|
const WorkflowGenerator = require('./generators/workflow');
|
|
15
|
-
const
|
|
18
|
+
const DependabotGenerator = require('./generators/dependabot');
|
|
19
|
+
const ReleaseGenerator = require('./generators/release');
|
|
20
|
+
const ConfigLoader = require('./config/loader');
|
|
21
|
+
const { ensureDir, writeFile, banner, smartMergeWorkflow } = require('./utils/helpers');
|
|
22
|
+
|
|
23
|
+
const WorkflowAnalyzer = require('./analyzers/workflow');
|
|
16
24
|
|
|
17
25
|
class CIFlow {
|
|
18
26
|
constructor(options) {
|
|
19
27
|
this.options = options;
|
|
20
28
|
this.projectPath = options.projectPath;
|
|
21
|
-
this.outputDir = path.join(options.projectPath, options.outputDir);
|
|
22
|
-
this.dryRun
|
|
23
|
-
this.force
|
|
24
|
-
this.prompt
|
|
25
|
-
this.verbose
|
|
29
|
+
this.outputDir = path.join(options.projectPath, options.outputDir || '.github/workflows');
|
|
30
|
+
this.dryRun = options.dryRun || false;
|
|
31
|
+
this.force = options.force || false;
|
|
32
|
+
this.prompt = options.prompt !== false;
|
|
33
|
+
this.verbose = options.verbose || false;
|
|
34
|
+
this.explain = options.explain || false;
|
|
26
35
|
}
|
|
27
36
|
|
|
28
37
|
async run() {
|
|
@@ -31,7 +40,19 @@ class CIFlow {
|
|
|
31
40
|
const spinner = ora({ text: 'Scanning project...', color: 'cyan' }).start();
|
|
32
41
|
|
|
33
42
|
try {
|
|
34
|
-
// ── 1.
|
|
43
|
+
// ── 1. Load cistack.config.js ─────────────────────────────────────
|
|
44
|
+
const configLoader = new ConfigLoader(this.projectPath);
|
|
45
|
+
const userConfig = configLoader.load();
|
|
46
|
+
if (Object.keys(userConfig).length > 0) {
|
|
47
|
+
spinner.info(chalk.cyan('cistack.config.js loaded'));
|
|
48
|
+
spinner.start('Scanning project...');
|
|
49
|
+
// Allow config to override outputDir
|
|
50
|
+
if (userConfig.outputDir) {
|
|
51
|
+
this.outputDir = path.join(this.projectPath, userConfig.outputDir);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// ── 2. Analyse the codebase ───────────────────────────────────────
|
|
35
56
|
const analyzer = new CodebaseAnalyzer(this.projectPath, { verbose: this.verbose });
|
|
36
57
|
const codebaseInfo = await analyzer.analyse();
|
|
37
58
|
spinner.succeed(chalk.green('Project scanned'));
|
|
@@ -40,46 +61,69 @@ class CIFlow {
|
|
|
40
61
|
console.log('\n' + chalk.dim(JSON.stringify(codebaseInfo, null, 2)));
|
|
41
62
|
}
|
|
42
63
|
|
|
43
|
-
// ──
|
|
64
|
+
// ── 3. Detect stack + extras in parallel ──────────────────────────
|
|
44
65
|
spinner.start('Detecting stack...');
|
|
45
|
-
const [hosting, frameworks, languages, testing] =
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
66
|
+
const [hosting, frameworks, languages, testing, releaseInfo, envVars, monorepoPackages] =
|
|
67
|
+
await Promise.all([
|
|
68
|
+
new HostingDetector(this.projectPath, codebaseInfo).detect(),
|
|
69
|
+
new FrameworkDetector(this.projectPath, codebaseInfo).detect(),
|
|
70
|
+
new LanguageDetector(this.projectPath, codebaseInfo).detect(),
|
|
71
|
+
new TestingDetector(this.projectPath, codebaseInfo).detect(),
|
|
72
|
+
new ReleaseDetector(this.projectPath, codebaseInfo).detect(),
|
|
73
|
+
new EnvDetector(this.projectPath, codebaseInfo).detect(),
|
|
74
|
+
new MonorepoAnalyzer(this.projectPath, codebaseInfo).analyze(),
|
|
75
|
+
]);
|
|
51
76
|
spinner.succeed(chalk.green('Stack detected'));
|
|
52
77
|
|
|
53
|
-
// ──
|
|
54
|
-
|
|
78
|
+
// ── 4. Apply cistack.config.js overrides ──────────────────────────
|
|
79
|
+
let finalConfig = ConfigLoader.applyToStack(userConfig, {
|
|
80
|
+
hosting,
|
|
81
|
+
frameworks,
|
|
82
|
+
languages,
|
|
83
|
+
testing,
|
|
84
|
+
envVars,
|
|
85
|
+
monorepoPackages,
|
|
86
|
+
_config: userConfig,
|
|
87
|
+
});
|
|
88
|
+
|
|
89
|
+
// ── 5. Print summary ───────────────────────────────────────────────
|
|
90
|
+
this._printSummary(finalConfig, releaseInfo, envVars, monorepoPackages);
|
|
55
91
|
|
|
56
|
-
// ──
|
|
57
|
-
let finalConfig = { hosting, frameworks, languages, testing };
|
|
92
|
+
// ── 6. Optional interactive confirmation ──────────────────────────
|
|
58
93
|
if (this.prompt) {
|
|
59
94
|
finalConfig = await this._interactiveConfirm(finalConfig);
|
|
60
95
|
}
|
|
61
96
|
|
|
62
|
-
// ──
|
|
97
|
+
// ── 7. Generate CI/CD workflow(s) ─────────────────────────────────
|
|
63
98
|
spinner.start('Generating workflow(s)...');
|
|
64
99
|
const generator = new WorkflowGenerator(finalConfig, this.projectPath);
|
|
65
100
|
const workflows = generator.generate();
|
|
66
|
-
spinner.succeed(chalk.green(`Generated ${workflows.length} workflow(s)`));
|
|
101
|
+
spinner.succeed(chalk.green(`Generated ${workflows.length} CI workflow(s)`));
|
|
102
|
+
|
|
103
|
+
// ── 8. Generate dependabot.yml ────────────────────────────────────
|
|
104
|
+
const dependabotGen = new DependabotGenerator(codebaseInfo);
|
|
105
|
+
const dependabotFile = dependabotGen.generate();
|
|
106
|
+
|
|
107
|
+
// ── 9. Generate release.yml (if release tooling detected) ─────────
|
|
108
|
+
let releaseWorkflow = null;
|
|
109
|
+
if (releaseInfo) {
|
|
110
|
+
const releaseGen = new ReleaseGenerator(releaseInfo, finalConfig, this.projectPath);
|
|
111
|
+
releaseWorkflow = releaseGen.generate();
|
|
112
|
+
if (releaseWorkflow) workflows.push(releaseWorkflow);
|
|
113
|
+
}
|
|
67
114
|
|
|
68
|
-
// ──
|
|
115
|
+
// ── 10. Write files ────────────────────────────────────────────────
|
|
69
116
|
if (this.dryRun) {
|
|
70
|
-
|
|
71
|
-
for (const wf of workflows) {
|
|
72
|
-
console.log(chalk.bold.cyan(`\n📄 ${wf.filename}`));
|
|
73
|
-
console.log(chalk.dim('─'.repeat(60)));
|
|
74
|
-
console.log(wf.content);
|
|
75
|
-
}
|
|
117
|
+
this._dryRunPrint(workflows, dependabotFile);
|
|
76
118
|
} else {
|
|
77
119
|
await this._writeWorkflows(workflows);
|
|
120
|
+
await this._writeDependabot(dependabotFile);
|
|
78
121
|
}
|
|
79
122
|
|
|
80
123
|
console.log('\n' + chalk.bold.green('✅ Done! Your GitHub Actions pipeline is ready.'));
|
|
81
124
|
if (!this.dryRun) {
|
|
82
|
-
console.log(chalk.dim(` → ${this.outputDir}
|
|
125
|
+
console.log(chalk.dim(` Workflows → ${this.outputDir}`));
|
|
126
|
+
console.log(chalk.dim(` Dependabot → ${path.join(this.projectPath, '.github', 'dependabot.yml')}\n`));
|
|
83
127
|
}
|
|
84
128
|
} catch (err) {
|
|
85
129
|
spinner.fail(chalk.red('Failed: ' + err.message));
|
|
@@ -88,18 +132,115 @@ class CIFlow {
|
|
|
88
132
|
}
|
|
89
133
|
}
|
|
90
134
|
|
|
135
|
+
async audit() {
|
|
136
|
+
banner();
|
|
137
|
+
const spinner = ora({ text: 'Auditing existing workflows...', color: 'cyan' }).start();
|
|
138
|
+
|
|
139
|
+
try {
|
|
140
|
+
const analyzer = new WorkflowAnalyzer(this.projectPath);
|
|
141
|
+
const results = await analyzer.audit();
|
|
142
|
+
spinner.succeed(chalk.green('Audit complete'));
|
|
143
|
+
|
|
144
|
+
if (results.files.length === 0) {
|
|
145
|
+
console.log(chalk.yellow('\nNo workflow files found to audit.'));
|
|
146
|
+
return;
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
console.log('\n' + chalk.bold('🔍 Workflow Audit Results'));
|
|
150
|
+
console.log(chalk.dim('─'.repeat(48)));
|
|
151
|
+
|
|
152
|
+
for (const file of results.files) {
|
|
153
|
+
if (file.error) {
|
|
154
|
+
console.log(`\n📄 ${chalk.red(file.filename)} – ${chalk.red(file.error)}`);
|
|
155
|
+
continue;
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
console.log(`\n📄 ${chalk.cyan(file.filename)} – ${file.issues.length > 0 ? chalk.yellow(file.issues.length + ' issues found') : chalk.green('Excellent')}`);
|
|
159
|
+
|
|
160
|
+
for (const issue of file.issues) {
|
|
161
|
+
const color = issue.severity === 'high' ? chalk.red : issue.severity === 'medium' ? chalk.yellow : chalk.dim;
|
|
162
|
+
console.log(` ${color('•')} ${issue.message}`);
|
|
163
|
+
console.log(` ${chalk.dim('Fix:')} ${chalk.italic(issue.fix)}`);
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
if (results.totalIssues > 0) {
|
|
168
|
+
console.log('\n' + chalk.yellow(`💡 Run ${chalk.bold('cistack upgrade')} to automatically fix outdated actions.`));
|
|
169
|
+
} else {
|
|
170
|
+
console.log('\n' + chalk.green('✅ Your workflows are up to date and follow best practices.'));
|
|
171
|
+
}
|
|
172
|
+
console.log('');
|
|
173
|
+
} catch (err) {
|
|
174
|
+
spinner.fail(chalk.red('Audit failed: ' + err.message));
|
|
175
|
+
process.exit(1);
|
|
176
|
+
}
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
async upgrade() {
|
|
180
|
+
banner();
|
|
181
|
+
const spinner = ora({ text: 'Upgrading actions...', color: 'cyan' }).start();
|
|
182
|
+
|
|
183
|
+
try {
|
|
184
|
+
const analyzer = new WorkflowAnalyzer(this.projectPath);
|
|
185
|
+
const results = await analyzer.upgrade(this.dryRun);
|
|
186
|
+
|
|
187
|
+
if (results.changes === 0) {
|
|
188
|
+
spinner.succeed(chalk.green('All actions are already up to date.'));
|
|
189
|
+
return;
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
spinner.succeed(chalk.green(`Upgraded ${results.changes} action(s) across ${results.upgradedFiles.length} file(s)`));
|
|
193
|
+
|
|
194
|
+
if (this.dryRun) {
|
|
195
|
+
console.log(chalk.yellow('\n── DRY RUN – files not modified ──'));
|
|
196
|
+
}
|
|
197
|
+
|
|
198
|
+
for (const file of results.upgradedFiles) {
|
|
199
|
+
console.log(` ${chalk.green('✔')} ${file.filename} (${file.changes} changes)`);
|
|
200
|
+
}
|
|
201
|
+
console.log('');
|
|
202
|
+
} catch (err) {
|
|
203
|
+
spinner.fail(chalk.red('Upgrade failed: ' + err.message));
|
|
204
|
+
process.exit(1);
|
|
205
|
+
}
|
|
206
|
+
}
|
|
207
|
+
|
|
91
208
|
// ── helpers ──────────────────────────────────────────────────────────────
|
|
92
209
|
|
|
93
|
-
_printSummary(
|
|
94
|
-
const
|
|
95
|
-
|
|
210
|
+
_printSummary(config, releaseInfo, envVars, monorepoPackages) {
|
|
211
|
+
const { hosting, frameworks, languages, testing } = config;
|
|
212
|
+
const line = (label, value, reasons = []) => {
|
|
213
|
+
console.log(` ${chalk.dim(label.padEnd(20))} ${chalk.cyan(value || chalk.italic('none detected'))}`);
|
|
214
|
+
if (this.explain && reasons && reasons.length > 0) {
|
|
215
|
+
for (const reason of reasons) {
|
|
216
|
+
console.log(` ${chalk.dim('↳')} ${chalk.italic.gray(reason)}`);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
};
|
|
96
220
|
|
|
97
221
|
console.log('\n' + chalk.bold(' 📊 Detected Stack'));
|
|
98
|
-
console.log(chalk.dim(' ' + '─'.repeat(
|
|
99
|
-
|
|
100
|
-
line('
|
|
101
|
-
line('
|
|
102
|
-
line('
|
|
222
|
+
console.log(chalk.dim(' ' + '─'.repeat(48)));
|
|
223
|
+
|
|
224
|
+
line('Languages:', languages.map((l) => l.name).join(', '), languages[0] && languages[0].reasons);
|
|
225
|
+
line('Frameworks:', frameworks.map((f) => f.name).join(', '), frameworks[0] && frameworks[0].reasons);
|
|
226
|
+
line('Hosting:', hosting.map((h) => h.name).join(', ') || 'none', hosting[0] && hosting[0].reasons);
|
|
227
|
+
line('Testing:', testing.map((t) => t.name).join(', ') || 'none', testing[0] && testing[0].reasons);
|
|
228
|
+
line('Release tool:', releaseInfo ? releaseInfo.tool : 'none', releaseInfo && releaseInfo.reasons);
|
|
229
|
+
|
|
230
|
+
if (monorepoPackages.length > 0) {
|
|
231
|
+
line('Monorepo pkgs:', monorepoPackages.map((p) => p.name).join(', '));
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
if (envVars.sourceFile) {
|
|
235
|
+
line('Env file:', envVars.sourceFile);
|
|
236
|
+
if (envVars.secrets.length > 0) {
|
|
237
|
+
line(' Secrets:', envVars.secrets.join(', '));
|
|
238
|
+
}
|
|
239
|
+
if (envVars.public.length > 0) {
|
|
240
|
+
line(' Public vars:', envVars.public.join(', '));
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
|
|
103
244
|
console.log('');
|
|
104
245
|
}
|
|
105
246
|
|
|
@@ -129,36 +270,88 @@ class CIFlow {
|
|
|
129
270
|
|
|
130
271
|
config.hosting = customHosting
|
|
131
272
|
.filter((h) => h !== 'none')
|
|
132
|
-
.map((h) => ({ name: h, confidence: 1.0, manual: true }));
|
|
273
|
+
.map((h) => ({ name: h, confidence: 1.0, manual: true, secrets: [] }));
|
|
133
274
|
}
|
|
134
275
|
|
|
135
276
|
return config;
|
|
136
277
|
}
|
|
137
278
|
|
|
279
|
+
// ── Dry run ───────────────────────────────────────────────────────────────
|
|
280
|
+
|
|
281
|
+
_dryRunPrint(workflows, dependabotFile) {
|
|
282
|
+
console.log('\n' + chalk.yellow('── DRY RUN – files not written ──\n'));
|
|
283
|
+
|
|
284
|
+
for (const wf of workflows) {
|
|
285
|
+
console.log(chalk.bold.cyan(`\n📄 .github/workflows/${wf.filename}`));
|
|
286
|
+
console.log(chalk.dim('─'.repeat(60)));
|
|
287
|
+
console.log(wf.content);
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
console.log(chalk.bold.cyan(`\n📄 .github/dependabot.yml`));
|
|
291
|
+
console.log(chalk.dim('─'.repeat(60)));
|
|
292
|
+
console.log(dependabotFile.content);
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
// ── Write workflows ────────────────────────────────────────────────────────
|
|
296
|
+
|
|
138
297
|
async _writeWorkflows(workflows) {
|
|
139
298
|
ensureDir(this.outputDir);
|
|
140
299
|
|
|
141
300
|
for (const wf of workflows) {
|
|
142
301
|
const filePath = path.join(this.outputDir, wf.filename);
|
|
143
|
-
const exists
|
|
302
|
+
const exists = fs.existsSync(filePath);
|
|
144
303
|
|
|
145
304
|
if (exists && !this.force) {
|
|
146
|
-
const
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
default: false,
|
|
152
|
-
},
|
|
153
|
-
]);
|
|
154
|
-
if (!overwrite) {
|
|
155
|
-
console.log(chalk.dim(` Skipped ${wf.filename}`));
|
|
305
|
+
const existing = fs.readFileSync(filePath, 'utf8');
|
|
306
|
+
const { content: merged, changes } = smartMergeWorkflow(existing, wf.content);
|
|
307
|
+
|
|
308
|
+
if (changes.length === 0) {
|
|
309
|
+
console.log(chalk.dim(` ○ No changes: ${wf.filename}`));
|
|
156
310
|
continue;
|
|
157
311
|
}
|
|
312
|
+
|
|
313
|
+
console.log(chalk.yellow(` ↻ Smart-merged: ${wf.filename}`));
|
|
314
|
+
for (const c of changes) {
|
|
315
|
+
console.log(chalk.dim(` • ${c}`));
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
writeFile(filePath, merged);
|
|
319
|
+
} else if (exists && this.force) {
|
|
320
|
+
writeFile(filePath, wf.content);
|
|
321
|
+
console.log(chalk.green(` ✔ Overwritten: ${wf.filename}`));
|
|
322
|
+
} else {
|
|
323
|
+
writeFile(filePath, wf.content);
|
|
324
|
+
console.log(chalk.green(` ✔ Written: ${wf.filename}`));
|
|
158
325
|
}
|
|
326
|
+
}
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
// ── Write dependabot.yml ───────────────────────────────────────────────────
|
|
330
|
+
|
|
331
|
+
async _writeDependabot(dependabotFile) {
|
|
332
|
+
const githubDir = path.join(this.projectPath, '.github');
|
|
333
|
+
const filePath = path.join(githubDir, 'dependabot.yml');
|
|
334
|
+
const exists = fs.existsSync(filePath);
|
|
335
|
+
|
|
336
|
+
ensureDir(githubDir);
|
|
159
337
|
|
|
160
|
-
|
|
161
|
-
|
|
338
|
+
if (exists && !this.force) {
|
|
339
|
+
const existing = fs.readFileSync(filePath, 'utf8');
|
|
340
|
+
const { content: merged, changes } = smartMergeWorkflow(existing, dependabotFile.content);
|
|
341
|
+
|
|
342
|
+
if (changes.length === 0) {
|
|
343
|
+
console.log(chalk.dim(` ○ No changes: dependabot.yml`));
|
|
344
|
+
return;
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
writeFile(filePath, merged);
|
|
348
|
+
console.log(chalk.yellow(` ↻ Smart-merged: dependabot.yml`));
|
|
349
|
+
for (const c of changes) {
|
|
350
|
+
console.log(chalk.dim(` • ${c}`));
|
|
351
|
+
}
|
|
352
|
+
} else {
|
|
353
|
+
writeFile(filePath, dependabotFile.content);
|
|
354
|
+
console.log(chalk.green(` ✔ Written: .github/dependabot.yml`));
|
|
162
355
|
}
|
|
163
356
|
}
|
|
164
357
|
}
|
package/src/utils/helpers.js
CHANGED
|
@@ -3,6 +3,7 @@
|
|
|
3
3
|
const fs = require('fs');
|
|
4
4
|
const path = require('path');
|
|
5
5
|
const chalk = require('chalk');
|
|
6
|
+
const yaml = require('js-yaml');
|
|
6
7
|
|
|
7
8
|
function ensureDir(dirPath) {
|
|
8
9
|
if (!fs.existsSync(dirPath)) {
|
|
@@ -16,16 +17,152 @@ function writeFile(filePath, content) {
|
|
|
16
17
|
}
|
|
17
18
|
|
|
18
19
|
/**
 * Print the cistack ASCII-art banner and tagline.
 *
 * Generalized: the version label was a hard-coded 'v2.0.0' string, which
 * drifts from package.json (this diff ships 3.0.0). It is now a parameter
 * whose default preserves the previous output exactly.
 * TODO(review): call sites should pass `'v' + require('../../package.json').version`.
 *
 * @param {string} [version='v2.0.0'] - Version label appended to the tagline.
 */
function banner(version = 'v2.0.0') {
  const art = [
    ' ██████╗██╗███████╗████████╗ █████╗ ██████╗██╗ ██╗',
    ' ██╔════╝██║██╔════╝╚══██╔══╝██╔══██╗██╔════╝██║ ██╔╝',
    ' ██║ ██║███████╗ ██║ ███████║██║ █████╔╝ ',
    ' ██║ ██║╚════██║ ██║ ██╔══██║██║ ██╔═██╗ ',
    ' ╚██████╗██║███████║ ██║ ██║ ██║╚██████╗██║ ██╗',
    ' ╚═════╝╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝',
  ];
  // Leading newline belongs to the first art row, matching the original output.
  console.log('\n' + chalk.bold.cyan(art[0]));
  for (const row of art.slice(1)) {
    console.log(chalk.bold.cyan(row));
  }
  console.log('');
  console.log(' ' + chalk.dim('GitHub Actions pipeline generator ') + chalk.bold.cyan(version));
  console.log(' ' + chalk.dim('─'.repeat(52)));
  console.log('');
}
|
|
30
31
|
|
|
31
|
-
|
|
32
|
+
/**
|
|
33
|
+
* Smart diff: compare existing workflow YAML with newly generated YAML.
|
|
34
|
+
*
|
|
35
|
+
* Strategy:
|
|
36
|
+
* 1. Parse both into JS objects via js-yaml.
|
|
37
|
+
* 2. Diff at the "jobs" level — for each job key, compare serialised forms.
|
|
38
|
+
* 3. Diff top-level keys (name, on, env, concurrency, permissions).
|
|
39
|
+
* 4. Build a merged object: keep existing jobs/keys that are UNCHANGED,
|
|
40
|
+
* update jobs/keys that CHANGED, add new jobs/keys.
|
|
41
|
+
* 5. Re-serialise and return { content, changes } where changes is a list of
|
|
42
|
+
* human-readable change descriptions.
|
|
43
|
+
*
|
|
44
|
+
* If either file fails to parse as YAML we fall back to a full overwrite.
|
|
45
|
+
*/
|
|
46
|
+
/**
 * Merge an existing workflow YAML file with freshly generated content.
 *
 * Contract (unchanged):
 *  - Both sides are parsed with js-yaml; if either fails to parse or is an
 *    empty document, the generated content wins wholesale ("full rewrite").
 *  - Selected top-level keys are synced from the generated doc when their
 *    JSON serialisations differ.
 *  - Jobs are diffed by id: new jobs are added, changed jobs are deep-merged
 *    at step level via _mergeJob, identical jobs keep the existing text.
 *    Jobs that exist only in the old file are preserved.
 *  - The leading `#` comment header of the generated file is re-attached to
 *    the re-serialised output.
 *
 * @param {string} existingContent - YAML currently on disk.
 * @param {string} newContent - Freshly generated YAML.
 * @returns {{content: string, changes: string[]}} merged YAML + change log
 */
function smartMergeWorkflow(existingContent, newContent) {
  let currentDoc;
  let generatedDoc;

  try {
    currentDoc = yaml.load(existingContent);
    generatedDoc = yaml.load(newContent);
  } catch (_) {
    // Unparseable YAML on either side — fall back to a full overwrite.
    return { content: newContent, changes: ['full rewrite (YAML parse error)'] };
  }

  if (!currentDoc || !generatedDoc) {
    return { content: newContent, changes: ['full rewrite (empty document)'] };
  }

  // JSON serialisation as a cheap structural-equality check.
  const ser = (value) => JSON.stringify(value ?? null);

  const changes = [];
  const merged = { ...currentDoc };

  // Top-level keys synced from the generated document.
  // NOTE(review): with a YAML 1.1 schema js-yaml loads a bare `on:` key as
  // boolean `true`, not the string 'on' — verify the js-yaml version/schema
  // in use, or the 'on' entry here may never match.
  const topLevelKeys = ['name', 'on', 'env', 'concurrency', 'permissions', 'defaults'];
  for (const key of topLevelKeys) {
    if (!(key in generatedDoc)) continue;
    if (ser(currentDoc[key]) !== ser(generatedDoc[key])) {
      merged[key] = generatedDoc[key];
      changes.push(`updated top-level "${key}"`);
    }
  }

  if (generatedDoc.jobs) {
    // Start from existing jobs so old-only jobs survive the merge.
    merged.jobs = { ...(currentDoc.jobs || {}) };

    for (const [jobId, genJob] of Object.entries(generatedDoc.jobs)) {
      const existJob = currentDoc.jobs && currentDoc.jobs[jobId];

      if (!existJob) {
        merged.jobs[jobId] = genJob;
        changes.push(`added job "${jobId}"`);
      } else if (ser(existJob) !== ser(genJob)) {
        // Changed job — deep-merge at step level.
        const { job: mergedJob, jobChanges } = _mergeJob(existJob, genJob, jobId);
        merged.jobs[jobId] = mergedJob;
        changes.push(...jobChanges);
      }
      // identical → keep the existing definition untouched
    }
  }

  // Re-attach the generated file's leading comment header, if any.
  const headerMatch = newContent.match(/^(#[^\n]*\n)+\n?/);
  const header = headerMatch ? headerMatch[0] : '';

  const raw = yaml.dump(merged, {
    indent: 2,
    lineWidth: 120,
    quotingType: "'",
    forceQuotes: false,
    noRefs: true,
  });

  return { content: header + raw, changes };
}
|
|
118
|
+
|
|
119
|
+
/**
|
|
120
|
+
* Merge two job objects at the "steps" level.
|
|
121
|
+
* Steps are matched by their "name" property.
|
|
122
|
+
*/
|
|
123
|
+
/**
 * Deep-merge a single job definition at the step level.
 *
 * Non-step keys are synced from the generated job when their JSON
 * serialisations differ. Steps are matched by their `name` property;
 * unnamed existing steps are never indexed, so they can never match and
 * the corresponding generated step is reported as "added". The merged
 * step list is rebuilt exclusively from the generated steps — steps that
 * exist only in the old job are dropped.
 *
 * @param {object} existJob - Job definition as it exists on disk.
 * @param {object} genJob - Freshly generated job definition.
 * @param {string} jobId - Job key, used only in change messages.
 * @returns {{job: object, jobChanges: string[]}} merged job + change log
 */
function _mergeJob(existJob, genJob, jobId) {
  const jobChanges = [];
  const merged = { ...existJob };
  const ser = (value) => JSON.stringify(value ?? null);

  // Sync every non-step key from the generated job when it differs.
  for (const key of Object.keys(genJob)) {
    if (key === 'steps') continue;
    if (ser(existJob[key]) !== ser(genJob[key])) {
      merged[key] = genJob[key];
      jobChanges.push(`  job "${jobId}" → updated "${key}"`);
    }
  }

  if (genJob.steps) {
    // Index existing steps by name for O(1) matching.
    const byName = {};
    for (const step of existJob.steps || []) {
      if (step.name) byName[step.name] = step;
    }

    const rebuilt = [];
    for (const genStep of genJob.steps) {
      const match = byName[genStep.name];
      if (!match) {
        rebuilt.push(genStep);
        jobChanges.push(`  job "${jobId}" → added step "${genStep.name}"`);
      } else if (JSON.stringify(match) !== JSON.stringify(genStep)) {
        rebuilt.push(genStep); // generated version wins on change
        jobChanges.push(`  job "${jobId}" → updated step "${genStep.name}"`);
      } else {
        rebuilt.push(match); // unchanged
      }
    }
    merged.steps = rebuilt;
  }

  return { job: merged, jobChanges };
}
|
|
167
|
+
|
|
168
|
+
module.exports = { ensureDir, writeFile, banner, smartMergeWorkflow };
|