gitlab-catalog-browser 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +75 -0
- package/bin/gitlab-ci-cli.js +29 -0
- package/dist/api/catalog.d.ts +38 -0
- package/dist/api/catalog.d.ts.map +1 -0
- package/dist/api/catalog.js +72 -0
- package/dist/api/catalog.js.map +1 -0
- package/dist/api/gitlab.d.ts +69 -0
- package/dist/api/gitlab.d.ts.map +1 -0
- package/dist/api/gitlab.js +226 -0
- package/dist/api/gitlab.js.map +1 -0
- package/dist/api/lint.d.ts +61 -0
- package/dist/api/lint.d.ts.map +1 -0
- package/dist/api/lint.js +41 -0
- package/dist/api/lint.js.map +1 -0
- package/dist/cache/schema-cache.d.ts +54 -0
- package/dist/cache/schema-cache.d.ts.map +1 -0
- package/dist/cache/schema-cache.js +124 -0
- package/dist/cache/schema-cache.js.map +1 -0
- package/dist/commands/batch.d.ts +19 -0
- package/dist/commands/batch.d.ts.map +1 -0
- package/dist/commands/batch.js +174 -0
- package/dist/commands/batch.js.map +1 -0
- package/dist/commands/catalog.d.ts +42 -0
- package/dist/commands/catalog.d.ts.map +1 -0
- package/dist/commands/catalog.js +158 -0
- package/dist/commands/catalog.js.map +1 -0
- package/dist/commands/component.d.ts +46 -0
- package/dist/commands/component.d.ts.map +1 -0
- package/dist/commands/component.js +213 -0
- package/dist/commands/component.js.map +1 -0
- package/dist/commands/pipeline.d.ts +61 -0
- package/dist/commands/pipeline.d.ts.map +1 -0
- package/dist/commands/pipeline.js +880 -0
- package/dist/commands/pipeline.js.map +1 -0
- package/dist/commands/setup.d.ts +119 -0
- package/dist/commands/setup.d.ts.map +1 -0
- package/dist/commands/setup.js +391 -0
- package/dist/commands/setup.js.map +1 -0
- package/dist/commands/skills.d.ts +39 -0
- package/dist/commands/skills.d.ts.map +1 -0
- package/dist/commands/skills.js +208 -0
- package/dist/commands/skills.js.map +1 -0
- package/dist/commands/validate.d.ts +27 -0
- package/dist/commands/validate.d.ts.map +1 -0
- package/dist/commands/validate.js +201 -0
- package/dist/commands/validate.js.map +1 -0
- package/dist/config/loader.d.ts +80 -0
- package/dist/config/loader.d.ts.map +1 -0
- package/dist/config/loader.js +217 -0
- package/dist/config/loader.js.map +1 -0
- package/dist/config/types.d.ts +46 -0
- package/dist/config/types.d.ts.map +1 -0
- package/dist/config/types.js +45 -0
- package/dist/config/types.js.map +1 -0
- package/dist/index.d.ts +10 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +437 -0
- package/dist/index.js.map +1 -0
- package/dist/output/table.d.ts +28 -0
- package/dist/output/table.d.ts.map +1 -0
- package/dist/output/table.js +67 -0
- package/dist/output/table.js.map +1 -0
- package/dist/types/api.d.ts +66 -0
- package/dist/types/api.d.ts.map +1 -0
- package/dist/types/api.js +61 -0
- package/dist/types/api.js.map +1 -0
- package/dist/types/catalog.d.ts +77 -0
- package/dist/types/catalog.d.ts.map +1 -0
- package/dist/types/catalog.js +5 -0
- package/dist/types/catalog.js.map +1 -0
- package/package.json +60 -0
- package/skill-data/core/reference.md +127 -0
- package/skill-data/core/templates.md +97 -0
- package/skill-data/core/workflows.md +84 -0
- package/skill-data/manifest.json +12 -0
- package/skill-data/templates/basic-pipeline.yml +29 -0
- package/skill-data/templates/docker-build.yml +38 -0
- package/skill-data/templates/multi-stage.yml +43 -0
- package/skills/gitlab-catalog-browser/SKILL.md +49 -0
|
@@ -0,0 +1,880 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pipeline Knowledge Interface (PKI) command handlers.
|
|
3
|
+
*
|
|
4
|
+
* Implements `pipeline explain`, `pipeline trace`, `pipeline stages`,
|
|
5
|
+
* `pipeline includes`, and `pipeline summary` commands.
|
|
6
|
+
*/
|
|
7
|
+
import { readFileSync } from 'node:fs';
|
|
8
|
+
import * as yaml from 'js-yaml';
|
|
9
|
+
// ── Known GitLab CI configuration keys (not jobs) ──
// Top-level keys in a .gitlab-ci.yml that configure the pipeline itself.
// extractJobs() skips these when deciding which top-level keys are jobs.
const CONFIG_KEYS = new Set([
    'stages',
    'variables',
    'include',
    'cache',
    'default',
    'before_script',
    'after_script',
    'image',
    'services',
    'tags',
    'workflow',
    '.pre',
    '.post',
]);
// ── Default stages ────────────────────────────
// Stage order GitLab uses when the pipeline declares no `stages:` key.
const DEFAULT_STAGES = ['.pre', 'build', 'test', 'deploy', '.post'];
|
|
27
|
+
// ──────────────────────────────────────────────
|
|
28
|
+
// Helpers
|
|
29
|
+
// ──────────────────────────────────────────────
|
|
30
|
+
/**
 * Read a pipeline configuration file from disk as UTF-8 text.
 *
 * @param {string} filePath - Path to the pipeline YAML file.
 * @returns {string} Raw file contents.
 */
function readPipelineFile(filePath) {
    const raw = readFileSync(filePath, 'utf-8');
    return raw;
}
|
|
33
|
+
/**
 * Parse pipeline YAML text into an object.
 *
 * @param {string} content - Raw YAML text.
 * @returns {object} Parsed top-level mapping.
 * @throws {Error} If the document is not a YAML mapping (null, scalar, or sequence).
 */
function parseYaml(content) {
    const doc = yaml.load(content);
    const isMapping = Boolean(doc) && typeof doc === 'object' && !Array.isArray(doc);
    if (!isMapping) {
        throw new Error('Pipeline configuration must be a YAML mapping');
    }
    return doc;
}
|
|
40
|
+
/**
 * Read a pipeline file and parse it as YAML in one step.
 *
 * @param {string} filePath - Path to the pipeline YAML file.
 * @returns {object} Parsed top-level mapping.
 * @throws {Error} On read failure or if the document is not a mapping.
 */
function readAndParsePipeline(filePath) {
    return parseYaml(readPipelineFile(filePath));
}
|
|
44
|
+
/**
 * Extract job definitions from a parsed pipeline, filtering out non-job keys.
 *
 * @param {Record<string, unknown>} doc - Parsed top-level pipeline mapping.
 * @returns {Array<object>} One normalized descriptor per detected job, copying
 *   the recognized GitLab job keywords (stage, script, needs, rules, ...).
 */
function extractJobs(doc) {
    const jobs = [];
    const stagesOrder = extractStages(doc);
    for (const [key, value] of Object.entries(doc)) {
        // Skip config keys, workflow keys, and template jobs (start with '.')
        if (CONFIG_KEYS.has(key))
            continue;
        if (key.startsWith('.'))
            continue; // job templates
        if (typeof value !== 'object' || value === null || Array.isArray(value))
            continue;
        const jobDef = value;
        // A job must have at least one of these to be a real job
        // NOTE(review): entries defined only via `extends` (no script/trigger/needs
        // of their own) would be skipped here — confirm that is intended.
        if (!jobDef.script && !jobDef.trigger && !jobDef.needs)
            continue;
        const job = {
            name: key,
            // Missing `stage:` falls back to a name-based heuristic, not
            // GitLab's real default ('test') — see guessStage().
            stage: jobDef.stage ?? guessStage(key, stagesOrder),
            script: jobDef.script,
            needs: parseNeeds(jobDef.needs),
            dependencies: jobDef.dependencies,
            when: jobDef.when,
            artifacts: jobDef.artifacts,
            variables: jobDef.variables,
            image: jobDef.image,
            services: jobDef.services,
            cache: jobDef.cache,
            only: jobDef.only,
            except: jobDef.except,
            rules: jobDef.rules,
            trigger: jobDef.trigger,
            parallel: jobDef.parallel,
            allow_failure: jobDef.allow_failure,
            retry: jobDef.retry,
            timeout: jobDef.timeout,
            environment: jobDef.environment,
            tags: jobDef.tags,
        };
        jobs.push(job);
    }
    return jobs;
}
|
|
89
|
+
/**
 * Parse `needs:` which can be a list of strings or a list of objects with
 * `job` and `artifacts` keys.
 *
 * @param {unknown} needs - Raw `needs:` value from the YAML document.
 * @returns {string[]|undefined} Job names, or undefined when not a list.
 */
function parseNeeds(needs) {
    if (!Array.isArray(needs)) {
        return undefined;
    }
    return needs.map((entry) => {
        if (typeof entry === 'string') {
            return entry;
        }
        const isJobObject = typeof entry === 'object' && entry !== null && 'job' in entry;
        return isJobObject ? entry.job : String(entry);
    });
}
|
|
103
|
+
/**
 * Extract stages array, defaulting if not defined.
 *
 * @param {object} doc - Parsed top-level pipeline mapping.
 * @returns {string[]} Declared stage order, or a copy of the GitLab defaults.
 */
function extractStages(doc) {
    const declared = doc.stages;
    if (Array.isArray(declared)) {
        return declared;
    }
    return [...DEFAULT_STAGES];
}
|
|
112
|
+
/**
 * Guess a job's stage based on its name.
 *
 * If the job name contains a stage name (case-insensitive), that stage wins;
 * otherwise fall back to the second stage (first real stage after .pre),
 * or 'test' when no second stage exists.
 *
 * @param {string} jobName - Name of the job.
 * @param {string[]} stages - Ordered stage names for this pipeline.
 * @returns {string} Best-guess stage name.
 */
function guessStage(jobName, stages) {
    const needle = jobName.toLowerCase();
    const matched = stages.find((stage) => needle.includes(stage.toLowerCase()));
    if (matched !== undefined) {
        return matched;
    }
    return stages[1] ?? 'test';
}
|
|
125
|
+
/**
 * Collect all variable definitions (global + per-job).
 *
 * @param {object} doc - Parsed top-level pipeline mapping.
 * @param {Array<object>} jobs - Jobs produced by extractJobs().
 * @returns {{global: Record<string,string>, perJob: Record<string, object>}}
 *   Global values (stringified, nullish → '') and a shallow copy of each
 *   job's non-empty `variables` block keyed by job name.
 */
function collectVariables(doc, jobs) {
    const global = {};
    const perJob = {};
    // Global variables
    const rawGlobals = doc.variables;
    const hasGlobals = rawGlobals && typeof rawGlobals === 'object' && !Array.isArray(rawGlobals);
    if (hasGlobals) {
        for (const [name, raw] of Object.entries(rawGlobals)) {
            global[name] = String(raw ?? '');
        }
    }
    // Per-job variables (only jobs that actually define some)
    for (const job of jobs) {
        const vars = job.variables;
        if (vars && Object.keys(vars).length > 0) {
            perJob[job.name] = { ...vars };
        }
    }
    return { global, perJob };
}
|
|
145
|
+
/**
 * Find all references to a variable name in the pipeline YAML string.
 *
 * Matches both `${NAME}` and `$NAME` (the latter only when not followed by
 * another identifier character, so `$FOO` does not match inside `$FOOBAR`).
 * Records at most one reference per line, classified as 'comment',
 * 'definition', or 'usage'.
 *
 * @param {string} content - Raw pipeline YAML text.
 * @param {string} varName - Variable name to search for.
 * @returns {Array<{location: string, line: number}>} References with
 *   1-based line numbers.
 */
function findVariableReferences(content, varName) {
    // Escape regex metacharacters so an unusual variable name (e.g. one
    // containing '(' or '.') cannot throw a SyntaxError or mis-match when
    // interpolated into the dynamically-built patterns below.
    const escaped = varName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const refs = [];
    const lines = content.split('\n');
    const patterns = [
        new RegExp(`\\$\\{${escaped}\\}`, 'g'),
        new RegExp(`\\$${escaped}(?![a-zA-Z0-9_])`, 'g'),
    ];
    for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        for (const pattern of patterns) {
            const matches = line.match(pattern);
            if (matches) {
                // Determine context: is this a variable definition or usage?
                const trimmed = line.trim();
                const location = trimmed.startsWith('#')
                    ? 'comment'
                    : trimmed.startsWith('variables:') || /^\w+:\s*$/.test(trimmed)
                        ? 'definition'
                        : 'usage';
                refs.push({ location, line: i + 1 });
                break; // at most one reference recorded per line
            }
        }
    }
    return refs;
}
|
|
174
|
+
/**
 * Collect all include directives from a parsed pipeline.
 *
 * Handles both shorthand string includes (remote URL, local path, template
 * name) and object includes (`local:`, `project:`/`file:`, `remote:`,
 * `template:`, `component:`).
 *
 * @param {object} doc - Parsed top-level pipeline mapping.
 * @returns {Array<{type: string, location: string, resolved: boolean}>}
 */
function extractIncludes(doc) {
    const rawIncludes = doc.include;
    if (!rawIncludes) {
        return [];
    }
    // Normalize a single include into a one-element list.
    const entries = Array.isArray(rawIncludes) ? rawIncludes : [rawIncludes];
    const includes = [];
    const push = (type, location) => includes.push({ type, location, resolved: true });
    for (const entry of entries) {
        if (typeof entry === 'string') {
            const isRemote = entry.startsWith('http://') || entry.startsWith('https://');
            const isLocal = ['~', '/', './', '../'].some((p) => entry.startsWith(p));
            push(isRemote ? 'remote' : isLocal ? 'local' : 'template', entry);
            continue;
        }
        if (typeof entry !== 'object' || entry === null) {
            continue;
        }
        if (entry.local) {
            push('local', entry.local);
        }
        else if (entry.project) {
            const fileSuffix = entry.file ? ` (${entry.file})` : '';
            push('project', `${entry.project}${fileSuffix}`);
        }
        else if (entry.remote) {
            push('remote', entry.remote);
        }
        else if (entry.template) {
            push('template', entry.template);
        }
        else if (entry.component) {
            push('component', entry.component);
        }
    }
    return includes;
}
|
|
214
|
+
/**
 * Detect patterns in a pipeline configuration.
 *
 * Scans the jobs for well-known CI/CD features (caching, artifacts,
 * parallelism, manual gates, ...) and returns the label of each feature
 * used by at least one job, in a fixed reporting order.
 *
 * @param {Array<object>} jobs - Jobs produced by extractJobs().
 * @returns {string[]} Labels of detected patterns.
 */
function detectPatterns(jobs) {
    // Each entry pairs the reported label with the predicate that detects
    // the pattern on a single job. Order defines the output order.
    const detectors = [
        ['caching', (j) => j.cache],
        ['artifacts', (j) => j.artifacts],
        ['services', (j) => j.services && j.services.length > 0],
        ['parallel-execution', (j) => j.parallel],
        ['conditional-rules', (j) => j.rules && j.rules.length > 0],
        ['only/except', (j) => (j.only && j.only.length > 0) || (j.except && j.except.length > 0)],
        ['manual-approval', (j) => j.when === 'manual'],
        ['child-pipelines', (j) => j.trigger],
        ['custom-images', (j) => j.image],
        ['retry-mechanism', (j) => j.retry],
        ['custom-timeouts', (j) => j.timeout],
        ['environments', (j) => j.environment],
        ['explicit-dependencies', (j) => j.dependencies && j.dependencies.length > 0],
    ];
    const patterns = [];
    for (const [label, predicate] of detectors) {
        if (jobs.some(predicate)) {
            patterns.push(label);
        }
    }
    return patterns;
}
|
|
260
|
+
/**
 * Build a Mermaid flowchart for job dependencies.
 *
 * Renders one subgraph per non-empty stage (in stage order), then the
 * `needs` edges for the jobs in that stage. Bottleneck jobs get the
 * `:::bottleneck` class, whose style is appended at the end.
 *
 * @param {Array<object>} jobs - Jobs produced by extractJobs().
 * @param {string[]} bottlenecks - Names of jobs flagged as bottlenecks.
 * @param {string[]} stages - Ordered stage names.
 * @returns {string} Mermaid `graph LR` source.
 */
function buildMermaidGraph(jobs, bottlenecks, stages) {
    const out = ['graph LR'];
    // Map each job name to its `needs` list (only when non-empty).
    const needsMap = new Map();
    for (const { name, needs } of jobs) {
        if (needs && needs.length > 0) {
            needsMap.set(name, needs);
        }
    }
    // Bucket job names by stage.
    const stageGroups = new Map();
    for (const { name, stage } of jobs) {
        if (!stageGroups.has(stage)) {
            stageGroups.set(stage, []);
        }
        stageGroups.get(stage).push(name);
    }
    // One subgraph per stage that actually has jobs, followed by its edges.
    for (const stage of stages) {
        const stageJobs = stageGroups.get(stage);
        if (!stageJobs || stageJobs.length === 0) {
            continue;
        }
        out.push(`  subgraph ${stage}[${stage}]`);
        for (const jobName of stageJobs) {
            const node = bottlenecks.includes(jobName)
                ? `${jobName}[${jobName}]:::bottleneck`
                : `${jobName}[${jobName}]`;
            out.push(`    ${node}`);
        }
        out.push('  end');
        for (const jobName of stageJobs) {
            for (const need of needsMap.get(jobName) ?? []) {
                out.push(`    ${need} --> ${jobName}`);
            }
        }
    }
    // Style definition only needed when a bottleneck node was emitted.
    if (bottlenecks.length > 0) {
        out.push('');
        out.push('  classDef bottleneck fill:#ffcccc,stroke:#ff0000,stroke-width:2px');
    }
    return out.join('\n');
}
|
|
310
|
+
/**
 * Build a Mermaid flowchart for stages visualization.
 *
 * Emits one subgraph per non-empty stage (jobs in the same stage are
 * implicitly parallel), then links consecutive non-empty stages by an edge
 * between their first jobs.
 *
 * @param {string[]} stages - Ordered stage names.
 * @param {Map<string, string[]>} stageJobs - Job names grouped by stage.
 * @returns {string} Mermaid `graph LR` source.
 */
function buildMermaidStages(stages, stageJobs) {
    const out = ['graph LR'];
    for (const stage of stages) {
        const names = stageJobs.get(stage);
        if (!names || names.length === 0) {
            continue;
        }
        out.push(`  subgraph ${stage}[${stage}]`);
        // Multiple jobs in one stage render as siblings (parallel execution).
        for (const name of names) {
            out.push(`    ${name}[${name}]`);
        }
        out.push('  end');
    }
    // Chain stages sequentially via their first jobs.
    for (let i = 0; i + 1 < stages.length; i++) {
        const current = stageJobs.get(stages[i]);
        const next = stageJobs.get(stages[i + 1]);
        if (current?.length && next?.length) {
            out.push(`  ${current[0]} --> ${next[0]}`);
        }
    }
    return out.join('\n');
}
|
|
342
|
+
/**
 * Build a tree visualization string for includes.
 *
 * @param {Array<{type: string, location: string, resolved: boolean}>} includes
 * @returns {string} Multi-line tree, or a placeholder when empty.
 */
function buildIncludeTree(includes) {
    if (includes.length === 0) {
        return 'No include directives found.';
    }
    const rows = includes.map((inc, idx) => {
        const isLast = idx === includes.length - 1;
        const branch = isLast ? '  └── ' : '  ├── ';
        const warning = inc.resolved === false ? ' ⚠ unresolvable' : '';
        return `${branch}[${inc.type}] ${inc.location}${warning}`;
    });
    return ['Include Chain:', ...rows].join('\n');
}
|
|
358
|
+
/**
 * Compute Levenshtein distance between two strings for variable name suggestions.
 *
 * Uses a rolling single-row dynamic program (O(min memory) instead of the
 * full matrix); results are identical to the textbook DP.
 *
 * @param {string} a - First string.
 * @param {string} b - Second string.
 * @returns {number} Minimum number of single-character edits.
 */
function levenshtein(a, b) {
    let prev = Array.from({ length: b.length + 1 }, (_, j) => j);
    for (let i = 1; i <= a.length; i++) {
        const curr = [i];
        for (let j = 1; j <= b.length; j++) {
            curr[j] = a[i - 1] === b[j - 1]
                ? prev[j - 1]
                : 1 + Math.min(prev[j], curr[j - 1], prev[j - 1]);
        }
        prev = curr;
    }
    return prev[b.length];
}
|
|
378
|
+
/**
 * Suggest up to three known variable names close to `target`.
 *
 * Case-insensitive Levenshtein distance of at most 3, nearest first.
 *
 * @param {string} target - Misspelled / unknown variable name.
 * @param {string[]} knownVars - Candidate variable names.
 * @returns {string[]} Up to three closest candidate names.
 */
function suggestVariables(target, knownVars) {
    const needle = target.toLowerCase();
    const scored = knownVars.map((name) => ({
        name,
        dist: levenshtein(needle, name.toLowerCase()),
    }));
    const close = scored.filter(({ dist }) => dist <= 3);
    close.sort((x, y) => x.dist - y.dist);
    return close.slice(0, 3).map(({ name }) => name);
}
|
|
386
|
+
// ──────────────────────────────────────────────
|
|
387
|
+
// Formatters
|
|
388
|
+
// ──────────────────────────────────────────────
|
|
389
|
+
// ──────────────────────────────────────────────
|
|
390
|
+
// Handlers
|
|
391
|
+
// ──────────────────────────────────────────────
|
|
392
|
+
/**
 * Handle `pipeline explain --jobs <list>`.
 *
 * Builds a dependency graph (Mermaid) for the selected jobs, flags jobs
 * blocking two or more others as potential bottlenecks, and prints per-job
 * details. With `options.json`, returns the same data as JSON.
 *
 * @param {string} filePath - Path to the pipeline YAML file.
 * @param {object} _config - CLI configuration (unused by this handler).
 * @param {{ jobs?: string, json?: boolean }} [options] - `jobs` is a
 *   comma-separated job list or 'all'; `json` selects JSON output.
 * @returns {Promise<{exitCode: number, output: string}>} Exit code 1 on
 *   missing jobs or any parse/read error, 0 otherwise.
 */
export async function handlePipelineExplain(filePath, _config, options = {}) {
    try {
        const doc = readAndParsePipeline(filePath);
        const allJobs = extractJobs(doc);
        const stages = extractStages(doc);
        // Filter to requested jobs
        let targetJobs;
        if (options.jobs && options.jobs !== 'all') {
            const jobNames = options.jobs.split(',').map((j) => j.trim());
            targetJobs = jobNames.map((name) => {
                const job = allJobs.find((j) => j.name === name);
                if (!job)
                    throw new Error(`No job '${name}' found in pipeline configuration`);
                return job;
            });
        }
        else {
            targetJobs = allJobs;
        }
        if (targetJobs.length === 0) {
            return {
                exitCode: 1,
                output: 'No jobs found in pipeline configuration',
            };
        }
        // Collect dependency edges: `needs` edges first, then `dependencies`
        // (artifact-only) edges that are not already covered by a needs edge.
        const edges = [];
        const dependentMap = new Map(); // job -> jobs that depend on it
        for (const job of targetJobs) {
            if (job.needs) {
                for (const need of job.needs) {
                    edges.push({ from: need, to: job.name, artifacts: false });
                    if (!dependentMap.has(need))
                        dependentMap.set(need, []);
                    dependentMap.get(need).push(job.name);
                }
            }
            if (job.dependencies) {
                for (const dep of job.dependencies) {
                    // Check if edge already exists
                    const exists = edges.some((e) => e.from === dep && e.to === job.name);
                    if (!exists) {
                        edges.push({ from: dep, to: job.name, artifacts: true });
                    }
                }
            }
        }
        // Detect bottlenecks: jobs with the most dependents.
        // NOTE(review): only `needs` edges feed dependentMap, so
        // `dependencies`-only fan-out is not counted — confirm intended.
        const bottleneckThreshold = 2;
        const bottlenecks = [];
        for (const [jobName, dependents] of dependentMap) {
            if (dependents.length >= bottleneckThreshold) {
                bottlenecks.push(jobName);
            }
        }
        if (options.json) {
            return {
                exitCode: 0,
                output: JSON.stringify({
                    jobs: targetJobs.map((j) => ({
                        name: j.name,
                        stage: j.stage,
                        needs: j.needs ?? [],
                        when: j.when ?? 'always',
                        artifacts: j.artifacts ? true : false,
                    })),
                    dependencies: edges.map((e) => ({
                        from: e.from,
                        to: e.to,
                        artifact_dependency: e.artifacts,
                    })),
                    bottlenecks: bottlenecks.map((name) => ({
                        job: name,
                        blocked_jobs: dependentMap.get(name) ?? [],
                    })),
                    stages,
                }, null, 2),
            };
        }
        // Build text output
        const lines = [];
        // Dependency graph in Mermaid
        lines.push('=== Dependency Graph ===');
        lines.push('');
        const mermaid = buildMermaidGraph(targetJobs, bottlenecks, stages);
        lines.push(mermaid);
        // Bottleneck info
        if (bottlenecks.length > 0) {
            lines.push('');
            lines.push('=== Potential Bottlenecks ===');
            for (const name of bottlenecks) {
                const blocked = dependentMap.get(name) ?? [];
                lines.push(`  ${name}: blocks ${blocked.length} job(s) (${blocked.join(', ')})`);
                lines.push('    💡 Consider splitting into parallel jobs or using caching');
            }
        }
        // Job details
        lines.push('');
        lines.push('=== Job Details ===');
        for (const job of targetJobs) {
            lines.push(`  ${job.name}`);
            lines.push(`    Stage: ${job.stage}`);
            if (job.needs && job.needs.length > 0) {
                lines.push(`    Needs: ${job.needs.join(', ')}`);
            }
            if (job.dependencies && job.dependencies.length > 0) {
                lines.push(`    Dependencies: ${job.dependencies.join(', ')}`);
            }
            lines.push(`    When: ${job.when ?? 'on_success'}`);
            if (job.artifacts) {
                const paths = job.artifacts.paths;
                if (paths)
                    lines.push(`    Artifacts: ${paths.join(', ')}`);
            }
            // Parallel info
            if (job.parallel) {
                lines.push(`    Parallel: ${JSON.stringify(job.parallel)}`);
            }
        }
        return { exitCode: 0, output: lines.join('\n') };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        if (options.json) {
            return {
                exitCode: 1,
                output: JSON.stringify({ success: false, error: { message } }),
            };
        }
        return { exitCode: 1, output: `Error: ${message}` };
    }
}
|
|
526
|
+
/**
|
|
527
|
+
* Handle `pipeline trace --var <name>`.
|
|
528
|
+
*/
|
|
529
|
+
export async function handlePipelineTrace(filePath, _config, options = {}) {
|
|
530
|
+
try {
|
|
531
|
+
const varName = options.var;
|
|
532
|
+
if (!varName) {
|
|
533
|
+
return { exitCode: 1, output: 'No variable name specified. Use --var <name>.' };
|
|
534
|
+
}
|
|
535
|
+
const content = readPipelineFile(filePath);
|
|
536
|
+
const doc = parseYaml(content);
|
|
537
|
+
const jobs = extractJobs(doc);
|
|
538
|
+
const { global, perJob } = collectVariables(doc, jobs);
|
|
539
|
+
// Check if it's a GitLab predefined variable
|
|
540
|
+
const predefinedPrefixes = ['CI_', 'GITLAB_'];
|
|
541
|
+
const isPredefined = predefinedPrefixes.some((p) => varName.startsWith(p));
|
|
542
|
+
// Check if it's defined globally
|
|
543
|
+
const globalDefined = varName in global;
|
|
544
|
+
const globalValue = global[varName];
|
|
545
|
+
// Check per-job overrides
|
|
546
|
+
const overrides = [];
|
|
547
|
+
for (const [jobName, vars] of Object.entries(perJob)) {
|
|
548
|
+
if (varName in vars) {
|
|
549
|
+
overrides.push({ job: jobName, value: vars[varName] });
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
// Find references
|
|
553
|
+
const refs = findVariableReferences(content, varName);
|
|
554
|
+
// Collect all known variable names for suggestions
|
|
555
|
+
const allKnownVars = [
|
|
556
|
+
...Object.keys(global),
|
|
557
|
+
...Object.keys(perJob).flatMap((j) => Object.keys(perJob[j])),
|
|
558
|
+
];
|
|
559
|
+
const uniqueKnownVars = [...new Set(allKnownVars)];
|
|
560
|
+
const isDefined = globalDefined || overrides.length > 0 || isPredefined;
|
|
561
|
+
if (options.json) {
|
|
562
|
+
return {
|
|
563
|
+
exitCode: isDefined ? 0 : 0, // exit 0 even for undefined, it's informational
|
|
564
|
+
output: JSON.stringify({
|
|
565
|
+
variable: varName,
|
|
566
|
+
defined: isDefined,
|
|
567
|
+
predefined: isPredefined,
|
|
568
|
+
global_value: globalDefined ? globalValue : null,
|
|
569
|
+
overrides: overrides.map((o) => ({ job: o.job, value: o.value })),
|
|
570
|
+
references: refs.map((r) => ({ location: r.location, line: r.line })),
|
|
571
|
+
suggestions: isDefined ? [] : suggestVariables(varName, uniqueKnownVars),
|
|
572
|
+
}, null, 2),
|
|
573
|
+
};
|
|
574
|
+
}
|
|
575
|
+
const lines = [];
|
|
576
|
+
if (isPredefined) {
|
|
577
|
+
lines.push(`Variable: ${varName}`);
|
|
578
|
+
lines.push(`Type: GitLab predefined variable`);
|
|
579
|
+
lines.push(`Description: Automatically set by GitLab CI/CD`);
|
|
580
|
+
if (refs.length > 0) {
|
|
581
|
+
lines.push('');
|
|
582
|
+
lines.push('References:');
|
|
583
|
+
for (const ref of refs) {
|
|
584
|
+
lines.push(` Line ${ref.line}: ${ref.location}`);
|
|
585
|
+
}
|
|
586
|
+
}
|
|
587
|
+
else {
|
|
588
|
+
lines.push(`No direct references found in pipeline.`);
|
|
589
|
+
}
|
|
590
|
+
lines.push('');
|
|
591
|
+
lines.push(`Effective value depends on pipeline context and may be overridden.`);
|
|
592
|
+
}
|
|
593
|
+
else if (globalDefined || overrides.length > 0) {
|
|
594
|
+
lines.push(`Variable: ${varName}`);
|
|
595
|
+
if (globalDefined) {
|
|
596
|
+
lines.push(`Global definition: ${globalValue}`);
|
|
597
|
+
}
|
|
598
|
+
if (overrides.length > 0) {
|
|
599
|
+
lines.push('');
|
|
600
|
+
lines.push('Job overrides:');
|
|
601
|
+
for (const ov of overrides) {
|
|
602
|
+
lines.push(` ${ov.job}: ${ov.value}`);
|
|
603
|
+
}
|
|
604
|
+
}
|
|
605
|
+
if (refs.length > 0) {
|
|
606
|
+
lines.push('');
|
|
607
|
+
lines.push('References:');
|
|
608
|
+
for (const ref of refs) {
|
|
609
|
+
lines.push(` Line ${ref.line}: ${ref.location}`);
|
|
610
|
+
}
|
|
611
|
+
}
|
|
612
|
+
// Show effective values per job
|
|
613
|
+
if (overrides.length > 0 || globalDefined) {
|
|
614
|
+
lines.push('');
|
|
615
|
+
lines.push('Effective values:');
|
|
616
|
+
for (const job of jobs) {
|
|
617
|
+
// Find effective value: job override or global
|
|
618
|
+
const jobOverride = overrides.find((o) => o.job === job.name);
|
|
619
|
+
const effective = jobOverride ? jobOverride.value : globalValue;
|
|
620
|
+
lines.push(` ${job.name}: ${effective}`);
|
|
621
|
+
}
|
|
622
|
+
}
|
|
623
|
+
}
|
|
624
|
+
else {
|
|
625
|
+
lines.push(`Variable '${varName}' is not defined in this pipeline`);
|
|
626
|
+
const suggestions = suggestVariables(varName, uniqueKnownVars);
|
|
627
|
+
if (suggestions.length > 0) {
|
|
628
|
+
lines.push(`Did you mean: ${suggestions.join(', ')}?`);
|
|
629
|
+
}
|
|
630
|
+
}
|
|
631
|
+
return { exitCode: 0, output: lines.join('\n') };
|
|
632
|
+
}
|
|
633
|
+
catch (err) {
|
|
634
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
635
|
+
if (options.json) {
|
|
636
|
+
return {
|
|
637
|
+
exitCode: 1,
|
|
638
|
+
output: JSON.stringify({ success: false, error: { message } }),
|
|
639
|
+
};
|
|
640
|
+
}
|
|
641
|
+
return { exitCode: 1, output: `Error: ${message}` };
|
|
642
|
+
}
|
|
643
|
+
}
|
|
644
|
+
/**
 * Handle `pipeline stages [--mermaid]`.
 *
 * Lists each non-empty stage in order with its jobs and whether they run in
 * parallel. `--mermaid` emits a Mermaid diagram instead; `--json` emits the
 * same data as JSON (and takes precedence over `--mermaid`).
 *
 * @param {string} filePath - Path to the pipeline YAML file.
 * @param {object} _config - CLI configuration (unused by this handler).
 * @param {{ json?: boolean, mermaid?: boolean }} [options]
 * @returns {Promise<{exitCode: number, output: string}>} Exit code 1 only
 *   on a parse/read error.
 */
export async function handlePipelineStages(filePath, _config, options = {}) {
    try {
        const doc = readAndParsePipeline(filePath);
        const stages = extractStages(doc);
        const jobs = extractJobs(doc);
        // Group job names by stage, seeding every declared stage so that
        // order is preserved even before any job is assigned to it.
        const stageJobMap = new Map();
        for (const stage of stages) {
            stageJobMap.set(stage, []);
        }
        for (const job of jobs) {
            if (!stageJobMap.has(job.stage)) {
                stageJobMap.set(job.stage, []);
            }
            stageJobMap.get(job.stage).push(job.name);
        }
        // Filter out empty stages
        const nonEmptyStages = stages.filter((s) => (stageJobMap.get(s)?.length ?? 0) > 0);
        if (options.json) {
            const stageData = nonEmptyStages.map((stage, idx) => ({
                name: stage,
                position: idx + 1,
                jobs: stageJobMap.get(stage) ?? [],
                // More than one job in a stage runs in parallel
                parallel: (stageJobMap.get(stage)?.length ?? 0) > 1,
            }));
            return {
                exitCode: 0,
                output: JSON.stringify({
                    stages: stageData,
                    total_stages: nonEmptyStages.length,
                    // True when the pipeline declared no `stages:` key at all
                    default_stages: !('stages' in doc),
                    total_jobs: jobs.length,
                }, null, 2),
            };
        }
        if (options.mermaid) {
            const mermaid = buildMermaidStages(nonEmptyStages, stageJobMap);
            return { exitCode: 0, output: mermaid };
        }
        // Text output
        const lines = [];
        if (!('stages' in doc)) {
            lines.push('Stages (using GitLab defaults):');
        }
        else {
            lines.push('Stages:');
        }
        lines.push('');
        for (let i = 0; i < nonEmptyStages.length; i++) {
            const stage = nonEmptyStages[i];
            const stageJobs = stageJobMap.get(stage) ?? [];
            const parallel = stageJobs.length > 1;
            const executionPlan = parallel ? 'parallel' : 'sequential';
            lines.push(`  ${i + 1}. ${stage}`);
            lines.push(`     Jobs: ${stageJobs.join(', ')}`);
            lines.push(`     Execution: ${executionPlan}`);
            // Blank separator between stages (but not after the last one)
            if (i < nonEmptyStages.length - 1) {
                lines.push('');
            }
        }
        return { exitCode: 0, output: lines.join('\n') };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        if (options.json) {
            return {
                exitCode: 1,
                output: JSON.stringify({ success: false, error: { message } }),
            };
        }
        return { exitCode: 1, output: `Error: ${message}` };
    }
}
|
|
719
|
+
/**
 * Handle `pipeline includes`.
 *
 * Reads the pipeline file, lists its `include:` directives, flags duplicate
 * includes (reported as a circular dependency) and remote includes whose URL
 * looks unresolvable, then renders the result as JSON or as a text tree.
 *
 * @param {string} filePath - Path to the pipeline YAML file.
 * @param {object} _config - CLI configuration (unused by this handler).
 * @param {{ json?: boolean }} [options] - Output options.
 * @returns {Promise<{ exitCode: number, output: string }>} exitCode is 1 when
 *   a circular or unresolvable include is found, or on any read/parse error;
 *   otherwise 0.
 */
export async function handlePipelineIncludes(filePath, _config, options = {}) {
    try {
        const doc = readAndParsePipeline(filePath);
        const includes = extractIncludes(doc);
        // Detect circular dependencies by tracking visited include locations:
        // a repeated location is treated as a cycle.
        const visited = new Set();
        let hasCircular = false;
        const circularPath = [];
        for (const inc of includes) {
            if (visited.has(inc.location)) {
                hasCircular = true;
                circularPath.push(inc.location);
            }
            visited.add(inc.location);
        }
        // Heuristic for unresolvable remotes: URLs containing markers that
        // suggest an unreachable host. The same predicate is used for both
        // detection and display — previously the display path re-filtered
        // with a narrower startsWith('https://unreachable') check, so a
        // pipeline could be flagged unresolvable (exit code 1) while the
        // text report listed no offending include.
        const UNREACHABLE_PATTERNS = ['unreachable', 'nonexistent', 'invalid'];
        const isUnresolvable = (i) => i.type === 'remote' &&
            UNREACHABLE_PATTERNS.some((p) => i.location.includes(p));
        const hasUnresolvable = includes.some(isUnresolvable);
        const exitCode = hasCircular || hasUnresolvable ? 1 : 0;
        if (options.json) {
            return {
                exitCode,
                output: JSON.stringify({
                    includes: includes.map((i) => ({
                        type: i.type,
                        location: i.location,
                        resolved: i.resolved,
                    })),
                    has_circular_dependency: hasCircular,
                    circular_path: circularPath,
                    has_unresolvable: hasUnresolvable,
                }, null, 2),
            };
        }
        const lines = [];
        const tree = buildIncludeTree(includes);
        lines.push(tree);
        if (hasCircular) {
            lines.push('');
            lines.push('⚠ Circular dependency detected:');
            lines.push(` ${circularPath.join(' → ')}`);
        }
        if (hasUnresolvable) {
            // Guaranteed non-empty: hasUnresolvable derives from the same predicate.
            const unresolvable = includes.filter(isUnresolvable);
            lines.push('');
            lines.push('⚠ Unresolvable includes:');
            for (const inc of unresolvable) {
                lines.push(` ${inc.location}`);
            }
            lines.push(' The rest of the include chain has been resolved.');
        }
        if (includes.length === 0) {
            lines.push('No include directives found in this pipeline.');
        }
        return { exitCode, output: lines.join('\n') };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        if (options.json) {
            return {
                exitCode: 1,
                output: JSON.stringify({ success: false, error: { message } }),
            };
        }
        return { exitCode: 1, output: `Error: ${message}` };
    }
}
|
|
791
|
+
/**
 * Handle `pipeline summary`.
 *
 * Produces a high-level overview of the pipeline — stage/job counts, global
 * variables, include sources, detected patterns, and execution strategy —
 * rendered as JSON or as human-readable text.
 *
 * @param {string} filePath - Path to the pipeline YAML file.
 * @param {object} _config - CLI configuration (unused by this handler).
 * @param {{ json?: boolean }} [options] - Output options.
 * @returns {Promise<{ exitCode: number, output: string }>} exitCode 0 on
 *   success, 1 on any read/parse error.
 */
export async function handlePipelineSummary(filePath, _config, options = {}) {
    try {
        const doc = readAndParsePipeline(filePath);
        const jobs = extractJobs(doc);
        const stages = extractStages(doc);
        const { global: globalVars } = collectVariables(doc, jobs);
        const includes = extractIncludes(doc);
        const patterns = detectPatterns(jobs);
        // Compute populated stages once. Previously the identical
        // stages.filter(...jobs.some...) scan was repeated three times
        // (twice in the JSON payload, once for text output).
        const usedStages = new Set(jobs.map((j) => j.stage));
        const nonEmptyStages = stages.filter((s) => usedStages.has(s));
        if (options.json) {
            return {
                exitCode: 0,
                output: JSON.stringify({
                    summary: {
                        total_stages: nonEmptyStages.length,
                        total_jobs: jobs.length,
                        variables: {
                            global: Object.keys(globalVars),
                            predefined_used: [], // tracked in trace command
                        },
                        includes: includes.map((i) => ({ type: i.type, location: i.location })),
                        patterns,
                        job_names: jobs.map((j) => j.name),
                        stages: nonEmptyStages,
                    },
                }, null, 2),
            };
        }
        const lines = [];
        lines.push('=== Pipeline Summary ===');
        lines.push('');
        lines.push(`Stages: ${nonEmptyStages.length}`);
        lines.push(`Jobs: ${jobs.length}`);
        lines.push('');
        // Stage overview: one line per populated stage, marking parallel stages.
        lines.push('Stage Overview:');
        for (const stage of nonEmptyStages) {
            const stageJobs = jobs.filter((j) => j.stage === stage).map((j) => j.name);
            const isParallel = stageJobs.length > 1;
            lines.push(` ${stage}: ${stageJobs.join(', ')}${isParallel ? ' (parallel)' : ''}`);
        }
        lines.push('');
        // Global variables section (omitted when none are defined).
        if (Object.keys(globalVars).length > 0) {
            lines.push('Global Variables:');
            for (const [k, v] of Object.entries(globalVars)) {
                lines.push(` ${k}: ${v}`);
            }
            lines.push('');
        }
        // Include sources section (omitted when the pipeline has no includes).
        if (includes.length > 0) {
            lines.push('Include Sources:');
            for (const inc of includes) {
                lines.push(` [${inc.type}] ${inc.location}`);
            }
            lines.push('');
        }
        // Detected patterns section (omitted when nothing was detected).
        if (patterns.length > 0) {
            lines.push('Detected Patterns:');
            for (const pattern of patterns) {
                lines.push(` ✓ ${pattern}`);
            }
            lines.push('');
        }
        // Execution strategy: parallel if any stage runs more than one job.
        const hasParallelStage = nonEmptyStages.some((s) => jobs.filter((j) => j.stage === s).length > 1);
        lines.push('Execution Strategy:');
        lines.push(` ${hasParallelStage ? 'Parallel stages present' : 'Sequential execution'}`);
        if (nonEmptyStages.length > 1) {
            lines.push(` ${nonEmptyStages.length} stages run in order`);
        }
        return { exitCode: 0, output: lines.join('\n') };
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        if (options.json) {
            return {
                exitCode: 1,
                output: JSON.stringify({ success: false, error: { message } }),
            };
        }
        return { exitCode: 1, output: `Error: ${message}` };
    }
}
|
|
880
|
+
//# sourceMappingURL=pipeline.js.map
|