fireflyy 3.0.11 → 4.0.0-dev.fd79cb3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -418
- package/assets/firefly.schema.json +118 -112
- package/dist/dry-run-BfYCtldz.js +38 -0
- package/dist/filesystem.service-DdVwnqoa.js +57 -0
- package/dist/git.service-DarjfyXF.js +587 -0
- package/dist/index.d.ts +43 -23
- package/dist/index.js +20 -1
- package/dist/main.js +110 -16
- package/dist/package-json.service-QN7SzRTt.js +70 -0
- package/dist/program-DSPj4l5A.js +3457 -0
- package/dist/result.constructors-C9M1MP3_.js +261 -0
- package/dist/result.utilities-B03Jkhlx.js +32 -0
- package/dist/schema.utilities-BGd9t1wm.js +60 -0
- package/package.json +88 -84
|
@@ -0,0 +1,3457 @@
|
|
|
1
|
+
import { n as RuntimeEnv, t as logger } from "./main.js";
|
|
2
|
+
import { _ as validationError, a as conflictErrAsync, c as notFoundErrAsync, d as validationErrAsync, f as conflictError, h as notFoundError, i as FireflyOkAsync, l as timeoutErrAsync, m as failedError, n as FireflyErrAsync, r as FireflyOk, s as invalidErr, t as FireflyErr, u as validationErr, v as wrapErrorMessage } from "./result.constructors-C9M1MP3_.js";
|
|
3
|
+
import { n as wrapPromise, r as zip3Async, t as ensureNotAsync } from "./result.utilities-B03Jkhlx.js";
|
|
4
|
+
import { n as parseSchema, t as formatZodErrors } from "./schema.utilities-BGd9t1wm.js";
|
|
5
|
+
import { LogLevels } from "consola";
|
|
6
|
+
import { colors } from "consola/utils";
|
|
7
|
+
import { Command } from "commander";
|
|
8
|
+
import { loadConfig } from "c12";
|
|
9
|
+
import { Result, ResultAsync, err, ok } from "neverthrow";
|
|
10
|
+
import z$1 from "zod";
|
|
11
|
+
import { parse } from "semver";
|
|
12
|
+
import * as path from "path";
|
|
13
|
+
|
|
14
|
+
//#region src/core/environment/debug-flags.ts
|
|
15
|
+
/**
 * Environment-driven debug switches for development and troubleshooting.
 *
 * Every flag reads an environment variable prefixed with `FIREFLY_DEBUG_`
 * and coerces it to a boolean. The last two flags additionally trim the
 * value first, so a whitespace-only value counts as "off" for them.
 *
 * @example
 * ```typescript
 * if (DebugFlags.showRawError) {
 *   logger.error(parseResult.error);
 * }
 * ```
 */
var DebugFlags = class {
  /**
   * Show raw Zod validation errors for configuration parsing.
   * Helps diagnose configuration schema issues at a granular level.
   */
  static get showRawError() {
    const value = process.env.FIREFLY_DEBUG_SHOW_RAW_ERROR;
    return Boolean(value);
  }
  /**
   * Log the loaded configuration file contents, to inspect what values
   * are actually being read from config files.
   */
  static get showFileConfig() {
    const value = process.env.FIREFLY_DEBUG_SHOW_FILE_CONFIG;
    return Boolean(value);
  }
  /**
   * Display task graph statistics (dependencies, execution order, graph
   * structure) during release execution, for debugging workflow issues.
   */
  static get showTaskGraphStats() {
    const value = process.env.FIREFLY_DEBUG_SHOW_TASK_GRAPH_STATS;
    return Boolean(value);
  }
  /**
   * Prevent truncation of release notes in GitHub CLI logs.
   * By default release notes are truncated to avoid log pollution.
   */
  static get dontTruncateReleaseNotes() {
    const value = process.env.FIREFLY_DEBUG_DONT_TRUNCATE_RELEASE_NOTES;
    return Boolean(value?.trim());
  }
  /**
   * Prevent redaction of sensitive GitHub CLI arguments (tokens,
   * passwords, etc.) in logs.
   *
   * WARNING: Use with caution as this may expose sensitive information.
   */
  static get dontRedactGithubCliArgs() {
    const value = process.env.FIREFLY_DEBUG_DONT_REDACT_GITHUB_CLI_ARGS;
    return Boolean(value?.trim());
  }
};
|
|
76
|
+
|
|
77
|
+
//#endregion
|
|
78
|
+
//#region src/cli/config/config.loader.ts
|
|
79
|
+
/**
 * Loads and resolves Firefly configuration from files.
 *
 * Supports multiple config file formats:
 * - .js, .ts, .mjs, .cjs, .mts, .cts, .json, .jsonrc, .json5, .yaml, .yml, .toml
 *
 * Configuration is merged in the following order (later overrides earlier):
 * 1. Default values from schemas
 * 2. Config file values
 * 3. CLI option overrides (handled by commander.ts)
 *
 * @example
 * ```ts
 * const loader = new ConfigLoader({
 *   commandName: "release",
 *   configFile: "./custom.config.ts",
 * });
 *
 * const configResult = await loader.load();
 * if (configResult.isOk()) {
 *   console.log(configResult.value);
 * }
 * ```
 */
var ConfigLoader = class {
  /**
   * @param options - Loader options: `cwd`, `configFile`, `commandName`
   */
  constructor(options = {}) {
    this.options = options;
  }
  /**
   * Loads and validates the configuration.
   *
   * @returns Async result containing the merged configuration or an error
   */
  load() {
    const { cwd = process.cwd(), configFile } = this.options;
    // c12 resolves the file; "firefly.config" is the conventional base name.
    const loading = loadConfig({
      name: "firefly",
      cwd,
      configFile: configFile || "firefly.config",
      packageJson: false
    });
    return wrapPromise(loading).andThen((result) => {
      this.logConfigFile(result.configFile);
      if (DebugFlags.showFileConfig) {
        logger.verbose(JSON.stringify(result.config, null, 2));
      }
      const merged = this.extractCommandConfig(result.config ?? {});
      return FireflyOkAsync(merged);
    });
  }
  /**
   * Logs the resolved config file path, unless it is the default base name.
   *
   * @param configFile - The path reported by the config loader
   */
  logConfigFile(configFile) {
    const isCustomFile = configFile && configFile !== "firefly.config";
    if (isCustomFile) {
      logger.info(`Using firefly config: ${colors.underline(configFile)}`);
    }
  }
  /**
   * Extracts command-specific configuration from the full config.
   *
   * If a commandName is set and the config carries a nested object under
   * that key (e.g. `release: { ... }`), the nested values are merged over
   * the root config and the nested key itself is removed.
   *
   * @param config - The full runtime configuration
   * @returns The extracted and merged configuration
   */
  extractCommandConfig(config) {
    const { commandName } = this.options;
    if (!commandName) return config;
    const nested = config[commandName];
    const isNestedObject = nested && typeof nested === "object";
    if (!isNestedObject) return config;
    const root = { ...config };
    delete root[commandName];
    // Nested (command-specific) values win over root-level ones.
    return {
      ...root,
      ...nested
    };
  }
};
|
|
150
|
+
|
|
151
|
+
//#endregion
|
|
152
|
+
//#region src/cli/options/options.utilities.ts
|
|
153
|
+
/**
 * Compound words that must survive kebab-casing as single units.
 * Typically brand names or technical terms whose internal capitals
 * should not produce an extra hyphen.
 */
const COMPOUND_WORDS = [
  "GitHub",
  "GitLab",
  "BitBucket"
];
/**
 * Converts a camelCase string to kebab-case.
 *
 * Compound words listed in COMPOUND_WORDS are lowercased as a whole
 * first, so e.g. "skipGitHubRelease" becomes "skip-github-release"
 * rather than "skip-git-hub-release".
 *
 * @param str - The camelCase string to convert
 * @returns The kebab-case equivalent
 *
 * @example
 * ```ts
 * camelToKebab("bumpStrategy") // "bump-strategy"
 * camelToKebab("skipGitHubRelease") // "skip-github-release"
 * camelToKebab("skipGitLabRelease") // "skip-gitlab-release"
 * ```
 */
function camelToKebab(str) {
  // Collapse each compound word to lowercase, inserting a hyphen when it
  // follows a lowercase character (i.e. sits mid-identifier).
  const collapsed = COMPOUND_WORDS.reduce(
    (acc, word) =>
      acc
        .replace(new RegExp(`([a-z])${word}`, "g"), `$1-${word.toLowerCase()}`)
        .replace(new RegExp(`^${word}`, "g"), word.toLowerCase()),
    str
  );
  // Standard camelCase -> kebab-case split, then normalize underscores.
  return collapsed
    .replace(/([a-z0-9])([A-Z])/g, "$1-$2")
    .toLowerCase()
    .replace(/_/g, "-");
}
|
|
186
|
+
|
|
187
|
+
//#endregion
|
|
188
|
+
//#region src/cli/options/options.builder.ts
|
|
189
|
+
/**
 * Builds and registers Commander.js CLI options from Zod schemas.
 *
 * Automatically converts Zod schema definitions into Commander options with:
 * - Appropriate type parsing (string, number, enum, boolean)
 * - Default value extraction
 * - Description from schema metadata
 * - Shorthand aliases for common options
 *
 * @example
 * ```ts
 * const builder = new OptionsBuilder();
 * builder.registerGlobalOptions(program);
 * builder.registerCommandOptions(releaseCmd, ReleaseConfigSchema);
 * ```
 */
var OptionsBuilder = class {
  /**
   * Mapping of schema keys to their shorthand CLI flags.
   * NOTE(review): "bumpStrategy" maps to "bt" rather than "bs" — presumably
   * intentional (avoiding a collision?); confirm before changing.
   */
  shorthandMap = new Map([["bumpStrategy", "bt"], ["releaseType", "rt"]]);
  /**
   * Fields that are handled by global options and should be skipped for command options.
   * Must stay in sync with the flags created in registerGlobalOptions().
   */
  skipFields = new Set([
    "cwd",
    "dryRun",
    "verbose",
    "enableRollback"
  ]);
  /**
   * Registers global options that apply to all commands.
   *
   * @param program - The root Commander program instance
   */
  registerGlobalOptions(program) {
    // "--no-enable-rollback" uses Commander's negatable-boolean convention:
    // the parsed key is enableRollback, defaulting to true.
    program.option("-C, --cwd <path>", "The working directory for all operations").option("--dry-run", "Run without making actual changes").option("--verbose", "Enable verbose logging").option("--no-enable-rollback", "Disable automatic rollback on failure");
  }
  /**
   * Registers command-specific options from a Zod schema.
   *
   * Iterates through the schema shape and creates Commander options
   * for each field, skipping global options.
   *
   * @param command - The Commander command to register options on
   * @param schema - The Zod schema defining the command's options
   */
  registerCommandOptions(command, schema) {
    for (const [key, rawField] of Object.entries(schema.shape)) {
      if (!rawField || this.skipFields.has(key)) continue;
      const ctx = this.buildOptionContext(command, key, rawField);
      // ctx is null when the flag (or its shorthand) is already registered.
      if (ctx) this.registerOption(ctx);
    }
  }
  /**
   * Builds the context object for registering a single option.
   *
   * @param command - The Commander command
   * @param key - The schema key (camelCase; converted to kebab-case for the flag)
   * @param rawField - The raw Zod field
   * @returns The option context, or null if the option already exists
   */
  buildOptionContext(command, key, rawField) {
    const field = this.unwrapSchema(rawField);
    const optionName = camelToKebab(key);
    const shorthand = this.shorthandMap.get(key);
    // Single-character shorthands get "-x"; multi-character ones get "--xy".
    const shorthandPrefix = shorthand && shorthand.length === 1 ? "-" : "--";
    const optionFlag = shorthand ? `${shorthandPrefix}${shorthand}, --${optionName}` : `--${optionName}`;
    // Avoid double registration: skip if either the long flag or the
    // shorthand is already taken on this command.
    if (command.options.some((opt) => opt.long === `--${optionName}` || shorthand && opt.short === `${shorthandPrefix}${shorthand}`)) return null;
    const parsedDefault = this.extractDefaultValue(key, rawField);
    return {
      command,
      key,
      rawField,
      field,
      optionFlag,
      optionName,
      description: rawField.description ?? "",
      parsedDefault
    };
  }
  /**
   * Extracts the default value from a Zod field by parsing an empty object.
   *
   * @param key - The schema key
   * @param rawField - The raw Zod field
   * @returns The default value, or undefined if none
   */
  extractDefaultValue(key, rawField) {
    // Parsing {} through a partial wrapper makes Zod fill in the field's
    // declared default (if any) without requiring a value.
    const parseResult = z$1.object({ [key]: rawField }).partial().safeParse({});
    return parseResult.success ? parseResult.data[key] : void 0;
  }
  /**
   * Registers a single option based on its type.
   *
   * Dispatch order: boolean (checked on the raw, wrapped field), then
   * number / enum / string (checked on the unwrapped field), then generic.
   *
   * @param ctx - The option context
   */
  registerOption(ctx) {
    const { command, rawField, field, optionFlag, description } = ctx;
    if (this.isBooleanField(rawField)) {
      // Boolean flags take no value argument.
      command.option(optionFlag, description);
      return;
    }
    if (field instanceof z$1.ZodNumber) {
      this.registerNumberOption(ctx);
      return;
    }
    if (field instanceof z$1.ZodEnum) {
      this.registerEnumOption(ctx);
      return;
    }
    if (field instanceof z$1.ZodString) {
      this.registerStringOption(ctx);
      return;
    }
    this.registerGenericOption(ctx);
  }
  /**
   * Registers a number option with numeric parsing.
   *
   * @param ctx - The option context
   */
  registerNumberOption(ctx) {
    const { command, rawField, optionFlag, optionName, description, parsedDefault } = ctx;
    const parser = this.createNumberParser(rawField, optionName);
    const wrappedParser = this.wrapParser(parser);
    command.option(`${optionFlag} <${optionName}>`, description, wrappedParser, parsedDefault);
  }
  /**
   * Registers an enum option with choice validation.
   * The valid choices are appended to the help description.
   *
   * @param ctx - The option context
   */
  registerEnumOption(ctx) {
    const { command, rawField, field, optionFlag, optionName, description, parsedDefault } = ctx;
    const choices = this.getEnumChoices(field);
    const parser = this.createEnumParser(rawField, optionName, choices);
    const wrappedParser = this.wrapParser(parser);
    const fullDescription = `${description}${choices.length ? ` (choices: ${choices.join(", ")})` : ""}`;
    command.option(`${optionFlag} <${optionName}>`, fullDescription, wrappedParser, parsedDefault);
  }
  /**
   * Registers a string option with validation.
   *
   * @param ctx - The option context
   */
  registerStringOption(ctx) {
    const { command, rawField, optionFlag, optionName, description, parsedDefault } = ctx;
    const parser = this.createStringParser(rawField);
    const wrappedParser = this.wrapParser(parser);
    command.option(`${optionFlag} <${optionName}>`, description, wrappedParser, parsedDefault);
  }
  /**
   * Registers a generic option for other Zod types.
   *
   * @param ctx - The option context
   */
  registerGenericOption(ctx) {
    const { command, rawField, optionFlag, optionName, description, parsedDefault } = ctx;
    const parser = this.createGenericParser(rawField);
    const wrappedParser = this.wrapParser(parser);
    command.option(`${optionFlag} <${optionName}>`, description, wrappedParser, parsedDefault);
  }
  /**
   * Wraps a Result-returning parser into a Commander-compatible parser.
   *
   * Commander expects parsers to return values directly or throw on error.
   * This wrapper converts our Result-based parsers to that pattern.
   * The parsers below always put a string message in the error channel,
   * so result.error is safe to pass to the Error constructor.
   *
   * @param parser - The Result-based parser function
   * @returns A Commander-compatible parser function
   */
  wrapParser(parser) {
    return (input) => {
      const result = parser(input);
      if (result.isErr()) throw new Error(result.error);
      return result.value;
    };
  }
  /**
   * Creates a parser for number options.
   *
   * @template T - The expected return type
   * @param rawField - The raw Zod field
   * @param optionName - The option name for error messages
   * @returns A parser function that converts strings to numbers with validation
   */
  createNumberParser(rawField, optionName) {
    return (input) => {
      const num = Number(input);
      if (Number.isNaN(num)) return err(`Invalid number for --${optionName}: ${input}`);
      // Validate against the raw (wrapped) field so defaults/refinements apply.
      const result = parseSchema(rawField, num);
      if (result.isErr()) return err(result.error.message);
      return ok(result.value);
    };
  }
  /**
   * Creates a parser for enum options.
   *
   * @template T - The expected return type
   * @param rawField - The raw Zod field
   * @param optionName - The option name for error messages
   * @param choices - The valid enum choices
   * @returns A parser function that validates input against the enum choices
   */
  createEnumParser(rawField, optionName, choices) {
    return (input) => {
      const result = parseSchema(rawField, input);
      if (result.isErr()) return err(`Invalid value for --${optionName}: ${input}. Allowed: ${choices.join(", ")}`);
      return ok(result.value);
    };
  }
  /**
   * Creates a parser for string options.
   *
   * @template T - The expected return type
   * @param rawField - The raw Zod field
   * @returns A parser function that validates input against the schema
   */
  createStringParser(rawField) {
    return (input) => {
      const result = parseSchema(rawField, input);
      if (result.isErr()) return err(result.error.message);
      return ok(result.value);
    };
  }
  /**
   * Creates a generic parser using Zod validation.
   * Identical to the string parser today; kept separate so non-string
   * types can diverge later without touching string handling.
   *
   * @template T - The expected return type
   * @param rawField - The raw Zod field
   * @returns A parser function that validates input against the schema
   */
  createGenericParser(rawField) {
    return (input) => {
      const result = parseSchema(rawField, input);
      if (result.isErr()) return err(result.error.message);
      return ok(result.value);
    };
  }
  /**
   * Gets the internal definition object from a Zod type.
   * Handles both modern Zod v4 (_zod.def) and legacy (_def) structures.
   *
   * @param field - The raw Zod field
   * @returns The internal definition object (empty object when neither exists)
   */
  getInternalDef(field) {
    const zodContainer = field._zod;
    if (zodContainer?.def) return zodContainer.def;
    return field._def ?? {};
  }
  /**
   * Unwraps Zod wrapper types (optional, default, etc.) to get the inner type.
   * Recurses until a non-wrapper type is found.
   *
   * @param field - The raw Zod field
   * @returns The unwrapped Zod type
   */
  unwrapSchema(field) {
    if (field instanceof z$1.ZodDefault) {
      const inner = this.getInternalDef(field).innerType;
      return inner ? this.unwrapSchema(inner) : field;
    }
    if (field instanceof z$1.ZodOptional) {
      const inner = this.getInternalDef(field).innerType;
      return inner ? this.unwrapSchema(inner) : field;
    }
    // NOTE(review): this branch reads a `.schema` property off a ZodUnknown
    // definition — presumably targeting a wrapped/pipelined type that reports
    // as unknown; confirm which schema shapes actually hit this path.
    if (field instanceof z$1.ZodUnknown) {
      const schema = this.getInternalDef(field).schema;
      return schema ? this.unwrapSchema(schema) : field;
    }
    return field;
  }
  /**
   * Checks if a field is a boolean type (possibly wrapped).
   *
   * NOTE(review): only a wrapped boolean is detected (via def.innerType or
   * def.schema); a bare boolean field would have neither and fall through
   * to false — presumably all schema booleans carry defaults; confirm.
   *
   * @param rawField - The raw Zod field
   * @returns True if the field is boolean, false otherwise
   */
  isBooleanField(rawField) {
    const def = this.getInternalDef(rawField);
    return (def.innerType ?? def.schema) instanceof z$1.ZodBoolean;
  }
  /**
   * Extracts enum choices from a ZodEnum field.
   *
   * @param field - The ZodEnum field
   * @returns The array of valid enum choices (empty when none are declared)
   */
  getEnumChoices(field) {
    return this.getInternalDef(field).values ?? [];
  }
};
|
|
482
|
+
|
|
483
|
+
//#endregion
|
|
484
|
+
//#region src/cli/options/options.normalizer.ts
|
|
485
|
+
/**
 * Normalizes CLI option names between Commander.js and Zod schema conventions.
 *
 * Commander.js camelizes kebab-case flags (e.g. `--skip-github-release`
 * becomes `skipGithubRelease`), which can disagree with the schema's own
 * camelCase key (`skipGitHubRelease`). This class renames such keys in the
 * parsed options so they match the schema exactly.
 *
 * @example
 * ```ts
 * const normalizer = new OptionsNormalizer();
 * const normalized = normalizer.normalize(cliOptions, ReleaseConfigSchema);
 * // Ensures all keys match the schema's camelCase keys
 * ```
 */
var OptionsNormalizer = class {
  /**
   * Normalizes parsed CLI options to match Zod schema keys.
   *
   * Any key that Commander would have produced from the kebab-case flag is
   * renamed to its schema key, unless the schema key is already present.
   *
   * @param options - Raw options from Commander.js
   * @param schema - The Zod schema defining expected option keys
   * @returns Normalized options with keys matching the schema
   */
  normalize(options, schema) {
    const keyMap = this.buildMappingFromSchema(schema);
    const result = { ...options };
    for (const [commanderKey, schemaKey] of keyMap) {
      // Never clobber a value already present under the schema key.
      const needsRename = commanderKey in result && !(schemaKey in result);
      if (!needsRename) continue;
      result[schemaKey] = result[commanderKey];
      delete result[commanderKey];
    }
    return result;
  }
  /**
   * Builds a mapping from Commander-produced keys to schema keys.
   *
   * For every schema key whose kebab-case form differs from the key itself,
   * simulate Commander's kebab-to-camel conversion and record the pair.
   *
   * @param schema - The Zod schema to extract keys from
   * @returns Map of Commander keys to schema keys
   */
  buildMappingFromSchema(schema) {
    const mappings = new Map();
    for (const camelKey of Object.keys(schema.shape)) {
      const kebabKey = camelToKebab(camelKey);
      if (kebabKey === camelKey) continue;
      // Commander's camelization: lift the letter after each hyphen.
      const commanderKey = kebabKey.replace(/-([a-z])/g, (_, letter) => letter.toUpperCase());
      mappings.set(commanderKey, camelKey);
    }
    return mappings;
  }
};
|
|
542
|
+
|
|
543
|
+
//#endregion
|
|
544
|
+
//#region src/domain/semver/semver.strategies.ts
|
|
545
|
+
/**
 * The bump strategy determines which versioning strategy to use when bumping versions.
 * - "auto": Automatically determine the version bump based on commit messages.
 * - "manual": Manually specify the version bump, will generate selection options based on current version.
 */
const BUMP_STRATEGY_AUTO = "auto";
const BUMP_STRATEGY_MANUAL = "manual";
// Tuple of every valid strategy, used to build the enum schema below.
const BumpStrategyValues = [BUMP_STRATEGY_AUTO, BUMP_STRATEGY_MANUAL];
// The schema also accepts "" and defaults to it — presumably "" means
// "no strategy chosen yet" and is resolved downstream (hasBumpStrategy
// treats "" as falsy); confirm against the release command's flow.
const BumpStrategySchema = z$1.enum(BumpStrategyValues).or(z$1.literal("")).default("");
|
|
554
|
+
|
|
555
|
+
//#endregion
|
|
556
|
+
//#region src/commands/release/groups/skip-predicates.ts
|
|
557
|
+
// Predicates evaluated against the workflow context to decide whether a
// release task should be skipped. Consumed by the task builder's skip API.
const releaseSkipPredicates = {
  // Direct pass-throughs of individual config switches.
  skipBump: (ctx) => ctx.config.skipBump,
  skipChangelog: (ctx) => ctx.config.skipChangelog,
  skipGit: (ctx) => ctx.config.skipGit,
  skipPush: (ctx) => ctx.config.skipPush,
  skipGitHubRelease: (ctx) => ctx.config.skipGitHubRelease,
  // Composite predicates. Note `a || b && c` parses as `a || (b && c)`:
  // "no changes" below means BOTH bump and changelog are skipped.
  skipBumpAndChangelog: (ctx) => ctx.config.skipBump && ctx.config.skipChangelog,
  skipBumpAndGit: (ctx) => ctx.config.skipBump && ctx.config.skipGit,
  skipGitOrNoChanges: (ctx) => ctx.config.skipGit || ctx.config.skipBump && ctx.config.skipChangelog,
  skipPushOrNoChanges: (ctx) => ctx.config.skipGit || ctx.config.skipPush || ctx.config.skipBump && ctx.config.skipChangelog,
  skipGitHubReleaseOrNoTag: (ctx) => ctx.config.skipGitHubRelease || ctx.config.skipGit || ctx.config.skipBump && ctx.config.skipChangelog,
  // Strategy/type checks used to pick between auto and manual bump flows.
  bumpStrategyIsAuto: (ctx) => ctx.config.bumpStrategy === BUMP_STRATEGY_AUTO,
  bumpStrategyIsManual: (ctx) => ctx.config.bumpStrategy === BUMP_STRATEGY_MANUAL,
  // Boolean() treats the "" default as "not set" for both fields below.
  hasReleaseType: (ctx) => Boolean(ctx.config.releaseType),
  hasBumpStrategy: (ctx) => Boolean(ctx.config.bumpStrategy)
};
|
|
573
|
+
|
|
574
|
+
//#endregion
|
|
575
|
+
//#region src/core/task/skip-conditions.ts
|
|
576
|
+
/**
 * Converts a simple predicate into a full SkipCondition result function.
 *
 * @param predicate - Predicate deciding whether the task should be skipped
 * @param reason - Human-readable reason shown in logs when skipping
 * @returns Function that evaluates the predicate against a context and
 *   wraps the outcome in FireflyResult<SkipCondition>
 *
 * @example
 * ```typescript
 * const skipFn = toSkipCondition(
 *   (ctx) => ctx.config.skipValidation,
 *   "Validation disabled in config"
 * );
 *
 * TaskBuilder.create("validate")
 *   .shouldSkip(skipFn)
 * ```
 */
function toSkipCondition(predicate, reason) {
  return (ctx) => {
    const shouldSkip = predicate(ctx);
    return FireflyOk({
      shouldSkip,
      reason
    });
  };
}
|
|
600
|
+
/**
 * Wraps a predicate as a SkipCondition using a fallback reason.
 *
 * Convenience for internal call sites that do not care about the message;
 * user-facing code should call `toSkipCondition` with an explicit reason.
 *
 * @param predicate - Predicate to convert
 * @param defaultReason - Optional reason (defaults to "Skip condition met")
 * @returns Function returning FireflyResult<SkipCondition>
 *
 * @internal
 */
function predicateToSkipFn(predicate, defaultReason = "Skip condition met") {
  return toSkipCondition(predicate, defaultReason);
}
|
|
616
|
+
/**
 * Converts a predicate into a SkipCondition that also carries jump targets.
 *
 * When the predicate evaluates true, the task is skipped and execution
 * continues at the listed tasks instead of the normal successor.
 *
 * @param predicate - Predicate to evaluate against the context
 * @param skipToTasks - Task IDs to jump to when skipping
 * @param reason - Optional reason message (defaults to "Skip condition met")
 * @returns Function returning FireflyResult<SkipCondition>
 *
 * @example
 * ```typescript
 * const skipAndJump = toSkipConditionWithJump(
 *   (ctx) => ctx.config.fastMode,
 *   ["finalize"],
 *   "Fast mode enabled, skipping to finalize"
 * );
 * ```
 */
function toSkipConditionWithJump(predicate, skipToTasks, reason) {
  return (ctx) => {
    const condition = {
      shouldSkip: predicate(ctx),
      reason: reason ?? "Skip condition met",
      skipToTasks
    };
    return FireflyOk(condition);
  };
}
|
|
643
|
+
|
|
644
|
+
//#endregion
|
|
645
|
+
//#region src/core/task/task.builder.ts
|
|
646
|
+
/**
|
|
647
|
+
* Fluent builder for constructing validated tasks.
|
|
648
|
+
* Provides a chainable API for defining task properties.
|
|
649
|
+
*
|
|
650
|
+
* @template TContext - The workflow context type this task operates on
|
|
651
|
+
*
|
|
652
|
+
* @example
|
|
653
|
+
* ```typescript
|
|
654
|
+
* const taskResult = TaskBuilder.create<MyContext>("validate-input")
|
|
655
|
+
* .description("Validates user input against schema")
|
|
656
|
+
* .dependsOn("load-config")
|
|
657
|
+
* .skipWhen((ctx) => ctx.data.skipValidation)
|
|
658
|
+
* .execute((ctx) => {
|
|
659
|
+
* const validated = validateInput(ctx.data.input);
|
|
660
|
+
* return FireflyOkAsync(ctx.fork("validatedInput", validated));
|
|
661
|
+
* })
|
|
662
|
+
* .withUndo((ctx) => {
|
|
663
|
+
* logger.info("Rolling back validation");
|
|
664
|
+
* return FireflyOkAsync(undefined);
|
|
665
|
+
* })
|
|
666
|
+
* .build();
|
|
667
|
+
*
|
|
668
|
+
* if (taskResult.isErr()) {
|
|
669
|
+
* console.error("Invalid task:", taskResult.error.message);
|
|
670
|
+
* }
|
|
671
|
+
* ```
|
|
672
|
+
*/
|
|
673
|
+
var TaskBuilder = class TaskBuilder {
|
|
674
|
+
taskId;
|
|
675
|
+
taskDescription;
|
|
676
|
+
taskDependencies = [];
|
|
677
|
+
taskConfigSchema;
|
|
678
|
+
skipFn;
|
|
679
|
+
executeFn;
|
|
680
|
+
undoFn;
|
|
681
|
+
constructor(id) {
  // Only the id is fixed at construction; every other task property is
  // supplied later through the fluent setters below.
  this.taskId = id;
}
|
|
684
|
+
/**
 * Creates a new TaskBuilder instance.
 * Entry point of the fluent API: chain setters, then finish with build().
 * @template TCtx - The workflow context type
 * @param id - Unique identifier for the task
 * @returns A fresh builder for the given task id
 */
static create(id) {
  return new TaskBuilder(id);
}
|
|
692
|
+
/**
|
|
693
|
+
* Sets the task's human-readable description.
|
|
694
|
+
* Required - build() will fail without a description.
|
|
695
|
+
* @param desc - Description of what the task does
|
|
696
|
+
*/
|
|
697
|
+
description(desc) {
|
|
698
|
+
this.taskDescription = desc;
|
|
699
|
+
return this;
|
|
700
|
+
}
|
|
701
|
+
/**
|
|
702
|
+
* Adds a single dependency on another task.
|
|
703
|
+
* The dependency must be registered before this task.
|
|
704
|
+
* @param taskId - ID of the task this depends on
|
|
705
|
+
*/
|
|
706
|
+
dependsOn(taskId) {
|
|
707
|
+
this.taskDependencies.push(taskId);
|
|
708
|
+
return this;
|
|
709
|
+
}
|
|
710
|
+
/**
|
|
711
|
+
* Adds multiple dependencies on other tasks.
|
|
712
|
+
* @param taskIds - IDs of tasks this depends on
|
|
713
|
+
*/
|
|
714
|
+
dependsOnAll(...taskIds) {
|
|
715
|
+
this.taskDependencies.push(...taskIds);
|
|
716
|
+
return this;
|
|
717
|
+
}
|
|
718
|
+
/**
|
|
719
|
+
* Sets a Zod schema for configuration validation.
|
|
720
|
+
* @param schema - Zod schema to validate task config against
|
|
721
|
+
*/
|
|
722
|
+
withConfigSchema(schema) {
|
|
723
|
+
this.taskConfigSchema = schema;
|
|
724
|
+
return this;
|
|
725
|
+
}
|
|
726
|
+
/**
|
|
727
|
+
* Sets a simple skip condition based on a boolean predicate.
|
|
728
|
+
* @param predicate - Function returning true if task should be skipped
|
|
729
|
+
*/
|
|
730
|
+
skipWhen(predicate) {
|
|
731
|
+
this.skipFn = predicateToSkipFn(predicate);
|
|
732
|
+
return this;
|
|
733
|
+
}
|
|
734
|
+
/**
|
|
735
|
+
* Sets a skip condition with a custom reason message.
|
|
736
|
+
* @param predicate - Function returning true if task should be skipped
|
|
737
|
+
* @param reason - Human-readable reason shown in logs
|
|
738
|
+
*/
|
|
739
|
+
skipWhenWithReason(predicate, reason) {
|
|
740
|
+
this.skipFn = toSkipCondition(predicate, reason);
|
|
741
|
+
return this;
|
|
742
|
+
}
|
|
743
|
+
/**
|
|
744
|
+
* Sets a skip condition that jumps to specific tasks.
|
|
745
|
+
* @param predicate - Function returning true if task should be skipped
|
|
746
|
+
* @param skipToTasks - Task IDs to jump to when skipping
|
|
747
|
+
*/
|
|
748
|
+
skipWhenAndJumpTo(predicate, skipToTasks) {
|
|
749
|
+
this.skipFn = toSkipConditionWithJump(predicate, skipToTasks);
|
|
750
|
+
return this;
|
|
751
|
+
}
|
|
752
|
+
/**
|
|
753
|
+
* Sets a custom skip condition with full control over the result.
|
|
754
|
+
* @param fn - Function returning a SkipCondition result
|
|
755
|
+
*/
|
|
756
|
+
shouldSkip(fn) {
|
|
757
|
+
this.skipFn = fn;
|
|
758
|
+
return this;
|
|
759
|
+
}
|
|
760
|
+
/**
|
|
761
|
+
* Sets the task's execute function.
|
|
762
|
+
* Required - build() will fail without an execute function.
|
|
763
|
+
* @param fn - Async function that performs the task's work
|
|
764
|
+
*/
|
|
765
|
+
execute(fn) {
|
|
766
|
+
this.executeFn = fn;
|
|
767
|
+
return this;
|
|
768
|
+
}
|
|
769
|
+
/**
|
|
770
|
+
* Sets an optional undo function for rollback support.
|
|
771
|
+
* Called in reverse order when a later task fails.
|
|
772
|
+
* @param fn - Async function that undoes the task's effects
|
|
773
|
+
*/
|
|
774
|
+
withUndo(fn) {
|
|
775
|
+
this.undoFn = fn;
|
|
776
|
+
return this;
|
|
777
|
+
}
|
|
778
|
+
/**
|
|
779
|
+
* Builds the task, validating that required properties are set.
|
|
780
|
+
* @returns `FireflyOk(Task)` if valid, `Err(FireflyError)` if missing required properties
|
|
781
|
+
*/
|
|
782
|
+
build() {
|
|
783
|
+
if (!this.executeFn) return invalidErr({
|
|
784
|
+
message: `Task "${this.taskId}" must have an execute function`,
|
|
785
|
+
source: "TaskBuilder.build"
|
|
786
|
+
});
|
|
787
|
+
if (!this.taskDescription) return invalidErr({
|
|
788
|
+
message: `Task "${this.taskId}" must have a description`,
|
|
789
|
+
source: "TaskBuilder.build"
|
|
790
|
+
});
|
|
791
|
+
return FireflyOk({
|
|
792
|
+
meta: {
|
|
793
|
+
id: this.taskId,
|
|
794
|
+
description: this.taskDescription,
|
|
795
|
+
dependencies: this.taskDependencies.length > 0 ? this.taskDependencies : void 0,
|
|
796
|
+
configSchema: this.taskConfigSchema
|
|
797
|
+
},
|
|
798
|
+
shouldSkip: this.skipFn,
|
|
799
|
+
execute: this.executeFn,
|
|
800
|
+
undo: this.undoFn
|
|
801
|
+
});
|
|
802
|
+
}
|
|
803
|
+
};
|
|
804
|
+
|
|
805
|
+
//#endregion
|
|
806
|
+
//#region src/commands/release/tasks/bump-release-version.task.ts
|
|
807
|
+
/** Builds the task that applies the decided version bump to the relevant files. */
function createBumpReleaseVersion() {
	const builder = TaskBuilder.create("bump-release-version")
		.description("Applies the new version bump to relevant files")
		.dependsOnAll("straight-version-bump", "determine-automatic-bump", "prompt-manual-version", "prompt-bump-strategy")
		.skipWhenWithReason((ctx) => ctx.config.skipBump, "Skipped: skipBump is enabled");
	return builder
		.execute((ctx) => {
			logger.info("bump-release-version");
			return FireflyOkAsync(ctx);
		})
		.build();
}
|
|
813
|
+
|
|
814
|
+
//#endregion
|
|
815
|
+
//#region src/core/task/task-group.builder.ts
|
|
816
|
+
/**
|
|
817
|
+
* Fluent builder for constructing validated task groups.
|
|
818
|
+
* Provides a chainable API for defining group properties.
|
|
819
|
+
*
|
|
820
|
+
* @template TContext - The workflow context type for tasks in this group
|
|
821
|
+
*
|
|
822
|
+
* @example
|
|
823
|
+
* ```typescript
|
|
824
|
+
* const gitGroupResult = TaskGroupBuilder.create<ReleaseContext>("git")
|
|
825
|
+
* .description("Git operations for release")
|
|
826
|
+
* .dependsOnGroup("changelog")
|
|
827
|
+
* .skipWhen((ctx) => ctx.config.skipGit)
|
|
828
|
+
* .skipReason("Git operations disabled")
|
|
829
|
+
* .tasks([stageTask, commitTask, tagTask])
|
|
830
|
+
* .build();
|
|
831
|
+
*
|
|
832
|
+
* if (gitGroupResult.isErr()) {
|
|
833
|
+
* console.error("Invalid group:", gitGroupResult.error.message);
|
|
834
|
+
* }
|
|
835
|
+
* ```
|
|
836
|
+
*/
|
|
837
|
+
var TaskGroupBuilder = class TaskGroupBuilder {
	groupId;
	groupDescription;
	groupDependencies = [];
	skipConditionFn;
	skipPredicateFn;
	skipReasonText;
	groupTasks = [];
	// First failed Result passed to addTaskResult(); makes build() fail as its
	// JSDoc promises (previously errors were silently dropped).
	failedTaskResult;
	constructor(id) {
		this.groupId = id;
	}
	/**
	 * Creates a new TaskGroupBuilder instance.
	 * @param id - Unique identifier for the group
	 */
	static create(id) {
		return new TaskGroupBuilder(id);
	}
	/**
	 * Sets the group's human-readable description.
	 * Required - build() will fail without a description.
	 * @param desc - Description of what the group does
	 */
	description(desc) {
		this.groupDescription = desc;
		return this;
	}
	/**
	 * Adds a dependency on another group.
	 * The dependency group must be registered before this group.
	 * @param groupId - ID of the group this depends on
	 */
	dependsOnGroup(groupId) {
		this.groupDependencies.push(groupId);
		return this;
	}
	/**
	 * Adds dependencies on multiple groups.
	 * @param groupIds - IDs of groups this depends on
	 */
	dependsOnGroups(...groupIds) {
		this.groupDependencies.push(...groupIds);
		return this;
	}
	/**
	 * Sets a simple skip condition based on a boolean predicate.
	 * When true, all tasks in the group are skipped.
	 * @param predicate - Function returning true if group should be skipped
	 */
	skipWhen(predicate) {
		this.skipPredicateFn = predicate;
		return this;
	}
	/**
	 * Sets the reason shown in logs when the group is skipped.
	 * @param reason - Human-readable reason
	 */
	skipReason(reason) {
		this.skipReasonText = reason;
		return this;
	}
	/**
	 * Sets a custom skip condition with full control over the result.
	 * @param fn - Function returning a SkipCondition result
	 */
	shouldSkip(fn) {
		this.skipConditionFn = fn;
		return this;
	}
	/**
	 * Sets the tasks belonging to this group.
	 * @param tasks - Array of tasks to include in the group
	 */
	tasks(tasks) {
		this.groupTasks = tasks;
		return this;
	}
	/**
	 * Adds a single task to the group.
	 * @param task - Task to add
	 */
	addTask(task) {
		this.groupTasks.push(task);
		return this;
	}
	/**
	 * Adds a task result to the group.
	 * If the result is an error, the error is stored and will cause build() to fail.
	 * @param taskResult - Result containing a task or error
	 */
	addTaskResult(taskResult) {
		if (taskResult.isOk()) this.groupTasks.push(taskResult.value);
		// Remember the first failure so build() can surface it instead of
		// silently producing a group that is missing tasks.
		else this.failedTaskResult ??= taskResult;
		return this;
	}
	/**
	 * Builds the task group, validating that required properties are set.
	 * @returns `Ok(TaskGroup)` if valid, `Err(FireflyError)` if a task result
	 * failed or required properties are missing
	 */
	build() {
		// Propagate a stored task error; .map() on an Err re-wraps the same
		// error with the TaskGroup result type (same idiom as the group factories).
		if (this.failedTaskResult) return this.failedTaskResult.map(() => ({}));
		if (!this.groupDescription) return invalidErr({
			message: `Task group "${this.groupId}" must have a description`,
			source: "TaskGroupBuilder.build"
		});
		if (this.groupTasks.length === 0) return invalidErr({
			message: `Task group "${this.groupId}" must have at least one task`,
			source: "TaskGroupBuilder.build"
		});
		const meta = {
			id: this.groupId,
			description: this.groupDescription,
			dependsOnGroups: this.groupDependencies.length > 0 ? [...this.groupDependencies] : void 0
		};
		const options = {};
		if (this.skipConditionFn) options.skipCondition = this.skipConditionFn;
		if (this.skipPredicateFn) options.skipWhen = this.skipPredicateFn;
		if (this.skipReasonText) options.skipReason = this.skipReasonText;
		return FireflyOk({
			meta,
			options: Object.keys(options).length > 0 ? options : void 0,
			tasks: [...this.groupTasks]
		});
	}
};
|
|
961
|
+
/**
|
|
962
|
+
* Convenience function to create a new TaskGroupBuilder.
|
|
963
|
+
* Equivalent to `TaskGroupBuilder.create(id)`.
|
|
964
|
+
*
|
|
965
|
+
* @template TContext - The workflow context type
|
|
966
|
+
* @param id - Unique identifier for the group
|
|
967
|
+
*
|
|
968
|
+
* @example
|
|
969
|
+
* ```typescript
|
|
970
|
+
* const group = buildTaskGroup<MyContext>("my-group")
|
|
971
|
+
* .description("Does something")
|
|
972
|
+
* .tasks([task1, task2])
|
|
973
|
+
* .build();
|
|
974
|
+
* ```
|
|
975
|
+
*/
|
|
976
|
+
/**
 * Convenience wrapper around `TaskGroupBuilder.create(id)`.
 * @param id - Unique identifier for the group
 */
function buildTaskGroup(id) {
	const builder = TaskGroupBuilder.create(id);
	return builder;
}
|
|
979
|
+
|
|
980
|
+
//#endregion
|
|
981
|
+
//#region src/commands/release/groups/bump-execution.group.ts
|
|
982
|
+
/**
|
|
983
|
+
* Creates the bump execution group containing the actual version bump task.
|
|
984
|
+
*
|
|
985
|
+
* This group executes the version bump in package.json after the strategy
|
|
986
|
+
* has been determined. It is skipped when skipBump is enabled.
|
|
987
|
+
*/
|
|
988
|
+
function createBumpExecutionGroup() {
	const taskResult = createBumpReleaseVersion();
	// Propagate a task-construction failure; `.map()` on an Err never runs the
	// callback and simply re-types the same error as a TaskGroup result.
	if (taskResult.isErr()) return taskResult.map(() => ({}));
	// Single-task group gated on the shared skipBump predicate.
	return buildTaskGroup("bump-execution").description("Version bump execution").dependsOnGroup("bump-strategy").skipWhen(releaseSkipPredicates.skipBump).skipReason("Skipped: skipBump is enabled").tasks([taskResult.value]).build();
}
|
|
993
|
+
|
|
994
|
+
//#endregion
|
|
995
|
+
//#region src/commands/release/tasks/delegate-bump-strategy.task.ts
|
|
996
|
+
/**
|
|
997
|
+
* Determines if the execute-bump-strategy task should be skipped.
|
|
998
|
+
*
|
|
999
|
+
* The task should execute when:
|
|
1000
|
+
* - No explicit releaseType is set (need to determine version bump)
|
|
1001
|
+
* - A bumpStrategy is configured (auto or manual)
|
|
1002
|
+
* - skipBump is not enabled
|
|
1003
|
+
*/
|
|
1004
|
+
/**
 * Determines if the execute-bump-strategy task should be skipped.
 *
 * The task only runs when skipBump is off, no explicit releaseType is set,
 * and a bumpStrategy is configured.
 */
function shouldSkipBumpStrategy(ctx) {
	const { skipBump, releaseType, bumpStrategy } = ctx.config;
	return Boolean(skipBump) || Boolean(releaseType) || !bumpStrategy;
}
|
|
1011
|
+
/**
|
|
1012
|
+
* Generates a skip reason based on the current context.
|
|
1013
|
+
*/
|
|
1014
|
+
/**
 * Produces the human-readable skip reason matching shouldSkipBumpStrategy's
 * checks, in the same precedence order.
 */
function getSkipReason(ctx) {
	const { skipBump, releaseType, bumpStrategy } = ctx.config;
	if (skipBump) return "skipBump is enabled";
	if (releaseType) return `releaseType already set to '${releaseType}'`;
	return bumpStrategy ? "unknown reason" : "no bumpStrategy configured";
}
|
|
1021
|
+
/** Builds the task that routes the workflow to the configured bump strategy. */
function createDelegateBumpStrategyTask() {
	const evaluateSkip = (ctx) => {
		const skip = shouldSkipBumpStrategy(ctx);
		return FireflyOk({
			shouldSkip: skip,
			reason: skip ? getSkipReason(ctx) : void 0
		});
	};
	return TaskBuilder.create("delegate-bump-strategy")
		.description("Delegates the version bump strategy decision")
		.dependsOn("initialize-release-version")
		.shouldSkip(evaluateSkip)
		.execute((ctx) => {
			logger.info("delegate-bump-strategy");
			return FireflyOkAsync(ctx);
		})
		.build();
}
|
|
1033
|
+
|
|
1034
|
+
//#endregion
|
|
1035
|
+
//#region src/commands/release/tasks/determine-automatic-bump.task.ts
|
|
1036
|
+
/** Builds the task that derives the bump level from commit messages ('auto' strategy). */
function createDetermineAutomaticBump() {
	const notAutoStrategy = (ctx) => ctx.config.skipBump || ctx.config.bumpStrategy !== BUMP_STRATEGY_AUTO;
	return TaskBuilder.create("determine-automatic-bump")
		.description("Automatically determines the version bump from commit messages")
		.dependsOn("delegate-bump-strategy")
		.skipWhenWithReason(notAutoStrategy, "Skipped: skipBump enabled or bumpStrategy is not 'auto'")
		.execute((ctx) => {
			logger.info("determine-automatic-bump");
			return FireflyOkAsync(ctx);
		})
		.build();
}
|
|
1042
|
+
|
|
1043
|
+
//#endregion
|
|
1044
|
+
//#region src/commands/release/tasks/prompt-bump-strategy.task.ts
|
|
1045
|
+
/** Builds the task that interactively asks the user which bump strategy to use. */
function createPromptBumpStrategyTask() {
	const alreadyDecided = (ctx) => ctx.config.skipBump || Boolean(ctx.config.bumpStrategy) || Boolean(ctx.config.releaseType);
	return TaskBuilder.create("prompt-bump-strategy")
		.description("Prompts the user for a version bump strategy")
		.dependsOn("initialize-release-version")
		.skipWhenWithReason(alreadyDecided, "Skipped: skipBump enabled, or bumpStrategy/releaseType already specified")
		.execute((ctx) => {
			logger.info("prompt-bump-strategy");
			return FireflyOkAsync(ctx);
		})
		.build();
}
|
|
1051
|
+
|
|
1052
|
+
//#endregion
|
|
1053
|
+
//#region src/commands/release/tasks/prompt-manual-bump.task.ts
|
|
1054
|
+
// Builds the task that asks the user to pick a version manually.
function createPromptManualVersionTask() {
	// NOTE(review): the predicate skips whenever *any* bumpStrategy is set, yet
	// this task depends on "delegate-bump-strategy", which itself only proceeds
	// when a bumpStrategy IS configured — so this prompt can apparently never run
	// under the manual strategy. Compare createDetermineAutomaticBump, which
	// checks `bumpStrategy !== BUMP_STRATEGY_AUTO`; this one may be intended to
	// skip only when the strategy is not 'manual' — confirm. The description
	// string "bump selections" also reads like a typo for "bump selection"
	// (runtime string, left untouched here).
	return TaskBuilder.create("prompt-manual-version").description("Prompts the user for a manual version bump selections").dependsOn("delegate-bump-strategy").skipWhenWithReason((ctx) => ctx.config.skipBump || Boolean(ctx.config.bumpStrategy) || Boolean(ctx.config.releaseType), "Skipped: skipBump enabled, or bumpStrategy/releaseType already specified").execute((ctx) => {
		logger.info("prompt-manual-version");
		return FireflyOkAsync(ctx);
	}).build();
}
|
|
1060
|
+
|
|
1061
|
+
//#endregion
|
|
1062
|
+
//#region src/commands/release/tasks/straight-version-bump.task.ts
|
|
1063
|
+
/** Builds the task that bumps directly to the explicitly configured release type. */
function createStraightVersionBump() {
	const noExplicitReleaseType = (ctx) => ctx.config.skipBump || ctx.config.releaseType === void 0;
	return TaskBuilder.create("straight-version-bump")
		.description("Performs a direct version bump based on the configured release type")
		.dependsOn("initialize-release-version")
		.skipWhenWithReason(noExplicitReleaseType, "Skipped: skipBump is enabled or no release type specified")
		.execute((ctx) => {
			logger.info("straight-version-bump");
			return FireflyOkAsync(ctx);
		})
		.build();
}
|
|
1069
|
+
|
|
1070
|
+
//#endregion
|
|
1071
|
+
//#region src/commands/release/groups/bump-strategy.group.ts
|
|
1072
|
+
/**
|
|
1073
|
+
* Creates the bump strategy group containing version bump decision tasks.
|
|
1074
|
+
*
|
|
1075
|
+
* This group handles determining the version bump strategy (auto/manual)
|
|
1076
|
+
* and the specific release type. It is skipped when skipBump is enabled.
|
|
1077
|
+
*
|
|
1078
|
+
*/
|
|
1079
|
+
/**
 * Assembles the bump-strategy group from its five decision tasks.
 * Returns the first task-construction error unchanged if any builder failed.
 */
function createBumpStrategyGroup() {
	const combined = Result.combine([
		createStraightVersionBump(),
		createPromptBumpStrategyTask(),
		createDelegateBumpStrategyTask(),
		createDetermineAutomaticBump(),
		createPromptManualVersionTask()
	]);
	// .map() on an Err re-types the error as a TaskGroup result without running the callback.
	if (combined.isErr()) return combined.map(() => ({}));
	return buildTaskGroup("bump-strategy")
		.description("Version bump strategy selection")
		.dependsOnGroup("setup")
		.skipWhen(releaseSkipPredicates.skipBump)
		.skipReason("Skipped: skipBump is enabled")
		.tasks(combined.value)
		.build();
}
|
|
1091
|
+
|
|
1092
|
+
//#endregion
|
|
1093
|
+
//#region src/commands/release/tasks/initialize-release-version.task.ts
|
|
1094
|
+
/**
 * Builds the task that initializes the release version state.
 *
 * Fix: the description previously read "Hydrate and prepare the release
 * configuration" — a copy-paste duplicate of prepare-release-config's
 * description, which made the two tasks indistinguishable in logs/output.
 */
function createInitializeReleaseVersion() {
	return TaskBuilder.create("initialize-release-version")
		.description("Initializes the release version")
		.dependsOn("prepare-release-config")
		.execute((ctx) => {
			logger.info("initialize-release-version");
			return FireflyOkAsync(ctx);
		})
		.build();
}
|
|
1100
|
+
|
|
1101
|
+
//#endregion
|
|
1102
|
+
//#region src/commands/release/tasks/prepare-release-config.task.ts
|
|
1103
|
+
const HTTPS_REMOTE_REGEX = /https?:\/\/[^/]+\/([^/]+)\/([^/.]+)(?:\.git)?/;
const SSH_REMOTE_REGEX = /git@[^:]+:([^/]+)\/([^/.]+)(?:\.git)?/;
const SCOPED_PACKAGE_REGEX = /^@([^/]+)\/(.+)$/;
const PRERELEASE_REGEX = /^\d+\.\d+\.\d+-([a-zA-Z]+)/;
/**
 * Parses a git remote URL to extract owner and repository name.
 * HTTPS is tried first, then SSH; returns null when neither matches.
 *
 * @example
 * parseGitRemoteUrl("https://github.com/owner/repo.git") // { owner: "owner", repo: "repo" }
 * parseGitRemoteUrl("git@github.com:owner/repo.git") // { owner: "owner", repo: "repo" }
 */
function parseGitRemoteUrl(url) {
	for (const pattern of [HTTPS_REMOTE_REGEX, SSH_REMOTE_REGEX]) {
		const match = url.match(pattern);
		if (match?.[1] && match[2]) return {
			owner: match[1],
			repo: match[2]
		};
	}
	return null;
}
/**
 * Splits a possibly-scoped npm package name into scope and bare name.
 *
 * @example
 * parsePackageName("@company/tool") // { scope: "company", name: "tool" }
 * parsePackageName("tool") // { scope: undefined, name: "tool" }
 */
function parsePackageName(packageName) {
	const scoped = packageName.match(SCOPED_PACKAGE_REGEX);
	if (!(scoped?.[1] && scoped[2])) return { name: packageName };
	return {
		scope: scoped[1],
		name: scoped[2]
	};
}
/**
 * Extracts the leading alphabetic pre-release identifier from a semver string.
 *
 * @example
 * extractPreReleaseId("1.0.0-beta.1") // "beta"
 * extractPreReleaseId("1.0.0") // undefined
 */
function extractPreReleaseId(version) {
	const match = PRERELEASE_REGEX.exec(version);
	return match?.[1];
}
|
|
1153
|
+
/**
|
|
1154
|
+
* Hydrates the repository field from git remote URL.
|
|
1155
|
+
*
|
|
1156
|
+
* Behavior:
|
|
1157
|
+
* - If not inside a git repository, resolves to undefined.
|
|
1158
|
+
* - If inside a repository, detect the repository URL
|
|
1159
|
+
* using a fall-through strategy (upstream remote → origin → first remote).
|
|
1160
|
+
* - Parses the URL and returns "owner/repo" when possible.
|
|
1161
|
+
*/
|
|
1162
|
+
/**
 * Infers "owner/repo" from the git remote URL.
 * Resolves to undefined when no remote URL is found or it cannot be parsed.
 */
function hydrateRepository(ctx) {
	const toOwnerRepo = (url) => {
		if (!url) return null;
		const parsed = parseGitRemoteUrl(url);
		return parsed ? `${parsed.owner}/${parsed.repo}` : null;
	};
	return ctx.services.git.inferRepositoryUrl()
		.map(toOwnerRepo)
		.map((val) => val ?? void 0)
		.andTee((repository) => logger.verbose(`PrepareReleaseConfigTask: Prepared repository: ${repository}`));
}
|
|
1170
|
+
/**
|
|
1171
|
+
* Hydrates name, scope, and preReleaseId from package.json.
|
|
1172
|
+
*
|
|
1173
|
+
* Behavior:
|
|
1174
|
+
* - If package.json does not exist, returns all values as undefined.
|
|
1175
|
+
* - If it exists, reads package.json and returns parsed results for name, scope and preReleaseId.
|
|
1176
|
+
*/
|
|
1177
|
+
/**
 * Reads package.json (when present) and hydrates name, scope, and preReleaseId.
 * Resolves to all-undefined fields when package.json does not exist; only
 * truthy hydrated values are included in the returned object.
 */
function hydrateFromPackageJson(ctx) {
	return ctx.services.fs.exists("package.json").andThen((exists) => {
		if (!exists) return FireflyOkAsync({
			name: void 0,
			scope: void 0,
			preReleaseId: void 0
		});
		return ctx.services.packageJson.read("package.json").andThen((pkg) =>
			zip3Async(
				hydrateNameFromPackageJson(ctx, pkg),
				hydrateScopeFromPackageJson(ctx, pkg),
				hydratePreReleaseIdFromPackageJson(ctx, pkg)
			).map(([name, scope, preReleaseId]) => {
				const hydrated = {};
				if (name) hydrated.name = name;
				if (scope) hydrated.scope = scope;
				if (preReleaseId) hydrated.preReleaseId = preReleaseId;
				return hydrated;
			})
		);
	});
}
|
|
1193
|
+
/**
|
|
1194
|
+
* Hydrates the `name` field from package.json when not provided in config.
|
|
1195
|
+
*
|
|
1196
|
+
* Cases:
|
|
1197
|
+
* 1. If name is undefined and package.json has no name, returns a validation error.
|
|
1198
|
+
* 2. If name is undefined and package.json has a name, extracts the name (stripping scope) and returns it.
|
|
1199
|
+
* 3. Otherwise uses provided name.
|
|
1200
|
+
*/
|
|
1201
|
+
/**
 * Resolves the release `name`: an explicitly configured name wins; otherwise
 * it is extracted (scope stripped) from package.json's name. Fails with a
 * validation error when neither source provides one.
 */
function hydrateNameFromPackageJson(ctx, packageJson) {
	const configuredName = ctx.config.name;
	if (configuredName !== void 0) {
		logger.verbose(`PrepareReleaseConfigTask: Using provided name: "${configuredName}" as it is explicitly set`);
		return FireflyOkAsync(configuredName);
	}
	if (!packageJson.name) return validationErrAsync({ message: "Could not find a valid name in package.json" });
	const extractedName = parsePackageName(packageJson.name).name;
	logger.verbose(`PrepareReleaseConfigTask: Prepared name from package.json: ${extractedName}`);
	return FireflyOkAsync(extractedName);
}
|
|
1211
|
+
/**
|
|
1212
|
+
* Hydrates the `scope` field from package.json when not provided in config.
|
|
1213
|
+
*
|
|
1214
|
+
* Cases:
|
|
1215
|
+
* 1. If scope is explicitly provided (key exists and value is not undefined), it is used.
|
|
1216
|
+
* 2. If not provided, but package.json has a scoped `name` (e.g., "@scope/name"), the scope will be extracted and returned.
|
|
1217
|
+
* 3. Otherwise returns undefined.
|
|
1218
|
+
*/
|
|
1219
|
+
/**
 * Resolves the package `scope`: an explicitly configured scope wins; otherwise
 * the scope is parsed from a scoped package.json name ("@scope/name").
 * Resolves to undefined when neither source provides one.
 */
function hydrateScopeFromPackageJson(ctx, packageJson) {
	const explicitlyConfigured = Object.hasOwn(ctx.config, "scope") && ctx.config.scope !== void 0;
	if (explicitlyConfigured) {
		logger.verbose(`PrepareReleaseConfigTask: Using provided scope: "${ctx.config.scope}" as it is explicitly set`);
		return FireflyOkAsync(ctx.config.scope);
	}
	const parsedScope = packageJson.name?.startsWith("@") ? parsePackageName(packageJson.name).scope : void 0;
	if (parsedScope) {
		logger.verbose(`PrepareReleaseConfigTask: Prepared scope from package.json: ${parsedScope}`);
		return FireflyOkAsync(parsedScope);
	}
	logger.verbose("PrepareReleaseConfigTask: No scope to prepare from package.json");
	return FireflyOkAsync(void 0);
}
|
|
1234
|
+
/**
|
|
1235
|
+
* Hydrates the `preReleaseId` field from `package.json.version` when not provided.
|
|
1236
|
+
*
|
|
1237
|
+
* Cases:
|
|
1238
|
+
* 1. If preReleaseId is explicitly provided and not an empty string, it is used.
|
|
1239
|
+
* 2. If not provided, and `package.json.version` contains a prerelease segment, the prerelease identifier will be extracted and returned.
|
|
1240
|
+
* 3. Otherwise the function defaults to "alpha".
|
|
1241
|
+
*/
|
|
1242
|
+
/**
 * Resolves the `preReleaseId`: a non-blank configured value wins; otherwise it
 * is extracted from the prerelease segment of package.json's version (which
 * must be valid semver, else a validation error). Defaults to "alpha".
 */
function hydratePreReleaseIdFromPackageJson(ctx, packageJson) {
	const configured = ctx.config.preReleaseId;
	if (configured !== void 0 && configured.trim() !== "") {
		logger.verbose(`PrepareReleaseConfigTask: Using provided preReleaseId: "${configured}" as it is explicitly set`);
		return FireflyOkAsync(configured);
	}
	if (packageJson.version) {
		const parsed = parse(packageJson.version);
		if (!parsed) return validationErrAsync({ message: `Invalid version format in package.json: ${packageJson.version}` });
		const [firstPrereleasePart] = parsed.prerelease;
		// Only alphabetic identifiers count; numeric prerelease parts (e.g. "1.0.0-1") are ignored.
		if (typeof firstPrereleasePart === "string") {
			const preReleaseId = extractPreReleaseId(packageJson.version);
			logger.verbose(`PrepareReleaseConfigTask: Prepared preReleaseId from package.json: ${preReleaseId}`);
			return FireflyOkAsync(preReleaseId);
		}
	}
	logger.verbose("PrepareReleaseConfigTask: No preReleaseId to prepare from package.json, defaulting to 'alpha'");
	return FireflyOkAsync("alpha");
}
|
|
1259
|
+
/**
|
|
1260
|
+
* Hydrates branch setting from git.
|
|
1261
|
+
*
|
|
1262
|
+
* Behavior:
|
|
1263
|
+
* - If not inside a git repository, resolves to undefined.
|
|
1264
|
+
* - If a branch is explicitly provided in the config, validates it against the
|
|
1265
|
+
* current git branch and returns it (otherwise returns a validation error).
|
|
1266
|
+
* - If no branch is provided in the config, uses current git branch.
|
|
1267
|
+
*/
|
|
1268
|
+
function hydrateBranch(ctx) {
	return ctx.services.git.isInsideRepository().andThen((isRepo) => {
		// Outside a git repository there is no branch to infer; resolve to undefined.
		if (!isRepo) return FireflyOkAsync(void 0);
		return ctx.services.git.getCurrentBranch().andThen((currentBranch) => {
			// An explicitly configured, non-blank branch must match the checked-out branch.
			if (Object.hasOwn(ctx.config, "branch") && ctx.config.branch !== void 0 && ctx.config.branch.trim() !== "") {
				if (ctx.config.branch !== currentBranch) return validationErrAsync({ message: `Configured branch "${ctx.config.branch}" does not match current git branch "${currentBranch}"` });
				logger.verbose(`PrepareReleaseConfigTask: Using provided branch: "${ctx.config.branch}" as it is explicitly set`);
				return FireflyOkAsync(ctx.config.branch);
			}
			// No usable branch configured: fall back to the current git branch.
			logger.verbose(`PrepareReleaseConfigTask: Prepared branch from git: ${currentBranch}`);
			return FireflyOkAsync(currentBranch);
		});
	});
}
|
|
1282
|
+
/**
|
|
1283
|
+
* Creates the Prepare Release Config Task.
|
|
1284
|
+
*
|
|
1285
|
+
* This task determines and hydrates configuration settings, by inferring values from the environment.
|
|
1286
|
+
*
|
|
1287
|
+
* This task:
|
|
1288
|
+
* 1. Detects repository owner/repo from git remote URL
|
|
1289
|
+
* 2. Extracts name and scope from package.json
|
|
1290
|
+
* 3. Extracts preReleaseId from package.json version
|
|
1291
|
+
* 4. Detects current git branch if not provided
|
|
1292
|
+
*/
|
|
1293
|
+
function createPrepareReleaseConfigTask() {
	return TaskBuilder.create("prepare-release-config").description("Hydrate and prepare the release configuration").execute((ctx) => {
		// Accumulator is mutated inside the .map() below; only truthy inferred
		// values are copied so absent inferences never set keys to undefined.
		const hydrated = {};
		// The three hydrations are independent and run together via zip3Async.
		return zip3Async(hydrateRepository(ctx), hydrateFromPackageJson(ctx), hydrateBranch(ctx)).map(([repository, pkgData, branch]) => {
			if (repository) hydrated.repository = repository;
			if (pkgData.name) hydrated.name = pkgData.name;
			if (pkgData.scope) hydrated.scope = pkgData.scope;
			if (pkgData.preReleaseId) hydrated.preReleaseId = pkgData.preReleaseId;
			if (branch) hydrated.branch = branch;
			logger.verbose(`PrepareReleaseConfigTask: Hydrated config: ${JSON.stringify(hydrated)}`);
			// Fork a new context carrying the inferred values instead of mutating ctx.
			return ctx.fork("hydratedConfig", hydrated);
		});
	}).build();
}
|
|
1307
|
+
|
|
1308
|
+
//#endregion
|
|
1309
|
+
//#region src/core/task/task.helpers.ts
|
|
1310
|
+
/**
|
|
1311
|
+
* Runs a sequence of void async operations on the context.
|
|
1312
|
+
*
|
|
1313
|
+
* Like `pipeline` but for operations that don't modify the context.
|
|
1314
|
+
* Context is returned unchanged after all operations complete.
|
|
1315
|
+
*
|
|
1316
|
+
* @param ctx - Context to pass to operations
|
|
1317
|
+
* @param operations - Array of void async operations
|
|
1318
|
+
* @returns The original context after all operations complete
|
|
1319
|
+
*
|
|
1320
|
+
* @example
|
|
1321
|
+
* ```typescript
|
|
1322
|
+
* execute: (ctx) => runChecks(ctx,
|
|
1323
|
+
* (c) => checkGitStatus(c),
|
|
1324
|
+
* (c) => checkPermissions(c),
|
|
1325
|
+
* (c) => checkDiskSpace(c),
|
|
1326
|
+
* )
|
|
1327
|
+
* ```
|
|
1328
|
+
*/
|
|
1329
|
+
/**
 * Runs a sequence of void async checks against the context, short-circuiting
 * on the first error, then resolves with the unchanged context.
 *
 * @param ctx - Context passed to every check
 * @param checks - Void async operations executed in order
 * @returns The original context after all checks succeed
 */
function runChecks(ctx, ...checks) {
	let chain = FireflyOkAsync(void 0);
	for (const check of checks) chain = chain.andThen(() => check(ctx));
	return chain.map(() => ctx);
}
|
|
1333
|
+
|
|
1334
|
+
//#endregion
|
|
1335
|
+
//#region src/commands/release/tasks/release-preflight.task.ts
|
|
1336
|
+
/**
|
|
1337
|
+
* Checks if the current directory is a git repository.
|
|
1338
|
+
*/
|
|
1339
|
+
/** Fails the preflight when the working directory is not inside a git repository. */
function checkGitRepository(ctx) {
	return ctx.services.git.isInsideRepository()
		.andThen((insideRepo) => ensureNotAsync(!insideRepo, { message: "We are not inside a git repository!" }))
		.andTee(() => logger.verbose("ReleasePreflightTask: Confirmed inside a git repository."));
}
|
|
1342
|
+
/**
|
|
1343
|
+
* Checks if the working directory is clean (no uncommitted changes).
|
|
1344
|
+
*/
|
|
1345
|
+
/**
 * Fails the preflight with a conflict error when the working directory has
 * staged, unstaged, or untracked changes; the error lists which kinds were found.
 */
function checkCleanWorkingDirectory(ctx) {
	const collectIssues = (status) => {
		const issues = [];
		if (status.hasStaged) issues.push("staged changes");
		if (status.hasUnstaged) issues.push("unstaged changes");
		if (status.hasUntracked) issues.push("untracked files");
		return issues;
	};
	return ctx.services.git.getStatus().andThen((status) => {
		if (status.isClean) return FireflyOk(void 0);
		return conflictErrAsync({
			message: `Working directory is not clean. Found: ${collectIssues(status).join(", ")}. Commit or stash changes first.`,
			details: status
		});
	}).andTee(() => logger.verbose("ReleasePreflightTask: Confirmed working directory is clean."));
}
|
|
1360
|
+
/**
|
|
1361
|
+
* Checks if there are unpushed commits in the current branch.
|
|
1362
|
+
*/
|
|
1363
|
+
function checkUnpushedCommits(ctx) {
	// Convert "has unpushed commits" into a preflight failure.
	const rejectWhenUnpushed = (result) => ensureNotAsync(result.hasUnpushed, {
		message: `Found ${result.count} unpushed commit(s). Push changes before releasing.`,
		source: "commands/release/preflight"
	});
	return ctx.services.git
		.getUnpushedCommits()
		.andThen(rejectWhenUnpushed)
		.andTee(() => logger.verbose("ReleasePreflightTask: Confirmed no unpushed commits found."));
}
|
|
1369
|
+
/**
|
|
1370
|
+
* Checks if the `cliff.toml` configuration file exists in the project root.
|
|
1371
|
+
*/
|
|
1372
|
+
function checkCliffConfig(ctx) {
	const CLIFF_CONFIG_FILE = "cliff.toml";
	// git-cliff refuses to run without its config; surface a helpful link.
	const rejectWhenMissing = (exists) => ensureNotAsync(!exists, {
		message: `Configuration file "${CLIFF_CONFIG_FILE}" not found. See: https://git-cliff.org/docs/usage/initializing`,
		source: "commands/release/preflight"
	});
	return ctx.services.fs
		.exists(CLIFF_CONFIG_FILE)
		.andThen(rejectWhenMissing)
		.andTee(() => logger.verbose(`ReleasePreflightTask: Confirmed presence of "${CLIFF_CONFIG_FILE}" in project root.`));
}
|
|
1379
|
+
/**
|
|
1380
|
+
* Creates the Release Preflight Task.
|
|
1381
|
+
*
|
|
1382
|
+
* This task checks the environment and prerequisites for a release.
|
|
1383
|
+
* It can be conditionally skipped based on the provided skip condition, though not recommended.
|
|
1384
|
+
*
|
|
1385
|
+
* Skipping this may led to malformed releases or errors during the release process,
|
|
1386
|
+
* and generally done for development purposes only or if you know what you are doing.
|
|
1387
|
+
*
|
|
1388
|
+
* This task:
|
|
1389
|
+
* 1. Check if its on a git repository
|
|
1390
|
+
* 2. Check if on a clean working tree, no uncommitted changes
|
|
1391
|
+
* 3. Check if no unpushed commits
|
|
1392
|
+
* 4. Check if there is `cliff.toml` file in the project root
|
|
1393
|
+
*/
|
|
1394
|
+
function createReleasePreflightTask(skipCondition) {
	// Checks run in order: repo presence, clean tree, no unpushed commits, cliff.toml exists.
	// `skipWhen(skipCondition)` lets the caller bypass all of them (dev use only).
	return TaskBuilder.create("release-preflight").description("Validate environment and prerequisites for a release").skipWhen(skipCondition).execute((ctx) => runChecks(ctx, checkGitRepository, checkCleanWorkingDirectory, checkUnpushedCommits, checkCliffConfig)).build();
}
|
|
1397
|
+
|
|
1398
|
+
//#endregion
|
|
1399
|
+
//#region src/commands/release/groups/setup.group.ts
|
|
1400
|
+
/**
|
|
1401
|
+
* Creates the setup group containing preflight, config preparation, and version initialization.
|
|
1402
|
+
*
|
|
1403
|
+
* This group runs first and has no skip condition at the group level.
|
|
1404
|
+
* Individual tasks have their own skip conditions (preflight can be skipped via config).
|
|
1405
|
+
*/
|
|
1406
|
+
function createReleaseSetupGroup(skipPreflight) {
	// Combine the three task results; any failure short-circuits group creation.
	const combined = Result.combine([
		createReleasePreflightTask(() => skipPreflight),
		createPrepareReleaseConfigTask(),
		createInitializeReleaseVersion()
	]);
	// Propagate the error (mapped to an empty value to satisfy the return shape).
	if (combined.isErr()) return combined.map(() => ({}));
	return buildTaskGroup("setup")
		.description("Setup and initialization tasks")
		.tasks(combined.value)
		.build();
}
|
|
1416
|
+
/**
|
|
1417
|
+
* Creates all release task groups in the correct order.
|
|
1418
|
+
*
|
|
1419
|
+
* @param skipPreflight - Whether to skip the preflight check
|
|
1420
|
+
* @returns Array of task groups or an error
|
|
1421
|
+
*/
|
|
1422
|
+
function createReleaseGroups(skipPreflight) {
	// Order matters: setup must precede strategy selection and bump execution.
	return Result.combine([
		createReleaseSetupGroup(skipPreflight),
		createBumpStrategyGroup(),
		createBumpExecutionGroup()
	]);
}
|
|
1430
|
+
|
|
1431
|
+
//#endregion
|
|
1432
|
+
//#region src/domain/semver/semver.definitions.ts
|
|
1433
|
+
/**
|
|
1434
|
+
* Represents the type of version bump decision to be made.
|
|
1435
|
+
* If not specified, the user will be prompted to select a release decision.
|
|
1436
|
+
*/
|
|
1437
|
+
const ReleaseTypeValues = [
	"major",
	"minor",
	"patch",
	"prerelease",
	"premajor",
	"preminor",
	"prepatch",
	"graduate"
];
// Zod enum derived from the value list above; used by the release config schema.
const ReleaseTypeSchema = z$1.enum(ReleaseTypeValues);
/**
 * Defines the base version for a pre-release.
 * Accepts either a number or the string "0" or "1", representing the starting point of the pre-release cycle.
 * This field is optional to allow flexibility in pre-release versioning.
 */
const PreReleaseBaseSchema = z$1.union([
	z$1.number(),
	z$1.literal("0"),
	z$1.literal("1")
]).optional();
|
|
1458
|
+
|
|
1459
|
+
//#endregion
|
|
1460
|
+
//#region src/commands/release/release.config.ts
|
|
1461
|
+
// Default templates; {{name}} and {{version}} placeholders are substituted at release time.
const COMMIT_MSG_TEMPLATE = "chore(release): release {{name}}@{{version}}";
const TAG_NAME_TEMPLATE = "{{name}}@{{version}}";
const RELEASE_TITLE_TEMPLATE = "{{name}}@{{version}}";
|
|
1464
|
+
function validateReleaseFlagExclusivity(ctx) {
	// These three flags are mutually exclusive release modes.
	const exclusiveFlags = ["releaseLatest", "releasePreRelease", "releaseDraft"];
	const enabledCount = exclusiveFlags.filter((flag) => ctx.value[flag]).length;
	if (enabledCount <= 1) return;
	ctx.issues.push({
		code: "custom",
		message: `Only one of ${exclusiveFlags.join(", ")} can be set to true.`,
		input: ctx.value,
		path: ["releaseLatest"]
	});
}
|
|
1477
|
+
function validateSkipGitRedundancy(ctx) {
	const { skipGit, skipPush } = ctx.value;
	// skipGit already implies no push, so setting both is redundant.
	if (!(skipGit && skipPush)) return;
	ctx.issues.push({
		code: "custom",
		message: "skipPush should not be set when skipGit is true.",
		input: ctx.value
	});
}
|
|
1484
|
+
function validateBumpStrategyCompatibility(ctx) {
	const { bumpStrategy, releaseType } = ctx.value;
	const isAuto = bumpStrategy === "auto";
	// With auto bumping, the type is inferred from commits; only "prerelease" may be forced.
	const hasIncompatibleType = releaseType !== void 0 && releaseType !== "prerelease";
	if (isAuto && hasIncompatibleType) {
		ctx.issues.push({
			code: "custom",
			message: "When bumpStrategy is 'auto', releaseType can only be 'prerelease' if specified.",
			input: ctx.value
		});
	}
}
|
|
1492
|
+
function validateSkipFlagCombinations(ctx) {
	const { skipBump, skipChangelog, skipGit, skipGitHubRelease } = ctx.value;
	const report = (message) => ctx.issues.push({
		code: "custom",
		message,
		input: ctx.value
	});
	// Everything skipped: the command would be a no-op. Report once and stop.
	if (skipBump && skipChangelog && skipGit && skipGitHubRelease) {
		report("Invalid configuration: skipBump, skipChangelog, skipGit, and skipGitHubRelease are all enabled. Nothing to do.");
		return;
	}
	// Git steps enabled but nothing would have changed to commit or tag.
	if (skipBump && skipChangelog && !skipGit) {
		report("Invalid configuration: skipBump and skipChangelog are enabled without skipGit. There are no changes to commit or tag.");
	}
	// A GitHub release cannot be created without the git tag it points to.
	if (skipGit && !skipGitHubRelease) {
		report("Invalid configuration: skipGit is enabled without skipGitHubRelease. GitHub releases require a git tag.");
	}
}
|
|
1513
|
+
/**
 * Zod schema for the `release` command configuration.
 * Field-level defaults and descriptions feed both validation and generated docs;
 * the trailing `.check(...)` runs cross-field validators that append to ctx.issues.
 */
const ReleaseConfigSchema = z$1.object({
	name: z$1.string().optional().describe("Unscoped project name. Auto-detected from package.json."),
	scope: z$1.string().optional().describe("Org/user scope without '@'. Auto-detected from package.json."),
	base: z$1.string().default("").describe("Relative path from repository root to project root."),
	branch: z$1.string().optional().describe("Git branch to release from."),
	changelogPath: z$1.string().default("CHANGELOG.md").describe("Changelog file path, relative to project root."),
	bumpStrategy: BumpStrategySchema.describe("\"auto\" (from commits) or \"manual\" (user-specified)."),
	releaseType: ReleaseTypeSchema.optional().describe("The release type to bump."),
	preReleaseId: z$1.string().optional().describe("Pre-release ID (e.g., \"alpha\", \"beta\")."),
	preReleaseBase: PreReleaseBaseSchema.describe("Starting version for pre-releases."),
	releaseNotes: z$1.string().default("").describe("Custom release notes for changelog."),
	commitMessage: z$1.string().default(COMMIT_MSG_TEMPLATE).describe("Commit message template with placeholders."),
	tagName: z$1.string().default(TAG_NAME_TEMPLATE).describe("Tag name template with placeholders."),
	skipBump: z$1.coerce.boolean().default(false).describe("Skip version bump step."),
	skipChangelog: z$1.coerce.boolean().default(false).describe("Skip changelog generation step."),
	skipPush: z$1.coerce.boolean().default(false).describe("Skip push step."),
	skipGitHubRelease: z$1.coerce.boolean().default(false).describe("Skip GitHub release step."),
	skipGit: z$1.coerce.boolean().default(false).describe("Skip all git-related steps."),
	skipPreflightCheck: z$1.coerce.boolean().default(false).describe("Skip preflight checks."),
	releaseTitle: z$1.string().default(RELEASE_TITLE_TEMPLATE).describe("GitHub release title with placeholders."),
	releaseLatest: z$1.coerce.boolean().default(true).describe("Mark as latest release."),
	releasePreRelease: z$1.coerce.boolean().default(false).describe("Mark as pre-release."),
	releaseDraft: z$1.coerce.boolean().default(false).describe("Release as draft version.")
}).check((ctx) => {
	// Cross-field validation; each validator may push issues onto ctx.issues.
	validateReleaseFlagExclusivity(ctx);
	validateSkipGitRedundancy(ctx);
	validateBumpStrategyCompatibility(ctx);
	validateSkipFlagCombinations(ctx);
});
|
|
1542
|
+
|
|
1543
|
+
//#endregion
|
|
1544
|
+
//#region src/core/command/command.factory.ts
|
|
1545
|
+
/**
|
|
1546
|
+
* Factory function for creating commands with full type inference.
|
|
1547
|
+
*
|
|
1548
|
+
* Provides better IDE support and type checking when defining commands.
|
|
1549
|
+
* Uses `satisfies` pattern for optimal type inference.
|
|
1550
|
+
*
|
|
1551
|
+
* @template TConfig - Command configuration type
|
|
1552
|
+
* @template TData - Workflow data type
|
|
1553
|
+
* @template TServices - Tuple of required service keys
|
|
1554
|
+
* @param command - Command definition
|
|
1555
|
+
* @returns The same command (identity function for type inference)
|
|
1556
|
+
*
|
|
1557
|
+
* @example
|
|
1558
|
+
* ```typescript
|
|
1559
|
+
* const myCommand = createCommand({
|
|
1560
|
+
* meta: {
|
|
1561
|
+
* name: "my-command",
|
|
1562
|
+
* description: "Does something useful",
|
|
1563
|
+
* configSchema: z.object({ option: z.boolean() }),
|
|
1564
|
+
* requiredServices: ["fs"] as const,
|
|
1565
|
+
* },
|
|
1566
|
+
* buildTasks: (ctx) => okAsync([]),
|
|
1567
|
+
* });
|
|
1568
|
+
* ```
|
|
1569
|
+
*/
|
|
1570
|
+
function createCommand(command) {
	// Identity at runtime; exists solely so TypeScript can infer the command's generics.
	return command;
}
|
|
1573
|
+
|
|
1574
|
+
//#endregion
|
|
1575
|
+
//#region src/core/service/service.registry.ts
|
|
1576
|
+
/**
|
|
1577
|
+
* Helper function to define a service with proper type inference.
|
|
1578
|
+
* @internal
|
|
1579
|
+
*/
|
|
1580
|
+
function defineService(definition) {
	// Identity at runtime; used only for type inference of service definitions.
	return definition;
}
|
|
1583
|
+
/**
|
|
1584
|
+
* Registry of all available services and their factories.
|
|
1585
|
+
* Each service is lazily loaded via dynamic `import()`.
|
|
1586
|
+
*/
|
|
1587
|
+
const SERVICE_DEFINITIONS = {
	// Filesystem service: lazily loaded, rooted at the workspace base path.
	fs: defineService({ factory: async ({ basePath }) => {
		const { createFileSystemService } = await import("./filesystem.service-DdVwnqoa.js");
		return createFileSystemService(basePath);
	} }),
	// package.json service: depends on the fs service, resolved via getService.
	packageJson: defineService({
		dependencies: ["fs"],
		factory: async ({ getService }) => {
			const fs = await getService("fs");
			const { createPackageJsonService } = await import("./package-json.service-QN7SzRTt.js");
			return createPackageJsonService(fs);
		}
	}),
	// Git service: lazily loaded, operates from the workspace base path.
	git: defineService({ factory: async ({ basePath }) => {
		const { createGitService } = await import("./git.service-DarjfyXF.js");
		return createGitService(basePath);
	} })
};
/**
 * Array of all service keys for iteration
 */
const ALL_SERVICE_KEYS = Object.keys(SERVICE_DEFINITIONS);
|
|
1609
|
+
/**
|
|
1610
|
+
* Helper to define a tuple of service keys with proper type inference.
|
|
1611
|
+
* Use this to get editor autocomplete for valid service keys.
|
|
1612
|
+
*
|
|
1613
|
+
* Example:
|
|
1614
|
+
* const RELEASE_SERVICES = defineServiceKeys("fs");
|
|
1615
|
+
*/
|
|
1616
|
+
function defineServiceKeys(...keys) {
	// Identity at runtime; variadic form preserves the tuple type for inference.
	return keys;
}
|
|
1619
|
+
|
|
1620
|
+
//#endregion
|
|
1621
|
+
//#region src/core/task/task.graph.ts
|
|
1622
|
+
/**
|
|
1623
|
+
* Validates a task dependency graph.
|
|
1624
|
+
*
|
|
1625
|
+
* Checks for:
|
|
1626
|
+
* - Duplicate task IDs
|
|
1627
|
+
* - Missing dependencies (references to non-existent tasks)
|
|
1628
|
+
* - Circular dependencies
|
|
1629
|
+
*
|
|
1630
|
+
* Returns the execution order (topological sort) if valid.
|
|
1631
|
+
*
|
|
1632
|
+
* @param tasks - Array of tasks to validate
|
|
1633
|
+
* @returns Validation result with errors, warnings, and execution order
|
|
1634
|
+
*
|
|
1635
|
+
* @example
|
|
1636
|
+
* ```typescript
|
|
1637
|
+
* const result = validateTaskGraph(tasks);
|
|
1638
|
+
* if (!result.isValid) {
|
|
1639
|
+
* console.error("Graph validation failed:", result.errors);
|
|
1640
|
+
* } else {
|
|
1641
|
+
* console.log("Execution order:", result.executionOrder);
|
|
1642
|
+
* }
|
|
1643
|
+
* ```
|
|
1644
|
+
*/
|
|
1645
|
+
function validateTaskGraph(tasks) {
	const errors = [];
	const warnings = [];
	// Index tasks by ID (also flags duplicates into `errors`).
	const taskMap = buildTaskMap(tasks, errors);
	checkMissingDependencies(tasks, taskMap, errors, warnings);
	checkCyclicDependencies(tasks, taskMap, errors);
	// Execution order / depths are only computed when no errors were found so far.
	const { executionOrder, depthMap } = computeExecutionOrder(tasks, taskMap, errors);
	return {
		isValid: errors.length === 0,
		errors,
		warnings,
		executionOrder,
		depthMap
	};
}
|
|
1660
|
+
/**
|
|
1661
|
+
* Builds a map of task IDs to Task objects for efficient lookup.
|
|
1662
|
+
*
|
|
1663
|
+
* Also detects and reports duplicate task IDs.
|
|
1664
|
+
*
|
|
1665
|
+
* @param tasks - Array of tasks to index
|
|
1666
|
+
* @param errors - Array to collect duplicate ID errors
|
|
1667
|
+
* @returns Map from task ID to Task object
|
|
1668
|
+
*/
|
|
1669
|
+
function buildTaskMap(tasks, errors) {
	const byId = new Map();
	for (const task of tasks) {
		const id = task.meta.id;
		if (byId.has(id)) {
			// First occurrence wins; duplicates are reported, not overwritten.
			errors.push(`Duplicate task ID: "${id}"`);
		} else {
			byId.set(id, task);
		}
	}
	return byId;
}
|
|
1675
|
+
/**
|
|
1676
|
+
* Validates that all task dependencies reference existing tasks.
|
|
1677
|
+
*
|
|
1678
|
+
* Also warns about tasks missing descriptions.
|
|
1679
|
+
*
|
|
1680
|
+
* @param tasks - Array of tasks to validate
|
|
1681
|
+
* @param taskMap - Map of task IDs to Task objects
|
|
1682
|
+
* @param errors - Array to collect missing dependency errors
|
|
1683
|
+
* @param warnings - Array to collect non-critical warnings
|
|
1684
|
+
*/
|
|
1685
|
+
function checkMissingDependencies(tasks, taskMap, errors, warnings) {
	for (const task of tasks) {
		// Any dependency not present in the map is an unknown reference.
		for (const depId of task.meta.dependencies ?? []) {
			if (!taskMap.has(depId)) {
				errors.push(`Task "${task.meta.id}" depends on unknown task "${depId}"`);
			}
		}
		// Missing/blank descriptions are only a warning, never fatal.
		const description = task.meta.description;
		if (!description || description.trim() === "") {
			warnings.push(`Task "${task.meta.id}" has no description`);
		}
	}
}
|
|
1692
|
+
/**
|
|
1693
|
+
* Detects circular dependencies in the task graph using DFS.
|
|
1694
|
+
*
|
|
1695
|
+
* When a cycle is detected, adds an error message showing the cycle path
|
|
1696
|
+
* (e.g., "A → B → C → A").
|
|
1697
|
+
*
|
|
1698
|
+
* @param tasks - Array of tasks to check
|
|
1699
|
+
* @param taskMap - Map of task IDs to Task objects
|
|
1700
|
+
* @param errors - Array to collect cycle detection errors
|
|
1701
|
+
*/
|
|
1702
|
+
function checkCyclicDependencies(tasks, taskMap, errors) {
	const finished = new Set();
	const onStack = new Set();
	// DFS; `trail` carries the path so the cycle can be rendered (e.g. "A → B → A").
	const visit = (taskId, trail) => {
		if (onStack.has(taskId)) {
			const start = trail.indexOf(taskId);
			const cyclePath = [...trail.slice(start), taskId].join(" → ");
			errors.push(`Circular dependency detected: ${cyclePath}`);
			return true;
		}
		if (finished.has(taskId)) return false;
		finished.add(taskId);
		onStack.add(taskId);
		const node = taskMap.get(taskId);
		if (node) {
			for (const depId of node.meta.dependencies ?? []) {
				// Unknown deps are ignored here; checkMissingDependencies reports them.
				if (taskMap.has(depId) && visit(depId, [...trail, taskId])) return true;
			}
		}
		onStack.delete(taskId);
		return false;
	};
	for (const task of tasks) {
		if (!finished.has(task.meta.id)) visit(task.meta.id, []);
	}
}
|
|
1725
|
+
/**
|
|
1726
|
+
* Computes the execution order and depth map for a validated task graph.
|
|
1727
|
+
*
|
|
1728
|
+
* Uses topological sort to determine execution order and calculates
|
|
1729
|
+
* the depth of each task in the dependency tree.
|
|
1730
|
+
*
|
|
1731
|
+
* @param tasks - Array of tasks to order
|
|
1732
|
+
* @param taskMap - Map of task IDs to Task objects
|
|
1733
|
+
* @param errors - Array of existing validation errors (skips computation if non-empty)
|
|
1734
|
+
* @returns Object containing execution order (task IDs) and depth map
|
|
1735
|
+
*/
|
|
1736
|
+
function computeExecutionOrder(tasks, taskMap, errors) {
	const executionOrder = [];
	const depthMap = new Map();
	// Skip entirely when earlier validation already failed.
	if (errors.length === 0) {
		const sorted = topologicalSort(tasks);
		if (sorted.isOk()) {
			executionOrder.push(...sorted.value);
			// Depth = 1 + deepest dependency; roots end up at depth 0.
			for (const taskId of executionOrder) {
				const task = taskMap.get(taskId);
				if (!task) continue;
				let deepest = -1;
				for (const depId of task.meta.dependencies ?? []) {
					deepest = Math.max(deepest, depthMap.get(depId) ?? 0);
				}
				depthMap.set(taskId, deepest + 1);
			}
		}
	}
	return {
		executionOrder,
		depthMap
	};
}
|
|
1760
|
+
/**
|
|
1761
|
+
* Performs topological sort on tasks.
|
|
1762
|
+
*
|
|
1763
|
+
* @param tasks - Tasks to sort
|
|
1764
|
+
* @returns Sorted task IDs or error if cycle detected
|
|
1765
|
+
*/
|
|
1766
|
+
function topologicalSort(tasks) {
	// Kahn's algorithm: build in-degree counts and a forward adjacency list.
	const taskMap = new Map();
	const inDegree = new Map();
	const adjacency = new Map();
	for (const task of tasks) {
		taskMap.set(task.meta.id, task);
		inDegree.set(task.meta.id, 0);
		adjacency.set(task.meta.id, []);
	}
	for (const task of tasks) {
		for (const depId of task.meta.dependencies ?? []) {
			const dependents = adjacency.get(depId);
			if (dependents) {
				// Edge dep -> task; unknown deps are silently skipped here.
				dependents.push(task.meta.id);
				inDegree.set(task.meta.id, (inDegree.get(task.meta.id) ?? 0) + 1);
			}
		}
	}
	// Seed with every task that has no (known) dependencies.
	const ready = [];
	for (const [taskId, degree] of inDegree) {
		if (degree === 0) ready.push(taskId);
	}
	const ordered = [];
	let head = 0;
	while (head < ready.length) {
		const current = ready[head];
		head += 1;
		if (!current) break;
		ordered.push(current);
		for (const dependent of adjacency.get(current) ?? []) {
			const remaining = (inDegree.get(dependent) ?? 1) - 1;
			inDegree.set(dependent, remaining);
			if (remaining === 0) ready.push(dependent);
		}
	}
	// If any task was never emitted, a cycle kept its in-degree above zero.
	if (ordered.length !== tasks.length) return invalidErr({ message: "Circular dependency detected in task graph" });
	return FireflyOk(ordered);
}
|
|
1802
|
+
/**
|
|
1803
|
+
* Computes statistics about a task graph.
|
|
1804
|
+
*
|
|
1805
|
+
* @param tasks - Tasks to analyze
|
|
1806
|
+
* @returns Graph statistics
|
|
1807
|
+
*
|
|
1808
|
+
* @example
|
|
1809
|
+
* ```typescript
|
|
1810
|
+
* const stats = getGraphStatistics(tasks);
|
|
1811
|
+
* console.log(`Total tasks: ${stats.totalTasks}`);
|
|
1812
|
+
* console.log(`Max depth: ${stats.maxDepth}`);
|
|
1813
|
+
* ```
|
|
1814
|
+
*/
|
|
1815
|
+
function getGraphStatistics(tasks) {
	// Validate first to obtain the depth map used for maxDepth.
	const validation = validateTaskGraph(tasks);
	// Reverse-edge counts: how many tasks depend on each task ID.
	const dependentCount = /* @__PURE__ */ new Map();
	for (const task of tasks) dependentCount.set(task.meta.id, 0);
	let totalEdges = 0;
	for (const task of tasks) {
		const deps = task.meta.dependencies ?? [];
		totalEdges += deps.length;
		for (const depId of deps) dependentCount.set(depId, (dependentCount.get(depId) ?? 0) + 1);
	}
	// Roots have no dependencies; leaves have no dependents.
	const rootTasks = tasks.filter((t) => (t.meta.dependencies ?? []).length === 0);
	const leafTasks = tasks.filter((t) => (dependentCount.get(t.meta.id) ?? 0) === 0);
	// All tasks tied for the largest dependency count (only listed when that count > 0).
	const sortedByDeps = [...tasks].sort((a, b) => (b.meta.dependencies ?? []).length - (a.meta.dependencies ?? []).length);
	const maxDeps = (sortedByDeps[0]?.meta.dependencies ?? []).length;
	const mostDependentTasks = sortedByDeps.filter((t) => (t.meta.dependencies ?? []).length === maxDeps && maxDeps > 0).map((t) => t.meta.id);
	// All tasks tied for the largest dependent count (only listed when that count > 0).
	const sortedByDependents = [...dependentCount.entries()].sort((a, b) => b[1] - a[1]);
	const maxDependents = sortedByDependents[0]?.[1] ?? 0;
	const mostDependendUponTasks = sortedByDependents.filter(([, count]) => count === maxDependents && maxDependents > 0).map(([id]) => id);
	return {
		totalTasks: tasks.length,
		rootTasks: rootTasks.length,
		leafTasks: leafTasks.length,
		maxDepth: Math.max(...validation.depthMap.values(), 0),
		totalEdges,
		avgDependencies: tasks.length > 0 ? totalEdges / tasks.length : 0,
		mostDependentTasks,
		mostDependendUponTasks
	};
}
|
|
1844
|
+
/**
|
|
1845
|
+
* Logs graph statistics to the logger.
|
|
1846
|
+
*
|
|
1847
|
+
* @param stats - Graph statistics to log
|
|
1848
|
+
*/
|
|
1849
|
+
function logGraphStatistics(stats) {
	// Assemble the report lines first, then emit them in a single pass.
	const report = [
		"",
		"Task Graph Statistics:",
		`Total tasks: ${stats.totalTasks}`,
		`Root tasks (can run first): ${stats.rootTasks}`,
		`Leaf tasks (final): ${stats.leafTasks}`,
		`Max depth: ${stats.maxDepth}`,
		`Avg dependencies: ${stats.avgDependencies.toFixed(2)}`
	];
	if (stats.mostDependentTasks.length > 0) report.push(`Most dependent tasks: ${stats.mostDependentTasks.join(", ")}`);
	if (stats.mostDependendUponTasks.length > 0) report.push(`Critical path tasks: ${stats.mostDependendUponTasks.join(", ")}`);
	report.push("");
	for (const line of report) logger.verbose(line);
}
|
|
1861
|
+
|
|
1862
|
+
//#endregion
|
|
1863
|
+
//#region src/commands/release/release.command.ts
|
|
1864
|
+
// Services the release workflow requires from the registry.
const RELEASE_SERVICES = defineServiceKeys("fs", "packageJson", "git");
// CLI command definition for `firefly release`.
const releaseCommand = createCommand({
	meta: {
		name: "release",
		description: "Automated semantic versioning, changelog generation, and GitHub release creation",
		configSchema: ReleaseConfigSchema,
		requiredServices: RELEASE_SERVICES
	},
	buildTasks(context) {
		// Assemble the setup/strategy/execution groups; preflight may be skipped via config.
		const groupsResult = createReleaseGroups(context.config.skipPreflightCheck === true);
		if (groupsResult.isErr()) return FireflyErrAsync(groupsResult.error);
		// Flatten the groups into one ordered task list for the runner.
		const tasks = groupsResult.value.flatMap((group) => group.tasks);
		if (DebugFlags.showTaskGraphStats) logGraphStatistics(getGraphStatistics(tasks));
		return FireflyOkAsync(tasks);
	}
});
|
|
1880
|
+
|
|
1881
|
+
//#endregion
|
|
1882
|
+
//#region src/core/environment/workspace.ts
|
|
1883
|
+
/**
|
|
1884
|
+
* Represents the workspace context for CLI operations.
|
|
1885
|
+
*
|
|
1886
|
+
* The workspace provides:
|
|
1887
|
+
* - Centralized base path management
|
|
1888
|
+
* - Path resolution relative to the workspace root
|
|
1889
|
+
* - Consistent working directory across all services
|
|
1890
|
+
*
|
|
1891
|
+
* @example
|
|
1892
|
+
* ```typescript
|
|
1893
|
+
* // Create a workspace from current directory
|
|
1894
|
+
* const workspace = Workspace.current();
|
|
1895
|
+
*
|
|
1896
|
+
* // Create a workspace from explicit path
|
|
1897
|
+
* const workspace = Workspace.from("/path/to/project");
|
|
1898
|
+
*
|
|
1899
|
+
* // Resolve paths relative to workspace
|
|
1900
|
+
* const configPath = workspace.resolve("firefly.config.ts");
|
|
1901
|
+
* const srcPath = workspace.resolve("src", "index.ts");
|
|
1902
|
+
* ```
|
|
1903
|
+
*/
|
|
1904
|
+
/**
 * Workspace context for CLI operations: centralizes the base path and
 * resolves all relative paths against it.
 */
var Workspace = class Workspace {
	/** Absolute path to the workspace root (normalized in the constructor). */
	#basePath;
	/**
	 * @param basePath - Root directory; resolved to an absolute path via path.resolve.
	 */
	constructor(basePath) {
		this.#basePath = path.resolve(basePath);
	}
	/**
	 * The absolute path to the workspace root directory.
	 */
	get basePath() {
		return this.#basePath;
	}
	/**
	 * Alias for `basePath` - the current working directory for operations.
	 */
	get cwd() {
		return this.#basePath;
	}
	/**
	 * Creates a workspace rooted at `process.cwd()`.
	 *
	 * @returns A new Workspace instance
	 */
	static current() {
		return new Workspace(process.cwd());
	}
	/**
	 * Creates a workspace rooted at an explicit path.
	 *
	 * @param basePath - The root directory for the workspace
	 * @returns A new Workspace instance
	 */
	static from(basePath) {
		return new Workspace(basePath);
	}
	/**
	 * Creates a workspace from options, falling back to the current directory
	 * when `options.basePath` is absent.
	 *
	 * @param options - Optional workspace configuration ({ basePath?: string })
	 * @returns A new Workspace instance
	 */
	static fromOptions(options) {
		return options?.basePath ? Workspace.from(options.basePath) : Workspace.current();
	}
	/**
	 * Resolves path segments relative to the workspace root.
	 * If the joined segments form an absolute path, it is returned unchanged.
	 *
	 * @param segments - Path segments to join and resolve
	 * @returns The absolute resolved path
	 */
	resolve(...segments) {
		const joined = path.join(...segments);
		if (path.isAbsolute(joined)) return joined;
		return path.join(this.#basePath, joined);
	}
	/**
	 * Checks if a path is within the workspace boundaries.
	 *
	 * Fix: a plain `startsWith(basePath)` prefix check treated sibling
	 * directories such as "/project-other" as inside "/project". We now
	 * accept the root itself, or any path that continues with a path
	 * separator after the root (root-directory bases are handled too).
	 *
	 * @param targetPath - The path to check
	 * @returns `true` if the path is within the workspace
	 */
	contains(targetPath) {
		const resolved = path.resolve(targetPath);
		if (resolved === this.#basePath) return true;
		const prefix = this.#basePath.endsWith(path.sep) ? this.#basePath : this.#basePath + path.sep;
		return resolved.startsWith(prefix);
	}
	/**
	 * Returns a string representation of the workspace.
	 */
	toString() {
		return `Workspace(${this.#basePath})`;
	}
	/**
	 * Returns the workspace as a JSON-serializable object.
	 */
	toJSON() {
		return { basePath: this.#basePath };
	}
};
|
|
2024
|
+
|
|
2025
|
+
//#endregion
|
|
2026
|
+
//#region src/core/context/workflow.context.ts
|
|
2027
|
+
/**
* Immutable workflow context shared between workflow tasks.
*
* Carries the workflow configuration, resolved services, workspace, and an
* accumulated data record. All "mutations" (`fork`, `forkMultiple`) return a
* new context instance; the start time, workspace, config, and services are
* shared by reference across derived contexts.
*
* @template TConfig - Type of the workflow configuration
* @template TData - Type of the accumulated workflow data
* @template TServices - Type of the resolved services
*
* @example
* ```typescript
* const ctx = ImmutableWorkflowContext.create(
*   { projectName: "my-app", verbose: true },
*   resolvedServices,
*   { files: [], processedCount: 0 }
* );
* ```
*/
var ImmutableWorkflowContext = class ImmutableWorkflowContext {
	startTime;
	workspace;
	config;
	services;
	/** Internal data record; exposed read-only through the `data` getter. */
	#data;
	/** Lazily-built frozen shallow copy of #data, cached after first access. */
	#frozenData = null;
	get [Symbol.toStringTag]() {
		return "WorkflowContext";
	}
	constructor(options) {
		this.startTime = options.startTime;
		this.workspace = options.workspace;
		this.config = options.config;
		this.services = options.services;
		this.#data = options.data;
	}
	/**
	* Read-only view of the accumulated data. The frozen shallow copy is
	* created on first access and reused afterwards.
	*/
	get data() {
		if (this.#frozenData === null) this.#frozenData = Object.freeze({ ...this.#data });
		return this.#frozenData;
	}
	/**
	* Creates a new workflow context.
	*
	* @param config - Workflow configuration (frozen via shallow copy)
	* @param services - Resolved services
	* @param initialData - Optional initial data values (defaults to empty)
	* @param workspace - Optional workspace; defaults to `Workspace.current()`
	* @returns A fresh context whose start time is "now"
	*/
	static create(config, services, initialData, workspace) {
		const resolvedWorkspace = workspace ?? Workspace.current();
		return new ImmutableWorkflowContext({
			startTime: /* @__PURE__ */ new Date(),
			workspace: resolvedWorkspace,
			config: Object.freeze({ ...config }),
			data: initialData ?? {},
			services
		});
	}
	/**
	* Safely retrieves a value from the data record.
	*
	* @param key - Data key to look up
	* @returns `Ok(value)` when the key exists, validation error otherwise
	*/
	get(key) {
		if (key in this.#data) return FireflyOk(this.#data[key]);
		return validationErr({ message: `Key "${String(key)}" not found in context` });
	}
	/**
	* Builds a sibling context that shares everything except the data record.
	*/
	#withData(data) {
		return new ImmutableWorkflowContext({
			startTime: this.startTime,
			workspace: this.workspace,
			config: this.config,
			data,
			services: this.services
		});
	}
	/**
	* Returns a new context with `key` set to `value`.
	* If the value is reference-identical to the current one, the same
	* context instance is returned (no-op optimization).
	*/
	fork(key, value) {
		if (this.#data[key] === value) return this;
		return this.#withData({
			...this.#data,
			[key]: value
		});
	}
	/**
	* Returns a new context with several keys updated at once.
	* Returns the same instance when `updates` is empty or every entry is
	* reference-identical to the current data.
	*/
	forkMultiple(updates) {
		const keys = Object.keys(updates);
		if (keys.length === 0) return this;
		const hasChange = keys.some((key) => this.#data[key] !== updates[key]);
		if (!hasChange) return this;
		return this.#withData({
			...this.#data,
			...updates
		});
	}
	/** Checks whether `key` exists in the data record. */
	has(key) {
		return key in this.#data;
	}
	/** Returns the frozen read-only view of the data (same as `data`). */
	snapshot() {
		return this.data;
	}
};
|
|
2191
|
+
|
|
2192
|
+
//#endregion
|
|
2193
|
+
//#region src/core/execution/workflow.executor.ts
|
|
2194
|
+
/**
* Executes workflow tasks in sequence with error handling and rollback.
*
* The executor:
* 1. Runs tasks sequentially, passing updated context between them
* 2. Evaluates skip conditions before each task
* 3. Tracks executed tasks for potential rollback
* 4. On failure, optionally rolls back completed tasks in reverse order
*
* @example
* ```typescript
* const executor = new WorkflowExecutor({
*   dryRun: false,
*   enableRollback: true,
* });
*
* const result = await executor.execute(orderedTasks, context);
*
* if (result.isOk() && result.value.success) {
*   console.log(`Executed ${result.value.executedTasks.length} tasks`);
* } else {
*   console.error(`Failed at task: ${result.value.failedTask}`);
* }
* ```
*/
var WorkflowExecutor = class {
	options;
	/** Tasks that completed successfully, in execution order (used as the rollback source). */
	executedTasks = [];
	/** Resolved AbortSignal for cancellation */
	#signal;
	get [Symbol.toStringTag]() {
		return "WorkflowExecutor";
	}
	constructor(options = {}) {
		this.options = options;
		// An explicit signal wins; otherwise derive one from timeoutMs, or run unbounded.
		this.#signal = options.signal ?? (options.timeoutMs ? AbortSignal.timeout(options.timeoutMs) : void 0);
	}
	/**
	* Executes a sequence of tasks with the given initial context.
	*
	* @param tasks - Ordered array of tasks to execute
	* @param initialContext - Starting workflow context
	* @returns Execution result with success/failure status and metadata
	*/
	execute(tasks, initialContext) {
		const startTime = /* @__PURE__ */ new Date();
		const executedTaskIds = [];
		const skippedTaskIds = [];
		if (this.options.dryRun) logger.warn("Workflow executing in DRY RUN mode - no actual changes will be made");
		logger.verbose(`WorkflowExecutor: Starting execution of ${tasks.length} tasks`);
		return this.executeTasksSequentially(tasks, initialContext, executedTaskIds, skippedTaskIds).andThen(() => this.buildExecutionSuccessResult(startTime, executedTaskIds, skippedTaskIds)).orElse((error) => this.handleExecutionFailure({
			error,
			startTime,
			executedTaskIds,
			skippedTaskIds,
			initialContext
		}));
	}
	/**
	* Builds the success-shaped execution result and logs a summary.
	* Always resolves Ok; timing is measured from the supplied start time.
	*/
	buildExecutionSuccessResult(startTime, executedTaskIds, skippedTaskIds) {
		const endTime = /* @__PURE__ */ new Date();
		const result = {
			success: true,
			executedTasks: executedTaskIds,
			skippedTasks: skippedTaskIds,
			rollbackExecuted: false,
			startTime,
			endTime,
			executionTimeMs: endTime.getTime() - startTime.getTime()
		};
		logger.verbose("WorkflowExecutor: Execution completed successfully");
		logger.verbose(`WorkflowExecutor: Executed: ${executedTaskIds.length}, Skipped: ${skippedTaskIds.length}, Time: ${result.executionTimeMs}ms`);
		return FireflyOkAsync(result);
	}
	/**
	* Converts a task failure into a failure-shaped result.
	* When rollback is enabled and at least one task completed, rollback runs
	* first and its outcome is reported via `rollbackExecuted`. Note the result
	* itself is Ok-wrapped: failure is reported in the payload, not the channel.
	*/
	handleExecutionFailure(args) {
		const { error, startTime, executedTaskIds, skippedTaskIds, initialContext } = args;
		const endTime = /* @__PURE__ */ new Date();
		if (this.options.enableRollback && this.executedTasks.length > 0) {
			logger.verbose(`WorkflowExecutor: Attempting rollback of ${this.executedTasks.length} tasks`);
			return this.rollback(initialContext).andThen((rollbackSuccess) => {
				return FireflyOkAsync({
					success: false,
					executedTasks: executedTaskIds,
					skippedTasks: skippedTaskIds,
					failedTask: executedTaskIds.at(-1) || "unknown",
					error,
					rollbackExecuted: rollbackSuccess,
					startTime,
					endTime,
					executionTimeMs: endTime.getTime() - startTime.getTime()
				});
			});
		}
		return FireflyOkAsync({
			success: false,
			executedTasks: executedTaskIds,
			skippedTasks: skippedTaskIds,
			failedTask: executedTaskIds.at(-1) || "unknown",
			error,
			rollbackExecuted: false,
			startTime,
			endTime,
			executionTimeMs: endTime.getTime() - startTime.getTime()
		});
	}
	/**
	* Recursively executes tasks head-first: evaluate the skip decision for the
	* head task, then either recurse over the (possibly shortened) remainder or
	* execute the head and continue. Aborts with a timeout error if the
	* cancellation signal has fired.
	*/
	executeTasksSequentially(tasks, context, executedTaskIds, skippedTaskIds) {
		if (tasks.length === 0) return FireflyOkAsync(void 0);
		if (this.#signal?.aborted) return timeoutErrAsync({ message: "Workflow execution was aborted" });
		const [currentTask, ...remainingTasks] = tasks;
		if (!currentTask) return FireflyOkAsync(void 0);
		const skipCheck = this.handleTaskSkip(currentTask, remainingTasks, context, skippedTaskIds);
		if (skipCheck.isErr()) return FireflyErrAsync(skipCheck.error);
		if (skipCheck.value.shouldSkip) return this.executeTasksSequentially(skipCheck.value.newRemainingTasks, context, executedTaskIds, skippedTaskIds);
		return this.executeTaskAndContinue(currentTask, remainingTasks, context, {
			executedTaskIds,
			skippedTaskIds
		});
	}
	/**
	* Evaluates a task's skip condition (if any).
	* On skip, records the task id in `skippedTaskIds` (mutated in place) and,
	* when the condition names `skipToTasks`, trims the remaining-task list so
	* execution resumes at the first matching task.
	*/
	handleTaskSkip(currentTask, remainingTasks, context, skippedTaskIds) {
		if (!currentTask.shouldSkip) return FireflyOk({
			shouldSkip: false,
			newRemainingTasks: remainingTasks
		});
		const skipResult = currentTask.shouldSkip(context);
		if (skipResult.isErr()) return FireflyErr(skipResult.error);
		if (!skipResult.value.shouldSkip) return FireflyOk({
			shouldSkip: false,
			newRemainingTasks: remainingTasks
		});
		const reason = skipResult.value.reason || "condition not met";
		logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': Skipped - ${reason}`);
		skippedTaskIds.push(currentTask.meta.id);
		if (skipResult.value.skipToTasks && skipResult.value.skipToTasks.length > 0) {
			logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': Skipping through to ${skipResult.value.skipToTasks.join(", ")}`);
			const skipToIndex = remainingTasks.findIndex((t) => skipResult.value.skipToTasks?.includes(t.meta.id));
			// If no named target is found, fall through and skip only the current task.
			if (skipToIndex >= 0) return FireflyOk({
				shouldSkip: true,
				newRemainingTasks: remainingTasks.slice(skipToIndex)
			});
		}
		return FireflyOk({
			shouldSkip: true,
			newRemainingTasks: remainingTasks
		});
	}
	/**
	* Runs one task, records it as executed (for reporting and rollback), and
	* recurses into the remaining tasks with the context the task returned.
	* Errors are logged and propagated unchanged.
	*/
	executeTaskAndContinue(currentTask, remainingTasks, context, executionLists) {
		logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': Executing...`);
		return currentTask.execute(context).andThen((updatedContext) => {
			logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': Completed`);
			executionLists.executedTaskIds.push(currentTask.meta.id);
			this.executedTasks.push(currentTask);
			return this.executeTasksSequentially(remainingTasks, updatedContext, executionLists.executedTaskIds, executionLists.skippedTaskIds);
		}).mapErr((error) => {
			logger.error(error.message);
			return error;
		});
	}
	/**
	* Rolls back all executed tasks in reverse order.
	* Resolves Ok(true) when every undo succeeded, Ok(false) when any undo
	* failed (individual failures are logged, not propagated).
	*/
	rollback(context) {
		const tasksToRollback = this.executedTasks.toReversed();
		logger.verbose(`WorkflowExecutor: Rolling back ${tasksToRollback.length} tasks in reverse order`);
		return this.rollbackTasks(tasksToRollback, context).andThen((errors) => {
			if (errors.length > 0) {
				logger.error(`Rollback completed with ${errors.length} errors`);
				for (const { taskId, error } of errors) logger.error(` Task '${taskId}': ${error.message}`);
				return FireflyOkAsync(false);
			}
			logger.verbose("WorkflowExecutor: Rollback completed successfully");
			return FireflyOkAsync(true);
		});
	}
	/**
	* Recursively undoes tasks head-first, collecting per-task failures instead
	* of stopping: a failed undo is appended to the error list and rollback
	* continues with the remaining tasks. Tasks without an `undo` are skipped.
	*
	* @returns Ok(array of { taskId, error }) — empty when all undos succeeded
	*/
	rollbackTasks(tasks, context) {
		if (tasks.length === 0) return FireflyOkAsync([]);
		const [currentTask, ...remainingTasks] = tasks;
		const errors = [];
		if (!currentTask) return FireflyOkAsync([]);
		if (!currentTask.undo) {
			logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': No undo available, skipping rollback`);
			return this.rollbackTasks(remainingTasks, context);
		}
		logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': Rolling back...`);
		return currentTask.undo(context).andThen(() => {
			logger.verbose(`WorkflowExecutor: Task '${currentTask.meta.id}': Rollback completed`);
			return this.rollbackTasks(remainingTasks, context);
		}).orElse((error) => {
			logger.error(`Task '${currentTask.meta.id}': Rollback failed - ${error.message}`);
			errors.push({
				taskId: currentTask.meta.id,
				error
			});
			return this.rollbackTasks(remainingTasks, context).map((remainingErrors) => [...errors, ...remainingErrors]);
		});
	}
};
|
|
2386
|
+
|
|
2387
|
+
//#endregion
|
|
2388
|
+
//#region src/core/registry/base.registry.ts
|
|
2389
|
+
/**
* Abstract base class providing common registry functionality.
*
* Implements the registry pattern with:
* - Type-safe item storage and retrieval
* - Duplicate detection with configurable error codes
* - Batch registration with fail-fast semantics
* - Standard CRUD-like operations (register, get, has, clear)
*
* @template T - The type of items stored in the registry
* @template K - The key type used to identify items (defaults to string)
*
* @example
* ```typescript
* interface User { id: string; name: string; }
*
* class UserRegistry extends BaseRegistry<User> {
*   constructor() {
*     super({ name: "User", source: "UserRegistry", getKey: (user) => user.id });
*   }
* }
* ```
*/
var BaseRegistry = class {
	/** Backing store, keyed by `config.getKey(item)`. */
	items = /* @__PURE__ */ new Map();
	/** Registry configuration: name, source, key extractor, error codes. */
	config;
	/**
	* Custom string tag so instances display as [object {name}Registry]
	* instead of [object Object].
	*/
	get [Symbol.toStringTag]() {
		return `${this.config.name}Registry`;
	}
	/**
	* Creates a new registry instance, filling in default error codes.
	* @param config - Configuration options for the registry
	*/
	constructor(config) {
		this.config = {
			duplicateErrorCode: "CONFLICT",
			notFoundErrorCode: "NOT_FOUND",
			...config
		};
	}
	/**
	* Registers a single item in the registry.
	*
	* @param item - The item to register
	* @returns `Ok(void)` on success, `Err(FireflyError)` if duplicate detected
	*/
	register(item) {
		const key = this.config.getKey(item);
		if (this.items.has(key)) {
			const details = {
				message: `${this.config.name} "${key}" is already registered`,
				source: this.config.source
			};
			// Error flavor is driven by the configured duplicate error code.
			const duplicate = this.config.duplicateErrorCode === "CONFLICT" ? conflictError(details) : notFoundError(details);
			return FireflyErr(duplicate);
		}
		this.items.set(key, item);
		return ok();
	}
	/**
	* Registers multiple items in sequence, stopping on the first error
	* (fail-fast semantics).
	*
	* @param items - Array of items to register
	* @returns `Ok(void)` if all registered, `Err(FireflyError)` on first failure
	*/
	registerAll(items) {
		for (const item of items) {
			const outcome = this.register(item);
			if (outcome.isErr()) return outcome;
		}
		return ok();
	}
	/**
	* Retrieves an item by its key.
	*
	* @param key - The unique identifier of the item
	* @returns `Ok(T)` if found, `Err(FireflyError)` if not found
	*/
	get(key) {
		const item = this.items.get(key);
		if (item) return ok(item);
		return FireflyErr(notFoundError({
			message: `${this.config.name} "${key}" not found in registry`,
			source: this.config.source
		}));
	}
	/** Returns all registered items, in registration order. */
	getAll() {
		return [...this.items.values()];
	}
	/** Returns all registered keys, in registration order. */
	getKeys() {
		return [...this.items.keys()];
	}
	/**
	* Checks whether an item with the given key exists.
	* @param key - The key to check
	*/
	has(key) {
		return this.items.has(key);
	}
	/** Returns the number of registered items. */
	size() {
		return this.items.size;
	}
	/** Removes all items from the registry. */
	clear() {
		this.items.clear();
	}
};
|
|
2529
|
+
|
|
2530
|
+
//#endregion
|
|
2531
|
+
//#region src/core/task/task-group.types.ts
|
|
2532
|
+
/**
* Separator used between group ID and task ID.
*/
const GROUP_TASK_SEPARATOR = ":";
/**
* Creates a namespaced task ID from a group ID and task ID.
*
* @param groupId - The group's unique identifier
* @param taskId - The task's unique identifier within the group
* @returns Namespaced ID in the form `groupId:taskId`
*
* @example
* ```typescript
* createNamespacedTaskId("git", "commit"); // "git:commit"
* ```
*/
function createNamespacedTaskId(groupId, taskId) {
	return [groupId, taskId].join(GROUP_TASK_SEPARATOR);
}
|
|
2552
|
+
|
|
2553
|
+
//#endregion
|
|
2554
|
+
//#region src/core/task/task-group.expansion.ts
|
|
2555
|
+
/**
* Expands a task group into individual tasks with namespaced IDs.
*
* This function:
* 1. Prefixes each task ID with the group ID (e.g., "group:task")
* 2. Updates task dependencies to use namespaced IDs
* 3. Merges the group skip condition into each task's skip condition
* 4. Adds inter-group dependencies to the first task in the group
*
* @param group - The task group to expand
* @param registeredGroups - Map of registered group IDs to their last task IDs
* @returns Expanded tasks with namespaced IDs, plus the original→namespaced ID mapping
*/
function expandTaskGroup(group, registeredGroups) {
	const groupId = group.meta.id;
	const taskIdMapping = new Map();
	const expansionContext = {
		groupId,
		groupSkipCondition: buildGroupSkipCondition(group),
		taskIdMapping,
		registeredGroups,
		dependsOnGroups: group.meta.dependsOnGroups
	};
	const expanded = expandTasks(group.tasks, expansionContext);
	if (expanded.isErr()) return FireflyErr(expanded.error);
	return FireflyOk({
		groupId,
		tasks: expanded.value,
		taskIdMapping
	});
}
|
|
2595
|
+
/**
* Expands every task of a group within the given expansion context,
* failing fast on the first expansion error.
*
* @param tasks - The tasks to expand (sparse/holey entries are ignored)
* @param ctx - The expansion context carrying group metadata
* @returns `Ok(expanded tasks)` or the first expansion error
*/
function expandTasks(tasks, ctx) {
	const expanded = [];
	for (const [index, task] of tasks.entries()) {
		if (!task) continue;
		const taskResult = expandSingleTask(task, index, ctx);
		if (taskResult.isErr()) return FireflyErr(taskResult.error);
		expanded.push(taskResult.value);
	}
	return FireflyOk(expanded);
}
|
|
2613
|
+
/**
* Expands a single task: namespaces its ID, records the mapping, remaps its
* dependencies, and (for the group's first task only) appends inter-group
* dependencies resolved from previously registered groups.
*
* @param task - The task to expand
* @param index - Zero-based position of the task within the group
* @param ctx - The expansion context (its `taskIdMapping` is updated in place)
* @returns `Ok(expanded task)` or an inter-group dependency resolution error
*/
function expandSingleTask(task, index, ctx) {
	const { groupId, groupSkipCondition, taskIdMapping, registeredGroups, dependsOnGroups } = ctx;
	const originalTaskId = task.meta.id;
	const namespacedId = createNamespacedTaskId(groupId, originalTaskId);
	taskIdMapping.set(originalTaskId, namespacedId);
	const dependencies = remapDependencies(task.meta.dependencies ?? [], taskIdMapping, groupId);
	const isFirstTaskInGroup = index === 0;
	if (isFirstTaskInGroup && dependsOnGroups) {
		const interGroupDeps = addInterGroupDependencies(dependsOnGroups, registeredGroups, groupId);
		if (interGroupDeps.isErr()) return FireflyErr(interGroupDeps.error);
		dependencies.push(...interGroupDeps.value);
	}
	return FireflyOk(createExpandedTask({
		originalTask: task,
		namespacedId,
		originalTaskId,
		groupId,
		dependencies,
		groupSkipCondition
	}));
}
|
|
2641
|
+
/**
* Resolves inter-group dependencies to their last task IDs.
*
* When a group depends on other groups, the first task of the dependent group
* must depend on the last task of each dependency group.
*
* @param dependsOnGroups - Array of group IDs this group depends on
* @param registeredGroups - Map of registered groups to their last task IDs
* @param groupId - Current group ID (used in error messages)
* @returns `Ok(namespaced task IDs)` or a validation error for an unknown group
*/
function addInterGroupDependencies(dependsOnGroups, registeredGroups, groupId) {
	const resolved = [];
	for (const depGroupId of dependsOnGroups) {
		const lastTaskOfDepGroup = registeredGroups.get(depGroupId);
		if (!lastTaskOfDepGroup) {
			return FireflyErr(validationError({
				message: `Group "${groupId}" depends on group "${depGroupId}" which is not registered`,
				source: "expandTaskGroup"
			}));
		}
		resolved.push(lastTaskOfDepGroup);
	}
	return FireflyOk(resolved);
}
|
|
2664
|
+
/**
* Builds a unified skip condition from a group's options.
*
* A full `skipCondition` takes precedence; a bare `skipWhen` predicate is
* wrapped into a full skip condition (using `skipReason` or a default reason).
*
* @param group - The task group to extract the skip condition from
* @returns The group's skip condition function, or undefined if none configured
*/
function buildGroupSkipCondition(group) {
	const options = group.options;
	if (!options) return undefined;
	if (options.skipCondition) return options.skipCondition;
	if (!options.skipWhen) return undefined;
	const { skipWhen, skipReason } = options;
	return (ctx) => FireflyOk({
		shouldSkip: skipWhen(ctx),
		reason: skipReason ?? `Group "${group.meta.id}" skip condition met`
	});
}
|
|
2683
|
+
/**
* Remaps task dependencies to use namespaced IDs.
*
* Handles three cases:
* 1. Dependency already processed in this group → use mapped namespaced ID
* 2. Dependency already namespaced (cross-group) → use as-is
* 3. Dependency not yet processed → assume same group, namespace it
*
* @param originalDeps - Original dependency IDs from the task
* @param taskIdMapping - Map of processed task IDs to namespaced versions
* @param groupId - Current group ID used for namespacing
* @returns Array of remapped dependency IDs
*/
function remapDependencies(originalDeps, taskIdMapping, groupId) {
	const remapped = [];
	for (const dep of originalDeps) {
		const mapped = taskIdMapping.get(dep);
		if (mapped) {
			remapped.push(mapped);
		} else if (dep.includes(":")) {
			remapped.push(dep);
		} else {
			remapped.push(createNamespacedTaskId(groupId, dep));
		}
	}
	return remapped;
}
|
|
2704
|
+
/**
* Creates an expanded task with a merged skip condition.
*
* Preserves the original task's `execute` and `undo` functions while replacing
* the ID with its namespaced form and merging group/task skip conditions.
*
* @param opts - Original task plus expansion metadata
* @returns The expanded task record
*/
function createExpandedTask(opts) {
	const { originalTask, namespacedId, originalTaskId, groupId, dependencies, groupSkipCondition } = opts;
	const meta = {
		...originalTask.meta,
		id: namespacedId,
		// Empty dependency lists collapse to undefined to match scheduler expectations.
		dependencies: dependencies.length > 0 ? dependencies : void 0
	};
	return {
		meta,
		shouldSkip: mergeSkipConditions(originalTask.shouldSkip, groupSkipCondition),
		execute: originalTask.execute,
		undo: originalTask.undo,
		originalTaskId,
		groupId,
		groupSkipCondition
	};
}
|
|
2730
|
+
/**
* Merges a group skip condition with a task's own skip condition.
*
* The group condition is evaluated first; if it errs or decides to skip, its
* result is returned without evaluating the task's own condition. This lets a
* group skip all of its tasks with a single condition.
*
* @param taskSkipCondition - The task's original skip condition (if any)
* @param groupSkipCondition - The group's skip condition (if any)
* @returns Merged skip condition function, or undefined if neither exists
*/
function mergeSkipConditions(taskSkipCondition, groupSkipCondition) {
	if (!groupSkipCondition) return taskSkipCondition;
	if (!taskSkipCondition) return groupSkipCondition;
	return (ctx) => {
		const groupOutcome = groupSkipCondition(ctx);
		const groupDecides = groupOutcome.isErr() || groupOutcome.value.shouldSkip;
		return groupDecides ? groupOutcome : taskSkipCondition(ctx);
	};
}
|
|
2751
|
+
/**
 * Creates an empty group registry for tracking registered groups.
 *
 * The registry records, per group:
 * - the ID of the group's last task (used for inter-group dependencies), and
 * - every task ID belonging to the group.
 *
 * @returns Empty group registry
 *
 * @example
 * ```typescript
 * const registry = createGroupRegistry();
 * // Register groups and update registry...
 * ```
 */
function createGroupRegistry() {
  const lastTaskByGroup = new Map();
  const tasksByGroup = new Map();
  return { lastTaskByGroup, tasksByGroup };
}
|
|
2773
|
+
/**
 * Updates the group registry with a newly expanded group.
 *
 * Stores every task ID of the group and remembers the final task's ID so
 * later groups can depend on this one.
 *
 * @param registry - The group registry to mutate
 * @param expandedResult - The result of expanding a task group
 *
 * @example
 * ```typescript
 * const expandResult = expandTaskGroup(group, registry.lastTaskByGroup);
 * if (expandResult.isOk()) {
 *   updateGroupRegistry(registry, expandResult.value);
 * }
 * ```
 */
function updateGroupRegistry(registry, expandedResult) {
  const { groupId, tasks } = expandedResult;
  // Empty groups leave no trace in the registry.
  if (tasks.length === 0) return;
  registry.tasksByGroup.set(groupId, tasks.map((task) => task.meta.id));
  const finalTask = tasks.at(-1);
  if (finalTask) registry.lastTaskByGroup.set(groupId, finalTask.meta.id);
}
|
|
2798
|
+
|
|
2799
|
+
//#endregion
|
|
2800
|
+
//#region src/core/registry/task.registry.ts
|
|
2801
|
+
/**
 * Registry for managing workflow tasks with dependency validation.
 *
 * Tasks must be registered after their dependencies. Task groups can be
 * registered as a unit: their tasks are expanded to namespaced IDs
 * (`groupId:taskId`) and registered individually.
 *
 * @example
 * ```typescript
 * const registry = new TaskRegistry();
 *
 * registry.register(taskA);
 * registry.register(taskB); // taskB's dependencies must already exist
 * registry.registerGroup(gitGroup);
 *
 * // Iterate directly in execution order using for-of
 * const orderResult = registry.buildExecutionOrder();
 * if (orderResult.isFireflyOk()) {
 *   for (const task of orderResult.value) {
 *     await task.execute(context);
 *   }
 * }
 *
 * // Or use spread operator
 * const tasks = [...registry];
 * ```
 */
var TaskRegistry = class extends BaseRegistry {
  groupRegistry;
  constructor() {
    super({
      name: "Task",
      source: "TaskRegistry",
      getKey: (task) => task.meta.id,
      duplicateErrorCode: "VALIDATION",
      notFoundErrorCode: "VALIDATION"
    });
    this.groupRegistry = createGroupRegistry();
  }
  get [Symbol.toStringTag]() {
    return "TaskRegistry";
  }
  /**
   * Lazy depth-first topological traversal of the registered tasks.
   *
   * @yields Tasks such that every dependency precedes its dependents
   * @throws If a circular dependency is detected during traversal
   *
   * @example
   * ```typescript
   * for (const task of registry) {
   *   console.log(task.meta.id);
   * }
   * const tasks = [...registry];
   * ```
   */
  *[Symbol.iterator]() {
    const emitted = new Set();
    const inProgress = new Set();
    const registry = this;
    function* walk(taskId) {
      if (inProgress.has(taskId)) throw new Error(`Circular dependency detected involving task "${taskId}"`);
      if (emitted.has(taskId)) return;
      const task = registry.items.get(taskId);
      if (!task) return;
      inProgress.add(taskId);
      for (const depId of task.meta.dependencies ?? []) yield* walk(depId);
      inProgress.delete(taskId);
      emitted.add(taskId);
      yield task;
    }
    for (const taskId of this.items.keys()) yield* walk(taskId);
  }
  /**
   * Registers a task after validating it is new and all its dependencies exist.
   *
   * @param task - The task to register
   * @returns `FireflyOk(void)` on success, `Err(FireflyError)` if duplicate or missing dependency
   * @override
   */
  register(task) {
    const taskId = task.meta.id;
    if (this.items.has(taskId)) return validationErr({ message: `Task "${taskId}" is already registered` });
    const missingDep = (task.meta.dependencies ?? []).find((depId) => !this.items.has(depId));
    if (missingDep !== void 0) return validationErr({ message: `Task "${taskId}" depends on "${missingDep}" which is not registered` });
    this.items.set(taskId, task);
    return FireflyOk(void 0);
  }
  /**
   * Registers a task group, expanding it into individual namespaced tasks.
   *
   * Steps: expand to `groupId:taskId` IDs (merging group skip conditions and
   * resolving inter-group dependencies), register each expanded task, then
   * record the group in the group registry.
   *
   * @param group - The task group to register
   * @returns `FireflyOk(void)` on success, `Err(FireflyError)` if validation fails
   */
  registerGroup(group) {
    const groupId = group.meta.id;
    if (this.groupRegistry.lastTaskByGroup.has(groupId)) return validationErr({ message: `Task group "${groupId}" is already registered` });
    const expandResult = expandTaskGroup(group, this.groupRegistry.lastTaskByGroup);
    if (expandResult.isErr()) return err(expandResult.error);
    for (const expandedTask of expandResult.value.tasks) {
      const registered = this.register(expandedTask);
      if (registered.isErr()) return registered;
    }
    updateGroupRegistry(this.groupRegistry, expandResult.value);
    return FireflyOk(void 0);
  }
  /**
   * Registers multiple task groups sequentially, so later groups may depend
   * on earlier ones.
   *
   * @param groups - Array of task groups to register
   * @returns `FireflyOk(void)` on success, `Err(FireflyError)` on first failure
   */
  registerGroups(groups) {
    for (const group of groups) {
      const outcome = this.registerGroup(group);
      if (outcome.isErr()) return outcome;
    }
    return FireflyOk(void 0);
  }
  /**
   * Gets all task IDs belonging to a specific group.
   *
   * @param groupId - The group ID to query
   * @returns Array of namespaced task IDs, or empty array if group not found
   */
  getGroupTaskIds(groupId) {
    return this.groupRegistry.tasksByGroup.get(groupId) ?? [];
  }
  /**
   * Gets all registered group IDs.
   *
   * @returns Array of group IDs in registration order
   */
  getGroupIds() {
    return [...this.groupRegistry.lastTaskByGroup.keys()];
  }
  /**
   * Checks whether a group has been registered.
   *
   * @param groupId - The group ID to check
   * @returns True if the group is registered
   */
  hasGroup(groupId) {
    return this.groupRegistry.lastTaskByGroup.has(groupId);
  }
  /**
   * Builds an execution order that respects task dependencies.
   *
   * @returns `FireflyOk(Task[])` with tasks in execution order, `Err(FireflyError)` if a circular dependency is detected
   */
  buildExecutionOrder() {
    const sortResult = topologicalSort([...this.items.values()]);
    if (sortResult.isErr()) return err(sortResult.error);
    const orderedTasks = sortResult.value
      .map((taskId) => this.items.get(taskId))
      .filter((task) => task !== void 0);
    return FireflyOk(orderedTasks);
  }
};
|
|
2980
|
+
|
|
2981
|
+
//#endregion
|
|
2982
|
+
//#region src/core/service/service.proxy.ts
|
|
2983
|
+
/**
 * Creates a new resolution context for tracking service instantiation.
 *
 * @param basePath - Base path handed to service factories
 * @returns Context with an empty in-flight set and an empty instance cache
 * @internal
 */
function createResolutionContext(basePath) {
  const resolving = new Set();
  const resolved = new Map();
  return { basePath, resolving, resolved };
}
|
|
2994
|
+
/**
 * Resolves a service within a resolution context, with circular dependency detection.
 *
 * Instances are memoized per-context in `context.resolved`; the
 * `context.resolving` set tracks the in-flight resolution chain so that
 * cycles are reported instead of recursing forever.
 *
 * @param key - Key of the service definition (looked up in SERVICE_DEFINITIONS)
 * @param context - Shared resolution context (cache + in-flight set)
 * @returns The resolved service instance
 * @throws Error if a circular service dependency is detected
 * @internal
 */
async function resolveServiceWithContext(key, context) {
  const cached = context.resolved.get(key);
  if (cached) return cached;
  if (context.resolving.has(key)) {
    const chain = [...context.resolving, key].join(" -> ");
    // Throwing in an async function rejects the returned promise; no need
    // for an explicit Promise.reject.
    throw new Error(`Circular service dependency detected: ${chain}`);
  }
  context.resolving.add(key);
  try {
    const definition = SERVICE_DEFINITIONS[key];
    const factoryContext = {
      basePath: context.basePath,
      getService: (depKey) => resolveServiceWithContext(depKey, context)
    };
    const instance = await definition.factory(factoryContext);
    context.resolved.set(key, instance);
    return instance;
  } finally {
    // BUGFIX: previously the key was only removed from `resolving` on the
    // success path, so a throwing/rejecting factory left the key behind and
    // every retry falsely reported a circular dependency.
    context.resolving.delete(key);
  }
}
|
|
3016
|
+
/**
 * Creates a lazy proxy that defers async service instantiation until first access.
 *
 * Property access returns a function which, when invoked, resolves the
 * underlying instance and wraps the member access in a ResultAsync.
 *
 * @template T - The service interface type
 * @param factory - Async function that creates the actual service instance
 * @returns A proxy that behaves like the service but instantiates lazily
 * @internal
 */
function createLazyServiceProxy(factory) {
  let pending;
  let resolvedInstance;
  // Kick off the factory at most once; cache the settled instance.
  const materialize = async () => {
    if (resolvedInstance) return resolvedInstance;
    pending ??= factory();
    resolvedInstance = await pending;
    return resolvedInstance;
  };
  return new Proxy({}, {
    get(_target, prop, receiver) {
      return (...args) =>
        ResultAsync.fromSafePromise(materialize()).andThen((service) => {
          const member = Reflect.get(service, prop, receiver);
          if (typeof member === "function") return member.apply(service, args);
          return member;
        });
    },
    has(_target, prop) {
      // Before instantiation, optimistically claim every property exists.
      return resolvedInstance ? Reflect.has(resolvedInstance, prop) : true;
    },
    ownKeys(_target) {
      return resolvedInstance ? Reflect.ownKeys(resolvedInstance) : [];
    },
    getOwnPropertyDescriptor(_target, prop) {
      if (!resolvedInstance) return void 0;
      return Reflect.getOwnPropertyDescriptor(resolvedInstance, prop);
    }
  });
}
|
|
3053
|
+
/**
 * Resolves a specific set of services for use in a workflow context.
 *
 * Each service is wrapped in a lazy proxy sharing one resolution context,
 * so nothing is instantiated until first use.
 *
 * @template TKeys - Tuple type of service keys to resolve
 * @param requiredServices - Array of service keys to resolve
 * @param basePath - Base path passed to service factories (usually the project root)
 * @returns Object containing the resolved services
 *
 * @example
 * ```typescript
 * const services = resolveServices(["fs", "git"] as const, "/path/to/project");
 * await services.fs.read("package.json");
 * ```
 */
function resolveServices(requiredServices, basePath) {
  const context = createResolutionContext(basePath);
  const services = {};
  for (const key of requiredServices) {
    services[key] = createLazyServiceProxy(() => resolveServiceWithContext(key, context));
  }
  return services;
}
|
|
3073
|
+
|
|
3074
|
+
//#endregion
|
|
3075
|
+
//#region src/core/execution/workflow.orchestrator.ts
|
|
3076
|
+
/**
 * Orchestrates the execution of workflow commands.
 * The orchestrator is the main entry point for executing commands.
 *
 * Responsibilities:
 * - Service resolution based on command requirements
 * - Context creation with configuration and initial data
 * - Task building, registration, and dependency ordering
 * - Command lifecycle hook execution
 * - Delegation to WorkflowExecutor for task execution
 *
 * @example
 * ```typescript
 * const orchestrator = new WorkflowOrchestrator({
 *   basePath: "/path/to/project",
 *   dryRun: false,
 *   enableRollback: true,
 * });
 *
 * const result = await orchestrator.executeCommand(
 *   releaseCommand,
 *   { version: "1.0.0", changelog: true },
 *   { previousVersion: "0.9.0" }
 * );
 *
 * if (result.isOk() && result.value.success) {
 *   console.log("Release completed successfully!");
 * }
 * ```
 */
var WorkflowOrchestrator = class {
  options;
  constructor(options = {}) {
    this.options = options;
  }
  /**
   * Executes a command with the given configuration.
   *
   * @param command - The command to execute
   * @param config - Configuration for the command
   * @param initialData - Optional initial data values
   * @returns Execution result with success/failure status
   */
  executeCommand(command, config, initialData) {
    logger.verbose(`WorkflowOrchestrator: Executing command "${command.meta.name}"`);
    const context = this.createContext(command, config, initialData);
    return this.runCommandLifecycle(command, context);
  }
  /**
   * Creates a workflow context with resolved services for the command.
   *
   * @param command - The command for which to create the context
   * @param config - Configuration for the command
   * @param initialData - Optional initial data values
   * @returns The constructed workflow context
   */
  createContext(command, config, initialData) {
    const workspace = this.options.workspace ?? Workspace.current();
    const requiredServices = command.meta.requiredServices;
    const services = resolveServices(requiredServices, workspace.basePath);
    logger.verbose(`WorkflowOrchestrator: Resolved services: [${requiredServices.join(", ")}]`);
    logger.verbose(`WorkflowOrchestrator: Using workspace: ${workspace.basePath}`);
    return ImmutableWorkflowContext.create(config, services, initialData, workspace);
  }
  /**
   * Runs the complete command lifecycle: before → tasks → after / error.
   *
   * @param command - The command to execute
   * @param context - The workflow context for execution
   * @returns Execution result with success/failure status
   */
  runCommandLifecycle(command, context) {
    const beforeStep = command.beforeExecute ? command.beforeExecute(context) : FireflyOkAsync(void 0);
    return beforeStep
      .andThen(() => this.buildAndOrderTasks(command, context))
      .andThen((tasks) => new WorkflowExecutor(this.options).execute(tasks, context))
      .andThen((result) => this.runAfterExecute(command, context, result))
      .orElse((error) => this.handleCommandError(command, context, error));
  }
  /**
   * Runs the afterExecute hook if defined, preserving the execution result.
   *
   * @param command - The command to execute
   * @param context - The workflow context for execution
   * @param result - The workflow execution result
   * @returns The original execution result wrapped in FireflyAsyncResult
   */
  runAfterExecute(command, context, result) {
    if (!command.afterExecute) return FireflyOkAsync(result);
    return command.afterExecute(result, context).map(() => result);
  }
  /**
   * Handles errors by calling the onError hook and wrapping the error.
   *
   * NOTE: when no onError hook exists, the ORIGINAL (unwrapped) error is
   * propagated; the wrapped error is only used on the hook path.
   *
   * @param command - The command to execute
   * @param context - The workflow context for execution
   * @param error - The error that occurred
   * @returns A FireflyAsyncResult with the (possibly wrapped) error
   */
  handleCommandError(command, context, error) {
    const wrappedError = wrapErrorMessage(failedError({
      message: error.message,
      source: "WorkflowOrchestrator.executeCommand"
    }), "Command execution failed");
    if (!command.onError) return FireflyErrAsync(error);
    return command.onError(new Error(error.message), context)
      .andThen(() => FireflyErrAsync(wrappedError))
      .orElse(() => FireflyErrAsync(wrappedError));
  }
  /**
   * Builds tasks from the command and orders them by dependencies.
   *
   * @param command - The command to build tasks from
   * @param context - The workflow context for execution
   * @returns Ordered array of tasks wrapped in FireflyAsyncResult
   */
  buildAndOrderTasks(command, context) {
    logger.verbose(`WorkflowOrchestrator: Building tasks for "${command.meta.name}"`);
    return command.buildTasks(context).andThen((tasks) => {
      if (tasks.length === 0) return validationErrAsync({
        message: `Command "${command.meta.name}" returned no tasks`,
        source: "WorkflowOrchestrator.buildAndOrderTasks"
      });
      logger.verbose(`WorkflowOrchestrator: Built ${tasks.length} tasks`);
      const taskRegistry = new TaskRegistry();
      const registerResult = taskRegistry.registerAll(tasks);
      if (registerResult.isErr()) return FireflyErrAsync(registerResult.error);
      const orderedResult = taskRegistry.buildExecutionOrder();
      if (orderedResult.isErr()) return FireflyErrAsync(orderedResult.error);
      return FireflyOkAsync(orderedResult.value);
    });
  }
};
|
|
3221
|
+
|
|
3222
|
+
//#endregion
|
|
3223
|
+
//#region src/core/command/command.types.ts
|
|
3224
|
+
/**
 * Erases a typed Command to a BrandedCommand for registry storage.
 *
 * At runtime this is a pure pass-through; the "erasure" exists only at the
 * type level so heterogeneous commands can share one registry.
 *
 * @param command - The typed command to erase
 * @returns The same command object, branded for heterogeneous storage
 * @internal
 */
function eraseCommandType(command) {
  return command;
}
|
|
3234
|
+
|
|
3235
|
+
//#endregion
|
|
3236
|
+
//#region src/core/registry/command.registry.ts
|
|
3237
|
+
/**
 * Registry for managing workflow commands.
 *
 * Extends `BaseRegistry` with command-specific functionality:
 * - Type-safe command registration with generic support
 * - Command name uniqueness enforcement
 * - Retrieval of all registered command names
 * - Uses branded types instead of `any` for type erasure
 *
 * @example
 * ```typescript
 * const registry = new CommandRegistry();
 *
 * registry.register(releaseCommand);
 *
 * const cmdResult = registry.get("release");
 * if (cmdResult.isOk()) {
 *   await orchestrator.executeCommand(cmdResult.value, config);
 * }
 * ```
 */
var CommandRegistry = class extends BaseRegistry {
  constructor() {
    super({
      name: "Command",
      source: "CommandRegistry",
      getKey: (command) => command.meta.name,
      duplicateErrorCode: "CONFLICT",
      notFoundErrorCode: "NOT_FOUND"
    });
  }
  /**
   * Registers a typed command in the registry.
   *
   * @param command - The command to register
   * @returns `Ok(void)` on success, `Err(FireflyError)` if command name already exists
   */
  registerCommand(command) {
    const branded = eraseCommandType(command);
    return this.register(branded);
  }
  /**
   * Registers multiple typed commands.
   *
   * @param commands - Array of commands to register
   * @returns `Ok(void)` if all registered, `Err(FireflyError)` on first failure
   */
  registerAllCommands(commands) {
    const branded = commands.map((command) => eraseCommandType(command));
    return this.registerAll(branded);
  }
  /**
   * Returns all registered command names.
   *
   * @returns Array of command names in registration order
   */
  getNames() {
    return this.getKeys();
  }
};
|
|
3302
|
+
|
|
3303
|
+
//#endregion
|
|
3304
|
+
//#region src/cli/program.ts
|
|
3305
|
+
/**
 * Creates and configures the Firefly CLI program.
 *
 * Sets up the Commander.js program with:
 * - Global options (--dry-run, --verbose, --no-enable-rollback)
 * - All registered commands with their specific options
 * - Help and version information
 *
 * @returns Configured Commander program ready for parsing
 */
function createFireflyCLI() {
  const program = new Command();
  program
    .name("firefly")
    .description(RuntimeEnv.description)
    .version(RuntimeEnv.version)
    .helpOption("-h, --help", "Display help information")
    .helpCommand("help", "Display help for command");
  const builder = new OptionsBuilder();
  builder.registerGlobalOptions(program);
  const registry = createCommandRegistry();
  const ctx = {
    program,
    builder,
    normalizer: new OptionsNormalizer(),
    registry
  };
  // Wire every registered command into the CLI with its config schema.
  for (const command of registry.getAll()) {
    registerCommand(ctx, command.meta.name, command.meta.configSchema);
  }
  return program;
}
|
|
3334
|
+
/**
 * Creates a command registry for the CLI.
 *
 * @returns Configured command registry with all commands registered
 */
function createCommandRegistry() {
  const commandRegistry = new CommandRegistry();
  commandRegistry.registerCommand(releaseCommand);
  return commandRegistry;
}
|
|
3344
|
+
/**
 * Registers a single command with the CLI program.
 *
 * Attaches the command, its schema-derived options, and an action handler
 * that normalizes options, runs the command, and exits with an appropriate
 * status code.
 *
 * @param ctx - The registration context containing program, builder, normalizer, and registry
 * @param commandName - The name of the command to register
 * @param configSchema - The Zod schema defining the command's configuration
 */
function registerCommand(ctx, commandName, configSchema) {
  const { program, builder, normalizer, registry } = ctx;
  const cmd = program.command(commandName).description(getCommandDescription(commandName));
  builder.registerCommandOptions(cmd, configSchema);
  cmd.action(async (_cliOptions, command) => {
    const mergedOptions = command.optsWithGlobals();
    const normalized = normalizer.normalize(mergedOptions, configSchema);
    const outcome = await executeCommand(commandName, normalized, registry);
    if (outcome.isErr()) {
      logger.error(outcome.error.message);
      process.exit(1);
    }
    // Workflow-level failure also exits non-zero.
    process.exit(outcome.value.success ? 0 : 1);
  });
}
|
|
3365
|
+
/**
 * Executes a command with the given options.
 *
 * Handles the full execution flow:
 * 1. Log version information
 * 2. Load and merge config file values (CLI options take precedence)
 * 3. Execute through the workflow orchestrator
 *
 * @param commandName - The command to execute
 * @param cliOptions - Parsed and normalized CLI options (already merged with globals via optsWithGlobals)
 * @param registry - The command registry to look up the command
 * @returns Async result of the workflow execution
 */
function executeCommand(commandName, cliOptions, registry) {
  logVersionInfo(commandName);
  if (cliOptions.verbose) logger.level = LogLevels.verbose;
  const workspace = Workspace.fromOptions({ basePath: cliOptions.cwd });
  const loader = new ConfigLoader({
    cwd: workspace.basePath,
    configFile: cliOptions.config,
    commandName
  });
  return loader.load().andThen((fileConfig) => {
    // CLI options win over file-config values on key collisions.
    const mergedConfig = { ...fileConfig, ...cliOptions };
    return executeWithOrchestrator(commandName, mergedConfig, workspace, registry);
  });
}
|
|
3393
|
+
/**
 * Executes a command through the workflow orchestrator.
 *
 * Looks the command up in the registry, validates the merged configuration
 * against the command's schema, then delegates to a freshly created
 * orchestrator.
 *
 * @param commandName - The command to execute
 * @param config - The merged runtime configuration
 * @param workspace - The workspace for file operations
 * @param registry - The command registry
 * @returns Async result of the workflow execution
 */
function executeWithOrchestrator(commandName, config, workspace, registry) {
  const lookup = registry.get(commandName);
  if (lookup.isErr()) return notFoundErrAsync({ message: `Command "${commandName}" not found` });
  const command = lookup.value;
  const validation = command.meta.configSchema.safeParse(config);
  if (!validation.success) {
    if (DebugFlags.showRawError) logger.error(validation.error);
    return validationErrAsync({ message: formatZodErrors(validation.error) });
  }
  // Schema-parsed values override the raw merged config.
  const parsedConfig = {
    ...config,
    ...validation.data
  };
  const orchestrator = createOrchestrator(parsedConfig, workspace);
  return orchestrator.executeCommand(command, parsedConfig);
}
|
|
3418
|
+
/**
 * Creates a workflow orchestrator with the given configuration.
 *
 * Unset flags fall back to safe defaults: dry-run and verbose off,
 * rollback on.
 *
 * @param config - The runtime configuration
 * @param workspace - The workspace for file operations
 * @returns Configured workflow orchestrator
 */
function createOrchestrator(config, workspace) {
  const options = {
    workspace,
    dryRun: config.dryRun ?? false,
    enableRollback: config.enableRollback ?? true,
    verbose: config.verbose ?? false
  };
  return new WorkflowOrchestrator(options);
}
|
|
3433
|
+
/**
 * Logs version information for the Firefly CLI.
 *
 * The release command additionally reports the bundled git-cliff version.
 *
 * @param commandName - The name of the executed command
 */
function logVersionInfo(commandName) {
  const versionTag = colors.dim(`v${RuntimeEnv.version}`);
  const fireflyLabel = `${colors.magenta("firefly")} ${versionTag}`;
  if (commandName !== "release") {
    logger.info(fireflyLabel);
    return;
  }
  const gitCliffTag = colors.dim(`v${RuntimeEnv.gitCliffVersion}`);
  logger.info(`${fireflyLabel} powered by git-cliff ${gitCliffTag}`);
}
|
|
3446
|
+
/**
 * Get a description for a specific command.
 *
 * Unknown commands receive a generic fallback description.
 *
 * @param commandName - The name of the command
 * @returns A description of the command
 */
function getCommandDescription(commandName) {
  const descriptions = new Map([
    ["release", "Automated semantic versioning, changelog generation, and GitHub release creation"]
  ]);
  return descriptions.get(commandName) ?? `Run the ${commandName} command`;
}
|
|
3455
|
+
|
|
3456
|
+
//#endregion
|
|
3457
|
+
export { createFireflyCLI };
|