@hyperfrontend/versioning 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +50 -1
- package/CHANGELOG.md +37 -23
- package/README.md +19 -14
- package/changelog/index.cjs.js +38 -6
- package/changelog/index.cjs.js.map +1 -1
- package/changelog/index.esm.js +38 -6
- package/changelog/index.esm.js.map +1 -1
- package/changelog/models/entry.d.ts +5 -0
- package/changelog/models/entry.d.ts.map +1 -1
- package/changelog/models/index.cjs.js +2 -0
- package/changelog/models/index.cjs.js.map +1 -1
- package/changelog/models/index.esm.js +2 -0
- package/changelog/models/index.esm.js.map +1 -1
- package/changelog/operations/index.cjs.js.map +1 -1
- package/changelog/operations/index.esm.js.map +1 -1
- package/changelog/parse/index.cjs.js +85 -6
- package/changelog/parse/index.cjs.js.map +1 -1
- package/changelog/parse/index.esm.js +85 -6
- package/changelog/parse/index.esm.js.map +1 -1
- package/changelog/parse/line.d.ts.map +1 -1
- package/changelog/parse/parser.d.ts +0 -6
- package/changelog/parse/parser.d.ts.map +1 -1
- package/commits/classify/classifier.d.ts +73 -0
- package/commits/classify/classifier.d.ts.map +1 -0
- package/commits/classify/index.cjs.js +707 -0
- package/commits/classify/index.cjs.js.map +1 -0
- package/commits/classify/index.d.ts +8 -0
- package/commits/classify/index.d.ts.map +1 -0
- package/commits/classify/index.esm.js +679 -0
- package/commits/classify/index.esm.js.map +1 -0
- package/commits/classify/infrastructure.d.ts +205 -0
- package/commits/classify/infrastructure.d.ts.map +1 -0
- package/commits/classify/models.d.ts +108 -0
- package/commits/classify/models.d.ts.map +1 -0
- package/commits/classify/project-scopes.d.ts +69 -0
- package/commits/classify/project-scopes.d.ts.map +1 -0
- package/commits/index.cjs.js +704 -0
- package/commits/index.cjs.js.map +1 -1
- package/commits/index.d.ts +1 -0
- package/commits/index.d.ts.map +1 -1
- package/commits/index.esm.js +678 -1
- package/commits/index.esm.js.map +1 -1
- package/flow/executor/execute.d.ts +6 -0
- package/flow/executor/execute.d.ts.map +1 -1
- package/flow/executor/index.cjs.js +1617 -43
- package/flow/executor/index.cjs.js.map +1 -1
- package/flow/executor/index.esm.js +1623 -49
- package/flow/executor/index.esm.js.map +1 -1
- package/flow/index.cjs.js +6749 -2938
- package/flow/index.cjs.js.map +1 -1
- package/flow/index.esm.js +6751 -2944
- package/flow/index.esm.js.map +1 -1
- package/flow/models/index.cjs.js +138 -0
- package/flow/models/index.cjs.js.map +1 -1
- package/flow/models/index.d.ts +1 -1
- package/flow/models/index.d.ts.map +1 -1
- package/flow/models/index.esm.js +138 -1
- package/flow/models/index.esm.js.map +1 -1
- package/flow/models/types.d.ts +180 -3
- package/flow/models/types.d.ts.map +1 -1
- package/flow/presets/conventional.d.ts +9 -8
- package/flow/presets/conventional.d.ts.map +1 -1
- package/flow/presets/independent.d.ts.map +1 -1
- package/flow/presets/index.cjs.js +3641 -303
- package/flow/presets/index.cjs.js.map +1 -1
- package/flow/presets/index.esm.js +3641 -303
- package/flow/presets/index.esm.js.map +1 -1
- package/flow/presets/synced.d.ts.map +1 -1
- package/flow/steps/analyze-commits.d.ts +9 -6
- package/flow/steps/analyze-commits.d.ts.map +1 -1
- package/flow/steps/calculate-bump.d.ts.map +1 -1
- package/flow/steps/fetch-registry.d.ts.map +1 -1
- package/flow/steps/generate-changelog.d.ts +5 -0
- package/flow/steps/generate-changelog.d.ts.map +1 -1
- package/flow/steps/index.cjs.js +3663 -328
- package/flow/steps/index.cjs.js.map +1 -1
- package/flow/steps/index.d.ts +2 -1
- package/flow/steps/index.d.ts.map +1 -1
- package/flow/steps/index.esm.js +3661 -329
- package/flow/steps/index.esm.js.map +1 -1
- package/flow/steps/resolve-repository.d.ts +36 -0
- package/flow/steps/resolve-repository.d.ts.map +1 -0
- package/flow/steps/update-packages.d.ts.map +1 -1
- package/git/factory.d.ts +14 -0
- package/git/factory.d.ts.map +1 -1
- package/git/index.cjs.js +65 -0
- package/git/index.cjs.js.map +1 -1
- package/git/index.esm.js +66 -2
- package/git/index.esm.js.map +1 -1
- package/git/operations/index.cjs.js +40 -0
- package/git/operations/index.cjs.js.map +1 -1
- package/git/operations/index.d.ts +1 -1
- package/git/operations/index.d.ts.map +1 -1
- package/git/operations/index.esm.js +41 -2
- package/git/operations/index.esm.js.map +1 -1
- package/git/operations/log.d.ts +23 -0
- package/git/operations/log.d.ts.map +1 -1
- package/index.cjs.js +7547 -4947
- package/index.cjs.js.map +1 -1
- package/index.d.ts +3 -1
- package/index.d.ts.map +1 -1
- package/index.esm.js +7550 -4954
- package/index.esm.js.map +1 -1
- package/package.json +39 -1
- package/registry/index.cjs.js +3 -3
- package/registry/index.cjs.js.map +1 -1
- package/registry/index.esm.js +3 -3
- package/registry/index.esm.js.map +1 -1
- package/registry/models/index.cjs.js +2 -0
- package/registry/models/index.cjs.js.map +1 -1
- package/registry/models/index.esm.js +2 -0
- package/registry/models/index.esm.js.map +1 -1
- package/registry/models/version-info.d.ts +10 -0
- package/registry/models/version-info.d.ts.map +1 -1
- package/registry/npm/client.d.ts.map +1 -1
- package/registry/npm/index.cjs.js +1 -3
- package/registry/npm/index.cjs.js.map +1 -1
- package/registry/npm/index.esm.js +1 -3
- package/registry/npm/index.esm.js.map +1 -1
- package/repository/index.cjs.js +998 -0
- package/repository/index.cjs.js.map +1 -0
- package/repository/index.d.ts +4 -0
- package/repository/index.d.ts.map +1 -0
- package/repository/index.esm.js +981 -0
- package/repository/index.esm.js.map +1 -0
- package/repository/models/index.cjs.js +301 -0
- package/repository/models/index.cjs.js.map +1 -0
- package/repository/models/index.d.ts +7 -0
- package/repository/models/index.d.ts.map +1 -0
- package/repository/models/index.esm.js +290 -0
- package/repository/models/index.esm.js.map +1 -0
- package/repository/models/platform.d.ts +58 -0
- package/repository/models/platform.d.ts.map +1 -0
- package/repository/models/repository-config.d.ts +132 -0
- package/repository/models/repository-config.d.ts.map +1 -0
- package/repository/models/resolution.d.ts +121 -0
- package/repository/models/resolution.d.ts.map +1 -0
- package/repository/parse/index.cjs.js +755 -0
- package/repository/parse/index.cjs.js.map +1 -0
- package/repository/parse/index.d.ts +5 -0
- package/repository/parse/index.d.ts.map +1 -0
- package/repository/parse/index.esm.js +749 -0
- package/repository/parse/index.esm.js.map +1 -0
- package/repository/parse/package-json.d.ts +100 -0
- package/repository/parse/package-json.d.ts.map +1 -0
- package/repository/parse/url.d.ts +81 -0
- package/repository/parse/url.d.ts.map +1 -0
- package/repository/url/compare.d.ts +84 -0
- package/repository/url/compare.d.ts.map +1 -0
- package/repository/url/index.cjs.js +178 -0
- package/repository/url/index.cjs.js.map +1 -0
- package/repository/url/index.d.ts +3 -0
- package/repository/url/index.d.ts.map +1 -0
- package/repository/url/index.esm.js +176 -0
- package/repository/url/index.esm.js.map +1 -0
- package/workspace/discovery/changelog-path.d.ts +3 -7
- package/workspace/discovery/changelog-path.d.ts.map +1 -1
- package/workspace/discovery/index.cjs.js +408 -335
- package/workspace/discovery/index.cjs.js.map +1 -1
- package/workspace/discovery/index.esm.js +408 -335
- package/workspace/discovery/index.esm.js.map +1 -1
- package/workspace/discovery/packages.d.ts +0 -6
- package/workspace/discovery/packages.d.ts.map +1 -1
- package/workspace/index.cjs.js +84 -11
- package/workspace/index.cjs.js.map +1 -1
- package/workspace/index.esm.js +84 -11
- package/workspace/index.esm.js.map +1 -1
|
@@ -1,3 +1,6 @@
|
|
|
1
|
+
import { join, basename, relative } from 'node:path';
|
|
2
|
+
import { existsSync, readFileSync, statSync, lstatSync, readdirSync } from 'node:fs';
|
|
3
|
+
|
|
1
4
|
/**
|
|
2
5
|
* Creates a version flow.
|
|
3
6
|
*
|
|
@@ -83,98 +86,2715 @@ function createStep(id, name, execute, options = {}) {
|
|
|
83
86
|
};
|
|
84
87
|
}
|
|
85
88
|
/**
 * Builds a FlowStepResult describing a step that was intentionally not run.
 *
 * @param message - Explanation for why the step was skipped
 * @returns A FlowStepResult with 'skipped' status
 */
function createSkippedResult(message) {
    const result = {
        status: 'skipped',
        message,
    };
    return result;
}
|
|
100
|
+
|
|
101
|
+
// Stable identifier for the fetch-registry step; used by createStep below.
const FETCH_REGISTRY_STEP_ID = 'fetch-registry';
/**
 * Creates the fetch-registry step.
 *
 * This step:
 * 1. Queries the registry for the latest published version
 * 2. Reads the current version from package.json
 * 3. Determines if this is a first release
 *
 * State updates:
 * - publishedVersion: Latest version on registry (null if not published)
 * - publishedCommit: Commit hash (gitHead) of the published version, or null
 * - currentVersion: Version from local package.json
 * - isFirstRelease: True if never published
 *
 * Failure policy: registry/file-system errors never fail the step; they are
 * logged and the step falls back to first-release defaults.
 *
 * @returns A FlowStep that fetches registry information
 */
function createFetchRegistryStep() {
    return createStep(FETCH_REGISTRY_STEP_ID, 'Fetch Registry Version', async (ctx) => {
        // ctx supplies the registry client, a virtual file tree, and a logger.
        const { registry, tree, projectRoot, packageName, logger } = ctx;
        // Read local package.json for current version
        const packageJsonPath = `${projectRoot}/package.json`;
        let currentVersion = '0.0.0';
        try {
            const content = tree.read(packageJsonPath, 'utf-8');
            if (content) {
                // NOTE(review): `parse` is defined elsewhere in the bundle —
                // presumably a JSON/JSONC parser; confirm against its import.
                const pkg = parse(content);
                currentVersion = pkg.version ?? '0.0.0';
            }
        }
        catch (error) {
            // Missing/unreadable package.json is non-fatal: keep the '0.0.0' default.
            logger.warn(`Could not read package.json: ${error}`);
        }
        // Query registry for published version
        let publishedVersion = null;
        let publishedCommit = null;
        let isFirstRelease = true;
        try {
            publishedVersion = await registry.getLatestVersion(packageName);
            isFirstRelease = publishedVersion === null;
            // When published version exists, get its commit hash from gitHead
            if (publishedVersion) {
                try {
                    const versionInfo = await registry.getVersionInfo(packageName, publishedVersion);
                    publishedCommit = versionInfo?.gitHead ?? null;
                    if (publishedCommit) {
                        logger.debug(`Published ${publishedVersion} at commit ${publishedCommit.slice(0, 7)}`);
                    }
                    else {
                        logger.debug(`Published ${publishedVersion} has no gitHead (older package or published without git)`);
                    }
                }
                catch (error) {
                    // Version info fetch failed, but we still have the version
                    logger.debug(`Could not fetch version info for ${publishedVersion}: ${error}`);
                }
            }
        }
        catch (error) {
            // Package might not exist yet, which is fine
            logger.debug(`Registry query failed (package may not exist): ${error}`);
            isFirstRelease = true;
        }
        // Human-readable summary surfaced in the step result.
        const message = isFirstRelease
            ? `First release (local: ${currentVersion})`
            : `Published: ${publishedVersion}${publishedCommit ? ` @ ${publishedCommit.slice(0, 7)}` : ''}, Local: ${currentVersion}`;
        return {
            status: 'success',
            stateUpdates: {
                publishedVersion,
                publishedCommit,
                currentVersion,
                isFirstRelease,
            },
            message,
        };
    });
}
|
|
178
|
+
|
|
179
|
+
/**
 * Safe copies of Error built-ins via factory functions.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * exposes a factory that calls Reflect.construct with references captured at
 * module-initialization time, guarding against prototype pollution attacks.
 * Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/error
 */
// Capture references at module initialization time
const _Error = globalThis.Error;
const _Reflect$4 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Error using the captured Error constructor.
 * Use this instead of `new Error()`.
 *
 * @param message - Optional error message.
 * @param options - Optional error options.
 * @returns A new Error instance.
 */
const createError = function createError(message, options) {
    return _Reflect$4.construct(_Error, [message, options]);
};
|
|
202
|
+
|
|
203
|
+
/**
 * Creates a new RepositoryConfig.
 *
 * Validates that custom platforms supply a compare-URL formatter and
 * normalizes the base URL (trailing slashes and `.git` suffix stripped).
 *
 * @param options - Repository configuration options
 * @returns A new RepositoryConfig object
 * @throws {Error} if platform is 'custom' but no formatCompareUrl is provided
 *
 * @example
 * ```typescript
 * // GitHub repository
 * const config = createRepositoryConfig({
 *   platform: 'github',
 *   baseUrl: 'https://github.com/owner/repo'
 * })
 *
 * // Custom platform
 * const customConfig = createRepositoryConfig({
 *   platform: 'custom',
 *   baseUrl: 'https://my-git.internal/repo',
 *   formatCompareUrl: (from, to) => `https://my-git.internal/diff/${from}/${to}`
 * })
 * ```
 */
function createRepositoryConfig(options) {
    const { platform, formatCompareUrl } = options;
    // A 'custom' platform has no built-in URL format, so the formatter is mandatory.
    if (platform === 'custom' && !formatCompareUrl) {
        throw createError("Repository config with platform 'custom' requires a formatCompareUrl function");
    }
    return {
        platform,
        baseUrl: normalizeBaseUrl(options.baseUrl),
        formatCompareUrl,
    };
}
|
|
243
|
+
/**
 * Checks if a value is a RepositoryConfig object.
 *
 * A RepositoryConfig has string `platform` and `baseUrl` properties and an
 * optional `formatCompareUrl` function.
 *
 * @param value - Value to check
 * @returns True if the value is a RepositoryConfig
 *
 * @example
 * ```typescript
 * const config = { platform: 'github', baseUrl: 'https://...' }
 * if (isRepositoryConfig(config)) {
 *   // config is typed as RepositoryConfig
 * }
 * ```
 */
function isRepositoryConfig(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    const { platform, baseUrl, formatCompareUrl } = value;
    if (typeof platform !== 'string' || typeof baseUrl !== 'string') {
        return false;
    }
    return formatCompareUrl === undefined || typeof formatCompareUrl === 'function';
}
|
|
266
|
+
/**
 * Normalizes a base URL by stripping trailing slashes and a `.git` suffix.
 *
 * @param url - URL to normalize
 * @returns Normalized URL
 *
 * @internal
 */
function normalizeBaseUrl(url) {
    // Drop surrounding whitespace and any run of trailing slashes in one pass.
    let normalized = url.trim().replace(/\/+$/, '');
    // Drop the conventional git clone suffix, if present.
    if (normalized.endsWith('.git')) {
        normalized = normalized.slice(0, -'.git'.length);
    }
    return normalized;
}
|
|
286
|
+
|
|
287
|
+
/**
 * Checks if a value is a RepositoryResolution object.
 *
 * A RepositoryResolution is any object whose `mode` property is one of
 * 'explicit', 'inferred', or 'disabled' (a disabled resolution generates no
 * compare URLs).
 *
 * @param value - Value to check
 * @returns True if the value is a RepositoryResolution
 */
function isRepositoryResolution(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    const validModes = ['explicit', 'inferred', 'disabled'];
    return validModes.includes(value['mode']);
}
/**
 * Default inference order when mode is 'inferred'.
 */
const DEFAULT_INFERENCE_ORDER = ['package-json', 'git-remote'];
|
|
318
|
+
|
|
319
|
+
/**
 * Safe copy of the Map built-in via a factory function.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * exposes a factory that calls Reflect.construct with references captured at
 * module-initialization time, guarding against prototype pollution attacks.
 * Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/map
 */
// Capture references at module initialization time
const _Map = globalThis.Map;
const _Reflect$3 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Map using the captured Map constructor.
 * Use this instead of `new Map()`.
 *
 * @param iterable - Optional iterable of key-value pairs.
 * @returns A new Map instance.
 */
const createMap = function createMap(iterable) {
    const args = iterable ? [iterable] : [];
    return _Reflect$3.construct(_Map, args);
};
|
|
341
|
+
|
|
342
|
+
/**
 * Safe copies of Math built-in methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/math
 */
// Capture references at module initialization time
const _Math = globalThis.Math;
/**
 * (Safe copies) Min/Max:
 * `max` returns the larger of zero or more numbers;
 * `min` returns the smaller of zero or more numbers.
 */
const { max, min } = _Math;
|
|
363
|
+
|
|
364
|
+
/**
 * Safe copies of URL built-ins via factory functions.
 *
 * Provides safe references to URL and URLSearchParams.
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/url
 */
// Capture references at module initialization time
const _URL = globalThis.URL;
const _Reflect$2 = globalThis.Reflect;
// ============================================================================
// URL
// ============================================================================
/**
 * (Safe copy) Creates a new URL using the captured URL constructor.
 * Use this instead of `new URL()`.
 *
 * @param url - The URL string to parse.
 * @param base - Optional base URL for relative URLs.
 * @returns A new URL instance.
 */
const createURL = (url, base) => _Reflect$2.construct(_URL, [url, base]);
/**
 * (Safe copy) Creates an object URL for the given object.
 * Use this instead of `URL.createObjectURL()`.
 *
 * Note: This is a browser-only API. In Node.js environments, this will throw.
 */
// NOTE(review): the result of this expression is never assigned or exported —
// it looks like tree-shaking residue of a removed binding. The expression is
// side-effect free (bind only), so it is dead code; safe to drop upstream.
typeof _URL.createObjectURL === 'function'
    ? _URL.createObjectURL.bind(_URL)
    : () => {
        throw new Error('URL.createObjectURL is not available in this environment');
    };
/**
 * (Safe copy) Revokes an object URL previously created with createObjectURL.
 * Use this instead of `URL.revokeObjectURL()`.
 *
 * Note: This is a browser-only API. In Node.js environments, this will throw.
 */
// NOTE(review): same as above — unassigned, side-effect-free expression left
// behind by the bundler; safe to drop upstream.
typeof _URL.revokeObjectURL === 'function'
    ? _URL.revokeObjectURL.bind(_URL)
    : () => {
        throw new Error('URL.revokeObjectURL is not available in this environment');
    };
|
|
410
|
+
|
|
411
|
+
/**
 * Checks if a platform identifier is a known platform with built-in support.
 *
 * @param platform - Platform identifier to check
 * @returns True if the platform is a known platform
 *
 * @example
 * ```typescript
 * isKnownPlatform('github') // true
 * isKnownPlatform('gitlab') // true
 * isKnownPlatform('custom') // false
 * isKnownPlatform('unknown') // false
 * ```
 */
function isKnownPlatform(platform) {
    switch (platform) {
        case 'github':
        case 'gitlab':
        case 'bitbucket':
        case 'azure-devops':
            return true;
        default:
            return false;
    }
}
|
|
428
|
+
/**
 * Known platform hostnames mapped to their platform type.
 * Used for automatic platform detection from repository URLs.
 *
 * Covers the standard SaaS domains; self-hosted instances are handled by
 * heuristics in detectPlatformFromHostname.
 */
const PLATFORM_HOSTNAMES = createMap(Object.entries({
    'github.com': 'github',
    'gitlab.com': 'gitlab',
    'bitbucket.org': 'bitbucket',
    'dev.azure.com': 'azure-devops',
    'visualstudio.com': 'azure-devops',
}));
|
|
445
|
+
/**
 * Detects platform from a hostname.
 *
 * Resolution order: exact match against known hostnames, then Azure DevOps
 * domain suffixes, then substring heuristics for self-hosted instances
 * (e.g. `github.company.com` → `github`).
 *
 * @param hostname - Hostname to detect platform from (e.g., "github.com")
 * @returns Detected platform or 'unknown' if not recognized
 *
 * @example
 * ```typescript
 * detectPlatformFromHostname('github.com') // 'github'
 * detectPlatformFromHostname('gitlab.mycompany.com') // 'gitlab'
 * detectPlatformFromHostname('custom-git.internal') // 'unknown'
 * ```
 */
function detectPlatformFromHostname(hostname) {
    const host = hostname.toLowerCase();
    // Exact matches against the known SaaS domains win.
    const exact = PLATFORM_HOSTNAMES.get(host);
    if (exact) {
        return exact;
    }
    // Azure DevOps legacy ({org}.visualstudio.com) and modern
    // (*.azure.com, which includes ssh.dev.azure.com) domain patterns.
    if (host.endsWith('.visualstudio.com') || host.endsWith('.azure.com')) {
        return 'azure-devops';
    }
    // Self-hosted heuristics: the platform name usually appears in the
    // hostname (GitHub Enterprise, self-hosted GitLab, Bitbucket Server).
    for (const keyword of ['github', 'gitlab', 'bitbucket']) {
        if (host.includes(keyword)) {
            return keyword;
        }
    }
    return 'unknown';
}
|
|
491
|
+
|
|
492
|
+
/**
 * Parses a git URL and extracts platform and base URL.
 *
 * Supports multiple URL formats:
 * - `https://github.com/owner/repo`
 * - `https://github.com/owner/repo.git`
 * - `git+https://github.com/owner/repo.git`
 * - `git://github.com/owner/repo.git`
 * - `git@github.com:owner/repo.git` (SSH format)
 *
 * Handles self-hosted instances by detecting platform from hostname
 * (e.g. `gitlab.internal.com` → `gitlab`) and the Azure DevOps URL shapes
 * (`https://dev.azure.com/org/project/_git/repo`,
 * `https://org.visualstudio.com/project/_git/repo`).
 *
 * @param gitUrl - Git repository URL in any supported format
 * @returns Parsed repository info with platform and base URL, or null if parsing fails
 *
 * @example
 * ```typescript
 * parseRepositoryUrl('git@github.com:owner/repo.git')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function parseRepositoryUrl(gitUrl) {
    if (typeof gitUrl !== 'string') {
        return null;
    }
    const candidate = gitUrl.trim();
    if (candidate.length === 0) {
        return null;
    }
    // SSH form (git@host:path) takes precedence; otherwise fall back to HTTP(S).
    return parseSshUrl(candidate) ?? parseHttpUrl(candidate);
}
|
|
552
|
+
/**
 * Parses an SSH-style git URL.
 *
 * Accepts an optional `ssh://` scheme followed by `git@hostname:path` or
 * `git@hostname/path`.
 *
 * @param url - URL to parse (e.g., "git@github.com:owner/repo.git")
 * @returns Parsed repository or null
 *
 * @internal
 */
function parseSshUrl(url) {
    // Strip an optional ssh:// scheme.
    let rest = url.startsWith('ssh://') ? url.slice('ssh://'.length) : url;
    // SSH clone URLs always carry the git@ user prefix.
    if (!rest.startsWith('git@')) {
        return null;
    }
    rest = rest.slice('git@'.length);
    // The hostname ends at the first ':' or '/', whichever comes first.
    const colonIndex = rest.indexOf(':');
    const slashIndex = rest.indexOf('/');
    const candidates = [colonIndex, slashIndex].filter((i) => i !== -1);
    if (candidates.length === 0) {
        return null;
    }
    const separatorIndex = min(...candidates);
    const hostname = rest.slice(0, separatorIndex);
    const pathPart = normalizePathPart(rest.slice(separatorIndex + 1));
    if (!hostname || !pathPart) {
        return null;
    }
    const platform = detectPlatformFromHostname(hostname);
    // Azure DevOps needs its own base-URL shape (org/project/_git/repo).
    if (platform === 'azure-devops') {
        const baseUrl = constructAzureDevOpsBaseUrl(hostname, pathPart);
        return baseUrl ? { platform, baseUrl } : null;
    }
    // Standard platforms: https://hostname/path
    return { platform, baseUrl: `https://${hostname}/${pathPart}` };
}
|
|
606
|
+
/**
 * Parses an HTTP(S)-style git URL.
 *
 * Accepts `http(s)://`, `git+http(s)://`, and `git://` prefixes; other
 * protocols are rejected.
 *
 * @param url - URL to parse
 * @returns Parsed repository or null
 *
 * @internal
 */
function parseHttpUrl(url) {
    // Normalize git-flavored prefixes: git+https:// → https://, git:// → https://
    const normalized = url.replace(/^git\+/, '').replace(/^git:\/\//, 'https://');
    let parsed;
    try {
        parsed = createURL(normalized);
    }
    catch {
        return null;
    }
    // Only support http and https protocols
    const { protocol } = parsed;
    if (protocol !== 'http:' && protocol !== 'https:') {
        return null;
    }
    const hostname = parsed.hostname.toLowerCase();
    const pathPart = normalizePathPart(parsed.pathname);
    if (!pathPart) {
        return null;
    }
    const platform = detectPlatformFromHostname(hostname);
    // Azure DevOps needs its own base-URL shape (org/project/_git/repo).
    if (platform === 'azure-devops') {
        const baseUrl = constructAzureDevOpsBaseUrl(hostname, pathPart);
        return baseUrl ? { platform, baseUrl } : null;
    }
    // Standard platforms keep the original protocol.
    return { platform, baseUrl: `${protocol}//${hostname}/${pathPart}` };
}
|
|
649
|
+
/**
 * Normalizes a path part by removing surrounding slashes and a `.git` suffix.
 *
 * @param path - Path to normalize
 * @returns Normalized path or null if empty
 *
 * @internal
 */
function normalizePathPart(path) {
    // Trim whitespace, then strip leading and trailing slash runs in one pass each.
    let result = path.trim().replace(/^\/+/, '').replace(/\/+$/, '');
    // Drop the conventional git clone suffix, if present.
    if (result.endsWith('.git')) {
        result = result.slice(0, -'.git'.length);
    }
    // An empty remainder means there was no usable path.
    return result || null;
}
|
|
677
|
+
/**
 * Constructs the canonical base URL for Azure DevOps repositories.
 *
 * Azure DevOps has special URL structures:
 * - Modern: `https://dev.azure.com/{org}/{project}/_git/{repo}`
 * - Legacy: `https://{org}.visualstudio.com/{project}/_git/{repo}`
 * - SSH: `git@ssh.dev.azure.com:v3/{org}/{project}/{repo}`
 *
 * All accepted shapes are normalized to the modern `dev.azure.com` form.
 *
 * @param hostname - Hostname from the URL
 * @param pathPart - Path portion after hostname
 * @returns Constructed base URL or null if invalid
 *
 * @internal
 */
function constructAzureDevOpsBaseUrl(hostname, pathPart) {
    const segments = pathPart.split('/');
    // dev.azure.com format: org/project/_git/repo
    if (hostname === 'dev.azure.com' || hostname.endsWith('.azure.com')) {
        // Both accepted shapes need at least four segments:
        // org/project/_git/repo, or SSH v3: v3/org/project/repo.
        if (segments.length < 4) {
            return null;
        }
        // SSH v3 form: v3/org/project/repo → modern web form.
        if (segments[0] === 'v3') {
            const [, org, project, repo] = segments;
            if (org && project && repo) {
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        // Standard web form: everything before `_git` is org/project.
        const gitIndex = segments.indexOf('_git');
        if (gitIndex >= 2 && segments[gitIndex + 1]) {
            const org = segments.slice(0, gitIndex - 1).join('/');
            const project = segments[gitIndex - 1];
            const repo = segments[gitIndex + 1];
            if (org && project && repo) {
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        return null;
    }
    // Legacy visualstudio.com format: {org}.visualstudio.com/project/_git/repo
    if (hostname.endsWith('.visualstudio.com')) {
        const org = hostname.replace('.visualstudio.com', '');
        const gitIndex = segments.indexOf('_git');
        if (gitIndex >= 1 && segments[gitIndex + 1]) {
            const project = segments.slice(0, gitIndex).join('/');
            const repo = segments[gitIndex + 1];
            if (project && repo) {
                // Normalize the legacy host to the dev.azure.com form.
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        return null;
    }
    return null;
}
|
|
737
|
+
/**
 * Creates a RepositoryConfig from a git URL.
 *
 * Convenience wrapper that runs `parseRepositoryUrl` and feeds the result
 * into `createRepositoryConfig`, producing a ready-to-use configuration.
 *
 * @param gitUrl - Git repository URL in any supported format
 * @returns RepositoryConfig or null if URL cannot be parsed
 *
 * @example
 * ```typescript
 * const config = createRepositoryConfigFromUrl('https://github.com/owner/repo')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 *
 * const config = createRepositoryConfigFromUrl('git@gitlab.com:group/project.git')
 * // → { platform: 'gitlab', baseUrl: 'https://gitlab.com/group/project' }
 * ```
 */
function createRepositoryConfigFromUrl(gitUrl) {
    const parsed = parseRepositoryUrl(gitUrl);
    // Unknown platforms cannot generate compare URLs, so they are treated
    // the same as unparseable input.
    if (!parsed || parsed.platform === 'unknown') {
        return null;
    }
    const { platform, baseUrl } = parsed;
    return createRepositoryConfig({ platform, baseUrl });
}
|
|
769
|
+
|
|
770
|
+
/**
 * Shorthand platform prefixes supported in the package.json `repository` field.
 *
 * Format: `"platform:owner/repo"` or `"owner/repo"` (defaults to GitHub).
 * Maps each recognized prefix to the HTTPS base URL used to expand the
 * shorthand into a full repository URL.
 *
 * @see https://docs.npmjs.com/cli/v9/configuring-npm/package-json#repository
 */
const SHORTHAND_PLATFORMS = createMap([
    ['github', 'https://github.com'],
    ['gitlab', 'https://gitlab.com'],
    ['bitbucket', 'https://bitbucket.org'],
    ['gist', 'https://gist.github.com'],
]);
|
|
783
|
+
/**
 * Infers repository configuration from package.json content.
 *
 * Handles multiple formats:
 * - Shorthand: `"github:owner/repo"`, `"gitlab:group/project"`, `"bitbucket:team/repo"`
 * - Bare shorthand: `"owner/repo"` (defaults to GitHub)
 * - URL string: `"https://github.com/owner/repo"`
 * - Object with URL: `{ "type": "git", "url": "https://..." }`
 *
 * @param packageJsonContent - Raw JSON string content of package.json
 * @returns RepositoryConfig or null if repository cannot be inferred
 *
 * @example
 * ```typescript
 * inferRepositoryFromPackageJson('{"repository": "github:owner/repo"}')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 *
 * inferRepositoryFromPackageJson('{"repository": {"type": "git", "url": "https://github.com/owner/repo"}}')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function inferRepositoryFromPackageJson(packageJsonContent) {
    if (typeof packageJsonContent !== 'string' || !packageJsonContent) {
        return null;
    }
    let pkg;
    try {
        pkg = parse(packageJsonContent);
    }
    catch {
        // Malformed JSON — nothing to infer.
        return null;
    }
    return inferRepositoryFromPackageJsonObject(pkg);
}
|
|
827
|
+
/**
 * Infers repository configuration from a parsed package.json object.
 *
 * Useful when the package.json has already been parsed.
 *
 * @param packageJson - Parsed package.json object
 * @returns RepositoryConfig or null if repository cannot be inferred
 *
 * @example
 * ```typescript
 * const pkg = { repository: 'github:owner/repo' }
 * inferRepositoryFromPackageJsonObject(pkg)
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function inferRepositoryFromPackageJsonObject(packageJson) {
    const repository = packageJson.repository;
    if (!repository) {
        return null;
    }
    // String form: shorthand ("github:owner/repo", "owner/repo") or plain URL.
    if (typeof repository === 'string') {
        return parseRepositoryString(repository);
    }
    // Object form: { type, url } — only the URL matters here.
    if (typeof repository === 'object' && repository.url) {
        return createRepositoryConfigFromUrl(repository.url);
    }
    return null;
}
|
|
857
|
+
/**
 * Parses a repository string (shorthand or URL).
 *
 * Resolution order:
 * 1. `platform:owner/repo` shorthand (known platforms expand to a full URL;
 *    unknown letter-only prefixes are retried as a URL)
 * 2. Bare `owner/repo` shorthand (defaults to GitHub)
 * 3. The whole string treated as a URL
 *
 * @param repoString - Repository string from package.json
 * @returns RepositoryConfig or null
 *
 * @internal
 */
function parseRepositoryString(repoString) {
    const trimmed = repoString.trim();
    if (!trimmed) {
        return null;
    }
    // Shorthand format: the prefix before the first colon must be letters
    // only — otherwise the colon belongs to a URL or scp-style address.
    const colonIndex = trimmed.indexOf(':');
    if (colonIndex > 0 && isOnlyLetters(trimmed.slice(0, colonIndex))) {
        const path = trimmed.slice(colonIndex + 1);
        if (path) {
            const platform = trimmed.slice(0, colonIndex).toLowerCase();
            const baseUrl = SHORTHAND_PLATFORMS.get(platform);
            // Known platform → expand shorthand; unknown → retry as a URL.
            return createRepositoryConfigFromUrl(baseUrl ? `${baseUrl}/${path}` : trimmed);
        }
    }
    // Bare shorthand like "owner/repo" (no protocol, no git@) → GitHub.
    if (!trimmed.includes('://') && !trimmed.startsWith('git@') && isBareShorthand(trimmed)) {
        return createRepositoryConfigFromUrl(`https://github.com/${trimmed}`);
    }
    // Last resort: treat the whole string as a full URL.
    return createRepositoryConfigFromUrl(trimmed);
}
|
|
902
|
+
/**
 * Checks if a string contains only ASCII letters (a-z, A-Z).
 *
 * Replaces the previous hand-rolled charCode loop with an anchored regex;
 * `+` requires at least one character, so the empty string returns false,
 * matching the original behavior.
 *
 * @param str - String to check
 * @returns True if string is non-empty and contains only letters
 *
 * @internal
 */
function isOnlyLetters(str) {
    return /^[A-Za-z]+$/.test(str);
}
|
|
921
|
+
/**
 * Checks if a string is a bare shorthand format (owner/repo).
 * Requires exactly one forward slash with content on both sides.
 *
 * @param str - String to check
 * @returns True if string matches owner/repo format
 *
 * @internal
 */
function isBareShorthand(str) {
    const first = str.indexOf('/');
    const last = str.lastIndexOf('/');
    // One slash only, neither at the start nor at the end.
    return first > 0 && first === last && first < str.length - 1;
}
|
|
938
|
+
|
|
939
|
+
// Step identifier used for registration and lookup in the flow.
const RESOLVE_REPOSITORY_STEP_ID = 'resolve-repository';
/**
 * Creates the resolve-repository step.
 *
 * This step resolves repository configuration for compare URL generation.
 * It supports multiple resolution modes, dispatched in this order:
 *
 * - `undefined` or `'disabled'`: No-op, backward compatible default
 * - `RepositoryConfig`: Direct repository configuration provided
 * - `'inferred'`: Auto-detect from package.json or git remote
 * - `RepositoryResolution`: Fine-grained control with mode and options
 *
 * State updates:
 * - repositoryConfig: Resolved repository configuration (if successful)
 *
 * @returns A FlowStep that resolves repository configuration
 *
 * @example
 * ```typescript
 * // Auto-detect repository
 * const flow = createFlow({
 *   repository: 'inferred'
 * })
 *
 * // Explicit repository
 * const flow = createFlow({
 *   repository: {
 *     platform: 'github',
 *     baseUrl: 'https://github.com/owner/repo'
 *   }
 * })
 * ```
 */
function createResolveRepositoryStep() {
    return createStep(RESOLVE_REPOSITORY_STEP_ID, 'Resolve Repository', async (ctx) => {
        const { config, logger, tree, git, projectRoot } = ctx;
        const repoConfig = config.repository;
        // Disabled or undefined — no-op for backward compatibility with
        // flows created before repository resolution existed.
        if (repoConfig === undefined || repoConfig === 'disabled') {
            logger.debug('Repository resolution disabled');
            return {
                status: 'skipped',
                message: 'Repository resolution disabled',
            };
        }
        // Direct RepositoryConfig provided — use it verbatim.
        if (isRepositoryConfig(repoConfig)) {
            logger.debug(`Using explicit repository config: ${repoConfig.platform}`);
            return {
                status: 'success',
                stateUpdates: {
                    repositoryConfig: repoConfig,
                },
                message: `Using explicit ${repoConfig.platform} repository`,
            };
        }
        // Shorthand 'inferred' mode — auto-detect using the default order.
        if (repoConfig === 'inferred') {
            const resolved = await inferRepository(tree, git, projectRoot, DEFAULT_INFERENCE_ORDER, logger);
            if (resolved) {
                return {
                    status: 'success',
                    stateUpdates: {
                        repositoryConfig: resolved,
                    },
                    message: `Inferred ${resolved.platform} repository from ${resolved.baseUrl}`,
                };
            }
            // Graceful degradation — skip (no compare URLs) rather than fail.
            logger.debug('Could not infer repository from package.json or git remote');
            return {
                status: 'skipped',
                message: 'Could not infer repository configuration',
            };
        }
        // Full RepositoryResolution object — delegate to the detailed handler.
        if (isRepositoryResolution(repoConfig)) {
            return handleRepositoryResolution(repoConfig, tree, git, projectRoot, logger);
        }
        // Unknown configuration — unreachable for TypeScript callers, but JS
        // callers can pass anything; degrade to a skip with a warning.
        logger.warn('Unknown repository configuration format');
        return {
            status: 'skipped',
            message: 'Unknown repository configuration format',
        };
    }, {
        description: 'Resolves repository configuration for compare URL generation',
    });
}
|
|
1028
|
+
/**
 * Handles a full RepositoryResolution configuration.
 *
 * Modes:
 * - `'disabled'`: skip
 * - `'explicit'`: require `resolution.repository` and use it verbatim
 *   (fails when it is missing)
 * - otherwise: infer using `resolution.inferenceOrder` (or the default order)
 *
 * @param resolution - Repository resolution configuration
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param logger - Logger instance
 * @returns Flow step result with repository config or skip/error status
 * @internal
 */
async function handleRepositoryResolution(resolution, tree, git, projectRoot, logger) {
    const { mode, repository, inferenceOrder } = resolution;
    // Disabled mode — explicit opt-out.
    if (mode === 'disabled') {
        logger.debug('Repository resolution explicitly disabled');
        return {
            status: 'skipped',
            message: 'Repository resolution disabled',
        };
    }
    // Explicit mode — a repository config must be supplied; this is the only
    // path in this handler that produces a failure rather than a skip.
    if (mode === 'explicit') {
        if (!repository) {
            return {
                status: 'failed',
                message: 'Repository config required when mode is "explicit"',
                error: createError('Repository config required when mode is "explicit"'),
            };
        }
        logger.debug(`Using explicit repository config: ${repository.platform}`);
        return {
            status: 'success',
            stateUpdates: {
                repositoryConfig: repository,
            },
            message: `Using explicit ${repository.platform} repository`,
        };
    }
    // Inferred mode (any remaining mode value) — honor a custom source order
    // when provided, otherwise fall back to the default.
    const order = inferenceOrder ?? DEFAULT_INFERENCE_ORDER;
    const resolved = await inferRepository(tree, git, projectRoot, order, logger);
    if (resolved) {
        return {
            status: 'success',
            stateUpdates: {
                repositoryConfig: resolved,
            },
            message: `Inferred ${resolved.platform} repository`,
        };
    }
    // Graceful degradation — skip rather than fail when nothing was found.
    logger.debug('Could not infer repository configuration');
    return {
        status: 'skipped',
        message: 'Could not infer repository configuration',
    };
}
|
|
1086
|
+
/**
 * Infers repository configuration from available sources, trying each source
 * in order and returning the first hit.
 *
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param order - Inference source order
 * @param logger - Logger instance
 * @returns Repository config or null if none found
 * @internal
 */
async function inferRepository(tree, git, projectRoot, order, logger) {
    for (const source of order) {
        // Sources are consulted sequentially so earlier entries win.
        const resolved = await inferFromSource(tree, git, projectRoot, source, logger);
        if (resolved) {
            logger.debug(`Inferred repository from ${source}: ${resolved.platform}`);
            return resolved;
        }
    }
    return null;
}
|
|
1107
|
+
/**
 * Infers repository from a single source.
 *
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param source - Inference source type ('package-json' | 'git-remote')
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
async function inferFromSource(tree, git, projectRoot, source, logger) {
    switch (source) {
        case 'package-json':
            return inferFromPackageJson(tree, projectRoot, logger);
        case 'git-remote':
            return inferFromGitRemote(git, logger);
        default:
            // Unknown sources are skipped with a warning rather than failing.
            logger.warn(`Unknown inference source: ${source}`);
            return null;
    }
}
|
|
1128
|
+
/**
 * Infers repository from the package.json `repository` field at the
 * project root.
 *
 * @param tree - Virtual file system tree
 * @param projectRoot - Path to the project root
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
function inferFromPackageJson(tree, projectRoot, logger) {
    const packageJsonPath = `${projectRoot}/package.json`;
    if (!tree.exists(packageJsonPath)) {
        logger.debug(`package.json not found at ${packageJsonPath}`);
        return null;
    }
    const content = tree.read(packageJsonPath, 'utf-8');
    if (!content) {
        logger.debug('Could not read package.json');
        return null;
    }
    const inferred = inferRepositoryFromPackageJson(content);
    if (inferred) {
        logger.debug(`Found repository in package.json: ${inferred.baseUrl}`);
    }
    return inferred;
}
|
|
1154
|
+
/**
 * Infers repository from the `origin` git remote URL.
 *
 * @param git - Git client instance
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
async function inferFromGitRemote(git, logger) {
    const remoteUrl = await git.getRemoteUrl('origin');
    if (!remoteUrl) {
        logger.debug('Could not get git remote URL');
        return null;
    }
    const inferred = createRepositoryConfigFromUrl(remoteUrl);
    if (inferred) {
        logger.debug(`Inferred repository from git remote: ${inferred.baseUrl}`);
    }
    return inferred;
}
|
|
1174
|
+
|
|
1175
|
+
/**
 * Safe copies of Set built-in via factory function.
 *
 * Since constructors cannot be safely captured via Object.assign, this module
 * provides a factory function that uses Reflect.construct internally.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/set
 */
// Capture references at module initialization time
const _Set = globalThis.Set;
const _Reflect$1 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Set using the captured Set constructor.
 * Use this instead of `new Set()`.
 *
 * @param iterable - Optional iterable of values.
 * @returns A new Set instance.
 */
const createSet = (iterable) => {
    // A falsy iterable means "no argument": `new Set()`.
    const ctorArgs = iterable ? [iterable] : [];
    return _Reflect$1.construct(_Set, ctorArgs);
};
|
|
1197
|
+
|
|
1198
|
+
/**
 * Safe copies of Object built-in methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/object
 */
// Capture references at module initialization time
const _Object = globalThis.Object;
/**
 * (Safe copies of Object statics, captured at init time:)
 * - `freeze`: prevents modification/addition of properties.
 * - `keys`: enumerable own string property names.
 * - `entries`: enumerable own [key, value] pairs.
 * - `values`: enumerable own property values.
 * - `defineProperties`: adds/modifies property descriptors.
 */
const { freeze, keys, entries, values, defineProperties } = _Object;
|
|
1229
|
+
|
|
1230
|
+
/**
 * Safe copies of Array built-in static methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/array
 */
// Capture references at module initialization time
const _Array = globalThis.Array;
/**
 * (Safe copy) Determines whether the passed value is an Array.
 */
const { isArray } = _Array;
|
|
1244
|
+
|
|
1245
|
+
/**
 * Safe copies of Console built-in methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/console
 */
// Capture references at module initialization time
const _console = globalThis.console;
/**
 * (Safe copy) Outputs a message to the console.
 */
const log = _console.log.bind(_console);
/**
 * (Safe copy) Outputs a warning message to the console.
 */
const warn = _console.warn.bind(_console);
/**
 * (Safe copy) Outputs an error message to the console.
 */
const error = _console.error.bind(_console);
/**
 * (Safe copy) Outputs an informational message to the console.
 */
const info = _console.info.bind(_console);
/**
 * (Safe copy) Outputs a debug message to the console.
 */
const debug = _console.debug.bind(_console);
// NOTE(review): the bundle previously contained fourteen additional
// unassigned statements of the form `_console.trace.bind(_console);`
// (trace, dir, table, assert, clear, count, countReset, group,
// groupCollapsed, groupEnd, time, timeEnd, timeLog) — tree-shaking residue
// whose bound-function results were discarded. `bind` has no side effects,
// so they were dead code and have been removed.
|
|
1327
|
+
|
|
1328
|
+
// Classes registered via `registerClass`; consulted by getType to report a
// more specific type name for instances of known classes.
const registeredClasses = [];

/**
 * Returns the data type of the target.
 * Uses native `typeof` operator, however, makes distinction between `null`, `array`, and `object`.
 * Also, when classes are registered via `registerClass`, it checks if objects are instance of any known registered class.
 *
 * @param target - The target to get the data type of.
 * @returns The data type of the target.
 */
const getType = (target) => {
    // typeof null === 'object', so handle null first.
    if (target === null)
        return 'null';
    const nativeDataType = typeof target;
    if (nativeDataType !== 'object') {
        return nativeDataType;
    }
    if (isArray(target))
        return 'array';
    // First registered class the target is an instance of wins.
    const match = registeredClasses.find((registeredClass) => target instanceof registeredClass);
    return match ? match.name : nativeDataType;
};
|
|
1352
|
+
|
|
1353
|
+
/**
 * Safe copies of Date built-in via factory function and static methods.
 *
 * Since constructors cannot be safely captured via Object.assign, this module
 * provides a factory function that uses Reflect.construct internally.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/date
 */
// Capture references at module initialization time
const _Date = globalThis.Date;
const _Reflect = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Date using the captured Date constructor.
 * Use this instead of `new Date(...)`.
 *
 * @param dateArgs - Arguments forwarded to the Date constructor.
 * @returns A new Date instance.
 */
function createDate(...dateArgs) {
    return _Reflect.construct(_Date, dateArgs);
}
|
|
1370
|
+
|
|
1371
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Creates a wrapper function that only executes the wrapped function if the condition function returns true.
 *
 * @param func - The function to be conditionally executed.
 * @param conditionFunc - A function that returns a boolean, determining if `func` should be executed.
 * @returns A wrapped version of `func` that executes conditionally; yields
 *   `undefined` when the condition is false.
 */
function createConditionalExecutionFunction(func, conditionFunc) {
    // The condition is re-evaluated on every invocation of the wrapper.
    return (...args) => (conditionFunc() ? func(...args) : undefined);
}
|
|
1386
|
+
|
|
1387
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Creates a wrapper function that silently ignores any errors thrown by the wrapped void function.
 * This function is specifically for wrapping functions that do not return a value (void functions).
 * Exceptions are swallowed without any logging or handling.
 *
 * @param func - The void function to be wrapped.
 * @returns A wrapped version of the input function that ignores errors.
 */
function createErrorIgnoringFunction(func) {
    return (...args) => {
        try {
            func(...args);
        }
        catch {
            // Deliberately swallowed: the wrapped call is best-effort by design.
        }
    };
}
|
|
1406
|
+
|
|
1407
|
+
/* eslint-disable @typescript-eslint/no-unused-vars */
/**
 * A no-operation function (noop) that does nothing regardless of the arguments passed.
 * It is designed to be as permissive as possible in its typing without using the `Function` keyword.
 *
 * @param args - Any arguments passed to the function (ignored)
 * @returns Always `undefined`.
 */
const noop = (...args) => undefined;
|
|
1417
|
+
|
|
1418
|
+
// Recognised log levels; 'none' disables all output entirely.
const logLevels = ['none', 'error', 'warn', 'log', 'info', 'debug'];
// Numeric severity used for threshold comparison: higher number = more severe.
// 'none' intentionally has no entry here — shouldLog() short-circuits on it.
const priority = {
    error: 4,
    warn: 3,
    log: 2,
    info: 1,
    debug: 0,
};
|
|
1426
|
+
/**
 * Validates whether a given string is a valid log level.
 *
 * @param level - The log level to validate
 * @returns True if the level is one of the recognised log levels
 */
function isValidLogLevel(level) {
    return logLevels.some((known) => known === level);
}
|
|
1435
|
+
/**
 * Creates a log level configuration manager for controlling logging behavior.
 * Provides methods to get, set, and evaluate log levels based on priority.
 *
 * @param level - The initial log level (defaults to 'error')
 * @returns A frozen configuration object with log level management methods
 * @throws {Error} When the provided or later-assigned level is not a valid log level
 */
function createLogLevelConfig(level = 'error') {
    if (!isValidLogLevel(level)) {
        // Bug fix: the previous message read "with a valid default log level",
        // contradicting the guard (which fires on an INVALID level).
        throw createError(`Cannot create log level configuration with invalid default log level '${level}'. Expected levels are ${logLevels}.`);
    }
    // Mutable holder shared by the closures below.
    const state = { level };
    const getLogLevel = () => state.level;
    const setLogLevel = (level) => {
        if (!isValidLogLevel(level)) {
            throw createError(`Cannot set value '${level}' level. Expected levels are ${logLevels}.`);
        }
        state.level = level;
    };
    // A message logs only when its priority meets the configured threshold;
    // 'none' on either side (or an unknown level) always suppresses output.
    const shouldLog = (level) => {
        if (state.level === 'none' || level === 'none' || !isValidLogLevel(level)) {
            return false;
        }
        return priority[level] >= priority[state.level];
    };
    return freeze({
        getLogLevel,
        setLogLevel,
        shouldLog,
    });
}
|
|
1467
|
+
|
|
1468
|
+
/**
 * Creates a logger instance with configurable log level filtering.
 * Each handler is wrapped so it only fires when its level passes the current
 * threshold, and any error it throws is silently swallowed.
 *
 * @param error - Function to handle error-level logs (required)
 * @param warn - Function to handle warning-level logs (optional, defaults to noop)
 * @param log - Function to handle standard logs (optional, defaults to noop)
 * @param info - Function to handle info-level logs (optional, defaults to noop)
 * @param debug - Function to handle debug-level logs (optional, defaults to noop)
 * @returns A frozen logger object with log methods and level control
 * @throws {Error} When any provided log function is invalid
 */
function createLogger(error, warn = noop, log = noop, info = noop, debug = noop) {
    // Validate handlers in a fixed order (error first) so the thrown message
    // always names the first invalid one, matching the original checks.
    const handlers = [
        ['error', error],
        ['warn', warn],
        ['log', log],
        ['info', info],
        ['debug', debug],
    ];
    for (const [label, fn] of handlers) {
        if (notValidLogFn(fn)) {
            throw createError(notFnMsg(label));
        }
    }
    const { setLogLevel, getLogLevel, shouldLog } = createLogLevelConfig();
    const wrapLogFn = (fn, level) => {
        // The shared noop sentinel needs no gating or error guard.
        if (fn === noop) {
            return fn;
        }
        return createConditionalExecutionFunction(createErrorIgnoringFunction(fn), () => shouldLog(level));
    };
    return freeze({
        error: wrapLogFn(error, 'error'),
        warn: wrapLogFn(warn, 'warn'),
        log: wrapLogFn(log, 'log'),
        info: wrapLogFn(info, 'info'),
        debug: wrapLogFn(debug, 'debug'),
        setLogLevel,
        getLogLevel,
    });
}
|
|
1513
|
+
/**
 * Determines whether a candidate log handler is NOT usable.
 * The shared `noop` sentinel is always considered usable.
 *
 * @param fn - The value to validate
 * @returns True when the value is invalid (not a function), false when usable
 */
function notValidLogFn(fn) {
    if (fn === noop) {
        return false;
    }
    return getType(fn) !== 'function';
}
|
|
1522
|
+
/**
 * Builds the error message used when a log handler fails validation.
 *
 * @param label - The name of the log function that failed validation
 * @returns A formatted error message string
 */
function notFnMsg(label) {
    const reason = `${label} is not a function`;
    return `Cannot create a logger when ${reason}`;
}
|
|
1531
|
+
|
|
1532
|
+
// Eagerly builds a logger over the module-level console bindings.
// NOTE(review): the return value is discarded — presumably a bundling artifact
// of a tree-shaken named export; confirm against the package source before
// removing, since it still validates the console bindings at load time.
createLogger(error, warn, log, info, debug);
|
|
1533
|
+
|
|
1534
|
+
/**
 * Global log level registry.
 * Tracks all created scoped loggers to allow global log level changes.
 * Entries are added in createScopedLogger; no removal is visible in this file.
 */
const loggerRegistry = createSet();
/** Placeholder substituted for sensitive metadata values during sanitization */
const REDACTED = '[REDACTED]';
|
|
1541
|
+
/**
 * Patterns that indicate a sensitive key name (all case-insensitive).
 * Keys containing these patterns will have their values sanitized.
 * Note: /key/i also matches /api[_-]?key/i inputs, so the latter is
 * technically redundant but kept for explicitness.
 */
const SENSITIVE_KEY_PATTERNS = [
    /token/i,
    /key/i,
    /password/i,
    /secret/i,
    /credential/i,
    /auth/i,
    /bearer/i,
    /api[_-]?key/i,
    /private/i,
    /passphrase/i,
];
|
|
1557
|
+
/**
 * Checks if a key name indicates sensitive data.
 *
 * @param key - Key name to check
 * @returns True if the key matches any sensitive-key pattern
 */
function isSensitiveKey(key) {
    for (const pattern of SENSITIVE_KEY_PATTERNS) {
        if (pattern.test(key)) {
            return true;
        }
    }
    return false;
}
|
|
1566
|
+
/**
 * Recursively sanitizes a value, replacing anything stored under a
 * sensitive-looking key with the REDACTED placeholder. Arrays and plain
 * objects are copied; primitives (and null/undefined) pass through unchanged.
 *
 * @param obj - Value to sanitize
 * @returns New structure with sensitive values redacted
 */
function sanitize(obj) {
    if (obj === null || obj === undefined) {
        return obj;
    }
    if (isArray(obj)) {
        return obj.map((item) => sanitize(item));
    }
    if (typeof obj !== 'object') {
        return obj;
    }
    const result = {};
    for (const [key, value] of entries(obj)) {
        if (isSensitiveKey(key)) {
            // Redact regardless of the value's type or nesting.
            result[key] = REDACTED;
            continue;
        }
        const isNested = typeof value === 'object' && value !== null;
        result[key] = isNested ? sanitize(value) : value;
    }
    return result;
}
|
|
1597
|
+
/**
 * Formats a log message as `[namespace] message`, appending stringified
 * metadata (with sensitive values redacted) when any is supplied.
 *
 * @param namespace - Logger namespace prefix
 * @param message - Log message
 * @param meta - Optional metadata object
 * @returns Formatted log string
 */
function formatMessage(namespace, message, meta) {
    const base = `[${namespace}] ${message}`;
    if (!meta || keys(meta).length === 0) {
        return base;
    }
    return `${base} ${stringify(sanitize(meta))}`;
}
|
|
1613
|
+
/**
 * Creates a scoped logger with namespace prefix and optional secret sanitization.
 * All log messages will be prefixed with [namespace] and sensitive metadata
 * values will be automatically redacted.
 *
 * @param namespace - Logger namespace (e.g., 'project-scope', 'analyze')
 * @param options - Logger configuration options (`level`, `sanitizeSecrets`)
 * @returns A configured scoped logger instance
 *
 * @example
 * ```typescript
 * const logger = createScopedLogger('project-scope')
 * logger.setLogLevel('debug')
 *
 * // Basic logging
 * logger.info('Starting analysis', { path: './project' })
 *
 * // Sensitive data is automatically redacted
 * logger.debug('Config loaded', { apiKey: 'secret123' })
 * // Output: [project-scope] Config loaded {"apiKey":"[REDACTED]"}
 * ```
 */
function createScopedLogger(namespace, options = {}) {
    const { level = 'error', sanitizeSecrets = true } = options;
    // Create wrapper functions that add namespace prefix and sanitization.
    // NOTE(review): formatMessage sanitizes metadata again internally, so with
    // sanitizeSecrets=true the meta is sanitized twice — harmless but redundant.
    const createLogFn = (baseFn) => (message, meta) => {
        const processedMeta = sanitizeSecrets && meta ? sanitize(meta) : meta;
        baseFn(formatMessage(namespace, message, processedMeta));
    };
    // Create base logger with wrapped functions. Because every handler is a
    // fresh wrapper (never the shared `noop`), createLogger gates all of them.
    const baseLogger = createLogger(createLogFn(error), createLogFn(warn), createLogFn(log), createLogFn(info), createLogFn(debug));
    // Set initial log level. NOTE(review): the original comment mentioned a
    // "global override", but none is consulted here — confirm against callers.
    baseLogger.setLogLevel(level);
    const scopedLogger = freeze({
        error: (message, meta) => baseLogger.error(message, meta),
        warn: (message, meta) => baseLogger.warn(message, meta),
        log: (message, meta) => baseLogger.log(message, meta),
        info: (message, meta) => baseLogger.info(message, meta),
        debug: (message, meta) => baseLogger.debug(message, meta),
        setLogLevel: baseLogger.setLogLevel,
        getLogLevel: baseLogger.getLogLevel,
    });
    // Register logger for global level management. Loggers are never removed
    // from the registry in this file, so the set only ever grows.
    loggerRegistry.add(scopedLogger);
    return scopedLogger;
}
|
|
1659
|
+
/**
 * Default logger instance for the project-scope library.
 * Use this for general logging within the library.
 *
 * NOTE(review): the two calls below discard their return values — most likely
 * a bundler artifact of tree-shaken named exports (`logger`, a fs-scoped
 * logger). They are kept for their registry side effects; confirm against the
 * package source before removing.
 *
 * @example
 * ```typescript
 * import { logger } from '@hyperfrontend/project-scope/core'
 *
 * logger.setLogLevel('debug')
 * logger.debug('Analyzing project', { path: './src' })
 * ```
 */
createScopedLogger('project-scope');

createScopedLogger('project-scope:fs');
|
|
1674
|
+
/**
 * Create a file system error carrying a machine-readable code and context.
 *
 * @param message - Description of what went wrong
 * @param code - Category code for this type of filesystem failure
 * @param context - Extra detail such as path, operation, and cause
 * @returns An Error decorated with enumerable `code` and `context` properties
 */
function createFileSystemError(message, code, context) {
    const fsError = createError(message);
    defineProperties(fsError, {
        code: { enumerable: true, value: code },
        context: { enumerable: true, value: context },
    });
    return fsError;
}
|
|
1690
|
+
/**
 * Read a file's contents, returning null when the file is missing or
 * cannot be read.
 *
 * @param filePath - Path to file
 * @param encoding - File encoding (default: utf-8)
 * @returns File contents or null if file doesn't exist or is unreadable
 */
function readFileIfExists(filePath, encoding = 'utf-8') {
    if (existsSync(filePath)) {
        try {
            return readFileSync(filePath, { encoding });
        }
        catch {
            // Unreadable file is treated the same as a missing one.
        }
    }
    return null;
}
|
|
1708
|
+
/**
 * Read and parse a JSON file, returning null when the file is missing,
 * unreadable, or contains invalid JSON.
 *
 * @param filePath - Path to JSON file
 * @returns Parsed JSON object or null if file doesn't exist or is invalid
 */
function readJsonFileIfExists(filePath) {
    if (existsSync(filePath)) {
        try {
            return parse(readFileSync(filePath, { encoding: 'utf-8' }));
        }
        catch {
            // Invalid JSON or read failure — treated the same as a missing file.
        }
    }
    return null;
}
|
|
1726
|
+
|
|
1727
|
+
// NOTE(review): result discarded — likely a tree-shaken named export kept for
// its registry side effect; confirm against the package source.
createScopedLogger('project-scope:fs:write');
|
|
1728
|
+
|
|
1729
|
+
/**
 * Get file stats with error handling.
 *
 * @param filePath - Path to file
 * @param followSymlinks - Whether to follow symlinks (default: true)
 * @returns Plain stats object or null if the path doesn't exist or stat fails
 */
function getFileStat(filePath, followSymlinks = true) {
    if (!existsSync(filePath)) {
        return null;
    }
    try {
        // lstat reports on the link itself; stat follows it to the target.
        const statFn = followSymlinks ? statSync : lstatSync;
        const stat = statFn(filePath);
        return {
            isFile: stat.isFile(),
            isDirectory: stat.isDirectory(),
            isSymlink: stat.isSymbolicLink(),
            size: stat.size,
            created: stat.birthtime,
            modified: stat.mtime,
            accessed: stat.atime,
            mode: stat.mode,
        };
    }
    catch {
        return null;
    }
}
|
|
1757
|
+
/**
 * Check if path is a directory.
 *
 * @param dirPath - Path to check
 * @returns True if path exists and is a directory
 */
function isDirectory(dirPath) {
    return getFileStat(dirPath)?.isDirectory ?? false;
}
|
|
1767
|
+
/**
 * Check if path exists.
 * Thin wrapper over fs.existsSync kept for a consistent internal API.
 *
 * @param filePath - Path to check
 * @returns True if path exists
 */
function exists(filePath) {
    return existsSync(filePath);
}
|
|
1776
|
+
|
|
1777
|
+
// Scoped logger for directory-listing operations below.
const fsDirLogger = createScopedLogger('project-scope:fs:dir');
|
|
1778
|
+
/**
 * List immediate contents of a directory.
 *
 * @param dirPath - Absolute or relative path to the directory
 * @returns Array of entries with metadata for each file/directory
 * @throws {Error} With code FS_NOT_FOUND when the path doesn't exist,
 *   FS_NOT_A_DIRECTORY when it isn't a directory, or FS_READ_ERROR when the
 *   read itself fails (original error attached as `context.cause`).
 *
 * @example
 * ```typescript
 * import { readDirectory } from '@hyperfrontend/project-scope'
 *
 * const entries = readDirectory('./src')
 * for (const entry of entries) {
 *   console.log(entry.name, entry.isFile ? 'file' : 'directory')
 * }
 * ```
 */
function readDirectory(dirPath) {
    fsDirLogger.debug('Reading directory', { path: dirPath });
    if (!existsSync(dirPath)) {
        fsDirLogger.debug('Directory not found', { path: dirPath });
        throw createFileSystemError(`Directory not found: ${dirPath}`, 'FS_NOT_FOUND', { path: dirPath, operation: 'readdir' });
    }
    if (!isDirectory(dirPath)) {
        fsDirLogger.debug('Path is not a directory', { path: dirPath });
        throw createFileSystemError(`Not a directory: ${dirPath}`, 'FS_NOT_A_DIRECTORY', { path: dirPath, operation: 'readdir' });
    }
    try {
        // withFileTypes gives Dirent objects, avoiding a stat call per entry.
        const entries = readdirSync(dirPath, { withFileTypes: true });
        fsDirLogger.debug('Directory read complete', { path: dirPath, entryCount: entries.length });
        return entries.map((entry) => ({
            name: entry.name,
            path: join(dirPath, entry.name),
            isFile: entry.isFile(),
            isDirectory: entry.isDirectory(),
            isSymlink: entry.isSymbolicLink(),
        }));
    }
    catch (error) {
        fsDirLogger.warn('Failed to read directory', { path: dirPath, error: error instanceof Error ? error.message : String(error) });
        // Wrap rather than rethrow so callers get a stable code + context.
        throw createFileSystemError(`Failed to read directory: ${dirPath}`, 'FS_READ_ERROR', {
            path: dirPath,
            operation: 'readdir',
            cause: error,
        });
    }
}
|
|
1825
|
+
|
|
1826
|
+
// NOTE(review): result discarded — likely a tree-shaken export kept for its
// registry side effect; confirm against the package source.
createScopedLogger('project-scope:fs:traversal');

// Scoped logger for package.json reading/validation below.
const packageLogger = createScopedLogger('project-scope:project:package');
|
|
1829
|
+
/**
 * Verifies that a value is an object whose values are all strings —
 * used for validating dependency maps and script definitions.
 * An empty object vacuously passes.
 *
 * @param value - Value to check
 * @returns True if value is a record of strings
 */
function isStringRecord(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    for (const entryValue of values(value)) {
        if (typeof entryValue !== 'string') {
            return false;
        }
    }
    return true;
}
|
|
1841
|
+
/**
 * Extracts and normalizes the workspaces field from package.json,
 * supporting both the plain string-array form and the object form
 * with a `packages` array.
 *
 * @param value - Raw workspaces value from package.json
 * @returns Normalized workspace patterns or undefined if invalid
 */
function parseWorkspaces(value) {
    const isStringArray = isArray(value) && value.every((v) => typeof v === 'string');
    if (isStringArray) {
        return value;
    }
    if (value !== null && typeof value === 'object' && isArray(value['packages'])) {
        return { packages: value['packages'] };
    }
    return undefined;
}
|
|
1860
|
+
/**
 * Validate and normalize package.json data.
 *
 * Unknown fields are preserved via the leading spread; the explicitly
 * validated fields below then take precedence, so malformed values (e.g. a
 * non-string `name`) are normalized to `undefined` rather than leaking
 * through to callers.
 *
 * @param data - Raw parsed data
 * @returns Validated package.json
 * @throws {Error} When data is not an object
 */
function validatePackageJson(data) {
    if (typeof data !== 'object' || data === null) {
        throw createError('package.json must be an object');
    }
    const pkg = data;
    return {
        // Bug fix: this spread previously came LAST, which overwrote every
        // validated field below with the raw (possibly malformed) value and
        // defeated the whole normalization. It must come first.
        ...pkg,
        name: typeof pkg['name'] === 'string' ? pkg['name'] : undefined,
        version: typeof pkg['version'] === 'string' ? pkg['version'] : undefined,
        description: typeof pkg['description'] === 'string' ? pkg['description'] : undefined,
        main: typeof pkg['main'] === 'string' ? pkg['main'] : undefined,
        module: typeof pkg['module'] === 'string' ? pkg['module'] : undefined,
        browser: typeof pkg['browser'] === 'string' ? pkg['browser'] : undefined,
        types: typeof pkg['types'] === 'string' ? pkg['types'] : undefined,
        bin: typeof pkg['bin'] === 'string' || isStringRecord(pkg['bin']) ? pkg['bin'] : undefined,
        scripts: isStringRecord(pkg['scripts']) ? pkg['scripts'] : undefined,
        dependencies: isStringRecord(pkg['dependencies']) ? pkg['dependencies'] : undefined,
        devDependencies: isStringRecord(pkg['devDependencies']) ? pkg['devDependencies'] : undefined,
        peerDependencies: isStringRecord(pkg['peerDependencies']) ? pkg['peerDependencies'] : undefined,
        optionalDependencies: isStringRecord(pkg['optionalDependencies']) ? pkg['optionalDependencies'] : undefined,
        workspaces: parseWorkspaces(pkg['workspaces']),
        exports: typeof pkg['exports'] === 'object' ? pkg['exports'] : undefined,
        engines: isStringRecord(pkg['engines']) ? pkg['engines'] : undefined,
    };
}
|
|
1891
|
+
/**
 * Attempts to read and parse package.json if it exists,
 * returning null on missing file or parse/validation failure.
 *
 * @param projectPath - Project directory path or a path ending in package.json
 * @returns Parsed and validated package.json or null if not found/invalid
 */
function readPackageJsonIfExists(projectPath) {
    // Accept either the directory or the file itself.
    const packageJsonPath = projectPath.endsWith('package.json') ? projectPath : join(projectPath, 'package.json');
    const content = readFileIfExists(packageJsonPath);
    if (!content) {
        packageLogger.debug('Package.json not found', { path: packageJsonPath });
        return null;
    }
    try {
        // parse() may throw on invalid JSON; validatePackageJson throws on
        // non-object data — both are mapped to a null return below.
        const validated = validatePackageJson(parse(content));
        packageLogger.debug('Package.json loaded', { path: packageJsonPath, name: validated.name });
        return validated;
    }
    catch {
        packageLogger.debug('Failed to parse package.json, returning null', { path: packageJsonPath });
        return null;
    }
}
|
|
1915
|
+
|
|
1916
|
+
// NOTE(review): result discarded — likely a tree-shaken export kept for its
// registry side effect; confirm against the package source.
createScopedLogger('project-scope:root');

// Scoped logger for the NX workspace detection helpers below.
const nxLogger = createScopedLogger('project-scope:nx');
|
|
1919
|
+
/**
 * Files whose presence indicates an NX workspace root.
 * Checked in order: nx.json (modern) then workspace.json (legacy).
 */
const NX_CONFIG_FILES = ['nx.json', 'workspace.json'];
/**
 * NX-specific per-project configuration file.
 */
const NX_PROJECT_FILE = 'project.json';
|
|
1927
|
+
/**
 * Check if directory is an NX workspace root.
 *
 * @param path - Directory path to check
 * @returns True if the directory contains nx.json or workspace.json
 *
 * @example
 * ```typescript
 * import { isNxWorkspace } from '@hyperfrontend/project-scope'
 *
 * if (isNxWorkspace('./my-project')) {
 *   console.log('This is an NX monorepo')
 * }
 * ```
 */
function isNxWorkspace(path) {
    // find() short-circuits on the first existing config file, matching the
    // original sequential loop (nx.json before workspace.json).
    const foundConfig = NX_CONFIG_FILES.find((configFile) => exists(join(path, configFile)));
    if (foundConfig) {
        nxLogger.debug('NX workspace detected', { path, configFile: foundConfig });
        return true;
    }
    nxLogger.debug('Not an NX workspace', { path });
    return false;
}
|
|
1952
|
+
/**
 * Check if directory is an NX project.
 *
 * @param path - Directory path to check
 * @returns True if the directory contains project.json
 */
function isNxProject(path) {
    const projectFilePath = join(path, NX_PROJECT_FILE);
    const isProject = exists(projectFilePath);
    nxLogger.debug('NX project check', { path, isProject });
    return isProject;
}
|
|
1963
|
+
/**
 * Detect NX version from package.json dependencies.
 * devDependencies takes precedence over dependencies.
 *
 * @param workspacePath - Workspace root path
 * @returns NX version string with leading semver range characters stripped, or null
 */
function detectNxVersion(workspacePath) {
    const packageJson = readPackageJsonIfExists(workspacePath);
    if (!packageJson) {
        return null;
    }
    const declared = packageJson.devDependencies?.['nx'] ?? packageJson.dependencies?.['nx'];
    if (!declared) {
        return null;
    }
    // Strip semver range characters (^, ~, >=, etc.)
    return declared.replace(/^[\^~>=<]+/, '');
}
|
|
1980
|
+
/**
 * Check if workspace is integrated (not standalone).
 * Integrated repos typically define workspaceLayout, namedInputs, or
 * targetDefaults in nx.json; any one of them (even null) counts.
 *
 * @param nxJson - Parsed nx.json configuration
 * @returns True if the workspace is integrated
 */
function isIntegratedRepo(nxJson) {
    const markers = [nxJson.workspaceLayout, nxJson.namedInputs, nxJson.targetDefaults];
    return markers.some((marker) => marker !== undefined);
}
|
|
1990
|
+
/**
 * Get comprehensive NX workspace information.
 *
 * @param workspacePath - Workspace root path
 * @returns Workspace info or null if not an NX workspace (or neither nx.json
 *   nor workspace.json could be read)
 */
function getNxWorkspaceInfo(workspacePath) {
    nxLogger.debug('Getting NX workspace info', { workspacePath });
    if (!isNxWorkspace(workspacePath)) {
        return null;
    }
    const nxJson = readJsonFileIfExists(join(workspacePath, 'nx.json'));
    if (!nxJson) {
        // Check for workspace.json as fallback (older NX)
        const workspaceJson = readJsonFileIfExists(join(workspacePath, 'workspace.json'));
        if (!workspaceJson) {
            nxLogger.debug('No nx.json or workspace.json found', { workspacePath });
            return null;
        }
        nxLogger.debug('Using legacy workspace.json', { workspacePath });
        // Create minimal nx.json from workspace.json. Legacy workspaces are
        // reported as integrated with the default apps/libs layout; no
        // defaultProject is derived in this branch.
        return {
            root: workspacePath,
            version: detectNxVersion(workspacePath),
            nxJson: {},
            isIntegrated: true,
            workspaceLayout: {
                appsDir: 'apps',
                libsDir: 'libs',
            },
        };
    }
    const info = {
        root: workspacePath,
        version: detectNxVersion(workspacePath),
        nxJson,
        isIntegrated: isIntegratedRepo(nxJson),
        defaultProject: nxJson.defaultProject,
        workspaceLayout: {
            // Fall back to NX's conventional directory names when unset.
            appsDir: nxJson.workspaceLayout?.appsDir ?? 'apps',
            libsDir: nxJson.workspaceLayout?.libsDir ?? 'libs',
        },
    };
    nxLogger.debug('NX workspace info retrieved', {
        workspacePath,
        version: info.version,
        isIntegrated: info.isIntegrated,
        defaultProject: info.defaultProject,
    });
    return info;
}
|
|
2041
|
+
|
|
2042
|
+
// NOTE(review): result discarded — likely a tree-shaken export kept for its
// registry side effect; confirm against the package source.
createScopedLogger('project-scope:nx:devkit');

// Scoped logger for project.json / project-config resolution below.
const nxConfigLogger = createScopedLogger('project-scope:nx:config');
|
|
2045
|
+
/**
 * Read project.json for an NX project.
 *
 * @param projectPath - Project directory path
 * @returns Parsed project.json or null if not found or unparseable
 */
function readProjectJson(projectPath) {
    const projectJsonPath = join(projectPath, NX_PROJECT_FILE);
    nxConfigLogger.debug('Reading project.json', { path: projectJsonPath });
    // readJsonFileIfExists already maps missing/invalid files to null.
    const result = readJsonFileIfExists(projectJsonPath);
    if (result) {
        nxConfigLogger.debug('Project.json loaded', { path: projectJsonPath, name: result.name });
    }
    else {
        nxConfigLogger.debug('Project.json not found', { path: projectJsonPath });
    }
    return result;
}
|
|
2063
|
+
/**
 * Get project configuration from project.json or the package.json `nx` field.
 * project.json takes precedence when both exist.
 *
 * @param projectPath - Project directory path
 * @param workspacePath - Workspace root path (for relative path calculation)
 * @returns Project configuration or null if not found
 */
function getProjectConfig(projectPath, workspacePath) {
    nxConfigLogger.debug('Getting project config', { projectPath, workspacePath });
    // Try project.json first
    const projectJson = readProjectJson(projectPath);
    if (projectJson) {
        nxConfigLogger.debug('Using project.json config', { projectPath, name: projectJson.name });
        return {
            ...projectJson,
            // Compute root relative to the workspace when project.json omits it.
            root: projectJson.root ?? relative(workspacePath, projectPath),
        };
    }
    // Try to infer from package.json nx field
    const packageJson = readPackageJsonIfExists(projectPath);
    if (packageJson && typeof packageJson['nx'] === 'object') {
        nxConfigLogger.debug('Using package.json nx field', { projectPath, name: packageJson.name });
        const nxConfig = packageJson['nx'];
        // NOTE(review): the trailing spread means fields in the nx config
        // override the derived name/root — presumably intentional; confirm.
        return {
            name: packageJson.name,
            root: relative(workspacePath, projectPath),
            ...nxConfig,
        };
    }
    nxConfigLogger.debug('No project config found', { projectPath });
    return null;
}
|
|
2095
|
+
/**
 * Recursively scan a directory for project.json files, mutating the supplied
 * `projects` map in place.
 *
 * @param dirPath - Directory to scan
 * @param workspacePath - Workspace root path
 * @param projects - Map to add discovered projects to (keyed by project name)
 * @param maxDepth - Maximum recursion depth
 * @param currentDepth - Current recursion depth (internal; starts at 0)
 */
function scanForProjects(dirPath, workspacePath, projects, maxDepth, currentDepth = 0) {
    if (currentDepth > maxDepth)
        return;
    try {
        const entries = readDirectory(dirPath);
        for (const entry of entries) {
            // Skip node_modules, dist, and hidden directories
            if (entry.name.startsWith('.') || entry.name === 'node_modules' || entry.name === 'dist') {
                continue;
            }
            const fullPath = join(dirPath, entry.name);
            if (entry.isDirectory) {
                // Check if this directory is an NX project
                if (isNxProject(fullPath)) {
                    const config = getProjectConfig(fullPath, workspacePath);
                    if (config) {
                        // Fall back to a path-derived name ('apps/web' -> 'apps-web')
                        // when the config declares none.
                        const name = config.name || relative(workspacePath, fullPath).replace(/[\\/]/g, '-');
                        projects.set(name, {
                            ...config,
                            name,
                            root: relative(workspacePath, fullPath),
                        });
                    }
                }
                // Recursively scan subdirectories (nested projects are allowed)
                scanForProjects(fullPath, workspacePath, projects, maxDepth, currentDepth + 1);
            }
        }
    }
    catch {
        // Directory not readable — skip silently; discovery is best-effort.
    }
}
|
|
2137
|
+
/**
 * Discover all NX projects in workspace.
 * Supports both workspace.json (older format) and project.json (newer format).
 * When workspace.json declares projects, scanning is skipped entirely.
 *
 * @param workspacePath - Workspace root path
 * @returns Map of project name to configuration
 */
function discoverNxProjects(workspacePath) {
    const projects = createMap();
    // Check for workspace.json (older NX format)
    const workspaceJson = readJsonFileIfExists(join(workspacePath, 'workspace.json'));
    if (workspaceJson?.projects) {
        for (const [name, config] of entries(workspaceJson.projects)) {
            if (typeof config === 'string') {
                // Path reference to project directory
                const projectPath = join(workspacePath, config);
                const projectConfig = getProjectConfig(projectPath, workspacePath);
                if (projectConfig) {
                    projects.set(name, { ...projectConfig, name });
                }
            }
            else if (typeof config === 'object' && config !== null) {
                // Inline config
                projects.set(name, { name, ...config });
            }
        }
        return projects;
    }
    // Scan for project.json files (newer NX format)
    const workspaceInfo = getNxWorkspaceInfo(workspacePath);
    const appsDir = workspaceInfo?.workspaceLayout.appsDir ?? 'apps';
    const libsDir = workspaceInfo?.workspaceLayout.libsDir ?? 'libs';
    const searchDirs = [appsDir, libsDir];
    // Also check packages directory (common in some setups)
    if (exists(join(workspacePath, 'packages'))) {
        searchDirs.push('packages');
    }
    for (const dir of searchDirs) {
        const dirPath = join(workspacePath, dir);
        if (exists(dirPath) && isDirectory(dirPath)) {
            try {
                // Depth 3 below each layout directory; deeper nesting is ignored.
                scanForProjects(dirPath, workspacePath, projects, 3);
            }
            catch {
                // Directory not accessible — best-effort discovery continues.
            }
        }
    }
    // Also check root-level projects (standalone projects in monorepo root)
    if (isNxProject(workspacePath)) {
        const config = readProjectJson(workspacePath);
        if (config) {
            const name = config.name || basename(workspacePath);
            projects.set(name, {
                ...config,
                name,
                root: '.',
            });
        }
    }
    return projects;
}
|
|
2199
|
+
/**
 * Build a simple project graph from discovered projects.
 * For full graph capabilities, use `@nx/devkit`.
 *
 * @param workspacePath - Workspace root path
 * @param projects - Existing configuration map to skip auto-discovery
 * @returns NxProjectGraph with nodes and dependencies
 */
function buildSimpleProjectGraph(workspacePath, projects) {
    const resolvedProjects = projects ?? discoverNxProjects(workspacePath);
    const graph = { nodes: {}, dependencies: {} };
    for (const [projectName, projectConfig] of resolvedProjects) {
        graph.nodes[projectName] = {
            name: projectName,
            type: projectConfig.projectType ?? 'library',
            data: projectConfig,
        };
        // Implicit dependencies become graph edges; entries prefixed with '!'
        // are negations and are not represented as edges.
        const edges = [];
        for (const target of projectConfig.implicitDependencies ?? []) {
            if (!target.startsWith('!')) {
                edges.push({ target, type: 'implicit' });
            }
        }
        graph.dependencies[projectName] = edges;
    }
    return graph;
}
|
|
2233
|
+
|
|
2234
|
+
/**
 * Creates an empty classification summary.
 *
 * @returns A new ClassificationSummary with all counts at zero
 */
function createEmptyClassificationSummary() {
    // Fresh objects each call — callers mutate the counters in place.
    const bySource = {
        'direct-scope': 0,
        'direct-file': 0,
        'unscoped-file': 0,
        'indirect-dependency': 0,
        'indirect-infra': 0,
        'unscoped-global': 0,
        excluded: 0,
    };
    return { total: 0, included: 0, excluded: 0, bySource };
}
|
|
2255
|
+
/**
 * Creates a classified commit.
 *
 * The include/preserveScope flags are derived purely from the source type
 * via isIncludedSource and shouldPreserveScope.
 *
 * @param commit - The parsed conventional commit
 * @param raw - The raw git commit
 * @param source - How the commit relates to the project
 * @param options - Additional classification options
 * @param options.touchedFiles - Files in the project modified by this commit
 * @param options.dependencyPath - Chain of dependencies leading to indirect inclusion
 * @returns A new ClassifiedCommit object
 */
function createClassifiedCommit(commit, raw, source, options) {
    return {
        commit,
        raw,
        source,
        include: isIncludedSource(source),
        preserveScope: shouldPreserveScope(source),
        touchedFiles: options?.touchedFiles,
        dependencyPath: options?.dependencyPath,
    };
}
|
|
2279
|
+
/**
 * Determines if a source type should be included in changelog.
 *
 * @param source - The commit source type
 * @returns True if commits with this source should be included
 */
function isIncludedSource(source) {
    const includedSources = [
        'direct-scope',
        'direct-file',
        'unscoped-file',
        'indirect-dependency',
        'indirect-infra',
    ];
    if (includedSources.includes(source)) {
        return true;
    }
    if (source === 'unscoped-global' || source === 'excluded') {
        return false;
    }
    // Unknown source types fall through (undefined), matching exhaustive-switch output.
}
|
|
2298
|
+
/**
 * Determines if scope should be preserved for a source type.
 *
 * Direct commits omit scope (redundant in project changelog).
 * Indirect commits preserve scope for context.
 *
 * @param source - The commit source type
 * @returns True if scope should be preserved in changelog
 */
function shouldPreserveScope(source) {
    // Scope adds context for indirect matches and for direct file matches
    // whose scope names something other than this project.
    const preservingSources = ['direct-file', 'indirect-dependency', 'indirect-infra'];
    if (preservingSources.includes(source)) {
        return true;
    }
    // Redundant (direct) or never shown (global/excluded).
    const droppingSources = ['direct-scope', 'unscoped-file', 'unscoped-global', 'excluded'];
    if (droppingSources.includes(source)) {
        return false;
    }
}
|
|
2321
|
+
|
|
2322
|
+
/**
 * Derives all scope variations that should match a project.
 *
 * Given a project named 'lib-versioning' with package '@hyperfrontend/versioning',
 * this generates variations like:
 * - 'lib-versioning' (full project name)
 * - 'versioning' (without lib- prefix)
 *
 * @param options - Project identification options
 * @returns Array of scope strings that match this project
 *
 * @example
 * deriveProjectScopes({ projectName: 'lib-versioning', packageName: '@hyperfrontend/versioning' })
 * // Returns: ['lib-versioning', 'versioning']
 *
 * @example
 * deriveProjectScopes({ projectName: 'app-demo', packageName: 'demo-app' })
 * // Returns: ['app-demo', 'demo']
 */
function deriveProjectScopes(options) {
    const { projectName, packageName, additionalScopes = [], prefixes = DEFAULT_PROJECT_PREFIXES } = options;
    // A set deduplicates overlapping variations (e.g. prefix-stripped name
    // equal to the unscoped package name).
    const collected = createSet();
    collected.add(projectName);
    for (const candidate of extractPrefixVariations(projectName, prefixes)) {
        collected.add(candidate);
    }
    if (packageName) {
        for (const candidate of extractPackageNameVariations(packageName)) {
            collected.add(candidate);
        }
    }
    for (const extra of additionalScopes) {
        if (extra) {
            collected.add(extra);
        }
    }
    return Array.from(collected);
}
|
|
2366
|
+
/**
 * Default project name prefixes that can be stripped for scope matching.
 *
 * Order matters: extractPrefixVariations stops at the first prefix that
 * matches, so at most one prefix is ever removed from a project name.
 */
const DEFAULT_PROJECT_PREFIXES = ['lib-', 'app-', 'e2e-', 'tool-', 'plugin-', 'feature-', 'package-'];
|
|
2370
|
+
/**
 * Generates scope variations by stripping recognized project prefixes.
 *
 * Only the first matching prefix is stripped; an empty remainder is dropped.
 *
 * @param projectName - The project name to extract variations from
 * @param prefixes - Prefixes to check and strip
 * @returns Array of scope name variations
 */
function extractPrefixVariations(projectName, prefixes) {
    const matchedPrefix = prefixes.find((prefix) => projectName.startsWith(prefix));
    if (matchedPrefix === undefined) {
        return [];
    }
    const stripped = projectName.slice(matchedPrefix.length);
    return stripped ? [stripped] : [];
}
|
|
2390
|
+
/**
 * Extracts scope variations from an npm package name.
 *
 * @param packageName - The npm package name (e.g., '@scope/name')
 * @returns Array of name variations
 */
function extractPackageNameVariations(packageName) {
    if (!packageName.startsWith('@')) {
        // Non-scoped package: the name itself is the only variation
        return [packageName];
    }
    // Scoped package: @scope/name -> name
    const separatorIndex = packageName.indexOf('/');
    if (separatorIndex === -1) {
        return [];
    }
    const unscopedName = packageName.slice(separatorIndex + 1);
    return unscopedName ? [unscopedName] : [];
}
|
|
2414
|
+
/**
 * Checks if a commit scope matches any of the project scopes.
 *
 * @param commitScope - The scope from a conventional commit
 * @param projectScopes - Array of scopes that match the project
 * @returns True if the commit scope matches the project
 *
 * @example
 * scopeMatchesProject('versioning', ['lib-versioning', 'versioning']) // true
 * scopeMatchesProject('logging', ['lib-versioning', 'versioning']) // false
 */
function scopeMatchesProject(commitScope, projectScopes) {
    if (!commitScope) {
        return false;
    }
    // Case-insensitive comparison
    const target = commitScope.toLowerCase();
    for (const candidate of projectScopes) {
        if (candidate.toLowerCase() === target) {
            return true;
        }
    }
    return false;
}
|
|
2433
|
+
/**
 * Checks if a commit scope should be explicitly excluded.
 *
 * Comparison is case-insensitive; a missing scope is never excluded.
 *
 * @param commitScope - The scope from a conventional commit
 * @param excludeScopes - Array of scopes to exclude
 * @returns True if the scope should be excluded
 */
function scopeIsExcluded(commitScope, excludeScopes) {
    if (!commitScope) {
        return false;
    }
    return excludeScopes
        .map((scope) => scope.toLowerCase())
        .includes(commitScope.toLowerCase());
}
|
|
2447
|
+
/**
 * Default scopes to exclude from changelogs.
 *
 * These represent repository-level or infrastructure changes
 * that typically don't belong in individual project changelogs.
 *
 * Used as the default for both classifyCommit and
 * createClassificationContext when no excludeScopes are supplied.
 * Matching is case-insensitive (see scopeIsExcluded).
 */
const DEFAULT_EXCLUDE_SCOPES = ['release', 'deps', 'workspace', 'root', 'repo', 'ci', 'build'];
|
|
2454
|
+
|
|
2455
|
+
/**
 * Classifies a single commit against a project.
 *
 * Implements the hybrid classification strategy:
 * 1. Check scope match (fast path)
 * 2. Check file touch (validation/catch-all)
 * 3. Check dependency touch (indirect)
 * 4. Fallback to excluded
 *
 * @param input - The commit to classify
 * @param context - Classification context with project info
 * @returns Classified commit with source attribution
 *
 * @example
 * const classified = classifyCommit(
 *   { commit: parsedCommit, raw: gitCommit },
 *   { projectScopes: ['versioning'], fileCommitHashes: new Set(['abc123']) }
 * )
 */
function classifyCommit(input, context) {
    const { commit, raw } = input;
    const { projectScopes, fileCommitHashes, dependencyCommitMap, infrastructureCommitHashes, excludeScopes = DEFAULT_EXCLUDE_SCOPES, includeScopes = [], } = context;
    const commitScope = commit.scope;
    const matchableScopes = [...projectScopes, ...includeScopes];
    const classify = (source, options) => createClassifiedCommit(commit, raw, source, options);
    // Explicitly excluded scopes short-circuit everything else.
    if (commitScope && scopeIsExcluded(commitScope, excludeScopes)) {
        return classify('excluded');
    }
    // Priority 1: scope names this project directly (fast path).
    if (commitScope && scopeMatchesProject(commitScope, matchableScopes)) {
        return classify('direct-scope');
    }
    // Priority 2: the commit modified files inside the project.
    if (fileCommitHashes.has(raw.hash)) {
        // A mismatched scope here usually means a typo or cross-cutting change;
        // an absent scope is still attributable via the touched files.
        return classify(commitScope ? 'direct-file' : 'unscoped-file');
    }
    // Priority 3: scope names a dependency AND the commit touched its files.
    if (commitScope && dependencyCommitMap) {
        const dependencyPath = findDependencyPath(commitScope, raw.hash, dependencyCommitMap);
        if (dependencyPath) {
            return classify('indirect-dependency', { dependencyPath });
        }
    }
    // File-based infrastructure match.
    if (infrastructureCommitHashes?.has(raw.hash)) {
        return classify('indirect-infra');
    }
    // Fallback: unscoped commits are global noise; scoped ones belong elsewhere.
    return classify(commitScope ? 'excluded' : 'unscoped-global');
}
|
|
2517
|
+
/**
 * Classifies multiple commits against a project.
 *
 * @param commits - Array of commits to classify
 * @param context - Classification context with project info
 * @returns Classification result with all commits and summary
 */
function classifyCommits(commits, context) {
    const allClassified = [];
    const included = [];
    const excluded = [];
    // Start from a zeroed per-source tally.
    const bySource = { ...createEmptyClassificationSummary().bySource };
    for (const input of commits) {
        const result = classifyCommit(input, context);
        allClassified.push(result);
        bySource[result.source] += 1;
        if (result.include) {
            included.push(result);
        }
        else {
            excluded.push(result);
        }
    }
    return {
        commits: allClassified,
        included,
        excluded,
        summary: {
            total: allClassified.length,
            included: included.length,
            excluded: excluded.length,
            bySource,
        },
    };
}
|
|
2554
|
+
/**
 * Finds a dependency path for a given scope and commit hash.
 *
 * Verifies both:
 * 1. The scope matches a dependency name (or variation)
 * 2. The commit hash is in that dependency's commit set
 *
 * This prevents false positives from mislabeled commits.
 *
 * @param scope - The commit scope
 * @param hash - The commit hash to verify
 * @param dependencyCommitMap - Map of dependencies to their commit hashes
 * @returns Dependency path if found and hash verified, undefined otherwise
 */
function findDependencyPath(scope, hash, dependencyCommitMap) {
    const target = scope.toLowerCase();
    for (const [dependencyName, commitHashes] of dependencyCommitMap) {
        const nameMatches = getDependencyVariations(dependencyName)
            .some((variation) => variation.toLowerCase() === target);
        // CRITICAL: the hash check guards against mislabeled commits —
        // the scope naming a dependency is not enough on its own.
        if (nameMatches && commitHashes.has(hash)) {
            return [dependencyName];
        }
    }
    return undefined;
}
|
|
2583
|
+
/**
 * Generates name variations for a dependency to enable flexible scope matching.
 *
 * @param depName - The dependency project or package name
 * @returns Array of name variations including stripped prefixes
 */
function getDependencyVariations(depName) {
    const variations = [depName];
    // lib-foo -> foo
    const libPrefix = 'lib-';
    if (depName.startsWith(libPrefix)) {
        variations.push(depName.slice(libPrefix.length));
    }
    // @scope/name -> name
    if (depName.startsWith('@')) {
        const slashIndex = depName.indexOf('/');
        if (slashIndex >= 0) {
            variations.push(depName.slice(slashIndex + 1));
        }
    }
    return variations;
}
|
|
2604
|
+
/**
 * Creates a classification context from common inputs.
 *
 * @param projectScopes - Scopes that match the project
 * @param fileCommitHashes - Set of commit hashes that touched project files
 * @param options - Additional context options
 * @param options.dependencyCommitMap - Map of dependency names to commit hashes touching them
 * @param options.infrastructureCommitHashes - Set of commit hashes touching infrastructure paths
 * @param options.excludeScopes - Scopes to explicitly exclude from classification
 * @param options.includeScopes - Additional scopes to include as direct matches
 * @returns A ClassificationContext object
 */
function createClassificationContext(projectScopes, fileCommitHashes, options) {
    const { dependencyCommitMap, infrastructureCommitHashes, excludeScopes, includeScopes } = options ?? {};
    return {
        projectScopes,
        fileCommitHashes,
        dependencyCommitMap,
        infrastructureCommitHashes,
        // Fall back to the shared default exclusion list when not overridden.
        excludeScopes: excludeScopes ?? DEFAULT_EXCLUDE_SCOPES,
        includeScopes,
    };
}
|
|
97
|
-
|
|
98
|
-
const FETCH_REGISTRY_STEP_ID = 'fetch-registry';
|
|
99
2626
|
/**
 * Creates a modified conventional commit with scope handling based on classification.
 *
 * For direct commits, the scope is removed (redundant in project changelog).
 * For indirect commits, the scope is preserved (provides context).
 *
 * @param classified - Commit with classification metadata determining scope display
 * @returns A conventional commit with appropriate scope handling
 */
function toChangelogCommit(classified) {
    const { commit, preserveScope } = classified;
    const shouldDropScope = !preserveScope && Boolean(commit.scope);
    if (!shouldDropScope) {
        return commit;
    }
    // Strip the scope and regenerate the raw message so it stays consistent.
    return {
        ...commit,
        scope: undefined,
        raw: rebuildRawWithoutScope(commit),
    };
}
|
|
2648
|
+
/**
 * Reconstructs a conventional commit message string without the scope portion.
 *
 * @param commit - The conventional commit to rebuild
 * @returns Reconstructed raw message with scope removed
 */
function rebuildRawWithoutScope(commit) {
    // The '!' marker is only emitted when breaking and there is no
    // BREAKING CHANGE description carried separately.
    const breakingMarker = commit.breaking && !commit.breakingDescription ? '!' : '';
    const parts = [`${commit.type}${breakingMarker}: ${commit.subject}`];
    if (commit.body) {
        parts.push(`\n\n${commit.body}`);
    }
    for (const { key, separator, value } of commit.footers) {
        parts.push(`\n${key}${separator}${value}`);
    }
    return parts.join('');
}
|
|
154
2669
|
|
|
155
2670
|
/**
 * Creates a matcher that checks if commit scope matches any of the given scopes.
 *
 * @param scopes - Scopes to match against (case-insensitive)
 * @returns Matcher that returns true if scope matches
 *
 * @example
 * const matcher = scopeMatcher(['ci', 'build', 'tooling'])
 * matcher({ scope: 'CI', ... }) // true
 * matcher({ scope: 'feat', ... }) // false
 */
function scopeMatcher(scopes) {
    // Normalize once at construction; the returned closure does O(1) lookups.
    const lookup = createSet(scopes.map((scope) => scope.toLowerCase()));
    return (ctx) => Boolean(ctx.scope) && lookup.has(ctx.scope.toLowerCase());
}
|
|
2689
|
+
/**
 * Creates a matcher that checks if commit scope starts with any of the given prefixes.
 *
 * @param prefixes - Scope prefixes to match (case-insensitive)
 * @returns Matcher that returns true if scope starts with any prefix
 *
 * @example
 * const matcher = scopePrefixMatcher(['tool-', 'infra-'])
 * matcher({ scope: 'tool-package', ... }) // true
 * matcher({ scope: 'lib-utils', ... }) // false
 */
function scopePrefixMatcher(prefixes) {
    const loweredPrefixes = prefixes.map((prefix) => prefix.toLowerCase());
    return (ctx) => {
        const scope = ctx.scope?.toLowerCase();
        if (!scope) {
            return false;
        }
        return loweredPrefixes.some((prefix) => scope.startsWith(prefix));
    };
}
|
|
2709
|
+
/**
 * Combines matchers with OR logic - returns true if ANY matcher matches.
 *
 * Short-circuits on the first matcher that returns true.
 *
 * @param matchers - Matchers to combine
 * @returns Combined matcher
 *
 * @example
 * const combined = anyOf(
 *   scopeMatcher(['ci', 'build']),
 *   messageMatcher(['[infra]']),
 *   custom((ctx) => ctx.scope?.startsWith('tool-'))
 * )
 */
function anyOf(...matchers) {
    return (ctx) => {
        for (const matcher of matchers) {
            if (matcher(ctx)) {
                return true;
            }
        }
        return false;
    };
}
|
|
169
2725
|
/**
 * Matches common CI/CD scopes.
 *
 * Matches: ci, cd, build, pipeline, workflow, actions
 */
const CI_SCOPE_MATCHER = scopeMatcher(['ci', 'cd', 'build', 'pipeline', 'workflow', 'actions']);
/**
 * Matches common tooling/workspace scopes.
 *
 * Matches: tooling, workspace, monorepo, nx, root
 */
const TOOLING_SCOPE_MATCHER = scopeMatcher(['tooling', 'workspace', 'monorepo', 'nx', 'root']);
/**
 * Matches tool-prefixed scopes (e.g., tool-package, tool-scripts).
 */
const TOOL_PREFIX_MATCHER = scopePrefixMatcher(['tool-']);
/**
 * Combined matcher for common infrastructure patterns.
 *
 * Combines CI, tooling, and tool-prefix matchers.
 *
 * NOTE(review): the result of this call is discarded — this looks like a
 * bundler artifact of a tree-shaken export; confirm against the source module.
 */
anyOf(CI_SCOPE_MATCHER, TOOLING_SCOPE_MATCHER, TOOL_PREFIX_MATCHER);
|
|
2747
|
+
/**
 * Builds a combined matcher from infrastructure configuration.
 *
 * Combines scope-based matching with any custom matcher using OR logic.
 * Path-based matching is handled separately via git queries.
 *
 * @param config - Infrastructure configuration
 * @returns Combined matcher, or null if no matchers configured
 *
 * @example
 * const matcher = buildInfrastructureMatcher({
 *   scopes: ['ci', 'build'],
 *   matcher: (ctx) => ctx.scope?.startsWith('tool-')
 * })
 */
function buildInfrastructureMatcher(config) {
    const activeMatchers = [];
    // Scope-based matcher only when scopes are configured and non-empty.
    if (config.scopes && config.scopes.length > 0) {
        activeMatchers.push(scopeMatcher(config.scopes));
    }
    // Caller-supplied custom matcher, if any.
    if (config.matcher) {
        activeMatchers.push(config.matcher);
    }
    switch (activeMatchers.length) {
        case 0:
            return null;
        case 1:
            return activeMatchers[0];
        default:
            return anyOf(...activeMatchers);
    }
}
|
|
2781
|
+
/**
 * Creates match context from a git commit.
 *
 * Extracts scope from conventional commit message if present.
 *
 * @param commit - Git commit to create context for
 * @param scope - Pre-parsed scope (optional, saves re-parsing)
 * @returns Match context for use with matchers
 */
function createMatchContext(commit, scope) {
    const { subject, message } = commit;
    return { commit, scope, subject, message };
}
|
|
178
2798
|
|
|
179
2799
|
/**
|
|
180
2800
|
* Replaces all occurrences of a character in a string.
|
|
@@ -600,72 +3220,158 @@ function splitLines(message) {
|
|
|
600
3220
|
return lines;
|
|
601
3221
|
}
|
|
602
3222
|
|
|
3223
|
+
/**
 * Default changelog filename.
 */
const DEFAULT_CHANGELOG_FILENAME = 'CHANGELOG.md';
/**
 * Default scope filtering configuration.
 *
 * Uses DEFAULT_EXCLUDE_SCOPES from commits/classify to ensure consistency
 * between flow-level filtering and commit classification.
 *
 * Infrastructure matching is disabled by default (both infrastructure and
 * infrastructureMatcher are undefined), and dependency-change tracking is off.
 */
const DEFAULT_SCOPE_FILTERING_CONFIG = {
    // 'hybrid' combines scope matching with file-touch analysis (see classifyCommit)
    strategy: 'hybrid',
    includeScopes: [],
    excludeScopes: DEFAULT_EXCLUDE_SCOPES,
    trackDependencyChanges: false,
    projectPrefixes: DEFAULT_PROJECT_PREFIXES,
    infrastructure: undefined,
    infrastructureMatcher: undefined,
};
|
|
3242
|
+
|
|
603
3243
|
const ANALYZE_COMMITS_STEP_ID = 'analyze-commits';
|
|
604
3244
|
/**
|
|
605
3245
|
* Creates the analyze-commits step.
|
|
606
3246
|
*
|
|
607
3247
|
* This step:
|
|
608
|
-
* 1.
|
|
609
|
-
* 2.
|
|
610
|
-
* 3.
|
|
611
|
-
* 4.
|
|
3248
|
+
* 1. Uses publishedCommit from npm registry (set by fetch-registry step)
|
|
3249
|
+
* 2. Verifies the commit is reachable from current HEAD
|
|
3250
|
+
* 3. Gets all commits since that commit (or recent commits if first release/fallback)
|
|
3251
|
+
* 4. Parses each commit using conventional commit format
|
|
3252
|
+
* 5. Classifies commits based on scope filtering strategy
|
|
3253
|
+
* 6. Filters to only release-worthy commits that belong to this project
|
|
612
3254
|
*
|
|
613
3255
|
* State updates:
|
|
614
|
-
* -
|
|
615
|
-
* - commits: Array of parsed conventional commits
|
|
3256
|
+
* - effectiveBaseCommit: The verified base commit (null if fallback was used)
|
|
3257
|
+
* - commits: Array of parsed conventional commits (for backward compatibility)
|
|
3258
|
+
* - classificationResult: Full classification result with source attribution
|
|
616
3259
|
*
|
|
617
3260
|
* @returns A FlowStep that analyzes commits
|
|
618
3261
|
*/
|
|
619
3262
|
function createAnalyzeCommitsStep() {
|
|
620
3263
|
return createStep(ANALYZE_COMMITS_STEP_ID, 'Analyze Commits', async (ctx) => {
|
|
621
|
-
const { git, projectName, packageName, config, logger, state } = ctx;
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
3264
|
+
const { git, projectName, projectRoot, packageName, workspaceRoot, config, logger, state } = ctx;
|
|
3265
|
+
const maxFallback = config.maxCommitFallback ?? 500;
|
|
3266
|
+
// Use publishedCommit from registry (set by fetch-registry step)
|
|
3267
|
+
const { publishedCommit, isFirstRelease } = state;
|
|
3268
|
+
let rawCommits;
|
|
3269
|
+
let effectiveBaseCommit = null;
|
|
3270
|
+
if (publishedCommit && !isFirstRelease) {
|
|
3271
|
+
// CRITICAL: Verify the commit exists and is reachable from HEAD
|
|
3272
|
+
if (git.commitReachableFromHead(publishedCommit)) {
|
|
3273
|
+
rawCommits = git.getCommitsSince(publishedCommit);
|
|
3274
|
+
effectiveBaseCommit = publishedCommit;
|
|
3275
|
+
logger.debug(`Found ${rawCommits.length} commits since ${publishedCommit.slice(0, 7)}`);
|
|
631
3276
|
}
|
|
632
3277
|
else {
|
|
633
|
-
//
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
3278
|
+
// GRACEFUL DEGRADATION: Commit not in history (rebase/force push occurred)
|
|
3279
|
+
logger.warn(`Published commit ${publishedCommit.slice(0, 7)} not found in history. ` +
|
|
3280
|
+
`This may indicate a rebase or force push occurred after publishing v${state.publishedVersion}. ` +
|
|
3281
|
+
`Falling back to recent commit analysis.`);
|
|
3282
|
+
rawCommits = git.getCommitLog({ maxCount: maxFallback });
|
|
3283
|
+
// effectiveBaseCommit stays null - no compare URL will be generated
|
|
639
3284
|
}
|
|
640
3285
|
}
|
|
641
|
-
// Get commits
|
|
642
|
-
let rawCommits;
|
|
643
|
-
if (lastReleaseTag) {
|
|
644
|
-
rawCommits = git.getCommitsSince(lastReleaseTag);
|
|
645
|
-
logger.debug(`Found ${rawCommits.length} commits since ${lastReleaseTag}`);
|
|
646
|
-
}
|
|
647
3286
|
else {
|
|
648
|
-
// First release
|
|
649
|
-
rawCommits = git.getCommitLog({ maxCount:
|
|
3287
|
+
// First release or no published version
|
|
3288
|
+
rawCommits = git.getCommitLog({ maxCount: maxFallback });
|
|
650
3289
|
logger.debug(`First release - analyzing up to ${rawCommits.length} commits`);
|
|
651
3290
|
}
|
|
652
|
-
//
|
|
653
|
-
const
|
|
3291
|
+
// Get scope filtering configuration
|
|
3292
|
+
const scopeFilteringConfig = {
|
|
3293
|
+
...DEFAULT_SCOPE_FILTERING_CONFIG,
|
|
3294
|
+
...config.scopeFiltering,
|
|
3295
|
+
};
|
|
3296
|
+
const strategy = resolveStrategy(scopeFilteringConfig.strategy ?? 'hybrid', rawCommits);
|
|
3297
|
+
// Parse commits with conventional commit format
|
|
654
3298
|
const releaseTypes = config.releaseTypes ?? ['feat', 'fix', 'perf', 'revert'];
|
|
3299
|
+
const parsedCommits = [];
|
|
655
3300
|
for (const rawCommit of rawCommits) {
|
|
656
3301
|
const parsed = parseConventionalCommit(rawCommit.message);
|
|
657
3302
|
if (parsed.type && releaseTypes.includes(parsed.type)) {
|
|
658
|
-
|
|
3303
|
+
parsedCommits.push({
|
|
3304
|
+
commit: parsed,
|
|
3305
|
+
raw: {
|
|
3306
|
+
hash: rawCommit.hash,
|
|
3307
|
+
shortHash: rawCommit.hash.slice(0, 7),
|
|
3308
|
+
message: rawCommit.message,
|
|
3309
|
+
subject: parsed.subject ?? rawCommit.message.split('\n')[0],
|
|
3310
|
+
body: parsed.body ?? '',
|
|
3311
|
+
authorName: '',
|
|
3312
|
+
authorEmail: '',
|
|
3313
|
+
authorDate: '',
|
|
3314
|
+
committerName: '',
|
|
3315
|
+
committerEmail: '',
|
|
3316
|
+
commitDate: '',
|
|
3317
|
+
parents: [],
|
|
3318
|
+
refs: [],
|
|
3319
|
+
},
|
|
3320
|
+
});
|
|
659
3321
|
}
|
|
660
3322
|
}
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
3323
|
+
// Build file commit hashes for hybrid/file-only strategies
|
|
3324
|
+
let fileCommitHashes = createSet();
|
|
3325
|
+
if (strategy === 'hybrid' || strategy === 'file-only') {
|
|
3326
|
+
// Get commits that touched project files using path filter
|
|
3327
|
+
const relativePath = getRelativePath(workspaceRoot, projectRoot);
|
|
3328
|
+
const pathFilteredCommits = effectiveBaseCommit
|
|
3329
|
+
? git.getCommitsSince(effectiveBaseCommit, { path: relativePath })
|
|
3330
|
+
: git.getCommitLog({ maxCount: maxFallback, path: relativePath });
|
|
3331
|
+
fileCommitHashes = createSet(pathFilteredCommits.map((c) => c.hash));
|
|
3332
|
+
logger.debug(`Found ${fileCommitHashes.size} commits touching ${relativePath}`);
|
|
3333
|
+
}
|
|
3334
|
+
// Derive project scopes
|
|
3335
|
+
const projectScopes = deriveProjectScopes({
|
|
3336
|
+
projectName,
|
|
3337
|
+
packageName,
|
|
3338
|
+
additionalScopes: scopeFilteringConfig.includeScopes,
|
|
3339
|
+
prefixes: scopeFilteringConfig.projectPrefixes,
|
|
3340
|
+
});
|
|
3341
|
+
logger.debug(`Project scopes: ${projectScopes.join(', ')}`);
|
|
3342
|
+
// Build infrastructure commit hashes for file-based infrastructure detection
|
|
3343
|
+
const infrastructureCommitHashes = buildInfrastructureCommitHashes(git, effectiveBaseCommit, rawCommits, parsedCommits, scopeFilteringConfig, logger, maxFallback);
|
|
3344
|
+
// Build dependency commit map if tracking is enabled (Phase 4)
|
|
3345
|
+
let dependencyCommitMap;
|
|
3346
|
+
if (scopeFilteringConfig.trackDependencyChanges) {
|
|
3347
|
+
dependencyCommitMap = buildDependencyCommitMap(git, workspaceRoot, projectName, effectiveBaseCommit, logger, maxFallback);
|
|
3348
|
+
}
|
|
3349
|
+
// Create classification context
|
|
3350
|
+
const classificationContext = createClassificationContext(projectScopes, fileCommitHashes, {
|
|
3351
|
+
excludeScopes: scopeFilteringConfig.excludeScopes,
|
|
3352
|
+
includeScopes: scopeFilteringConfig.includeScopes,
|
|
3353
|
+
infrastructureCommitHashes,
|
|
3354
|
+
dependencyCommitMap,
|
|
3355
|
+
});
|
|
3356
|
+
// Classify commits
|
|
3357
|
+
const classificationResult = classifyCommits(parsedCommits, classificationContext);
|
|
3358
|
+
// Apply strategy-specific filtering
|
|
3359
|
+
const includedCommits = applyStrategyFilter(classificationResult.included, strategy);
|
|
3360
|
+
// Extract conventional commits for backward compatibility
|
|
3361
|
+
// Use toChangelogCommit to properly handle scope based on classification
|
|
3362
|
+
const commits = includedCommits.map((c) => toChangelogCommit(c));
|
|
3363
|
+
// Build message with classification summary
|
|
3364
|
+
const { summary } = classificationResult;
|
|
3365
|
+
const message = buildSummaryMessage(commits.length, rawCommits.length, summary, strategy);
|
|
3366
|
+
logger.debug(`Classification breakdown: direct-scope=${summary.bySource['direct-scope']}, ` +
|
|
3367
|
+
`direct-file=${summary.bySource['direct-file']}, unscoped-file=${summary.bySource['unscoped-file']}, ` +
|
|
3368
|
+
`excluded=${summary.bySource['excluded']}`);
|
|
664
3369
|
return {
|
|
665
3370
|
status: 'success',
|
|
666
3371
|
stateUpdates: {
|
|
667
|
-
|
|
3372
|
+
effectiveBaseCommit,
|
|
668
3373
|
commits,
|
|
3374
|
+
classificationResult,
|
|
669
3375
|
},
|
|
670
3376
|
message,
|
|
671
3377
|
};
|
|
@@ -673,6 +3379,376 @@ function createAnalyzeCommitsStep() {
|
|
|
673
3379
|
dependsOn: ['fetch-registry'],
|
|
674
3380
|
});
|
|
675
3381
|
}
|
|
3382
|
+
/**
 * Resolves the filtering strategy, handling 'inferred' by analyzing commits.
 *
 * Inference measures how much of the history uses conventional-commit scopes:
 * a mostly-scoped history can rely on scopes alone, a mostly-unscoped history
 * must rely on file paths, and anything in between combines both signals.
 *
 * @param strategy - The configured scope filtering strategy
 * @param commits - The commits to analyze for strategy inference
 * @returns The resolved strategy (never 'inferred')
 */
function resolveStrategy(strategy, commits) {
    if (strategy !== 'inferred') {
        return strategy;
    }
    // Count how many commits carry a conventional-commit scope.
    let withScope = 0;
    for (const entry of commits) {
        if (parseConventionalCommit(entry.message).scope) {
            withScope += 1;
        }
    }
    const ratio = commits.length === 0 ? 0 : withScope / commits.length;
    // >70% scoped -> scope-only is viable; <30% scoped -> file-only is safer.
    if (ratio > 0.7) {
        return 'scope-only';
    }
    if (ratio < 0.3) {
        return 'file-only';
    }
    return 'hybrid';
}
|
|
3414
|
+
/**
 * Applies strategy-specific filtering to classified commits.
 *
 * @param commits - The classified commits to filter
 * @param strategy - The resolved filtering strategy to apply
 * @returns Filtered commits based on the strategy
 */
function applyStrategyFilter(commits, strategy) {
    if (strategy === 'scope-only') {
        // Keep only commits attributed via an explicit project scope.
        return commits.filter((entry) => entry.source === 'direct-scope');
    }
    if (strategy === 'file-only') {
        // Keep only commits attributed via touched files.
        const fileSources = ['direct-file', 'unscoped-file'];
        return commits.filter((entry) => fileSources.includes(entry.source));
    }
    // 'hybrid' (and any unrecognized strategy): keep everything that survived
    // classification — exclusions were already applied in classifyCommits.
    return commits;
}
|
|
3435
|
+
/**
 * Gets the relative path from workspace root to project root.
 *
 * The match is anchored at a path-separator boundary so that a sibling
 * directory sharing a name prefix (e.g. workspace '/ws/app' vs project
 * '/ws/app-extra') is not mistaken for a nested project. The previous
 * implementation used a bare startsWith prefix slice, which returned
 * '-extra' for that case.
 *
 * @param workspaceRoot - The absolute path to the workspace root
 * @param projectRoot - The absolute path to the project root
 * @returns The relative path from workspace to project ('' when equal;
 *          the unchanged projectRoot when it is not under workspaceRoot)
 */
function getRelativePath(workspaceRoot, projectRoot) {
    if (projectRoot === workspaceRoot) {
        return '';
    }
    // Require the prefix to end at a separator so partial-name matches fail.
    const prefix = workspaceRoot.endsWith('/') ? workspaceRoot : `${workspaceRoot}/`;
    if (projectRoot.startsWith(prefix)) {
        return projectRoot.slice(prefix.length);
    }
    return projectRoot;
}
|
|
3448
|
+
/**
 * Builds a summary message for the step result.
 *
 * Fix: the non-empty path previously assembled its message by joining parts
 * with spaces, producing '(5 total strategy: hybrid)' — missing the comma
 * used by the zero-commit message. Both paths now share the same
 * '(N total, strategy: S)' format.
 *
 * @param includedCount - Number of commits included in the release
 * @param totalCount - Total number of commits analyzed
 * @param summary - Classification summary object (currently unused here;
 *                  kept for interface compatibility with callers)
 * @param summary.bySource - Count of commits by source type
 * @param strategy - The filtering strategy used
 * @returns A human-readable summary message
 */
function buildSummaryMessage(includedCount, totalCount, summary, strategy) {
    if (includedCount === 0) {
        return `No releasable commits found for this project (${totalCount} total, strategy: ${strategy})`;
    }
    return `Found ${includedCount} releasable commits (${totalCount} total, strategy: ${strategy})`;
}
|
|
3465
|
+
/**
 * Builds a set of commit hashes that touched infrastructure paths or match infrastructure criteria.
 *
 * Supports multiple detection methods combined with OR logic:
 * 1. Path-based: Commits touching configured infrastructure paths (via git)
 * 2. Scope-based: Commits with scopes matching infrastructure.scopes
 * 3. Custom matcher: User-provided matching logic
 *
 * @param git - Git client for querying commits by path
 * @param baseCommit - Base commit hash for commit range (null for first release/fallback)
 * @param rawCommits - All raw commits being analyzed
 * @param parsedCommits - Parsed commits with conventional commit data
 * @param config - Scope filtering configuration
 * @param logger - Logger with debug method for output
 * @param logger.debug - Debug logging function
 * @param maxFallback - Maximum commits to query when baseCommit is null
 * @returns Set of commit hashes classified as infrastructure, or undefined when
 *          no infrastructure detection is configured at all
 */
function buildInfrastructureCommitHashes(git, baseCommit, rawCommits, parsedCommits, config, logger, maxFallback) {
    // Collect all infrastructure commit hashes.
    // NOTE: createSet/createMap return immutable collections here — add()/set()
    // return a new instance, hence the reassignments below.
    let infraHashes = createSet();
    // Method 1: Path-based detection (query git for commits touching infra paths)
    const infraPaths = config.infrastructure?.paths ?? [];
    if (infraPaths.length > 0) {
        for (const infraPath of infraPaths) {
            // Without a verified base commit, fall back to a bounded recent window.
            const pathCommits = baseCommit
                ? git.getCommitsSince(baseCommit, { path: infraPath })
                : git.getCommitLog({ maxCount: maxFallback, path: infraPath });
            for (const commit of pathCommits) {
                infraHashes = infraHashes.add(commit.hash);
            }
        }
        logger.debug(`Found ${infraHashes.size} commits touching infrastructure paths: ${infraPaths.join(', ')}`);
    }
    // Method 2 & 3: Scope-based and custom matcher detection.
    // Build a combined (OR) matcher from infrastructure config and/or custom matcher.
    const configMatcher = config.infrastructure ? buildInfrastructureMatcher(config.infrastructure) : null;
    const customMatcher = config.infrastructureMatcher;
    const combinedMatcher = combineMatcher(configMatcher, customMatcher);
    if (combinedMatcher) {
        // Build a lookup for parsed commits by hash so each raw commit's
        // conventional scope (if any) can be passed to the matcher.
        let parsedByHash = createMap();
        for (const parsed of parsedCommits) {
            parsedByHash = parsedByHash.set(parsed.raw.hash, parsed);
        }
        // Evaluate each raw commit against the matcher
        for (const rawCommit of rawCommits) {
            // Skip if already matched by path
            if (infraHashes.has(rawCommit.hash))
                continue;
            // Get parsed scope if available
            const parsed = parsedByHash.get(rawCommit.hash);
            const scope = parsed?.commit.scope;
            // Create match context and evaluate
            const context = createMatchContext(rawCommit, scope);
            if (combinedMatcher(context)) {
                infraHashes = infraHashes.add(rawCommit.hash);
            }
        }
        logger.debug(`Infrastructure matcher found ${infraHashes.size} total commits`);
    }
    // Return undefined when no infrastructure detection was configured at all,
    // so callers can distinguish "not configured" from "configured but empty".
    if (infraHashes.size === 0 && infraPaths.length === 0 && !combinedMatcher) {
        return undefined;
    }
    return infraHashes;
}
|
|
3532
|
+
/**
 * Combines two optional matchers into one using OR logic.
 *
 * When only one matcher is supplied it is returned as-is (same reference),
 * so identity comparisons against the input remain valid.
 *
 * @param a - First matcher (may be null)
 * @param b - Second matcher (may be undefined)
 * @returns Combined matcher or null if neither provided
 */
function combineMatcher(a, b) {
    if (!a) {
        return b ?? null;
    }
    if (!b) {
        return a;
    }
    return (ctx) => a(ctx) || b(ctx);
}
|
|
3545
|
+
/**
 * Builds a map of dependency project names to the commit hashes that touched them.
 *
 * This enables accurate indirect-dependency classification by verifying that:
 * 1. A commit's scope matches a dependency name
 * 2. The commit actually touched that dependency's files (hash in set)
 *
 * Uses lib-project-scope for dependency discovery, avoiding hard NX dependency.
 *
 * @param git - Git client for querying commits by path
 * @param workspaceRoot - Absolute path to workspace root
 * @param projectName - Name of the project being versioned
 * @param baseCommit - Base commit hash for commit range (null for first release/fallback)
 * @param logger - Logger with debug method for output
 * @param logger.debug - Debug logging function
 * @param maxFallback - Maximum commits to query when baseCommit is null
 * @returns Map of dependency names to commit hashes touching that dependency
 *          (empty when the project has no dependencies or discovery fails)
 */
function buildDependencyCommitMap(git, workspaceRoot, projectName, baseCommit, logger, maxFallback) {
    // NOTE: createMap/createSet return immutable collections — set() returns a
    // new map, hence the reassignment below.
    let dependencyMap = createMap();
    try {
        // Discover all projects in workspace using lib-project-scope
        // This gracefully handles NX and non-NX workspaces
        const projects = discoverNxProjects(workspaceRoot);
        const projectGraph = buildSimpleProjectGraph(workspaceRoot, projects);
        // Get dependencies for the current project
        const projectDeps = projectGraph.dependencies[projectName] ?? [];
        if (projectDeps.length === 0) {
            logger.debug(`No dependencies found for project: ${projectName}`);
            return dependencyMap;
        }
        logger.debug(`Found ${projectDeps.length} dependencies for ${projectName}: ${projectDeps.map((d) => d.target).join(', ')}`);
        // For each dependency, find commits that touched its files
        for (const dep of projectDeps) {
            const depNode = projectGraph.nodes[dep.target];
            if (!depNode?.data?.root) {
                // A dependency without a known root cannot be path-queried.
                logger.debug(`Skipping dependency ${dep.target}: no root path found`);
                continue;
            }
            const depRoot = depNode.data.root;
            // Query git for commits touching this dependency's path
            // (bounded recent window when no verified base commit exists).
            const depCommits = baseCommit
                ? git.getCommitsSince(baseCommit, { path: depRoot })
                : git.getCommitLog({ maxCount: maxFallback, path: depRoot });
            if (depCommits.length > 0) {
                const hashSet = createSet(depCommits.map((c) => c.hash));
                dependencyMap = dependencyMap.set(dep.target, hashSet);
                logger.debug(`Dependency ${dep.target}: ${depCommits.length} commits at ${depRoot}`);
            }
        }
    }
    catch (error) {
        // Graceful degradation: if project discovery fails, return empty map
        // This allows versioning to proceed without dependency tracking
        const message = error instanceof Error ? error.message : String(error);
        logger.debug(`Failed to build dependency map: ${message}`);
    }
    return dependencyMap;
}
|
|
3604
|
+
|
|
3605
|
+
/**
 * Safe copies of Number built-in methods and constants.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/number
 */
// Capture references at module initialization time
const _parseInt = globalThis.parseInt;
const _isNaN = globalThis.isNaN;
// ============================================================================
// Parsing
// ============================================================================
/**
 * (Safe copy) Parses a string and returns an integer.
 * NOTE: shadows the global parseInt within this module; behavior is identical.
 */
const parseInt = _parseInt;
// ============================================================================
// Global Type Checking (legacy, less strict)
// ============================================================================
/**
 * (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
 */
const globalIsNaN = _isNaN;
|
|
3630
|
+
|
|
3631
|
+
/**
 * Compares two semantic versions.
 *
 * Precedence follows the SemVer rules: major/minor/patch are compared
 * numerically in order; a release outranks any prerelease of the same core
 * version; prerelease identifiers are compared pairwise left to right.
 *
 * @param a - First version
 * @param b - Second version
 * @returns -1 if a < b, 0 if a == b, 1 if a > b
 *
 * @example
 * compare(parseVersion('1.0.0'), parseVersion('2.0.0')) // -1
 * compare(parseVersion('1.0.0'), parseVersion('1.0.0')) // 0
 * compare(parseVersion('2.0.0'), parseVersion('1.0.0')) // 1
 */
function compare(a, b) {
    // Core numbers take precedence in order: major, then minor, then patch.
    for (const field of ['major', 'minor', 'patch']) {
        if (a[field] !== b[field]) {
            return a[field] < b[field] ? -1 : 1;
        }
    }
    const aPre = a.prerelease;
    const bPre = b.prerelease;
    // A release (no prerelease identifiers) outranks any prerelease.
    if (aPre.length === 0 && bPre.length > 0) {
        return 1;
    }
    if (bPre.length === 0 && aPre.length > 0) {
        return -1;
    }
    // Both are prereleases: compare identifiers pairwise; a shorter identifier
    // list has lower precedence when all shared identifiers are equal.
    const limit = max(aPre.length, bPre.length);
    for (let i = 0; i < limit; i++) {
        const left = aPre[i];
        const right = bPre[i];
        if (left === undefined || right === undefined) {
            if (left === right) {
                // Both absent (sparse arrays) — no ordering signal.
                continue;
            }
            return left === undefined ? -1 : 1;
        }
        const result = compareIdentifiers(left, right);
        if (result !== 0) {
            return result;
        }
    }
    return 0;
}
|
|
3685
|
+
/**
 * Checks if a > b.
 *
 * @param a - First version to compare
 * @param b - Second version to compare
 * @returns True if a is greater than b
 */
function gt(a, b) {
    // compare() only returns -1, 0, or 1, so > 0 means strictly greater.
    return compare(a, b) > 0;
}
|
|
3695
|
+
// ============================================================================
|
|
3696
|
+
// Internal helpers
|
|
3697
|
+
// ============================================================================
|
|
3698
|
+
/**
 * Compares two prerelease identifiers.
 * Numeric identifiers have lower precedence than alphanumeric.
 * Numeric identifiers are compared numerically.
 * Alphanumeric identifiers are compared lexically.
 *
 * @param a - First prerelease identifier
 * @param b - Second prerelease identifier
 * @returns -1 if a < b, 0 if equal, 1 if a > b
 */
function compareIdentifiers(a, b) {
    const aNumeric = isNumeric(a);
    const bNumeric = isNumeric(b);
    // Mixed kinds: the numeric identifier always sorts lower.
    if (aNumeric !== bNumeric) {
        return aNumeric ? -1 : 1;
    }
    if (aNumeric) {
        // Both numeric — compare as integers.
        const x = parseInt(a, 10);
        const y = parseInt(b, 10);
        if (x === y) {
            return 0;
        }
        return x < y ? -1 : 1;
    }
    // Both alphanumeric — compare lexically.
    if (a === b) {
        return 0;
    }
    return a < b ? -1 : 1;
}
/**
 * Checks if a string consists only of digits.
 *
 * @param str - String to check for numeric content
 * @returns True if string contains only digits
 */
function isNumeric(str) {
    if (str.length === 0) {
        return false;
    }
    for (const ch of str) {
        // Any character outside '0'..'9' disqualifies the string.
        if (ch < '0' || ch > '9') {
            return false;
        }
    }
    return true;
}
|
|
676
3752
|
|
|
677
3753
|
/**
|
|
678
3754
|
* Converts a SemVer to its canonical string representation.
|
|
@@ -691,32 +3767,6 @@ function format(version) {
|
|
|
691
3767
|
return result;
|
|
692
3768
|
}
|
|
693
3769
|
|
|
694
|
-
/**
|
|
695
|
-
* Safe copies of Number built-in methods and constants.
|
|
696
|
-
*
|
|
697
|
-
* These references are captured at module initialization time to protect against
|
|
698
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
699
|
-
*
|
|
700
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/number
|
|
701
|
-
*/
|
|
702
|
-
// Capture references at module initialization time
|
|
703
|
-
const _parseInt = globalThis.parseInt;
|
|
704
|
-
const _isNaN = globalThis.isNaN;
|
|
705
|
-
// ============================================================================
|
|
706
|
-
// Parsing
|
|
707
|
-
// ============================================================================
|
|
708
|
-
/**
|
|
709
|
-
* (Safe copy) Parses a string and returns an integer.
|
|
710
|
-
*/
|
|
711
|
-
const parseInt = _parseInt;
|
|
712
|
-
// ============================================================================
|
|
713
|
-
// Global Type Checking (legacy, less strict)
|
|
714
|
-
// ============================================================================
|
|
715
|
-
/**
|
|
716
|
-
* (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
|
|
717
|
-
*/
|
|
718
|
-
const globalIsNaN = _isNaN;
|
|
719
|
-
|
|
720
3770
|
/**
|
|
721
3771
|
* Creates a new SemVer object.
|
|
722
3772
|
*
|
|
@@ -1194,7 +4244,7 @@ function createCalculateBumpStep() {
|
|
|
1194
4244
|
message: 'No version bump needed',
|
|
1195
4245
|
};
|
|
1196
4246
|
}
|
|
1197
|
-
//
|
|
4247
|
+
// Parse versions for comparison
|
|
1198
4248
|
const current = parseVersion(currentVersion ?? '0.0.0');
|
|
1199
4249
|
if (!current.success || !current.version) {
|
|
1200
4250
|
return {
|
|
@@ -1203,6 +4253,27 @@ function createCalculateBumpStep() {
|
|
|
1203
4253
|
message: `Could not parse current version: ${currentVersion}`,
|
|
1204
4254
|
};
|
|
1205
4255
|
}
|
|
4256
|
+
const { publishedVersion } = state;
|
|
4257
|
+
const published = parseVersion(publishedVersion ?? '0.0.0');
|
|
4258
|
+
// Detect pending publication state: currentVersion > publishedVersion
|
|
4259
|
+
// This means a previous bump happened but was never published
|
|
4260
|
+
const isPendingPublication = published.success && published.version && publishedVersion != null && gt(current.version, published.version);
|
|
4261
|
+
if (isPendingPublication && published.version) {
|
|
4262
|
+
// ALWAYS calculate from publishedVersion - commits may have changed
|
|
4263
|
+
const next = increment(published.version, bumpType);
|
|
4264
|
+
const nextVersion = format(next);
|
|
4265
|
+
logger.info(`Pending publication detected: recalculating from ${publishedVersion} → ${nextVersion}`);
|
|
4266
|
+
return {
|
|
4267
|
+
status: 'success',
|
|
4268
|
+
stateUpdates: {
|
|
4269
|
+
bumpType,
|
|
4270
|
+
nextVersion,
|
|
4271
|
+
isPendingPublication: true,
|
|
4272
|
+
},
|
|
4273
|
+
message: `${bumpType} bump (pending): ${publishedVersion} → ${nextVersion}`,
|
|
4274
|
+
};
|
|
4275
|
+
}
|
|
4276
|
+
// Normal path: increment from currentVersion
|
|
1206
4277
|
const next = increment(current.version, bumpType);
|
|
1207
4278
|
const nextVersion = format(next);
|
|
1208
4279
|
return {
|
|
@@ -1257,24 +4328,6 @@ function createCheckIdempotencyStep() {
|
|
|
1257
4328
|
});
|
|
1258
4329
|
}
|
|
1259
4330
|
|
|
1260
|
-
/**
|
|
1261
|
-
* Safe copies of Date built-in via factory function and static methods.
|
|
1262
|
-
*
|
|
1263
|
-
* Since constructors cannot be safely captured via Object.assign, this module
|
|
1264
|
-
* provides a factory function that uses Reflect.construct internally.
|
|
1265
|
-
*
|
|
1266
|
-
* These references are captured at module initialization time to protect against
|
|
1267
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1268
|
-
*
|
|
1269
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/date
|
|
1270
|
-
*/
|
|
1271
|
-
// Capture references at module initialization time
|
|
1272
|
-
const _Date = globalThis.Date;
|
|
1273
|
-
const _Reflect$1 = globalThis.Reflect;
|
|
1274
|
-
function createDate(...args) {
|
|
1275
|
-
return _Reflect$1.construct(_Date, args);
|
|
1276
|
-
}
|
|
1277
|
-
|
|
1278
4331
|
/**
|
|
1279
4332
|
* Creates a new changelog item.
|
|
1280
4333
|
*
|
|
@@ -1289,6 +4342,8 @@ function createChangelogItem(description, options) {
|
|
|
1289
4342
|
commits: options?.commits ?? [],
|
|
1290
4343
|
references: options?.references ?? [],
|
|
1291
4344
|
breaking: options?.breaking ?? false,
|
|
4345
|
+
source: options?.source,
|
|
4346
|
+
indirect: options?.indirect,
|
|
1292
4347
|
};
|
|
1293
4348
|
}
|
|
1294
4349
|
/**
|
|
@@ -1412,96 +4467,6 @@ function getSectionType(heading) {
|
|
|
1412
4467
|
return SECTION_TYPE_MAP[normalized] ?? 'other';
|
|
1413
4468
|
}
|
|
1414
4469
|
|
|
1415
|
-
/**
|
|
1416
|
-
* Safe copies of Map built-in via factory function.
|
|
1417
|
-
*
|
|
1418
|
-
* Since constructors cannot be safely captured via Object.assign, this module
|
|
1419
|
-
* provides a factory function that uses Reflect.construct internally.
|
|
1420
|
-
*
|
|
1421
|
-
* These references are captured at module initialization time to protect against
|
|
1422
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1423
|
-
*
|
|
1424
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/map
|
|
1425
|
-
*/
|
|
1426
|
-
// Capture references at module initialization time
|
|
1427
|
-
const _Map = globalThis.Map;
|
|
1428
|
-
const _Reflect = globalThis.Reflect;
|
|
1429
|
-
/**
|
|
1430
|
-
* (Safe copy) Creates a new Map using the captured Map constructor.
|
|
1431
|
-
* Use this instead of `new Map()`.
|
|
1432
|
-
*
|
|
1433
|
-
* @param iterable - Optional iterable of key-value pairs.
|
|
1434
|
-
* @returns A new Map instance.
|
|
1435
|
-
*/
|
|
1436
|
-
const createMap = (iterable) => _Reflect.construct(_Map, iterable ? [iterable] : []);
|
|
1437
|
-
|
|
1438
|
-
/**
|
|
1439
|
-
* Safe copies of Object built-in methods.
|
|
1440
|
-
*
|
|
1441
|
-
* These references are captured at module initialization time to protect against
|
|
1442
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1443
|
-
*
|
|
1444
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/object
|
|
1445
|
-
*/
|
|
1446
|
-
// Capture references at module initialization time
|
|
1447
|
-
const _Object = globalThis.Object;
|
|
1448
|
-
/**
|
|
1449
|
-
* (Safe copy) Returns an array of key/values of the enumerable own properties of an object.
|
|
1450
|
-
*/
|
|
1451
|
-
const entries = _Object.entries;
|
|
1452
|
-
|
|
1453
|
-
/**
|
|
1454
|
-
* Safe copies of URL built-ins via factory functions.
|
|
1455
|
-
*
|
|
1456
|
-
* Provides safe references to URL and URLSearchParams.
|
|
1457
|
-
* These references are captured at module initialization time to protect against
|
|
1458
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1459
|
-
*
|
|
1460
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/url
|
|
1461
|
-
*/
|
|
1462
|
-
// Capture references at module initialization time
|
|
1463
|
-
const _URL = globalThis.URL;
|
|
1464
|
-
/**
|
|
1465
|
-
* (Safe copy) Creates an object URL for the given object.
|
|
1466
|
-
* Use this instead of `URL.createObjectURL()`.
|
|
1467
|
-
*
|
|
1468
|
-
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
1469
|
-
*/
|
|
1470
|
-
typeof _URL.createObjectURL === 'function'
|
|
1471
|
-
? _URL.createObjectURL.bind(_URL)
|
|
1472
|
-
: () => {
|
|
1473
|
-
throw new Error('URL.createObjectURL is not available in this environment');
|
|
1474
|
-
};
|
|
1475
|
-
/**
|
|
1476
|
-
* (Safe copy) Revokes an object URL previously created with createObjectURL.
|
|
1477
|
-
* Use this instead of `URL.revokeObjectURL()`.
|
|
1478
|
-
*
|
|
1479
|
-
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
1480
|
-
*/
|
|
1481
|
-
typeof _URL.revokeObjectURL === 'function'
|
|
1482
|
-
? _URL.revokeObjectURL.bind(_URL)
|
|
1483
|
-
: () => {
|
|
1484
|
-
throw new Error('URL.revokeObjectURL is not available in this environment');
|
|
1485
|
-
};
|
|
1486
|
-
|
|
1487
|
-
/**
|
|
1488
|
-
* Safe copies of Math built-in methods.
|
|
1489
|
-
*
|
|
1490
|
-
* These references are captured at module initialization time to protect against
|
|
1491
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1492
|
-
*
|
|
1493
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/math
|
|
1494
|
-
*/
|
|
1495
|
-
// Capture references at module initialization time
|
|
1496
|
-
const _Math = globalThis.Math;
|
|
1497
|
-
// ============================================================================
|
|
1498
|
-
// Min/Max
|
|
1499
|
-
// ============================================================================
|
|
1500
|
-
/**
|
|
1501
|
-
* (Safe copy) Returns the larger of zero or more numbers.
|
|
1502
|
-
*/
|
|
1503
|
-
const max = _Math.max;
|
|
1504
|
-
|
|
1505
4470
|
/**
|
|
1506
4471
|
* Line Parser
|
|
1507
4472
|
*
|
|
@@ -1557,6 +4522,25 @@ function parseVersionFromHeading(heading) {
|
|
|
1557
4522
|
if (trimmed[pos] === ']') {
|
|
1558
4523
|
pos++;
|
|
1559
4524
|
}
|
|
4525
|
+
// Handle markdown link format [version](url) - jscutlery/semver style
|
|
4526
|
+
// This extracts the compare URL from patterns like [0.0.4](https://github.com/.../compare/...)
|
|
4527
|
+
if (trimmed[pos] === '(') {
|
|
4528
|
+
const urlStart = pos + 1;
|
|
4529
|
+
let depth = 1;
|
|
4530
|
+
pos++;
|
|
4531
|
+
// Find matching closing parenthesis (handles nested parens in URLs)
|
|
4532
|
+
while (pos < trimmed.length && depth > 0) {
|
|
4533
|
+
if (trimmed[pos] === '(')
|
|
4534
|
+
depth++;
|
|
4535
|
+
else if (trimmed[pos] === ')')
|
|
4536
|
+
depth--;
|
|
4537
|
+
pos++;
|
|
4538
|
+
}
|
|
4539
|
+
// Extract URL if we found the closing paren
|
|
4540
|
+
if (depth === 0) {
|
|
4541
|
+
compareUrl = trimmed.slice(urlStart, pos - 1);
|
|
4542
|
+
}
|
|
4543
|
+
}
|
|
1560
4544
|
// Skip whitespace and separator
|
|
1561
4545
|
while (pos < trimmed.length && (trimmed[pos] === ' ' || trimmed[pos] === '-' || trimmed[pos] === '–')) {
|
|
1562
4546
|
pos++;
|
|
@@ -1573,8 +4557,8 @@ function parseVersionFromHeading(heading) {
|
|
|
1573
4557
|
while (pos < trimmed.length && trimmed[pos] === ' ') {
|
|
1574
4558
|
pos++;
|
|
1575
4559
|
}
|
|
1576
|
-
// Check for link at end: [compare](url)
|
|
1577
|
-
if (pos < trimmed.length) {
|
|
4560
|
+
// Check for link at end: [compare](url) - only if no URL was already extracted
|
|
4561
|
+
if (pos < trimmed.length && !compareUrl) {
|
|
1578
4562
|
const linkMatch = extractLink(trimmed.slice(pos));
|
|
1579
4563
|
if (linkMatch?.url) {
|
|
1580
4564
|
compareUrl = linkMatch.url;
|
|
@@ -2268,11 +5252,22 @@ function isWhitespace(char) {
|
|
|
2268
5252
|
}
|
|
2269
5253
|
|
|
2270
5254
|
/**
|
|
2271
|
-
*
|
|
5255
|
+
* Validates that a URL is actually a GitHub URL by parsing it properly.
|
|
5256
|
+
* This prevents SSRF attacks where 'github.com' could appear in path/query.
|
|
2272
5257
|
*
|
|
2273
|
-
*
|
|
2274
|
-
*
|
|
5258
|
+
* @param url - The URL string to validate
|
|
5259
|
+
* @returns True if the URL host is github.com or a subdomain
|
|
2275
5260
|
*/
|
|
5261
|
+
function isGitHubUrl(url) {
    // createURL throws on malformed input, so parse first and bail out
    // early on anything that is not a valid URL.
    let parsed;
    try {
        parsed = createURL(url);
    }
    catch {
        return false;
    }
    // Only accept github.com itself or a genuine subdomain of it; matching
    // on the parsed host (not a substring search) blocks SSRF tricks where
    // 'github.com' appears in the path or query.
    const { host } = parsed;
    return host === 'github.com' || host.endsWith('.github.com');
}
|
|
2276
5271
|
/**
|
|
2277
5272
|
* Parses a changelog markdown string into a Changelog object.
|
|
2278
5273
|
*
|
|
@@ -2340,7 +5335,7 @@ function parseHeader(state) {
|
|
|
2340
5335
|
description.push(`[${token.value}](${nextToken.value})`);
|
|
2341
5336
|
links.push({ label: token.value, url: nextToken.value });
|
|
2342
5337
|
// Try to detect repository URL
|
|
2343
|
-
if (!state.repositoryUrl && nextToken.value
|
|
5338
|
+
if (!state.repositoryUrl && isGitHubUrl(nextToken.value)) {
|
|
2344
5339
|
state.repositoryUrl = extractRepoUrl(nextToken.value);
|
|
2345
5340
|
}
|
|
2346
5341
|
advance(state); // skip link-text
|
|
@@ -2971,20 +5966,28 @@ function serializeIssueRef(ref) {
|
|
|
2971
5966
|
* ```
|
|
2972
5967
|
*/
|
|
2973
5968
|
function addEntry(changelog, entry, options) {
|
|
5969
|
+
const position = options?.position ?? 'start';
|
|
5970
|
+
const replaceExisting = options?.replaceExisting ?? false;
|
|
5971
|
+
const updateMetadata = options?.updateMetadata ?? false;
|
|
2974
5972
|
// Check for existing entry
|
|
2975
5973
|
const existingIndex = changelog.entries.findIndex((e) => e.version === entry.version);
|
|
2976
|
-
if (existingIndex !== -1 &&
|
|
5974
|
+
if (existingIndex !== -1 && !replaceExisting) {
|
|
2977
5975
|
throw createError(`Entry with version "${entry.version}" already exists. Use replaceExisting: true to replace.`);
|
|
2978
5976
|
}
|
|
2979
5977
|
let newEntries;
|
|
2980
|
-
{
|
|
5978
|
+
if (existingIndex !== -1 && replaceExisting) {
|
|
5979
|
+
// Replace existing entry
|
|
5980
|
+
newEntries = [...changelog.entries];
|
|
5981
|
+
newEntries[existingIndex] = entry;
|
|
5982
|
+
}
|
|
5983
|
+
else {
|
|
2981
5984
|
// Add new entry
|
|
2982
|
-
const insertIndex = 0 ;
|
|
5985
|
+
const insertIndex = position === 'start' ? 0 : position === 'end' ? changelog.entries.length : position;
|
|
2983
5986
|
newEntries = [...changelog.entries];
|
|
2984
5987
|
newEntries.splice(insertIndex, 0, entry);
|
|
2985
5988
|
}
|
|
2986
5989
|
// Build new metadata if requested
|
|
2987
|
-
const metadata = changelog.metadata;
|
|
5990
|
+
const metadata = updateMetadata ? { ...changelog.metadata, warnings: [] } : changelog.metadata;
|
|
2988
5991
|
return {
|
|
2989
5992
|
...changelog,
|
|
2990
5993
|
entries: newEntries,
|
|
@@ -2992,11 +5995,149 @@ function addEntry(changelog, entry, options) {
|
|
|
2992
5995
|
};
|
|
2993
5996
|
}
|
|
2994
5997
|
|
|
5998
|
+
/**
|
|
5999
|
+
* Changelog Entry Removal
|
|
6000
|
+
*
|
|
6001
|
+
* Functions for removing entries from a changelog.
|
|
6002
|
+
*/
|
|
6003
|
+
/**
|
|
6004
|
+
* Removes multiple entries from a changelog.
|
|
6005
|
+
*
|
|
6006
|
+
* @param changelog - The changelog to remove from
|
|
6007
|
+
* @param versions - The versions to remove
|
|
6008
|
+
* @param options - Optional removal options
|
|
6009
|
+
* @returns A new changelog without the specified entries
|
|
6010
|
+
*/
|
|
6011
|
+
function removeEntries(changelog, versions, options) {
    // Build a Set once so membership checks stay O(1) per entry.
    const toRemove = createSet(versions);
    // Return a fresh changelog object; the input is never mutated.
    return {
        ...changelog,
        entries: changelog.entries.filter((entry) => !toRemove.has(entry.version)),
    };
}
|
|
6019
|
+
|
|
6020
|
+
/**
|
|
6021
|
+
* Creates a platform-specific compare URL for viewing changes between two commits.
|
|
6022
|
+
*
|
|
6023
|
+
* Each platform has a different URL format:
|
|
6024
|
+
* - **GitHub**: `{baseUrl}/compare/{fromCommit}...{toCommit}` (three dots)
|
|
6025
|
+
* - **GitLab**: `{baseUrl}/-/compare/{fromCommit}...{toCommit}` (three dots, `/-/` prefix)
|
|
6026
|
+
* - **Bitbucket**: `{baseUrl}/compare/{toCommit}..{fromCommit}` (two dots, reversed order)
|
|
6027
|
+
* - **Azure DevOps**: `{baseUrl}/compare?version=GT{toCommit}&compareVersion=GT{fromCommit}` (query params)
|
|
6028
|
+
*
|
|
6029
|
+
* For `custom` platforms, a `formatCompareUrl` function must be provided in the repository config.
|
|
6030
|
+
* For `unknown` platforms, returns `null`.
|
|
6031
|
+
*
|
|
6032
|
+
* @param options - Compare URL options including repository, fromCommit, and toCommit
|
|
6033
|
+
* @returns The compare URL string, or null if URL cannot be generated
|
|
6034
|
+
*
|
|
6035
|
+
* @example
|
|
6036
|
+
* ```typescript
|
|
6037
|
+
* // GitHub
|
|
6038
|
+
* createCompareUrl({
|
|
6039
|
+
* repository: { platform: 'github', baseUrl: 'https://github.com/owner/repo' },
|
|
6040
|
+
* fromCommit: 'abc1234',
|
|
6041
|
+
* toCommit: 'def5678'
|
|
6042
|
+
* })
|
|
6043
|
+
* // → 'https://github.com/owner/repo/compare/abc1234...def5678'
|
|
6044
|
+
*
|
|
6045
|
+
* // GitLab
|
|
6046
|
+
* createCompareUrl({
|
|
6047
|
+
* repository: { platform: 'gitlab', baseUrl: 'https://gitlab.com/group/project' },
|
|
6048
|
+
* fromCommit: 'abc1234',
|
|
6049
|
+
* toCommit: 'def5678'
|
|
6050
|
+
* })
|
|
6051
|
+
* // → 'https://gitlab.com/group/project/-/compare/abc1234...def5678'
|
|
6052
|
+
*
|
|
6053
|
+
* // Bitbucket (reversed order)
|
|
6054
|
+
* createCompareUrl({
|
|
6055
|
+
* repository: { platform: 'bitbucket', baseUrl: 'https://bitbucket.org/owner/repo' },
|
|
6056
|
+
* fromCommit: 'abc1234',
|
|
6057
|
+
* toCommit: 'def5678'
|
|
6058
|
+
* })
|
|
6059
|
+
* // → 'https://bitbucket.org/owner/repo/compare/def5678..abc1234'
|
|
6060
|
+
*
|
|
6061
|
+
* // Azure DevOps
|
|
6062
|
+
* createCompareUrl({
|
|
6063
|
+
* repository: { platform: 'azure-devops', baseUrl: 'https://dev.azure.com/org/proj/_git/repo' },
|
|
6064
|
+
* fromCommit: 'abc1234',
|
|
6065
|
+
* toCommit: 'def5678'
|
|
6066
|
+
* })
|
|
6067
|
+
* // → 'https://dev.azure.com/org/proj/_git/repo/compare?version=GTdef5678&compareVersion=GTabc1234'
|
|
6068
|
+
*
|
|
6069
|
+
* // Custom formatter
|
|
6070
|
+
* createCompareUrl({
|
|
6071
|
+
* repository: {
|
|
6072
|
+
* platform: 'custom',
|
|
6073
|
+
* baseUrl: 'https://my-git.internal/repo',
|
|
6074
|
+
* formatCompareUrl: (from, to) => `https://my-git.internal/diff/${from}/${to}`
|
|
6075
|
+
* },
|
|
6076
|
+
* fromCommit: 'abc1234',
|
|
6077
|
+
* toCommit: 'def5678'
|
|
6078
|
+
* })
|
|
6079
|
+
* // → 'https://my-git.internal/diff/abc1234/def5678'
|
|
6080
|
+
* ```
|
|
6081
|
+
*/
|
|
6082
|
+
function createCompareUrl(options) {
    const { repository, fromCommit, toCommit } = options;
    // All three inputs are required to build any compare URL.
    if (!repository || !fromCommit || !toCommit) {
        return null;
    }
    // A user-supplied formatter always wins, on any platform.
    if (repository.formatCompareUrl) {
        return repository.formatCompareUrl(fromCommit, toCommit);
    }
    const { platform, baseUrl } = repository;
    // Without a formatter, 'unknown' and 'custom' platforms cannot be linked.
    if (platform === 'unknown' || platform === 'custom') {
        return null;
    }
    // Delegate to the per-platform formatter for recognized platforms.
    return isKnownPlatform(platform)
        ? formatKnownPlatformCompareUrl(platform, baseUrl, fromCommit, toCommit)
        : null;
}
|
|
6107
|
+
/**
|
|
6108
|
+
* Formats a compare URL for known platforms.
|
|
6109
|
+
*
|
|
6110
|
+
* @param platform - Known platform type
|
|
6111
|
+
* @param baseUrl - Repository base URL
|
|
6112
|
+
* @param fromCommit - Source commit hash (older version)
|
|
6113
|
+
* @param toCommit - Target commit hash (newer version)
|
|
6114
|
+
* @returns Formatted compare URL
|
|
6115
|
+
*
|
|
6116
|
+
* @internal
|
|
6117
|
+
*/
|
|
6118
|
+
function formatKnownPlatformCompareUrl(platform, baseUrl, fromCommit, toCommit) {
    if (platform === 'github') {
        // GitHub uses triple-dot range syntax: /compare/{from}...{to}
        return `${baseUrl}/compare/${fromCommit}...${toCommit}`;
    }
    if (platform === 'gitlab') {
        // GitLab nests the compare view under the `/-/` scope.
        return `${baseUrl}/-/compare/${fromCommit}...${toCommit}`;
    }
    if (platform === 'bitbucket') {
        // Bitbucket expects two dots and the commits in reversed order.
        return `${baseUrl}/compare/${toCommit}..${fromCommit}`;
    }
    if (platform === 'azure-devops') {
        // Azure DevOps takes GT-prefixed query parameters; the commit hashes
        // are the only user-controlled query values, so encode them.
        return `${baseUrl}/compare?version=GT${encodeURIComponent(toCommit)}&compareVersion=GT${encodeURIComponent(fromCommit)}`;
    }
    // Unrecognized platforms fall through (undefined), matching the callers'
    // expectation that they pre-filter with isKnownPlatform.
}
|
|
6135
|
+
|
|
2995
6136
|
const GENERATE_CHANGELOG_STEP_ID = 'generate-changelog';
|
|
2996
6137
|
/**
|
|
2997
6138
|
* Maps conventional commit types to changelog section types.
|
|
2998
6139
|
*/
|
|
2999
|
-
const
|
|
6140
|
+
const DEFAULT_COMMIT_TYPE_TO_SECTION = {
|
|
3000
6141
|
feat: 'features',
|
|
3001
6142
|
fix: 'fixes',
|
|
3002
6143
|
perf: 'performance',
|
|
@@ -3009,23 +6150,102 @@ const COMMIT_TYPE_TO_SECTION = {
|
|
|
3009
6150
|
chore: 'chores',
|
|
3010
6151
|
style: 'other',
|
|
3011
6152
|
};
|
|
6153
|
+
/**
|
|
6154
|
+
* Resolves the commit type to section mapping by merging config with defaults.
|
|
6155
|
+
*
|
|
6156
|
+
* @param configMapping - User-provided partial mapping from FlowConfig
|
|
6157
|
+
* @returns Resolved mapping with user overrides applied
|
|
6158
|
+
*/
|
|
6159
|
+
function resolveCommitTypeMapping(configMapping) {
    // With no user mapping, hand back the shared default object directly;
    // otherwise shallow-merge so user entries override the defaults.
    return configMapping
        ? { ...DEFAULT_COMMIT_TYPE_TO_SECTION, ...configMapping }
        : DEFAULT_COMMIT_TYPE_TO_SECTION;
}
|
|
6165
|
+
/**
|
|
6166
|
+
* Checks if a commit source represents an indirect change.
|
|
6167
|
+
*
|
|
6168
|
+
* @param source - The commit source type
|
|
6169
|
+
* @returns True if the commit is indirect (dependency or infrastructure)
|
|
6170
|
+
*/
|
|
6171
|
+
function isIndirectSource(source) {
    // Indirect changes are those that reached the project via a dependency
    // or shared infrastructure, rather than a direct commit.
    const indirectSources = ['indirect-dependency', 'indirect-infra'];
    return indirectSources.includes(source);
}
|
|
6174
|
+
/**
|
|
6175
|
+
* Groups classified commits by their section type.
|
|
6176
|
+
*
|
|
6177
|
+
* @param commits - Array of classified commits
|
|
6178
|
+
* @param mapping - Commit type to section mapping
|
|
6179
|
+
* @returns Record of section type to classified commits
|
|
6180
|
+
*/
|
|
6181
|
+
function groupClassifiedCommitsBySection(commits, mapping) {
    const bySection = {};
    for (const classified of commits) {
        // Commits with no parsed type are treated as chores for lookup.
        const commitType = classified.commit.type ?? 'chore';
        const mapped = mapping[commitType];
        // An explicit null in the mapping excludes this commit type entirely.
        if (mapped === null) {
            continue;
        }
        // Types absent from the mapping land in the generic 'chores' bucket.
        const section = mapped ?? 'chores';
        if (bySection[section] === undefined) {
            bySection[section] = [];
        }
        bySection[section].push(classified);
    }
    return bySection;
}
|
|
3012
6197
|
/**
|
|
3013
6198
|
* Groups commits by their section type.
|
|
3014
6199
|
*
|
|
3015
6200
|
* @param commits - Array of conventional commits
|
|
6201
|
+
* @param mapping - Commit type to section mapping
|
|
3016
6202
|
* @returns Record of section type to commits
|
|
3017
6203
|
*/
|
|
3018
|
-
function groupCommitsBySection(commits, mapping) {
    const groups = {};
    for (const commit of commits) {
        // Untyped commits are looked up as 'chore'.
        const mapped = mapping[commit.type ?? 'chore'];
        // A null mapping value means "exclude this commit type".
        if (mapped === null) {
            continue;
        }
        // Anything the mapping does not cover falls back to 'chores'.
        const section = mapped ?? 'chores';
        (groups[section] ??= []).push(commit);
    }
    return groups;
}
|
|
6220
|
+
/**
|
|
6221
|
+
* Creates a changelog item from a classified commit.
|
|
6222
|
+
*
|
|
6223
|
+
* Applies scope display rules:
|
|
6224
|
+
* - Direct commits: scope omitted (redundant in project changelog)
|
|
6225
|
+
* - Indirect commits: scope preserved (provides context)
|
|
6226
|
+
*
|
|
6227
|
+
* @param classified - The classified commit with source metadata
|
|
6228
|
+
* @returns A changelog item with proper scope handling
|
|
6229
|
+
*/
|
|
6230
|
+
function classifiedCommitToItem(classified) {
    // toChangelogCommit applies the scope display rules for this
    // classification (direct vs indirect) before we format the text.
    const commit = toChangelogCommit(classified);
    // If a scope survived the transformation, prefix it in bold for context.
    const scoped = commit.scope ? `**${commit.scope}:** ${commit.subject}` : commit.subject;
    // Breaking changes get a loud, unmissable prefix.
    const text = commit.breaking ? `⚠️ BREAKING: ${scoped}` : scoped;
    return createChangelogItem(text, {
        source: classified.source,
        indirect: isIndirectSource(classified.source),
        breaking: commit.breaking,
    });
}
|
|
3029
6249
|
/**
|
|
3030
6250
|
* Creates a changelog item from a conventional commit.
|
|
3031
6251
|
*
|
|
@@ -3061,6 +6281,8 @@ function createGenerateChangelogStep() {
|
|
|
3061
6281
|
return createStep(GENERATE_CHANGELOG_STEP_ID, 'Generate Changelog Entry', async (ctx) => {
|
|
3062
6282
|
const { config, state } = ctx;
|
|
3063
6283
|
const { commits, nextVersion, bumpType } = state;
|
|
6284
|
+
// Resolve commit type to section mapping
|
|
6285
|
+
const commitTypeMapping = resolveCommitTypeMapping(config.commitTypeToSection);
|
|
3064
6286
|
// Skip if no bump needed
|
|
3065
6287
|
if (!nextVersion || bumpType === 'none') {
|
|
3066
6288
|
return createSkippedResult('No version bump, skipping changelog generation');
|
|
@@ -3071,9 +6293,26 @@ function createGenerateChangelogStep() {
|
|
|
3071
6293
|
}
|
|
3072
6294
|
// Handle case with no commits (e.g., first release)
|
|
3073
6295
|
if (!commits || commits.length === 0) {
|
|
6296
|
+
// Generate compare URL using commit hashes ONLY
|
|
6297
|
+
// Only generate if we have a valid base commit (effectiveBaseCommit will be null if fallback was used)
|
|
6298
|
+
let compareUrl;
|
|
6299
|
+
if (state.repositoryConfig && state.effectiveBaseCommit) {
|
|
6300
|
+
const currentCommit = ctx.git.getHeadHash();
|
|
6301
|
+
compareUrl =
|
|
6302
|
+
createCompareUrl({
|
|
6303
|
+
repository: state.repositoryConfig,
|
|
6304
|
+
fromCommit: state.effectiveBaseCommit,
|
|
6305
|
+
toCommit: currentCommit,
|
|
6306
|
+
}) ?? undefined;
|
|
6307
|
+
}
|
|
6308
|
+
else if (state.publishedCommit && !state.effectiveBaseCommit) {
|
|
6309
|
+
// Log why we're not generating a compare URL
|
|
6310
|
+
ctx.logger.info('Compare URL omitted: published commit not in current history');
|
|
6311
|
+
}
|
|
3074
6312
|
const entry = createChangelogEntry(nextVersion, {
|
|
3075
6313
|
date: createDate().toISOString().split('T')[0],
|
|
3076
6314
|
sections: [createChangelogSection('features', 'Features', [createChangelogItem('Initial release')])],
|
|
6315
|
+
compareUrl,
|
|
3077
6316
|
});
|
|
3078
6317
|
return {
|
|
3079
6318
|
status: 'success',
|
|
@@ -3081,41 +6320,109 @@ function createGenerateChangelogStep() {
|
|
|
3081
6320
|
message: 'Generated initial release changelog entry',
|
|
3082
6321
|
};
|
|
3083
6322
|
}
|
|
3084
|
-
//
|
|
3085
|
-
const
|
|
3086
|
-
// Create sections
|
|
6323
|
+
// Use classification result when available for proper scope handling
|
|
6324
|
+
const { classificationResult } = state;
|
|
3087
6325
|
const sections = [];
|
|
3088
|
-
|
|
3089
|
-
|
|
3090
|
-
|
|
3091
|
-
|
|
3092
|
-
|
|
3093
|
-
|
|
3094
|
-
|
|
3095
|
-
|
|
3096
|
-
|
|
3097
|
-
|
|
3098
|
-
|
|
3099
|
-
|
|
3100
|
-
|
|
3101
|
-
|
|
3102
|
-
|
|
3103
|
-
|
|
3104
|
-
|
|
3105
|
-
|
|
3106
|
-
|
|
3107
|
-
|
|
3108
|
-
|
|
3109
|
-
|
|
3110
|
-
|
|
3111
|
-
|
|
3112
|
-
|
|
6326
|
+
if (classificationResult && classificationResult.included.length > 0) {
|
|
6327
|
+
// Use classified commits for proper scope display rules
|
|
6328
|
+
const classifiedCommits = classificationResult.included;
|
|
6329
|
+
// Separate direct and indirect commits
|
|
6330
|
+
const directCommits = classifiedCommits.filter((c) => !isIndirectSource(c.source));
|
|
6331
|
+
const indirectCommits = classifiedCommits.filter((c) => isIndirectSource(c.source));
|
|
6332
|
+
// Add breaking changes section first if any
|
|
6333
|
+
const breakingCommits = classifiedCommits.filter((c) => c.commit.breaking);
|
|
6334
|
+
if (breakingCommits.length > 0) {
|
|
6335
|
+
sections.push(createChangelogSection('breaking', 'Breaking Changes', breakingCommits.map((c) => {
|
|
6336
|
+
const commit = toChangelogCommit(c);
|
|
6337
|
+
const text = commit.breakingDescription ?? commit.subject;
|
|
6338
|
+
const indirect = isIndirectSource(c.source);
|
|
6339
|
+
return createChangelogItem(commit.scope ? `**${commit.scope}:** ${text}` : text, {
|
|
6340
|
+
source: c.source,
|
|
6341
|
+
indirect,
|
|
6342
|
+
breaking: true,
|
|
6343
|
+
});
|
|
6344
|
+
})));
|
|
6345
|
+
}
|
|
6346
|
+
// Group direct commits by section
|
|
6347
|
+
const groupedDirect = groupClassifiedCommitsBySection(directCommits, commitTypeMapping);
|
|
6348
|
+
// Add other sections in conventional order (direct commits only)
|
|
6349
|
+
const sectionOrder = [
|
|
6350
|
+
{ type: 'features', heading: 'Features' },
|
|
6351
|
+
{ type: 'fixes', heading: 'Bug Fixes' },
|
|
6352
|
+
{ type: 'performance', heading: 'Performance' },
|
|
6353
|
+
{ type: 'documentation', heading: 'Documentation' },
|
|
6354
|
+
{ type: 'refactoring', heading: 'Code Refactoring' },
|
|
6355
|
+
{ type: 'build', heading: 'Build' },
|
|
6356
|
+
{ type: 'ci', heading: 'Continuous Integration' },
|
|
6357
|
+
{ type: 'tests', heading: 'Tests' },
|
|
6358
|
+
{ type: 'chores', heading: 'Chores' },
|
|
6359
|
+
{ type: 'other', heading: 'Other' },
|
|
6360
|
+
];
|
|
6361
|
+
for (const { type: sectionType, heading } of sectionOrder) {
|
|
6362
|
+
const sectionCommits = groupedDirect[sectionType];
|
|
6363
|
+
if (sectionCommits && sectionCommits.length > 0) {
|
|
6364
|
+
sections.push(createChangelogSection(sectionType, heading, sectionCommits.map(classifiedCommitToItem)));
|
|
6365
|
+
}
|
|
6366
|
+
}
|
|
6367
|
+
// Add Dependency Updates section for indirect commits if any
|
|
6368
|
+
if (indirectCommits.length > 0) {
|
|
6369
|
+
sections.push(createChangelogSection('other', // Use 'other' as section type for dependency updates
|
|
6370
|
+
'Dependency Updates', indirectCommits.map((c) => classifiedCommitToItem(c))));
|
|
6371
|
+
}
|
|
6372
|
+
}
|
|
6373
|
+
else {
|
|
6374
|
+
// Fallback: use commits without classification (backward compatibility)
|
|
6375
|
+
const grouped = groupCommitsBySection(commits, commitTypeMapping);
|
|
6376
|
+
// Add breaking changes section first if any
|
|
6377
|
+
const breakingCommits = commits.filter((c) => c.breaking);
|
|
6378
|
+
if (breakingCommits.length > 0) {
|
|
6379
|
+
sections.push(createChangelogSection('breaking', 'Breaking Changes', breakingCommits.map((c) => {
|
|
6380
|
+
const text = c.breakingDescription ?? c.subject;
|
|
6381
|
+
return createChangelogItem(c.scope ? `**${c.scope}:** ${text}` : text);
|
|
6382
|
+
})));
|
|
6383
|
+
}
|
|
6384
|
+
// Add other sections in conventional order
|
|
6385
|
+
const sectionOrder = [
|
|
6386
|
+
{ type: 'features', heading: 'Features' },
|
|
6387
|
+
{ type: 'fixes', heading: 'Bug Fixes' },
|
|
6388
|
+
{ type: 'performance', heading: 'Performance' },
|
|
6389
|
+
{ type: 'documentation', heading: 'Documentation' },
|
|
6390
|
+
{ type: 'refactoring', heading: 'Code Refactoring' },
|
|
6391
|
+
{ type: 'build', heading: 'Build' },
|
|
6392
|
+
{ type: 'ci', heading: 'Continuous Integration' },
|
|
6393
|
+
{ type: 'tests', heading: 'Tests' },
|
|
6394
|
+
{ type: 'chores', heading: 'Chores' },
|
|
6395
|
+
{ type: 'other', heading: 'Other' },
|
|
6396
|
+
];
|
|
6397
|
+
for (const { type: sectionType, heading } of sectionOrder) {
|
|
6398
|
+
const sectionCommits = grouped[sectionType];
|
|
6399
|
+
if (sectionCommits && sectionCommits.length > 0) {
|
|
6400
|
+
sections.push(createChangelogSection(sectionType, heading, sectionCommits.map(commitToItem)));
|
|
6401
|
+
}
|
|
3113
6402
|
}
|
|
3114
6403
|
}
|
|
6404
|
+
// Generate compare URL using commit hashes ONLY
|
|
6405
|
+
// Only generate if we have a valid base commit (effectiveBaseCommit will be null if fallback was used)
|
|
6406
|
+
let compareUrl;
|
|
6407
|
+
if (state.repositoryConfig && state.effectiveBaseCommit) {
|
|
6408
|
+
const currentCommit = ctx.git.getHeadHash();
|
|
6409
|
+
compareUrl =
|
|
6410
|
+
createCompareUrl({
|
|
6411
|
+
repository: state.repositoryConfig,
|
|
6412
|
+
fromCommit: state.effectiveBaseCommit,
|
|
6413
|
+
toCommit: currentCommit,
|
|
6414
|
+
}) ?? undefined;
|
|
6415
|
+
ctx.logger.debug(`Compare URL: ${state.effectiveBaseCommit.slice(0, 7)}...${currentCommit.slice(0, 7)}`);
|
|
6416
|
+
}
|
|
6417
|
+
else if (state.publishedCommit && !state.effectiveBaseCommit) {
|
|
6418
|
+
// Log why we're not generating a compare URL
|
|
6419
|
+
ctx.logger.info('Compare URL omitted: published commit not in current history');
|
|
6420
|
+
}
|
|
3115
6421
|
// Create the entry
|
|
3116
6422
|
const entry = createChangelogEntry(nextVersion, {
|
|
3117
6423
|
date: createDate().toISOString().split('T')[0],
|
|
3118
6424
|
sections,
|
|
6425
|
+
compareUrl,
|
|
3119
6426
|
});
|
|
3120
6427
|
return {
|
|
3121
6428
|
status: 'success',
|
|
@@ -3141,14 +6448,15 @@ function createWriteChangelogStep() {
|
|
|
3141
6448
|
if (!nextVersion || bumpType === 'none' || !changelogEntry || config.skipChangelog) {
|
|
3142
6449
|
return createSkippedResult('No changelog to write');
|
|
3143
6450
|
}
|
|
3144
|
-
const
|
|
6451
|
+
const changelogFileName = config.changelogFileName ?? DEFAULT_CHANGELOG_FILENAME;
|
|
6452
|
+
const changelogPath = `${projectRoot}/${changelogFileName}`;
|
|
3145
6453
|
let existingContent = '';
|
|
3146
6454
|
// Read existing changelog
|
|
3147
6455
|
try {
|
|
3148
6456
|
existingContent = tree.read(changelogPath, 'utf-8') ?? '';
|
|
3149
6457
|
}
|
|
3150
6458
|
catch {
|
|
3151
|
-
logger.debug(
|
|
6459
|
+
logger.debug(`No existing ${changelogFileName} found`);
|
|
3152
6460
|
}
|
|
3153
6461
|
// If no existing content, create new changelog
|
|
3154
6462
|
if (!existingContent.trim()) {
|
|
@@ -3166,12 +6474,33 @@ function createWriteChangelogStep() {
|
|
|
3166
6474
|
stateUpdates: {
|
|
3167
6475
|
modifiedFiles: [...(state.modifiedFiles ?? []), changelogPath],
|
|
3168
6476
|
},
|
|
3169
|
-
message: `Created
|
|
6477
|
+
message: `Created ${changelogFileName} with version ${nextVersion}`,
|
|
3170
6478
|
};
|
|
3171
6479
|
}
|
|
3172
6480
|
// Parse existing and add entry
|
|
3173
6481
|
const existing = parseChangelog(existingContent);
|
|
3174
|
-
const
|
|
6482
|
+
const isPendingPublication = state.isPendingPublication === true;
|
|
6483
|
+
let changelog = existing;
|
|
6484
|
+
// Clean up stacked entries when in pending publication state
|
|
6485
|
+
if (isPendingPublication && state.publishedVersion) {
|
|
6486
|
+
const publishedVer = parseVersion(state.publishedVersion);
|
|
6487
|
+
if (publishedVer.success && publishedVer.version) {
|
|
6488
|
+
const pubVer = publishedVer.version;
|
|
6489
|
+
const toRemove = changelog.entries
|
|
6490
|
+
.filter((e) => !e.unreleased)
|
|
6491
|
+
.filter((e) => {
|
|
6492
|
+
const ver = parseVersion(e.version);
|
|
6493
|
+
return ver.success && ver.version && gt(ver.version, pubVer);
|
|
6494
|
+
})
|
|
6495
|
+
.map((e) => e.version);
|
|
6496
|
+
if (toRemove.length > 0) {
|
|
6497
|
+
logger.info(`Removing stacked entries: ${toRemove.join(', ')}`);
|
|
6498
|
+
changelog = removeEntries(changelog, toRemove);
|
|
6499
|
+
}
|
|
6500
|
+
}
|
|
6501
|
+
}
|
|
6502
|
+
// Add entry (replaceExisting handles case where nextVersion entry already exists)
|
|
6503
|
+
const updated = addEntry(changelog, changelogEntry, { replaceExisting: isPendingPublication });
|
|
3175
6504
|
const serialized = serializeChangelog(updated);
|
|
3176
6505
|
tree.write(changelogPath, serialized);
|
|
3177
6506
|
return {
|
|
@@ -3179,7 +6508,7 @@ function createWriteChangelogStep() {
|
|
|
3179
6508
|
stateUpdates: {
|
|
3180
6509
|
modifiedFiles: [...(state.modifiedFiles ?? []), changelogPath],
|
|
3181
6510
|
},
|
|
3182
|
-
message: `Updated
|
|
6511
|
+
message: `Updated ${changelogFileName} with version ${nextVersion}`,
|
|
3183
6512
|
};
|
|
3184
6513
|
}, {
|
|
3185
6514
|
dependsOn: ['generate-changelog'],
|
|
@@ -3208,23 +6537,26 @@ function createUpdatePackageStep() {
|
|
|
3208
6537
|
return createSkippedResult('No version bump needed');
|
|
3209
6538
|
}
|
|
3210
6539
|
const packageJsonPath = `${projectRoot}/package.json`;
|
|
6540
|
+
logger.debug(`Reading package.json from: ${packageJsonPath}`);
|
|
3211
6541
|
// Read package.json
|
|
3212
6542
|
let content;
|
|
3213
6543
|
try {
|
|
3214
6544
|
content = tree.read(packageJsonPath, 'utf-8') ?? '';
|
|
3215
6545
|
if (!content) {
|
|
6546
|
+
logger.error(`package.json not found at ${packageJsonPath}`);
|
|
3216
6547
|
return {
|
|
3217
6548
|
status: 'failed',
|
|
3218
|
-
error: createError(
|
|
3219
|
-
message:
|
|
6549
|
+
error: createError(`package.json not found at ${packageJsonPath}`),
|
|
6550
|
+
message: `Could not read package.json at ${packageJsonPath}`,
|
|
3220
6551
|
};
|
|
3221
6552
|
}
|
|
3222
6553
|
}
|
|
3223
6554
|
catch (error) {
|
|
6555
|
+
logger.error(`Failed to read package.json at ${packageJsonPath}: ${error}`);
|
|
3224
6556
|
return {
|
|
3225
6557
|
status: 'failed',
|
|
3226
6558
|
error: error instanceof Error ? error : createError(String(error)),
|
|
3227
|
-
message:
|
|
6559
|
+
message: `Failed to read package.json at ${packageJsonPath}`,
|
|
3228
6560
|
};
|
|
3229
6561
|
}
|
|
3230
6562
|
// Parse and update version
|
|
@@ -3507,14 +6839,15 @@ const CONVENTIONAL_FLOW_CONFIG = {
|
|
|
3507
6839
|
*
|
|
3508
6840
|
* This flow follows the standard conventional commits workflow:
|
|
3509
6841
|
* 1. Fetch published version from registry
|
|
3510
|
-
* 2.
|
|
3511
|
-
* 3.
|
|
3512
|
-
* 4.
|
|
3513
|
-
* 5.
|
|
3514
|
-
* 6.
|
|
3515
|
-
* 7.
|
|
3516
|
-
* 8.
|
|
3517
|
-
* 9. Create git
|
|
6842
|
+
* 2. Resolve repository configuration (for compare URLs)
|
|
6843
|
+
* 3. Analyze commits since last release
|
|
6844
|
+
* 4. Calculate version bump based on commit types
|
|
6845
|
+
* 5. Check if version already published (idempotency)
|
|
6846
|
+
* 6. Generate changelog entry (with compare URL if repository resolved)
|
|
6847
|
+
* 7. Update package.json version
|
|
6848
|
+
* 8. Write changelog to file
|
|
6849
|
+
* 9. Create git commit (optional)
|
|
6850
|
+
* 10. Create git tag (optional, typically after publish)
|
|
3518
6851
|
*
|
|
3519
6852
|
* @param config - Optional configuration overrides
|
|
3520
6853
|
* @returns A VersionFlow configured for conventional commits
|
|
@@ -3539,6 +6872,7 @@ function createConventionalFlow(config) {
|
|
|
3539
6872
|
const mergedConfig = { ...CONVENTIONAL_FLOW_CONFIG, ...config };
|
|
3540
6873
|
return createFlow('conventional', 'Conventional Commits Flow', [
|
|
3541
6874
|
createFetchRegistryStep(),
|
|
6875
|
+
createResolveRepositoryStep(),
|
|
3542
6876
|
createAnalyzeCommitsStep(),
|
|
3543
6877
|
createCalculateBumpStep(),
|
|
3544
6878
|
createCheckIdempotencyStep(),
|
|
@@ -3670,6 +7004,7 @@ function createIndependentFlow(config) {
|
|
|
3670
7004
|
const mergedConfig = { ...INDEPENDENT_FLOW_CONFIG, ...config };
|
|
3671
7005
|
return createFlow('independent', 'Independent Versioning Flow', [
|
|
3672
7006
|
createFetchRegistryStep(),
|
|
7007
|
+
createResolveRepositoryStep(),
|
|
3673
7008
|
createAnalyzeCommitsStep(),
|
|
3674
7009
|
createCalculateBumpStep(),
|
|
3675
7010
|
createCheckDependentBumpsStep(),
|
|
@@ -3698,6 +7033,7 @@ function createIndependentFlow(config) {
|
|
|
3698
7033
|
function createBatchReleaseFlow(config) {
|
|
3699
7034
|
return createFlow('batch-release', 'Batch Release Flow', [
|
|
3700
7035
|
createFetchRegistryStep(),
|
|
7036
|
+
createResolveRepositoryStep(),
|
|
3701
7037
|
createAnalyzeCommitsStep(),
|
|
3702
7038
|
createCalculateBumpStep(),
|
|
3703
7039
|
createCheckIdempotencyStep(),
|
|
@@ -3831,6 +7167,7 @@ function createSyncedFlow(config) {
|
|
|
3831
7167
|
const mergedConfig = { ...SYNCED_FLOW_CONFIG, ...config };
|
|
3832
7168
|
return createFlow('synced', 'Synced Versioning Flow', [
|
|
3833
7169
|
createFetchRegistryStep(),
|
|
7170
|
+
createResolveRepositoryStep(),
|
|
3834
7171
|
createAnalyzeCommitsStep(),
|
|
3835
7172
|
createCalculateBumpStep(),
|
|
3836
7173
|
createCheckIdempotencyStep(),
|
|
@@ -3870,6 +7207,7 @@ function createFixedVersionFlow(version, config) {
|
|
|
3870
7207
|
});
|
|
3871
7208
|
return createFlow('fixed', 'Fixed Version Flow', [
|
|
3872
7209
|
createFetchRegistryStep(),
|
|
7210
|
+
createResolveRepositoryStep(),
|
|
3873
7211
|
createAnalyzeCommitsStep(),
|
|
3874
7212
|
fixedBumpStep,
|
|
3875
7213
|
createCheckIdempotencyStep(),
|