@hyperfrontend/versioning 0.1.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +50 -1
- package/CHANGELOG.md +37 -23
- package/README.md +19 -14
- package/changelog/index.cjs.js +38 -6
- package/changelog/index.cjs.js.map +1 -1
- package/changelog/index.esm.js +38 -6
- package/changelog/index.esm.js.map +1 -1
- package/changelog/models/entry.d.ts +5 -0
- package/changelog/models/entry.d.ts.map +1 -1
- package/changelog/models/index.cjs.js +2 -0
- package/changelog/models/index.cjs.js.map +1 -1
- package/changelog/models/index.esm.js +2 -0
- package/changelog/models/index.esm.js.map +1 -1
- package/changelog/operations/index.cjs.js.map +1 -1
- package/changelog/operations/index.esm.js.map +1 -1
- package/changelog/parse/index.cjs.js +85 -6
- package/changelog/parse/index.cjs.js.map +1 -1
- package/changelog/parse/index.esm.js +85 -6
- package/changelog/parse/index.esm.js.map +1 -1
- package/changelog/parse/line.d.ts.map +1 -1
- package/changelog/parse/parser.d.ts +0 -6
- package/changelog/parse/parser.d.ts.map +1 -1
- package/commits/classify/classifier.d.ts +73 -0
- package/commits/classify/classifier.d.ts.map +1 -0
- package/commits/classify/index.cjs.js +707 -0
- package/commits/classify/index.cjs.js.map +1 -0
- package/commits/classify/index.d.ts +8 -0
- package/commits/classify/index.d.ts.map +1 -0
- package/commits/classify/index.esm.js +679 -0
- package/commits/classify/index.esm.js.map +1 -0
- package/commits/classify/infrastructure.d.ts +205 -0
- package/commits/classify/infrastructure.d.ts.map +1 -0
- package/commits/classify/models.d.ts +108 -0
- package/commits/classify/models.d.ts.map +1 -0
- package/commits/classify/project-scopes.d.ts +69 -0
- package/commits/classify/project-scopes.d.ts.map +1 -0
- package/commits/index.cjs.js +704 -0
- package/commits/index.cjs.js.map +1 -1
- package/commits/index.d.ts +1 -0
- package/commits/index.d.ts.map +1 -1
- package/commits/index.esm.js +678 -1
- package/commits/index.esm.js.map +1 -1
- package/flow/executor/execute.d.ts +6 -0
- package/flow/executor/execute.d.ts.map +1 -1
- package/flow/executor/index.cjs.js +1617 -43
- package/flow/executor/index.cjs.js.map +1 -1
- package/flow/executor/index.esm.js +1623 -49
- package/flow/executor/index.esm.js.map +1 -1
- package/flow/index.cjs.js +6749 -2938
- package/flow/index.cjs.js.map +1 -1
- package/flow/index.esm.js +6751 -2944
- package/flow/index.esm.js.map +1 -1
- package/flow/models/index.cjs.js +138 -0
- package/flow/models/index.cjs.js.map +1 -1
- package/flow/models/index.d.ts +1 -1
- package/flow/models/index.d.ts.map +1 -1
- package/flow/models/index.esm.js +138 -1
- package/flow/models/index.esm.js.map +1 -1
- package/flow/models/types.d.ts +180 -3
- package/flow/models/types.d.ts.map +1 -1
- package/flow/presets/conventional.d.ts +9 -8
- package/flow/presets/conventional.d.ts.map +1 -1
- package/flow/presets/independent.d.ts.map +1 -1
- package/flow/presets/index.cjs.js +3641 -303
- package/flow/presets/index.cjs.js.map +1 -1
- package/flow/presets/index.esm.js +3641 -303
- package/flow/presets/index.esm.js.map +1 -1
- package/flow/presets/synced.d.ts.map +1 -1
- package/flow/steps/analyze-commits.d.ts +9 -6
- package/flow/steps/analyze-commits.d.ts.map +1 -1
- package/flow/steps/calculate-bump.d.ts.map +1 -1
- package/flow/steps/fetch-registry.d.ts.map +1 -1
- package/flow/steps/generate-changelog.d.ts +5 -0
- package/flow/steps/generate-changelog.d.ts.map +1 -1
- package/flow/steps/index.cjs.js +3663 -328
- package/flow/steps/index.cjs.js.map +1 -1
- package/flow/steps/index.d.ts +2 -1
- package/flow/steps/index.d.ts.map +1 -1
- package/flow/steps/index.esm.js +3661 -329
- package/flow/steps/index.esm.js.map +1 -1
- package/flow/steps/resolve-repository.d.ts +36 -0
- package/flow/steps/resolve-repository.d.ts.map +1 -0
- package/flow/steps/update-packages.d.ts.map +1 -1
- package/git/factory.d.ts +14 -0
- package/git/factory.d.ts.map +1 -1
- package/git/index.cjs.js +65 -0
- package/git/index.cjs.js.map +1 -1
- package/git/index.esm.js +66 -2
- package/git/index.esm.js.map +1 -1
- package/git/operations/index.cjs.js +40 -0
- package/git/operations/index.cjs.js.map +1 -1
- package/git/operations/index.d.ts +1 -1
- package/git/operations/index.d.ts.map +1 -1
- package/git/operations/index.esm.js +41 -2
- package/git/operations/index.esm.js.map +1 -1
- package/git/operations/log.d.ts +23 -0
- package/git/operations/log.d.ts.map +1 -1
- package/index.cjs.js +7547 -4947
- package/index.cjs.js.map +1 -1
- package/index.d.ts +3 -1
- package/index.d.ts.map +1 -1
- package/index.esm.js +7550 -4954
- package/index.esm.js.map +1 -1
- package/package.json +39 -1
- package/registry/index.cjs.js +3 -3
- package/registry/index.cjs.js.map +1 -1
- package/registry/index.esm.js +3 -3
- package/registry/index.esm.js.map +1 -1
- package/registry/models/index.cjs.js +2 -0
- package/registry/models/index.cjs.js.map +1 -1
- package/registry/models/index.esm.js +2 -0
- package/registry/models/index.esm.js.map +1 -1
- package/registry/models/version-info.d.ts +10 -0
- package/registry/models/version-info.d.ts.map +1 -1
- package/registry/npm/client.d.ts.map +1 -1
- package/registry/npm/index.cjs.js +1 -3
- package/registry/npm/index.cjs.js.map +1 -1
- package/registry/npm/index.esm.js +1 -3
- package/registry/npm/index.esm.js.map +1 -1
- package/repository/index.cjs.js +998 -0
- package/repository/index.cjs.js.map +1 -0
- package/repository/index.d.ts +4 -0
- package/repository/index.d.ts.map +1 -0
- package/repository/index.esm.js +981 -0
- package/repository/index.esm.js.map +1 -0
- package/repository/models/index.cjs.js +301 -0
- package/repository/models/index.cjs.js.map +1 -0
- package/repository/models/index.d.ts +7 -0
- package/repository/models/index.d.ts.map +1 -0
- package/repository/models/index.esm.js +290 -0
- package/repository/models/index.esm.js.map +1 -0
- package/repository/models/platform.d.ts +58 -0
- package/repository/models/platform.d.ts.map +1 -0
- package/repository/models/repository-config.d.ts +132 -0
- package/repository/models/repository-config.d.ts.map +1 -0
- package/repository/models/resolution.d.ts +121 -0
- package/repository/models/resolution.d.ts.map +1 -0
- package/repository/parse/index.cjs.js +755 -0
- package/repository/parse/index.cjs.js.map +1 -0
- package/repository/parse/index.d.ts +5 -0
- package/repository/parse/index.d.ts.map +1 -0
- package/repository/parse/index.esm.js +749 -0
- package/repository/parse/index.esm.js.map +1 -0
- package/repository/parse/package-json.d.ts +100 -0
- package/repository/parse/package-json.d.ts.map +1 -0
- package/repository/parse/url.d.ts +81 -0
- package/repository/parse/url.d.ts.map +1 -0
- package/repository/url/compare.d.ts +84 -0
- package/repository/url/compare.d.ts.map +1 -0
- package/repository/url/index.cjs.js +178 -0
- package/repository/url/index.cjs.js.map +1 -0
- package/repository/url/index.d.ts +3 -0
- package/repository/url/index.d.ts.map +1 -0
- package/repository/url/index.esm.js +176 -0
- package/repository/url/index.esm.js.map +1 -0
- package/workspace/discovery/changelog-path.d.ts +3 -7
- package/workspace/discovery/changelog-path.d.ts.map +1 -1
- package/workspace/discovery/index.cjs.js +408 -335
- package/workspace/discovery/index.cjs.js.map +1 -1
- package/workspace/discovery/index.esm.js +408 -335
- package/workspace/discovery/index.esm.js.map +1 -1
- package/workspace/discovery/packages.d.ts +0 -6
- package/workspace/discovery/packages.d.ts.map +1 -1
- package/workspace/index.cjs.js +84 -11
- package/workspace/index.cjs.js.map +1 -1
- package/workspace/index.esm.js +84 -11
- package/workspace/index.esm.js.map +1 -1
|
@@ -1,5 +1,8 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
+
var node_path = require('node:path');
|
|
4
|
+
var node_fs = require('node:fs');
|
|
5
|
+
|
|
3
6
|
/**
|
|
4
7
|
* Creates a version flow.
|
|
5
8
|
*
|
|
@@ -85,98 +88,2715 @@ function createStep(id, name, execute, options = {}) {
|
|
|
85
88
|
};
|
|
86
89
|
}
|
|
87
90
|
/**
 * Creates a skipped step result.
 *
 * @param message - Explanation for why the step was skipped
 * @returns A FlowStepResult with 'skipped' status
 */
function createSkippedResult(message) {
    return { status: 'skipped', message };
}
|
|
102
|
+
|
|
103
|
+
const FETCH_REGISTRY_STEP_ID = 'fetch-registry';
/**
 * Creates the fetch-registry step.
 *
 * This step:
 * 1. Queries the registry for the latest published version
 * 2. Reads the current version from package.json
 * 3. Determines if this is a first release
 *
 * State updates:
 * - publishedVersion: Latest version on registry (null if not published)
 * - publishedCommit: gitHead of the published version (null if unavailable)
 * - currentVersion: Version from local package.json
 * - isFirstRelease: True if never published
 *
 * @returns A FlowStep that fetches registry information
 */
function createFetchRegistryStep() {
    return createStep(FETCH_REGISTRY_STEP_ID, 'Fetch Registry Version', async (ctx) => {
        const { registry, tree, projectRoot, packageName, logger } = ctx;
        // Determine the current version from the local package.json; fall back
        // to 0.0.0 when the file is missing, unreadable, or has no version.
        let currentVersion = '0.0.0';
        try {
            const raw = tree.read(`${projectRoot}/package.json`, 'utf-8');
            if (raw) {
                currentVersion = parse(raw).version ?? '0.0.0';
            }
        }
        catch (error) {
            logger.warn(`Could not read package.json: ${error}`);
        }
        // Ask the registry for the latest published version; absence of a
        // published version means this is the package's first release.
        let publishedVersion = null;
        let publishedCommit = null;
        let isFirstRelease = true;
        try {
            publishedVersion = await registry.getLatestVersion(packageName);
            isFirstRelease = publishedVersion === null;
            if (publishedVersion) {
                // Best effort: resolve the commit hash (gitHead) of the
                // published version so downstream steps can diff against it.
                try {
                    const info = await registry.getVersionInfo(packageName, publishedVersion);
                    publishedCommit = info?.gitHead ?? null;
                    if (publishedCommit) {
                        logger.debug(`Published ${publishedVersion} at commit ${publishedCommit.slice(0, 7)}`);
                    }
                    else {
                        logger.debug(`Published ${publishedVersion} has no gitHead (older package or published without git)`);
                    }
                }
                catch (error) {
                    // Version info fetch failed, but we still have the version
                    logger.debug(`Could not fetch version info for ${publishedVersion}: ${error}`);
                }
            }
        }
        catch (error) {
            // Package might not exist yet, which is fine
            logger.debug(`Registry query failed (package may not exist): ${error}`);
            isFirstRelease = true;
        }
        const message = isFirstRelease
            ? `First release (local: ${currentVersion})`
            : `Published: ${publishedVersion}${publishedCommit ? ` @ ${publishedCommit.slice(0, 7)}` : ''}, Local: ${currentVersion}`;
        return {
            status: 'success',
            stateUpdates: {
                publishedVersion,
                publishedCommit,
                currentVersion,
                isFirstRelease,
            },
            message,
        };
    });
}
|
|
180
|
+
|
|
181
|
+
/**
 * Safe copies of Error built-ins via factory functions.
 *
 * Since constructors cannot be safely captured via Object.assign, this module
 * provides factory functions that use Reflect.construct internally.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/error
 */
// Capture references at module initialization time
const _Error = globalThis.Error;
const _Reflect$4 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Error using the captured Error constructor.
 * Use this instead of `new Error()`.
 *
 * @param message - Optional error message.
 * @param options - Optional error options (e.g. `cause`).
 * @returns A new Error instance.
 */
const createError = (message, options) => {
    return _Reflect$4.construct(_Error, [message, options]);
};
|
|
204
|
+
|
|
205
|
+
/**
 * Creates a new RepositoryConfig.
 *
 * Normalizes the base URL (trailing slashes and a `.git` suffix are stripped)
 * and validates that a 'custom' platform always carries a formatter function.
 *
 * @param options - Repository configuration options
 * @returns A new RepositoryConfig object
 * @throws {Error} if platform is 'custom' but no formatCompareUrl is provided
 *
 * @example
 * ```typescript
 * // GitHub repository
 * const config = createRepositoryConfig({
 *   platform: 'github',
 *   baseUrl: 'https://github.com/owner/repo'
 * })
 *
 * // Custom platform
 * const customConfig = createRepositoryConfig({
 *   platform: 'custom',
 *   baseUrl: 'https://my-git.internal/repo',
 *   formatCompareUrl: (from, to) => `https://my-git.internal/diff/${from}/${to}`
 * })
 * ```
 */
function createRepositoryConfig(options) {
    const { platform, formatCompareUrl } = options;
    // A custom platform has no built-in URL scheme, so the caller must
    // provide the compare-URL formatter themselves.
    if (platform === 'custom' && !formatCompareUrl) {
        throw createError("Repository config with platform 'custom' requires a formatCompareUrl function");
    }
    return {
        platform,
        baseUrl: normalizeBaseUrl(options.baseUrl),
        formatCompareUrl,
    };
}
|
|
245
|
+
/**
 * Checks if a value is a RepositoryConfig object.
 *
 * A RepositoryConfig has string `platform` and `baseUrl` properties, and an
 * optional `formatCompareUrl` function.
 *
 * @param value - Value to check
 * @returns True if the value is a RepositoryConfig
 *
 * @example
 * ```typescript
 * const config = { platform: 'github', baseUrl: 'https://...' }
 * if (isRepositoryConfig(config)) {
 *   // config is typed as RepositoryConfig
 * }
 * ```
 */
function isRepositoryConfig(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    const candidate = value;
    const hasPlatform = typeof candidate['platform'] === 'string';
    const hasBaseUrl = typeof candidate['baseUrl'] === 'string';
    const formatter = candidate['formatCompareUrl'];
    const formatterOk = formatter === undefined || typeof formatter === 'function';
    return hasPlatform && hasBaseUrl && formatterOk;
}
|
|
268
|
+
/**
 * Normalizes a base URL: trims whitespace, strips trailing slashes, then
 * strips a single trailing `.git` suffix.
 *
 * @param url - URL to normalize
 * @returns Normalized URL
 *
 * @internal
 */
function normalizeBaseUrl(url) {
    // Trim, then collapse any run of trailing slashes in one pass.
    let result = url.trim().replace(/\/+$/, '');
    // Drop one `.git` suffix if present (after slash stripping, so
    // "repo.git/" normalizes to "repo").
    if (result.endsWith('.git')) {
        result = result.slice(0, -'.git'.length);
    }
    return result;
}
|
|
288
|
+
|
|
289
|
+
/**
|
|
290
|
+
* Creates a disabled repository resolution configuration.
|
|
291
|
+
*
|
|
292
|
+
* No compare URLs will be generated.
|
|
293
|
+
*
|
|
294
|
+
* @returns A RepositoryResolution with mode 'disabled'
|
|
295
|
+
*
|
|
296
|
+
* @example
|
|
297
|
+
* ```typescript
|
|
298
|
+
* const config = createDisabledResolution()
|
|
299
|
+
* // { mode: 'disabled' }
|
|
300
|
+
* ```
|
|
301
|
+
*/
|
|
302
|
+
/**
 * Checks if a value is a RepositoryResolution object.
 *
 * A RepositoryResolution is any object whose `mode` is one of
 * 'explicit', 'inferred', or 'disabled'.
 *
 * @param value - Value to check
 * @returns True if the value is a RepositoryResolution
 */
function isRepositoryResolution(value) {
    if (typeof value !== 'object' || value === null) {
        return false;
    }
    switch (value['mode']) {
        case 'explicit':
        case 'inferred':
        case 'disabled':
            return true;
        default:
            return false;
    }
}
|
|
316
|
+
/**
 * Default inference order when mode is 'inferred'.
 *
 * NOTE(review): presumably these sources are consulted in array order —
 * the package.json `repository` field first, then the git remote; confirm
 * against the resolution logic that consumes this constant.
 */
const DEFAULT_INFERENCE_ORDER = ['package-json', 'git-remote'];
|
|
320
|
+
|
|
321
|
+
/**
 * Safe copies of Map built-in via factory function.
 *
 * Since constructors cannot be safely captured via Object.assign, this module
 * provides a factory function that uses Reflect.construct internally.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/map
 */
// Capture references at module initialization time
const _Map = globalThis.Map;
const _Reflect$3 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Map using the captured Map constructor.
 * Use this instead of `new Map()`.
 *
 * @param iterable - Optional iterable of key-value pairs.
 * @returns A new Map instance.
 */
const createMap = (iterable) => {
    // Only forward the argument when one was actually provided, so a
    // zero-argument call matches `new Map()` exactly.
    const ctorArgs = iterable ? [iterable] : [];
    return _Reflect$3.construct(_Map, ctorArgs);
};
|
|
343
|
+
|
|
344
|
+
/**
 * Safe copies of Math built-in methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/math
 */
// Capture the Math reference once, at module initialization time.
const _Math = globalThis.Math;
// ============================================================================
// Min/Max
// ============================================================================
/**
 * (Safe copies) `max` returns the larger and `min` the smaller of zero or
 * more numbers; both are read from the captured Math object at init time.
 */
const { max, min } = _Math;
|
|
365
|
+
|
|
366
|
+
/**
 * Safe copies of URL built-ins via factory functions.
 *
 * Provides safe references to URL and URLSearchParams.
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/url
 */
// Capture references at module initialization time
const _URL = globalThis.URL;
const _Reflect$2 = globalThis.Reflect;
// ============================================================================
// URL
// ============================================================================
/**
 * (Safe copy) Creates a new URL using the captured URL constructor.
 * Use this instead of `new URL()`.
 *
 * @param url - The URL string to parse.
 * @param base - Optional base URL for relative URLs.
 * @returns A new URL instance.
 */
const createURL = (url, base) => _Reflect$2.construct(_URL, [url, base]);
// NOTE(review): the original bundle contained two orphaned ternary expression
// statements here (`typeof _URL.createObjectURL === 'function' ? _URL.create
// ObjectURL.bind(_URL) : () => { throw ... }` and the revokeObjectURL
// equivalent) whose results were never assigned — tree-shaking residue of the
// removed createObjectURL/revokeObjectURL safe copies. They were side-effect
// free dead code and have been deleted.
|
|
412
|
+
|
|
413
|
+
/**
 * Checks if a platform identifier is a known platform with built-in support.
 *
 * @param platform - Platform identifier to check
 * @returns True if the platform is a known platform
 *
 * @example
 * ```typescript
 * isKnownPlatform('github') // true
 * isKnownPlatform('gitlab') // true
 * isKnownPlatform('custom') // false
 * isKnownPlatform('unknown') // false
 * ```
 */
function isKnownPlatform(platform) {
    switch (platform) {
        case 'github':
        case 'gitlab':
        case 'bitbucket':
        case 'azure-devops':
            return true;
        default:
            return false;
    }
}
|
|
430
|
+
/**
 * Known platform hostnames mapped to their platform type.
 * Used for automatic platform detection from repository URLs.
 *
 * Includes both standard SaaS domains and common patterns for self-hosted instances.
 *
 * NOTE(review): this map appears to be consulted with an exact, lowercased
 * hostname; subdomain and self-hosted matching is handled by heuristics in
 * the caller — confirm against detectPlatformFromHostname.
 */
const PLATFORM_HOSTNAMES = createMap([
    // GitHub
    ['github.com', 'github'],
    // GitLab
    ['gitlab.com', 'gitlab'],
    // Bitbucket
    ['bitbucket.org', 'bitbucket'],
    // Azure DevOps — both the modern and legacy domains map to the same type
    ['dev.azure.com', 'azure-devops'],
    ['visualstudio.com', 'azure-devops'],
]);
|
|
447
|
+
/**
 * Detects platform from a hostname.
 *
 * Exact matches against known SaaS hostnames win; Azure DevOps domain
 * suffixes are checked next; finally, substring heuristics catch common
 * self-hosted instance names (e.g. `github.company.com` → `github`).
 *
 * @param hostname - Hostname to detect platform from (e.g., "github.com")
 * @returns Detected platform or 'unknown' if not recognized
 *
 * @example
 * ```typescript
 * detectPlatformFromHostname('github.com') // 'github'
 * detectPlatformFromHostname('gitlab.mycompany.com') // 'gitlab'
 * detectPlatformFromHostname('custom-git.internal') // 'unknown'
 * ```
 */
function detectPlatformFromHostname(hostname) {
    const host = hostname.toLowerCase();
    // Exact matches take precedence over any heuristic.
    const known = PLATFORM_HOSTNAMES.get(host);
    if (known) {
        return known;
    }
    // Azure DevOps: legacy *.visualstudio.com and modern *.azure.com
    // (the latter also covers ssh.dev.azure.com).
    if (host.endsWith('.visualstudio.com') || host.endsWith('.azure.com')) {
        return 'azure-devops';
    }
    // Self-hosted heuristics: the product name usually appears in the
    // hostname (GitHub Enterprise, self-hosted GitLab, Bitbucket Server).
    // Checked in the same precedence order as the original chain.
    for (const candidate of ['github', 'gitlab', 'bitbucket']) {
        if (host.includes(candidate)) {
            return candidate;
        }
    }
    return 'unknown';
}
|
|
493
|
+
|
|
494
|
+
/**
 * Parses a git URL and extracts platform and base URL.
 *
 * Supports multiple URL formats:
 * - `https://github.com/owner/repo`
 * - `https://github.com/owner/repo.git`
 * - `git+https://github.com/owner/repo.git`
 * - `git://github.com/owner/repo.git`
 * - `git@github.com:owner/repo.git` (SSH format)
 *
 * Handles self-hosted instances by detecting platform from hostname
 * (e.g. `gitlab.internal.com` → `gitlab`) and Azure DevOps URL formats
 * (`https://dev.azure.com/org/project/_git/repo`,
 * `https://org.visualstudio.com/project/_git/repo`).
 *
 * @param gitUrl - Git repository URL in any supported format
 * @returns Parsed repository info with platform and base URL, or null if parsing fails
 *
 * @example
 * ```typescript
 * parseRepositoryUrl('git@github.com:owner/repo.git')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 *
 * parseRepositoryUrl('https://dev.azure.com/org/proj/_git/repo')
 * // → { platform: 'azure-devops', baseUrl: 'https://dev.azure.com/org/proj/_git/repo' }
 * ```
 */
function parseRepositoryUrl(gitUrl) {
    // Reject non-strings and empty/whitespace-only input up front.
    if (!gitUrl || typeof gitUrl !== 'string') {
        return null;
    }
    const candidate = gitUrl.trim();
    if (candidate === '') {
        return null;
    }
    // SSH form (git@host:path) takes precedence, then HTTP(S) forms;
    // both parsers return null when the format does not match.
    return parseSshUrl(candidate) ?? parseHttpUrl(candidate);
}
|
|
554
|
+
/**
 * Parses an SSH-style git URL.
 *
 * Accepts `git@host:path` with an optional leading `ssh://` scheme.
 *
 * @param url - URL to parse (e.g., "git@github.com:owner/repo.git")
 * @returns Parsed repository or null
 *
 * @internal
 */
function parseSshUrl(url) {
    // Drop an optional ssh:// scheme prefix.
    const withoutScheme = url.startsWith('ssh://') ? url.slice(6) : url;
    // Anything that is not a git@ URL is not SSH form.
    if (!withoutScheme.startsWith('git@')) {
        return null;
    }
    const rest = withoutScheme.slice(4);
    // The host ends at the first ':' or '/', whichever comes first.
    const colonAt = rest.indexOf(':');
    const slashAt = rest.indexOf('/');
    let splitAt;
    if (colonAt === -1 && slashAt === -1) {
        return null;
    }
    else if (colonAt === -1) {
        splitAt = slashAt;
    }
    else if (slashAt === -1) {
        splitAt = colonAt;
    }
    else {
        splitAt = min(colonAt, slashAt);
    }
    const hostname = rest.slice(0, splitAt);
    const pathPart = normalizePathPart(rest.slice(splitAt + 1));
    if (!hostname || !pathPart) {
        return null;
    }
    const platform = detectPlatformFromHostname(hostname);
    // Azure DevOps needs its base URL rebuilt from the org/project/repo parts.
    if (platform === 'azure-devops') {
        const baseUrl = constructAzureDevOpsBaseUrl(hostname, pathPart);
        return baseUrl ? { platform, baseUrl } : null;
    }
    // Standard platforms: https://hostname/path
    return { platform, baseUrl: `https://${hostname}/${pathPart}` };
}
|
|
608
|
+
/**
 * Parses an HTTP(S)-style git URL.
 *
 * Normalizes `git+https://` and `git://` prefixes to plain `https://`
 * before parsing; non-HTTP(S) protocols are rejected.
 *
 * @param url - URL to parse
 * @returns Parsed repository or null
 *
 * @internal
 */
function parseHttpUrl(url) {
    // Normalize git-specific scheme prefixes: git+https:// → https://,
    // git:// → https://.
    const normalized = url.replace(/^git\+/, '').replace(/^git:\/\//, 'https://');
    let parsed;
    try {
        parsed = createURL(normalized);
    }
    catch {
        return null;
    }
    const { protocol } = parsed;
    // Only http and https are supported.
    if (protocol !== 'http:' && protocol !== 'https:') {
        return null;
    }
    const hostname = parsed.hostname.toLowerCase();
    const pathPart = normalizePathPart(parsed.pathname);
    if (!pathPart) {
        return null;
    }
    const platform = detectPlatformFromHostname(hostname);
    // Azure DevOps has a special URL structure; if it cannot be rebuilt,
    // the URL is rejected.
    if (platform === 'azure-devops') {
        const baseUrl = constructAzureDevOpsBaseUrl(hostname, pathPart);
        return baseUrl ? { platform, baseUrl } : null;
    }
    // Standard platforms keep their original protocol.
    return { platform, baseUrl: `${protocol}//${hostname}/${pathPart}` };
}
|
|
651
|
+
/**
 * Normalizes a path part: trims whitespace, strips leading and trailing
 * slashes, then strips a single trailing `.git` suffix.
 *
 * @param path - Path to normalize
 * @returns Normalized path or null if empty
 *
 * @internal
 */
function normalizePathPart(path) {
    // Trim, then strip runs of leading and trailing slashes in one pass each.
    let result = path.trim().replace(/^\/+/, '').replace(/\/+$/, '');
    // Drop one `.git` suffix (slashes were stripped first, so "repo.git/"
    // normalizes to "repo").
    if (result.endsWith('.git')) {
        result = result.slice(0, -'.git'.length);
    }
    // An empty remainder means there was no usable path.
    return result === '' ? null : result;
}
|
|
679
|
+
/**
 * Constructs the canonical base URL for Azure DevOps repositories.
 *
 * Supported layouts, all normalized to the modern dev.azure.com form:
 * - Modern: `https://dev.azure.com/{org}/{project}/_git/{repo}`
 * - Legacy: `https://{org}.visualstudio.com/{project}/_git/{repo}`
 * - SSH: `git@ssh.dev.azure.com:v3/{org}/{project}/{repo}`
 *
 * @param hostname - Hostname from the URL (lowercased by the caller)
 * @param pathPart - Path portion after the hostname
 * @returns Constructed base URL or null if the path cannot be interpreted
 *
 * @internal
 */
function constructAzureDevOpsBaseUrl(hostname, pathPart) {
    const segments = pathPart.split('/');
    // Builds the canonical URL only when all three pieces are non-empty.
    const toBaseUrl = (org, project, repo) => (org && project && repo
        ? `https://dev.azure.com/${org}/${project}/_git/${repo}`
        : null);
    if (hostname === 'dev.azure.com' || hostname.endsWith('.azure.com')) {
        // Both org/project/_git/repo and SSH v3/org/project/repo need 4+ segments.
        if (segments.length < 4) {
            return null;
        }
        if (segments[0] === 'v3') {
            // v3/org/project/repo → https://dev.azure.com/org/project/_git/repo
            const fromSsh = toBaseUrl(segments[1], segments[2], segments[3]);
            if (fromSsh) {
                return fromSsh;
            }
            // Malformed v3 paths fall through to the `_git` search below.
        }
        const gitIndex = segments.indexOf('_git');
        if (gitIndex >= 2 && segments[gitIndex + 1]) {
            // Everything before the project segment is treated as the org path.
            return toBaseUrl(segments.slice(0, gitIndex - 1).join('/'), segments[gitIndex - 1], segments[gitIndex + 1]);
        }
        return null;
    }
    if (hostname.endsWith('.visualstudio.com')) {
        // Legacy hosts embed the org in the hostname.
        const org = hostname.replace('.visualstudio.com', '');
        const gitIndex = segments.indexOf('_git');
        if (gitIndex >= 1 && segments[gitIndex + 1]) {
            const project = segments.slice(0, gitIndex).join('/');
            const repo = segments[gitIndex + 1];
            if (project && repo) {
                // Normalize to the dev.azure.com format.
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        return null;
    }
    return null;
}
|
|
739
|
+
/**
 * Creates a RepositoryConfig from a git URL.
 *
 * Convenience wrapper combining `parseRepositoryUrl` with
 * `createRepositoryConfig` to produce a ready-to-use configuration.
 *
 * @param gitUrl - Git repository URL in any supported format
 * @returns RepositoryConfig or null if URL cannot be parsed
 *
 * @example
 * ```typescript
 * const config = createRepositoryConfigFromUrl('https://github.com/owner/repo')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 *
 * const config = createRepositoryConfigFromUrl('git@gitlab.com:group/project.git')
 * // → { platform: 'gitlab', baseUrl: 'https://gitlab.com/group/project' }
 * ```
 */
function createRepositoryConfigFromUrl(gitUrl) {
    const parsed = parseRepositoryUrl(gitUrl);
    // Unknown platforms can't generate compare URLs, so they are rejected
    // just like unparseable input.
    if (!parsed || parsed.platform === 'unknown') {
        return null;
    }
    const { platform, baseUrl } = parsed;
    return createRepositoryConfig({ platform, baseUrl });
}
|
|
771
|
+
|
|
772
|
+
/**
 * Shorthand platform prefixes supported in package.json repository field.
 *
 * Format: `"platform:owner/repo"` or `"owner/repo"` (defaults to GitHub)
 *
 * Maps a lowercase shorthand prefix to the https base URL it expands to;
 * consulted by `parseRepositoryString` when expanding shorthand strings.
 *
 * @see https://docs.npmjs.com/cli/v9/configuring-npm/package-json#repository
 */
const SHORTHAND_PLATFORMS = createMap([
    ['github', 'https://github.com'],
    ['gitlab', 'https://gitlab.com'],
    ['bitbucket', 'https://bitbucket.org'],
    ['gist', 'https://gist.github.com'],
]);
|
|
785
|
+
/**
 * Infers repository configuration from package.json content.
 *
 * Handles multiple formats:
 * - Shorthand: `"github:owner/repo"`, `"gitlab:group/project"`, `"bitbucket:team/repo"`
 * - Bare shorthand: `"owner/repo"` (defaults to GitHub)
 * - URL string: `"https://github.com/owner/repo"`
 * - Object with URL: `{ "type": "git", "url": "https://..." }`
 *
 * @param packageJsonContent - Raw JSON string content of package.json
 * @returns RepositoryConfig or null if repository cannot be inferred
 *
 * @example
 * ```typescript
 * inferRepositoryFromPackageJson('{"repository": "github:owner/repo"}')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function inferRepositoryFromPackageJson(packageJsonContent) {
    // Reject non-strings and the empty string up front.
    if (typeof packageJsonContent !== 'string' || packageJsonContent === '') {
        return null;
    }
    let manifest;
    try {
        manifest = parse(packageJsonContent);
    }
    catch {
        // Invalid JSON → nothing to infer.
        return null;
    }
    return inferRepositoryFromPackageJsonObject(manifest);
}
|
|
829
|
+
/**
 * Infers repository configuration from a parsed package.json object.
 *
 * Useful when the manifest has already been parsed.
 *
 * @param packageJson - Parsed package.json object
 * @returns RepositoryConfig or null if repository cannot be inferred
 *
 * @example
 * ```typescript
 * const pkg = { repository: 'github:owner/repo' }
 * inferRepositoryFromPackageJsonObject(pkg)
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function inferRepositoryFromPackageJsonObject(packageJson) {
    const repository = packageJson.repository;
    // String form: shorthand ("github:o/r", "o/r") or a plain URL.
    if (typeof repository === 'string') {
        return parseRepositoryString(repository);
    }
    // Object form: { type, url } — only the url is needed.
    if (repository && typeof repository === 'object' && repository.url) {
        return createRepositoryConfigFromUrl(repository.url);
    }
    return null;
}
|
|
859
|
+
/**
 * Parses a repository string (shorthand or URL) into a config.
 *
 * Resolution order: `platform:owner/repo` shorthand, then bare `owner/repo`
 * (defaults to GitHub), then a full URL as the fallback.
 *
 * @param repoString - Repository string from package.json
 * @returns RepositoryConfig or null
 *
 * @internal
 */
function parseRepositoryString(repoString) {
    const value = repoString.trim();
    if (!value) {
        return null;
    }
    // "platform:owner/repo" shorthand — the prefix must be letters only.
    const colonIndex = value.indexOf(':');
    if (colonIndex > 0 && isOnlyLetters(value.slice(0, colonIndex))) {
        const shorthandPath = value.slice(colonIndex + 1);
        if (shorthandPath) {
            const platformBase = SHORTHAND_PLATFORMS.get(value.slice(0, colonIndex).toLowerCase());
            // Known prefixes expand against their base URL; unknown prefixes
            // are retried as a plain URL.
            return platformBase
                ? createRepositoryConfigFromUrl(`${platformBase}/${shorthandPath}`)
                : createRepositoryConfigFromUrl(value);
        }
        // Empty path after the colon: fall through to the remaining checks.
    }
    // Bare "owner/repo" (no protocol, no git@) defaults to GitHub.
    const looksLikeUrl = value.includes('://') || value.startsWith('git@');
    if (!looksLikeUrl && isBareShorthand(value)) {
        return createRepositoryConfigFromUrl(`https://github.com/${value}`);
    }
    // Anything else: treat as a full URL.
    return createRepositoryConfigFromUrl(value);
}
|
|
904
|
+
/**
 * Checks if a string contains only ASCII letters (a-z, A-Z).
 *
 * @param str - String to check
 * @returns True if the string is non-empty and contains only letters
 *
 * @internal
 */
function isOnlyLetters(str) {
    // Anchored character class replaces the manual charCode loop; the `+`
    // quantifier requires at least one character, matching the old
    // `str.length > 0` check. No /g flag, so there is no lastIndex state.
    return /^[A-Za-z]+$/.test(str);
}
|
|
923
|
+
/**
 * Checks if a string is a bare shorthand format (owner/repo):
 * exactly one forward slash with content on both sides.
 *
 * @param str - String to check
 * @returns True if string matches owner/repo format
 *
 * @internal
 */
function isBareShorthand(str) {
    const pieces = str.split('/');
    // Exactly one slash ⇔ split yields two pieces; both must be non-empty.
    return pieces.length === 2 && pieces[0] !== '' && pieces[1] !== '';
}
|
|
940
|
+
|
|
941
|
+
// Unique identifier for the resolve-repository flow step (see createResolveRepositoryStep).
const RESOLVE_REPOSITORY_STEP_ID = 'resolve-repository';
|
|
942
|
+
/**
 * Creates the resolve-repository step.
 *
 * Resolves repository configuration for compare URL generation, supporting:
 * - `undefined` or `'disabled'`: no-op, backward compatible default
 * - `'inferred'`: auto-detect from package.json or git remote
 * - `RepositoryConfig`: direct repository configuration provided
 * - `RepositoryResolution`: fine-grained control with mode and options
 *
 * State updates:
 * - repositoryConfig: resolved repository configuration (on success)
 *
 * @returns A FlowStep that resolves repository configuration
 *
 * @example
 * ```typescript
 * // Auto-detect repository
 * const flow = createFlow({ repository: 'inferred' })
 *
 * // Explicit repository
 * const flow = createFlow({
 *   repository: { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * })
 * ```
 */
function createResolveRepositoryStep() {
    return createStep(RESOLVE_REPOSITORY_STEP_ID, 'Resolve Repository', async (ctx) => {
        const { config, logger, tree, git, projectRoot } = ctx;
        const repoConfig = config.repository;
        // Small helper for the recurring "skipped" result shape.
        const skipped = (message) => ({ status: 'skipped', message });
        // Disabled or undefined — no-op for backward compatibility.
        if (repoConfig === undefined || repoConfig === 'disabled') {
            logger.debug('Repository resolution disabled');
            return skipped('Repository resolution disabled');
        }
        // Direct RepositoryConfig provided.
        if (isRepositoryConfig(repoConfig)) {
            logger.debug(`Using explicit repository config: ${repoConfig.platform}`);
            return {
                status: 'success',
                stateUpdates: {
                    repositoryConfig: repoConfig,
                },
                message: `Using explicit ${repoConfig.platform} repository`,
            };
        }
        // Shorthand 'inferred' mode.
        if (repoConfig === 'inferred') {
            const resolved = await inferRepository(tree, git, projectRoot, DEFAULT_INFERENCE_ORDER, logger);
            if (!resolved) {
                // Graceful degradation — no error, just no URLs.
                logger.debug('Could not infer repository from package.json or git remote');
                return skipped('Could not infer repository configuration');
            }
            return {
                status: 'success',
                stateUpdates: {
                    repositoryConfig: resolved,
                },
                message: `Inferred ${resolved.platform} repository from ${resolved.baseUrl}`,
            };
        }
        // Full RepositoryResolution object.
        if (isRepositoryResolution(repoConfig)) {
            return handleRepositoryResolution(repoConfig, tree, git, projectRoot, logger);
        }
        // Unknown configuration — should not happen with TypeScript.
        logger.warn('Unknown repository configuration format');
        return skipped('Unknown repository configuration format');
    }, {
        description: 'Resolves repository configuration for compare URL generation',
    });
}
|
|
1030
|
+
/**
 * Handles a full RepositoryResolution configuration.
 *
 * @param resolution - Repository resolution configuration ({ mode, repository, inferenceOrder })
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param logger - Logger instance
 * @returns Flow step result with repository config or skip/error status
 * @internal
 */
async function handleRepositoryResolution(resolution, tree, git, projectRoot, logger) {
    const { mode, repository, inferenceOrder } = resolution;
    // Explicitly disabled — skip without error.
    if (mode === 'disabled') {
        logger.debug('Repository resolution explicitly disabled');
        return { status: 'skipped', message: 'Repository resolution disabled' };
    }
    // Explicit mode requires a repository config; its absence is a hard failure.
    if (mode === 'explicit') {
        if (!repository) {
            const message = 'Repository config required when mode is "explicit"';
            return { status: 'failed', message, error: createError(message) };
        }
        logger.debug(`Using explicit repository config: ${repository.platform}`);
        return {
            status: 'success',
            stateUpdates: {
                repositoryConfig: repository,
            },
            message: `Using explicit ${repository.platform} repository`,
        };
    }
    // Any other mode: infer, honoring a custom inference order when given.
    const resolved = await inferRepository(tree, git, projectRoot, inferenceOrder ?? DEFAULT_INFERENCE_ORDER, logger);
    if (resolved) {
        return {
            status: 'success',
            stateUpdates: {
                repositoryConfig: resolved,
            },
            message: `Inferred ${resolved.platform} repository`,
        };
    }
    // Graceful degradation when inference finds nothing.
    logger.debug('Could not infer repository configuration');
    return { status: 'skipped', message: 'Could not infer repository configuration' };
}
|
|
1088
|
+
/**
 * Infers repository configuration from available sources, trying each
 * source in order and returning the first hit.
 *
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param order - Inference source order
 * @param logger - Logger instance
 * @returns Repository config or null if none found
 * @internal
 */
async function inferRepository(tree, git, projectRoot, order, logger) {
    for (const source of order) {
        const found = await inferFromSource(tree, git, projectRoot, source, logger);
        if (!found) {
            continue;
        }
        logger.debug(`Inferred repository from ${source}: ${found.platform}`);
        return found;
    }
    return null;
}
|
|
1109
|
+
/**
 * Infers repository from a single source.
 *
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param source - Inference source type ('package-json' | 'git-remote')
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
async function inferFromSource(tree, git, projectRoot, source, logger) {
    switch (source) {
        case 'package-json':
            return inferFromPackageJson(tree, projectRoot, logger);
        case 'git-remote':
            return inferFromGitRemote(git, logger);
        default:
            // Unrecognized sources are logged and skipped, not fatal.
            logger.warn(`Unknown inference source: ${source}`);
            return null;
    }
}
|
|
1130
|
+
/**
 * Infers repository from the package.json `repository` field.
 *
 * @param tree - Virtual file system tree
 * @param projectRoot - Path to the project root
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
function inferFromPackageJson(tree, projectRoot, logger) {
    const packageJsonPath = `${projectRoot}/package.json`;
    if (!tree.exists(packageJsonPath)) {
        logger.debug(`package.json not found at ${packageJsonPath}`);
        return null;
    }
    const content = tree.read(packageJsonPath, 'utf-8');
    if (!content) {
        logger.debug('Could not read package.json');
        return null;
    }
    const inferred = inferRepositoryFromPackageJson(content);
    if (inferred) {
        logger.debug(`Found repository in package.json: ${inferred.baseUrl}`);
    }
    return inferred;
}
|
|
1156
|
+
/**
 * Infers repository from the `origin` git remote URL.
 *
 * @param git - Git client instance
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
async function inferFromGitRemote(git, logger) {
    const remoteUrl = await git.getRemoteUrl('origin');
    if (!remoteUrl) {
        logger.debug('Could not get git remote URL');
        return null;
    }
    const inferred = createRepositoryConfigFromUrl(remoteUrl);
    if (inferred) {
        logger.debug(`Inferred repository from git remote: ${inferred.baseUrl}`);
    }
    return inferred;
}
|
|
1176
|
+
|
|
1177
|
+
/**
 * Safe copy of the Set built-in, exposed as a factory function.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * provides a factory that uses Reflect.construct internally. References are
 * captured at module initialization time to protect against prototype
 * pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/set
 */
// Captured once at module initialization time.
const _Set = globalThis.Set;
const _Reflect$1 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Set using the captured Set constructor.
 * Use this instead of `new Set()`.
 *
 * @param iterable - Optional iterable of values.
 * @returns A new Set instance.
 */
const createSet = (iterable) => {
    // A falsy iterable means "construct empty", matching `new Set()`.
    const ctorArgs = iterable ? [iterable] : [];
    return _Reflect$1.construct(_Set, ctorArgs);
};
|
|
1199
|
+
|
|
1200
|
+
/**
 * Safe copies of Object built-in methods.
 *
 * These references are captured at module initialization time to protect
 * against prototype pollution attacks. Import only what you need for
 * tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/object
 */
// Captured once at module initialization time.
const _Object = globalThis.Object;
// (Safe copies) Pulled off the captured constructor in one destructuring:
// - freeze: prevents adding/removing/modifying properties
// - keys / entries / values: enumerable own-property views
// - defineProperties: adds or reconfigures property descriptors
const { freeze, keys, entries, values, defineProperties } = _Object;
|
|
1231
|
+
|
|
1232
|
+
/**
 * Safe copies of Array built-in static methods.
 *
 * These references are captured at module initialization time to protect
 * against prototype pollution attacks. Import only what you need for
 * tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/array
 */
// Captured once at module initialization time.
const _Array = globalThis.Array;
/**
 * (Safe copy) Determines whether the passed value is an Array.
 */
const { isArray } = _Array;
|
|
1246
|
+
|
|
1247
|
+
/**
 * Safe copies of Console built-in methods.
 *
 * These references are captured at module initialization time to protect
 * against prototype pollution attacks. Import only what you need for
 * tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/console
 */
// Captured once at module initialization time.
const _console = globalThis.console;
/** (Safe copy) Outputs a message to the console. */
const log = _console.log.bind(_console);
/** (Safe copy) Outputs a warning message to the console. */
const warn = _console.warn.bind(_console);
/** (Safe copy) Outputs an error message to the console. */
const error = _console.error.bind(_console);
/** (Safe copy) Outputs an informational message to the console. */
const info = _console.info.bind(_console);
/** (Safe copy) Outputs a debug message to the console. */
const debug = _console.debug.bind(_console);
// The remaining safe copies were tree-shaken by the bundler down to bare
// bind() calls whose results are discarded; the binds are kept (as a loop)
// so the published build's behavior is reproduced exactly.
for (const method of [
    'trace', 'dir', 'table', 'assert', 'clear', 'count', 'countReset',
    'group', 'groupCollapsed', 'groupEnd', 'time', 'timeEnd', 'timeLog',
]) {
    _console[method].bind(_console);
}
|
|
1329
|
+
|
|
1330
|
+
// Classes registered elsewhere (presumably via a registerClass helper — TODO confirm);
// consulted by getType for instanceof checks on plain objects.
const registeredClasses = [];
|
|
1331
|
+
|
|
1332
|
+
/**
 * Returns the data type of the target.
 *
 * Uses the native `typeof` operator, but distinguishes `null`, `array`, and
 * `object`, and — when classes are registered via `registerClass` — reports
 * the registered class name for matching instances.
 *
 * @param target - The target to get the data type of.
 * @returns The data type of the target.
 */
const getType = (target) => {
    if (target === null) {
        return 'null';
    }
    const baseType = typeof target;
    if (baseType !== 'object') {
        return baseType;
    }
    if (isArray(target)) {
        return 'array';
    }
    // First registered class the target is an instance of wins.
    const match = registeredClasses.find((registeredClass) => target instanceof registeredClass);
    return match ? match.name : 'object';
};
|
|
1354
|
+
|
|
1355
|
+
/**
 * Safe copies of the Date built-in via factory function and static methods.
 *
 * Constructors cannot be safely captured via Object.assign, so this module
 * provides a factory that uses Reflect.construct internally. References are
 * captured at module initialization time to protect against prototype
 * pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/date
 */
// Captured once at module initialization time.
const _Date = globalThis.Date;
const _Reflect = globalThis.Reflect;
/**
 * (Safe copy) Constructs a Date through the captured constructor.
 * Use this instead of `new Date(...)`.
 */
function createDate(...constructorArgs) {
    return _Reflect.construct(_Date, constructorArgs);
}
|
|
1372
|
+
|
|
1373
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Creates a wrapper that only executes the wrapped function when the
 * condition function returns true; otherwise the call yields undefined.
 *
 * @param func - The function to be conditionally executed.
 * @param conditionFunc - Returns a boolean deciding if `func` should run.
 * @returns A wrapped version of `func` that executes conditionally.
 */
function createConditionalExecutionFunction(func, conditionFunc) {
    return function (...args) {
        // Condition is re-evaluated on every call.
        return conditionFunc() ? func(...args) : undefined;
    };
}
|
|
1388
|
+
|
|
1389
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Creates a wrapper that silently ignores any error thrown by the wrapped
 * void function. Intended only for functions without a return value;
 * exceptions are swallowed without logging or handling.
 *
 * @param func - The void function to be wrapped.
 * @returns A wrapped version of the input function that ignores errors.
 */
function createErrorIgnoringFunction(func) {
    return function (...callArgs) {
        try {
            func(...callArgs);
        }
        catch {
            // Swallowed by design: the wrapper must never throw.
        }
    };
}
|
|
1408
|
+
|
|
1409
|
+
/* eslint-disable @typescript-eslint/no-unused-vars */
/**
 * A no-operation function (noop): accepts any arguments and does nothing,
 * typed as permissively as possible without the `Function` keyword.
 *
 * @param args - Any arguments passed to the function (ignored)
 */
const noop = (...args) => {
    // Intentionally left blank.
};
|
|
1419
|
+
|
|
1420
|
+
// Accepted log level names; 'none' disables all output (see shouldLog below).
const logLevels = ['none', 'error', 'warn', 'log', 'info', 'debug'];
// Numeric severity per emittable level: higher number = more severe.
// A message is emitted when priority[messageLevel] >= priority[configuredLevel].
const priority = {
    error: 4,
    warn: 3,
    log: 2,
    info: 1,
    debug: 0,
};
|
|
1428
|
+
/**
 * Reports whether `level` is one of the recognised log level names.
 *
 * @param level - Candidate log level string.
 * @returns True when `level` appears in `logLevels`, false otherwise.
 */
function isValidLogLevel(level) {
    return logLevels.some((candidate) => candidate === level);
}
|
|
1437
|
+
/**
 * Creates a log level configuration manager for controlling logging behavior.
 * Provides methods to get, set, and evaluate log levels based on priority.
 *
 * @param level - The initial log level (defaults to 'error')
 * @returns A frozen object exposing getLogLevel, setLogLevel and shouldLog
 * @throws {Error} When the provided level is not a valid log level
 */
function createLogLevelConfig(level = 'error') {
    if (!isValidLogLevel(level)) {
        // BUG FIX: the message previously read "with a valid default log
        // level", but this branch fires precisely when the level is INVALID.
        throw createError('Cannot create log level configuration with an invalid default log level');
    }
    // Mutable holder so the closures below share the current level.
    const state = { level };
    const getLogLevel = () => state.level;
    const setLogLevel = (level) => {
        if (!isValidLogLevel(level)) {
            throw createError(`Cannot set value '${level}' level. Expected levels are ${logLevels}.`);
        }
        state.level = level;
    };
    // A message is loggable when its priority meets or exceeds the configured
    // level's priority; 'none' (configured or requested) always suppresses.
    const shouldLog = (level) => {
        if (state.level === 'none' || level === 'none' || !isValidLogLevel(level)) {
            return false;
        }
        return priority[level] >= priority[state.level];
    };
    return freeze({
        getLogLevel,
        setLogLevel,
        shouldLog,
    });
}
|
|
1469
|
+
|
|
1470
|
+
/**
 * Creates a logger instance with configurable log level filtering.
 * Each supplied log function is wrapped so it only fires when the current
 * level permits, and so that any error it throws is swallowed.
 *
 * @param error - Handler for error-level logs (required)
 * @param warn - Handler for warning-level logs (defaults to noop)
 * @param log - Handler for standard logs (defaults to noop)
 * @param info - Handler for info-level logs (defaults to noop)
 * @param debug - Handler for debug-level logs (defaults to noop)
 * @returns A frozen logger object with log methods and level control
 * @throws {Error} When any provided log function is invalid
 */
function createLogger(error, warn = noop, log = noop, info = noop, debug = noop) {
    // Validate in the original order so the first bad argument is reported.
    const channels = [
        ['error', error],
        ['warn', warn],
        ['log', log],
        ['info', info],
        ['debug', debug],
    ];
    for (const [label, fn] of channels) {
        if (notValidLogFn(fn)) {
            throw createError(notFnMsg(label));
        }
    }
    const { setLogLevel, getLogLevel, shouldLog } = createLogLevelConfig();
    // noop channels are returned as-is; real channels get level gating plus
    // error suppression.
    const wrapLogFn = (fn, level) => {
        if (fn === noop) {
            return fn;
        }
        return createConditionalExecutionFunction(createErrorIgnoringFunction(fn), () => shouldLog(level));
    };
    return freeze({
        error: wrapLogFn(error, 'error'),
        warn: wrapLogFn(warn, 'warn'),
        log: wrapLogFn(log, 'log'),
        info: wrapLogFn(info, 'info'),
        debug: wrapLogFn(debug, 'debug'),
        setLogLevel,
        getLogLevel,
    });
}
|
|
1515
|
+
/**
 * Checks whether a value is NOT usable as a log function.
 *
 * @param fn - The value to validate.
 * @returns True when the value is invalid (not a function), false when valid.
 */
function notValidLogFn(fn) {
    if (fn === noop) {
        return false;
    }
    return getType(fn) !== 'function';
}
|
|
1524
|
+
/**
 * Builds the error message used when a log function argument is invalid.
 *
 * @param label - Name of the offending log channel (e.g. 'warn').
 * @returns The formatted error message.
 */
function notFnMsg(label) {
    const reason = 'is not a function';
    return `Cannot create a logger when ${label} ${reason}`;
}
|
|
1533
|
+
|
|
1534
|
+
// NOTE(review): module-level call whose result is discarded. `error`, `warn`,
// `log`, `info`, `debug` are presumably console bindings defined earlier in
// the bundle — this looks like bundler residue; confirm it is intentional.
createLogger(error, warn, log, info, debug);
|
|
1535
|
+
|
|
1536
|
+
/**
 * Global log level registry.
 * Tracks all created scoped loggers to allow global log level changes.
 */
const loggerRegistry = createSet();
/** Placeholder substituted for values whose key name looks sensitive. */
const REDACTED = '[REDACTED]';
/**
 * Patterns that indicate a sensitive key name.
 * Keys containing these patterns will have their values sanitized.
 * NOTE: matching is case-insensitive and substring-based, so broad patterns
 * like /key/i also match keys such as "monkey" — false positives are the
 * deliberate safe default here.
 */
const SENSITIVE_KEY_PATTERNS = [
    /token/i,
    /key/i,
    /password/i,
    /secret/i,
    /credential/i,
    /auth/i,
    /bearer/i,
    /api[_-]?key/i,
    /private/i,
    /passphrase/i,
];
|
|
1559
|
+
/**
 * Determines whether a key name looks like it holds sensitive data.
 *
 * @param key - Key name to check.
 * @returns True when any sensitive-key pattern matches the name.
 */
function isSensitiveKey(key) {
    for (const pattern of SENSITIVE_KEY_PATTERNS) {
        if (pattern.test(key)) {
            return true;
        }
    }
    return false;
}
|
|
1568
|
+
/**
 * Sanitizes an object by replacing sensitive values with REDACTED.
 * Recursively processes nested objects and arrays.
 *
 * @param obj - Object to sanitize
 * @param seen - Internal: objects on the current recursion path, used to
 *   detect cycles (callers should omit this argument)
 * @returns New object with sensitive values redacted; cyclic back-references
 *   are replaced with the string '[Circular]'
 */
function sanitize(obj, seen = new WeakSet()) {
    if (obj === null || obj === undefined) {
        return obj;
    }
    if (isArray(obj)) {
        // BUG FIX: cyclic structures previously recursed without bound and
        // overflowed the stack. Track the current path and cut cycles.
        if (seen.has(obj)) {
            return '[Circular]';
        }
        seen.add(obj);
        const sanitizedItems = obj.map((item) => sanitize(item, seen));
        // Remove from the path set so shared (non-cyclic) references are
        // still sanitized normally on later visits.
        seen.delete(obj);
        return sanitizedItems;
    }
    if (typeof obj === 'object') {
        if (seen.has(obj)) {
            return '[Circular]';
        }
        seen.add(obj);
        const result = {};
        for (const [key, value] of entries(obj)) {
            if (isSensitiveKey(key)) {
                result[key] = REDACTED;
            }
            else if (typeof value === 'object' && value !== null) {
                result[key] = sanitize(value, seen);
            }
            else {
                result[key] = value;
            }
        }
        seen.delete(obj);
        return result;
    }
    return obj;
}
|
|
1599
|
+
/**
 * Formats a log line as "[namespace] message", appending stringified and
 * sanitized metadata when any is supplied.
 *
 * @param namespace - Logger namespace prefix.
 * @param message - Log message.
 * @param meta - Optional metadata object.
 * @returns The formatted log string.
 */
function formatMessage(namespace, message, meta) {
    const prefix = `[${namespace}]`;
    const hasMeta = Boolean(meta) && keys(meta).length > 0;
    if (!hasMeta) {
        return `${prefix} ${message}`;
    }
    return `${prefix} ${message} ${stringify(sanitize(meta))}`;
}
|
|
1615
|
+
/**
 * Creates a scoped logger with namespace prefix and secret sanitization.
 * All log messages are prefixed with [namespace] and sensitive metadata
 * values are redacted by formatMessage().
 *
 * @param namespace - Logger namespace (e.g., 'project-scope', 'analyze')
 * @param options - Logger configuration options ({ level, sanitizeSecrets })
 * @returns A configured scoped logger instance
 *
 * @example
 * ```typescript
 * const logger = createScopedLogger('project-scope')
 * logger.setLogLevel('debug')
 *
 * // Basic logging
 * logger.info('Starting analysis', { path: './project' })
 *
 * // Sensitive data is automatically redacted
 * logger.debug('Config loaded', { apiKey: 'secret123' })
 * // Output: [project-scope] Config loaded {"apiKey":"[REDACTED]"}
 * ```
 */
function createScopedLogger(namespace, options = {}) {
    const { level = 'error', sanitizeSecrets = true } = options;
    // FIXME(review): `sanitizeSecrets: false` is ineffective because
    // formatMessage() always sanitizes metadata. The previous implementation
    // additionally pre-sanitized here when the flag was true, redacting the
    // same data twice; that redundant pass has been removed (sanitize is
    // idempotent, so formatted output is unchanged in every case).
    const createLogFn = (baseFn) => (message, meta) => {
        baseFn(formatMessage(namespace, message, meta));
    };
    // Create base logger with wrapped functions
    const baseLogger = createLogger(createLogFn(error), createLogFn(warn), createLogFn(log), createLogFn(info), createLogFn(debug));
    // Set initial log level (use global override if set)
    baseLogger.setLogLevel(level);
    const scopedLogger = freeze({
        error: (message, meta) => baseLogger.error(message, meta),
        warn: (message, meta) => baseLogger.warn(message, meta),
        log: (message, meta) => baseLogger.log(message, meta),
        info: (message, meta) => baseLogger.info(message, meta),
        debug: (message, meta) => baseLogger.debug(message, meta),
        setLogLevel: baseLogger.setLogLevel,
        getLogLevel: baseLogger.getLogLevel,
    });
    // Register logger for global level management
    loggerRegistry.add(scopedLogger);
    return scopedLogger;
}
|
|
1661
|
+
/**
 * Default logger instance for the project-scope library.
 * Use this for general logging within the library.
 *
 * @example
 * ```typescript
 * import { logger } from '@hyperfrontend/project-scope/core'
 *
 * logger.setLogLevel('debug')
 * logger.debug('Analyzing project', { path: './src' })
 * ```
 */
// NOTE(review): the return values below are discarded, so the only effect is
// registering entries in loggerRegistry. The doc above suggests a `logger`
// binding existed before bundling — presumably tree-shaken; confirm intended.
createScopedLogger('project-scope');

createScopedLogger('project-scope:fs');
|
|
1676
|
+
/**
 * Create a file system error carrying a category code and context payload.
 *
 * @param message - Human-readable description of the failure.
 * @param code - Category code for this kind of filesystem failure.
 * @param context - Extra context (e.g. path, operation, cause).
 * @returns An Error with enumerable `code` and `context` properties attached.
 */
function createFileSystemError(message, code, context) {
    const fsError = createError(message);
    defineProperties(fsError, {
        code: { value: code, enumerable: true },
        context: { value: context, enumerable: true },
    });
    return fsError;
}
|
|
1692
|
+
/**
 * Read file if it exists, return null otherwise.
 *
 * @param filePath - Path to file
 * @param encoding - File encoding (default: utf-8)
 * @returns File contents or null if the file doesn't exist or can't be read
 */
function readFileIfExists(filePath, encoding = 'utf-8') {
    // A single try/catch covers both the missing-file case and any other read
    // failure. The previous existsSync() pre-check was redundant (the catch
    // already returned null for ENOENT) and left a TOCTOU window between the
    // check and the read.
    try {
        return node_fs.readFileSync(filePath, { encoding });
    }
    catch {
        return null;
    }
}
|
|
1710
|
+
/**
 * Read and parse a JSON file if it exists, return null otherwise.
 *
 * @param filePath - Path to JSON file.
 * @returns Parsed JSON value, or null when the file is missing, unreadable,
 *   or not valid JSON.
 */
function readJsonFileIfExists(filePath) {
    if (!node_fs.existsSync(filePath)) {
        return null;
    }
    try {
        const rawJson = node_fs.readFileSync(filePath, { encoding: 'utf-8' });
        return parse(rawJson);
    }
    catch {
        // Read or parse failure — treat the same as a missing file.
        return null;
    }
}
|
|
1728
|
+
|
|
1729
|
+
// NOTE(review): result discarded — only side effect is registering the scope
// in loggerRegistry; presumably residue of a removed write module.
createScopedLogger('project-scope:fs:write');
|
|
1730
|
+
|
|
1731
|
+
/**
 * Get file stats with error handling.
 *
 * @param filePath - Path to inspect.
 * @param followSymlinks - When true (default) uses statSync (resolves
 *   symlinks); when false uses lstatSync (stats the link itself).
 * @returns Plain-object stat summary, or null if the path doesn't exist or
 *   cannot be stat'ed.
 */
function getFileStat(filePath, followSymlinks = true) {
    if (!node_fs.existsSync(filePath)) {
        return null;
    }
    try {
        const rawStat = followSymlinks
            ? node_fs.statSync(filePath)
            : node_fs.lstatSync(filePath);
        return {
            isFile: rawStat.isFile(),
            isDirectory: rawStat.isDirectory(),
            isSymlink: rawStat.isSymbolicLink(),
            size: rawStat.size,
            created: rawStat.birthtime,
            modified: rawStat.mtime,
            accessed: rawStat.atime,
            mode: rawStat.mode,
        };
    }
    catch {
        // Race (path vanished) or permission failure — report as absent.
        return null;
    }
}
|
|
1759
|
+
/**
 * Check whether a path refers to a directory.
 *
 * @param dirPath - Path to check.
 * @returns True only when the path exists and is a directory.
 */
function isDirectory(dirPath) {
    const stat = getFileStat(dirPath);
    if (stat === null) {
        return false;
    }
    return stat.isDirectory;
}
|
|
1769
|
+
/**
 * Check whether a path exists on disk.
 *
 * @param filePath - Path to check.
 * @returns True if the path exists.
 */
function exists(filePath) {
    return Boolean(node_fs.existsSync(filePath));
}
|
|
1778
|
+
|
|
1779
|
+
const fsDirLogger = createScopedLogger('project-scope:fs:dir');
/**
 * List the immediate contents of a directory.
 *
 * @param dirPath - Absolute or relative directory path.
 * @returns Array of entries, each with name, full path, and type flags.
 * @throws {Error} FS_NOT_FOUND when the path does not exist,
 *   FS_NOT_A_DIRECTORY when it is not a directory, FS_READ_ERROR when the
 *   listing itself fails.
 *
 * @example
 * ```typescript
 * import { readDirectory } from '@hyperfrontend/project-scope'
 *
 * const entries = readDirectory('./src')
 * for (const entry of entries) {
 *   console.log(entry.name, entry.isFile ? 'file' : 'directory')
 * }
 * ```
 */
function readDirectory(dirPath) {
    fsDirLogger.debug('Reading directory', { path: dirPath });
    if (!node_fs.existsSync(dirPath)) {
        fsDirLogger.debug('Directory not found', { path: dirPath });
        throw createFileSystemError(`Directory not found: ${dirPath}`, 'FS_NOT_FOUND', { path: dirPath, operation: 'readdir' });
    }
    if (!isDirectory(dirPath)) {
        fsDirLogger.debug('Path is not a directory', { path: dirPath });
        throw createFileSystemError(`Not a directory: ${dirPath}`, 'FS_NOT_A_DIRECTORY', { path: dirPath, operation: 'readdir' });
    }
    // Convert a fs.Dirent into the library's plain entry shape.
    const toEntryInfo = (dirent) => ({
        name: dirent.name,
        path: node_path.join(dirPath, dirent.name),
        isFile: dirent.isFile(),
        isDirectory: dirent.isDirectory(),
        isSymlink: dirent.isSymbolicLink(),
    });
    try {
        const dirents = node_fs.readdirSync(dirPath, { withFileTypes: true });
        fsDirLogger.debug('Directory read complete', { path: dirPath, entryCount: dirents.length });
        return dirents.map(toEntryInfo);
    }
    catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        fsDirLogger.warn('Failed to read directory', { path: dirPath, error: reason });
        throw createFileSystemError(`Failed to read directory: ${dirPath}`, 'FS_READ_ERROR', {
            path: dirPath,
            operation: 'readdir',
            cause: error,
        });
    }
}
|
|
1827
|
+
|
|
1828
|
+
// NOTE(review): result discarded — registers the traversal scope only.
createScopedLogger('project-scope:fs:traversal');

// Logger used by the package.json discovery/parsing helpers below.
const packageLogger = createScopedLogger('project-scope:project:package');
|
|
1831
|
+
/**
 * Checks that a value is a non-null object whose values are all strings —
 * the shape of dependency maps and script definitions in package.json.
 *
 * @param value - Value to check.
 * @returns True when the value is a record of strings.
 */
function isStringRecord(value) {
    const isObject = typeof value === 'object' && value !== null;
    if (!isObject) {
        return false;
    }
    return values(value).every((entryValue) => typeof entryValue === 'string');
}
|
|
1843
|
+
/**
 * Extracts and normalizes the `workspaces` field from package.json.
 * Accepts either a plain string array or an object with a `packages` array.
 *
 * @param value - Raw workspaces value from package.json.
 * @returns The normalized workspace patterns, or undefined when the value
 *   matches neither supported shape.
 */
function parseWorkspaces(value) {
    const isStringArray = isArray(value) && value.every((v) => typeof v === 'string');
    if (isStringArray) {
        return value;
    }
    if (typeof value !== 'object' || value === null) {
        return undefined;
    }
    const candidate = value;
    return isArray(candidate['packages'])
        ? { packages: candidate['packages'] }
        : undefined;
}
|
|
1862
|
+
/**
 * Validate and normalize package.json data.
 * Known fields are type-checked and coerced to undefined when malformed;
 * unknown fields are passed through untouched.
 *
 * @param data - Raw parsed data
 * @returns Validated package.json
 * @throws {Error} When data is not a non-null object
 */
function validatePackageJson(data) {
    if (typeof data !== 'object' || data === null) {
        throw createError('package.json must be an object');
    }
    const pkg = data;
    // BUG FIX: `...pkg` was previously spread LAST, so every raw field
    // overwrote its validated counterpart and the type checks below were
    // dead code. Spreading first lets the validated/normalized values win
    // while still forwarding unknown fields.
    return {
        ...pkg,
        name: typeof pkg['name'] === 'string' ? pkg['name'] : undefined,
        version: typeof pkg['version'] === 'string' ? pkg['version'] : undefined,
        description: typeof pkg['description'] === 'string' ? pkg['description'] : undefined,
        main: typeof pkg['main'] === 'string' ? pkg['main'] : undefined,
        module: typeof pkg['module'] === 'string' ? pkg['module'] : undefined,
        browser: typeof pkg['browser'] === 'string' ? pkg['browser'] : undefined,
        types: typeof pkg['types'] === 'string' ? pkg['types'] : undefined,
        bin: typeof pkg['bin'] === 'string' || isStringRecord(pkg['bin']) ? pkg['bin'] : undefined,
        scripts: isStringRecord(pkg['scripts']) ? pkg['scripts'] : undefined,
        dependencies: isStringRecord(pkg['dependencies']) ? pkg['dependencies'] : undefined,
        devDependencies: isStringRecord(pkg['devDependencies']) ? pkg['devDependencies'] : undefined,
        peerDependencies: isStringRecord(pkg['peerDependencies']) ? pkg['peerDependencies'] : undefined,
        optionalDependencies: isStringRecord(pkg['optionalDependencies']) ? pkg['optionalDependencies'] : undefined,
        workspaces: parseWorkspaces(pkg['workspaces']),
        exports: typeof pkg['exports'] === 'object' ? pkg['exports'] : undefined,
        engines: isStringRecord(pkg['engines']) ? pkg['engines'] : undefined,
    };
}
|
|
1893
|
+
/**
 * Attempts to read, parse, and validate package.json, returning null on a
 * missing file or parse failure.
 *
 * @param projectPath - Project directory path or a direct path to package.json.
 * @returns Validated package.json object, or null when not found/invalid.
 */
function readPackageJsonIfExists(projectPath) {
    const packageJsonPath = projectPath.endsWith('package.json')
        ? projectPath
        : node_path.join(projectPath, 'package.json');
    const rawContent = readFileIfExists(packageJsonPath);
    if (!rawContent) {
        packageLogger.debug('Package.json not found', { path: packageJsonPath });
        return null;
    }
    try {
        const validated = validatePackageJson(parse(rawContent));
        packageLogger.debug('Package.json loaded', { path: packageJsonPath, name: validated.name });
        return validated;
    }
    catch {
        packageLogger.debug('Failed to parse package.json, returning null', { path: packageJsonPath });
        return null;
    }
}
|
|
1917
|
+
|
|
1918
|
+
// NOTE(review): result discarded — registers the root-detection scope only.
createScopedLogger('project-scope:root');

// Logger used by the NX workspace helpers below.
const nxLogger = createScopedLogger('project-scope:nx');
/**
 * Files indicating NX workspace root (workspace.json is the legacy,
 * pre-project.json format — see getNxWorkspaceInfo below).
 */
const NX_CONFIG_FILES = ['nx.json', 'workspace.json'];
/**
 * NX-specific per-project configuration file (newer NX format).
 */
const NX_PROJECT_FILE = 'project.json';
|
|
1929
|
+
/**
 * Check if a directory is an NX workspace root.
 *
 * @param path - Directory path to check.
 * @returns True if the directory contains nx.json or workspace.json.
 *
 * @example
 * ```typescript
 * import { isNxWorkspace } from '@hyperfrontend/project-scope'
 *
 * if (isNxWorkspace('./my-project')) {
 *   console.log('This is an NX monorepo')
 * }
 * ```
 */
function isNxWorkspace(path) {
    // find() stops at the first marker file that exists, matching the
    // short-circuit behavior of the earlier per-file loop.
    const configFile = NX_CONFIG_FILES.find((candidate) => exists(node_path.join(path, candidate)));
    if (configFile !== undefined) {
        nxLogger.debug('NX workspace detected', { path, configFile });
        return true;
    }
    nxLogger.debug('Not an NX workspace', { path });
    return false;
}
|
|
1954
|
+
/**
 * Check if a directory is an NX project.
 *
 * @param path - Directory path to check.
 * @returns True if the directory contains project.json.
 */
function isNxProject(path) {
    const projectFilePath = node_path.join(path, NX_PROJECT_FILE);
    const isProject = exists(projectFilePath);
    nxLogger.debug('NX project check', { path, isProject });
    return isProject;
}
|
|
1965
|
+
/**
 * Detect the NX version from package.json dependencies.
 * devDependencies is consulted before dependencies.
 *
 * @param workspacePath - Workspace root path.
 * @returns NX version string with any leading semver range operators
 *   (^, ~, >, =, <) stripped, or null when undetectable.
 */
function detectNxVersion(workspacePath) {
    const packageJson = readPackageJsonIfExists(workspacePath);
    if (!packageJson) {
        return null;
    }
    const rawRange = packageJson.devDependencies?.['nx'] ?? packageJson.dependencies?.['nx'];
    if (!rawRange) {
        return null;
    }
    return rawRange.replace(/^[\^~>=<]+/, '');
}
|
|
1982
|
+
/**
 * Check if a workspace is integrated (not standalone).
 * Integrated repos typically define workspaceLayout, namedInputs, or
 * targetDefaults in nx.json.
 *
 * @param nxJson - Parsed nx.json configuration.
 * @returns True when any integrated-repo marker field is present.
 */
function isIntegratedRepo(nxJson) {
    const markers = [nxJson.workspaceLayout, nxJson.namedInputs, nxJson.targetDefaults];
    return markers.some((marker) => marker !== undefined);
}
|
|
1992
|
+
/**
 * Get comprehensive NX workspace information.
 *
 * Reads nx.json when present; otherwise falls back to the legacy
 * workspace.json format, for which a minimal synthetic info object is
 * returned (empty nxJson, default apps/libs layout, isIntegrated forced
 * to true).
 *
 * @param workspacePath - Workspace root path
 * @returns Workspace info or null if not an NX workspace
 */
function getNxWorkspaceInfo(workspacePath) {
    nxLogger.debug('Getting NX workspace info', { workspacePath });
    if (!isNxWorkspace(workspacePath)) {
        return null;
    }
    const nxJson = readJsonFileIfExists(node_path.join(workspacePath, 'nx.json'));
    if (!nxJson) {
        // Check for workspace.json as fallback (older NX)
        const workspaceJson = readJsonFileIfExists(node_path.join(workspacePath, 'workspace.json'));
        if (!workspaceJson) {
            // isNxWorkspace said yes but neither file parsed — treat as not NX.
            nxLogger.debug('No nx.json or workspace.json found', { workspacePath });
            return null;
        }
        nxLogger.debug('Using legacy workspace.json', { workspacePath });
        // Create minimal nx.json from workspace.json.
        // NOTE(review): isIntegrated is hard-coded true for legacy workspaces —
        // presumably intentional, but confirm against NX semantics.
        return {
            root: workspacePath,
            version: detectNxVersion(workspacePath),
            nxJson: {},
            isIntegrated: true,
            workspaceLayout: {
                appsDir: 'apps',
                libsDir: 'libs',
            },
        };
    }
    // Normal path: derive info from nx.json, defaulting the layout dirs.
    const info = {
        root: workspacePath,
        version: detectNxVersion(workspacePath),
        nxJson,
        isIntegrated: isIntegratedRepo(nxJson),
        defaultProject: nxJson.defaultProject,
        workspaceLayout: {
            appsDir: nxJson.workspaceLayout?.appsDir ?? 'apps',
            libsDir: nxJson.workspaceLayout?.libsDir ?? 'libs',
        },
    };
    nxLogger.debug('NX workspace info retrieved', {
        workspacePath,
        version: info.version,
        isIntegrated: info.isIntegrated,
        defaultProject: info.defaultProject,
    });
    return info;
}
|
|
2043
|
+
|
|
2044
|
+
// NOTE(review): result discarded — registers the devkit scope only.
createScopedLogger('project-scope:nx:devkit');

// Logger used by the project-configuration helpers below.
const nxConfigLogger = createScopedLogger('project-scope:nx:config');
|
|
2047
|
+
/**
 * Read project.json for an NX project.
 *
 * @param projectPath - Project directory path.
 * @returns Parsed project.json, or null when the file is missing/invalid.
 */
function readProjectJson(projectPath) {
    const projectJsonPath = node_path.join(projectPath, NX_PROJECT_FILE);
    nxConfigLogger.debug('Reading project.json', { path: projectJsonPath });
    const projectJson = readJsonFileIfExists(projectJsonPath);
    if (projectJson) {
        nxConfigLogger.debug('Project.json loaded', { path: projectJsonPath, name: projectJson.name });
    }
    else {
        nxConfigLogger.debug('Project.json not found', { path: projectJsonPath });
    }
    return projectJson;
}
|
|
2065
|
+
/**
 * Get project configuration from project.json, falling back to the `nx`
 * field of package.json.
 *
 * @param projectPath - Project directory path.
 * @param workspacePath - Workspace root path (for relative root calculation).
 * @returns Project configuration, or null when neither source is available.
 */
function getProjectConfig(projectPath, workspacePath) {
    nxConfigLogger.debug('Getting project config', { projectPath, workspacePath });
    // project.json wins when present.
    const projectJson = readProjectJson(projectPath);
    if (projectJson) {
        nxConfigLogger.debug('Using project.json config', { projectPath, name: projectJson.name });
        const resolvedRoot = projectJson.root ?? node_path.relative(workspacePath, projectPath);
        return { ...projectJson, root: resolvedRoot };
    }
    // Fall back to the `nx` field in package.json (typeof null === 'object'
    // intentionally passes, matching the original condition).
    const packageJson = readPackageJsonIfExists(projectPath);
    const nxField = packageJson ? packageJson['nx'] : undefined;
    if (typeof nxField === 'object') {
        nxConfigLogger.debug('Using package.json nx field', { projectPath, name: packageJson.name });
        return {
            name: packageJson.name,
            root: node_path.relative(workspacePath, projectPath),
            ...nxField,
        };
    }
    nxConfigLogger.debug('No project config found', { projectPath });
    return null;
}
|
|
2097
|
+
/**
 * Recursively scan a directory tree for project.json files, adding any
 * discovered projects to the supplied map.
 *
 * @param dirPath - Directory to scan.
 * @param workspacePath - Workspace root path (for relative roots and names).
 * @param projects - Map of project name -> configuration to populate.
 * @param maxDepth - Maximum recursion depth.
 * @param currentDepth - Current recursion depth (internal).
 */
function scanForProjects(dirPath, workspacePath, projects, maxDepth, currentDepth = 0) {
    if (currentDepth > maxDepth) {
        return;
    }
    // Directories that never contain workspace projects.
    const isSkipped = (entryName) => entryName.startsWith('.') || entryName === 'node_modules' || entryName === 'dist';
    try {
        for (const dirEntry of readDirectory(dirPath)) {
            if (isSkipped(dirEntry.name)) {
                continue;
            }
            const fullPath = node_path.join(dirPath, dirEntry.name);
            if (!dirEntry.isDirectory) {
                continue;
            }
            if (isNxProject(fullPath)) {
                const config = getProjectConfig(fullPath, workspacePath);
                if (config) {
                    // Fall back to a path-derived name when the config has none.
                    const name = config.name || node_path.relative(workspacePath, fullPath).replace(/[\\/]/g, '-');
                    projects.set(name, {
                        ...config,
                        name,
                        root: node_path.relative(workspacePath, fullPath),
                    });
                }
            }
            // Descend regardless of whether this directory was itself a project.
            scanForProjects(fullPath, workspacePath, projects, maxDepth, currentDepth + 1);
        }
    }
    catch {
        // Unreadable directory — best-effort scan, skip silently.
    }
}
|
|
2139
|
+
/**
 * Discover all NX projects in a workspace.
 * Supports both workspace.json (older format) and project.json (newer format).
 *
 * Resolution order: a workspace.json `projects` map short-circuits everything
 * else; otherwise the apps/libs (and optionally packages) directories are
 * scanned for project.json files, and finally the workspace root itself is
 * checked for a standalone project.
 *
 * @param workspacePath - Workspace root path
 * @returns Map of project name to configuration
 */
function discoverNxProjects(workspacePath) {
    const projects = createMap();
    // Check for workspace.json (older NX format)
    const workspaceJson = readJsonFileIfExists(node_path.join(workspacePath, 'workspace.json'));
    if (workspaceJson?.projects) {
        for (const [name, config] of entries(workspaceJson.projects)) {
            if (typeof config === 'string') {
                // Path reference to project directory
                const projectPath = node_path.join(workspacePath, config);
                const projectConfig = getProjectConfig(projectPath, workspacePath);
                if (projectConfig) {
                    projects.set(name, { ...projectConfig, name });
                }
            }
            else if (typeof config === 'object' && config !== null) {
                // Inline config
                projects.set(name, { name, ...config });
            }
        }
        // Legacy map is authoritative — no directory scan in this branch.
        return projects;
    }
    // Scan for project.json files (newer NX format)
    const workspaceInfo = getNxWorkspaceInfo(workspacePath);
    const appsDir = workspaceInfo?.workspaceLayout.appsDir ?? 'apps';
    const libsDir = workspaceInfo?.workspaceLayout.libsDir ?? 'libs';
    const searchDirs = [appsDir, libsDir];
    // Also check packages directory (common in some setups)
    if (exists(node_path.join(workspacePath, 'packages'))) {
        searchDirs.push('packages');
    }
    for (const dir of searchDirs) {
        const dirPath = node_path.join(workspacePath, dir);
        if (exists(dirPath) && isDirectory(dirPath)) {
            try {
                // Depth 3 bounds the scan; deeper nesting is ignored.
                scanForProjects(dirPath, workspacePath, projects, 3);
            }
            catch {
                // Directory not accessible
            }
        }
    }
    // Also check root-level projects (standalone projects in monorepo root)
    if (isNxProject(workspacePath)) {
        const config = readProjectJson(workspacePath);
        if (config) {
            const name = config.name || node_path.basename(workspacePath);
            projects.set(name, {
                ...config,
                name,
                root: '.',
            });
        }
    }
    return projects;
}
|
|
2201
|
+
/**
 * Build a simple project graph from discovered projects.
 * For full graph capabilities, use `@nx/devkit`.
 *
 * Only implicitDependencies are modeled; negative entries (prefixed with '!')
 * are dropped.
 *
 * @param workspacePath - Workspace root path
 * @param projects - Existing configuration map to skip auto-discovery
 * @returns NxProjectGraph with nodes and dependencies
 */
function buildSimpleProjectGraph(workspacePath, projects) {
    const projectMap = projects ?? discoverNxProjects(workspacePath);
    const nodes = {};
    const dependencies = {};
    for (const [name, config] of projectMap) {
        nodes[name] = {
            name,
            // Projects without an explicit type are treated as libraries.
            type: config.projectType ?? 'library',
            data: config,
        };
        const implicit = config.implicitDependencies ?? [];
        // Entries starting with '!' are negative dependencies and are skipped.
        dependencies[name] = implicit
            .filter((dep) => !dep.startsWith('!'))
            .map((dep) => ({ target: dep, type: 'implicit' }));
    }
    return { nodes, dependencies };
}
|
|
2235
|
+
|
|
2236
|
+
/**
 * Creates an empty classification summary.
 *
 * The bySource map carries one zeroed counter per classification source.
 *
 * @returns A new ClassificationSummary with all counts at zero
 */
function createEmptyClassificationSummary() {
    const sources = [
        'direct-scope',
        'direct-file',
        'unscoped-file',
        'indirect-dependency',
        'indirect-infra',
        'unscoped-global',
        'excluded',
    ];
    return {
        total: 0,
        included: 0,
        excluded: 0,
        bySource: Object.fromEntries(sources.map((source) => [source, 0])),
    };
}
|
|
2257
|
+
/**
 * Creates a classified commit.
 *
 * include and preserveScope are derived from the source via
 * isIncludedSource() and shouldPreserveScope().
 *
 * @param commit - The parsed conventional commit
 * @param raw - The raw git commit
 * @param source - How the commit relates to the project
 * @param options - Additional classification options
 * @param options.touchedFiles - Files in the project modified by this commit
 * @param options.dependencyPath - Chain of dependencies leading to indirect inclusion
 * @returns A new ClassifiedCommit object
 */
function createClassifiedCommit(commit, raw, source, options) {
    return {
        commit,
        raw,
        source,
        include: isIncludedSource(source),
        preserveScope: shouldPreserveScope(source),
        touchedFiles: options?.touchedFiles,
        dependencyPath: options?.dependencyPath,
    };
}
|
|
2281
|
+
/**
 * Determines if a source type should be included in changelog.
 *
 * Unknown source values yield undefined (mirrors a non-exhaustive switch).
 *
 * @param source - The commit source type
 * @returns True if commits with this source should be included
 */
function isIncludedSource(source) {
    const includedSources = [
        'direct-scope',
        'direct-file',
        'unscoped-file',
        'indirect-dependency',
        'indirect-infra',
    ];
    if (includedSources.includes(source)) {
        return true;
    }
    if (source === 'unscoped-global' || source === 'excluded') {
        return false;
    }
    return undefined;
}
|
|
2300
|
+
/**
 * Determines if scope should be preserved for a source type.
 *
 * Direct commits omit scope (redundant in project changelog).
 * Indirect commits preserve scope for context.
 * Unknown source values yield undefined (mirrors a non-exhaustive switch).
 *
 * @param source - The commit source type
 * @returns True if scope should be preserved in changelog
 */
function shouldPreserveScope(source) {
    // Scope would be redundant in the project's own changelog.
    if (source === 'direct-scope' || source === 'unscoped-file') {
        return false;
    }
    // Scope provides context for indirect or cross-cutting commits.
    if (source === 'direct-file' || source === 'indirect-dependency' || source === 'indirect-infra') {
        return true;
    }
    // These sources won't be shown at all.
    if (source === 'unscoped-global' || source === 'excluded') {
        return false;
    }
    return undefined;
}
|
|
2323
|
+
|
|
2324
|
+
/**
 * Derives all scope variations that should match a project.
 *
 * Given a project named 'lib-versioning' with package '@hyperfrontend/versioning',
 * this generates variations like:
 * - 'lib-versioning' (full project name)
 * - 'versioning' (without lib- prefix)
 *
 * Duplicates are collapsed; insertion order is preserved.
 *
 * @param options - Project identification options
 * @returns Array of scope strings that match this project
 *
 * @example
 * deriveProjectScopes({ projectName: 'lib-versioning', packageName: '@hyperfrontend/versioning' })
 * // Returns: ['lib-versioning', 'versioning']
 *
 * @example
 * deriveProjectScopes({ projectName: 'app-demo', packageName: 'demo-app' })
 * // Returns: ['app-demo', 'demo']
 */
function deriveProjectScopes(options) {
    const { projectName, packageName, additionalScopes = [], prefixes = DEFAULT_PROJECT_PREFIXES } = options;
    // Candidate order: full name, prefix-stripped variants, package-name
    // variants, then caller-supplied extras (falsy entries dropped).
    const candidates = [
        projectName,
        ...extractPrefixVariations(projectName, prefixes),
        ...(packageName ? extractPackageNameVariations(packageName) : []),
        ...additionalScopes.filter(Boolean),
    ];
    const scopes = createSet();
    for (const candidate of candidates) {
        scopes.add(candidate);
    }
    return [...scopes];
}
|
|
2368
|
+
/**
 * Default project name prefixes that can be stripped for scope matching.
 *
 * Used as the default `prefixes` option of deriveProjectScopes(), so a
 * project named e.g. 'lib-versioning' also matches the commit scope
 * 'versioning'.
 */
const DEFAULT_PROJECT_PREFIXES = ['lib-', 'app-', 'e2e-', 'tool-', 'plugin-', 'feature-', 'package-'];
|
|
2372
|
+
/**
 * Generates scope variations by stripping recognized project prefixes.
 *
 * At most one prefix is stripped (the first that matches); a name that is
 * exactly a prefix yields no variation.
 *
 * @param projectName - The project name to extract variations from
 * @param prefixes - Prefixes to check and strip
 * @returns Array of scope name variations
 */
function extractPrefixVariations(projectName, prefixes) {
    const matchedPrefix = prefixes.find((prefix) => projectName.startsWith(prefix));
    if (matchedPrefix === undefined) {
        return [];
    }
    const stripped = projectName.slice(matchedPrefix.length);
    return stripped ? [stripped] : [];
}
|
|
2392
|
+
/**
 * Extracts scope variations from an npm package name.
 *
 * Scoped packages yield the unscoped part ('@scope/name' -> 'name');
 * unscoped packages yield themselves. A malformed scoped name with no '/'
 * yields no variations.
 *
 * @param packageName - The npm package name (e.g., '@scope/name')
 * @returns Array of name variations
 */
function extractPackageNameVariations(packageName) {
    if (!packageName.startsWith('@')) {
        // Non-scoped package: just use the name.
        return [packageName];
    }
    const slashIndex = packageName.indexOf('/');
    if (slashIndex === -1) {
        return [];
    }
    const unscoped = packageName.slice(slashIndex + 1);
    return unscoped ? [unscoped] : [];
}
|
|
2416
|
+
/**
 * Checks if a commit scope matches any of the project scopes.
 *
 * Comparison is case-insensitive; an empty/missing scope never matches.
 *
 * @param commitScope - The scope from a conventional commit
 * @param projectScopes - Array of scopes that match the project
 * @returns True if the commit scope matches the project
 *
 * @example
 * scopeMatchesProject('versioning', ['lib-versioning', 'versioning']) // true
 * scopeMatchesProject('logging', ['lib-versioning', 'versioning']) // false
 */
function scopeMatchesProject(commitScope, projectScopes) {
    if (!commitScope) {
        return false;
    }
    const target = commitScope.toLowerCase();
    for (const candidate of projectScopes) {
        if (candidate.toLowerCase() === target) {
            return true;
        }
    }
    return false;
}
|
|
2435
|
+
/**
 * Checks if a commit scope should be explicitly excluded.
 *
 * Comparison is case-insensitive; an empty/missing scope is never excluded.
 *
 * @param commitScope - The scope from a conventional commit
 * @param excludeScopes - Array of scopes to exclude
 * @returns True if the scope should be excluded
 */
function scopeIsExcluded(commitScope, excludeScopes) {
    if (!commitScope) {
        return false;
    }
    const needle = commitScope.toLowerCase();
    return excludeScopes.map((scope) => scope.toLowerCase()).includes(needle);
}
|
|
2449
|
+
/**
 * Default scopes to exclude from changelogs.
 *
 * These represent repository-level or infrastructure changes
 * that typically don't belong in individual project changelogs.
 * Used as the default `excludeScopes` in classifyCommit() and
 * createClassificationContext().
 */
const DEFAULT_EXCLUDE_SCOPES = ['release', 'deps', 'workspace', 'root', 'repo', 'ci', 'build'];
|
|
2456
|
+
|
|
2457
|
+
/**
 * Classifies a single commit against a project.
 *
 * Implements the hybrid classification strategy, checked strictly in order:
 * 0. Explicit scope exclusion (always wins)
 * 1. Scope match against project/include scopes (fast path)
 * 2. File touch on project files (validation/catch-all)
 * 3. Dependency touch, hash-verified (indirect)
 * 4. Infrastructure file touch (indirect)
 * 5. Fallback to unscoped-global (no scope) or excluded (foreign scope)
 *
 * @param input - The commit to classify ({ commit, raw })
 * @param context - Classification context with project info
 * @returns Classified commit with source attribution
 *
 * @example
 * const classified = classifyCommit(
 *   { commit: parsedCommit, raw: gitCommit },
 *   { projectScopes: ['versioning'], fileCommitHashes: new Set(['abc123']) }
 * )
 */
function classifyCommit(input, context) {
    const { commit, raw } = input;
    const { projectScopes, fileCommitHashes, dependencyCommitMap, infrastructureCommitHashes, excludeScopes = DEFAULT_EXCLUDE_SCOPES, includeScopes = [], } = context;
    const scope = commit.scope;
    const hasScope = !!scope;
    // includeScopes extends the set of scopes counted as direct matches.
    const allProjectScopes = [...projectScopes, ...includeScopes];
    // First check: Is this scope explicitly excluded? Exclusion wins over
    // every other rule, including a file touch below.
    if (hasScope && scopeIsExcluded(scope, excludeScopes)) {
        return createClassifiedCommit(commit, raw, 'excluded');
    }
    // Priority 1: Scope-based direct match (fast path)
    if (hasScope && scopeMatchesProject(scope, allProjectScopes)) {
        return createClassifiedCommit(commit, raw, 'direct-scope');
    }
    // Priority 2: File-based direct match (validation/catch-all)
    if (fileCommitHashes.has(raw.hash)) {
        // Commit touched project files
        if (hasScope) {
            // Has a scope but it's different - likely a typo or cross-cutting change
            return createClassifiedCommit(commit, raw, 'direct-file');
        }
        // No scope but touched project files
        return createClassifiedCommit(commit, raw, 'unscoped-file');
    }
    // Priority 3: Indirect dependency match (only attempted for scoped
    // commits when a dependency map was supplied; hash-verified inside).
    if (hasScope && dependencyCommitMap) {
        const dependencyPath = findDependencyPath(scope, raw.hash, dependencyCommitMap);
        if (dependencyPath) {
            return createClassifiedCommit(commit, raw, 'indirect-dependency', { dependencyPath });
        }
    }
    // File-based infrastructure match
    if (infrastructureCommitHashes?.has(raw.hash)) {
        return createClassifiedCommit(commit, raw, 'indirect-infra');
    }
    // Fallback: No match found
    if (!hasScope) {
        // Unscoped commit that didn't touch any project files
        return createClassifiedCommit(commit, raw, 'unscoped-global');
    }
    // Scoped commit that doesn't match anything
    return createClassifiedCommit(commit, raw, 'excluded');
}
|
|
2519
|
+
/**
 * Classifies multiple commits against a project.
 *
 * @param commits - Array of commits to classify
 * @param context - Classification context with project info
 * @returns Classification result with all commits and summary
 */
function classifyCommits(commits, context) {
    const allResults = [];
    const included = [];
    const excluded = [];
    // Start per-source counters from the zeroed template.
    const bySource = { ...createEmptyClassificationSummary().bySource };
    for (const input of commits) {
        const result = classifyCommit(input, context);
        allResults.push(result);
        bySource[result.source] += 1;
        // Route into included/excluded buckets by the derived flag.
        (result.include ? included : excluded).push(result);
    }
    return {
        commits: allResults,
        included,
        excluded,
        summary: {
            total: allResults.length,
            included: included.length,
            excluded: excluded.length,
            bySource,
        },
    };
}
|
|
2556
|
+
/**
 * Finds a dependency path for a given scope and commit hash.
 *
 * Verifies both:
 * 1. The scope matches a dependency name (or variation)
 * 2. The commit hash is in that dependency's commit set
 *
 * This prevents false positives from mislabeled commits.
 *
 * @param scope - The commit scope
 * @param hash - The commit hash to verify
 * @param dependencyCommitMap - Map of dependencies to their commit hashes
 * @returns Dependency path if found and hash verified, undefined otherwise
 */
function findDependencyPath(scope, hash, dependencyCommitMap) {
    const wanted = scope.toLowerCase();
    for (const [depName, depHashes] of dependencyCommitMap) {
        // Does the scope name this dependency (under any variation)?
        const nameMatches = getDependencyVariations(depName)
            .some((variation) => variation.toLowerCase() === wanted);
        if (!nameMatches) {
            continue;
        }
        // CRITICAL: the commit must actually have touched this dependency's
        // files - a name match alone is not enough (mislabeled commits).
        if (depHashes.has(hash)) {
            return [depName];
        }
    }
    return undefined;
}
|
|
2585
|
+
/**
 * Generates name variations for a dependency to enable flexible scope matching.
 *
 * @param depName - The dependency project or package name
 * @returns Array of name variations including stripped prefixes
 */
function getDependencyVariations(depName) {
    const variations = [depName];
    // Strip any recognized project prefix (lib-, app-, tool-, ...) so a scope
    // like 'versioning' matches a dependency named 'lib-versioning'.
    // Previously only 'lib-' was handled here, which was inconsistent with
    // deriveProjectScopes()/DEFAULT_PROJECT_PREFIXES; broadening is safe
    // because findDependencyPath() hash-verifies every match.
    variations.push(...extractPrefixVariations(depName, DEFAULT_PROJECT_PREFIXES));
    // Handle scoped package names: @scope/name -> name
    if (depName.startsWith('@')) {
        const slashIndex = depName.indexOf('/');
        if (slashIndex !== -1) {
            variations.push(depName.slice(slashIndex + 1));
        }
    }
    return variations;
}
|
|
2606
|
+
/**
 * Creates a classification context from common inputs.
 *
 * @param projectScopes - Scopes that match the project
 * @param fileCommitHashes - Set of commit hashes that touched project files
 * @param options - Additional context options
 * @param options.dependencyCommitMap - Map of dependency names to commit hashes touching them
 * @param options.infrastructureCommitHashes - Set of commit hashes touching infrastructure paths
 * @param options.excludeScopes - Scopes to explicitly exclude (defaults to DEFAULT_EXCLUDE_SCOPES)
 * @param options.includeScopes - Additional scopes to include as direct matches
 * @returns A ClassificationContext object
 */
function createClassificationContext(projectScopes, fileCommitHashes, options) {
    const { dependencyCommitMap, infrastructureCommitHashes, excludeScopes, includeScopes } = options ?? {};
    return {
        projectScopes,
        fileCommitHashes,
        dependencyCommitMap,
        infrastructureCommitHashes,
        excludeScopes: excludeScopes ?? DEFAULT_EXCLUDE_SCOPES,
        includeScopes,
    };
}
|
|
99
|
-
|
|
100
|
-
const FETCH_REGISTRY_STEP_ID = 'fetch-registry';
|
|
101
2628
|
/**
 * Creates a modified conventional commit with scope handling based on classification.
 *
 * For direct commits, the scope is removed (redundant in project changelog).
 * For indirect commits, the scope is preserved (provides context).
 * When no change is needed the original commit object is returned as-is.
 *
 * @param classified - Commit with classification metadata determining scope display
 * @returns A conventional commit with appropriate scope handling
 */
function toChangelogCommit(classified) {
    const { commit, preserveScope } = classified;
    const stripScope = !preserveScope && Boolean(commit.scope);
    if (!stripScope) {
        // Nothing to strip - hand back the original commit unchanged.
        return commit;
    }
    return {
        ...commit,
        scope: undefined,
        // Rebuild raw so the message text also reflects the removed scope.
        raw: rebuildRawWithoutScope(commit),
    };
}
|
|
2650
|
+
/**
 * Reconstructs a conventional commit message string without the scope portion.
 *
 * The '!' breaking marker is emitted only when the commit is breaking and has
 * no separate breaking description.
 * NOTE(review): footers are appended with a single '\n' each (no blank line
 * before the footer block) - presumably matching how downstream renders raw;
 * confirm against the parser's expectations.
 *
 * @param commit - The conventional commit to rebuild
 * @returns Reconstructed raw message with scope removed
 */
function rebuildRawWithoutScope(commit) {
    const bang = commit.breaking && !commit.breakingDescription ? '!' : '';
    const header = `${commit.type}${bang}: ${commit.subject}`;
    const hasBody = Boolean(commit.body);
    if (!hasBody && commit.footers.length === 0) {
        return header;
    }
    const parts = [header];
    if (hasBody) {
        parts.push(`\n\n${commit.body}`);
    }
    for (const { key, separator, value } of commit.footers) {
        parts.push(`\n${key}${separator}${value}`);
    }
    return parts.join('');
}
|
|
156
2671
|
|
|
157
2672
|
/**
 * Creates a matcher that checks if commit scope matches any of the given scopes.
 *
 * @param scopes - Scopes to match against (case-insensitive)
 * @returns Matcher that returns true if scope matches
 *
 * @example
 * const matcher = scopeMatcher(['ci', 'build', 'tooling'])
 * matcher({ scope: 'CI', ... }) // true
 * matcher({ scope: 'feat', ... }) // false
 */
function scopeMatcher(scopes) {
    // Lowercase once up front; lookups are then O(1) per commit.
    const lookup = createSet(scopes.map((scope) => scope.toLowerCase()));
    return (ctx) => {
        const { scope } = ctx;
        if (!scope) {
            return false;
        }
        return lookup.has(scope.toLowerCase());
    };
}
|
|
2691
|
+
/**
 * Creates a matcher that checks if commit scope starts with any of the given prefixes.
 *
 * @param prefixes - Scope prefixes to match (case-insensitive)
 * @returns Matcher that returns true if scope starts with any prefix
 *
 * @example
 * const matcher = scopePrefixMatcher(['tool-', 'infra-'])
 * matcher({ scope: 'tool-package', ... }) // true
 * matcher({ scope: 'lib-utils', ... }) // false
 */
function scopePrefixMatcher(prefixes) {
    // Lowercase the prefixes once at construction time.
    const loweredPrefixes = prefixes.map((prefix) => prefix.toLowerCase());
    return (ctx) => {
        if (!ctx.scope) {
            return false;
        }
        const scope = ctx.scope.toLowerCase();
        for (const prefix of loweredPrefixes) {
            if (scope.startsWith(prefix)) {
                return true;
            }
        }
        return false;
    };
}
|
|
2711
|
+
/**
|
|
2712
|
+
* Combines matchers with OR logic - returns true if ANY matcher matches.
|
|
2713
|
+
*
|
|
2714
|
+
* @param matchers - Matchers to combine
|
|
2715
|
+
* @returns Combined matcher
|
|
2716
|
+
*
|
|
2717
|
+
* @example
|
|
2718
|
+
* const combined = anyOf(
|
|
2719
|
+
* scopeMatcher(['ci', 'build']),
|
|
2720
|
+
* messageMatcher(['[infra]']),
|
|
2721
|
+
* custom((ctx) => ctx.scope?.startsWith('tool-'))
|
|
2722
|
+
* )
|
|
167
2723
|
*/
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
2724
|
+
function anyOf(...matchers) {
|
|
2725
|
+
return (ctx) => matchers.some((matcher) => matcher(ctx));
|
|
2726
|
+
}
|
|
171
2727
|
/**
 * Matches common CI/CD scopes.
 *
 * Matches: ci, cd, build, pipeline, workflow, actions
 */
const CI_SCOPE_MATCHER = scopeMatcher(['ci', 'cd', 'build', 'pipeline', 'workflow', 'actions']);
/**
 * Matches common tooling/workspace scopes.
 *
 * Matches: tooling, workspace, monorepo, nx, root
 */
const TOOLING_SCOPE_MATCHER = scopeMatcher(['tooling', 'workspace', 'monorepo', 'nx', 'root']);
/**
 * Matches tool-prefixed scopes (e.g., tool-package, tool-scripts).
 */
const TOOL_PREFIX_MATCHER = scopePrefixMatcher(['tool-']);
/**
 * Combined matcher for common infrastructure patterns.
 *
 * Combines CI, tooling, and tool-prefix matchers.
 */
// NOTE(review): the result of this expression is discarded - this looks like
// generated bundle output where the exported binding was dropped during
// tree-shaking. As written it is a no-op; confirm whether the combined
// matcher should be bound to a name and exported.
anyOf(CI_SCOPE_MATCHER, TOOLING_SCOPE_MATCHER, TOOL_PREFIX_MATCHER);
|
|
2749
|
+
/**
 * Builds a combined matcher from infrastructure configuration.
 *
 * Combines scope-based matching with any custom matcher using OR logic.
 * Path-based matching is handled separately via git queries.
 *
 * @param config - Infrastructure configuration
 * @returns Combined matcher, or null if no matchers configured
 *
 * @example
 * const matcher = buildInfrastructureMatcher({
 *   scopes: ['ci', 'build'],
 *   matcher: (ctx) => ctx.scope?.startsWith('tool-')
 * })
 */
function buildInfrastructureMatcher(config) {
    const matchers = [];
    // Scope-based matcher, only when at least one scope was configured.
    if (config.scopes?.length) {
        matchers.push(scopeMatcher(config.scopes));
    }
    // Caller-supplied custom matcher, if any.
    if (config.matcher) {
        matchers.push(config.matcher);
    }
    switch (matchers.length) {
        case 0:
            return null;
        case 1:
            // Single matcher needs no combinator.
            return matchers[0];
        default:
            return anyOf(...matchers);
    }
}
|
|
2783
|
+
/**
 * Creates match context from a git commit.
 *
 * @param commit - Git commit to create context for
 * @param scope - Pre-parsed scope (optional, saves re-parsing)
 * @returns Match context for use with matchers
 */
function createMatchContext(commit, scope) {
    const { subject, message } = commit;
    return { commit, scope, subject, message };
}
|
|
180
2800
|
|
|
181
2801
|
/**
|
|
182
2802
|
* Replaces all occurrences of a character in a string.
|
|
@@ -602,72 +3222,158 @@ function splitLines(message) {
|
|
|
602
3222
|
return lines;
|
|
603
3223
|
}
|
|
604
3224
|
|
|
3225
|
+
/**
 * Default changelog filename.
 */
const DEFAULT_CHANGELOG_FILENAME = 'CHANGELOG.md';
/**
 * Default scope filtering configuration.
 *
 * Uses DEFAULT_EXCLUDE_SCOPES from commits/classify to ensure consistency
 * between flow-level filtering and commit classification.
 */
const DEFAULT_SCOPE_FILTERING_CONFIG = {
    // Hybrid strategy: scope match first, file/dependency checks as fallback.
    strategy: 'hybrid',
    // No extra direct-match scopes by default.
    includeScopes: [],
    // Repository-level scopes filtered out of project changelogs.
    excludeScopes: DEFAULT_EXCLUDE_SCOPES,
    // Indirect dependency attribution is opt-in.
    trackDependencyChanges: false,
    // Prefixes stripped when deriving project scope variations.
    projectPrefixes: DEFAULT_PROJECT_PREFIXES,
    // No infrastructure paths or matcher configured by default.
    infrastructure: undefined,
    infrastructureMatcher: undefined,
};
|
|
3244
|
+
|
|
605
3245
|
// Stable identifier for the analyze-commits flow step.
const ANALYZE_COMMITS_STEP_ID = 'analyze-commits';
|
|
606
3246
|
/**
|
|
607
3247
|
* Creates the analyze-commits step.
|
|
608
3248
|
*
|
|
609
3249
|
* This step:
|
|
610
|
-
* 1.
|
|
611
|
-
* 2.
|
|
612
|
-
* 3.
|
|
613
|
-
* 4.
|
|
3250
|
+
* 1. Uses publishedCommit from npm registry (set by fetch-registry step)
|
|
3251
|
+
* 2. Verifies the commit is reachable from current HEAD
|
|
3252
|
+
* 3. Gets all commits since that commit (or recent commits if first release/fallback)
|
|
3253
|
+
* 4. Parses each commit using conventional commit format
|
|
3254
|
+
* 5. Classifies commits based on scope filtering strategy
|
|
3255
|
+
* 6. Filters to only release-worthy commits that belong to this project
|
|
614
3256
|
*
|
|
615
3257
|
* State updates:
|
|
616
|
-
* -
|
|
617
|
-
* - commits: Array of parsed conventional commits
|
|
3258
|
+
* - effectiveBaseCommit: The verified base commit (null if fallback was used)
|
|
3259
|
+
* - commits: Array of parsed conventional commits (for backward compatibility)
|
|
3260
|
+
* - classificationResult: Full classification result with source attribution
|
|
618
3261
|
*
|
|
619
3262
|
* @returns A FlowStep that analyzes commits
|
|
620
3263
|
*/
|
|
621
3264
|
function createAnalyzeCommitsStep() {
|
|
622
3265
|
return createStep(ANALYZE_COMMITS_STEP_ID, 'Analyze Commits', async (ctx) => {
|
|
623
|
-
const { git, projectName, packageName, config, logger, state } = ctx;
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
3266
|
+
const { git, projectName, projectRoot, packageName, workspaceRoot, config, logger, state } = ctx;
|
|
3267
|
+
const maxFallback = config.maxCommitFallback ?? 500;
|
|
3268
|
+
// Use publishedCommit from registry (set by fetch-registry step)
|
|
3269
|
+
const { publishedCommit, isFirstRelease } = state;
|
|
3270
|
+
let rawCommits;
|
|
3271
|
+
let effectiveBaseCommit = null;
|
|
3272
|
+
if (publishedCommit && !isFirstRelease) {
|
|
3273
|
+
// CRITICAL: Verify the commit exists and is reachable from HEAD
|
|
3274
|
+
if (git.commitReachableFromHead(publishedCommit)) {
|
|
3275
|
+
rawCommits = git.getCommitsSince(publishedCommit);
|
|
3276
|
+
effectiveBaseCommit = publishedCommit;
|
|
3277
|
+
logger.debug(`Found ${rawCommits.length} commits since ${publishedCommit.slice(0, 7)}`);
|
|
633
3278
|
}
|
|
634
3279
|
else {
|
|
635
|
-
//
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
3280
|
+
// GRACEFUL DEGRADATION: Commit not in history (rebase/force push occurred)
|
|
3281
|
+
logger.warn(`Published commit ${publishedCommit.slice(0, 7)} not found in history. ` +
|
|
3282
|
+
`This may indicate a rebase or force push occurred after publishing v${state.publishedVersion}. ` +
|
|
3283
|
+
`Falling back to recent commit analysis.`);
|
|
3284
|
+
rawCommits = git.getCommitLog({ maxCount: maxFallback });
|
|
3285
|
+
// effectiveBaseCommit stays null - no compare URL will be generated
|
|
641
3286
|
}
|
|
642
3287
|
}
|
|
643
|
-
// Get commits
|
|
644
|
-
let rawCommits;
|
|
645
|
-
if (lastReleaseTag) {
|
|
646
|
-
rawCommits = git.getCommitsSince(lastReleaseTag);
|
|
647
|
-
logger.debug(`Found ${rawCommits.length} commits since ${lastReleaseTag}`);
|
|
648
|
-
}
|
|
649
3288
|
else {
|
|
650
|
-
// First release
|
|
651
|
-
rawCommits = git.getCommitLog({ maxCount:
|
|
3289
|
+
// First release or no published version
|
|
3290
|
+
rawCommits = git.getCommitLog({ maxCount: maxFallback });
|
|
652
3291
|
logger.debug(`First release - analyzing up to ${rawCommits.length} commits`);
|
|
653
3292
|
}
|
|
654
|
-
//
|
|
655
|
-
const
|
|
3293
|
+
// Get scope filtering configuration
|
|
3294
|
+
const scopeFilteringConfig = {
|
|
3295
|
+
...DEFAULT_SCOPE_FILTERING_CONFIG,
|
|
3296
|
+
...config.scopeFiltering,
|
|
3297
|
+
};
|
|
3298
|
+
const strategy = resolveStrategy(scopeFilteringConfig.strategy ?? 'hybrid', rawCommits);
|
|
3299
|
+
// Parse commits with conventional commit format
|
|
656
3300
|
const releaseTypes = config.releaseTypes ?? ['feat', 'fix', 'perf', 'revert'];
|
|
3301
|
+
const parsedCommits = [];
|
|
657
3302
|
for (const rawCommit of rawCommits) {
|
|
658
3303
|
const parsed = parseConventionalCommit(rawCommit.message);
|
|
659
3304
|
if (parsed.type && releaseTypes.includes(parsed.type)) {
|
|
660
|
-
|
|
3305
|
+
parsedCommits.push({
|
|
3306
|
+
commit: parsed,
|
|
3307
|
+
raw: {
|
|
3308
|
+
hash: rawCommit.hash,
|
|
3309
|
+
shortHash: rawCommit.hash.slice(0, 7),
|
|
3310
|
+
message: rawCommit.message,
|
|
3311
|
+
subject: parsed.subject ?? rawCommit.message.split('\n')[0],
|
|
3312
|
+
body: parsed.body ?? '',
|
|
3313
|
+
authorName: '',
|
|
3314
|
+
authorEmail: '',
|
|
3315
|
+
authorDate: '',
|
|
3316
|
+
committerName: '',
|
|
3317
|
+
committerEmail: '',
|
|
3318
|
+
commitDate: '',
|
|
3319
|
+
parents: [],
|
|
3320
|
+
refs: [],
|
|
3321
|
+
},
|
|
3322
|
+
});
|
|
661
3323
|
}
|
|
662
3324
|
}
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
3325
|
+
// Build file commit hashes for hybrid/file-only strategies
|
|
3326
|
+
let fileCommitHashes = createSet();
|
|
3327
|
+
if (strategy === 'hybrid' || strategy === 'file-only') {
|
|
3328
|
+
// Get commits that touched project files using path filter
|
|
3329
|
+
const relativePath = getRelativePath(workspaceRoot, projectRoot);
|
|
3330
|
+
const pathFilteredCommits = effectiveBaseCommit
|
|
3331
|
+
? git.getCommitsSince(effectiveBaseCommit, { path: relativePath })
|
|
3332
|
+
: git.getCommitLog({ maxCount: maxFallback, path: relativePath });
|
|
3333
|
+
fileCommitHashes = createSet(pathFilteredCommits.map((c) => c.hash));
|
|
3334
|
+
logger.debug(`Found ${fileCommitHashes.size} commits touching ${relativePath}`);
|
|
3335
|
+
}
|
|
3336
|
+
// Derive project scopes
|
|
3337
|
+
const projectScopes = deriveProjectScopes({
|
|
3338
|
+
projectName,
|
|
3339
|
+
packageName,
|
|
3340
|
+
additionalScopes: scopeFilteringConfig.includeScopes,
|
|
3341
|
+
prefixes: scopeFilteringConfig.projectPrefixes,
|
|
3342
|
+
});
|
|
3343
|
+
logger.debug(`Project scopes: ${projectScopes.join(', ')}`);
|
|
3344
|
+
// Build infrastructure commit hashes for file-based infrastructure detection
|
|
3345
|
+
const infrastructureCommitHashes = buildInfrastructureCommitHashes(git, effectiveBaseCommit, rawCommits, parsedCommits, scopeFilteringConfig, logger, maxFallback);
|
|
3346
|
+
// Build dependency commit map if tracking is enabled (Phase 4)
|
|
3347
|
+
let dependencyCommitMap;
|
|
3348
|
+
if (scopeFilteringConfig.trackDependencyChanges) {
|
|
3349
|
+
dependencyCommitMap = buildDependencyCommitMap(git, workspaceRoot, projectName, effectiveBaseCommit, logger, maxFallback);
|
|
3350
|
+
}
|
|
3351
|
+
// Create classification context
|
|
3352
|
+
const classificationContext = createClassificationContext(projectScopes, fileCommitHashes, {
|
|
3353
|
+
excludeScopes: scopeFilteringConfig.excludeScopes,
|
|
3354
|
+
includeScopes: scopeFilteringConfig.includeScopes,
|
|
3355
|
+
infrastructureCommitHashes,
|
|
3356
|
+
dependencyCommitMap,
|
|
3357
|
+
});
|
|
3358
|
+
// Classify commits
|
|
3359
|
+
const classificationResult = classifyCommits(parsedCommits, classificationContext);
|
|
3360
|
+
// Apply strategy-specific filtering
|
|
3361
|
+
const includedCommits = applyStrategyFilter(classificationResult.included, strategy);
|
|
3362
|
+
// Extract conventional commits for backward compatibility
|
|
3363
|
+
// Use toChangelogCommit to properly handle scope based on classification
|
|
3364
|
+
const commits = includedCommits.map((c) => toChangelogCommit(c));
|
|
3365
|
+
// Build message with classification summary
|
|
3366
|
+
const { summary } = classificationResult;
|
|
3367
|
+
const message = buildSummaryMessage(commits.length, rawCommits.length, summary, strategy);
|
|
3368
|
+
logger.debug(`Classification breakdown: direct-scope=${summary.bySource['direct-scope']}, ` +
|
|
3369
|
+
`direct-file=${summary.bySource['direct-file']}, unscoped-file=${summary.bySource['unscoped-file']}, ` +
|
|
3370
|
+
`excluded=${summary.bySource['excluded']}`);
|
|
666
3371
|
return {
|
|
667
3372
|
status: 'success',
|
|
668
3373
|
stateUpdates: {
|
|
669
|
-
|
|
3374
|
+
effectiveBaseCommit,
|
|
670
3375
|
commits,
|
|
3376
|
+
classificationResult,
|
|
671
3377
|
},
|
|
672
3378
|
message,
|
|
673
3379
|
};
|
|
@@ -675,6 +3381,376 @@ function createAnalyzeCommitsStep() {
|
|
|
675
3381
|
dependsOn: ['fetch-registry'],
|
|
676
3382
|
});
|
|
677
3383
|
}
|
|
3384
|
+
/**
 * Resolves the filtering strategy, handling 'inferred' by analyzing commits.
 *
 * @param strategy - The configured scope filtering strategy
 * @param commits - The commits to analyze for strategy inference
 * @returns The resolved strategy (never 'inferred')
 */
function resolveStrategy(strategy, commits) {
    if (strategy !== 'inferred') {
        return strategy;
    }
    // Infer the strategy from history: measure how many commits in the
    // analyzed range carry a conventional-commit scope.
    const scopedCount = commits.reduce((count, commit) => {
        const { scope } = parseConventionalCommit(commit.message);
        return scope ? count + 1 : count;
    }, 0);
    const scopeRatio = commits.length === 0 ? 0 : scopedCount / commits.length;
    // >70% scoped -> scope-only is viable; <30% scoped -> file-only works
    // better; anything in between falls back to hybrid.
    if (scopeRatio > 0.7) {
        return 'scope-only';
    }
    if (scopeRatio < 0.3) {
        return 'file-only';
    }
    return 'hybrid';
}
|
|
3416
|
+
/**
 * Applies strategy-specific filtering to classified commits.
 *
 * @param commits - The classified commits to filter
 * @param strategy - The resolved filtering strategy to apply
 * @returns Filtered commits based on the strategy
 */
function applyStrategyFilter(commits, strategy) {
    if (strategy === 'scope-only') {
        // Keep only commits attributed through an explicit matching scope.
        return commits.filter((commit) => commit.source === 'direct-scope');
    }
    if (strategy === 'file-only') {
        // Keep only commits attributed through touched files.
        return commits.filter((commit) => commit.source === 'direct-file' || commit.source === 'unscoped-file');
    }
    // 'hybrid' (and any unrecognized strategy): keep everything —
    // exclusions were already applied during classification.
    return commits;
}
|
|
3437
|
+
/**
 * Gets the relative path from workspace root to project root.
 *
 * @param workspaceRoot - The absolute path to the workspace root
 * @param projectRoot - The absolute path to the project root
 * @returns The relative path from workspace to project, '' when they are the
 *   same path, or projectRoot unchanged when it is not under workspaceRoot
 */
function getRelativePath(workspaceRoot, projectRoot) {
    if (projectRoot === workspaceRoot) {
        return '';
    }
    // Only strip the prefix at a path-segment boundary: a plain startsWith
    // check would let workspaceRoot '/ws/app' wrongly match projectRoot
    // '/ws/app-extra' and return '-extra'.
    const prefix = workspaceRoot.endsWith('/') ? workspaceRoot : `${workspaceRoot}/`;
    if (projectRoot.startsWith(prefix)) {
        return projectRoot.slice(prefix.length);
    }
    return projectRoot;
}
|
|
3450
|
+
/**
 * Builds a summary message for the step result.
 *
 * @param includedCount - Number of commits included in the release
 * @param totalCount - Total number of commits analyzed
 * @param summary - Classification summary object (currently unused by the
 *   message body; kept for interface stability)
 * @param summary.bySource - Count of commits by source type
 * @param strategy - The filtering strategy used
 * @returns A human-readable summary message
 */
function buildSummaryMessage(includedCount, totalCount, summary, strategy) {
    if (includedCount === 0) {
        return `No releasable commits found for this project (${totalCount} total, strategy: ${strategy})`;
    }
    // Use the same "(N total, strategy: X)" suffix as the zero-count branch;
    // previously the parts were joined with a bare space, dropping the comma.
    return `Found ${includedCount} releasable commits (${totalCount} total, strategy: ${strategy})`;
}
|
|
3467
|
+
/**
 * Builds a set of commit hashes that touched infrastructure paths or match infrastructure criteria.
 *
 * Supports multiple detection methods combined with OR logic:
 * 1. Path-based: Commits touching configured infrastructure paths (via git)
 * 2. Scope-based: Commits with scopes matching infrastructure.scopes
 * 3. Custom matcher: User-provided matching logic
 *
 * @param git - Git client for querying commits by path
 * @param baseCommit - Base commit hash for commit range (null for first release/fallback)
 * @param rawCommits - All raw commits being analyzed
 * @param parsedCommits - Parsed commits with conventional commit data
 * @param config - Scope filtering configuration
 * @param logger - Logger with debug method for output
 * @param logger.debug - Debug logging function
 * @param maxFallback - Maximum commits to query when baseCommit is null
 * @returns Set of commit hashes classified as infrastructure, or undefined when
 *   no infrastructure detection is configured at all
 */
function buildInfrastructureCommitHashes(git, baseCommit, rawCommits, parsedCommits, config, logger, maxFallback) {
    // Collect all infrastructure commit hashes
    // NOTE: createSet appears to return an immutable set — add() yields a new
    // set, hence the reassignment pattern below.
    let infraHashes = createSet();
    // Method 1: Path-based detection (query git for commits touching infra paths)
    const infraPaths = config.infrastructure?.paths ?? [];
    if (infraPaths.length > 0) {
        for (const infraPath of infraPaths) {
            // Without a base commit (first release / fallback) cap the query at maxFallback.
            const pathCommits = baseCommit
                ? git.getCommitsSince(baseCommit, { path: infraPath })
                : git.getCommitLog({ maxCount: maxFallback, path: infraPath });
            for (const commit of pathCommits) {
                infraHashes = infraHashes.add(commit.hash);
            }
        }
        logger.debug(`Found ${infraHashes.size} commits touching infrastructure paths: ${infraPaths.join(', ')}`);
    }
    // Method 2 & 3: Scope-based and custom matcher detection
    // Build a combined matcher from infrastructure config and/or custom matcher
    const configMatcher = config.infrastructure ? buildInfrastructureMatcher(config.infrastructure) : null;
    const customMatcher = config.infrastructureMatcher;
    const combinedMatcher = combineMatcher(configMatcher, customMatcher);
    if (combinedMatcher) {
        // Build a lookup for parsed commits by hash
        let parsedByHash = createMap();
        for (const parsed of parsedCommits) {
            parsedByHash = parsedByHash.set(parsed.raw.hash, parsed);
        }
        // Evaluate each raw commit against the matcher
        for (const rawCommit of rawCommits) {
            // Skip if already matched by path
            if (infraHashes.has(rawCommit.hash))
                continue;
            // Get parsed scope if available
            const parsed = parsedByHash.get(rawCommit.hash);
            const scope = parsed?.commit.scope;
            // Create match context and evaluate
            const context = createMatchContext(rawCommit, scope);
            if (combinedMatcher(context)) {
                infraHashes = infraHashes.add(rawCommit.hash);
            }
        }
        logger.debug(`Infrastructure matcher found ${infraHashes.size} total commits`);
    }
    // Return undefined if no infrastructure detection configured —
    // distinguishes "not configured" from "configured but matched nothing".
    if (infraHashes.size === 0 && infraPaths.length === 0 && !combinedMatcher) {
        return undefined;
    }
    return infraHashes;
}
|
|
3534
|
+
/**
 * Combines two optional matchers into one using OR logic.
 *
 * @param a - First matcher (may be null)
 * @param b - Second matcher (may be undefined)
 * @returns Combined matcher or null if neither provided
 */
function combineMatcher(a, b) {
    if (!a) {
        return b ?? null;
    }
    if (!b) {
        return a;
    }
    // Both provided: short-circuit OR across the two matchers.
    return (ctx) => a(ctx) || b(ctx);
}
|
|
3547
|
+
/**
 * Builds a map of dependency project names to the commit hashes that touched them.
 *
 * This enables accurate indirect-dependency classification by verifying that:
 * 1. A commit's scope matches a dependency name
 * 2. The commit actually touched that dependency's files (hash in set)
 *
 * Uses lib-project-scope for dependency discovery, avoiding hard NX dependency.
 *
 * @param git - Git client for querying commits by path
 * @param workspaceRoot - Absolute path to workspace root
 * @param projectName - Name of the project being versioned
 * @param baseCommit - Base commit hash for commit range (null for first release/fallback)
 * @param logger - Logger with debug method for output
 * @param logger.debug - Debug logging function
 * @param maxFallback - Maximum commits to query when baseCommit is null
 * @returns Map of dependency names to commit hashes touching that dependency
 *   (empty when the project has no dependencies or discovery fails)
 */
function buildDependencyCommitMap(git, workspaceRoot, projectName, baseCommit, logger, maxFallback) {
    // NOTE: createMap appears to return an immutable map — set() yields a new
    // map, hence the reassignment pattern below.
    let dependencyMap = createMap();
    try {
        // Discover all projects in workspace using lib-project-scope
        // This gracefully handles NX and non-NX workspaces
        const projects = discoverNxProjects(workspaceRoot);
        const projectGraph = buildSimpleProjectGraph(workspaceRoot, projects);
        // Get dependencies for the current project
        const projectDeps = projectGraph.dependencies[projectName] ?? [];
        if (projectDeps.length === 0) {
            logger.debug(`No dependencies found for project: ${projectName}`);
            return dependencyMap;
        }
        logger.debug(`Found ${projectDeps.length} dependencies for ${projectName}: ${projectDeps.map((d) => d.target).join(', ')}`);
        // For each dependency, find commits that touched its files
        for (const dep of projectDeps) {
            const depNode = projectGraph.nodes[dep.target];
            if (!depNode?.data?.root) {
                logger.debug(`Skipping dependency ${dep.target}: no root path found`);
                continue;
            }
            const depRoot = depNode.data.root;
            // Query git for commits touching this dependency's path
            // (capped at maxFallback when no base commit is available).
            const depCommits = baseCommit
                ? git.getCommitsSince(baseCommit, { path: depRoot })
                : git.getCommitLog({ maxCount: maxFallback, path: depRoot });
            if (depCommits.length > 0) {
                const hashSet = createSet(depCommits.map((c) => c.hash));
                dependencyMap = dependencyMap.set(dep.target, hashSet);
                logger.debug(`Dependency ${dep.target}: ${depCommits.length} commits at ${depRoot}`);
            }
        }
    }
    catch (error) {
        // Graceful degradation: if project discovery fails, return empty map
        // This allows versioning to proceed without dependency tracking
        const message = error instanceof Error ? error.message : String(error);
        logger.debug(`Failed to build dependency map: ${message}`);
    }
    return dependencyMap;
}
|
|
3606
|
+
|
|
3607
|
+
/**
 * Safe copies of Number built-in methods and constants.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/number
 */
// Capture references at module initialization time
const _parseInt = globalThis.parseInt;
const _isNaN = globalThis.isNaN;
// ============================================================================
// Parsing
// ============================================================================
/**
 * (Safe copy) Parses a string and returns an integer.
 */
// NOTE: this binding shadows the global parseInt within this bundle scope;
// call sites in this file pass an explicit radix.
const parseInt = _parseInt;
// ============================================================================
// Global Type Checking (legacy, less strict)
// ============================================================================
/**
 * (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
 */
const globalIsNaN = _isNaN;
|
|
3632
|
+
|
|
3633
|
+
/**
 * Compares two semantic versions.
 *
 * @param a - First version
 * @param b - Second version
 * @returns -1 if a < b, 0 if a == b, 1 if a > b
 *
 * @example
 * compare(parseVersion('1.0.0'), parseVersion('2.0.0')) // -1
 * compare(parseVersion('1.0.0'), parseVersion('1.0.0')) // 0
 * compare(parseVersion('2.0.0'), parseVersion('1.0.0')) // 1
 */
function compare(a, b) {
    // Core version numbers take precedence: major, then minor, then patch.
    for (const key of ['major', 'minor', 'patch']) {
        if (a[key] !== b[key]) {
            return a[key] < b[key] ? -1 : 1;
        }
    }
    // A release always outranks a prerelease of the same core version.
    const aHasPre = a.prerelease.length > 0;
    const bHasPre = b.prerelease.length > 0;
    if (!aHasPre && bHasPre) {
        return 1;
    }
    if (aHasPre && !bHasPre) {
        return -1;
    }
    // Same core, same kind: walk the prerelease identifiers pairwise.
    const maxLen = max(a.prerelease.length, b.prerelease.length);
    for (let i = 0; i < maxLen; i++) {
        const aId = a.prerelease[i];
        const bId = b.prerelease[i];
        // Once the shared prefix ties, the shorter identifier list loses.
        if (aId === undefined) {
            return bId === undefined ? 0 : -1;
        }
        if (bId === undefined) {
            return 1;
        }
        const cmp = compareIdentifiers(aId, bId);
        if (cmp !== 0) {
            return cmp;
        }
    }
    return 0;
}
|
|
3687
|
+
/**
 * Checks if a > b.
 *
 * @param a - First version to compare
 * @param b - Second version to compare
 * @returns True if a is greater than b
 */
function gt(a, b) {
    // compare() returns 1 exactly when a outranks b.
    return compare(a, b) > 0;
}
|
|
3697
|
+
// ============================================================================
// Internal helpers
// ============================================================================
/**
 * Compares two prerelease identifiers.
 * Numeric identifiers have lower precedence than alphanumeric.
 * Numeric identifiers are compared numerically.
 * Alphanumeric identifiers are compared lexically.
 *
 * @param a - First prerelease identifier
 * @param b - Second prerelease identifier
 * @returns -1 if a < b, 0 if equal, 1 if a > b
 */
function compareIdentifiers(a, b) {
    const aNumeric = isNumeric(a);
    const bNumeric = isNumeric(b);
    // Mixed kinds: the numeric identifier always sorts first.
    if (aNumeric !== bNumeric) {
        return aNumeric ? -1 : 1;
    }
    if (aNumeric) {
        // Both numeric: compare by value, not lexically ('2' < '10').
        const left = parseInt(a, 10);
        const right = parseInt(b, 10);
        if (left === right) {
            return 0;
        }
        return left < right ? -1 : 1;
    }
    // Both alphanumeric: plain lexical (code unit) comparison.
    if (a === b) {
        return 0;
    }
    return a < b ? -1 : 1;
}
|
|
3737
|
+
/**
 * Checks if a string consists only of digits.
 *
 * @param str - String to check for numeric content
 * @returns True if string contains only digits
 */
function isNumeric(str) {
    if (str.length === 0) {
        return false;
    }
    // Reject on the first character outside the ASCII digit range.
    for (const ch of str) {
        if (ch < '0' || ch > '9') {
            return false;
        }
    }
    return true;
}
|
|
678
3754
|
|
|
679
3755
|
/**
|
|
680
3756
|
* Converts a SemVer to its canonical string representation.
|
|
@@ -693,32 +3769,6 @@ function format(version) {
|
|
|
693
3769
|
return result;
|
|
694
3770
|
}
|
|
695
3771
|
|
|
696
|
-
/**
|
|
697
|
-
* Safe copies of Number built-in methods and constants.
|
|
698
|
-
*
|
|
699
|
-
* These references are captured at module initialization time to protect against
|
|
700
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
701
|
-
*
|
|
702
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/number
|
|
703
|
-
*/
|
|
704
|
-
// Capture references at module initialization time
|
|
705
|
-
const _parseInt = globalThis.parseInt;
|
|
706
|
-
const _isNaN = globalThis.isNaN;
|
|
707
|
-
// ============================================================================
|
|
708
|
-
// Parsing
|
|
709
|
-
// ============================================================================
|
|
710
|
-
/**
|
|
711
|
-
* (Safe copy) Parses a string and returns an integer.
|
|
712
|
-
*/
|
|
713
|
-
const parseInt = _parseInt;
|
|
714
|
-
// ============================================================================
|
|
715
|
-
// Global Type Checking (legacy, less strict)
|
|
716
|
-
// ============================================================================
|
|
717
|
-
/**
|
|
718
|
-
* (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
|
|
719
|
-
*/
|
|
720
|
-
const globalIsNaN = _isNaN;
|
|
721
|
-
|
|
722
3772
|
/**
|
|
723
3773
|
* Creates a new SemVer object.
|
|
724
3774
|
*
|
|
@@ -1196,7 +4246,7 @@ function createCalculateBumpStep() {
|
|
|
1196
4246
|
message: 'No version bump needed',
|
|
1197
4247
|
};
|
|
1198
4248
|
}
|
|
1199
|
-
//
|
|
4249
|
+
// Parse versions for comparison
|
|
1200
4250
|
const current = parseVersion(currentVersion ?? '0.0.0');
|
|
1201
4251
|
if (!current.success || !current.version) {
|
|
1202
4252
|
return {
|
|
@@ -1205,6 +4255,27 @@ function createCalculateBumpStep() {
|
|
|
1205
4255
|
message: `Could not parse current version: ${currentVersion}`,
|
|
1206
4256
|
};
|
|
1207
4257
|
}
|
|
4258
|
+
const { publishedVersion } = state;
|
|
4259
|
+
const published = parseVersion(publishedVersion ?? '0.0.0');
|
|
4260
|
+
// Detect pending publication state: currentVersion > publishedVersion
|
|
4261
|
+
// This means a previous bump happened but was never published
|
|
4262
|
+
const isPendingPublication = published.success && published.version && publishedVersion != null && gt(current.version, published.version);
|
|
4263
|
+
if (isPendingPublication && published.version) {
|
|
4264
|
+
// ALWAYS calculate from publishedVersion - commits may have changed
|
|
4265
|
+
const next = increment(published.version, bumpType);
|
|
4266
|
+
const nextVersion = format(next);
|
|
4267
|
+
logger.info(`Pending publication detected: recalculating from ${publishedVersion} → ${nextVersion}`);
|
|
4268
|
+
return {
|
|
4269
|
+
status: 'success',
|
|
4270
|
+
stateUpdates: {
|
|
4271
|
+
bumpType,
|
|
4272
|
+
nextVersion,
|
|
4273
|
+
isPendingPublication: true,
|
|
4274
|
+
},
|
|
4275
|
+
message: `${bumpType} bump (pending): ${publishedVersion} → ${nextVersion}`,
|
|
4276
|
+
};
|
|
4277
|
+
}
|
|
4278
|
+
// Normal path: increment from currentVersion
|
|
1208
4279
|
const next = increment(current.version, bumpType);
|
|
1209
4280
|
const nextVersion = format(next);
|
|
1210
4281
|
return {
|
|
@@ -1259,24 +4330,6 @@ function createCheckIdempotencyStep() {
|
|
|
1259
4330
|
});
|
|
1260
4331
|
}
|
|
1261
4332
|
|
|
1262
|
-
/**
|
|
1263
|
-
* Safe copies of Date built-in via factory function and static methods.
|
|
1264
|
-
*
|
|
1265
|
-
* Since constructors cannot be safely captured via Object.assign, this module
|
|
1266
|
-
* provides a factory function that uses Reflect.construct internally.
|
|
1267
|
-
*
|
|
1268
|
-
* These references are captured at module initialization time to protect against
|
|
1269
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1270
|
-
*
|
|
1271
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/date
|
|
1272
|
-
*/
|
|
1273
|
-
// Capture references at module initialization time
|
|
1274
|
-
const _Date = globalThis.Date;
|
|
1275
|
-
const _Reflect$1 = globalThis.Reflect;
|
|
1276
|
-
function createDate(...args) {
|
|
1277
|
-
return _Reflect$1.construct(_Date, args);
|
|
1278
|
-
}
|
|
1279
|
-
|
|
1280
4333
|
/**
|
|
1281
4334
|
* Creates a new changelog item.
|
|
1282
4335
|
*
|
|
@@ -1291,6 +4344,8 @@ function createChangelogItem(description, options) {
|
|
|
1291
4344
|
commits: options?.commits ?? [],
|
|
1292
4345
|
references: options?.references ?? [],
|
|
1293
4346
|
breaking: options?.breaking ?? false,
|
|
4347
|
+
source: options?.source,
|
|
4348
|
+
indirect: options?.indirect,
|
|
1294
4349
|
};
|
|
1295
4350
|
}
|
|
1296
4351
|
/**
|
|
@@ -1414,96 +4469,6 @@ function getSectionType(heading) {
|
|
|
1414
4469
|
return SECTION_TYPE_MAP[normalized] ?? 'other';
|
|
1415
4470
|
}
|
|
1416
4471
|
|
|
1417
|
-
/**
|
|
1418
|
-
* Safe copies of Map built-in via factory function.
|
|
1419
|
-
*
|
|
1420
|
-
* Since constructors cannot be safely captured via Object.assign, this module
|
|
1421
|
-
* provides a factory function that uses Reflect.construct internally.
|
|
1422
|
-
*
|
|
1423
|
-
* These references are captured at module initialization time to protect against
|
|
1424
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1425
|
-
*
|
|
1426
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/map
|
|
1427
|
-
*/
|
|
1428
|
-
// Capture references at module initialization time
|
|
1429
|
-
const _Map = globalThis.Map;
|
|
1430
|
-
const _Reflect = globalThis.Reflect;
|
|
1431
|
-
/**
|
|
1432
|
-
* (Safe copy) Creates a new Map using the captured Map constructor.
|
|
1433
|
-
* Use this instead of `new Map()`.
|
|
1434
|
-
*
|
|
1435
|
-
* @param iterable - Optional iterable of key-value pairs.
|
|
1436
|
-
* @returns A new Map instance.
|
|
1437
|
-
*/
|
|
1438
|
-
const createMap = (iterable) => _Reflect.construct(_Map, iterable ? [iterable] : []);
|
|
1439
|
-
|
|
1440
|
-
/**
|
|
1441
|
-
* Safe copies of Object built-in methods.
|
|
1442
|
-
*
|
|
1443
|
-
* These references are captured at module initialization time to protect against
|
|
1444
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1445
|
-
*
|
|
1446
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/object
|
|
1447
|
-
*/
|
|
1448
|
-
// Capture references at module initialization time
|
|
1449
|
-
const _Object = globalThis.Object;
|
|
1450
|
-
/**
|
|
1451
|
-
* (Safe copy) Returns an array of key/values of the enumerable own properties of an object.
|
|
1452
|
-
*/
|
|
1453
|
-
const entries = _Object.entries;
|
|
1454
|
-
|
|
1455
|
-
/**
|
|
1456
|
-
* Safe copies of URL built-ins via factory functions.
|
|
1457
|
-
*
|
|
1458
|
-
* Provides safe references to URL and URLSearchParams.
|
|
1459
|
-
* These references are captured at module initialization time to protect against
|
|
1460
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1461
|
-
*
|
|
1462
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/url
|
|
1463
|
-
*/
|
|
1464
|
-
// Capture references at module initialization time
|
|
1465
|
-
const _URL = globalThis.URL;
|
|
1466
|
-
/**
|
|
1467
|
-
* (Safe copy) Creates an object URL for the given object.
|
|
1468
|
-
* Use this instead of `URL.createObjectURL()`.
|
|
1469
|
-
*
|
|
1470
|
-
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
1471
|
-
*/
|
|
1472
|
-
typeof _URL.createObjectURL === 'function'
|
|
1473
|
-
? _URL.createObjectURL.bind(_URL)
|
|
1474
|
-
: () => {
|
|
1475
|
-
throw new Error('URL.createObjectURL is not available in this environment');
|
|
1476
|
-
};
|
|
1477
|
-
/**
|
|
1478
|
-
* (Safe copy) Revokes an object URL previously created with createObjectURL.
|
|
1479
|
-
* Use this instead of `URL.revokeObjectURL()`.
|
|
1480
|
-
*
|
|
1481
|
-
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
1482
|
-
*/
|
|
1483
|
-
typeof _URL.revokeObjectURL === 'function'
|
|
1484
|
-
? _URL.revokeObjectURL.bind(_URL)
|
|
1485
|
-
: () => {
|
|
1486
|
-
throw new Error('URL.revokeObjectURL is not available in this environment');
|
|
1487
|
-
};
|
|
1488
|
-
|
|
1489
|
-
/**
|
|
1490
|
-
* Safe copies of Math built-in methods.
|
|
1491
|
-
*
|
|
1492
|
-
* These references are captured at module initialization time to protect against
|
|
1493
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1494
|
-
*
|
|
1495
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/math
|
|
1496
|
-
*/
|
|
1497
|
-
// Capture references at module initialization time
|
|
1498
|
-
const _Math = globalThis.Math;
|
|
1499
|
-
// ============================================================================
|
|
1500
|
-
// Min/Max
|
|
1501
|
-
// ============================================================================
|
|
1502
|
-
/**
|
|
1503
|
-
* (Safe copy) Returns the larger of zero or more numbers.
|
|
1504
|
-
*/
|
|
1505
|
-
const max = _Math.max;
|
|
1506
|
-
|
|
1507
4472
|
/**
|
|
1508
4473
|
* Line Parser
|
|
1509
4474
|
*
|
|
@@ -1559,6 +4524,25 @@ function parseVersionFromHeading(heading) {
|
|
|
1559
4524
|
if (trimmed[pos] === ']') {
|
|
1560
4525
|
pos++;
|
|
1561
4526
|
}
|
|
4527
|
+
// Handle markdown link format [version](url) - jscutlery/semver style
|
|
4528
|
+
// This extracts the compare URL from patterns like [0.0.4](https://github.com/.../compare/...)
|
|
4529
|
+
if (trimmed[pos] === '(') {
|
|
4530
|
+
const urlStart = pos + 1;
|
|
4531
|
+
let depth = 1;
|
|
4532
|
+
pos++;
|
|
4533
|
+
// Find matching closing parenthesis (handles nested parens in URLs)
|
|
4534
|
+
while (pos < trimmed.length && depth > 0) {
|
|
4535
|
+
if (trimmed[pos] === '(')
|
|
4536
|
+
depth++;
|
|
4537
|
+
else if (trimmed[pos] === ')')
|
|
4538
|
+
depth--;
|
|
4539
|
+
pos++;
|
|
4540
|
+
}
|
|
4541
|
+
// Extract URL if we found the closing paren
|
|
4542
|
+
if (depth === 0) {
|
|
4543
|
+
compareUrl = trimmed.slice(urlStart, pos - 1);
|
|
4544
|
+
}
|
|
4545
|
+
}
|
|
1562
4546
|
// Skip whitespace and separator
|
|
1563
4547
|
while (pos < trimmed.length && (trimmed[pos] === ' ' || trimmed[pos] === '-' || trimmed[pos] === '–')) {
|
|
1564
4548
|
pos++;
|
|
@@ -1575,8 +4559,8 @@ function parseVersionFromHeading(heading) {
|
|
|
1575
4559
|
while (pos < trimmed.length && trimmed[pos] === ' ') {
|
|
1576
4560
|
pos++;
|
|
1577
4561
|
}
|
|
1578
|
-
// Check for link at end: [compare](url)
|
|
1579
|
-
if (pos < trimmed.length) {
|
|
4562
|
+
// Check for link at end: [compare](url) - only if no URL was already extracted
|
|
4563
|
+
if (pos < trimmed.length && !compareUrl) {
|
|
1580
4564
|
const linkMatch = extractLink(trimmed.slice(pos));
|
|
1581
4565
|
if (linkMatch?.url) {
|
|
1582
4566
|
compareUrl = linkMatch.url;
|
|
@@ -2270,11 +5254,22 @@ function isWhitespace(char) {
|
|
|
2270
5254
|
}
|
|
2271
5255
|
|
|
2272
5256
|
/**
|
|
2273
|
-
*
|
|
5257
|
+
* Validates that a URL is actually a GitHub URL by parsing it properly.
|
|
5258
|
+
* This prevents SSRF attacks where 'github.com' could appear in path/query.
|
|
2274
5259
|
*
|
|
2275
|
-
*
|
|
2276
|
-
*
|
|
5260
|
+
* @param url - The URL string to validate
|
|
5261
|
+
* @returns True if the URL host is github.com or a subdomain
|
|
2277
5262
|
*/
|
|
5263
|
+
function isGitHubUrl(url) {
|
|
5264
|
+
try {
|
|
5265
|
+
const parsed = createURL(url);
|
|
5266
|
+
// Check that the host is exactly github.com or ends with .github.com
|
|
5267
|
+
return parsed.host === 'github.com' || parsed.host.endsWith('.github.com');
|
|
5268
|
+
}
|
|
5269
|
+
catch {
|
|
5270
|
+
return false;
|
|
5271
|
+
}
|
|
5272
|
+
}
|
|
2278
5273
|
/**
|
|
2279
5274
|
* Parses a changelog markdown string into a Changelog object.
|
|
2280
5275
|
*
|
|
@@ -2342,7 +5337,7 @@ function parseHeader(state) {
|
|
|
2342
5337
|
description.push(`[${token.value}](${nextToken.value})`);
|
|
2343
5338
|
links.push({ label: token.value, url: nextToken.value });
|
|
2344
5339
|
// Try to detect repository URL
|
|
2345
|
-
if (!state.repositoryUrl && nextToken.value
|
|
5340
|
+
if (!state.repositoryUrl && isGitHubUrl(nextToken.value)) {
|
|
2346
5341
|
state.repositoryUrl = extractRepoUrl(nextToken.value);
|
|
2347
5342
|
}
|
|
2348
5343
|
advance(state); // skip link-text
|
|
@@ -2973,20 +5968,28 @@ function serializeIssueRef(ref) {
|
|
|
2973
5968
|
* ```
|
|
2974
5969
|
*/
|
|
2975
5970
|
function addEntry(changelog, entry, options) {
|
|
5971
|
+
const position = options?.position ?? 'start';
|
|
5972
|
+
const replaceExisting = options?.replaceExisting ?? false;
|
|
5973
|
+
const updateMetadata = options?.updateMetadata ?? false;
|
|
2976
5974
|
// Check for existing entry
|
|
2977
5975
|
const existingIndex = changelog.entries.findIndex((e) => e.version === entry.version);
|
|
2978
|
-
if (existingIndex !== -1 &&
|
|
5976
|
+
if (existingIndex !== -1 && !replaceExisting) {
|
|
2979
5977
|
throw createError(`Entry with version "${entry.version}" already exists. Use replaceExisting: true to replace.`);
|
|
2980
5978
|
}
|
|
2981
5979
|
let newEntries;
|
|
2982
|
-
{
|
|
5980
|
+
if (existingIndex !== -1 && replaceExisting) {
|
|
5981
|
+
// Replace existing entry
|
|
5982
|
+
newEntries = [...changelog.entries];
|
|
5983
|
+
newEntries[existingIndex] = entry;
|
|
5984
|
+
}
|
|
5985
|
+
else {
|
|
2983
5986
|
// Add new entry
|
|
2984
|
-
const insertIndex = 0 ;
|
|
5987
|
+
const insertIndex = position === 'start' ? 0 : position === 'end' ? changelog.entries.length : position;
|
|
2985
5988
|
newEntries = [...changelog.entries];
|
|
2986
5989
|
newEntries.splice(insertIndex, 0, entry);
|
|
2987
5990
|
}
|
|
2988
5991
|
// Build new metadata if requested
|
|
2989
|
-
const metadata = changelog.metadata;
|
|
5992
|
+
const metadata = updateMetadata ? { ...changelog.metadata, warnings: [] } : changelog.metadata;
|
|
2990
5993
|
return {
|
|
2991
5994
|
...changelog,
|
|
2992
5995
|
entries: newEntries,
|
|
@@ -2994,11 +5997,149 @@ function addEntry(changelog, entry, options) {
|
|
|
2994
5997
|
};
|
|
2995
5998
|
}
|
|
2996
5999
|
|
|
6000
|
+
/**
|
|
6001
|
+
* Changelog Entry Removal
|
|
6002
|
+
*
|
|
6003
|
+
* Functions for removing entries from a changelog.
|
|
6004
|
+
*/
|
|
6005
|
+
/**
|
|
6006
|
+
* Removes multiple entries from a changelog.
|
|
6007
|
+
*
|
|
6008
|
+
* @param changelog - The changelog to remove from
|
|
6009
|
+
* @param versions - The versions to remove
|
|
6010
|
+
* @param options - Optional removal options
|
|
6011
|
+
* @returns A new changelog without the specified entries
|
|
6012
|
+
*/
|
|
6013
|
+
function removeEntries(changelog, versions, options) {
|
|
6014
|
+
const versionsSet = createSet(versions);
|
|
6015
|
+
const newEntries = changelog.entries.filter((e) => !versionsSet.has(e.version));
|
|
6016
|
+
return {
|
|
6017
|
+
...changelog,
|
|
6018
|
+
entries: newEntries,
|
|
6019
|
+
};
|
|
6020
|
+
}
|
|
6021
|
+
|
|
6022
|
+
/**
|
|
6023
|
+
* Creates a platform-specific compare URL for viewing changes between two commits.
|
|
6024
|
+
*
|
|
6025
|
+
* Each platform has a different URL format:
|
|
6026
|
+
* - **GitHub**: `{baseUrl}/compare/{fromCommit}...{toCommit}` (three dots)
|
|
6027
|
+
* - **GitLab**: `{baseUrl}/-/compare/{fromCommit}...{toCommit}` (three dots, `/-/` prefix)
|
|
6028
|
+
* - **Bitbucket**: `{baseUrl}/compare/{toCommit}..{fromCommit}` (two dots, reversed order)
|
|
6029
|
+
* - **Azure DevOps**: `{baseUrl}/compare?version=GT{toCommit}&compareVersion=GT{fromCommit}` (query params)
|
|
6030
|
+
*
|
|
6031
|
+
* For `custom` platforms, a `formatCompareUrl` function must be provided in the repository config.
|
|
6032
|
+
* For `unknown` platforms, returns `null`.
|
|
6033
|
+
*
|
|
6034
|
+
* @param options - Compare URL options including repository, fromCommit, and toCommit
|
|
6035
|
+
* @returns The compare URL string, or null if URL cannot be generated
|
|
6036
|
+
*
|
|
6037
|
+
* @example
|
|
6038
|
+
* ```typescript
|
|
6039
|
+
* // GitHub
|
|
6040
|
+
* createCompareUrl({
|
|
6041
|
+
* repository: { platform: 'github', baseUrl: 'https://github.com/owner/repo' },
|
|
6042
|
+
* fromCommit: 'abc1234',
|
|
6043
|
+
* toCommit: 'def5678'
|
|
6044
|
+
* })
|
|
6045
|
+
* // → 'https://github.com/owner/repo/compare/abc1234...def5678'
|
|
6046
|
+
*
|
|
6047
|
+
* // GitLab
|
|
6048
|
+
* createCompareUrl({
|
|
6049
|
+
* repository: { platform: 'gitlab', baseUrl: 'https://gitlab.com/group/project' },
|
|
6050
|
+
* fromCommit: 'abc1234',
|
|
6051
|
+
* toCommit: 'def5678'
|
|
6052
|
+
* })
|
|
6053
|
+
* // → 'https://gitlab.com/group/project/-/compare/abc1234...def5678'
|
|
6054
|
+
*
|
|
6055
|
+
* // Bitbucket (reversed order)
|
|
6056
|
+
* createCompareUrl({
|
|
6057
|
+
* repository: { platform: 'bitbucket', baseUrl: 'https://bitbucket.org/owner/repo' },
|
|
6058
|
+
* fromCommit: 'abc1234',
|
|
6059
|
+
* toCommit: 'def5678'
|
|
6060
|
+
* })
|
|
6061
|
+
* // → 'https://bitbucket.org/owner/repo/compare/def5678..abc1234'
|
|
6062
|
+
*
|
|
6063
|
+
* // Azure DevOps
|
|
6064
|
+
* createCompareUrl({
|
|
6065
|
+
* repository: { platform: 'azure-devops', baseUrl: 'https://dev.azure.com/org/proj/_git/repo' },
|
|
6066
|
+
* fromCommit: 'abc1234',
|
|
6067
|
+
* toCommit: 'def5678'
|
|
6068
|
+
* })
|
|
6069
|
+
* // → 'https://dev.azure.com/org/proj/_git/repo/compare?version=GTdef5678&compareVersion=GTabc1234'
|
|
6070
|
+
*
|
|
6071
|
+
* // Custom formatter
|
|
6072
|
+
* createCompareUrl({
|
|
6073
|
+
* repository: {
|
|
6074
|
+
* platform: 'custom',
|
|
6075
|
+
* baseUrl: 'https://my-git.internal/repo',
|
|
6076
|
+
* formatCompareUrl: (from, to) => `https://my-git.internal/diff/${from}/${to}`
|
|
6077
|
+
* },
|
|
6078
|
+
* fromCommit: 'abc1234',
|
|
6079
|
+
* toCommit: 'def5678'
|
|
6080
|
+
* })
|
|
6081
|
+
* // → 'https://my-git.internal/diff/abc1234/def5678'
|
|
6082
|
+
* ```
|
|
6083
|
+
*/
|
|
6084
|
+
function createCompareUrl(options) {
|
|
6085
|
+
const { repository, fromCommit, toCommit } = options;
|
|
6086
|
+
// Validate inputs
|
|
6087
|
+
if (!repository || !fromCommit || !toCommit) {
|
|
6088
|
+
return null;
|
|
6089
|
+
}
|
|
6090
|
+
// If custom formatter is provided, use it (works for any platform including overrides)
|
|
6091
|
+
if (repository.formatCompareUrl) {
|
|
6092
|
+
return repository.formatCompareUrl(fromCommit, toCommit);
|
|
6093
|
+
}
|
|
6094
|
+
const { platform, baseUrl } = repository;
|
|
6095
|
+
// Cannot generate URL for unknown platforms without a formatter
|
|
6096
|
+
if (platform === 'unknown') {
|
|
6097
|
+
return null;
|
|
6098
|
+
}
|
|
6099
|
+
// Custom platform requires a formatter
|
|
6100
|
+
if (platform === 'custom') {
|
|
6101
|
+
return null;
|
|
6102
|
+
}
|
|
6103
|
+
// Generate URL for known platforms
|
|
6104
|
+
if (isKnownPlatform(platform)) {
|
|
6105
|
+
return formatKnownPlatformCompareUrl(platform, baseUrl, fromCommit, toCommit);
|
|
6106
|
+
}
|
|
6107
|
+
return null;
|
|
6108
|
+
}
|
|
6109
|
+
/**
|
|
6110
|
+
* Formats a compare URL for known platforms.
|
|
6111
|
+
*
|
|
6112
|
+
* @param platform - Known platform type
|
|
6113
|
+
* @param baseUrl - Repository base URL
|
|
6114
|
+
* @param fromCommit - Source commit hash (older version)
|
|
6115
|
+
* @param toCommit - Target commit hash (newer version)
|
|
6116
|
+
* @returns Formatted compare URL
|
|
6117
|
+
*
|
|
6118
|
+
* @internal
|
|
6119
|
+
*/
|
|
6120
|
+
function formatKnownPlatformCompareUrl(platform, baseUrl, fromCommit, toCommit) {
|
|
6121
|
+
switch (platform) {
|
|
6122
|
+
case 'github':
|
|
6123
|
+
// GitHub: {baseUrl}/compare/{fromCommit}...{toCommit}
|
|
6124
|
+
return `${baseUrl}/compare/${fromCommit}...${toCommit}`;
|
|
6125
|
+
case 'gitlab':
|
|
6126
|
+
// GitLab: {baseUrl}/-/compare/{fromCommit}...{toCommit}
|
|
6127
|
+
return `${baseUrl}/-/compare/${fromCommit}...${toCommit}`;
|
|
6128
|
+
case 'bitbucket':
|
|
6129
|
+
// Bitbucket: {baseUrl}/compare/{toCommit}..{fromCommit} (reversed order, two dots)
|
|
6130
|
+
return `${baseUrl}/compare/${toCommit}..${fromCommit}`;
|
|
6131
|
+
case 'azure-devops':
|
|
6132
|
+
// Azure DevOps: {baseUrl}/compare?version=GT{toCommit}&compareVersion=GT{fromCommit}
|
|
6133
|
+
// Use encodeURIComponent for query parameter values
|
|
6134
|
+
return `${baseUrl}/compare?version=GT${encodeURIComponent(toCommit)}&compareVersion=GT${encodeURIComponent(fromCommit)}`;
|
|
6135
|
+
}
|
|
6136
|
+
}
|
|
6137
|
+
|
|
2997
6138
|
const GENERATE_CHANGELOG_STEP_ID = 'generate-changelog';
|
|
2998
6139
|
/**
|
|
2999
6140
|
* Maps conventional commit types to changelog section types.
|
|
3000
6141
|
*/
|
|
3001
|
-
const
|
|
6142
|
+
const DEFAULT_COMMIT_TYPE_TO_SECTION = {
|
|
3002
6143
|
feat: 'features',
|
|
3003
6144
|
fix: 'fixes',
|
|
3004
6145
|
perf: 'performance',
|
|
@@ -3011,23 +6152,102 @@ const COMMIT_TYPE_TO_SECTION = {
|
|
|
3011
6152
|
chore: 'chores',
|
|
3012
6153
|
style: 'other',
|
|
3013
6154
|
};
|
|
6155
|
+
/**
|
|
6156
|
+
* Resolves the commit type to section mapping by merging config with defaults.
|
|
6157
|
+
*
|
|
6158
|
+
* @param configMapping - User-provided partial mapping from FlowConfig
|
|
6159
|
+
* @returns Resolved mapping with user overrides applied
|
|
6160
|
+
*/
|
|
6161
|
+
function resolveCommitTypeMapping(configMapping) {
|
|
6162
|
+
if (!configMapping) {
|
|
6163
|
+
return DEFAULT_COMMIT_TYPE_TO_SECTION;
|
|
6164
|
+
}
|
|
6165
|
+
return { ...DEFAULT_COMMIT_TYPE_TO_SECTION, ...configMapping };
|
|
6166
|
+
}
|
|
6167
|
+
/**
|
|
6168
|
+
* Checks if a commit source represents an indirect change.
|
|
6169
|
+
*
|
|
6170
|
+
* @param source - The commit source type
|
|
6171
|
+
* @returns True if the commit is indirect (dependency or infrastructure)
|
|
6172
|
+
*/
|
|
6173
|
+
function isIndirectSource(source) {
|
|
6174
|
+
return source === 'indirect-dependency' || source === 'indirect-infra';
|
|
6175
|
+
}
|
|
6176
|
+
/**
|
|
6177
|
+
* Groups classified commits by their section type.
|
|
6178
|
+
*
|
|
6179
|
+
* @param commits - Array of classified commits
|
|
6180
|
+
* @param mapping - Commit type to section mapping
|
|
6181
|
+
* @returns Record of section type to classified commits
|
|
6182
|
+
*/
|
|
6183
|
+
function groupClassifiedCommitsBySection(commits, mapping) {
|
|
6184
|
+
const groups = {};
|
|
6185
|
+
for (const classified of commits) {
|
|
6186
|
+
const sectionType = mapping[classified.commit.type ?? 'chore'];
|
|
6187
|
+
// Skip if explicitly excluded (null)
|
|
6188
|
+
if (sectionType === null)
|
|
6189
|
+
continue;
|
|
6190
|
+
// Fallback to 'chores' for unmapped types
|
|
6191
|
+
const resolvedSection = sectionType ?? 'chores';
|
|
6192
|
+
if (!groups[resolvedSection]) {
|
|
6193
|
+
groups[resolvedSection] = [];
|
|
6194
|
+
}
|
|
6195
|
+
groups[resolvedSection].push(classified);
|
|
6196
|
+
}
|
|
6197
|
+
return groups;
|
|
6198
|
+
}
|
|
3014
6199
|
/**
|
|
3015
6200
|
* Groups commits by their section type.
|
|
3016
6201
|
*
|
|
3017
6202
|
* @param commits - Array of conventional commits
|
|
6203
|
+
* @param mapping - Commit type to section mapping
|
|
3018
6204
|
* @returns Record of section type to commits
|
|
3019
6205
|
*/
|
|
3020
|
-
function groupCommitsBySection(commits) {
|
|
6206
|
+
function groupCommitsBySection(commits, mapping) {
|
|
3021
6207
|
const groups = {};
|
|
3022
6208
|
for (const commit of commits) {
|
|
3023
|
-
const sectionType =
|
|
3024
|
-
if (
|
|
3025
|
-
|
|
6209
|
+
const sectionType = mapping[commit.type ?? 'chore'];
|
|
6210
|
+
// Skip if explicitly excluded (null)
|
|
6211
|
+
if (sectionType === null)
|
|
6212
|
+
continue;
|
|
6213
|
+
// Fallback to 'chores' for unmapped types
|
|
6214
|
+
const resolvedSection = sectionType ?? 'chores';
|
|
6215
|
+
if (!groups[resolvedSection]) {
|
|
6216
|
+
groups[resolvedSection] = [];
|
|
3026
6217
|
}
|
|
3027
|
-
groups[
|
|
6218
|
+
groups[resolvedSection].push(commit);
|
|
3028
6219
|
}
|
|
3029
6220
|
return groups;
|
|
3030
6221
|
}
|
|
6222
|
+
/**
|
|
6223
|
+
* Creates a changelog item from a classified commit.
|
|
6224
|
+
*
|
|
6225
|
+
* Applies scope display rules:
|
|
6226
|
+
* - Direct commits: scope omitted (redundant in project changelog)
|
|
6227
|
+
* - Indirect commits: scope preserved (provides context)
|
|
6228
|
+
*
|
|
6229
|
+
* @param classified - The classified commit with source metadata
|
|
6230
|
+
* @returns A changelog item with proper scope handling
|
|
6231
|
+
*/
|
|
6232
|
+
function classifiedCommitToItem(classified) {
|
|
6233
|
+
// Apply scope transformation based on classification
|
|
6234
|
+
const commit = toChangelogCommit(classified);
|
|
6235
|
+
const indirect = isIndirectSource(classified.source);
|
|
6236
|
+
let text = commit.subject;
|
|
6237
|
+
// Add scope prefix if preserved (indirect commits)
|
|
6238
|
+
if (commit.scope) {
|
|
6239
|
+
text = `**${commit.scope}:** ${text}`;
|
|
6240
|
+
}
|
|
6241
|
+
// Add breaking change indicator
|
|
6242
|
+
if (commit.breaking) {
|
|
6243
|
+
text = `⚠️ BREAKING: ${text}`;
|
|
6244
|
+
}
|
|
6245
|
+
return createChangelogItem(text, {
|
|
6246
|
+
source: classified.source,
|
|
6247
|
+
indirect,
|
|
6248
|
+
breaking: commit.breaking,
|
|
6249
|
+
});
|
|
6250
|
+
}
|
|
3031
6251
|
/**
|
|
3032
6252
|
* Creates a changelog item from a conventional commit.
|
|
3033
6253
|
*
|
|
@@ -3063,6 +6283,8 @@ function createGenerateChangelogStep() {
|
|
|
3063
6283
|
return createStep(GENERATE_CHANGELOG_STEP_ID, 'Generate Changelog Entry', async (ctx) => {
|
|
3064
6284
|
const { config, state } = ctx;
|
|
3065
6285
|
const { commits, nextVersion, bumpType } = state;
|
|
6286
|
+
// Resolve commit type to section mapping
|
|
6287
|
+
const commitTypeMapping = resolveCommitTypeMapping(config.commitTypeToSection);
|
|
3066
6288
|
// Skip if no bump needed
|
|
3067
6289
|
if (!nextVersion || bumpType === 'none') {
|
|
3068
6290
|
return createSkippedResult('No version bump, skipping changelog generation');
|
|
@@ -3073,9 +6295,26 @@ function createGenerateChangelogStep() {
|
|
|
3073
6295
|
}
|
|
3074
6296
|
// Handle case with no commits (e.g., first release)
|
|
3075
6297
|
if (!commits || commits.length === 0) {
|
|
6298
|
+
// Generate compare URL using commit hashes ONLY
|
|
6299
|
+
// Only generate if we have a valid base commit (effectiveBaseCommit will be null if fallback was used)
|
|
6300
|
+
let compareUrl;
|
|
6301
|
+
if (state.repositoryConfig && state.effectiveBaseCommit) {
|
|
6302
|
+
const currentCommit = ctx.git.getHeadHash();
|
|
6303
|
+
compareUrl =
|
|
6304
|
+
createCompareUrl({
|
|
6305
|
+
repository: state.repositoryConfig,
|
|
6306
|
+
fromCommit: state.effectiveBaseCommit,
|
|
6307
|
+
toCommit: currentCommit,
|
|
6308
|
+
}) ?? undefined;
|
|
6309
|
+
}
|
|
6310
|
+
else if (state.publishedCommit && !state.effectiveBaseCommit) {
|
|
6311
|
+
// Log why we're not generating a compare URL
|
|
6312
|
+
ctx.logger.info('Compare URL omitted: published commit not in current history');
|
|
6313
|
+
}
|
|
3076
6314
|
const entry = createChangelogEntry(nextVersion, {
|
|
3077
6315
|
date: createDate().toISOString().split('T')[0],
|
|
3078
6316
|
sections: [createChangelogSection('features', 'Features', [createChangelogItem('Initial release')])],
|
|
6317
|
+
compareUrl,
|
|
3079
6318
|
});
|
|
3080
6319
|
return {
|
|
3081
6320
|
status: 'success',
|
|
@@ -3083,41 +6322,109 @@ function createGenerateChangelogStep() {
|
|
|
3083
6322
|
message: 'Generated initial release changelog entry',
|
|
3084
6323
|
};
|
|
3085
6324
|
}
|
|
3086
|
-
//
|
|
3087
|
-
const
|
|
3088
|
-
// Create sections
|
|
6325
|
+
// Use classification result when available for proper scope handling
|
|
6326
|
+
const { classificationResult } = state;
|
|
3089
6327
|
const sections = [];
|
|
3090
|
-
|
|
3091
|
-
|
|
3092
|
-
|
|
3093
|
-
|
|
3094
|
-
|
|
3095
|
-
|
|
3096
|
-
|
|
3097
|
-
|
|
3098
|
-
|
|
3099
|
-
|
|
3100
|
-
|
|
3101
|
-
|
|
3102
|
-
|
|
3103
|
-
|
|
3104
|
-
|
|
3105
|
-
|
|
3106
|
-
|
|
3107
|
-
|
|
3108
|
-
|
|
3109
|
-
|
|
3110
|
-
|
|
3111
|
-
|
|
3112
|
-
|
|
3113
|
-
|
|
3114
|
-
|
|
6328
|
+
if (classificationResult && classificationResult.included.length > 0) {
|
|
6329
|
+
// Use classified commits for proper scope display rules
|
|
6330
|
+
const classifiedCommits = classificationResult.included;
|
|
6331
|
+
// Separate direct and indirect commits
|
|
6332
|
+
const directCommits = classifiedCommits.filter((c) => !isIndirectSource(c.source));
|
|
6333
|
+
const indirectCommits = classifiedCommits.filter((c) => isIndirectSource(c.source));
|
|
6334
|
+
// Add breaking changes section first if any
|
|
6335
|
+
const breakingCommits = classifiedCommits.filter((c) => c.commit.breaking);
|
|
6336
|
+
if (breakingCommits.length > 0) {
|
|
6337
|
+
sections.push(createChangelogSection('breaking', 'Breaking Changes', breakingCommits.map((c) => {
|
|
6338
|
+
const commit = toChangelogCommit(c);
|
|
6339
|
+
const text = commit.breakingDescription ?? commit.subject;
|
|
6340
|
+
const indirect = isIndirectSource(c.source);
|
|
6341
|
+
return createChangelogItem(commit.scope ? `**${commit.scope}:** ${text}` : text, {
|
|
6342
|
+
source: c.source,
|
|
6343
|
+
indirect,
|
|
6344
|
+
breaking: true,
|
|
6345
|
+
});
|
|
6346
|
+
})));
|
|
6347
|
+
}
|
|
6348
|
+
// Group direct commits by section
|
|
6349
|
+
const groupedDirect = groupClassifiedCommitsBySection(directCommits, commitTypeMapping);
|
|
6350
|
+
// Add other sections in conventional order (direct commits only)
|
|
6351
|
+
const sectionOrder = [
|
|
6352
|
+
{ type: 'features', heading: 'Features' },
|
|
6353
|
+
{ type: 'fixes', heading: 'Bug Fixes' },
|
|
6354
|
+
{ type: 'performance', heading: 'Performance' },
|
|
6355
|
+
{ type: 'documentation', heading: 'Documentation' },
|
|
6356
|
+
{ type: 'refactoring', heading: 'Code Refactoring' },
|
|
6357
|
+
{ type: 'build', heading: 'Build' },
|
|
6358
|
+
{ type: 'ci', heading: 'Continuous Integration' },
|
|
6359
|
+
{ type: 'tests', heading: 'Tests' },
|
|
6360
|
+
{ type: 'chores', heading: 'Chores' },
|
|
6361
|
+
{ type: 'other', heading: 'Other' },
|
|
6362
|
+
];
|
|
6363
|
+
for (const { type: sectionType, heading } of sectionOrder) {
|
|
6364
|
+
const sectionCommits = groupedDirect[sectionType];
|
|
6365
|
+
if (sectionCommits && sectionCommits.length > 0) {
|
|
6366
|
+
sections.push(createChangelogSection(sectionType, heading, sectionCommits.map(classifiedCommitToItem)));
|
|
6367
|
+
}
|
|
6368
|
+
}
|
|
6369
|
+
// Add Dependency Updates section for indirect commits if any
|
|
6370
|
+
if (indirectCommits.length > 0) {
|
|
6371
|
+
sections.push(createChangelogSection('other', // Use 'other' as section type for dependency updates
|
|
6372
|
+
'Dependency Updates', indirectCommits.map((c) => classifiedCommitToItem(c))));
|
|
6373
|
+
}
|
|
6374
|
+
}
|
|
6375
|
+
else {
|
|
6376
|
+
// Fallback: use commits without classification (backward compatibility)
|
|
6377
|
+
const grouped = groupCommitsBySection(commits, commitTypeMapping);
|
|
6378
|
+
// Add breaking changes section first if any
|
|
6379
|
+
const breakingCommits = commits.filter((c) => c.breaking);
|
|
6380
|
+
if (breakingCommits.length > 0) {
|
|
6381
|
+
sections.push(createChangelogSection('breaking', 'Breaking Changes', breakingCommits.map((c) => {
|
|
6382
|
+
const text = c.breakingDescription ?? c.subject;
|
|
6383
|
+
return createChangelogItem(c.scope ? `**${c.scope}:** ${text}` : text);
|
|
6384
|
+
})));
|
|
6385
|
+
}
|
|
6386
|
+
// Add other sections in conventional order
|
|
6387
|
+
const sectionOrder = [
|
|
6388
|
+
{ type: 'features', heading: 'Features' },
|
|
6389
|
+
{ type: 'fixes', heading: 'Bug Fixes' },
|
|
6390
|
+
{ type: 'performance', heading: 'Performance' },
|
|
6391
|
+
{ type: 'documentation', heading: 'Documentation' },
|
|
6392
|
+
{ type: 'refactoring', heading: 'Code Refactoring' },
|
|
6393
|
+
{ type: 'build', heading: 'Build' },
|
|
6394
|
+
{ type: 'ci', heading: 'Continuous Integration' },
|
|
6395
|
+
{ type: 'tests', heading: 'Tests' },
|
|
6396
|
+
{ type: 'chores', heading: 'Chores' },
|
|
6397
|
+
{ type: 'other', heading: 'Other' },
|
|
6398
|
+
];
|
|
6399
|
+
for (const { type: sectionType, heading } of sectionOrder) {
|
|
6400
|
+
const sectionCommits = grouped[sectionType];
|
|
6401
|
+
if (sectionCommits && sectionCommits.length > 0) {
|
|
6402
|
+
sections.push(createChangelogSection(sectionType, heading, sectionCommits.map(commitToItem)));
|
|
6403
|
+
}
|
|
3115
6404
|
}
|
|
3116
6405
|
}
|
|
6406
|
+
// Generate compare URL using commit hashes ONLY
|
|
6407
|
+
// Only generate if we have a valid base commit (effectiveBaseCommit will be null if fallback was used)
|
|
6408
|
+
let compareUrl;
|
|
6409
|
+
if (state.repositoryConfig && state.effectiveBaseCommit) {
|
|
6410
|
+
const currentCommit = ctx.git.getHeadHash();
|
|
6411
|
+
compareUrl =
|
|
6412
|
+
createCompareUrl({
|
|
6413
|
+
repository: state.repositoryConfig,
|
|
6414
|
+
fromCommit: state.effectiveBaseCommit,
|
|
6415
|
+
toCommit: currentCommit,
|
|
6416
|
+
}) ?? undefined;
|
|
6417
|
+
ctx.logger.debug(`Compare URL: ${state.effectiveBaseCommit.slice(0, 7)}...${currentCommit.slice(0, 7)}`);
|
|
6418
|
+
}
|
|
6419
|
+
else if (state.publishedCommit && !state.effectiveBaseCommit) {
|
|
6420
|
+
// Log why we're not generating a compare URL
|
|
6421
|
+
ctx.logger.info('Compare URL omitted: published commit not in current history');
|
|
6422
|
+
}
|
|
3117
6423
|
// Create the entry
|
|
3118
6424
|
const entry = createChangelogEntry(nextVersion, {
|
|
3119
6425
|
date: createDate().toISOString().split('T')[0],
|
|
3120
6426
|
sections,
|
|
6427
|
+
compareUrl,
|
|
3121
6428
|
});
|
|
3122
6429
|
return {
|
|
3123
6430
|
status: 'success',
|
|
@@ -3143,14 +6450,15 @@ function createWriteChangelogStep() {
|
|
|
3143
6450
|
if (!nextVersion || bumpType === 'none' || !changelogEntry || config.skipChangelog) {
|
|
3144
6451
|
return createSkippedResult('No changelog to write');
|
|
3145
6452
|
}
|
|
3146
|
-
const
|
|
6453
|
+
const changelogFileName = config.changelogFileName ?? DEFAULT_CHANGELOG_FILENAME;
|
|
6454
|
+
const changelogPath = `${projectRoot}/${changelogFileName}`;
|
|
3147
6455
|
let existingContent = '';
|
|
3148
6456
|
// Read existing changelog
|
|
3149
6457
|
try {
|
|
3150
6458
|
existingContent = tree.read(changelogPath, 'utf-8') ?? '';
|
|
3151
6459
|
}
|
|
3152
6460
|
catch {
|
|
3153
|
-
logger.debug(
|
|
6461
|
+
logger.debug(`No existing ${changelogFileName} found`);
|
|
3154
6462
|
}
|
|
3155
6463
|
// If no existing content, create new changelog
|
|
3156
6464
|
if (!existingContent.trim()) {
|
|
@@ -3168,12 +6476,33 @@ function createWriteChangelogStep() {
|
|
|
3168
6476
|
stateUpdates: {
|
|
3169
6477
|
modifiedFiles: [...(state.modifiedFiles ?? []), changelogPath],
|
|
3170
6478
|
},
|
|
3171
|
-
message: `Created
|
|
6479
|
+
message: `Created ${changelogFileName} with version ${nextVersion}`,
|
|
3172
6480
|
};
|
|
3173
6481
|
}
|
|
3174
6482
|
// Parse existing and add entry
|
|
3175
6483
|
const existing = parseChangelog(existingContent);
|
|
3176
|
-
const
|
|
6484
|
+
const isPendingPublication = state.isPendingPublication === true;
|
|
6485
|
+
let changelog = existing;
|
|
6486
|
+
// Clean up stacked entries when in pending publication state
|
|
6487
|
+
if (isPendingPublication && state.publishedVersion) {
|
|
6488
|
+
const publishedVer = parseVersion(state.publishedVersion);
|
|
6489
|
+
if (publishedVer.success && publishedVer.version) {
|
|
6490
|
+
const pubVer = publishedVer.version;
|
|
6491
|
+
const toRemove = changelog.entries
|
|
6492
|
+
.filter((e) => !e.unreleased)
|
|
6493
|
+
.filter((e) => {
|
|
6494
|
+
const ver = parseVersion(e.version);
|
|
6495
|
+
return ver.success && ver.version && gt(ver.version, pubVer);
|
|
6496
|
+
})
|
|
6497
|
+
.map((e) => e.version);
|
|
6498
|
+
if (toRemove.length > 0) {
|
|
6499
|
+
logger.info(`Removing stacked entries: ${toRemove.join(', ')}`);
|
|
6500
|
+
changelog = removeEntries(changelog, toRemove);
|
|
6501
|
+
}
|
|
6502
|
+
}
|
|
6503
|
+
}
|
|
6504
|
+
// Add entry (replaceExisting handles case where nextVersion entry already exists)
|
|
6505
|
+
const updated = addEntry(changelog, changelogEntry, { replaceExisting: isPendingPublication });
|
|
3177
6506
|
const serialized = serializeChangelog(updated);
|
|
3178
6507
|
tree.write(changelogPath, serialized);
|
|
3179
6508
|
return {
|
|
@@ -3181,7 +6510,7 @@ function createWriteChangelogStep() {
|
|
|
3181
6510
|
stateUpdates: {
|
|
3182
6511
|
modifiedFiles: [...(state.modifiedFiles ?? []), changelogPath],
|
|
3183
6512
|
},
|
|
3184
|
-
message: `Updated
|
|
6513
|
+
message: `Updated ${changelogFileName} with version ${nextVersion}`,
|
|
3185
6514
|
};
|
|
3186
6515
|
}, {
|
|
3187
6516
|
dependsOn: ['generate-changelog'],
|
|
@@ -3210,23 +6539,26 @@ function createUpdatePackageStep() {
|
|
|
3210
6539
|
return createSkippedResult('No version bump needed');
|
|
3211
6540
|
}
|
|
3212
6541
|
const packageJsonPath = `${projectRoot}/package.json`;
|
|
6542
|
+
logger.debug(`Reading package.json from: ${packageJsonPath}`);
|
|
3213
6543
|
// Read package.json
|
|
3214
6544
|
let content;
|
|
3215
6545
|
try {
|
|
3216
6546
|
content = tree.read(packageJsonPath, 'utf-8') ?? '';
|
|
3217
6547
|
if (!content) {
|
|
6548
|
+
logger.error(`package.json not found at ${packageJsonPath}`);
|
|
3218
6549
|
return {
|
|
3219
6550
|
status: 'failed',
|
|
3220
|
-
error: createError(
|
|
3221
|
-
message:
|
|
6551
|
+
error: createError(`package.json not found at ${packageJsonPath}`),
|
|
6552
|
+
message: `Could not read package.json at ${packageJsonPath}`,
|
|
3222
6553
|
};
|
|
3223
6554
|
}
|
|
3224
6555
|
}
|
|
3225
6556
|
catch (error) {
|
|
6557
|
+
logger.error(`Failed to read package.json at ${packageJsonPath}: ${error}`);
|
|
3226
6558
|
return {
|
|
3227
6559
|
status: 'failed',
|
|
3228
6560
|
error: error instanceof Error ? error : createError(String(error)),
|
|
3229
|
-
message:
|
|
6561
|
+
message: `Failed to read package.json at ${packageJsonPath}`,
|
|
3230
6562
|
};
|
|
3231
6563
|
}
|
|
3232
6564
|
// Parse and update version
|
|
@@ -3509,14 +6841,15 @@ const CONVENTIONAL_FLOW_CONFIG = {
|
|
|
3509
6841
|
*
|
|
3510
6842
|
* This flow follows the standard conventional commits workflow:
|
|
3511
6843
|
* 1. Fetch published version from registry
|
|
3512
|
-
* 2.
|
|
3513
|
-
* 3.
|
|
3514
|
-
* 4.
|
|
3515
|
-
* 5.
|
|
3516
|
-
* 6.
|
|
3517
|
-
* 7.
|
|
3518
|
-
* 8.
|
|
3519
|
-
* 9. Create git
|
|
6844
|
+
* 2. Resolve repository configuration (for compare URLs)
|
|
6845
|
+
* 3. Analyze commits since last release
|
|
6846
|
+
* 4. Calculate version bump based on commit types
|
|
6847
|
+
* 5. Check if version already published (idempotency)
|
|
6848
|
+
* 6. Generate changelog entry (with compare URL if repository resolved)
|
|
6849
|
+
* 7. Update package.json version
|
|
6850
|
+
* 8. Write changelog to file
|
|
6851
|
+
* 9. Create git commit (optional)
|
|
6852
|
+
* 10. Create git tag (optional, typically after publish)
|
|
3520
6853
|
*
|
|
3521
6854
|
* @param config - Optional configuration overrides
|
|
3522
6855
|
* @returns A VersionFlow configured for conventional commits
|
|
@@ -3541,6 +6874,7 @@ function createConventionalFlow(config) {
|
|
|
3541
6874
|
const mergedConfig = { ...CONVENTIONAL_FLOW_CONFIG, ...config };
|
|
3542
6875
|
return createFlow('conventional', 'Conventional Commits Flow', [
|
|
3543
6876
|
createFetchRegistryStep(),
|
|
6877
|
+
createResolveRepositoryStep(),
|
|
3544
6878
|
createAnalyzeCommitsStep(),
|
|
3545
6879
|
createCalculateBumpStep(),
|
|
3546
6880
|
createCheckIdempotencyStep(),
|
|
@@ -3672,6 +7006,7 @@ function createIndependentFlow(config) {
|
|
|
3672
7006
|
const mergedConfig = { ...INDEPENDENT_FLOW_CONFIG, ...config };
|
|
3673
7007
|
return createFlow('independent', 'Independent Versioning Flow', [
|
|
3674
7008
|
createFetchRegistryStep(),
|
|
7009
|
+
createResolveRepositoryStep(),
|
|
3675
7010
|
createAnalyzeCommitsStep(),
|
|
3676
7011
|
createCalculateBumpStep(),
|
|
3677
7012
|
createCheckDependentBumpsStep(),
|
|
@@ -3700,6 +7035,7 @@ function createIndependentFlow(config) {
|
|
|
3700
7035
|
function createBatchReleaseFlow(config) {
|
|
3701
7036
|
return createFlow('batch-release', 'Batch Release Flow', [
|
|
3702
7037
|
createFetchRegistryStep(),
|
|
7038
|
+
createResolveRepositoryStep(),
|
|
3703
7039
|
createAnalyzeCommitsStep(),
|
|
3704
7040
|
createCalculateBumpStep(),
|
|
3705
7041
|
createCheckIdempotencyStep(),
|
|
@@ -3833,6 +7169,7 @@ function createSyncedFlow(config) {
|
|
|
3833
7169
|
const mergedConfig = { ...SYNCED_FLOW_CONFIG, ...config };
|
|
3834
7170
|
return createFlow('synced', 'Synced Versioning Flow', [
|
|
3835
7171
|
createFetchRegistryStep(),
|
|
7172
|
+
createResolveRepositoryStep(),
|
|
3836
7173
|
createAnalyzeCommitsStep(),
|
|
3837
7174
|
createCalculateBumpStep(),
|
|
3838
7175
|
createCheckIdempotencyStep(),
|
|
@@ -3872,6 +7209,7 @@ function createFixedVersionFlow(version, config) {
|
|
|
3872
7209
|
});
|
|
3873
7210
|
return createFlow('fixed', 'Fixed Version Flow', [
|
|
3874
7211
|
createFetchRegistryStep(),
|
|
7212
|
+
createResolveRepositoryStep(),
|
|
3875
7213
|
createAnalyzeCommitsStep(),
|
|
3876
7214
|
fixedBumpStep,
|
|
3877
7215
|
createCheckIdempotencyStep(),
|