@hyperfrontend/versioning 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +50 -1
- package/CHANGELOG.md +23 -23
- package/README.md +12 -9
- package/changelog/index.cjs.js +23 -2
- package/changelog/index.cjs.js.map +1 -1
- package/changelog/index.esm.js +23 -2
- package/changelog/index.esm.js.map +1 -1
- package/changelog/models/entry.d.ts +5 -0
- package/changelog/models/entry.d.ts.map +1 -1
- package/changelog/models/index.cjs.js +2 -0
- package/changelog/models/index.cjs.js.map +1 -1
- package/changelog/models/index.esm.js +2 -0
- package/changelog/models/index.esm.js.map +1 -1
- package/changelog/operations/index.cjs.js.map +1 -1
- package/changelog/operations/index.esm.js.map +1 -1
- package/changelog/parse/index.cjs.js +23 -2
- package/changelog/parse/index.cjs.js.map +1 -1
- package/changelog/parse/index.esm.js +23 -2
- package/changelog/parse/index.esm.js.map +1 -1
- package/changelog/parse/line.d.ts.map +1 -1
- package/commits/classify/classifier.d.ts +73 -0
- package/commits/classify/classifier.d.ts.map +1 -0
- package/commits/classify/index.cjs.js +705 -0
- package/commits/classify/index.cjs.js.map +1 -0
- package/commits/classify/index.d.ts +8 -0
- package/commits/classify/index.d.ts.map +1 -0
- package/commits/classify/index.esm.js +678 -0
- package/commits/classify/index.esm.js.map +1 -0
- package/commits/classify/infrastructure.d.ts +205 -0
- package/commits/classify/infrastructure.d.ts.map +1 -0
- package/commits/classify/models.d.ts +108 -0
- package/commits/classify/models.d.ts.map +1 -0
- package/commits/classify/project-scopes.d.ts +59 -0
- package/commits/classify/project-scopes.d.ts.map +1 -0
- package/commits/index.cjs.js +702 -0
- package/commits/index.cjs.js.map +1 -1
- package/commits/index.d.ts +1 -0
- package/commits/index.d.ts.map +1 -1
- package/commits/index.esm.js +677 -1
- package/commits/index.esm.js.map +1 -1
- package/flow/executor/execute.d.ts +6 -0
- package/flow/executor/execute.d.ts.map +1 -1
- package/flow/executor/index.cjs.js +1604 -42
- package/flow/executor/index.cjs.js.map +1 -1
- package/flow/executor/index.esm.js +1610 -48
- package/flow/executor/index.esm.js.map +1 -1
- package/flow/index.cjs.js +6651 -2893
- package/flow/index.cjs.js.map +1 -1
- package/flow/index.esm.js +6655 -2899
- package/flow/index.esm.js.map +1 -1
- package/flow/models/index.cjs.js +125 -0
- package/flow/models/index.cjs.js.map +1 -1
- package/flow/models/index.esm.js +125 -0
- package/flow/models/index.esm.js.map +1 -1
- package/flow/models/types.d.ts +148 -3
- package/flow/models/types.d.ts.map +1 -1
- package/flow/presets/conventional.d.ts +9 -8
- package/flow/presets/conventional.d.ts.map +1 -1
- package/flow/presets/independent.d.ts.map +1 -1
- package/flow/presets/index.cjs.js +3588 -298
- package/flow/presets/index.cjs.js.map +1 -1
- package/flow/presets/index.esm.js +3588 -298
- package/flow/presets/index.esm.js.map +1 -1
- package/flow/presets/synced.d.ts.map +1 -1
- package/flow/steps/analyze-commits.d.ts +9 -6
- package/flow/steps/analyze-commits.d.ts.map +1 -1
- package/flow/steps/calculate-bump.d.ts.map +1 -1
- package/flow/steps/fetch-registry.d.ts.map +1 -1
- package/flow/steps/generate-changelog.d.ts.map +1 -1
- package/flow/steps/index.cjs.js +3604 -318
- package/flow/steps/index.cjs.js.map +1 -1
- package/flow/steps/index.d.ts +1 -0
- package/flow/steps/index.d.ts.map +1 -1
- package/flow/steps/index.esm.js +3603 -319
- package/flow/steps/index.esm.js.map +1 -1
- package/flow/steps/resolve-repository.d.ts +36 -0
- package/flow/steps/resolve-repository.d.ts.map +1 -0
- package/flow/steps/update-packages.d.ts.map +1 -1
- package/git/factory.d.ts +14 -0
- package/git/factory.d.ts.map +1 -1
- package/git/index.cjs.js +65 -0
- package/git/index.cjs.js.map +1 -1
- package/git/index.esm.js +66 -2
- package/git/index.esm.js.map +1 -1
- package/git/operations/index.cjs.js +40 -0
- package/git/operations/index.cjs.js.map +1 -1
- package/git/operations/index.d.ts +1 -1
- package/git/operations/index.d.ts.map +1 -1
- package/git/operations/index.esm.js +41 -2
- package/git/operations/index.esm.js.map +1 -1
- package/git/operations/log.d.ts +23 -0
- package/git/operations/log.d.ts.map +1 -1
- package/index.cjs.js +6962 -4413
- package/index.cjs.js.map +1 -1
- package/index.esm.js +6964 -4415
- package/index.esm.js.map +1 -1
- package/package.json +26 -1
- package/registry/index.cjs.js +3 -3
- package/registry/index.cjs.js.map +1 -1
- package/registry/index.esm.js +3 -3
- package/registry/index.esm.js.map +1 -1
- package/registry/models/index.cjs.js +2 -0
- package/registry/models/index.cjs.js.map +1 -1
- package/registry/models/index.esm.js +2 -0
- package/registry/models/index.esm.js.map +1 -1
- package/registry/models/version-info.d.ts +10 -0
- package/registry/models/version-info.d.ts.map +1 -1
- package/registry/npm/client.d.ts.map +1 -1
- package/registry/npm/index.cjs.js +1 -3
- package/registry/npm/index.cjs.js.map +1 -1
- package/registry/npm/index.esm.js +1 -3
- package/registry/npm/index.esm.js.map +1 -1
- package/repository/index.cjs.js +998 -0
- package/repository/index.cjs.js.map +1 -0
- package/repository/index.d.ts +4 -0
- package/repository/index.d.ts.map +1 -0
- package/repository/index.esm.js +981 -0
- package/repository/index.esm.js.map +1 -0
- package/repository/models/index.cjs.js +301 -0
- package/repository/models/index.cjs.js.map +1 -0
- package/repository/models/index.d.ts +7 -0
- package/repository/models/index.d.ts.map +1 -0
- package/repository/models/index.esm.js +290 -0
- package/repository/models/index.esm.js.map +1 -0
- package/repository/models/platform.d.ts +58 -0
- package/repository/models/platform.d.ts.map +1 -0
- package/repository/models/repository-config.d.ts +132 -0
- package/repository/models/repository-config.d.ts.map +1 -0
- package/repository/models/resolution.d.ts +121 -0
- package/repository/models/resolution.d.ts.map +1 -0
- package/repository/parse/index.cjs.js +755 -0
- package/repository/parse/index.cjs.js.map +1 -0
- package/repository/parse/index.d.ts +5 -0
- package/repository/parse/index.d.ts.map +1 -0
- package/repository/parse/index.esm.js +749 -0
- package/repository/parse/index.esm.js.map +1 -0
- package/repository/parse/package-json.d.ts +100 -0
- package/repository/parse/package-json.d.ts.map +1 -0
- package/repository/parse/url.d.ts +81 -0
- package/repository/parse/url.d.ts.map +1 -0
- package/repository/url/compare.d.ts +84 -0
- package/repository/url/compare.d.ts.map +1 -0
- package/repository/url/index.cjs.js +178 -0
- package/repository/url/index.cjs.js.map +1 -0
- package/repository/url/index.d.ts +3 -0
- package/repository/url/index.d.ts.map +1 -0
- package/repository/url/index.esm.js +176 -0
- package/repository/url/index.esm.js.map +1 -0
- package/workspace/discovery/index.cjs.js +324 -330
- package/workspace/discovery/index.cjs.js.map +1 -1
- package/workspace/discovery/index.esm.js +324 -330
- package/workspace/discovery/index.esm.js.map +1 -1
- package/workspace/discovery/packages.d.ts +0 -6
- package/workspace/discovery/packages.d.ts.map +1 -1
- package/workspace/index.cjs.js +0 -6
- package/workspace/index.cjs.js.map +1 -1
- package/workspace/index.esm.js +0 -6
- package/workspace/index.esm.js.map +1 -1
package/flow/steps/index.cjs.js
CHANGED
|
@@ -1,5 +1,8 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
+
var node_path = require('node:path');
|
|
4
|
+
var node_fs = require('node:fs');
|
|
5
|
+
|
|
3
6
|
/**
|
|
4
7
|
* Safe copies of JSON built-in methods.
|
|
5
8
|
*
|
|
@@ -29,125 +32,2741 @@ const stringify = _JSON.stringify;
|
|
|
29
32
|
* @returns A FlowStep object
|
|
30
33
|
*
|
|
31
34
|
* @example
|
|
32
|
-
* ```typescript
|
|
33
|
-
* const fetchStep = createStep(
|
|
34
|
-
* 'fetch-registry',
|
|
35
|
-
* 'Fetch Registry Version',
|
|
36
|
-
* async (ctx) => {
|
|
37
|
-
* const version = await ctx.registry.getLatestVersion(ctx.packageName)
|
|
38
|
-
* return {
|
|
39
|
-
* status: 'success',
|
|
40
|
-
* stateUpdates: { publishedVersion: version },
|
|
41
|
-
* message: `Found published version: ${version}`
|
|
42
|
-
* }
|
|
43
|
-
* }
|
|
44
|
-
* )
|
|
45
|
-
* ```
|
|
35
|
+
* ```typescript
|
|
36
|
+
* const fetchStep = createStep(
|
|
37
|
+
* 'fetch-registry',
|
|
38
|
+
* 'Fetch Registry Version',
|
|
39
|
+
* async (ctx) => {
|
|
40
|
+
* const version = await ctx.registry.getLatestVersion(ctx.packageName)
|
|
41
|
+
* return {
|
|
42
|
+
* status: 'success',
|
|
43
|
+
* stateUpdates: { publishedVersion: version },
|
|
44
|
+
* message: `Found published version: ${version}`
|
|
45
|
+
* }
|
|
46
|
+
* }
|
|
47
|
+
* )
|
|
48
|
+
* ```
|
|
49
|
+
*/
|
|
50
|
+
function createStep(id, name, execute, options = {}) {
|
|
51
|
+
return {
|
|
52
|
+
id,
|
|
53
|
+
name,
|
|
54
|
+
execute,
|
|
55
|
+
description: options.description,
|
|
56
|
+
skipIf: options.skipIf,
|
|
57
|
+
continueOnError: options.continueOnError,
|
|
58
|
+
dependsOn: options.dependsOn,
|
|
59
|
+
};
|
|
60
|
+
}
|
|
61
|
+
/**
|
|
62
|
+
* Creates a skipped step result.
|
|
63
|
+
*
|
|
64
|
+
* @param message - Explanation for why the step was skipped
|
|
65
|
+
* @returns A FlowStepResult with 'skipped' status
|
|
66
|
+
*/
|
|
67
|
+
function createSkippedResult(message) {
|
|
68
|
+
return {
|
|
69
|
+
status: 'skipped',
|
|
70
|
+
message,
|
|
71
|
+
};
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
const FETCH_REGISTRY_STEP_ID = 'fetch-registry';
|
|
75
|
+
/**
|
|
76
|
+
* Creates the fetch-registry step.
|
|
77
|
+
*
|
|
78
|
+
* This step:
|
|
79
|
+
* 1. Queries the registry for the latest published version
|
|
80
|
+
* 2. Reads the current version from package.json
|
|
81
|
+
* 3. Determines if this is a first release
|
|
82
|
+
*
|
|
83
|
+
* State updates:
|
|
84
|
+
* - publishedVersion: Latest version on registry (null if not published)
|
|
85
|
+
* - currentVersion: Version from local package.json
|
|
86
|
+
* - isFirstRelease: True if never published
|
|
87
|
+
*
|
|
88
|
+
* @returns A FlowStep that fetches registry information
|
|
89
|
+
*/
|
|
90
|
+
function createFetchRegistryStep() {
|
|
91
|
+
return createStep(FETCH_REGISTRY_STEP_ID, 'Fetch Registry Version', async (ctx) => {
|
|
92
|
+
const { registry, tree, projectRoot, packageName, logger } = ctx;
|
|
93
|
+
// Read local package.json for current version
|
|
94
|
+
const packageJsonPath = `${projectRoot}/package.json`;
|
|
95
|
+
let currentVersion = '0.0.0';
|
|
96
|
+
try {
|
|
97
|
+
const content = tree.read(packageJsonPath, 'utf-8');
|
|
98
|
+
if (content) {
|
|
99
|
+
const pkg = parse(content);
|
|
100
|
+
currentVersion = pkg.version ?? '0.0.0';
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
catch (error) {
|
|
104
|
+
logger.warn(`Could not read package.json: ${error}`);
|
|
105
|
+
}
|
|
106
|
+
// Query registry for published version
|
|
107
|
+
let publishedVersion = null;
|
|
108
|
+
let publishedCommit = null;
|
|
109
|
+
let isFirstRelease = true;
|
|
110
|
+
try {
|
|
111
|
+
publishedVersion = await registry.getLatestVersion(packageName);
|
|
112
|
+
isFirstRelease = publishedVersion === null;
|
|
113
|
+
// When published version exists, get its commit hash from gitHead
|
|
114
|
+
if (publishedVersion) {
|
|
115
|
+
try {
|
|
116
|
+
const versionInfo = await registry.getVersionInfo(packageName, publishedVersion);
|
|
117
|
+
publishedCommit = versionInfo?.gitHead ?? null;
|
|
118
|
+
if (publishedCommit) {
|
|
119
|
+
logger.debug(`Published ${publishedVersion} at commit ${publishedCommit.slice(0, 7)}`);
|
|
120
|
+
}
|
|
121
|
+
else {
|
|
122
|
+
logger.debug(`Published ${publishedVersion} has no gitHead (older package or published without git)`);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
catch (error) {
|
|
126
|
+
// Version info fetch failed, but we still have the version
|
|
127
|
+
logger.debug(`Could not fetch version info for ${publishedVersion}: ${error}`);
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
catch (error) {
|
|
132
|
+
// Package might not exist yet, which is fine
|
|
133
|
+
logger.debug(`Registry query failed (package may not exist): ${error}`);
|
|
134
|
+
isFirstRelease = true;
|
|
135
|
+
}
|
|
136
|
+
const message = isFirstRelease
|
|
137
|
+
? `First release (local: ${currentVersion})`
|
|
138
|
+
: `Published: ${publishedVersion}${publishedCommit ? ` @ ${publishedCommit.slice(0, 7)}` : ''}, Local: ${currentVersion}`;
|
|
139
|
+
return {
|
|
140
|
+
status: 'success',
|
|
141
|
+
stateUpdates: {
|
|
142
|
+
publishedVersion,
|
|
143
|
+
publishedCommit,
|
|
144
|
+
currentVersion,
|
|
145
|
+
isFirstRelease,
|
|
146
|
+
},
|
|
147
|
+
message,
|
|
148
|
+
};
|
|
149
|
+
});
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
/**
|
|
153
|
+
* Safe copies of Error built-ins via factory functions.
|
|
154
|
+
*
|
|
155
|
+
* Since constructors cannot be safely captured via Object.assign, this module
|
|
156
|
+
* provides factory functions that use Reflect.construct internally.
|
|
157
|
+
*
|
|
158
|
+
* These references are captured at module initialization time to protect against
|
|
159
|
+
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
160
|
+
*
|
|
161
|
+
* @module @hyperfrontend/immutable-api-utils/built-in-copy/error
|
|
162
|
+
*/
|
|
163
|
+
// Capture references at module initialization time
|
|
164
|
+
const _Error = globalThis.Error;
|
|
165
|
+
const _Reflect$4 = globalThis.Reflect;
|
|
166
|
+
/**
|
|
167
|
+
* (Safe copy) Creates a new Error using the captured Error constructor.
|
|
168
|
+
* Use this instead of `new Error()`.
|
|
169
|
+
*
|
|
170
|
+
* @param message - Optional error message.
|
|
171
|
+
* @param options - Optional error options.
|
|
172
|
+
* @returns A new Error instance.
|
|
173
|
+
*/
|
|
174
|
+
const createError = (message, options) => _Reflect$4.construct(_Error, [message, options]);
|
|
175
|
+
|
|
176
|
+
/**
|
|
177
|
+
* Creates a new RepositoryConfig.
|
|
178
|
+
*
|
|
179
|
+
* Normalizes the base URL by stripping trailing slashes and validating
|
|
180
|
+
* that custom platforms have a formatter function.
|
|
181
|
+
*
|
|
182
|
+
* @param options - Repository configuration options
|
|
183
|
+
* @returns A new RepositoryConfig object
|
|
184
|
+
* @throws {Error} if platform is 'custom' but no formatCompareUrl is provided
|
|
185
|
+
*
|
|
186
|
+
* @example
|
|
187
|
+
* ```typescript
|
|
188
|
+
* // GitHub repository
|
|
189
|
+
* const config = createRepositoryConfig({
|
|
190
|
+
* platform: 'github',
|
|
191
|
+
* baseUrl: 'https://github.com/owner/repo'
|
|
192
|
+
* })
|
|
193
|
+
*
|
|
194
|
+
* // Custom platform
|
|
195
|
+
* const customConfig = createRepositoryConfig({
|
|
196
|
+
* platform: 'custom',
|
|
197
|
+
* baseUrl: 'https://my-git.internal/repo',
|
|
198
|
+
* formatCompareUrl: (from, to) => `https://my-git.internal/diff/${from}/${to}`
|
|
199
|
+
* })
|
|
200
|
+
* ```
|
|
201
|
+
*/
|
|
202
|
+
function createRepositoryConfig(options) {
|
|
203
|
+
const { platform, formatCompareUrl } = options;
|
|
204
|
+
// Validate custom platform has formatter
|
|
205
|
+
if (platform === 'custom' && !formatCompareUrl) {
|
|
206
|
+
throw createError("Repository config with platform 'custom' requires a formatCompareUrl function");
|
|
207
|
+
}
|
|
208
|
+
// Normalize base URL - strip trailing slashes
|
|
209
|
+
const baseUrl = normalizeBaseUrl(options.baseUrl);
|
|
210
|
+
return {
|
|
211
|
+
platform,
|
|
212
|
+
baseUrl,
|
|
213
|
+
formatCompareUrl,
|
|
214
|
+
};
|
|
215
|
+
}
|
|
216
|
+
/**
|
|
217
|
+
* Checks if a value is a RepositoryConfig object.
|
|
218
|
+
*
|
|
219
|
+
* @param value - Value to check
|
|
220
|
+
* @returns True if the value is a RepositoryConfig
|
|
221
|
+
*
|
|
222
|
+
* @example
|
|
223
|
+
* ```typescript
|
|
224
|
+
* const config = { platform: 'github', baseUrl: 'https://...' }
|
|
225
|
+
* if (isRepositoryConfig(config)) {
|
|
226
|
+
* // config is typed as RepositoryConfig
|
|
227
|
+
* }
|
|
228
|
+
* ```
|
|
229
|
+
*/
|
|
230
|
+
function isRepositoryConfig(value) {
|
|
231
|
+
if (typeof value !== 'object' || value === null) {
|
|
232
|
+
return false;
|
|
233
|
+
}
|
|
234
|
+
const obj = value;
|
|
235
|
+
return (typeof obj['platform'] === 'string' &&
|
|
236
|
+
typeof obj['baseUrl'] === 'string' &&
|
|
237
|
+
(obj['formatCompareUrl'] === undefined || typeof obj['formatCompareUrl'] === 'function'));
|
|
238
|
+
}
|
|
239
|
+
/**
|
|
240
|
+
* Normalizes a base URL by stripping trailing slashes and .git suffix.
|
|
241
|
+
*
|
|
242
|
+
* @param url - URL to normalize
|
|
243
|
+
* @returns Normalized URL
|
|
244
|
+
*
|
|
245
|
+
* @internal
|
|
246
|
+
*/
|
|
247
|
+
function normalizeBaseUrl(url) {
|
|
248
|
+
let normalized = url.trim();
|
|
249
|
+
// Remove trailing slashes
|
|
250
|
+
while (normalized.endsWith('/')) {
|
|
251
|
+
normalized = normalized.slice(0, -1);
|
|
252
|
+
}
|
|
253
|
+
// Remove .git suffix if present
|
|
254
|
+
if (normalized.endsWith('.git')) {
|
|
255
|
+
normalized = normalized.slice(0, -4);
|
|
256
|
+
}
|
|
257
|
+
return normalized;
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
/**
|
|
261
|
+
* Creates a disabled repository resolution configuration.
|
|
262
|
+
*
|
|
263
|
+
* No compare URLs will be generated.
|
|
264
|
+
*
|
|
265
|
+
* @returns A RepositoryResolution with mode 'disabled'
|
|
266
|
+
*
|
|
267
|
+
* @example
|
|
268
|
+
* ```typescript
|
|
269
|
+
* const config = createDisabledResolution()
|
|
270
|
+
* // { mode: 'disabled' }
|
|
271
|
+
* ```
|
|
272
|
+
*/
|
|
273
|
+
/**
|
|
274
|
+
* Checks if a value is a RepositoryResolution object.
|
|
275
|
+
*
|
|
276
|
+
* @param value - Value to check
|
|
277
|
+
* @returns True if the value is a RepositoryResolution
|
|
278
|
+
*/
|
|
279
|
+
function isRepositoryResolution(value) {
|
|
280
|
+
if (typeof value !== 'object' || value === null) {
|
|
281
|
+
return false;
|
|
282
|
+
}
|
|
283
|
+
const obj = value;
|
|
284
|
+
const mode = obj['mode'];
|
|
285
|
+
return mode === 'explicit' || mode === 'inferred' || mode === 'disabled';
|
|
286
|
+
}
|
|
287
|
+
/**
|
|
288
|
+
* Default inference order when mode is 'inferred'.
|
|
289
|
+
*/
|
|
290
|
+
const DEFAULT_INFERENCE_ORDER = ['package-json', 'git-remote'];
|
|
291
|
+
|
|
292
|
+
/**
|
|
293
|
+
* Safe copies of Map built-in via factory function.
|
|
294
|
+
*
|
|
295
|
+
* Since constructors cannot be safely captured via Object.assign, this module
|
|
296
|
+
* provides a factory function that uses Reflect.construct internally.
|
|
297
|
+
*
|
|
298
|
+
* These references are captured at module initialization time to protect against
|
|
299
|
+
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
300
|
+
*
|
|
301
|
+
* @module @hyperfrontend/immutable-api-utils/built-in-copy/map
|
|
302
|
+
*/
|
|
303
|
+
// Capture references at module initialization time
|
|
304
|
+
const _Map = globalThis.Map;
|
|
305
|
+
const _Reflect$3 = globalThis.Reflect;
|
|
306
|
+
/**
|
|
307
|
+
* (Safe copy) Creates a new Map using the captured Map constructor.
|
|
308
|
+
* Use this instead of `new Map()`.
|
|
309
|
+
*
|
|
310
|
+
* @param iterable - Optional iterable of key-value pairs.
|
|
311
|
+
* @returns A new Map instance.
|
|
312
|
+
*/
|
|
313
|
+
const createMap = (iterable) => _Reflect$3.construct(_Map, iterable ? [iterable] : []);
|
|
314
|
+
|
|
315
|
+
/**
|
|
316
|
+
* Safe copies of Math built-in methods.
|
|
317
|
+
*
|
|
318
|
+
* These references are captured at module initialization time to protect against
|
|
319
|
+
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
320
|
+
*
|
|
321
|
+
* @module @hyperfrontend/immutable-api-utils/built-in-copy/math
|
|
322
|
+
*/
|
|
323
|
+
// Capture references at module initialization time
|
|
324
|
+
const _Math = globalThis.Math;
|
|
325
|
+
// ============================================================================
|
|
326
|
+
// Min/Max
|
|
327
|
+
// ============================================================================
|
|
328
|
+
/**
|
|
329
|
+
* (Safe copy) Returns the larger of zero or more numbers.
|
|
330
|
+
*/
|
|
331
|
+
const max = _Math.max;
|
|
332
|
+
/**
|
|
333
|
+
* (Safe copy) Returns the smaller of zero or more numbers.
|
|
334
|
+
*/
|
|
335
|
+
const min = _Math.min;
|
|
336
|
+
|
|
337
|
+
/**
|
|
338
|
+
* Safe copies of URL built-ins via factory functions.
|
|
339
|
+
*
|
|
340
|
+
* Provides safe references to URL and URLSearchParams.
|
|
341
|
+
* These references are captured at module initialization time to protect against
|
|
342
|
+
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
343
|
+
*
|
|
344
|
+
* @module @hyperfrontend/immutable-api-utils/built-in-copy/url
|
|
345
|
+
*/
|
|
346
|
+
// Capture references at module initialization time
|
|
347
|
+
const _URL = globalThis.URL;
|
|
348
|
+
const _Reflect$2 = globalThis.Reflect;
|
|
349
|
+
// ============================================================================
|
|
350
|
+
// URL
|
|
351
|
+
// ============================================================================
|
|
352
|
+
/**
|
|
353
|
+
* (Safe copy) Creates a new URL using the captured URL constructor.
|
|
354
|
+
* Use this instead of `new URL()`.
|
|
355
|
+
*
|
|
356
|
+
* @param url - The URL string to parse.
|
|
357
|
+
* @param base - Optional base URL for relative URLs.
|
|
358
|
+
* @returns A new URL instance.
|
|
359
|
+
*/
|
|
360
|
+
const createURL = (url, base) => _Reflect$2.construct(_URL, [url, base]);
|
|
361
|
+
/**
|
|
362
|
+
* (Safe copy) Creates an object URL for the given object.
|
|
363
|
+
* Use this instead of `URL.createObjectURL()`.
|
|
364
|
+
*
|
|
365
|
+
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
366
|
+
*/
|
|
367
|
+
typeof _URL.createObjectURL === 'function'
|
|
368
|
+
? _URL.createObjectURL.bind(_URL)
|
|
369
|
+
: () => {
|
|
370
|
+
throw new Error('URL.createObjectURL is not available in this environment');
|
|
371
|
+
};
|
|
372
|
+
/**
|
|
373
|
+
* (Safe copy) Revokes an object URL previously created with createObjectURL.
|
|
374
|
+
* Use this instead of `URL.revokeObjectURL()`.
|
|
375
|
+
*
|
|
376
|
+
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
377
|
+
*/
|
|
378
|
+
typeof _URL.revokeObjectURL === 'function'
|
|
379
|
+
? _URL.revokeObjectURL.bind(_URL)
|
|
380
|
+
: () => {
|
|
381
|
+
throw new Error('URL.revokeObjectURL is not available in this environment');
|
|
382
|
+
};
|
|
383
|
+
|
|
384
|
+
/**
|
|
385
|
+
* Checks if a platform identifier is a known platform with built-in support.
|
|
386
|
+
*
|
|
387
|
+
* @param platform - Platform identifier to check
|
|
388
|
+
* @returns True if the platform is a known platform
|
|
389
|
+
*
|
|
390
|
+
* @example
|
|
391
|
+
* ```typescript
|
|
392
|
+
* isKnownPlatform('github') // true
|
|
393
|
+
* isKnownPlatform('gitlab') // true
|
|
394
|
+
* isKnownPlatform('custom') // false
|
|
395
|
+
* isKnownPlatform('unknown') // false
|
|
396
|
+
* ```
|
|
397
|
+
*/
|
|
398
|
+
function isKnownPlatform(platform) {
|
|
399
|
+
return platform === 'github' || platform === 'gitlab' || platform === 'bitbucket' || platform === 'azure-devops';
|
|
400
|
+
}
|
|
401
|
+
/**
|
|
402
|
+
* Known platform hostnames mapped to their platform type.
|
|
403
|
+
* Used for automatic platform detection from repository URLs.
|
|
404
|
+
*
|
|
405
|
+
* Includes both standard SaaS domains and common patterns for self-hosted instances.
|
|
406
|
+
*/
|
|
407
|
+
const PLATFORM_HOSTNAMES = createMap([
|
|
408
|
+
// GitHub
|
|
409
|
+
['github.com', 'github'],
|
|
410
|
+
// GitLab
|
|
411
|
+
['gitlab.com', 'gitlab'],
|
|
412
|
+
// Bitbucket
|
|
413
|
+
['bitbucket.org', 'bitbucket'],
|
|
414
|
+
// Azure DevOps
|
|
415
|
+
['dev.azure.com', 'azure-devops'],
|
|
416
|
+
['visualstudio.com', 'azure-devops'],
|
|
417
|
+
]);
|
|
418
|
+
/**
|
|
419
|
+
* Detects platform from a hostname.
|
|
420
|
+
*
|
|
421
|
+
* First checks for exact match in known platforms, then applies heuristics
|
|
422
|
+
* for self-hosted instances (e.g., `github.company.com` → `github`).
|
|
423
|
+
*
|
|
424
|
+
* @param hostname - Hostname to detect platform from (e.g., "github.com")
|
|
425
|
+
* @returns Detected platform or 'unknown' if not recognized
|
|
426
|
+
*
|
|
427
|
+
* @example
|
|
428
|
+
* ```typescript
|
|
429
|
+
* detectPlatformFromHostname('github.com') // 'github'
|
|
430
|
+
* detectPlatformFromHostname('gitlab.mycompany.com') // 'gitlab'
|
|
431
|
+
* detectPlatformFromHostname('custom-git.internal') // 'unknown'
|
|
432
|
+
* ```
|
|
433
|
+
*/
|
|
434
|
+
function detectPlatformFromHostname(hostname) {
|
|
435
|
+
const normalized = hostname.toLowerCase();
|
|
436
|
+
// Check exact matches first
|
|
437
|
+
const exactMatch = PLATFORM_HOSTNAMES.get(normalized);
|
|
438
|
+
if (exactMatch) {
|
|
439
|
+
return exactMatch;
|
|
440
|
+
}
|
|
441
|
+
// Check for Azure DevOps legacy domain pattern
|
|
442
|
+
if (normalized.endsWith('.visualstudio.com')) {
|
|
443
|
+
return 'azure-devops';
|
|
444
|
+
}
|
|
445
|
+
// Check for Azure DevOps modern domain pattern (includes ssh.dev.azure.com)
|
|
446
|
+
if (normalized.endsWith('.azure.com')) {
|
|
447
|
+
return 'azure-devops';
|
|
448
|
+
}
|
|
449
|
+
// Heuristics for self-hosted instances
|
|
450
|
+
// GitHub Enterprise typically uses "github" in the hostname
|
|
451
|
+
if (normalized.includes('github')) {
|
|
452
|
+
return 'github';
|
|
453
|
+
}
|
|
454
|
+
// GitLab self-hosted typically uses "gitlab" in the hostname
|
|
455
|
+
if (normalized.includes('gitlab')) {
|
|
456
|
+
return 'gitlab';
|
|
457
|
+
}
|
|
458
|
+
// Bitbucket Data Center/Server might use "bitbucket" in hostname
|
|
459
|
+
if (normalized.includes('bitbucket')) {
|
|
460
|
+
return 'bitbucket';
|
|
461
|
+
}
|
|
462
|
+
return 'unknown';
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
/**
|
|
466
|
+
* Parses a git URL and extracts platform and base URL.
|
|
467
|
+
*
|
|
468
|
+
* Supports multiple URL formats:
|
|
469
|
+
* - `https://github.com/owner/repo`
|
|
470
|
+
* - `https://github.com/owner/repo.git`
|
|
471
|
+
* - `git+https://github.com/owner/repo.git`
|
|
472
|
+
* - `git://github.com/owner/repo.git`
|
|
473
|
+
* - `git@github.com:owner/repo.git` (SSH format)
|
|
474
|
+
*
|
|
475
|
+
* Handles self-hosted instances by detecting platform from hostname:
|
|
476
|
+
* - `github.mycompany.com` → `github`
|
|
477
|
+
* - `gitlab.internal.com` → `gitlab`
|
|
478
|
+
*
|
|
479
|
+
* Handles Azure DevOps URL formats:
|
|
480
|
+
* - `https://dev.azure.com/org/project/_git/repo`
|
|
481
|
+
* - `https://org.visualstudio.com/project/_git/repo`
|
|
482
|
+
*
|
|
483
|
+
* @param gitUrl - Git repository URL in any supported format
|
|
484
|
+
* @returns Parsed repository info with platform and base URL, or null if parsing fails
|
|
485
|
+
*
|
|
486
|
+
* @example
|
|
487
|
+
* ```typescript
|
|
488
|
+
* // GitHub HTTPS
|
|
489
|
+
* parseRepositoryUrl('https://github.com/owner/repo')
|
|
490
|
+
* // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
|
|
491
|
+
*
|
|
492
|
+
* // SSH format
|
|
493
|
+
* parseRepositoryUrl('git@github.com:owner/repo.git')
|
|
494
|
+
* // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
|
|
495
|
+
*
|
|
496
|
+
* // Azure DevOps
|
|
497
|
+
* parseRepositoryUrl('https://dev.azure.com/org/proj/_git/repo')
|
|
498
|
+
* // → { platform: 'azure-devops', baseUrl: 'https://dev.azure.com/org/proj/_git/repo' }
|
|
499
|
+
*
|
|
500
|
+
* // Self-hosted GitLab
|
|
501
|
+
* parseRepositoryUrl('https://gitlab.mycompany.com/team/project')
|
|
502
|
+
* // → { platform: 'gitlab', baseUrl: 'https://gitlab.mycompany.com/team/project' }
|
|
503
|
+
* ```
|
|
504
|
+
*/
|
|
505
|
+
function parseRepositoryUrl(gitUrl) {
|
|
506
|
+
if (!gitUrl || typeof gitUrl !== 'string') {
|
|
507
|
+
return null;
|
|
508
|
+
}
|
|
509
|
+
const trimmed = gitUrl.trim();
|
|
510
|
+
if (!trimmed) {
|
|
511
|
+
return null;
|
|
512
|
+
}
|
|
513
|
+
// Try SSH format first: git@hostname:path
|
|
514
|
+
const sshParsed = parseSshUrl(trimmed);
|
|
515
|
+
if (sshParsed) {
|
|
516
|
+
return sshParsed;
|
|
517
|
+
}
|
|
518
|
+
// Try HTTP(S) formats
|
|
519
|
+
const httpParsed = parseHttpUrl(trimmed);
|
|
520
|
+
if (httpParsed) {
|
|
521
|
+
return httpParsed;
|
|
522
|
+
}
|
|
523
|
+
return null;
|
|
524
|
+
}
|
|
525
|
+
/**
|
|
526
|
+
* Parses an SSH-style git URL.
|
|
527
|
+
*
|
|
528
|
+
* @param url - URL to parse (e.g., "git@github.com:owner/repo.git")
|
|
529
|
+
* @returns Parsed repository or null
|
|
530
|
+
*
|
|
531
|
+
* @internal
|
|
532
|
+
*/
|
|
533
|
+
function parseSshUrl(url) {
|
|
534
|
+
// Handle optional ssh:// prefix
|
|
535
|
+
let remaining = url;
|
|
536
|
+
if (remaining.startsWith('ssh://')) {
|
|
537
|
+
remaining = remaining.slice(6);
|
|
538
|
+
}
|
|
539
|
+
// Must start with git@
|
|
540
|
+
if (!remaining.startsWith('git@')) {
|
|
541
|
+
return null;
|
|
542
|
+
}
|
|
543
|
+
// Remove git@ prefix
|
|
544
|
+
remaining = remaining.slice(4);
|
|
545
|
+
// Find the separator (: or /)
|
|
546
|
+
const colonIndex = remaining.indexOf(':');
|
|
547
|
+
const slashIndex = remaining.indexOf('/');
|
|
548
|
+
let separatorIndex;
|
|
549
|
+
if (colonIndex === -1 && slashIndex === -1) {
|
|
550
|
+
return null;
|
|
551
|
+
}
|
|
552
|
+
else if (colonIndex === -1) {
|
|
553
|
+
separatorIndex = slashIndex;
|
|
554
|
+
}
|
|
555
|
+
else if (slashIndex === -1) {
|
|
556
|
+
separatorIndex = colonIndex;
|
|
557
|
+
}
|
|
558
|
+
else {
|
|
559
|
+
separatorIndex = min(colonIndex, slashIndex);
|
|
560
|
+
}
|
|
561
|
+
const hostname = remaining.slice(0, separatorIndex);
|
|
562
|
+
const pathPart = normalizePathPart(remaining.slice(separatorIndex + 1));
|
|
563
|
+
if (!hostname || !pathPart) {
|
|
564
|
+
return null;
|
|
565
|
+
}
|
|
566
|
+
const platform = detectPlatformFromHostname(hostname);
|
|
567
|
+
// For Azure DevOps, construct proper base URL
|
|
568
|
+
if (platform === 'azure-devops') {
|
|
569
|
+
const baseUrl = constructAzureDevOpsBaseUrl(hostname, pathPart);
|
|
570
|
+
if (baseUrl) {
|
|
571
|
+
return { platform, baseUrl };
|
|
572
|
+
}
|
|
573
|
+
return null;
|
|
574
|
+
}
|
|
575
|
+
// Standard platforms: https://hostname/path
|
|
576
|
+
const baseUrl = `https://${hostname}/${pathPart}`;
|
|
577
|
+
return { platform, baseUrl };
|
|
578
|
+
}
|
|
579
|
+
/**
 * Parses an HTTP(S)-style git URL.
 *
 * Accepts `https://`, `http://`, `git+https://` and `git://` URLs and
 * produces a `{ platform, baseUrl }` pair for the detected platform.
 *
 * @param url - URL to parse
 * @returns Parsed repository or null when the URL is malformed or uses an
 *   unsupported protocol
 *
 * @internal
 */
function parseHttpUrl(url) {
    // Normalize various git URL prefixes to https://
    const normalized = url
        .replace(/^git\+/, '') // git+https:// → https://
        .replace(/^git:\/\//, 'https://'); // git:// → https://
    let parsed;
    try {
        parsed = createURL(normalized);
    }
    catch {
        return null;
    }
    // Only support http and https protocols
    if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
        return null;
    }
    // Platform detection is based on the hostname alone (never the port).
    const hostname = parsed.hostname.toLowerCase();
    const platform = detectPlatformFromHostname(hostname);
    const pathPart = normalizePathPart(parsed.pathname);
    if (!pathPart) {
        return null;
    }
    // Handle Azure DevOps special URL structure
    if (platform === 'azure-devops') {
        const baseUrl = constructAzureDevOpsBaseUrl(hostname, pathPart);
        if (baseUrl) {
            return { platform, baseUrl };
        }
        // If Azure DevOps URL cannot be parsed properly, return null
        return null;
    }
    // Standard platforms. Use `host` (hostname plus optional port) rather
    // than `hostname` so self-hosted instances on non-default ports, e.g.
    // https://gitlab.example.com:8443/group/proj, keep their port in the
    // generated base URL. For URLs without a port, host === hostname.
    const baseUrl = `${parsed.protocol}//${parsed.host.toLowerCase()}/${pathPart}`;
    return { platform, baseUrl };
}
|
|
622
|
+
/**
 * Normalizes a path part by trimming whitespace, stripping leading and
 * trailing slashes, and removing a trailing `.git` suffix.
 *
 * @param path - Path to normalize
 * @returns Normalized path, or null when nothing usable remains
 *
 * @internal
 */
function normalizePathPart(path) {
    // Trim whitespace, then peel slashes off both ends in one pass each.
    const stripped = path
        .trim()
        .replace(/^\/+/, '')
        .replace(/\/+$/, '');
    // Drop a single trailing `.git` suffix, if present.
    const withoutGit = stripped.endsWith('.git')
        ? stripped.slice(0, -'.git'.length)
        : stripped;
    // An empty result means there was no usable path.
    return withoutGit === '' ? null : withoutGit;
}
|
|
650
|
+
/**
 * Constructs the base URL for Azure DevOps repositories.
 *
 * Azure DevOps has special URL structures:
 * - Modern: `https://dev.azure.com/{org}/{project}/_git/{repo}`
 * - Legacy: `https://{org}.visualstudio.com/{project}/_git/{repo}`
 * - SSH: `git@ssh.dev.azure.com:v3/{org}/{project}/{repo}`
 *
 * All recognized layouts are normalized to the modern dev.azure.com form.
 *
 * @param hostname - Hostname from the URL
 * @param pathPart - Path portion after hostname
 * @returns Constructed base URL or null if invalid
 *
 * @internal
 */
function constructAzureDevOpsBaseUrl(hostname, pathPart) {
    const segments = pathPart.split('/');
    // dev.azure.com (and *.azure.com, e.g. ssh.dev.azure.com) hosts.
    if (hostname === 'dev.azure.com' || hostname.endsWith('.azure.com')) {
        // Both supported layouts need at least four segments:
        //   org/project/_git/repo  or  v3/org/project/repo (SSH)
        if (segments.length < 4) {
            return null;
        }
        // SSH v3 layout: v3/org/project/repo
        if (segments[0] === 'v3') {
            const [, org, project, repo] = segments;
            if (org && project && repo) {
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        // Web layout: org/project/_git/repo (org may span several segments).
        const gitIndex = segments.indexOf('_git');
        if (gitIndex >= 2 && segments[gitIndex + 1]) {
            const org = segments.slice(0, gitIndex - 1).join('/');
            const project = segments[gitIndex - 1];
            const repo = segments[gitIndex + 1];
            if (org && project && repo) {
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        return null;
    }
    // Legacy {org}.visualstudio.com hosts: project/_git/repo
    if (hostname.endsWith('.visualstudio.com')) {
        const org = hostname.slice(0, -'.visualstudio.com'.length);
        const gitIndex = segments.indexOf('_git');
        if (gitIndex >= 1 && segments[gitIndex + 1]) {
            const project = segments.slice(0, gitIndex).join('/');
            const repo = segments[gitIndex + 1];
            if (project && repo) {
                // Normalize legacy URLs to the modern dev.azure.com form.
                return `https://dev.azure.com/${org}/${project}/_git/${repo}`;
            }
        }
        return null;
    }
    return null;
}
|
|
710
|
+
/**
 * Creates a RepositoryConfig from a git URL.
 *
 * Convenience wrapper combining `parseRepositoryUrl` with
 * `createRepositoryConfig` to produce a ready-to-use configuration.
 *
 * @param gitUrl - Git repository URL in any supported format
 * @returns RepositoryConfig or null if URL cannot be parsed
 *
 * @example
 * ```typescript
 * const config = createRepositoryConfigFromUrl('https://github.com/owner/repo')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 *
 * const config = createRepositoryConfigFromUrl('git@gitlab.com:group/project.git')
 * // → { platform: 'gitlab', baseUrl: 'https://gitlab.com/group/project' }
 * ```
 */
function createRepositoryConfigFromUrl(gitUrl) {
    const parsed = parseRepositoryUrl(gitUrl);
    // Unparseable URLs and unknown platforms both yield null: configs for
    // unknown platforms are useless because they cannot generate URLs.
    if (!parsed || parsed.platform === 'unknown') {
        return null;
    }
    const { platform, baseUrl } = parsed;
    return createRepositoryConfig({ platform, baseUrl });
}
|
|
742
|
+
|
|
743
|
+
/**
 * Shorthand platform prefixes supported in package.json repository field.
 *
 * Format: `"platform:owner/repo"` or `"owner/repo"` (defaults to GitHub)
 *
 * Maps a lowercase platform prefix to the https base it expands to; consumed
 * by `parseRepositoryString` when resolving shorthand repository strings.
 *
 * @see https://docs.npmjs.com/cli/v9/configuring-npm/package-json#repository
 */
const SHORTHAND_PLATFORMS = createMap([
    ['github', 'https://github.com'],
    ['gitlab', 'https://gitlab.com'],
    ['bitbucket', 'https://bitbucket.org'],
    ['gist', 'https://gist.github.com'],
]);
|
|
756
|
+
/**
 * Infers repository configuration from package.json content.
 *
 * Handles multiple formats:
 * - Shorthand: `"github:owner/repo"`, `"gitlab:group/project"`, `"bitbucket:team/repo"`
 * - Bare shorthand: `"owner/repo"` (defaults to GitHub)
 * - URL string: `"https://github.com/owner/repo"`
 * - Object with URL: `{ "type": "git", "url": "https://..." }`
 *
 * @param packageJsonContent - Raw JSON string content of package.json
 * @returns RepositoryConfig or null if repository cannot be inferred
 *
 * @example
 * ```typescript
 * inferRepositoryFromPackageJson('{"repository": "github:owner/repo"}')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 *
 * inferRepositoryFromPackageJson('{"repository": {"type": "git", "url": "https://github.com/owner/repo"}}')
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function inferRepositoryFromPackageJson(packageJsonContent) {
    // Reject non-strings and the empty string up front.
    if (typeof packageJsonContent !== 'string' || packageJsonContent === '') {
        return null;
    }
    let parsedPkg;
    try {
        parsedPkg = parse(packageJsonContent);
    }
    catch {
        // Malformed JSON → nothing to infer.
        return null;
    }
    return inferRepositoryFromPackageJsonObject(parsedPkg);
}
|
|
800
|
+
/**
 * Infers repository configuration from a parsed package.json object.
 *
 * Useful when the object is already parsed.
 *
 * @param packageJson - Parsed package.json object
 * @returns RepositoryConfig or null if repository cannot be inferred
 *
 * @example
 * ```typescript
 * inferRepositoryFromPackageJsonObject({ repository: 'github:owner/repo' })
 * // → { platform: 'github', baseUrl: 'https://github.com/owner/repo' }
 * ```
 */
function inferRepositoryFromPackageJsonObject(packageJson) {
    const repo = packageJson.repository;
    if (!repo) {
        return null;
    }
    // String form: shorthand or URL.
    if (typeof repo === 'string') {
        return parseRepositoryString(repo);
    }
    // Object form: { type, url }.
    if (typeof repo === 'object' && repo.url) {
        return createRepositoryConfigFromUrl(repo.url);
    }
    return null;
}
|
|
830
|
+
/**
 * Parses a repository string (shorthand or URL).
 *
 * @param repoString - Repository string from package.json
 * @returns RepositoryConfig or null
 *
 * @internal
 */
function parseRepositoryString(repoString) {
    const value = repoString.trim();
    if (!value) {
        return null;
    }
    // Shorthand "platform:owner/repo". The prefix must be letters only so
    // that protocol prefixes such as "https:" are not mistaken for one.
    const colonIndex = value.indexOf(':');
    if (colonIndex > 0 && isOnlyLetters(value.slice(0, colonIndex))) {
        const rest = value.slice(colonIndex + 1);
        if (rest) {
            const platformBase = SHORTHAND_PLATFORMS.get(value.slice(0, colonIndex).toLowerCase());
            // Known prefix → expand to a full URL; unknown prefix → try the
            // whole string as a URL.
            return createRepositoryConfigFromUrl(platformBase ? `${platformBase}/${rest}` : value);
        }
    }
    // Bare "owner/repo" shorthand (no protocol, no git@) defaults to GitHub.
    if (!value.includes('://') && !value.startsWith('git@') && isBareShorthand(value)) {
        return createRepositoryConfigFromUrl(`https://github.com/${value}`);
    }
    // Anything else: treat as a full URL.
    return createRepositoryConfigFromUrl(value);
}
|
|
875
|
+
/**
 * Checks if a string contains only ASCII letters (a-z, A-Z).
 *
 * @param str - String to check
 * @returns True if the string is non-empty and all-letters
 *
 * @internal
 */
function isOnlyLetters(str) {
    // Anchored regex: one or more ASCII letters, nothing else.
    // `+` (not `*`) makes the empty string fail, matching the old behavior.
    return /^[A-Za-z]+$/.test(str);
}
|
|
894
|
+
/**
 * Checks if a string is a bare shorthand format (owner/repo).
 * Must have exactly one forward slash with content on both sides.
 *
 * @param str - String to check
 * @returns True if string matches owner/repo format
 *
 * @internal
 */
function isBareShorthand(str) {
    // Exactly two non-empty segments separated by a single slash.
    const segments = str.split('/');
    return segments.length === 2 && segments[0] !== '' && segments[1] !== '';
}
|
|
911
|
+
|
|
912
|
+
// Stable identifier for the resolve-repository flow step.
const RESOLVE_REPOSITORY_STEP_ID = 'resolve-repository';
|
|
913
|
+
/**
 * Creates the resolve-repository step.
 *
 * This step resolves repository configuration for compare URL generation.
 * It supports multiple resolution modes:
 *
 * - `undefined` or `'disabled'`: No-op, backward compatible default
 * - `'inferred'`: Auto-detect from package.json or git remote
 * - `RepositoryConfig`: Direct repository configuration provided
 * - `RepositoryResolution`: Fine-grained control with mode and options
 *
 * State updates:
 * - repositoryConfig: Resolved repository configuration (if successful)
 *
 * @returns A FlowStep that resolves repository configuration
 *
 * @example
 * ```typescript
 * // Auto-detect repository
 * const flow = createFlow({
 *   repository: 'inferred'
 * })
 *
 * // Explicit repository
 * const flow = createFlow({
 *   repository: {
 *     platform: 'github',
 *     baseUrl: 'https://github.com/owner/repo'
 *   }
 * })
 * ```
 */
function createResolveRepositoryStep() {
    return createStep(RESOLVE_REPOSITORY_STEP_ID, 'Resolve Repository', async (ctx) => {
        const { config, logger, tree, git, projectRoot } = ctx;
        const repoConfig = config.repository;
        // Disabled or undefined - no-op for backward compatibility.
        // Checked first so absent configuration never triggers inference.
        if (repoConfig === undefined || repoConfig === 'disabled') {
            logger.debug('Repository resolution disabled');
            return {
                status: 'skipped',
                message: 'Repository resolution disabled',
            };
        }
        // Direct RepositoryConfig provided
        if (isRepositoryConfig(repoConfig)) {
            logger.debug(`Using explicit repository config: ${repoConfig.platform}`);
            return {
                status: 'success',
                stateUpdates: {
                    repositoryConfig: repoConfig,
                },
                message: `Using explicit ${repoConfig.platform} repository`,
            };
        }
        // Shorthand 'inferred' mode
        if (repoConfig === 'inferred') {
            const resolved = await inferRepository(tree, git, projectRoot, DEFAULT_INFERENCE_ORDER, logger);
            if (resolved) {
                return {
                    status: 'success',
                    stateUpdates: {
                        repositoryConfig: resolved,
                    },
                    message: `Inferred ${resolved.platform} repository from ${resolved.baseUrl}`,
                };
            }
            // Graceful degradation - no error, just no URLs
            logger.debug('Could not infer repository from package.json or git remote');
            return {
                status: 'skipped',
                message: 'Could not infer repository configuration',
            };
        }
        // Full RepositoryResolution object - delegated to the dedicated
        // handler, which understands mode/repository/inferenceOrder.
        if (isRepositoryResolution(repoConfig)) {
            return handleRepositoryResolution(repoConfig, tree, git, projectRoot, logger);
        }
        // Unknown configuration - should not happen with TypeScript,
        // but plain-JS callers can reach this branch.
        logger.warn('Unknown repository configuration format');
        return {
            status: 'skipped',
            message: 'Unknown repository configuration format',
        };
    }, {
        description: 'Resolves repository configuration for compare URL generation',
    });
}
|
|
1001
|
+
/**
 * Handles a full RepositoryResolution configuration.
 *
 * Mode semantics: 'disabled' skips, 'explicit' requires a repository config
 * (failing otherwise), and any other mode falls through to inference.
 *
 * @param resolution - Repository resolution configuration
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param logger - Logger instance
 * @returns Flow step result with repository config or skip/error status
 * @internal
 */
async function handleRepositoryResolution(resolution, tree, git, projectRoot, logger) {
    const { mode, repository, inferenceOrder } = resolution;
    // Disabled mode
    if (mode === 'disabled') {
        logger.debug('Repository resolution explicitly disabled');
        return {
            status: 'skipped',
            message: 'Repository resolution disabled',
        };
    }
    // Explicit mode - must have repository; this is the only branch that
    // produces a hard failure rather than a skip.
    if (mode === 'explicit') {
        if (!repository) {
            return {
                status: 'failed',
                message: 'Repository config required when mode is "explicit"',
                error: createError('Repository config required when mode is "explicit"'),
            };
        }
        logger.debug(`Using explicit repository config: ${repository.platform}`);
        return {
            status: 'success',
            stateUpdates: {
                repositoryConfig: repository,
            },
            message: `Using explicit ${repository.platform} repository`,
        };
    }
    // Inferred mode (the default when mode is neither disabled nor explicit)
    const order = inferenceOrder ?? DEFAULT_INFERENCE_ORDER;
    const resolved = await inferRepository(tree, git, projectRoot, order, logger);
    if (resolved) {
        return {
            status: 'success',
            stateUpdates: {
                repositoryConfig: resolved,
            },
            message: `Inferred ${resolved.platform} repository`,
        };
    }
    // Graceful degradation - a skip, not an error
    logger.debug('Could not infer repository configuration');
    return {
        status: 'skipped',
        message: 'Could not infer repository configuration',
    };
}
|
|
1059
|
+
/**
 * Infers repository configuration from available sources.
 *
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param order - Inference source order
 * @param logger - Logger instance
 * @returns Repository config or null if none found
 * @internal
 */
async function inferRepository(tree, git, projectRoot, order, logger) {
    // Try each configured source in order; the first hit wins.
    for (const source of order) {
        const resolved = await inferFromSource(tree, git, projectRoot, source, logger);
        if (!resolved) {
            continue;
        }
        logger.debug(`Inferred repository from ${source}: ${resolved.platform}`);
        return resolved;
    }
    return null;
}
|
|
1080
|
+
/**
 * Infers repository from a single source.
 *
 * @param tree - Virtual file system tree
 * @param git - Git client instance
 * @param projectRoot - Path to the project root
 * @param source - Inference source type
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
async function inferFromSource(tree, git, projectRoot, source, logger) {
    switch (source) {
        case 'package-json':
            return inferFromPackageJson(tree, projectRoot, logger);
        case 'git-remote':
            return inferFromGitRemote(git, logger);
        default:
            // Unrecognized sources are reported but non-fatal.
            logger.warn(`Unknown inference source: ${source}`);
            return null;
    }
}
|
|
1101
|
+
/**
 * Infers repository from package.json repository field.
 *
 * @param tree - Virtual file system tree
 * @param projectRoot - Path to the project root
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
function inferFromPackageJson(tree, projectRoot, logger) {
    const packageJsonPath = `${projectRoot}/package.json`;
    if (!tree.exists(packageJsonPath)) {
        logger.debug(`package.json not found at ${packageJsonPath}`);
        return null;
    }
    const contents = tree.read(packageJsonPath, 'utf-8');
    if (!contents) {
        logger.debug('Could not read package.json');
        return null;
    }
    const inferred = inferRepositoryFromPackageJson(contents);
    if (inferred) {
        logger.debug(`Found repository in package.json: ${inferred.baseUrl}`);
    }
    return inferred;
}
|
|
1127
|
+
/**
 * Infers repository from git remote URL.
 *
 * @param git - Git client instance
 * @param logger - Logger instance
 * @returns Repository config or null if not found
 * @internal
 */
async function inferFromGitRemote(git, logger) {
    // Only the conventional 'origin' remote is consulted.
    const remoteUrl = await git.getRemoteUrl('origin');
    if (!remoteUrl) {
        logger.debug('Could not get git remote URL');
        return null;
    }
    const inferred = createRepositoryConfigFromUrl(remoteUrl);
    if (inferred) {
        logger.debug(`Inferred repository from git remote: ${inferred.baseUrl}`);
    }
    return inferred;
}
|
|
1147
|
+
|
|
1148
|
+
/**
 * Safe copies of Set built-in via factory function.
 *
 * Since constructors cannot be safely captured via Object.assign, this module
 * provides a factory function that uses Reflect.construct internally.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/set
 */
// Grab pristine references before any user code can patch the globals.
const _Set = globalThis.Set;
const _Reflect$1 = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Set using the captured Set constructor.
 * Use this instead of `new Set()`.
 *
 * @param iterable - Optional iterable of values.
 * @returns A new Set instance.
 */
const createSet = (iterable) => {
    const ctorArgs = iterable ? [iterable] : [];
    return _Reflect$1.construct(_Set, ctorArgs);
};
|
|
1170
|
+
|
|
1171
|
+
/**
 * Safe copies of Object built-in methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/object
 */
// Grab a pristine reference before any user code can patch the global.
const _Object = globalThis.Object;
// Object's static methods do not rely on `this`, so they can be captured
// as plain function references in one destructuring step:
// - freeze:           prevents property addition/modification
// - keys:             enumerable own string property names
// - entries:          enumerable own [key, value] pairs
// - values:           enumerable own property values
// - defineProperties: adds/modifies properties with descriptors
const { freeze, keys, entries, values, defineProperties } = _Object;
|
|
1202
|
+
|
|
1203
|
+
/**
 * Safe copies of Array built-in static methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/array
 */
// Grab a pristine reference before any user code can patch the global.
const _Array = globalThis.Array;
// (Safe copy) Determines whether the passed value is an Array.
// Array.isArray does not rely on `this`, so a bare reference is safe.
const { isArray } = _Array;
|
|
1217
|
+
|
|
1218
|
+
/**
 * Safe copies of Console built-in methods.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/console
 */
// Capture references at module initialization time
const _console = globalThis.console;
/**
 * (Safe copy) Outputs a message to the console.
 */
const log = _console.log.bind(_console);
/**
 * (Safe copy) Outputs a warning message to the console.
 */
const warn = _console.warn.bind(_console);
/**
 * (Safe copy) Outputs an error message to the console.
 */
const error = _console.error.bind(_console);
/**
 * (Safe copy) Outputs an informational message to the console.
 */
const info = _console.info.bind(_console);
/**
 * (Safe copy) Outputs a debug message to the console.
 */
const debug = _console.debug.bind(_console);
// NOTE(review): the bound copies below are evaluated but never assigned.
// They appear to be bundler residue from tree-shaken exports of the source
// module; each `.bind` call is side-effect free. TODO confirm against the
// original immutable-api-utils source.
/**
 * (Safe copy) Outputs a stack trace to the console.
 */
_console.trace.bind(_console);
/**
 * (Safe copy) Displays an interactive listing of the properties of a specified object.
 */
_console.dir.bind(_console);
/**
 * (Safe copy) Displays tabular data as a table.
 */
_console.table.bind(_console);
/**
 * (Safe copy) Writes an error message to the console if the assertion is false.
 */
_console.assert.bind(_console);
/**
 * (Safe copy) Clears the console.
 */
_console.clear.bind(_console);
/**
 * (Safe copy) Logs the number of times that this particular call to count() has been called.
 */
_console.count.bind(_console);
/**
 * (Safe copy) Resets the counter used with console.count().
 */
_console.countReset.bind(_console);
/**
 * (Safe copy) Creates a new inline group in the console.
 */
_console.group.bind(_console);
/**
 * (Safe copy) Creates a new inline group in the console that is initially collapsed.
 */
_console.groupCollapsed.bind(_console);
/**
 * (Safe copy) Exits the current inline group.
 */
_console.groupEnd.bind(_console);
/**
 * (Safe copy) Starts a timer with a name specified as an input parameter.
 */
_console.time.bind(_console);
/**
 * (Safe copy) Stops a timer that was previously started.
 */
_console.timeEnd.bind(_console);
/**
 * (Safe copy) Logs the current value of a timer that was previously started.
 */
_console.timeLog.bind(_console);
|
|
1300
|
+
|
|
1301
|
+
// Registry of classes consulted by `getType` for instanceof-based type names.
// Presumably populated via `registerClass` (referenced in getType's docs) —
// the registration function is defined elsewhere in this bundle.
const registeredClasses = [];
|
|
1302
|
+
|
|
1303
|
+
/**
 * Returns the data type of the target.
 * Uses native `typeof` operator, however, makes distinction between `null`, `array`, and `object`.
 * Also, when classes are registered via `registerClass`, it checks if objects are instance of any known registered class.
 *
 * @param target - The target to get the data type of.
 * @returns The data type of the target.
 */
const getType = (target) => {
    // typeof null is 'object'; report it distinctly.
    if (target === null) {
        return 'null';
    }
    const baseType = typeof target;
    if (baseType !== 'object') {
        return baseType;
    }
    if (isArray(target)) {
        return 'array';
    }
    // A registered class match (first wins) takes precedence over the
    // generic 'object' label.
    const matched = registeredClasses.find((cls) => target instanceof cls);
    return matched ? matched.name : baseType;
};
|
|
1325
|
+
|
|
1326
|
+
/**
 * Safe copies of Date built-in via factory function and static methods.
 *
 * Since constructors cannot be safely captured via Object.assign, this module
 * provides a factory function that uses Reflect.construct internally.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/date
 */
// Grab pristine references before any user code can patch the globals.
const _Date = globalThis.Date;
const _Reflect = globalThis.Reflect;
/**
 * (Safe copy) Creates a new Date using the captured Date constructor.
 * Use this instead of `new Date(...)`.
 *
 * @param args - Same arguments as the Date constructor.
 * @returns A new Date instance.
 */
function createDate(...dateArgs) {
    // Reflect.construct invokes the captured constructor with the
    // forwarded argument list.
    return _Reflect.construct(_Date, dateArgs);
}
|
|
1343
|
+
|
|
1344
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Creates a wrapper function that only executes the wrapped function if the condition function returns true.
 *
 * @param func - The function to be conditionally executed.
 * @param conditionFunc - A function that returns a boolean, determining if `func` should be executed.
 * @returns A wrapped version of `func` that executes conditionally; when the
 *   condition is false the wrapper returns undefined.
 */
function createConditionalExecutionFunction(func, conditionFunc) {
    return function (...callArgs) {
        // Evaluate the gate on every invocation, not at wrap time.
        return conditionFunc() ? func(...callArgs) : undefined;
    };
}
|
|
1359
|
+
|
|
1360
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
/**
 * Creates a wrapper function that silently ignores any errors thrown by the wrapped void function.
 * This function is specifically for wrapping functions that do not return a value (void functions).
 * Exceptions are swallowed without any logging or handling.
 *
 * @param func - The void function to be wrapped.
 * @returns A wrapped version of the input function that ignores errors.
 */
function createErrorIgnoringFunction(func) {
    return function (...callArgs) {
        try {
            func(...callArgs);
        }
        catch {
            // Intentionally swallowed: this wrapper exists solely to
            // suppress errors from best-effort void calls.
        }
        // No return value on purpose — the wrapped function is void.
    };
}
|
|
1379
|
+
|
|
1380
|
+
/* eslint-disable @typescript-eslint/no-unused-vars */
/**
 * A no-operation function (noop): accepts anything, does nothing, returns
 * undefined. Typed as permissively as possible without using the `Function`
 * keyword; also used as an identity sentinel for "no handler supplied".
 *
 * @param _ignored - Any arguments passed to the function (ignored)
 */
const noop = (..._ignored) => {
    // Intentionally empty
};
|
|
1390
|
+
|
|
1391
|
+
const logLevels = ['none', 'error', 'warn', 'log', 'info', 'debug'];
const priority = {
    error: 4,
    warn: 3,
    log: 2,
    info: 1,
    debug: 0,
};
/**
 * Checks whether the supplied string names one of the supported log levels
 * ('none', 'error', 'warn', 'log', 'info', 'debug').
 *
 * @param level - The log level to validate
 * @returns True if the level is valid, false otherwise
 */
function isValidLogLevel(level) {
    return logLevels.some((knownLevel) => knownLevel === level);
}
|
|
1408
|
+
/**
 * Creates a log level configuration manager for controlling logging behavior.
 * Provides methods to get, set, and evaluate log levels based on priority.
 *
 * @param level - The initial log level (defaults to 'error')
 * @returns A frozen configuration object with getLogLevel, setLogLevel, shouldLog
 * @throws {Error} When the provided level is not a valid log level
 */
function createLogLevelConfig(level = 'error') {
    if (!isValidLogLevel(level)) {
        // Fixed message: it previously read "...with a valid default log level",
        // which is the opposite of the condition that triggers this throw.
        throw createError(`Cannot create log level configuration with invalid default log level '${level}'. Expected levels are ${logLevels}.`);
    }
    // Mutable holder shared by the closures below.
    const state = { level };
    const getLogLevel = () => state.level;
    const setLogLevel = (level) => {
        if (!isValidLogLevel(level)) {
            throw createError(`Cannot set value '${level}' level. Expected levels are ${logLevels}.`);
        }
        state.level = level;
    };
    // 'none' (configured or requested) disables output entirely; otherwise a
    // message logs when its numeric priority (error=4 ... debug=0) is at least
    // the configured level's priority.
    const shouldLog = (level) => {
        if (state.level === 'none' || level === 'none' || !isValidLogLevel(level)) {
            return false;
        }
        return priority[level] >= priority[state.level];
    };
    return freeze({
        getLogLevel,
        setLogLevel,
        shouldLog,
    });
}
|
|
1440
|
+
|
|
1441
|
+
/**
 * Creates a logger instance with configurable log level filtering.
 * Each log function is wrapped to respect the current log level setting.
 *
 * @param error - Function to handle error-level logs (required)
 * @param warn - Function to handle warning-level logs (optional, defaults to noop)
 * @param log - Function to handle standard logs (optional, defaults to noop)
 * @param info - Function to handle info-level logs (optional, defaults to noop)
 * @param debug - Function to handle debug-level logs (optional, defaults to noop)
 * @returns A frozen logger object with log methods and level control
 * @throws {Error} When any provided log function is invalid
 */
function createLogger(error, warn = noop, log = noop, info = noop, debug = noop) {
    // Validate all handlers with one loop instead of five copy-pasted blocks.
    // Pair order matches the old check order, so the first invalid handler
    // reported is the same as before.
    const validations = [
        ['error', error],
        ['warn', warn],
        ['log', log],
        ['info', info],
        ['debug', debug],
    ];
    for (const [label, fn] of validations) {
        if (notValidLogFn(fn)) {
            throw createError(notFnMsg(label));
        }
    }
    const { setLogLevel, getLogLevel, shouldLog } = createLogLevelConfig();
    // noop handlers pass through untouched; real handlers are gated on the
    // current log level and have their errors swallowed so logging never throws.
    const wrapLogFn = (fn, level) => {
        if (fn === noop)
            return fn;
        const condition = () => shouldLog(level);
        return createConditionalExecutionFunction(createErrorIgnoringFunction(fn), condition);
    };
    return freeze({
        error: wrapLogFn(error, 'error'),
        warn: wrapLogFn(warn, 'warn'),
        log: wrapLogFn(log, 'log'),
        info: wrapLogFn(info, 'info'),
        debug: wrapLogFn(debug, 'debug'),
        setLogLevel,
        getLogLevel,
    });
}
|
|
1486
|
+
/**
 * Validates whether a given value is a valid log function.
 * The `noop` sentinel is always considered valid.
 *
 * @param fn - The value to validate
 * @returns True if the value is not a function (invalid), false if it is valid
 */
function notValidLogFn(fn) {
    const isFunction = getType(fn) === 'function';
    const isNoopSentinel = fn === noop;
    return !(isFunction || isNoopSentinel);
}
|
|
1495
|
+
/**
 * Builds the standard validation-failure message used by createLogger when a
 * supplied handler is not a function.
 *
 * @param label - The name of the log function that failed validation
 * @returns A formatted error message string
 */
function notFnMsg(label) {
    const message = `Cannot create a logger when ${label} is not a function`;
    return message;
}
|
|
1504
|
+
|
|
1505
|
+
// NOTE(review): the return value is discarded — this looks like the
// tree-shaken remnant of an exported default logger (error/warn/log/info/debug
// are presumably console handlers captured earlier in the bundle); the call is
// kept for its side effects only. Confirm against the package sources.
createLogger(error, warn, log, info, debug);
|
|
1506
|
+
|
|
1507
|
+
/**
 * Global log level registry.
 * Tracks all created scoped loggers to allow global log level changes.
 * NOTE(review): entries are only ever added (see createScopedLogger), never
 * removed — presumably acceptable for module-lifetime loggers; confirm.
 */
const loggerRegistry = createSet();
/** Redacted placeholder for sensitive values */
const REDACTED = '[REDACTED]';
|
|
1514
|
+
/**
 * Patterns that indicate a sensitive key name.
 * Keys containing these patterns will have their values sanitized.
 */
const SENSITIVE_KEY_PATTERNS = [
    /token/i,
    /key/i,
    /password/i,
    /secret/i,
    /credential/i,
    /auth/i,
    /bearer/i,
    /api[_-]?key/i,
    /private/i,
    /passphrase/i,
];
/**
 * Checks if a key name indicates sensitive data by testing it against every
 * known sensitive-name pattern.
 *
 * @param key - Key name to check
 * @returns True if the key indicates sensitive data
 */
function isSensitiveKey(key) {
    for (const pattern of SENSITIVE_KEY_PATTERNS) {
        if (pattern.test(key)) {
            return true;
        }
    }
    return false;
}
|
|
1539
|
+
/**
 * Sanitizes an object by replacing sensitive values with REDACTED.
 * This function recursively processes nested objects and arrays.
 *
 * NOTE(review): there is no cycle guard — a self-referencing meta object would
 * recurse until stack overflow (in the logging path that error is swallowed by
 * createErrorIgnoringFunction, so the message is silently dropped). Confirm
 * whether cyclic metadata is possible before relying on this for arbitrary input.
 *
 * @param obj - Object to sanitize
 * @returns New object with sensitive values redacted (primitives returned as-is)
 */
function sanitize(obj) {
    // null/undefined pass straight through.
    if (obj === null || obj === undefined) {
        return obj;
    }
    // Arrays: sanitize each element, preserving order.
    if (isArray(obj)) {
        return obj.map((item) => sanitize(item));
    }
    if (typeof obj === 'object') {
        // Rebuilt as a plain object — non-plain inputs (Date, Map, class
        // instances) lose their prototype; only own enumerable entries survive.
        const result = {};
        for (const [key, value] of entries(obj)) {
            if (isSensitiveKey(key)) {
                // Sensitive key: drop the value entirely, even if it is nested.
                result[key] = REDACTED;
            }
            else if (typeof value === 'object' && value !== null) {
                result[key] = sanitize(value);
            }
            else {
                result[key] = value;
            }
        }
        return result;
    }
    // Primitives (string, number, boolean, ...) need no redaction by key.
    return obj;
}
|
|
1570
|
+
/**
 * Formats a log message as `[namespace] message` and, when metadata with at
 * least one key is supplied, appends its sanitized JSON form.
 *
 * @param namespace - Logger namespace prefix
 * @param message - Log message
 * @param meta - Optional metadata object
 * @returns Formatted log string
 */
function formatMessage(namespace, message, meta) {
    const prefix = `[${namespace}]`;
    const hasMeta = Boolean(meta) && keys(meta).length > 0;
    if (!hasMeta) {
        return `${prefix} ${message}`;
    }
    const sanitizedMeta = sanitize(meta);
    return `${prefix} ${message} ${stringify(sanitizedMeta)}`;
}
|
|
1586
|
+
/**
 * Creates a scoped logger with namespace prefix and optional secret sanitization.
 * All log messages will be prefixed with [namespace] and sensitive metadata
 * values will be automatically redacted.
 *
 * NOTE(review): `sanitizeSecrets: false` is currently ineffective — formatMessage
 * sanitizes metadata unconditionally, so secrets are redacted either way (and
 * sanitized twice when the option is true). Confirm intent before changing.
 *
 * @param namespace - Logger namespace (e.g., 'project-scope', 'analyze')
 * @param options - Logger configuration options
 * @returns A configured scoped logger instance
 *
 * @example
 * ```typescript
 * const logger = createScopedLogger('project-scope')
 * logger.setLogLevel('debug')
 *
 * // Basic logging
 * logger.info('Starting analysis', { path: './project' })
 *
 * // Sensitive data is automatically redacted
 * logger.debug('Config loaded', { apiKey: 'secret123' })
 * // Output: [project-scope] Config loaded {"apiKey":"[REDACTED]"}
 * ```
 */
function createScopedLogger(namespace, options = {}) {
    const { level = 'error', sanitizeSecrets = true } = options;
    // Create wrapper functions that add namespace prefix and sanitization.
    // Note the wrapped sink collapses (message, meta) into a single formatted
    // string before handing it to the base handler.
    const createLogFn = (baseFn) => (message, meta) => {
        const processedMeta = sanitizeSecrets && meta ? sanitize(meta) : meta;
        baseFn(formatMessage(namespace, message, processedMeta));
    };
    // Create base logger with wrapped functions
    const baseLogger = createLogger(createLogFn(error), createLogFn(warn), createLogFn(log), createLogFn(info), createLogFn(debug));
    // Set initial log level
    // NOTE(review): the original comment claimed a "global override" is used,
    // but no override is consulted here — only the `level` option.
    baseLogger.setLogLevel(level);
    const scopedLogger = freeze({
        error: (message, meta) => baseLogger.error(message, meta),
        warn: (message, meta) => baseLogger.warn(message, meta),
        log: (message, meta) => baseLogger.log(message, meta),
        info: (message, meta) => baseLogger.info(message, meta),
        debug: (message, meta) => baseLogger.debug(message, meta),
        setLogLevel: baseLogger.setLogLevel,
        getLogLevel: baseLogger.getLogLevel,
    });
    // Register logger for global level management.
    // NOTE(review): the registry never evicts, so loggers live for the module
    // lifetime — presumably fine for namespace-scoped singletons; confirm.
    loggerRegistry.add(scopedLogger);
    return scopedLogger;
}
|
|
1632
|
+
/**
 * Default logger instance for the project-scope library.
 * Use this for general logging within the library.
 *
 * @example
 * ```typescript
 * import { logger } from '@hyperfrontend/project-scope/core'
 *
 * logger.setLogLevel('debug')
 * logger.debug('Analyzing project', { path: './src' })
 * ```
 */
// NOTE(review): both calls below discard their return value — they look like
// the tree-shaken remains of `export const logger = createScopedLogger(...)`
// style exports; the calls still run for their registry side effect. Confirm
// against the package sources.
createScopedLogger('project-scope');

createScopedLogger('project-scope:fs');
|
|
1647
|
+
/**
 * Create a file system error carrying a machine-readable code and context.
 * Both extras are attached as enumerable (non-writable) properties.
 *
 * @param message - The error message describing what went wrong
 * @param code - The category code for this type of filesystem failure
 * @param context - Additional context including path, operation, and cause
 * @returns A configured Error object with code and context properties
 */
function createFileSystemError(message, code, context) {
    const fsError = createError(message);
    const extraDescriptors = {
        code: { value: code, enumerable: true },
        context: { value: context, enumerable: true },
    };
    defineProperties(fsError, extraDescriptors);
    return fsError;
}
|
|
1663
|
+
/**
 * Read file if exists, return null otherwise. Read failures (permissions,
 * races with deletion) are also reported as null.
 *
 * @param filePath - Path to file
 * @param encoding - File encoding (default: utf-8)
 * @returns File contents or null if file doesn't exist
 */
function readFileIfExists(filePath, encoding = 'utf-8') {
    try {
        if (node_fs.existsSync(filePath)) {
            return node_fs.readFileSync(filePath, { encoding });
        }
    }
    catch {
        // Unreadable — fall through and treat the file as absent.
    }
    return null;
}
|
|
1681
|
+
/**
 * Read and parse JSON file if exists, return null otherwise. Invalid JSON and
 * read failures are also reported as null rather than thrown.
 *
 * @param filePath - Path to JSON file
 * @returns Parsed JSON object or null if file doesn't exist or is invalid
 */
function readJsonFileIfExists(filePath) {
    try {
        if (!node_fs.existsSync(filePath)) {
            return null;
        }
        const rawContent = node_fs.readFileSync(filePath, { encoding: 'utf-8' });
        return parse(rawContent);
    }
    catch {
        // Missing, unreadable, or malformed — treat uniformly as "no data".
        return null;
    }
}
|
|
1699
|
+
|
|
1700
|
+
// NOTE(review): discarded result — presumably the tree-shaken remnant of the
// fs-write module's exported logger; kept for its registry side effect.
createScopedLogger('project-scope:fs:write');
|
|
1701
|
+
|
|
1702
|
+
/**
 * Get file stats with error handling.
 *
 * @param filePath - Path to file
 * @param followSymlinks - Whether to follow symlinks (default: true)
 * @returns Plain stats snapshot (isFile/isDirectory/isSymlink/size/dates/mode)
 *   or null if the path doesn't exist or cannot be stat'ed
 */
function getFileStat(filePath, followSymlinks = true) {
    if (!node_fs.existsSync(filePath)) {
        return null;
    }
    try {
        // lstat reports on the symlink itself; stat resolves through it.
        const rawStat = followSymlinks ? node_fs.statSync(filePath) : node_fs.lstatSync(filePath);
        const snapshot = {
            isFile: rawStat.isFile(),
            isDirectory: rawStat.isDirectory(),
            isSymlink: rawStat.isSymbolicLink(),
            size: rawStat.size,
            created: rawStat.birthtime,
            modified: rawStat.mtime,
            accessed: rawStat.atime,
            mode: rawStat.mode,
        };
        return snapshot;
    }
    catch {
        // Raced deletion or permission failure — report as absent.
        return null;
    }
}
/**
 * Check if path is a directory.
 *
 * @param dirPath - Path to check
 * @returns True if path is a directory
 */
function isDirectory(dirPath) {
    return getFileStat(dirPath)?.isDirectory ?? false;
}
/**
 * Check if path exists.
 *
 * @param filePath - Path to check
 * @returns True if path exists
 */
function exists(filePath) {
    return node_fs.existsSync(filePath);
}
|
|
1749
|
+
|
|
1750
|
+
const fsDirLogger = createScopedLogger('project-scope:fs:dir');
/**
 * List immediate contents of a directory as plain entry records
 * (name, absolute-ish path, isFile/isDirectory/isSymlink flags).
 *
 * @param dirPath - Absolute or relative path to the directory
 * @returns Array of entries with metadata for each file/directory
 * @throws {Error} FS_NOT_FOUND / FS_NOT_A_DIRECTORY / FS_READ_ERROR coded
 *   errors when the path is missing, not a directory, or unreadable
 */
function readDirectory(dirPath) {
    fsDirLogger.debug('Reading directory', { path: dirPath });
    // Guard clauses: fail loudly with coded errors before attempting the read.
    if (!node_fs.existsSync(dirPath)) {
        fsDirLogger.debug('Directory not found', { path: dirPath });
        throw createFileSystemError(`Directory not found: ${dirPath}`, 'FS_NOT_FOUND', { path: dirPath, operation: 'readdir' });
    }
    if (!isDirectory(dirPath)) {
        fsDirLogger.debug('Path is not a directory', { path: dirPath });
        throw createFileSystemError(`Not a directory: ${dirPath}`, 'FS_NOT_A_DIRECTORY', { path: dirPath, operation: 'readdir' });
    }
    try {
        const dirents = node_fs.readdirSync(dirPath, { withFileTypes: true });
        fsDirLogger.debug('Directory read complete', { path: dirPath, entryCount: dirents.length });
        const toEntry = (dirent) => ({
            name: dirent.name,
            path: node_path.join(dirPath, dirent.name),
            isFile: dirent.isFile(),
            isDirectory: dirent.isDirectory(),
            isSymlink: dirent.isSymbolicLink(),
        });
        return dirents.map(toEntry);
    }
    catch (readError) {
        const reason = readError instanceof Error ? readError.message : String(readError);
        fsDirLogger.warn('Failed to read directory', { path: dirPath, error: reason });
        throw createFileSystemError(`Failed to read directory: ${dirPath}`, 'FS_READ_ERROR', {
            path: dirPath,
            operation: 'readdir',
            cause: readError,
        });
    }
}
|
|
1798
|
+
|
|
1799
|
+
// NOTE(review): discarded result — presumably the tree-shaken remnant of the
// traversal module's exported logger; kept for its registry side effect.
createScopedLogger('project-scope:fs:traversal');
|
|
1800
|
+
|
|
1801
|
+
const packageLogger = createScopedLogger('project-scope:project:package');
/**
 * Verifies that a value is an object whose own enumerable values are all
 * strings — used for validating dependency maps and script definitions.
 *
 * @param value - Value to check
 * @returns True if value is a record of strings
 */
function isStringRecord(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    return values(value).every((entry) => typeof entry === 'string');
}
|
|
1814
|
+
/**
 * Extracts and normalizes the workspaces field from package.json, supporting
 * both the plain string-array format and the `{ packages: [...] }` object form.
 *
 * @param value - Raw workspaces value from package.json
 * @returns Normalized workspace patterns or undefined if invalid
 */
function parseWorkspaces(value) {
    const isStringArray = isArray(value) && value.every((v) => typeof v === 'string');
    if (isStringArray) {
        return value;
    }
    const isObjectForm = value !== null && typeof value === 'object' && isArray(value['packages']);
    if (isObjectForm) {
        return { packages: value['packages'] };
    }
    return undefined;
}
|
|
1833
|
+
/**
 * Validate and normalize package.json data.
 * Unknown fields pass through untouched; the known fields below are normalized
 * to `undefined` when they have an unexpected shape.
 *
 * @param data - Raw parsed data
 * @returns Validated package.json
 * @throws {Error} When data is not a non-null object
 */
function validatePackageJson(data) {
    if (typeof data !== 'object' || data === null) {
        throw createError('package.json must be an object');
    }
    const pkg = data;
    // BUG FIX: `...pkg` used to be spread LAST, so every raw field overwrote
    // its normalized counterpart and the validation below was dead code
    // (e.g. a numeric `name` survived unchanged). Spreading it FIRST keeps
    // unknown fields while letting the validated values win.
    return {
        ...pkg,
        name: typeof pkg['name'] === 'string' ? pkg['name'] : undefined,
        version: typeof pkg['version'] === 'string' ? pkg['version'] : undefined,
        description: typeof pkg['description'] === 'string' ? pkg['description'] : undefined,
        main: typeof pkg['main'] === 'string' ? pkg['main'] : undefined,
        module: typeof pkg['module'] === 'string' ? pkg['module'] : undefined,
        browser: typeof pkg['browser'] === 'string' ? pkg['browser'] : undefined,
        types: typeof pkg['types'] === 'string' ? pkg['types'] : undefined,
        bin: typeof pkg['bin'] === 'string' || isStringRecord(pkg['bin']) ? pkg['bin'] : undefined,
        scripts: isStringRecord(pkg['scripts']) ? pkg['scripts'] : undefined,
        dependencies: isStringRecord(pkg['dependencies']) ? pkg['dependencies'] : undefined,
        devDependencies: isStringRecord(pkg['devDependencies']) ? pkg['devDependencies'] : undefined,
        peerDependencies: isStringRecord(pkg['peerDependencies']) ? pkg['peerDependencies'] : undefined,
        optionalDependencies: isStringRecord(pkg['optionalDependencies']) ? pkg['optionalDependencies'] : undefined,
        workspaces: parseWorkspaces(pkg['workspaces']),
        exports: typeof pkg['exports'] === 'object' ? pkg['exports'] : undefined,
        engines: isStringRecord(pkg['engines']) ? pkg['engines'] : undefined,
    };
}
|
|
1864
|
+
/**
 * Attempts to read and parse package.json if it exists, returning null on a
 * missing file or parse/validation failure.
 *
 * @param projectPath - Project directory path or path to package.json
 * @returns Parsed package.json or null if not found
 */
function readPackageJsonIfExists(projectPath) {
    // Accept either the directory or a direct path to the manifest itself.
    const packageJsonPath = projectPath.endsWith('package.json')
        ? projectPath
        : node_path.join(projectPath, 'package.json');
    const rawContent = readFileIfExists(packageJsonPath);
    if (!rawContent) {
        packageLogger.debug('Package.json not found', { path: packageJsonPath });
        return null;
    }
    try {
        const validated = validatePackageJson(parse(rawContent));
        packageLogger.debug('Package.json loaded', { path: packageJsonPath, name: validated.name });
        return validated;
    }
    catch {
        packageLogger.debug('Failed to parse package.json, returning null', { path: packageJsonPath });
        return null;
    }
}
|
|
1888
|
+
|
|
1889
|
+
// NOTE(review): discarded result — presumably the tree-shaken remnant of the
// root-detection module's exported logger; kept for its registry side effect.
createScopedLogger('project-scope:root');

const nxLogger = createScopedLogger('project-scope:nx');
/**
 * Files indicating NX workspace root.
 * Either nx.json (modern) or workspace.json (legacy) marks the root.
 */
const NX_CONFIG_FILES = ['nx.json', 'workspace.json'];
/**
 * NX-specific project file.
 * Presence of project.json marks a directory as an NX project.
 */
const NX_PROJECT_FILE = 'project.json';
|
|
1900
|
+
/**
 * Check if directory is an NX workspace root.
 *
 * @param path - Directory path to check
 * @returns True if the directory contains nx.json or workspace.json
 *
 * @example
 * ```typescript
 * import { isNxWorkspace } from '@hyperfrontend/project-scope'
 *
 * if (isNxWorkspace('./my-project')) {
 *   console.log('This is an NX monorepo')
 * }
 * ```
 */
function isNxWorkspace(path) {
    // First matching marker file wins, mirroring the NX_CONFIG_FILES order.
    const configFile = NX_CONFIG_FILES.find((candidate) => exists(node_path.join(path, candidate)));
    if (configFile) {
        nxLogger.debug('NX workspace detected', { path, configFile });
        return true;
    }
    nxLogger.debug('Not an NX workspace', { path });
    return false;
}
|
|
1925
|
+
/**
 * Check if directory is an NX project (i.e. contains a project.json).
 *
 * @param path - Directory path to check
 * @returns True if the directory contains project.json
 */
function isNxProject(path) {
    const projectFilePath = node_path.join(path, NX_PROJECT_FILE);
    const isProject = exists(projectFilePath);
    nxLogger.debug('NX project check', { path, isProject });
    return isProject;
}
|
|
1936
|
+
/**
 * Detect NX version from package.json dependencies (devDependencies first).
 *
 * @param workspacePath - Workspace root path
 * @returns NX version string (without semver range prefix) or null
 */
function detectNxVersion(workspacePath) {
    const packageJson = readPackageJsonIfExists(workspacePath);
    const rawVersion = packageJson?.devDependencies?.['nx'] ?? packageJson?.dependencies?.['nx'];
    if (!rawVersion) {
        return null;
    }
    // Strip leading semver range characters (^, ~, >=, etc.)
    return rawVersion.replace(/^[\^~>=<]+/, '');
}
|
|
1953
|
+
/**
 * Check if workspace is integrated (not standalone).
 * Integrated repos typically have workspaceLayout, namedInputs, or targetDefaults.
 *
 * @param nxJson - Parsed nx.json configuration
 * @returns True if the workspace is integrated
 */
function isIntegratedRepo(nxJson) {
    const markers = [nxJson.workspaceLayout, nxJson.namedInputs, nxJson.targetDefaults];
    return markers.some((marker) => marker !== undefined);
}
|
|
1963
|
+
/**
 * Get comprehensive NX workspace information.
 *
 * @param workspacePath - Workspace root path
 * @returns Workspace info (root, version, nxJson, isIntegrated, defaultProject,
 *   workspaceLayout) or null if not an NX workspace
 */
function getNxWorkspaceInfo(workspacePath) {
    nxLogger.debug('Getting NX workspace info', { workspacePath });
    if (!isNxWorkspace(workspacePath)) {
        return null;
    }
    const nxJson = readJsonFileIfExists(node_path.join(workspacePath, 'nx.json'));
    if (!nxJson) {
        // Check for workspace.json as fallback (older NX)
        const workspaceJson = readJsonFileIfExists(node_path.join(workspacePath, 'workspace.json'));
        if (!workspaceJson) {
            // isNxWorkspace saw a marker file but neither parsed — bail out.
            nxLogger.debug('No nx.json or workspace.json found', { workspacePath });
            return null;
        }
        nxLogger.debug('Using legacy workspace.json', { workspacePath });
        // Create minimal nx.json from workspace.json
        // NOTE(review): legacy repos are assumed integrated with the default
        // apps/libs layout — confirm this matches older NX conventions.
        return {
            root: workspacePath,
            version: detectNxVersion(workspacePath),
            nxJson: {},
            isIntegrated: true,
            workspaceLayout: {
                appsDir: 'apps',
                libsDir: 'libs',
            },
        };
    }
    // Modern path: derive the layout from nx.json, falling back to apps/libs.
    const info = {
        root: workspacePath,
        version: detectNxVersion(workspacePath),
        nxJson,
        isIntegrated: isIntegratedRepo(nxJson),
        defaultProject: nxJson.defaultProject,
        workspaceLayout: {
            appsDir: nxJson.workspaceLayout?.appsDir ?? 'apps',
            libsDir: nxJson.workspaceLayout?.libsDir ?? 'libs',
        },
    };
    nxLogger.debug('NX workspace info retrieved', {
        workspacePath,
        version: info.version,
        isIntegrated: info.isIntegrated,
        defaultProject: info.defaultProject,
    });
    return info;
}
|
|
2014
|
+
|
|
2015
|
+
// NOTE(review): discarded result — presumably the tree-shaken remnant of the
// nx-devkit module's exported logger; kept for its registry side effect.
createScopedLogger('project-scope:nx:devkit');

const nxConfigLogger = createScopedLogger('project-scope:nx:config');
|
|
2018
|
+
/**
 * Read project.json for an NX project.
 *
 * @param projectPath - Project directory path
 * @returns Parsed project.json or null if not found
 */
function readProjectJson(projectPath) {
    const projectJsonPath = node_path.join(projectPath, NX_PROJECT_FILE);
    nxConfigLogger.debug('Reading project.json', { path: projectJsonPath });
    const result = readJsonFileIfExists(projectJsonPath);
    if (!result) {
        nxConfigLogger.debug('Project.json not found', { path: projectJsonPath });
        return result;
    }
    nxConfigLogger.debug('Project.json loaded', { path: projectJsonPath, name: result.name });
    return result;
}
|
|
2036
|
+
/**
 * Get project configuration from project.json or the package.json "nx" field,
 * with project.json taking precedence. The returned config always carries a
 * `root` relative to the workspace.
 *
 * @param projectPath - Project directory path
 * @param workspacePath - Workspace root path (for relative path calculation)
 * @returns Project configuration or null if not found
 */
function getProjectConfig(projectPath, workspacePath) {
    nxConfigLogger.debug('Getting project config', { projectPath, workspacePath });
    // Try project.json first
    const projectJson = readProjectJson(projectPath);
    if (projectJson) {
        nxConfigLogger.debug('Using project.json config', { projectPath, name: projectJson.name });
        const fallbackRoot = node_path.relative(workspacePath, projectPath);
        return { ...projectJson, root: projectJson.root ?? fallbackRoot };
    }
    // Fall back to the package.json "nx" field
    const packageJson = readPackageJsonIfExists(projectPath);
    const nxField = packageJson ? packageJson['nx'] : undefined;
    if (packageJson && typeof nxField === 'object') {
        nxConfigLogger.debug('Using package.json nx field', { projectPath, name: packageJson.name });
        // Inline nx config may override the derived name/root.
        return {
            name: packageJson.name,
            root: node_path.relative(workspacePath, projectPath),
            ...nxField,
        };
    }
    nxConfigLogger.debug('No project config found', { projectPath });
    return null;
}
|
|
2068
|
+
/**
 * Recursively scan a directory tree for project.json files, adding each
 * discovered project to the supplied map (keyed by project name).
 *
 * @param dirPath - Directory to scan
 * @param workspacePath - Workspace root path
 * @param projects - Map to add discovered projects to
 * @param maxDepth - Maximum recursion depth
 * @param currentDepth - Current recursion depth
 */
function scanForProjects(dirPath, workspacePath, projects, maxDepth, currentDepth = 0) {
    if (currentDepth > maxDepth) {
        return;
    }
    try {
        for (const dirEntry of readDirectory(dirPath)) {
            // Skip hidden directories, node_modules, and build output.
            const isSkipped = dirEntry.name.startsWith('.') || dirEntry.name === 'node_modules' || dirEntry.name === 'dist';
            if (isSkipped || !dirEntry.isDirectory) {
                continue;
            }
            const fullPath = node_path.join(dirPath, dirEntry.name);
            // Record this directory if it is an NX project itself...
            if (isNxProject(fullPath)) {
                const config = getProjectConfig(fullPath, workspacePath);
                if (config) {
                    // Fallback name: workspace-relative path with separators dashed.
                    const name = config.name || node_path.relative(workspacePath, fullPath).replace(/[\\/]/g, '-');
                    projects.set(name, {
                        ...config,
                        name,
                        root: node_path.relative(workspacePath, fullPath),
                    });
                }
            }
            // ...then keep descending regardless (projects may nest).
            scanForProjects(fullPath, workspacePath, projects, maxDepth, currentDepth + 1);
        }
    }
    catch {
        // Directory not readable, skip
    }
}
|
|
2110
|
+
/**
 * Discover all NX projects in workspace.
 * Supports both workspace.json (older format) and project.json (newer format).
 *
 * @param workspacePath - Workspace root path
 * @returns Map of project name to configuration
 */
function discoverNxProjects(workspacePath) {
    const projects = createMap();
    // Legacy format: a single workspace.json listing every project.
    const workspaceJson = readJsonFileIfExists(node_path.join(workspacePath, 'workspace.json'));
    if (workspaceJson?.projects) {
        for (const [name, config] of entries(workspaceJson.projects)) {
            if (typeof config === 'string') {
                // The entry is a relative path pointing at the project directory.
                const projectConfig = getProjectConfig(node_path.join(workspacePath, config), workspacePath);
                if (projectConfig) {
                    projects.set(name, { ...projectConfig, name });
                }
            }
            else if (typeof config === 'object' && config !== null) {
                // The entry carries the configuration inline.
                projects.set(name, { name, ...config });
            }
        }
        return projects;
    }
    // Newer format: per-project project.json files under the layout dirs.
    const workspaceInfo = getNxWorkspaceInfo(workspacePath);
    const searchDirs = [
        workspaceInfo?.workspaceLayout.appsDir ?? 'apps',
        workspaceInfo?.workspaceLayout.libsDir ?? 'libs',
    ];
    // 'packages' is a common additional layout directory in some setups.
    if (exists(node_path.join(workspacePath, 'packages'))) {
        searchDirs.push('packages');
    }
    for (const searchDir of searchDirs) {
        const searchPath = node_path.join(workspacePath, searchDir);
        if (!exists(searchPath) || !isDirectory(searchPath)) {
            continue;
        }
        try {
            scanForProjects(searchPath, workspacePath, projects, 3);
        }
        catch {
            // Directory not accessible
        }
    }
    // A standalone project may live directly at the workspace root.
    if (isNxProject(workspacePath)) {
        const rootConfig = readProjectJson(workspacePath);
        if (rootConfig) {
            const rootName = rootConfig.name || node_path.basename(workspacePath);
            projects.set(rootName, {
                ...rootConfig,
                name: rootName,
                root: '.',
            });
        }
    }
    return projects;
}
|
|
2172
|
+
/**
 * Build a simple project graph from discovered projects.
 * For full graph capabilities, use `@nx/devkit`.
 *
 * Only implicit dependencies declared in each project's configuration are
 * modeled; entries prefixed with '!' are exclusions and are skipped.
 *
 * @param workspacePath - Workspace root path
 * @param projects - Existing configuration map to skip auto-discovery
 * @returns NxProjectGraph with nodes and dependencies
 */
function buildSimpleProjectGraph(workspacePath, projects) {
    const projectMap = projects ?? discoverNxProjects(workspacePath);
    const nodes = {};
    const dependencies = {};
    for (const [projectName, projectConfig] of projectMap) {
        nodes[projectName] = {
            name: projectName,
            type: projectConfig.projectType ?? 'library',
            data: projectConfig,
        };
        // Negative ("!name") implicit dependencies are exclusions, not edges.
        const implicit = projectConfig.implicitDependencies ?? [];
        dependencies[projectName] = implicit
            .filter((dep) => !dep.startsWith('!'))
            .map((dep) => ({ target: dep, type: 'implicit' }));
    }
    return { nodes, dependencies };
}
|
|
2206
|
+
|
|
2207
|
+
/**
 * Creates an empty classification summary.
 *
 * @returns A new ClassificationSummary with all counts at zero
 */
function createEmptyClassificationSummary() {
    // Every known commit source starts with a zero count.
    const bySource = {
        'direct-scope': 0,
        'direct-file': 0,
        'unscoped-file': 0,
        'indirect-dependency': 0,
        'indirect-infra': 0,
        'unscoped-global': 0,
        excluded: 0,
    };
    return {
        total: 0,
        included: 0,
        excluded: 0,
        bySource,
    };
}
|
|
2228
|
+
/**
 * Creates a classified commit.
 *
 * The `include` and `preserveScope` flags are derived from the source
 * via isIncludedSource() and shouldPreserveScope().
 *
 * @param commit - The parsed conventional commit
 * @param raw - The raw git commit
 * @param source - How the commit relates to the project
 * @param options - Additional classification options
 * @param options.touchedFiles - Files in the project modified by this commit
 * @param options.dependencyPath - Chain of dependencies leading to indirect inclusion
 * @returns A new ClassifiedCommit object
 */
function createClassifiedCommit(commit, raw, source, options) {
    return {
        commit,
        raw,
        source,
        include: isIncludedSource(source),
        preserveScope: shouldPreserveScope(source),
        touchedFiles: options?.touchedFiles,
        dependencyPath: options?.dependencyPath,
    };
}
|
|
2252
|
+
/**
 * Determines if a source type should be included in changelog.
 *
 * @param source - The commit source type
 * @returns True if commits with this source should be included
 */
function isIncludedSource(source) {
    switch (source) {
        case 'unscoped-global':
        case 'excluded':
            // Repository-global or rejected commits never make the changelog.
            return false;
        case 'direct-scope':
        case 'direct-file':
        case 'unscoped-file':
        case 'indirect-dependency':
        case 'indirect-infra':
            // Everything tied to the project, directly or indirectly, is kept.
            return true;
    }
}
|
|
2271
|
+
/**
 * Determines if scope should be preserved for a source type.
 *
 * Direct commits omit scope (redundant in project changelog).
 * Indirect commits preserve scope for context.
 *
 * @param source - The commit source type
 * @returns True if scope should be preserved in changelog
 */
function shouldPreserveScope(source) {
    switch (source) {
        case 'direct-file':
        case 'indirect-dependency':
        case 'indirect-infra':
            // Scope provides useful context for these.
            return true;
        case 'direct-scope':
        case 'unscoped-file':
            // Scope would merely repeat the project name.
            return false;
        case 'unscoped-global':
        case 'excluded':
            // These commits are not rendered at all.
            return false;
    }
}
|
|
2294
|
+
|
|
2295
|
+
/**
 * Derives all scope variations that should match a project.
 *
 * Given a project named 'lib-versioning' with package '@hyperfrontend/versioning',
 * this generates variations like:
 * - 'lib-versioning' (full project name)
 * - 'versioning' (without lib- prefix)
 *
 * @param options - Project identification options
 * @returns Array of scope strings that match this project
 *
 * @example
 * deriveProjectScopes({ projectName: 'lib-versioning', packageName: '@hyperfrontend/versioning' })
 * // Returns: ['lib-versioning', 'versioning']
 *
 * @example
 * deriveProjectScopes({ projectName: 'app-demo', packageName: 'demo-app' })
 * // Returns: ['app-demo', 'demo']
 */
function deriveProjectScopes(options) {
    const { projectName, packageName, additionalScopes = [] } = options;
    // Collect candidates in priority order; the set removes duplicates
    // while preserving first-insertion order.
    const candidates = [
        projectName,
        ...extractPrefixVariations(projectName),
        ...(packageName ? extractPackageNameVariations(packageName) : []),
        ...additionalScopes.filter((scope) => !!scope),
    ];
    const scopeSet = createSet(candidates);
    return [...scopeSet];
}
|
|
2339
|
+
/**
 * Recognized project name prefixes that can be stripped for scope matching.
 */
const PROJECT_PREFIXES = ['lib-', 'app-', 'e2e-', 'tool-', 'plugin-', 'feature-', 'package-'];
/**
 * Generates scope variations by stripping recognized project prefixes.
 *
 * At most one prefix is stripped, and the empty string is never produced.
 *
 * @param projectName - The project name to extract variations from
 * @returns Array of scope name variations
 */
function extractPrefixVariations(projectName) {
    const matchedPrefix = PROJECT_PREFIXES.find((prefix) => projectName.startsWith(prefix));
    if (!matchedPrefix) {
        return [];
    }
    const stripped = projectName.slice(matchedPrefix.length);
    return stripped ? [stripped] : [];
}
|
|
2362
|
+
/**
 * Extracts scope variations from an npm package name.
 *
 * @param packageName - The npm package name (e.g., '@scope/name')
 * @returns Array of name variations
 */
function extractPackageNameVariations(packageName) {
    if (!packageName.startsWith('@')) {
        // Non-scoped package: the name itself is the only variation.
        return [packageName];
    }
    // Scoped package: @scope/name -> name
    const slashIndex = packageName.indexOf('/');
    if (slashIndex === -1) {
        return [];
    }
    const unscoped = packageName.slice(slashIndex + 1);
    return unscoped ? [unscoped] : [];
}
|
|
2386
|
+
/**
 * Checks if a commit scope matches any of the project scopes.
 *
 * Comparison is case-insensitive. A missing/empty scope never matches.
 *
 * @param commitScope - The scope from a conventional commit
 * @param projectScopes - Array of scopes that match the project
 * @returns True if the commit scope matches the project
 *
 * @example
 * scopeMatchesProject('versioning', ['lib-versioning', 'versioning']) // true
 * scopeMatchesProject('logging', ['lib-versioning', 'versioning']) // false
 */
function scopeMatchesProject(commitScope, projectScopes) {
    if (!commitScope) {
        return false;
    }
    const target = commitScope.toLowerCase();
    for (const candidate of projectScopes) {
        if (candidate.toLowerCase() === target) {
            return true;
        }
    }
    return false;
}
|
|
2405
|
+
/**
 * Checks if a commit scope should be explicitly excluded.
 *
 * Comparison is case-insensitive. A missing/empty scope is never excluded.
 *
 * @param commitScope - The scope from a conventional commit
 * @param excludeScopes - Array of scopes to exclude
 * @returns True if the scope should be excluded
 */
function scopeIsExcluded(commitScope, excludeScopes) {
    if (!commitScope) {
        return false;
    }
    const target = commitScope.toLowerCase();
    return excludeScopes.some((excluded) => excluded.toLowerCase() === target);
}
|
|
2419
|
+
/**
 * Default scopes to exclude from changelogs.
 *
 * These represent repository-level or infrastructure changes
 * that typically don't belong in individual project changelogs.
 * Consumers can override the list via the classification context's
 * `excludeScopes` option (see createClassificationContext).
 */
const DEFAULT_EXCLUDE_SCOPES = ['release', 'deps', 'workspace', 'root', 'repo', 'ci', 'build'];
|
|
2426
|
+
|
|
2427
|
+
/**
 * Classifies a single commit against a project.
 *
 * Implements the hybrid classification strategy:
 * 1. Check scope match (fast path)
 * 2. Check file touch (validation/catch-all)
 * 3. Check dependency touch (indirect)
 * 4. Fallback to excluded
 *
 * @param input - The commit to classify
 * @param context - Classification context with project info
 * @returns Classified commit with source attribution
 *
 * @example
 * const classified = classifyCommit(
 *   { commit: parsedCommit, raw: gitCommit },
 *   { projectScopes: ['versioning'], fileCommitHashes: new Set(['abc123']) }
 * )
 */
function classifyCommit(input, context) {
    const { commit, raw } = input;
    const { projectScopes, fileCommitHashes, dependencyCommitMap, infrastructureCommitHashes, excludeScopes = DEFAULT_EXCLUDE_SCOPES, includeScopes = [], } = context;
    const scope = commit.scope;
    const hasScope = !!scope;
    // Explicit exclusion wins over everything else.
    if (hasScope && scopeIsExcluded(scope, excludeScopes)) {
        return createClassifiedCommit(commit, raw, 'excluded');
    }
    // Priority 1: scope-based direct match (fast path).
    if (hasScope && scopeMatchesProject(scope, [...projectScopes, ...includeScopes])) {
        return createClassifiedCommit(commit, raw, 'direct-scope');
    }
    // Priority 2: file-based direct match (validation/catch-all).
    if (fileCommitHashes.has(raw.hash)) {
        // A mismatched scope here is likely a typo or cross-cutting change;
        // no scope at all still counts because the files were touched.
        const source = hasScope ? 'direct-file' : 'unscoped-file';
        return createClassifiedCommit(commit, raw, source);
    }
    // Priority 3: the scope names a dependency whose files this commit touched.
    if (hasScope && dependencyCommitMap) {
        const dependencyPath = findDependencyPath(scope, raw.hash, dependencyCommitMap);
        if (dependencyPath) {
            return createClassifiedCommit(commit, raw, 'indirect-dependency', { dependencyPath });
        }
    }
    // Priority 4: file-based infrastructure match.
    if (infrastructureCommitHashes?.has(raw.hash)) {
        return createClassifiedCommit(commit, raw, 'indirect-infra');
    }
    // Fallback: unscoped commits that touched nothing relevant are global;
    // scoped commits that matched nothing are excluded.
    return createClassifiedCommit(commit, raw, hasScope ? 'excluded' : 'unscoped-global');
}
|
|
2489
|
+
/**
 * Classifies multiple commits against a project.
 *
 * @param commits - Array of commits to classify
 * @param context - Classification context with project info
 * @returns Classification result with all commits and summary
 */
function classifyCommits(commits, context) {
    // A fresh summary supplies a zeroed per-source counter object.
    const bySource = createEmptyClassificationSummary().bySource;
    const classified = [];
    const included = [];
    const excluded = [];
    for (const input of commits) {
        const result = classifyCommit(input, context);
        classified.push(result);
        bySource[result.source] += 1;
        if (result.include) {
            included.push(result);
        }
        else {
            excluded.push(result);
        }
    }
    return {
        commits: classified,
        included,
        excluded,
        summary: {
            total: classified.length,
            included: included.length,
            excluded: excluded.length,
            bySource,
        },
    };
}
|
|
2526
|
+
/**
 * Finds a dependency path for a given scope and commit hash.
 *
 * Verifies both:
 * 1. The scope matches a dependency name (or variation)
 * 2. The commit hash is in that dependency's commit set
 *
 * This prevents false positives from mislabeled commits.
 *
 * @param scope - The commit scope
 * @param hash - The commit hash to verify
 * @param dependencyCommitMap - Map of dependencies to their commit hashes
 * @returns Dependency path if found and hash verified, undefined otherwise
 */
function findDependencyPath(scope, hash, dependencyCommitMap) {
    const wanted = scope.toLowerCase();
    for (const [depName, depHashes] of dependencyCommitMap) {
        const nameMatches = getDependencyVariations(depName)
            .some((variation) => variation.toLowerCase() === wanted);
        // CRITICAL: a matching name alone is not enough — the commit must
        // also appear in the dependency's own commit set, otherwise a
        // mislabeled commit would be attributed incorrectly.
        if (nameMatches && depHashes.has(hash)) {
            return [depName];
        }
    }
    return undefined;
}
|
|
2555
|
+
/**
 * Generates name variations for a dependency to enable flexible scope matching.
 *
 * @param depName - The dependency project or package name
 * @returns Array of name variations including stripped prefixes
 */
function getDependencyVariations(depName) {
    const variations = [depName];
    // lib- prefixed project names also match their bare form.
    if (depName.startsWith('lib-')) {
        variations.push(depName.slice('lib-'.length));
    }
    // Scoped npm packages (@scope/name) also match the unscoped name.
    if (depName.startsWith('@')) {
        const separator = depName.indexOf('/');
        if (separator >= 0) {
            variations.push(depName.slice(separator + 1));
        }
    }
    return variations;
}
|
|
2576
|
+
/**
 * Creates a classification context from common inputs.
 *
 * @param projectScopes - Scopes that match the project
 * @param fileCommitHashes - Set of commit hashes that touched project files
 * @param options - Additional context options
 * @param options.dependencyCommitMap - Map of dependency names to commit hashes touching them
 * @param options.infrastructureCommitHashes - Set of commit hashes touching infrastructure paths
 * @param options.excludeScopes - Scopes to explicitly exclude from classification
 * @param options.includeScopes - Additional scopes to include as direct matches
 * @returns A ClassificationContext object
 */
function createClassificationContext(projectScopes, fileCommitHashes, options) {
    const opts = options ?? {};
    return {
        projectScopes,
        fileCommitHashes,
        dependencyCommitMap: opts.dependencyCommitMap,
        infrastructureCommitHashes: opts.infrastructureCommitHashes,
        // Fall back to the shared defaults when no exclusions are supplied.
        excludeScopes: opts.excludeScopes ?? DEFAULT_EXCLUDE_SCOPES,
        includeScopes: opts.includeScopes,
    };
}
|
|
2598
|
+
/**
 * Creates a modified conventional commit with scope handling based on classification.
 *
 * For direct commits, the scope is removed (redundant in project changelog).
 * For indirect commits, the scope is preserved (provides context).
 *
 * @param classified - Commit with classification metadata determining scope display
 * @returns A conventional commit with appropriate scope handling
 */
function toChangelogCommit(classified) {
    const { commit, preserveScope } = classified;
    if (preserveScope || !commit.scope) {
        // Nothing to strip — return the commit unchanged.
        return commit;
    }
    // Direct commit: drop the redundant scope and rebuild the raw message.
    return {
        ...commit,
        scope: undefined,
        raw: rebuildRawWithoutScope(commit),
    };
}
|
|
2620
|
+
/**
 * Reconstructs a conventional commit message string without the scope portion.
 *
 * @param commit - The conventional commit to rebuild
 * @returns Reconstructed raw message with scope removed
 */
function rebuildRawWithoutScope(commit) {
    // The "!" marker is only emitted when breaking-ness is not already
    // conveyed by a breaking-change description.
    const bang = commit.breaking && !commit.breakingDescription ? '!' : '';
    const parts = [`${commit.type}${bang}: ${commit.subject}`];
    if (commit.body) {
        parts.push(`\n\n${commit.body}`);
    }
    for (const { key, separator, value } of commit.footers) {
        parts.push(`\n${key}${separator}${value}`);
    }
    return parts.join('');
}
|
|
2641
|
+
|
|
2642
|
+
/**
 * Creates a matcher that checks if commit scope matches any of the given scopes.
 *
 * @param scopes - Scopes to match against (case-insensitive)
 * @returns Matcher that returns true if scope matches
 *
 * @example
 * const matcher = scopeMatcher(['ci', 'build', 'tooling'])
 * matcher({ scope: 'CI', ... }) // true
 * matcher({ scope: 'feat', ... }) // false
 */
function scopeMatcher(scopes) {
    // Lower-case once up front so each match is a cheap set lookup.
    const lowered = createSet(scopes.map((scope) => scope.toLowerCase()));
    return (ctx) => (ctx.scope ? lowered.has(ctx.scope.toLowerCase()) : false);
}
|
|
2661
|
+
/**
|
|
2662
|
+
* Creates a matcher that checks if commit scope starts with any of the given prefixes.
|
|
2663
|
+
*
|
|
2664
|
+
* @param prefixes - Scope prefixes to match (case-insensitive)
|
|
2665
|
+
* @returns Matcher that returns true if scope starts with any prefix
|
|
2666
|
+
*
|
|
2667
|
+
* @example
|
|
2668
|
+
* const matcher = scopePrefixMatcher(['tool-', 'infra-'])
|
|
2669
|
+
* matcher({ scope: 'tool-package', ... }) // true
|
|
2670
|
+
* matcher({ scope: 'lib-utils', ... }) // false
|
|
46
2671
|
*/
|
|
47
|
-
function
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
continueOnError: options.continueOnError,
|
|
55
|
-
dependsOn: options.dependsOn,
|
|
2672
|
+
function scopePrefixMatcher(prefixes) {
    // Normalize prefixes once; each call only lower-cases the scope.
    const loweredPrefixes = prefixes.map((prefix) => prefix.toLowerCase());
    return (ctx) => {
        const scope = ctx.scope?.toLowerCase();
        if (!scope) {
            return false;
        }
        return loweredPrefixes.some((prefix) => scope.startsWith(prefix));
    };
}
|
|
58
2681
|
/**
|
|
59
|
-
*
|
|
2682
|
+
* Combines matchers with OR logic - returns true if ANY matcher matches.
|
|
60
2683
|
*
|
|
61
|
-
* @param
|
|
62
|
-
* @returns
|
|
2684
|
+
* @param matchers - Matchers to combine
|
|
2685
|
+
* @returns Combined matcher
|
|
2686
|
+
*
|
|
2687
|
+
* @example
|
|
2688
|
+
* const combined = anyOf(
|
|
2689
|
+
* scopeMatcher(['ci', 'build']),
|
|
2690
|
+
* messageMatcher(['[infra]']),
|
|
2691
|
+
* custom((ctx) => ctx.scope?.startsWith('tool-'))
|
|
2692
|
+
* )
|
|
63
2693
|
*/
|
|
64
|
-
function
|
|
65
|
-
return
|
|
66
|
-
status: 'skipped',
|
|
67
|
-
message,
|
|
68
|
-
};
|
|
2694
|
+
function anyOf(...matchers) {
    // Short-circuits on the first matcher that returns true.
    return (ctx) => {
        for (const matcher of matchers) {
            if (matcher(ctx)) {
                return true;
            }
        }
        return false;
    };
}
|
|
70
|
-
|
|
71
|
-
const FETCH_REGISTRY_STEP_ID = 'fetch-registry';
|
|
72
2697
|
/**
|
|
73
|
-
*
|
|
2698
|
+
* Matches common CI/CD scopes.
|
|
74
2699
|
*
|
|
75
|
-
*
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
2700
|
+
* Matches: ci, cd, build, pipeline, workflow, actions
|
|
2701
|
+
*/
|
|
2702
|
+
const CI_SCOPE_MATCHER = scopeMatcher(['ci', 'cd', 'build', 'pipeline', 'workflow', 'actions']);
/**
 * Matches common tooling/workspace scopes.
 *
 * Matches: tooling, workspace, monorepo, nx, root
 */
const TOOLING_SCOPE_MATCHER = scopeMatcher(['tooling', 'workspace', 'monorepo', 'nx', 'root']);
/**
 * Matches tool-prefixed scopes (e.g., tool-package, tool-scripts).
 */
const TOOL_PREFIX_MATCHER = scopePrefixMatcher(['tool-']);
|
|
2713
|
+
/**
|
|
2714
|
+
* Combined matcher for common infrastructure patterns.
|
|
84
2715
|
*
|
|
85
|
-
*
|
|
2716
|
+
* Combines CI, tooling, and tool-prefix matchers. NOTE(review): the combined matcher expression below is not bound to a name in this bundle — verify the result is assigned/exported upstream.
|
|
86
2717
|
*/
|
|
87
|
-
|
|
88
|
-
return createStep(FETCH_REGISTRY_STEP_ID, 'Fetch Registry Version', async (ctx) => {
|
|
89
|
-
const { registry, tree, projectRoot, packageName, logger } = ctx;
|
|
90
|
-
// Read local package.json for current version
|
|
91
|
-
const packageJsonPath = `${projectRoot}/package.json`;
|
|
92
|
-
let currentVersion = '0.0.0';
|
|
93
|
-
try {
|
|
94
|
-
const content = tree.read(packageJsonPath, 'utf-8');
|
|
95
|
-
if (content) {
|
|
96
|
-
const pkg = parse(content);
|
|
97
|
-
currentVersion = pkg.version ?? '0.0.0';
|
|
98
|
-
}
|
|
99
|
-
}
|
|
100
|
-
catch (error) {
|
|
101
|
-
logger.warn(`Could not read package.json: ${error}`);
|
|
102
|
-
}
|
|
103
|
-
// Query registry for published version
|
|
104
|
-
let publishedVersion = null;
|
|
105
|
-
let isFirstRelease = true;
|
|
106
|
-
try {
|
|
107
|
-
publishedVersion = await registry.getLatestVersion(packageName);
|
|
108
|
-
isFirstRelease = publishedVersion === null;
|
|
109
|
-
}
|
|
110
|
-
catch (error) {
|
|
111
|
-
// Package might not exist yet, which is fine
|
|
112
|
-
logger.debug(`Registry query failed (package may not exist): ${error}`);
|
|
113
|
-
isFirstRelease = true;
|
|
114
|
-
}
|
|
115
|
-
const message = isFirstRelease ? `First release (local: ${currentVersion})` : `Published: ${publishedVersion}, Local: ${currentVersion}`;
|
|
116
|
-
return {
|
|
117
|
-
status: 'success',
|
|
118
|
-
stateUpdates: {
|
|
119
|
-
publishedVersion,
|
|
120
|
-
currentVersion,
|
|
121
|
-
isFirstRelease,
|
|
122
|
-
},
|
|
123
|
-
message,
|
|
124
|
-
};
|
|
125
|
-
});
|
|
126
|
-
}
|
|
127
|
-
|
|
2718
|
+
anyOf(CI_SCOPE_MATCHER, TOOLING_SCOPE_MATCHER, TOOL_PREFIX_MATCHER);
|
|
128
2719
|
/**
|
|
129
|
-
*
|
|
2720
|
+
* Builds a combined matcher from infrastructure configuration.
|
|
130
2721
|
*
|
|
131
|
-
*
|
|
132
|
-
*
|
|
2722
|
+
* Combines scope-based matching with any custom matcher using OR logic.
|
|
2723
|
+
* Path-based matching is handled separately via git queries.
|
|
133
2724
|
*
|
|
134
|
-
*
|
|
135
|
-
*
|
|
2725
|
+
* @param config - Infrastructure configuration
|
|
2726
|
+
* @returns Combined matcher, or null if no matchers configured
|
|
136
2727
|
*
|
|
137
|
-
* @
|
|
2728
|
+
* @example
|
|
2729
|
+
* const matcher = buildInfrastructureMatcher({
|
|
2730
|
+
* scopes: ['ci', 'build'],
|
|
2731
|
+
* matcher: (ctx) => ctx.scope?.startsWith('tool-')
|
|
2732
|
+
* })
|
|
138
2733
|
*/
|
|
139
|
-
|
|
140
|
-
const
|
|
141
|
-
|
|
2734
|
+
function buildInfrastructureMatcher(config) {
    const parts = [];
    // Scope-based matching, if any scopes are configured.
    if (config.scopes && config.scopes.length > 0) {
        parts.push(scopeMatcher(config.scopes));
    }
    // Caller-supplied custom matcher, if provided.
    if (config.matcher) {
        parts.push(config.matcher);
    }
    switch (parts.length) {
        case 0:
            // Nothing configured — no infrastructure matching.
            return null;
        case 1:
            // A single matcher needs no combining wrapper.
            return parts[0];
        default:
            return anyOf(...parts);
    }
}
|
|
142
2753
|
/**
|
|
143
|
-
*
|
|
144
|
-
* Use this instead of `new Error()`.
|
|
2754
|
+
* Creates match context from a git commit.
|
|
145
2755
|
*
|
|
146
|
-
*
|
|
147
|
-
*
|
|
148
|
-
* @
|
|
2756
|
+
* Extracts scope from conventional commit message if present.
|
|
2757
|
+
*
|
|
2758
|
+
* @param commit - Git commit to create context for
|
|
2759
|
+
* @param scope - Pre-parsed scope (optional, saves re-parsing)
|
|
2760
|
+
* @returns Match context for use with matchers
|
|
149
2761
|
*/
|
|
150
|
-
|
|
2762
|
+
function createMatchContext(commit, scope) {
    // Surface the commonly-inspected fields alongside the full commit.
    const { subject, message } = commit;
    return { commit, scope, subject, message };
}
|
|
151
2770
|
|
|
152
2771
|
/**
|
|
153
2772
|
* Replaces all occurrences of a character in a string.
|
|
@@ -573,78 +3192,525 @@ function splitLines(message) {
|
|
|
573
3192
|
return lines;
|
|
574
3193
|
}
|
|
575
3194
|
|
|
3195
|
+
/**
 * Default scope filtering configuration.
 *
 * Uses DEFAULT_EXCLUDE_SCOPES from commits/classify to ensure consistency
 * between flow-level filtering and commit classification.
 */
const DEFAULT_SCOPE_FILTERING_CONFIG = {
    strategy: 'hybrid', // scope matching combined with file-touch classification
    includeScopes: [], // no extra direct-match scopes by default
    excludeScopes: DEFAULT_EXCLUDE_SCOPES,
    trackDependencyChanges: false, // indirect dependency tracking is opt-in
    infrastructure: undefined, // no infrastructure config by default
    infrastructureMatcher: undefined, // no custom infrastructure matcher by default
};
|
|
3209
|
+
|
|
576
3210
|
// Stable step id; also referenced by steps that depend on this one.
const ANALYZE_COMMITS_STEP_ID = 'analyze-commits';
/**
 * Creates the analyze-commits step.
 *
 * This step:
 * 1. Uses publishedCommit from npm registry (set by fetch-registry step)
 * 2. Verifies the commit is reachable from current HEAD
 * 3. Gets all commits since that commit (or recent commits if first release/fallback)
 * 4. Parses each commit using conventional commit format
 * 5. Classifies commits based on scope filtering strategy
 * 6. Filters to only release-worthy commits that belong to this project
 *
 * State updates:
 * - effectiveBaseCommit: The verified base commit (null if fallback was used)
 * - commits: Array of parsed conventional commits (for backward compatibility)
 * - classificationResult: Full classification result with source attribution
 *
 * @returns A FlowStep that analyzes commits
 */
function createAnalyzeCommitsStep() {
    return createStep(ANALYZE_COMMITS_STEP_ID, 'Analyze Commits', async (ctx) => {
        const { git, projectName, projectRoot, packageName, workspaceRoot, config, logger, state } = ctx;
        // Use publishedCommit from registry (set by fetch-registry step)
        const { publishedCommit, isFirstRelease } = state;
        let rawCommits;
        // Stays null when we cannot anchor the range to a published commit.
        let effectiveBaseCommit = null;
        if (publishedCommit && !isFirstRelease) {
            // CRITICAL: Verify the commit exists and is reachable from HEAD
            if (git.commitReachableFromHead(publishedCommit)) {
                rawCommits = git.getCommitsSince(publishedCommit);
                effectiveBaseCommit = publishedCommit;
                logger.debug(`Found ${rawCommits.length} commits since ${publishedCommit.slice(0, 7)}`);
            }
            else {
                // GRACEFUL DEGRADATION: Commit not in history (rebase/force push occurred)
                logger.warn(`Published commit ${publishedCommit.slice(0, 7)} not found in history. ` +
                    `This may indicate a rebase or force push occurred after publishing v${state.publishedVersion}. ` +
                    `Falling back to recent commit analysis.`);
                rawCommits = git.getCommitLog({ maxCount: 100 });
                // effectiveBaseCommit stays null - no compare URL will be generated
            }
        }
        else {
            // First release or no published version
            rawCommits = git.getCommitLog({ maxCount: 100 });
            logger.debug(`First release - analyzing up to ${rawCommits.length} commits`);
        }
        // Get scope filtering configuration (user config overrides defaults key-by-key;
        // shallow merge, so nested objects in config.scopeFiltering replace defaults wholesale)
        const scopeFilteringConfig = {
            ...DEFAULT_SCOPE_FILTERING_CONFIG,
            ...config.scopeFiltering,
        };
        // 'inferred' is resolved to a concrete strategy by sampling commit scopes.
        const strategy = resolveStrategy(scopeFilteringConfig.strategy ?? 'hybrid', rawCommits);
        // Parse commits with conventional commit format
        const releaseTypes = config.releaseTypes ?? ['feat', 'fix', 'perf', 'revert'];
        const parsedCommits = [];
        for (const rawCommit of rawCommits) {
            const parsed = parseConventionalCommit(rawCommit.message);
            // Only commits of a release-worthy type survive this filter.
            if (parsed.type && releaseTypes.includes(parsed.type)) {
                parsedCommits.push({
                    commit: parsed,
                    raw: {
                        hash: rawCommit.hash,
                        shortHash: rawCommit.hash.slice(0, 7),
                        message: rawCommit.message,
                        subject: parsed.subject ?? rawCommit.message.split('\n')[0],
                        body: parsed.body ?? '',
                        // Author/committer metadata is not available from this log
                        // shape; fields are filled with empty placeholders.
                        authorName: '',
                        authorEmail: '',
                        authorDate: '',
                        committerName: '',
                        committerEmail: '',
                        commitDate: '',
                        parents: [],
                        refs: [],
                    },
                });
            }
        }
        // Build file commit hashes for hybrid/file-only strategies
        let fileCommitHashes = createSet();
        if (strategy === 'hybrid' || strategy === 'file-only') {
            // Get commits that touched project files using path filter
            const relativePath = getRelativePath(workspaceRoot, projectRoot);
            const pathFilteredCommits = effectiveBaseCommit
                ? git.getCommitsSince(effectiveBaseCommit, { path: relativePath })
                : git.getCommitLog({ maxCount: 100, path: relativePath });
            fileCommitHashes = createSet(pathFilteredCommits.map((c) => c.hash));
            logger.debug(`Found ${fileCommitHashes.size} commits touching ${relativePath}`);
        }
        // Derive project scopes
        const projectScopes = deriveProjectScopes({
            projectName,
            packageName,
            additionalScopes: scopeFilteringConfig.includeScopes,
        });
        logger.debug(`Project scopes: ${projectScopes.join(', ')}`);
        // Build infrastructure commit hashes for file-based infrastructure detection
        const infrastructureCommitHashes = buildInfrastructureCommitHashes(git, effectiveBaseCommit, rawCommits, parsedCommits, scopeFilteringConfig, logger);
        // Build dependency commit map if tracking is enabled (Phase 4)
        let dependencyCommitMap;
        if (scopeFilteringConfig.trackDependencyChanges) {
            dependencyCommitMap = buildDependencyCommitMap(git, workspaceRoot, projectName, effectiveBaseCommit, logger);
        }
        // Create classification context
        const classificationContext = createClassificationContext(projectScopes, fileCommitHashes, {
            excludeScopes: scopeFilteringConfig.excludeScopes,
            includeScopes: scopeFilteringConfig.includeScopes,
            infrastructureCommitHashes,
            dependencyCommitMap,
        });
        // Classify commits
        const classificationResult = classifyCommits(parsedCommits, classificationContext);
        // Apply strategy-specific filtering
        const includedCommits = applyStrategyFilter(classificationResult.included, strategy);
        // Extract conventional commits for backward compatibility
        // Use toChangelogCommit to properly handle scope based on classification
        const commits = includedCommits.map((c) => toChangelogCommit(c));
        // Build message with classification summary
        const { summary } = classificationResult;
        const message = buildSummaryMessage(commits.length, rawCommits.length, summary, strategy);
        logger.debug(`Classification breakdown: direct-scope=${summary.bySource['direct-scope']}, ` +
            `direct-file=${summary.bySource['direct-file']}, unscoped-file=${summary.bySource['unscoped-file']}, ` +
            `excluded=${summary.bySource['excluded']}`);
        return {
            status: 'success',
            stateUpdates: {
                effectiveBaseCommit,
                commits,
                classificationResult,
            },
            message,
        };
    }, {
        // Must run after the registry step that populates publishedCommit/isFirstRelease.
        dependsOn: ['fetch-registry'],
    });
}
|
|
3347
|
+
/**
 * Resolves the filtering strategy, handling 'inferred' by analyzing commits.
 *
 * Inference rule: if more than 70% of commits carry a conventional scope,
 * scope matching alone is reliable ('scope-only'); below 30%, path-based
 * matching is better ('file-only'); anything in between uses 'hybrid'.
 *
 * @param strategy - The configured scope filtering strategy
 * @param commits - The commits to analyze for strategy inference
 * @returns The resolved strategy (never 'inferred')
 */
function resolveStrategy(strategy, commits) {
    if (strategy !== 'inferred') {
        return strategy;
    }
    // Count how many commits carry a conventional-commit scope.
    const scopedCount = commits.reduce(
        (count, commit) => (parseConventionalCommit(commit.message).scope ? count + 1 : count),
        0
    );
    const scopeRatio = commits.length > 0 ? scopedCount / commits.length : 0;
    if (scopeRatio > 0.7) {
        return 'scope-only';
    }
    if (scopeRatio < 0.3) {
        return 'file-only';
    }
    return 'hybrid';
}
|
|
3379
|
+
/**
 * Applies strategy-specific filtering to classified commits.
 *
 * @param commits - The classified commits to filter
 * @param strategy - The resolved filtering strategy to apply
 * @returns Filtered commits based on the strategy
 */
function applyStrategyFilter(commits, strategy) {
    // scope-only keeps commits attributed purely by conventional-commit scope.
    if (strategy === 'scope-only') {
        return commits.filter(({ source }) => source === 'direct-scope');
    }
    // file-only keeps commits attributed by file paths (scoped or not).
    if (strategy === 'file-only') {
        return commits.filter(({ source }) => source === 'direct-file' || source === 'unscoped-file');
    }
    // 'hybrid' (and any unrecognized value): nothing further to remove,
    // exclusion already happened inside classifyCommits.
    return commits;
}
|
|
3400
|
+
/**
 * Gets the relative path from workspace root to project root.
 *
 * @param workspaceRoot - The absolute path to the workspace root
 * @param projectRoot - The absolute path to the project root
 * @returns The relative path from workspace to project; '' when they are equal;
 *          the unchanged projectRoot when it is not under workspaceRoot
 */
function getRelativePath(workspaceRoot, projectRoot) {
    // Project IS the workspace root.
    if (projectRoot === workspaceRoot) {
        return '';
    }
    // BUGFIX: require a path-segment boundary after the prefix. A bare
    // startsWith(workspaceRoot) would treat '/repo-other/lib' as being inside
    // the workspace '/repo' and return the bogus relative path '-other/lib'.
    const prefix = workspaceRoot.endsWith('/') ? workspaceRoot : `${workspaceRoot}/`;
    if (projectRoot.startsWith(prefix)) {
        return projectRoot.slice(prefix.length);
    }
    // Not under the workspace root - fall back to the project path as-is.
    return projectRoot;
}
|
|
3413
|
+
/**
 * Builds a summary message for the step result.
 *
 * @param includedCount - Number of commits included in the release
 * @param totalCount - Total number of commits analyzed
 * @param summary - Classification summary object (currently unused; kept for
 *                  interface stability and future breakdown reporting)
 * @param summary.bySource - Count of commits by source type
 * @param strategy - The filtering strategy used
 * @returns A human-readable summary message
 */
function buildSummaryMessage(includedCount, totalCount, summary, strategy) {
    if (includedCount === 0) {
        return `No releasable commits found for this project (${totalCount} total, strategy: ${strategy})`;
    }
    // CONSISTENCY FIX: the non-zero message previously joined parts with a
    // space, producing "(N total strategy: X)" while the zero-count branch
    // says "(N total, strategy: X)". Both branches now use the same format.
    return `Found ${includedCount} releasable commits (${totalCount} total, strategy: ${strategy})`;
}
|
|
3430
|
+
/**
 * Builds a set of commit hashes that touched infrastructure paths or match infrastructure criteria.
 *
 * Supports multiple detection methods combined with OR logic:
 * 1. Path-based: Commits touching configured infrastructure paths (via git)
 * 2. Scope-based: Commits with scopes matching infrastructure.scopes
 * 3. Custom matcher: User-provided matching logic
 *
 * @param git - Git client for querying commits by path
 * @param baseCommit - Base commit hash for commit range (null for first release/fallback)
 * @param rawCommits - All raw commits being analyzed
 * @param parsedCommits - Parsed commits with conventional commit data
 * @param config - Scope filtering configuration
 * @param logger - Logger with debug method for output
 * @param logger.debug - Debug logging function
 * @returns Set of commit hashes classified as infrastructure, or undefined when
 *          no detection method is configured (so callers can distinguish
 *          "nothing configured" from "configured but nothing matched")
 */
function buildInfrastructureCommitHashes(git, baseCommit, rawCommits, parsedCommits, config, logger) {
    // Collect all infrastructure commit hashes
    let infraHashes = createSet();
    // Method 1: Path-based detection (query git for commits touching infra paths)
    const infraPaths = config.infrastructure?.paths ?? [];
    if (infraPaths.length > 0) {
        for (const infraPath of infraPaths) {
            // Without a base commit, fall back to the same 100-commit window
            // used elsewhere in this step.
            const pathCommits = baseCommit
                ? git.getCommitsSince(baseCommit, { path: infraPath })
                : git.getCommitLog({ maxCount: 100, path: infraPath });
            for (const commit of pathCommits) {
                // NOTE(review): reassignment suggests createSet may return a
                // persistent (immutable) set whose add() yields a new set;
                // with a native Set this is a harmless self-assignment — confirm.
                infraHashes = infraHashes.add(commit.hash);
            }
        }
        logger.debug(`Found ${infraHashes.size} commits touching infrastructure paths: ${infraPaths.join(', ')}`);
    }
    // Method 2 & 3: Scope-based and custom matcher detection
    // Build a combined matcher from infrastructure config and/or custom matcher
    const configMatcher = config.infrastructure ? buildInfrastructureMatcher(config.infrastructure) : null;
    const customMatcher = config.infrastructureMatcher;
    const combinedMatcher = combineMatcher(configMatcher, customMatcher);
    if (combinedMatcher) {
        // Build a lookup for parsed commits by hash
        let parsedByHash = createMap();
        for (const parsed of parsedCommits) {
            parsedByHash = parsedByHash.set(parsed.raw.hash, parsed);
        }
        // Evaluate each raw commit against the matcher
        for (const rawCommit of rawCommits) {
            // Skip if already matched by path
            if (infraHashes.has(rawCommit.hash))
                continue;
            // Get parsed scope if available (only release-type commits were parsed,
            // so scope may be undefined here even for scoped messages)
            const parsed = parsedByHash.get(rawCommit.hash);
            const scope = parsed?.commit.scope;
            // Create match context and evaluate
            const context = createMatchContext(rawCommit, scope);
            if (combinedMatcher(context)) {
                infraHashes = infraHashes.add(rawCommit.hash);
            }
        }
        logger.debug(`Infrastructure matcher found ${infraHashes.size} total commits`);
    }
    // Return undefined if no infrastructure detection configured
    if (infraHashes.size === 0 && infraPaths.length === 0 && !combinedMatcher) {
        return undefined;
    }
    return infraHashes;
}
|
|
3496
|
+
/**
|
|
3497
|
+
* Combines two optional matchers into one using OR logic.
|
|
3498
|
+
*
|
|
3499
|
+
* @param a - First matcher (may be null)
|
|
3500
|
+
* @param b - Second matcher (may be undefined)
|
|
3501
|
+
* @returns Combined matcher or null if neither provided
|
|
3502
|
+
*/
|
|
3503
|
+
function combineMatcher(a, b) {
|
|
3504
|
+
if (a && b) {
|
|
3505
|
+
return (ctx) => a(ctx) || b(ctx);
|
|
3506
|
+
}
|
|
3507
|
+
return a ?? b ?? null;
|
|
3508
|
+
}
|
|
3509
|
+
/**
 * Builds a map of dependency project names to the commit hashes that touched them.
 *
 * This enables accurate indirect-dependency classification by verifying that:
 * 1. A commit's scope matches a dependency name
 * 2. The commit actually touched that dependency's files (hash in set)
 *
 * Uses lib-project-scope for dependency discovery, avoiding hard NX dependency.
 *
 * @param git - Git client for querying commits by path
 * @param workspaceRoot - Absolute path to workspace root
 * @param projectName - Name of the project being versioned
 * @param baseCommit - Base commit hash for commit range (null for first release/fallback)
 * @param logger - Logger with debug method for output
 * @param logger.debug - Debug logging function
 * @returns Map of dependency names to commit hashes touching that dependency
 *          (empty map when the project has no dependencies or discovery fails)
 */
function buildDependencyCommitMap(git, workspaceRoot, projectName, baseCommit, logger) {
    let dependencyMap = createMap();
    try {
        // Discover all projects in workspace using lib-project-scope
        // This gracefully handles NX and non-NX workspaces
        const projects = discoverNxProjects(workspaceRoot);
        const projectGraph = buildSimpleProjectGraph(workspaceRoot, projects);
        // Get dependencies for the current project
        const projectDeps = projectGraph.dependencies[projectName] ?? [];
        if (projectDeps.length === 0) {
            logger.debug(`No dependencies found for project: ${projectName}`);
            return dependencyMap;
        }
        logger.debug(`Found ${projectDeps.length} dependencies for ${projectName}: ${projectDeps.map((d) => d.target).join(', ')}`);
        // For each dependency, find commits that touched its files
        for (const dep of projectDeps) {
            const depNode = projectGraph.nodes[dep.target];
            // A dependency without a resolvable root directory cannot be
            // mapped to file changes; skip it rather than fail.
            if (!depNode?.data?.root) {
                logger.debug(`Skipping dependency ${dep.target}: no root path found`);
                continue;
            }
            const depRoot = depNode.data.root;
            // Query git for commits touching this dependency's path
            // (same 100-commit fallback window as the rest of the step).
            const depCommits = baseCommit
                ? git.getCommitsSince(baseCommit, { path: depRoot })
                : git.getCommitLog({ maxCount: 100, path: depRoot });
            if (depCommits.length > 0) {
                const hashSet = createSet(depCommits.map((c) => c.hash));
                // NOTE(review): reassignment suggests createMap may return a
                // persistent map whose set() yields a new map; with a native
                // Map this is a harmless self-assignment — confirm.
                dependencyMap = dependencyMap.set(dep.target, hashSet);
                logger.debug(`Dependency ${dep.target}: ${depCommits.length} commits at ${depRoot}`);
            }
        }
    }
    catch (error) {
        // Graceful degradation: if project discovery fails, return empty map
        // This allows versioning to proceed without dependency tracking
        const message = error instanceof Error ? error.message : String(error);
        logger.debug(`Failed to build dependency map: ${message}`);
    }
    return dependencyMap;
}
|
|
3567
|
+
|
|
3568
|
+
/**
 * Safe copies of Number built-in methods and constants.
 *
 * These references are captured at module initialization time to protect against
 * prototype pollution attacks. Import only what you need for tree-shaking.
 *
 * @module @hyperfrontend/immutable-api-utils/built-in-copy/number
 */
// Capture references at module initialization time
const _parseInt = globalThis.parseInt;
const _isNaN = globalThis.isNaN;
// ============================================================================
// Parsing
// ============================================================================
/**
 * (Safe copy) Parses a string and returns an integer.
 *
 * NOTE: this intentionally shadows the global `parseInt` within this module so
 * that local callers (e.g. compareIdentifiers) go through the captured,
 * pollution-safe reference.
 */
const parseInt = _parseInt;
// ============================================================================
// Global Type Checking (legacy, less strict)
// ============================================================================
/**
 * (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
 */
const globalIsNaN = _isNaN;
|
|
3593
|
+
|
|
3594
|
+
/**
 * Compares two semantic versions.
 *
 * Precedence follows SemVer: major, minor, patch are compared numerically in
 * order; a release outranks any prerelease of the same core version; prerelease
 * identifier lists are compared element-wise, shorter lists ranking lower.
 *
 * @param a - First version
 * @param b - Second version
 * @returns -1 if a < b, 0 if a == b, 1 if a > b
 *
 * @example
 * compare(parseVersion('1.0.0'), parseVersion('2.0.0')) // -1
 * compare(parseVersion('1.0.0'), parseVersion('1.0.0')) // 0
 * compare(parseVersion('2.0.0'), parseVersion('1.0.0')) // 1
 */
function compare(a, b) {
    // Core numeric fields win in order of significance.
    for (const field of ['major', 'minor', 'patch']) {
        if (a[field] !== b[field]) {
            return a[field] < b[field] ? -1 : 1;
        }
    }
    const aPre = a.prerelease;
    const bPre = b.prerelease;
    // A plain release has higher precedence than any prerelease.
    if (aPre.length === 0 && bPre.length > 0) {
        return 1;
    }
    if (aPre.length > 0 && bPre.length === 0) {
        return -1;
    }
    // Both prerelease (or both release): walk identifiers pairwise.
    const maxLen = max(aPre.length, bPre.length);
    for (let idx = 0; idx < maxLen; idx++) {
        const left = aPre[idx];
        const right = bPre[idx];
        // The shorter identifier list has lower precedence.
        if (left === undefined && right !== undefined) {
            return -1;
        }
        if (left !== undefined && right === undefined) {
            return 1;
        }
        if (left === undefined || right === undefined) {
            continue;
        }
        const ordering = compareIdentifiers(left, right);
        if (ordering !== 0) {
            return ordering;
        }
    }
    return 0;
}
|
|
3648
|
+
/**
 * Checks if a > b.
 *
 * @param a - First version to compare
 * @param b - Second version to compare
 * @returns True if a is greater than b
 */
function gt(a, b) {
    const ordering = compare(a, b);
    return ordering === 1;
}
|
|
3658
|
+
// ============================================================================
|
|
3659
|
+
// Internal helpers
|
|
3660
|
+
// ============================================================================
|
|
3661
|
+
/**
 * Compares two prerelease identifiers.
 * Numeric identifiers have lower precedence than alphanumeric.
 * Numeric identifiers are compared numerically.
 * Alphanumeric identifiers are compared lexically.
 *
 * @param a - First prerelease identifier
 * @param b - Second prerelease identifier
 * @returns -1 if a < b, 0 if equal, 1 if a > b
 */
function compareIdentifiers(a, b) {
    const aNumeric = isNumeric(a);
    const bNumeric = isNumeric(b);
    // Mixed kinds: the numeric identifier always sorts first.
    if (aNumeric !== bNumeric) {
        return aNumeric ? -1 : 1;
    }
    if (aNumeric) {
        // Both numeric: compare by value, not by string.
        const left = parseInt(a, 10);
        const right = parseInt(b, 10);
        if (left === right) {
            return 0;
        }
        return left < right ? -1 : 1;
    }
    // Both alphanumeric: plain lexical ordering.
    if (a === b) {
        return 0;
    }
    return a < b ? -1 : 1;
}
|
|
3698
|
+
/**
 * Checks if a string consists only of digits.
 *
 * @param str - String to check for numeric content
 * @returns True if string contains only digits
 */
function isNumeric(str) {
    if (str.length === 0) {
        return false;
    }
    for (const ch of str) {
        // Anything outside the ASCII digit range disqualifies the string.
        if (ch < '0' || ch > '9') {
            return false;
        }
    }
    return true;
}
|
|
649
3715
|
|
|
650
3716
|
/**
|
|
@@ -664,32 +3730,6 @@ function format(version) {
|
|
|
664
3730
|
return result;
|
|
665
3731
|
}
|
|
666
3732
|
|
|
667
|
-
/**
|
|
668
|
-
* Safe copies of Number built-in methods and constants.
|
|
669
|
-
*
|
|
670
|
-
* These references are captured at module initialization time to protect against
|
|
671
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
672
|
-
*
|
|
673
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/number
|
|
674
|
-
*/
|
|
675
|
-
// Capture references at module initialization time
|
|
676
|
-
const _parseInt = globalThis.parseInt;
|
|
677
|
-
const _isNaN = globalThis.isNaN;
|
|
678
|
-
// ============================================================================
|
|
679
|
-
// Parsing
|
|
680
|
-
// ============================================================================
|
|
681
|
-
/**
|
|
682
|
-
* (Safe copy) Parses a string and returns an integer.
|
|
683
|
-
*/
|
|
684
|
-
const parseInt = _parseInt;
|
|
685
|
-
// ============================================================================
|
|
686
|
-
// Global Type Checking (legacy, less strict)
|
|
687
|
-
// ============================================================================
|
|
688
|
-
/**
|
|
689
|
-
* (Safe copy) Global isNaN function (coerces to number first, less strict than Number.isNaN).
|
|
690
|
-
*/
|
|
691
|
-
const globalIsNaN = _isNaN;
|
|
692
|
-
|
|
693
3733
|
/**
|
|
694
3734
|
* Creates a new SemVer object.
|
|
695
3735
|
*
|
|
@@ -1167,7 +4207,7 @@ function createCalculateBumpStep() {
|
|
|
1167
4207
|
message: 'No version bump needed',
|
|
1168
4208
|
};
|
|
1169
4209
|
}
|
|
1170
|
-
//
|
|
4210
|
+
// Parse versions for comparison
|
|
1171
4211
|
const current = parseVersion(currentVersion ?? '0.0.0');
|
|
1172
4212
|
if (!current.success || !current.version) {
|
|
1173
4213
|
return {
|
|
@@ -1176,6 +4216,27 @@ function createCalculateBumpStep() {
|
|
|
1176
4216
|
message: `Could not parse current version: ${currentVersion}`,
|
|
1177
4217
|
};
|
|
1178
4218
|
}
|
|
4219
|
+
const { publishedVersion } = state;
|
|
4220
|
+
const published = parseVersion(publishedVersion ?? '0.0.0');
|
|
4221
|
+
// Detect pending publication state: currentVersion > publishedVersion
|
|
4222
|
+
// This means a previous bump happened but was never published
|
|
4223
|
+
const isPendingPublication = published.success && published.version && publishedVersion != null && gt(current.version, published.version);
|
|
4224
|
+
if (isPendingPublication && published.version) {
|
|
4225
|
+
// ALWAYS calculate from publishedVersion - commits may have changed
|
|
4226
|
+
const next = increment(published.version, bumpType);
|
|
4227
|
+
const nextVersion = format(next);
|
|
4228
|
+
logger.info(`Pending publication detected: recalculating from ${publishedVersion} → ${nextVersion}`);
|
|
4229
|
+
return {
|
|
4230
|
+
status: 'success',
|
|
4231
|
+
stateUpdates: {
|
|
4232
|
+
bumpType,
|
|
4233
|
+
nextVersion,
|
|
4234
|
+
isPendingPublication: true,
|
|
4235
|
+
},
|
|
4236
|
+
message: `${bumpType} bump (pending): ${publishedVersion} → ${nextVersion}`,
|
|
4237
|
+
};
|
|
4238
|
+
}
|
|
4239
|
+
// Normal path: increment from currentVersion
|
|
1179
4240
|
const next = increment(current.version, bumpType);
|
|
1180
4241
|
const nextVersion = format(next);
|
|
1181
4242
|
return {
|
|
@@ -1230,24 +4291,6 @@ function createCheckIdempotencyStep() {
|
|
|
1230
4291
|
});
|
|
1231
4292
|
}
|
|
1232
4293
|
|
|
1233
|
-
/**
|
|
1234
|
-
* Safe copies of Date built-in via factory function and static methods.
|
|
1235
|
-
*
|
|
1236
|
-
* Since constructors cannot be safely captured via Object.assign, this module
|
|
1237
|
-
* provides a factory function that uses Reflect.construct internally.
|
|
1238
|
-
*
|
|
1239
|
-
* These references are captured at module initialization time to protect against
|
|
1240
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1241
|
-
*
|
|
1242
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/date
|
|
1243
|
-
*/
|
|
1244
|
-
// Capture references at module initialization time
|
|
1245
|
-
const _Date = globalThis.Date;
|
|
1246
|
-
const _Reflect$1 = globalThis.Reflect;
|
|
1247
|
-
function createDate(...args) {
|
|
1248
|
-
return _Reflect$1.construct(_Date, args);
|
|
1249
|
-
}
|
|
1250
|
-
|
|
1251
4294
|
/**
|
|
1252
4295
|
* Creates a new changelog item.
|
|
1253
4296
|
*
|
|
@@ -1262,6 +4305,8 @@ function createChangelogItem(description, options) {
|
|
|
1262
4305
|
commits: options?.commits ?? [],
|
|
1263
4306
|
references: options?.references ?? [],
|
|
1264
4307
|
breaking: options?.breaking ?? false,
|
|
4308
|
+
source: options?.source,
|
|
4309
|
+
indirect: options?.indirect,
|
|
1265
4310
|
};
|
|
1266
4311
|
}
|
|
1267
4312
|
/**
|
|
@@ -1385,96 +4430,6 @@ function getSectionType(heading) {
|
|
|
1385
4430
|
return SECTION_TYPE_MAP[normalized] ?? 'other';
|
|
1386
4431
|
}
|
|
1387
4432
|
|
|
1388
|
-
/**
|
|
1389
|
-
* Safe copies of Map built-in via factory function.
|
|
1390
|
-
*
|
|
1391
|
-
* Since constructors cannot be safely captured via Object.assign, this module
|
|
1392
|
-
* provides a factory function that uses Reflect.construct internally.
|
|
1393
|
-
*
|
|
1394
|
-
* These references are captured at module initialization time to protect against
|
|
1395
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1396
|
-
*
|
|
1397
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/map
|
|
1398
|
-
*/
|
|
1399
|
-
// Capture references at module initialization time
|
|
1400
|
-
const _Map = globalThis.Map;
|
|
1401
|
-
const _Reflect = globalThis.Reflect;
|
|
1402
|
-
/**
|
|
1403
|
-
* (Safe copy) Creates a new Map using the captured Map constructor.
|
|
1404
|
-
* Use this instead of `new Map()`.
|
|
1405
|
-
*
|
|
1406
|
-
* @param iterable - Optional iterable of key-value pairs.
|
|
1407
|
-
* @returns A new Map instance.
|
|
1408
|
-
*/
|
|
1409
|
-
const createMap = (iterable) => _Reflect.construct(_Map, iterable ? [iterable] : []);
|
|
1410
|
-
|
|
1411
|
-
/**
|
|
1412
|
-
* Safe copies of Object built-in methods.
|
|
1413
|
-
*
|
|
1414
|
-
* These references are captured at module initialization time to protect against
|
|
1415
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1416
|
-
*
|
|
1417
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/object
|
|
1418
|
-
*/
|
|
1419
|
-
// Capture references at module initialization time
|
|
1420
|
-
const _Object = globalThis.Object;
|
|
1421
|
-
/**
|
|
1422
|
-
* (Safe copy) Returns an array of key/values of the enumerable own properties of an object.
|
|
1423
|
-
*/
|
|
1424
|
-
const entries = _Object.entries;
|
|
1425
|
-
|
|
1426
|
-
/**
|
|
1427
|
-
* Safe copies of URL built-ins via factory functions.
|
|
1428
|
-
*
|
|
1429
|
-
* Provides safe references to URL and URLSearchParams.
|
|
1430
|
-
* These references are captured at module initialization time to protect against
|
|
1431
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1432
|
-
*
|
|
1433
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/url
|
|
1434
|
-
*/
|
|
1435
|
-
// Capture references at module initialization time
|
|
1436
|
-
const _URL = globalThis.URL;
|
|
1437
|
-
/**
|
|
1438
|
-
* (Safe copy) Creates an object URL for the given object.
|
|
1439
|
-
* Use this instead of `URL.createObjectURL()`.
|
|
1440
|
-
*
|
|
1441
|
-
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
1442
|
-
*/
|
|
1443
|
-
typeof _URL.createObjectURL === 'function'
|
|
1444
|
-
? _URL.createObjectURL.bind(_URL)
|
|
1445
|
-
: () => {
|
|
1446
|
-
throw new Error('URL.createObjectURL is not available in this environment');
|
|
1447
|
-
};
|
|
1448
|
-
/**
|
|
1449
|
-
* (Safe copy) Revokes an object URL previously created with createObjectURL.
|
|
1450
|
-
* Use this instead of `URL.revokeObjectURL()`.
|
|
1451
|
-
*
|
|
1452
|
-
* Note: This is a browser-only API. In Node.js environments, this will throw.
|
|
1453
|
-
*/
|
|
1454
|
-
typeof _URL.revokeObjectURL === 'function'
|
|
1455
|
-
? _URL.revokeObjectURL.bind(_URL)
|
|
1456
|
-
: () => {
|
|
1457
|
-
throw new Error('URL.revokeObjectURL is not available in this environment');
|
|
1458
|
-
};
|
|
1459
|
-
|
|
1460
|
-
/**
|
|
1461
|
-
* Safe copies of Math built-in methods.
|
|
1462
|
-
*
|
|
1463
|
-
* These references are captured at module initialization time to protect against
|
|
1464
|
-
* prototype pollution attacks. Import only what you need for tree-shaking.
|
|
1465
|
-
*
|
|
1466
|
-
* @module @hyperfrontend/immutable-api-utils/built-in-copy/math
|
|
1467
|
-
*/
|
|
1468
|
-
// Capture references at module initialization time
|
|
1469
|
-
const _Math = globalThis.Math;
|
|
1470
|
-
// ============================================================================
|
|
1471
|
-
// Min/Max
|
|
1472
|
-
// ============================================================================
|
|
1473
|
-
/**
|
|
1474
|
-
* (Safe copy) Returns the larger of zero or more numbers.
|
|
1475
|
-
*/
|
|
1476
|
-
const max = _Math.max;
|
|
1477
|
-
|
|
1478
4433
|
/**
|
|
1479
4434
|
* Line Parser
|
|
1480
4435
|
*
|
|
@@ -1530,6 +4485,25 @@ function parseVersionFromHeading(heading) {
|
|
|
1530
4485
|
if (trimmed[pos] === ']') {
|
|
1531
4486
|
pos++;
|
|
1532
4487
|
}
|
|
4488
|
+
// Handle markdown link format [version](url) - jscutlery/semver style
|
|
4489
|
+
// This extracts the compare URL from patterns like [0.0.4](https://github.com/.../compare/...)
|
|
4490
|
+
if (trimmed[pos] === '(') {
|
|
4491
|
+
const urlStart = pos + 1;
|
|
4492
|
+
let depth = 1;
|
|
4493
|
+
pos++;
|
|
4494
|
+
// Find matching closing parenthesis (handles nested parens in URLs)
|
|
4495
|
+
while (pos < trimmed.length && depth > 0) {
|
|
4496
|
+
if (trimmed[pos] === '(')
|
|
4497
|
+
depth++;
|
|
4498
|
+
else if (trimmed[pos] === ')')
|
|
4499
|
+
depth--;
|
|
4500
|
+
pos++;
|
|
4501
|
+
}
|
|
4502
|
+
// Extract URL if we found the closing paren
|
|
4503
|
+
if (depth === 0) {
|
|
4504
|
+
compareUrl = trimmed.slice(urlStart, pos - 1);
|
|
4505
|
+
}
|
|
4506
|
+
}
|
|
1533
4507
|
// Skip whitespace and separator
|
|
1534
4508
|
while (pos < trimmed.length && (trimmed[pos] === ' ' || trimmed[pos] === '-' || trimmed[pos] === '–')) {
|
|
1535
4509
|
pos++;
|
|
@@ -1546,8 +4520,8 @@ function parseVersionFromHeading(heading) {
|
|
|
1546
4520
|
while (pos < trimmed.length && trimmed[pos] === ' ') {
|
|
1547
4521
|
pos++;
|
|
1548
4522
|
}
|
|
1549
|
-
// Check for link at end: [compare](url)
|
|
1550
|
-
if (pos < trimmed.length) {
|
|
4523
|
+
// Check for link at end: [compare](url) - only if no URL was already extracted
|
|
4524
|
+
if (pos < trimmed.length && !compareUrl) {
|
|
1551
4525
|
const linkMatch = extractLink(trimmed.slice(pos));
|
|
1552
4526
|
if (linkMatch?.url) {
|
|
1553
4527
|
compareUrl = linkMatch.url;
|
|
@@ -2944,20 +5918,28 @@ function serializeIssueRef(ref) {
|
|
|
2944
5918
|
* ```
|
|
2945
5919
|
*/
|
|
2946
5920
|
function addEntry(changelog, entry, options) {
|
|
5921
|
+
const position = options?.position ?? 'start';
|
|
5922
|
+
const replaceExisting = options?.replaceExisting ?? false;
|
|
5923
|
+
const updateMetadata = options?.updateMetadata ?? false;
|
|
2947
5924
|
// Check for existing entry
|
|
2948
5925
|
const existingIndex = changelog.entries.findIndex((e) => e.version === entry.version);
|
|
2949
|
-
if (existingIndex !== -1 &&
|
|
5926
|
+
if (existingIndex !== -1 && !replaceExisting) {
|
|
2950
5927
|
throw createError(`Entry with version "${entry.version}" already exists. Use replaceExisting: true to replace.`);
|
|
2951
5928
|
}
|
|
2952
5929
|
let newEntries;
|
|
2953
|
-
{
|
|
5930
|
+
if (existingIndex !== -1 && replaceExisting) {
|
|
5931
|
+
// Replace existing entry
|
|
5932
|
+
newEntries = [...changelog.entries];
|
|
5933
|
+
newEntries[existingIndex] = entry;
|
|
5934
|
+
}
|
|
5935
|
+
else {
|
|
2954
5936
|
// Add new entry
|
|
2955
|
-
const insertIndex = 0 ;
|
|
5937
|
+
const insertIndex = position === 'start' ? 0 : position === 'end' ? changelog.entries.length : position;
|
|
2956
5938
|
newEntries = [...changelog.entries];
|
|
2957
5939
|
newEntries.splice(insertIndex, 0, entry);
|
|
2958
5940
|
}
|
|
2959
5941
|
// Build new metadata if requested
|
|
2960
|
-
const metadata = changelog.metadata;
|
|
5942
|
+
const metadata = updateMetadata ? { ...changelog.metadata, warnings: [] } : changelog.metadata;
|
|
2961
5943
|
return {
|
|
2962
5944
|
...changelog,
|
|
2963
5945
|
entries: newEntries,
|
|
@@ -2965,6 +5947,144 @@ function addEntry(changelog, entry, options) {
|
|
|
2965
5947
|
};
|
|
2966
5948
|
}
|
|
2967
5949
|
|
|
5950
|
+
/**
|
|
5951
|
+
* Changelog Entry Removal
|
|
5952
|
+
*
|
|
5953
|
+
* Functions for removing entries from a changelog.
|
|
5954
|
+
*/
|
|
5955
|
+
/**
|
|
5956
|
+
* Removes multiple entries from a changelog.
|
|
5957
|
+
*
|
|
5958
|
+
* @param changelog - The changelog to remove from
|
|
5959
|
+
* @param versions - The versions to remove
|
|
5960
|
+
* @param options - Optional removal options
|
|
5961
|
+
* @returns A new changelog without the specified entries
|
|
5962
|
+
*/
|
|
5963
|
+
function removeEntries(changelog, versions, options) {
|
|
5964
|
+
const versionsSet = createSet(versions);
|
|
5965
|
+
const newEntries = changelog.entries.filter((e) => !versionsSet.has(e.version));
|
|
5966
|
+
return {
|
|
5967
|
+
...changelog,
|
|
5968
|
+
entries: newEntries,
|
|
5969
|
+
};
|
|
5970
|
+
}
|
|
5971
|
+
|
|
5972
|
+
/**
|
|
5973
|
+
* Creates a platform-specific compare URL for viewing changes between two commits.
|
|
5974
|
+
*
|
|
5975
|
+
* Each platform has a different URL format:
|
|
5976
|
+
* - **GitHub**: `{baseUrl}/compare/{fromCommit}...{toCommit}` (three dots)
|
|
5977
|
+
* - **GitLab**: `{baseUrl}/-/compare/{fromCommit}...{toCommit}` (three dots, `/-/` prefix)
|
|
5978
|
+
* - **Bitbucket**: `{baseUrl}/compare/{toCommit}..{fromCommit}` (two dots, reversed order)
|
|
5979
|
+
* - **Azure DevOps**: `{baseUrl}/compare?version=GT{toCommit}&compareVersion=GT{fromCommit}` (query params)
|
|
5980
|
+
*
|
|
5981
|
+
* For `custom` platforms, a `formatCompareUrl` function must be provided in the repository config.
|
|
5982
|
+
* For `unknown` platforms, returns `null`.
|
|
5983
|
+
*
|
|
5984
|
+
* @param options - Compare URL options including repository, fromCommit, and toCommit
|
|
5985
|
+
* @returns The compare URL string, or null if URL cannot be generated
|
|
5986
|
+
*
|
|
5987
|
+
* @example
|
|
5988
|
+
* ```typescript
|
|
5989
|
+
* // GitHub
|
|
5990
|
+
* createCompareUrl({
|
|
5991
|
+
* repository: { platform: 'github', baseUrl: 'https://github.com/owner/repo' },
|
|
5992
|
+
* fromCommit: 'abc1234',
|
|
5993
|
+
* toCommit: 'def5678'
|
|
5994
|
+
* })
|
|
5995
|
+
* // → 'https://github.com/owner/repo/compare/abc1234...def5678'
|
|
5996
|
+
*
|
|
5997
|
+
* // GitLab
|
|
5998
|
+
* createCompareUrl({
|
|
5999
|
+
* repository: { platform: 'gitlab', baseUrl: 'https://gitlab.com/group/project' },
|
|
6000
|
+
* fromCommit: 'abc1234',
|
|
6001
|
+
* toCommit: 'def5678'
|
|
6002
|
+
* })
|
|
6003
|
+
* // → 'https://gitlab.com/group/project/-/compare/abc1234...def5678'
|
|
6004
|
+
*
|
|
6005
|
+
* // Bitbucket (reversed order)
|
|
6006
|
+
* createCompareUrl({
|
|
6007
|
+
* repository: { platform: 'bitbucket', baseUrl: 'https://bitbucket.org/owner/repo' },
|
|
6008
|
+
* fromCommit: 'abc1234',
|
|
6009
|
+
* toCommit: 'def5678'
|
|
6010
|
+
* })
|
|
6011
|
+
* // → 'https://bitbucket.org/owner/repo/compare/def5678..abc1234'
|
|
6012
|
+
*
|
|
6013
|
+
* // Azure DevOps
|
|
6014
|
+
* createCompareUrl({
|
|
6015
|
+
* repository: { platform: 'azure-devops', baseUrl: 'https://dev.azure.com/org/proj/_git/repo' },
|
|
6016
|
+
* fromCommit: 'abc1234',
|
|
6017
|
+
* toCommit: 'def5678'
|
|
6018
|
+
* })
|
|
6019
|
+
* // → 'https://dev.azure.com/org/proj/_git/repo/compare?version=GTdef5678&compareVersion=GTabc1234'
|
|
6020
|
+
*
|
|
6021
|
+
* // Custom formatter
|
|
6022
|
+
* createCompareUrl({
|
|
6023
|
+
* repository: {
|
|
6024
|
+
* platform: 'custom',
|
|
6025
|
+
* baseUrl: 'https://my-git.internal/repo',
|
|
6026
|
+
* formatCompareUrl: (from, to) => `https://my-git.internal/diff/${from}/${to}`
|
|
6027
|
+
* },
|
|
6028
|
+
* fromCommit: 'abc1234',
|
|
6029
|
+
* toCommit: 'def5678'
|
|
6030
|
+
* })
|
|
6031
|
+
* // → 'https://my-git.internal/diff/abc1234/def5678'
|
|
6032
|
+
* ```
|
|
6033
|
+
*/
|
|
6034
|
+
function createCompareUrl(options) {
|
|
6035
|
+
const { repository, fromCommit, toCommit } = options;
|
|
6036
|
+
// Validate inputs
|
|
6037
|
+
if (!repository || !fromCommit || !toCommit) {
|
|
6038
|
+
return null;
|
|
6039
|
+
}
|
|
6040
|
+
// If custom formatter is provided, use it (works for any platform including overrides)
|
|
6041
|
+
if (repository.formatCompareUrl) {
|
|
6042
|
+
return repository.formatCompareUrl(fromCommit, toCommit);
|
|
6043
|
+
}
|
|
6044
|
+
const { platform, baseUrl } = repository;
|
|
6045
|
+
// Cannot generate URL for unknown platforms without a formatter
|
|
6046
|
+
if (platform === 'unknown') {
|
|
6047
|
+
return null;
|
|
6048
|
+
}
|
|
6049
|
+
// Custom platform requires a formatter
|
|
6050
|
+
if (platform === 'custom') {
|
|
6051
|
+
return null;
|
|
6052
|
+
}
|
|
6053
|
+
// Generate URL for known platforms
|
|
6054
|
+
if (isKnownPlatform(platform)) {
|
|
6055
|
+
return formatKnownPlatformCompareUrl(platform, baseUrl, fromCommit, toCommit);
|
|
6056
|
+
}
|
|
6057
|
+
return null;
|
|
6058
|
+
}
|
|
6059
|
+
/**
|
|
6060
|
+
* Formats a compare URL for known platforms.
|
|
6061
|
+
*
|
|
6062
|
+
* @param platform - Known platform type
|
|
6063
|
+
* @param baseUrl - Repository base URL
|
|
6064
|
+
* @param fromCommit - Source commit hash (older version)
|
|
6065
|
+
* @param toCommit - Target commit hash (newer version)
|
|
6066
|
+
* @returns Formatted compare URL
|
|
6067
|
+
*
|
|
6068
|
+
* @internal
|
|
6069
|
+
*/
|
|
6070
|
+
function formatKnownPlatformCompareUrl(platform, baseUrl, fromCommit, toCommit) {
|
|
6071
|
+
switch (platform) {
|
|
6072
|
+
case 'github':
|
|
6073
|
+
// GitHub: {baseUrl}/compare/{fromCommit}...{toCommit}
|
|
6074
|
+
return `${baseUrl}/compare/${fromCommit}...${toCommit}`;
|
|
6075
|
+
case 'gitlab':
|
|
6076
|
+
// GitLab: {baseUrl}/-/compare/{fromCommit}...{toCommit}
|
|
6077
|
+
return `${baseUrl}/-/compare/${fromCommit}...${toCommit}`;
|
|
6078
|
+
case 'bitbucket':
|
|
6079
|
+
// Bitbucket: {baseUrl}/compare/{toCommit}..{fromCommit} (reversed order, two dots)
|
|
6080
|
+
return `${baseUrl}/compare/${toCommit}..${fromCommit}`;
|
|
6081
|
+
case 'azure-devops':
|
|
6082
|
+
// Azure DevOps: {baseUrl}/compare?version=GT{toCommit}&compareVersion=GT{fromCommit}
|
|
6083
|
+
// Use encodeURIComponent for query parameter values
|
|
6084
|
+
return `${baseUrl}/compare?version=GT${encodeURIComponent(toCommit)}&compareVersion=GT${encodeURIComponent(fromCommit)}`;
|
|
6085
|
+
}
|
|
6086
|
+
}
|
|
6087
|
+
|
|
2968
6088
|
const GENERATE_CHANGELOG_STEP_ID = 'generate-changelog';
|
|
2969
6089
|
/**
|
|
2970
6090
|
* Maps conventional commit types to changelog section types.
|
|
@@ -2982,6 +6102,32 @@ const COMMIT_TYPE_TO_SECTION = {
|
|
|
2982
6102
|
chore: 'chores',
|
|
2983
6103
|
style: 'other',
|
|
2984
6104
|
};
|
|
6105
|
+
/**
|
|
6106
|
+
* Checks if a commit source represents an indirect change.
|
|
6107
|
+
*
|
|
6108
|
+
* @param source - The commit source type
|
|
6109
|
+
* @returns True if the commit is indirect (dependency or infrastructure)
|
|
6110
|
+
*/
|
|
6111
|
+
function isIndirectSource(source) {
|
|
6112
|
+
return source === 'indirect-dependency' || source === 'indirect-infra';
|
|
6113
|
+
}
|
|
6114
|
+
/**
|
|
6115
|
+
* Groups classified commits by their section type.
|
|
6116
|
+
*
|
|
6117
|
+
* @param commits - Array of classified commits
|
|
6118
|
+
* @returns Record of section type to classified commits
|
|
6119
|
+
*/
|
|
6120
|
+
function groupClassifiedCommitsBySection(commits) {
|
|
6121
|
+
const groups = {};
|
|
6122
|
+
for (const classified of commits) {
|
|
6123
|
+
const sectionType = COMMIT_TYPE_TO_SECTION[classified.commit.type ?? 'chore'] ?? 'chores';
|
|
6124
|
+
if (!groups[sectionType]) {
|
|
6125
|
+
groups[sectionType] = [];
|
|
6126
|
+
}
|
|
6127
|
+
groups[sectionType].push(classified);
|
|
6128
|
+
}
|
|
6129
|
+
return groups;
|
|
6130
|
+
}
|
|
2985
6131
|
/**
|
|
2986
6132
|
* Groups commits by their section type.
|
|
2987
6133
|
*
|
|
@@ -2999,6 +6145,35 @@ function groupCommitsBySection(commits) {
|
|
|
2999
6145
|
}
|
|
3000
6146
|
return groups;
|
|
3001
6147
|
}
|
|
6148
|
+
/**
|
|
6149
|
+
* Creates a changelog item from a classified commit.
|
|
6150
|
+
*
|
|
6151
|
+
* Applies scope display rules:
|
|
6152
|
+
* - Direct commits: scope omitted (redundant in project changelog)
|
|
6153
|
+
* - Indirect commits: scope preserved (provides context)
|
|
6154
|
+
*
|
|
6155
|
+
* @param classified - The classified commit with source metadata
|
|
6156
|
+
* @returns A changelog item with proper scope handling
|
|
6157
|
+
*/
|
|
6158
|
+
function classifiedCommitToItem(classified) {
|
|
6159
|
+
// Apply scope transformation based on classification
|
|
6160
|
+
const commit = toChangelogCommit(classified);
|
|
6161
|
+
const indirect = isIndirectSource(classified.source);
|
|
6162
|
+
let text = commit.subject;
|
|
6163
|
+
// Add scope prefix if preserved (indirect commits)
|
|
6164
|
+
if (commit.scope) {
|
|
6165
|
+
text = `**${commit.scope}:** ${text}`;
|
|
6166
|
+
}
|
|
6167
|
+
// Add breaking change indicator
|
|
6168
|
+
if (commit.breaking) {
|
|
6169
|
+
text = `⚠️ BREAKING: ${text}`;
|
|
6170
|
+
}
|
|
6171
|
+
return createChangelogItem(text, {
|
|
6172
|
+
source: classified.source,
|
|
6173
|
+
indirect,
|
|
6174
|
+
breaking: commit.breaking,
|
|
6175
|
+
});
|
|
6176
|
+
}
|
|
3002
6177
|
/**
|
|
3003
6178
|
* Creates a changelog item from a conventional commit.
|
|
3004
6179
|
*
|
|
@@ -3044,9 +6219,26 @@ function createGenerateChangelogStep() {
|
|
|
3044
6219
|
}
|
|
3045
6220
|
// Handle case with no commits (e.g., first release)
|
|
3046
6221
|
if (!commits || commits.length === 0) {
|
|
6222
|
+
// Generate compare URL using commit hashes ONLY
|
|
6223
|
+
// Only generate if we have a valid base commit (effectiveBaseCommit will be null if fallback was used)
|
|
6224
|
+
let compareUrl;
|
|
6225
|
+
if (state.repositoryConfig && state.effectiveBaseCommit) {
|
|
6226
|
+
const currentCommit = ctx.git.getHeadHash();
|
|
6227
|
+
compareUrl =
|
|
6228
|
+
createCompareUrl({
|
|
6229
|
+
repository: state.repositoryConfig,
|
|
6230
|
+
fromCommit: state.effectiveBaseCommit,
|
|
6231
|
+
toCommit: currentCommit,
|
|
6232
|
+
}) ?? undefined;
|
|
6233
|
+
}
|
|
6234
|
+
else if (state.publishedCommit && !state.effectiveBaseCommit) {
|
|
6235
|
+
// Log why we're not generating a compare URL
|
|
6236
|
+
ctx.logger.info('Compare URL omitted: published commit not in current history');
|
|
6237
|
+
}
|
|
3047
6238
|
const entry = createChangelogEntry(nextVersion, {
|
|
3048
6239
|
date: createDate().toISOString().split('T')[0],
|
|
3049
6240
|
sections: [createChangelogSection('features', 'Features', [createChangelogItem('Initial release')])],
|
|
6241
|
+
compareUrl,
|
|
3050
6242
|
});
|
|
3051
6243
|
return {
|
|
3052
6244
|
status: 'success',
|
|
@@ -3054,41 +6246,109 @@ function createGenerateChangelogStep() {
|
|
|
3054
6246
|
message: 'Generated initial release changelog entry',
|
|
3055
6247
|
};
|
|
3056
6248
|
}
|
|
3057
|
-
//
|
|
3058
|
-
const
|
|
3059
|
-
// Create sections
|
|
6249
|
+
// Use classification result when available for proper scope handling
|
|
6250
|
+
const { classificationResult } = state;
|
|
3060
6251
|
const sections = [];
|
|
3061
|
-
|
|
3062
|
-
|
|
3063
|
-
|
|
3064
|
-
|
|
3065
|
-
|
|
3066
|
-
|
|
3067
|
-
|
|
3068
|
-
|
|
3069
|
-
|
|
3070
|
-
|
|
3071
|
-
|
|
3072
|
-
|
|
3073
|
-
|
|
3074
|
-
|
|
3075
|
-
|
|
3076
|
-
|
|
3077
|
-
|
|
3078
|
-
|
|
3079
|
-
|
|
3080
|
-
|
|
3081
|
-
|
|
3082
|
-
|
|
3083
|
-
|
|
3084
|
-
|
|
3085
|
-
|
|
6252
|
+
if (classificationResult && classificationResult.included.length > 0) {
|
|
6253
|
+
// Use classified commits for proper scope display rules
|
|
6254
|
+
const classifiedCommits = classificationResult.included;
|
|
6255
|
+
// Separate direct and indirect commits
|
|
6256
|
+
const directCommits = classifiedCommits.filter((c) => !isIndirectSource(c.source));
|
|
6257
|
+
const indirectCommits = classifiedCommits.filter((c) => isIndirectSource(c.source));
|
|
6258
|
+
// Add breaking changes section first if any
|
|
6259
|
+
const breakingCommits = classifiedCommits.filter((c) => c.commit.breaking);
|
|
6260
|
+
if (breakingCommits.length > 0) {
|
|
6261
|
+
sections.push(createChangelogSection('breaking', 'Breaking Changes', breakingCommits.map((c) => {
|
|
6262
|
+
const commit = toChangelogCommit(c);
|
|
6263
|
+
const text = commit.breakingDescription ?? commit.subject;
|
|
6264
|
+
const indirect = isIndirectSource(c.source);
|
|
6265
|
+
return createChangelogItem(commit.scope ? `**${commit.scope}:** ${text}` : text, {
|
|
6266
|
+
source: c.source,
|
|
6267
|
+
indirect,
|
|
6268
|
+
breaking: true,
|
|
6269
|
+
});
|
|
6270
|
+
})));
|
|
6271
|
+
}
|
|
6272
|
+
// Group direct commits by section
|
|
6273
|
+
const groupedDirect = groupClassifiedCommitsBySection(directCommits);
|
|
6274
|
+
// Add other sections in conventional order (direct commits only)
|
|
6275
|
+
const sectionOrder = [
|
|
6276
|
+
{ type: 'features', heading: 'Features' },
|
|
6277
|
+
{ type: 'fixes', heading: 'Bug Fixes' },
|
|
6278
|
+
{ type: 'performance', heading: 'Performance' },
|
|
6279
|
+
{ type: 'documentation', heading: 'Documentation' },
|
|
6280
|
+
{ type: 'refactoring', heading: 'Code Refactoring' },
|
|
6281
|
+
{ type: 'build', heading: 'Build' },
|
|
6282
|
+
{ type: 'ci', heading: 'Continuous Integration' },
|
|
6283
|
+
{ type: 'tests', heading: 'Tests' },
|
|
6284
|
+
{ type: 'chores', heading: 'Chores' },
|
|
6285
|
+
{ type: 'other', heading: 'Other' },
|
|
6286
|
+
];
|
|
6287
|
+
for (const { type: sectionType, heading } of sectionOrder) {
|
|
6288
|
+
const sectionCommits = groupedDirect[sectionType];
|
|
6289
|
+
if (sectionCommits && sectionCommits.length > 0) {
|
|
6290
|
+
sections.push(createChangelogSection(sectionType, heading, sectionCommits.map(classifiedCommitToItem)));
|
|
6291
|
+
}
|
|
6292
|
+
}
|
|
6293
|
+
// Add Dependency Updates section for indirect commits if any
|
|
6294
|
+
if (indirectCommits.length > 0) {
|
|
6295
|
+
sections.push(createChangelogSection('other', // Use 'other' as section type for dependency updates
|
|
6296
|
+
'Dependency Updates', indirectCommits.map((c) => classifiedCommitToItem(c))));
|
|
6297
|
+
}
|
|
6298
|
+
}
|
|
6299
|
+
else {
|
|
6300
|
+
// Fallback: use commits without classification (backward compatibility)
|
|
6301
|
+
const grouped = groupCommitsBySection(commits);
|
|
6302
|
+
// Add breaking changes section first if any
|
|
6303
|
+
const breakingCommits = commits.filter((c) => c.breaking);
|
|
6304
|
+
if (breakingCommits.length > 0) {
|
|
6305
|
+
sections.push(createChangelogSection('breaking', 'Breaking Changes', breakingCommits.map((c) => {
|
|
6306
|
+
const text = c.breakingDescription ?? c.subject;
|
|
6307
|
+
return createChangelogItem(c.scope ? `**${c.scope}:** ${text}` : text);
|
|
6308
|
+
})));
|
|
6309
|
+
}
|
|
6310
|
+
// Add other sections in conventional order
|
|
6311
|
+
const sectionOrder = [
|
|
6312
|
+
{ type: 'features', heading: 'Features' },
|
|
6313
|
+
{ type: 'fixes', heading: 'Bug Fixes' },
|
|
6314
|
+
{ type: 'performance', heading: 'Performance' },
|
|
6315
|
+
{ type: 'documentation', heading: 'Documentation' },
|
|
6316
|
+
{ type: 'refactoring', heading: 'Code Refactoring' },
|
|
6317
|
+
{ type: 'build', heading: 'Build' },
|
|
6318
|
+
{ type: 'ci', heading: 'Continuous Integration' },
|
|
6319
|
+
{ type: 'tests', heading: 'Tests' },
|
|
6320
|
+
{ type: 'chores', heading: 'Chores' },
|
|
6321
|
+
{ type: 'other', heading: 'Other' },
|
|
6322
|
+
];
|
|
6323
|
+
for (const { type: sectionType, heading } of sectionOrder) {
|
|
6324
|
+
const sectionCommits = grouped[sectionType];
|
|
6325
|
+
if (sectionCommits && sectionCommits.length > 0) {
|
|
6326
|
+
sections.push(createChangelogSection(sectionType, heading, sectionCommits.map(commitToItem)));
|
|
6327
|
+
}
|
|
3086
6328
|
}
|
|
3087
6329
|
}
|
|
6330
|
+
// Generate compare URL using commit hashes ONLY
|
|
6331
|
+
// Only generate if we have a valid base commit (effectiveBaseCommit will be null if fallback was used)
|
|
6332
|
+
let compareUrl;
|
|
6333
|
+
if (state.repositoryConfig && state.effectiveBaseCommit) {
|
|
6334
|
+
const currentCommit = ctx.git.getHeadHash();
|
|
6335
|
+
compareUrl =
|
|
6336
|
+
createCompareUrl({
|
|
6337
|
+
repository: state.repositoryConfig,
|
|
6338
|
+
fromCommit: state.effectiveBaseCommit,
|
|
6339
|
+
toCommit: currentCommit,
|
|
6340
|
+
}) ?? undefined;
|
|
6341
|
+
ctx.logger.debug(`Compare URL: ${state.effectiveBaseCommit.slice(0, 7)}...${currentCommit.slice(0, 7)}`);
|
|
6342
|
+
}
|
|
6343
|
+
else if (state.publishedCommit && !state.effectiveBaseCommit) {
|
|
6344
|
+
// Log why we're not generating a compare URL
|
|
6345
|
+
ctx.logger.info('Compare URL omitted: published commit not in current history');
|
|
6346
|
+
}
|
|
3088
6347
|
// Create the entry
|
|
3089
6348
|
const entry = createChangelogEntry(nextVersion, {
|
|
3090
6349
|
date: createDate().toISOString().split('T')[0],
|
|
3091
6350
|
sections,
|
|
6351
|
+
compareUrl,
|
|
3092
6352
|
});
|
|
3093
6353
|
return {
|
|
3094
6354
|
status: 'success',
|
|
@@ -3144,7 +6404,28 @@ function createWriteChangelogStep() {
|
|
|
3144
6404
|
}
|
|
3145
6405
|
// Parse existing and add entry
|
|
3146
6406
|
const existing = parseChangelog(existingContent);
|
|
3147
|
-
const
|
|
6407
|
+
const isPendingPublication = state.isPendingPublication === true;
|
|
6408
|
+
let changelog = existing;
|
|
6409
|
+
// Clean up stacked entries when in pending publication state
|
|
6410
|
+
if (isPendingPublication && state.publishedVersion) {
|
|
6411
|
+
const publishedVer = parseVersion(state.publishedVersion);
|
|
6412
|
+
if (publishedVer.success && publishedVer.version) {
|
|
6413
|
+
const pubVer = publishedVer.version;
|
|
6414
|
+
const toRemove = changelog.entries
|
|
6415
|
+
.filter((e) => !e.unreleased)
|
|
6416
|
+
.filter((e) => {
|
|
6417
|
+
const ver = parseVersion(e.version);
|
|
6418
|
+
return ver.success && ver.version && gt(ver.version, pubVer);
|
|
6419
|
+
})
|
|
6420
|
+
.map((e) => e.version);
|
|
6421
|
+
if (toRemove.length > 0) {
|
|
6422
|
+
logger.info(`Removing stacked entries: ${toRemove.join(', ')}`);
|
|
6423
|
+
changelog = removeEntries(changelog, toRemove);
|
|
6424
|
+
}
|
|
6425
|
+
}
|
|
6426
|
+
}
|
|
6427
|
+
// Add entry (replaceExisting handles case where nextVersion entry already exists)
|
|
6428
|
+
const updated = addEntry(changelog, changelogEntry, { replaceExisting: isPendingPublication });
|
|
3148
6429
|
const serialized = serializeChangelog(updated);
|
|
3149
6430
|
tree.write(changelogPath, serialized);
|
|
3150
6431
|
return {
|
|
@@ -3181,23 +6462,26 @@ function createUpdatePackageStep() {
|
|
|
3181
6462
|
return createSkippedResult('No version bump needed');
|
|
3182
6463
|
}
|
|
3183
6464
|
const packageJsonPath = `${projectRoot}/package.json`;
|
|
6465
|
+
logger.debug(`Reading package.json from: ${packageJsonPath}`);
|
|
3184
6466
|
// Read package.json
|
|
3185
6467
|
let content;
|
|
3186
6468
|
try {
|
|
3187
6469
|
content = tree.read(packageJsonPath, 'utf-8') ?? '';
|
|
3188
6470
|
if (!content) {
|
|
6471
|
+
logger.error(`package.json not found at ${packageJsonPath}`);
|
|
3189
6472
|
return {
|
|
3190
6473
|
status: 'failed',
|
|
3191
|
-
error: createError(
|
|
3192
|
-
message:
|
|
6474
|
+
error: createError(`package.json not found at ${packageJsonPath}`),
|
|
6475
|
+
message: `Could not read package.json at ${packageJsonPath}`,
|
|
3193
6476
|
};
|
|
3194
6477
|
}
|
|
3195
6478
|
}
|
|
3196
6479
|
catch (error) {
|
|
6480
|
+
logger.error(`Failed to read package.json at ${packageJsonPath}: ${error}`);
|
|
3197
6481
|
return {
|
|
3198
6482
|
status: 'failed',
|
|
3199
6483
|
error: error instanceof Error ? error : createError(String(error)),
|
|
3200
|
-
message:
|
|
6484
|
+
message: `Failed to read package.json at ${packageJsonPath}`,
|
|
3201
6485
|
};
|
|
3202
6486
|
}
|
|
3203
6487
|
// Parse and update version
|
|
@@ -3508,6 +6792,7 @@ exports.CREATE_COMMIT_STEP_ID = CREATE_COMMIT_STEP_ID;
|
|
|
3508
6792
|
exports.CREATE_TAG_STEP_ID = CREATE_TAG_STEP_ID;
|
|
3509
6793
|
exports.FETCH_REGISTRY_STEP_ID = FETCH_REGISTRY_STEP_ID;
|
|
3510
6794
|
exports.GENERATE_CHANGELOG_STEP_ID = GENERATE_CHANGELOG_STEP_ID;
|
|
6795
|
+
exports.RESOLVE_REPOSITORY_STEP_ID = RESOLVE_REPOSITORY_STEP_ID;
|
|
3511
6796
|
exports.UPDATE_PACKAGES_STEP_ID = UPDATE_PACKAGES_STEP_ID;
|
|
3512
6797
|
exports.createAnalyzeCommitsStep = createAnalyzeCommitsStep;
|
|
3513
6798
|
exports.createCalculateBumpStep = createCalculateBumpStep;
|
|
@@ -3517,6 +6802,7 @@ exports.createFetchRegistryStep = createFetchRegistryStep;
|
|
|
3517
6802
|
exports.createGenerateChangelogStep = createGenerateChangelogStep;
|
|
3518
6803
|
exports.createGitCommitStep = createGitCommitStep;
|
|
3519
6804
|
exports.createPushTagStep = createPushTagStep;
|
|
6805
|
+
exports.createResolveRepositoryStep = createResolveRepositoryStep;
|
|
3520
6806
|
exports.createTagStep = createTagStep;
|
|
3521
6807
|
exports.createUpdatePackageStep = createUpdatePackageStep;
|
|
3522
6808
|
exports.createWriteChangelogStep = createWriteChangelogStep;
|